Bug 598242 - Normalize WebM buffered TimeRanges. r=kinetik a=blocking2.0

Chris Pearce 2010-10-07 11:58:36 +13:00
parent f3d96bd23a
commit 9e4234dc37
9 changed files with 44 additions and 18 deletions
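In brief: before this patch the WebM reader reported buffered TimeRanges in the stream's raw timebase, so a file whose first sample has a non-zero timestamp (like the new split.webm test) reported ranges offset by that start time. The patch threads the first sample's timestamp into the buffered-range calculation and subtracts it, normalizing reported ranges into [0, duration]. A hypothetical example: if the first sample is at 0.5 s and clusters covering raw times [0.5 s, 2.5 s] are cached, buffered should now report [0.0 s, 2.0 s].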

View File

@@ -152,7 +152,7 @@ nsBuiltinDecoderStateMachine::nsBuiltinDecoderStateMachine(nsBuiltinDecoder* aDe
mPositionChangeQueued(PR_FALSE),
mAudioCompleted(PR_FALSE),
mBufferExhausted(PR_FALSE),
-mGotDurationFromHeader(PR_FALSE),
+mGotDurationFromMetaData(PR_FALSE),
mStopDecodeThreads(PR_TRUE),
mEventManager(aDecoder)
{
@@ -1287,7 +1287,7 @@ void nsBuiltinDecoderStateMachine::AdvanceFrame()
if (mVideoFrameEndTime != -1 || mAudioEndTime != -1) {
// These will be non -1 if we've displayed a video frame, or played an audio sample.
clock_time = NS_MIN(clock_time, NS_MAX(mVideoFrameEndTime, mAudioEndTime));
-if (clock_time - mStartTime > mCurrentFrameTime) {
+if (clock_time > GetMediaTime()) {
// Only update the playback position if the clock time is greater
// than the previous playback position. The audio clock can
// sometimes report a time less than it has previously reported in
@@ -1349,7 +1349,7 @@ VideoData* nsBuiltinDecoderStateMachine::FindStartTime()
}
if (startTime != 0) {
mStartTime = startTime;
-if (mGotDurationFromHeader) {
+if (mGotDurationFromMetaData) {
NS_ASSERTION(mEndTime != -1,
"We should have mEndTime as supplied duration here");
// We were given a duration from a Content-Duration HTTP header.
@@ -1426,6 +1426,8 @@ void nsBuiltinDecoderStateMachine::LoadMetadata()
mDecoder->StartProgressUpdates();
const nsVideoInfo& info = mReader->GetInfo();
+mGotDurationFromMetaData = (GetDuration() != -1);
if (!info.mHasVideo && !info.mHasAudio) {
mState = DECODER_STATE_SHUTDOWN;
nsCOMPtr<nsIRunnable> event =
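A note on the AdvanceFrame() change above: assuming GetMediaTime() is defined as the stream start time plus the current frame time (a definition not shown in this diff), the new condition is algebraically equivalent to the old one:

  // Assumed definition (not part of this diff):
  PRInt64 GetMediaTime() const { return mStartTime + mCurrentFrameTime; }

  // Equivalence:
  //   clock_time - mStartTime > mCurrentFrameTime
  //   <=> clock_time > mStartTime + mCurrentFrameTime
  //   <=> clock_time > GetMediaTime()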

View File

@@ -245,6 +245,11 @@ public:
mReader->NotifyDataArrived(aBuffer, aLength, aOffset);
}
+PRInt64 GetEndMediaTime() const {
+mDecoder->GetMonitor().AssertCurrentThreadIn();
+return mEndTime;
+}
protected:
// Returns the number of unplayed ms of audio we've got decoded and/or
@@ -476,9 +481,9 @@ protected:
// Synchronised via the decoder monitor.
PRPackedBool mBufferExhausted;
-// PR_TRUE if mDuration has a value obtained from an HTTP header.
-// Accessed on the state machine thread.
-PRPackedBool mGotDurationFromHeader;
+// PR_TRUE if mDuration has a value obtained from an HTTP header, or from
+// the media index/metadata. Accessed on the state machine thread.
+PRPackedBool mGotDurationFromMetaData;
// PR_FALSE while decode threads should be running. Accessed on audio,
// state machine and decode threads. Synchronised by decoder monitor.
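Note that the new GetEndMediaTime() accessor asserts the decoder monitor is already held rather than acquiring it, so cross-thread callers must enter the monitor first. A hypothetical caller sketch (names assumed; the real call site is in the nsWebMReader change below):

  {
    MonitorAutoEnter mon(mDecoder->GetMonitor());        // take the decoder monitor
    PRInt64 endTime = stateMachine->GetEndMediaTime();   // assertion now passes
    // ... use endTime while the monitor is held ...
  }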

View File

@@ -50,14 +50,12 @@ void nsOggDecoderStateMachine::LoadMetadata()
{
nsBuiltinDecoderStateMachine::LoadMetadata();
// TODO: Get the duration from Skeleton index, if available.
// Get the duration from the media file. We only do this if the
// content length of the resource is known as we need to seek
// to the end of the file to get the last time field. We also
-// only do this if the resource is seekable and if we haven't
-// already obtained the duration via an HTTP header.
-mGotDurationFromHeader = (GetDuration() != -1);
+// only do this if the resource is seekable.
if (mState != DECODER_STATE_SHUTDOWN &&
mDecoder->GetCurrentStream()->GetLength() >= 0 &&
mSeekable &&

View File

@@ -192,6 +192,7 @@ _TEST_FILES += \
bug580982.webm \
chain.ogv \
dirac.ogg \
+split.webm \
seek.ogv \
seek.webm \
seek.yuv \

View File

@@ -107,6 +107,9 @@ var gPlayTests = [
// Test playback of a webm file
{ name:"seek.webm", type:"video/webm", duration:3.966 },
+// Test playback of a WebM file with non-zero start time.
+{ name:"split.webm", type:"video/webm", duration:1.967 },
// Test playback of a raw file
{ name:"seek.yuv", type:"video/x-raw-yuv", duration:1.833 },
@@ -204,6 +207,7 @@ var gSeekTests = [
{ name:"320x240.ogv", type:"video/ogg", duration:0.233 },
{ name:"seek.webm", type:"video/webm", duration:3.966 },
{ name:"bug516323.indexed.ogv", type:"video/ogg", duration:4.208 },
{ name:"split.webm", type:"video/webm", duration:1.967 },
{ name:"bogus.duh", type:"bogus/duh", duration:123 }
];

Binary file not shown.

View File

@@ -205,7 +205,8 @@ void nsWebMBufferedParser::Append(const unsigned char* aBuffer, PRUint32 aLength
void nsWebMBufferedState::CalculateBufferedForRange(nsTimeRanges* aBuffered,
PRInt64 aStartOffset, PRInt64 aEndOffset,
-PRUint64 aTimecodeScale)
+PRUint64 aTimecodeScale,
+PRInt64 aStartTimeOffsetNS)
{
// Find the first nsWebMTimeDataOffset at or after aStartOffset.
PRUint32 start;
@@ -239,8 +240,12 @@ void nsWebMBufferedState::CalculateBufferedForRange(nsTimeRanges* aBuffered,
"Must have found greatest nsWebMTimeDataOffset for end");
}
-float startTime = mTimeMapping[start].mTimecode * aTimecodeScale / NS_PER_S;
-float endTime = mTimeMapping[end].mTimecode * aTimecodeScale / NS_PER_S;
+// The timestamp of the first media sample, in ns. We must subtract this
+// from the ranges' start and end timestamps, so that those timestamps are
+// normalized in the range [0,duration].
+float startTime = (mTimeMapping[start].mTimecode * aTimecodeScale - aStartTimeOffsetNS) / NS_PER_S;
+float endTime = (mTimeMapping[end].mTimecode * aTimecodeScale - aStartTimeOffsetNS) / NS_PER_S;
aBuffered->Add(startTime, endTime);
}
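A worked example of the normalization above, with hypothetical values (1,000,000 ns is the Matroska default timecode scale): take aTimecodeScale = 1,000,000 and a first sample at 500 ms, i.e. aStartTimeOffsetNS = 500,000,000:

  // startTime: timecode 500  -> (500  * 1000000 - 500000000) / 1e9 = 0.0 s
  // endTime:   timecode 2500 -> (2500 * 1000000 - 500000000) / 1e9 = 2.0 s

So a cached range that previously reported [0.5 s, 2.5 s] is now reported as [0.0 s, 2.0 s], inside [0, duration].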

View File

@@ -223,7 +223,8 @@ public:
void NotifyDataArrived(const char* aBuffer, PRUint32 aLength, PRUint32 aOffset);
void CalculateBufferedForRange(nsTimeRanges* aBuffered,
PRInt64 aStartOffset, PRInt64 aEndOffset,
-PRUint64 aTimecodeScale);
+PRUint64 aTimecodeScale,
+PRInt64 aStartTimeOffsetNS);
private:
// Sorted (by offset) map of data offsets to timecodes. Populated

View File

@@ -399,6 +399,7 @@ PRBool nsWebMReader::DecodeAudioPacket(nestegg_packet* aPacket)
mAudioSamples = 0;
}
+PRInt32 total_samples = 0;
for (PRUint32 i = 0; i < count; ++i) {
unsigned char* data;
size_t length;
@@ -423,7 +424,6 @@ PRBool nsWebMReader::DecodeAudioPacket(nestegg_packet* aPacket)
float** pcm = 0;
PRInt32 samples = 0;
-PRInt32 total_samples = 0;
while ((samples = vorbis_synthesis_pcmout(&mVorbisDsp, &pcm)) > 0) {
float* buffer = new float[samples * mChannels];
float* p = buffer;
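The two DecodeAudioPacket() hunks above hoist total_samples out of the per-packet loop, so the count accumulates across every packet decoded in the call rather than being reset each iteration. A sketch of the shape of the fix (illustrative only; loop body elided):

  PRInt32 total_samples = 0;              // now outside: survives the loop
  for (PRUint32 i = 0; i < count; ++i) {
    // previously declared (and reset to 0) here, inside the loop
    // ... decode packet i, accumulating into total_samples ...
  }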
@@ -597,10 +597,15 @@ PRBool nsWebMReader::DecodeVideoFrame(PRBool &aKeyframeSkip,
}
mVideoPackets.PushFront(next_packet);
} else {
r = nestegg_duration(mContext, &next_tstamp);
if (r == -1) {
+MonitorAutoExit exitMon(mMonitor);
+MonitorAutoEnter decoderMon(mDecoder->GetMonitor());
+nsBuiltinDecoderStateMachine* s =
+static_cast<nsBuiltinDecoderStateMachine*>(mDecoder->GetStateMachine());
+PRInt64 endTime = s->GetEndMediaTime();
+if (endTime == -1) {
return PR_FALSE;
}
+next_tstamp = endTime * NS_PER_MS;
}
}
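The fallback above runs while the reader's monitor (mMonitor) is held, so it swaps monitors before touching the state machine. A minimal sketch of the pattern, assuming Mozilla's RAII monitor helpers (MonitorAutoExit releases in its constructor and re-enters in its destructor):

  {
    MonitorAutoExit exitMon(mMonitor);                    // drop the reader monitor
    MonitorAutoEnter decoderMon(mDecoder->GetMonitor());  // then take the decoder monitor
    // ... only the decoder monitor is held here ...
  }  // decoder monitor released, reader monitor re-entered

Exiting the reader monitor first means the two locks are never held at once, avoiding lock-order inversions. Note also the units: GetEndMediaTime() returns milliseconds, while next_tstamp is in nanoseconds, hence the NS_PER_MS conversion.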
@@ -718,15 +723,20 @@ nsresult nsWebMReader::GetBuffered(nsTimeRanges* aBuffered, PRInt64 aStartTime)
if (stream->IsDataCachedToEndOfStream(0)) {
uint64_t duration = 0;
if (nestegg_duration(mContext, &duration) == 0) {
-aBuffered->Add(aStartTime / MS_PER_S, duration / NS_PER_S);
+aBuffered->Add(0, duration / NS_PER_S);
}
} else {
PRInt64 startOffset = stream->GetNextCachedData(0);
+PRInt64 startTimeOffsetNS = aStartTime * NS_PER_MS;
while (startOffset >= 0) {
PRInt64 endOffset = stream->GetCachedDataEnd(startOffset);
NS_ASSERTION(startOffset < endOffset, "Cached range invalid");
-mBufferedState->CalculateBufferedForRange(aBuffered, startOffset, endOffset, timecodeScale);
+mBufferedState->CalculateBufferedForRange(aBuffered,
+startOffset,
+endOffset,
+timecodeScale,
+startTimeOffsetNS);
// Advance to the next cached data range.
startOffset = stream->GetNextCachedData(endOffset);
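With these changes both paths of GetBuffered() report normalized ranges: the fully-cached path adds [0, duration] directly, and the partially-cached path subtracts the start-time offset from each range. A hypothetical call sketch (names assumed):

  nsTimeRanges buffered;
  // aStartTime is the first sample's timestamp in milliseconds; with it
  // subtracted, every reported range falls inside [0, duration].
  reader->GetBuffered(&buffered, startTimeMs);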