Bug 1061046. Part 6: Remove MediaDecoderStateMachine's USECS_PER_S video rate and use the graph rate instead. r=karlt

Robert O'Callahan 2014-09-18 11:50:01 +12:00
parent 0346ea3022
commit c6d35e9658
2 changed files with 20 additions and 10 deletions
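
The change is essentially a unit conversion: decoded video timestamps are in microseconds, and the video track used to be created with a fixed RATE_VIDEO of USECS_PER_S so microsecond values could be written to it directly; now the track is created at the MediaStreamGraph's rate, so microseconds must first be rescaled to graph ticks. A minimal sketch of that conversion, using a stand-in StreamTime typedef and an assumed 48 kHz graph rate rather than Gecko's real types:

#include <cstdint>
#include <cstdio>

// Stand-ins for Gecko's types: StreamTime here is a tick count at the
// graph's sample rate rather than a count of microseconds.
typedef int64_t StreamTime;
static const int64_t USECS_PER_S = 1000000;

// Same formula as the MicrosecondsToStreamTimeRoundDown() helper added by
// the patch: scale by the graph rate, then truncate.
static StreamTime
MicrosecondsToStreamTimeRoundDown(int64_t aMicroseconds, int64_t aGraphRate)
{
  return (aMicroseconds * aGraphRate) / USECS_PER_S;
}

int main()
{
  const int64_t graphRate = 48000; // assumed graph rate in ticks per second
  // A 40 ms video frame becomes 1920 graph ticks at 48 kHz, instead of the
  // 40000 "ticks" it occupied on the old USECS_PER_S-rate track.
  printf("%lld\n", (long long)MicrosecondsToStreamTimeRoundDown(40000, graphRate));
  return 0;
}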

MediaDecoderStateMachine.cpp

@@ -341,18 +341,22 @@ void MediaDecoderStateMachine::SendStreamAudio(AudioData* aAudio,
   aOutput->ApplyVolume(mVolume);
 }
 
-static void WriteVideoToMediaStream(layers::Image* aImage,
-                                    int64_t aDuration,
+static void WriteVideoToMediaStream(MediaStream* aStream,
+                                    layers::Image* aImage,
+                                    int64_t aEndMicroseconds,
+                                    int64_t aStartMicroseconds,
                                     const IntSize& aIntrinsicSize,
                                     VideoSegment* aOutput)
 {
   nsRefPtr<layers::Image> image = aImage;
-  aOutput->AppendFrame(image.forget(), aDuration, aIntrinsicSize);
+  StreamTime duration =
+      aStream->MicrosecondsToStreamTimeRoundDown(aEndMicroseconds) -
+      aStream->MicrosecondsToStreamTimeRoundDown(aStartMicroseconds);
+  aOutput->AppendFrame(image.forget(), duration, aIntrinsicSize);
 }
 
 static const TrackID TRACK_AUDIO = 1;
 static const TrackID TRACK_VIDEO = 2;
-static const TrackRate RATE_VIDEO = USECS_PER_S;
 
 void MediaDecoderStateMachine::SendStreamData()
 {
@@ -395,7 +399,8 @@ void MediaDecoderStateMachine::SendStreamData()
     }
     if (mInfo.HasVideo()) {
       VideoSegment* video = new VideoSegment();
-      mediaStream->AddTrack(TRACK_VIDEO, RATE_VIDEO, 0, video);
+      mediaStream->AddTrack(TRACK_VIDEO,
+                            mediaStream->GraphRate(), 0, video);
       stream->mStream->DispatchWhenNotEnoughBuffered(TRACK_VIDEO,
           GetStateMachineThread(), GetWakeDecoderRunnable());
     }
@@ -437,16 +442,16 @@ void MediaDecoderStateMachine::SendStreamData()
                       mediaStream, v->mTime - stream->mNextVideoTime);
           // Write last video frame to catch up. mLastVideoImage can be null here
           // which is fine, it just means there's no video.
-          WriteVideoToMediaStream(stream->mLastVideoImage,
-            v->mTime - stream->mNextVideoTime, stream->mLastVideoImageDisplaySize,
+          WriteVideoToMediaStream(mediaStream, stream->mLastVideoImage,
+            v->mTime, stream->mNextVideoTime, stream->mLastVideoImageDisplaySize,
             &output);
           stream->mNextVideoTime = v->mTime;
         }
         if (stream->mNextVideoTime < v->GetEndTime()) {
           VERBOSE_LOG("writing video frame %lldus to MediaStream %p for %lldus",
                       v->mTime, mediaStream, v->GetEndTime() - stream->mNextVideoTime);
-          WriteVideoToMediaStream(v->mImage,
-            v->GetEndTime() - stream->mNextVideoTime, v->mDisplay,
+          WriteVideoToMediaStream(mediaStream, v->mImage,
+            v->GetEndTime(), stream->mNextVideoTime, v->mDisplay,
             &output);
           stream->mNextVideoTime = v->GetEndTime();
           stream->mLastVideoImage = v->mImage;
@@ -464,7 +469,8 @@ void MediaDecoderStateMachine::SendStreamData()
        stream->mHaveSentFinishVideo = true;
       }
       endPosition = std::max(endPosition,
-          mediaStream->TicksToTimeRoundDown(RATE_VIDEO, stream->mNextVideoTime - stream->mInitialTime));
+          mediaStream->MicrosecondsToStreamTimeRoundDown(
+              stream->mNextVideoTime - stream->mInitialTime));
     }
 
     if (!stream->mHaveSentFinish) {
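
Note that the rewritten WriteVideoToMediaStream takes both the start and end timestamps in microseconds and converts each endpoint with MicrosecondsToStreamTimeRoundDown before subtracting, instead of converting a precomputed duration; presumably this keeps per-frame rounding error from accumulating as successive frames are appended. A rough standalone illustration of the difference, using made-up numbers (a hypothetical 44.1 kHz graph rate and ~29.97 fps frame durations), not Gecko code:

#include <cstdint>
#include <cstdio>

// Hypothetical helper mirroring the patch's conversion formula.
static int64_t
ToTicks(int64_t aMicroseconds, int64_t aGraphRate)
{
  return (aMicroseconds * aGraphRate) / 1000000;
}

int main()
{
  const int64_t graphRate = 44100;  // assumed graph rate
  const int64_t frameUsecs = 33367; // ~29.97 fps frame duration, rounded
  int64_t naive = 0;                // convert each frame duration on its own
  int64_t endpoint = 0;             // convert endpoints, append the difference
  int64_t prevUsecs = 0;
  for (int i = 1; i <= 300; ++i) {  // roughly ten seconds of video
    int64_t endUsecs = i * frameUsecs;
    naive += ToTicks(frameUsecs, graphRate);
    endpoint += ToTicks(endUsecs, graphRate) - ToTicks(prevUsecs, graphRate);
    prevUsecs = endUsecs;
  }
  // naive ends up ~145 ticks (~3 ms) short after 300 frames; endpoint
  // telescopes to exactly ToTicks(300 * frameUsecs, graphRate).
  printf("naive=%lld endpoint=%lld exact=%lld\n",
         (long long)naive, (long long)endpoint,
         (long long)ToTicks(300 * frameUsecs, graphRate));
  return 0;
}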

MediaStreamGraph.h

@@ -495,6 +495,10 @@ public:
   {
     return TimeToTicksRoundDown(1000000, aTime);
   }
+  StreamTime MicrosecondsToStreamTimeRoundDown(int64_t aMicroseconds)
+  {
+    return (aMicroseconds*mBuffer.GraphRate())/1000000;
+  }
   TrackTicks TimeToTicksRoundUp(TrackRate aRate, StreamTime aTime)
   {
     return RateConvertTicksRoundUp(aRate, mBuffer.GraphRate(), aTime);