Bug 1127235 - refactor stream clock calculation in MediaDecoderStateMachine. r=roc.

JW Wang 2015-03-03 10:43:38 +08:00
parent b318db0383
commit 7cb6284555
4 changed files with 54 additions and 128 deletions
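In rough terms, this patch replaces the sync-point bookkeeping (mSyncPointInMediaStream, mSyncPointInDecodedStream and GetCurrentTimeViaMediaStreamSync) with a clock owned by DecodedStreamData itself: the timestamp of the first packet written plus the stream's last output time. Below is a minimal editor's sketch of the before/after derivation; the standalone helper names are illustrative only, while the field names come from the diff.

#include <cstdint>

// Before: a sync point pairing a MediaStream position with a decoded-stream
// position was recorded when playback (re)started; the media time was the
// saved decoded-stream position plus the stream's progress since that point.
int64_t CurrentTimeViaSyncPoint(int64_t aLastOutputTime,         // stream->GetLastOutputTime()
                                int64_t aSyncPointInMediaStream,
                                int64_t aSyncPointInDecodedStream)
{
  int64_t streamDelta = aLastOutputTime - aSyncPointInMediaStream;
  return aSyncPointInDecodedStream + streamDelta;
}

// After: DecodedStreamData::GetClock() returns mInitialTime (timestamp of the
// first audio/video packet written) plus the listener's last output time, and
// MediaDecoderStateMachine::GetClock() uses it whenever audio is captured:
//   clock_time = mStartTime + mDecoder->GetDecodedStream()->GetClock();
int64_t DecodedStreamClock(int64_t aInitialTime, int64_t aLastOutputTime)
{
  return aInitialTime + aLastOutputTime;
}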

View File

@ -421,10 +421,8 @@ void MediaDecoder::DestroyDecodedStream()
MOZ_ASSERT(NS_IsMainThread());
GetReentrantMonitor().AssertCurrentThreadIn();
if (GetDecodedStream()) {
GetStateMachine()->ResyncMediaStreamClock();
} else {
// Avoid the redundant blocking to output stream.
// Avoid the redundant blocking to output stream.
if (!GetDecodedStream()) {
return;
}
@ -457,9 +455,6 @@ void MediaDecoder::UpdateStreamBlockingForStateMachinePlaying()
if (!mDecodedStream) {
return;
}
if (mDecoderStateMachine) {
mDecoderStateMachine->SetSyncPointForMediaStream();
}
bool blockForStateMachineNotPlaying =
mDecoderStateMachine && !mDecoderStateMachine->IsPlaying() &&
mDecoderStateMachine->GetState() != MediaDecoderStateMachine::DECODER_STATE_COMPLETED;
@ -629,11 +624,6 @@ void MediaDecoder::Shutdown()
mShuttingDown = true;
{
ReentrantMonitorAutoEnter mon(GetReentrantMonitor());
DestroyDecodedStream();
}
// This changes the decoder state to SHUTDOWN and does other things
// necessary to unblock the state machine thread if it's blocked, so
// the asynchronous shutdown in nsDestroyStateMachine won't deadlock.
@ -659,6 +649,12 @@ void MediaDecoder::Shutdown()
MediaDecoder::~MediaDecoder()
{
MOZ_ASSERT(NS_IsMainThread());
{
// Don't destroy the decoded stream until the destructor, in order to keep the
// invariant that the decoded stream is always available in capture mode.
ReentrantMonitorAutoEnter mon(GetReentrantMonitor());
DestroyDecodedStream();
}
MediaMemoryTracker::RemoveMediaDecoder(this);
UnpinForSeek();
MOZ_COUNT_DTOR(MediaDecoder);
@ -1353,12 +1349,7 @@ void MediaDecoder::PlaybackPositionChanged()
// and we don't want to override the seek algorithm and change the
// current time after the seek has started but before it has
// completed.
if (GetDecodedStream()) {
mCurrentTime = mDecoderStateMachine->GetCurrentTimeViaMediaStreamSync()/
static_cast<double>(USECS_PER_S);
} else {
mCurrentTime = mDecoderStateMachine->GetCurrentTime();
}
mCurrentTime = mDecoderStateMachine->GetCurrentTime();
}
mDecoderStateMachine->ClearPositionChangeFlag();
}

View File

@ -392,8 +392,13 @@ public:
~DecodedStreamData();
// microseconds
int64_t GetLastOutputTime() { return mListener->GetLastOutputTime(); }
bool IsFinished() { return mListener->IsFinishedOnMainThread(); }
bool IsFinished() const {
return mListener->IsFinishedOnMainThread();
}
int64_t GetClock() const {
return mInitialTime + mListener->GetLastOutputTime();
}
// The following group of fields are protected by the decoder's monitor
// and can be read or written on any thread.
@ -401,7 +406,7 @@ public:
int64_t mAudioFramesWritten;
// Saved value of aInitialTime. Timestamp of the first audio and/or
// video packet written.
int64_t mInitialTime; // microseconds
const int64_t mInitialTime; // microseconds
// mNextVideoTime is the end timestamp for the last packet sent to the stream.
// Therefore video packets starting at or after this time need to be copied
// to the output stream.
@ -454,13 +459,6 @@ public:
MutexAutoLock lock(mMutex);
mStream = nullptr;
}
bool SetFinishedOnMainThread(bool aFinished)
{
MutexAutoLock lock(mMutex);
bool result = !mStreamFinishedOnMainThread;
mStreamFinishedOnMainThread = aFinished;
return result;
}
bool IsFinishedOnMainThread()
{
MutexAutoLock lock(mMutex);

View File

@ -205,8 +205,6 @@ MediaDecoderStateMachine::MediaDecoderStateMachine(MediaDecoder* aDecoder,
aDecoder->GetReentrantMonitor(),
&MediaDecoderStateMachine::TimeoutExpired, this, aRealTime)),
mState(DECODER_STATE_DECODING_NONE),
mSyncPointInMediaStream(-1),
mSyncPointInDecodedStream(-1),
mPlayDuration(0),
mStartTime(-1),
mEndTime(-1),
@ -344,9 +342,7 @@ void MediaDecoderStateMachine::SendStreamAudio(AudioData* aAudio,
// Write silence to catch up
VERBOSE_LOG("writing %lld frames of silence to MediaStream", silentFrames);
AudioSegment silence;
StreamTime duration = aStream->mStream->TicksToTimeRoundDown(
mInfo.mAudio.mRate, silentFrames);
silence.InsertNullDataAtStart(duration);
silence.InsertNullDataAtStart(silentFrames);
aStream->mAudioFramesWritten += silentFrames;
audioWrittenOffset += silentFrames;
aOutput->AppendFrom(&silence);
@ -634,8 +630,7 @@ MediaDecoderStateMachine::NeedToSkipToNextKeyframe()
// Don't skip frame for video-only decoded stream because the clock time of
// the stream relies on the video frame.
if (mDecoder->GetDecodedStream() && !HasAudio()) {
DECODER_LOG("Video-only decoded stream, set skipToNextKeyFrame to false");
if (mAudioCaptured && !HasAudio()) {
return false;
}
@ -1184,45 +1179,6 @@ void MediaDecoderStateMachine::StopPlayback()
GetStateMachineThread()->Dispatch(event, NS_DISPATCH_NORMAL);
}
void MediaDecoderStateMachine::SetSyncPointForMediaStream()
{
AssertCurrentThreadInMonitor();
DecodedStreamData* stream = mDecoder->GetDecodedStream();
if (!stream) {
return;
}
mSyncPointInMediaStream = stream->GetLastOutputTime();
TimeDuration timeSincePlayStart = mPlayStartTime.IsNull() ? TimeDuration(0) :
TimeStamp::Now() - mPlayStartTime;
mSyncPointInDecodedStream = mStartTime + mPlayDuration +
timeSincePlayStart.ToMicroseconds();
DECODER_LOG("SetSyncPointForMediaStream MediaStream=%lldus, DecodedStream=%lldus",
mSyncPointInMediaStream, mSyncPointInDecodedStream);
}
void MediaDecoderStateMachine::ResyncMediaStreamClock()
{
AssertCurrentThreadInMonitor();
MOZ_ASSERT(mDecoder->GetDecodedStream());
if (IsPlaying()) {
SetPlayStartTime(TimeStamp::Now());
mPlayDuration = GetCurrentTimeViaMediaStreamSync() - mStartTime;
}
}
int64_t MediaDecoderStateMachine::GetCurrentTimeViaMediaStreamSync() const
{
AssertCurrentThreadInMonitor();
NS_ASSERTION(mSyncPointInDecodedStream >= 0, "Should have set up sync point");
DecodedStreamData* stream = mDecoder->GetDecodedStream();
int64_t streamDelta = stream->GetLastOutputTime() - mSyncPointInMediaStream;
return mSyncPointInDecodedStream + streamDelta;
}
void MediaDecoderStateMachine::MaybeStartPlayback()
{
AssertCurrentThreadInMonitor();
@ -1360,21 +1316,11 @@ void MediaDecoderStateMachine::SetAudioCaptured()
{
NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
AssertCurrentThreadInMonitor();
if (!mAudioCaptured && !mStopAudioThread) {
// Make sure the state machine runs as soon as possible. That will
// stop the audio sink.
// If mStopAudioThread is true then we're already stopping the audio sink
// and since we set mAudioCaptured to true, nothing can start it again.
if (!mAudioCaptured) {
mAudioCaptured = true;
// Schedule the state machine to send stream data as soon as possible.
ScheduleStateMachine();
if (HasAudio()) {
// The audio clock is active so force a resync now in case the audio
// clock is ahead of us (observed on Android), since after mAudioCaptured
// gets set we can't call GetAudioClock().
ResyncAudioClock();
}
}
mAudioCaptured = true;
}
double MediaDecoderStateMachine::GetCurrentTime() const
@ -1798,9 +1744,15 @@ MediaDecoderStateMachine::StartSeek(const SeekTarget& aTarget)
DECODER_LOG("Changed state to SEEKING (to %lld)", mSeekTarget.mTime);
SetState(DECODER_STATE_SEEKING);
// TODO: We should re-create the decoded stream after the seek completes, as we
// do for the audio thread, since it is not until then that we know which
// position we are seeking to as far as fast-seek is concerned. It also fixes
// the problem where the stream clock seems to go backwards during seeking.
if (mAudioCaptured) {
mDecoder->RecreateDecodedStream(seekTime - mStartTime);
}
ScheduleStateMachine();
}
@ -2699,9 +2651,6 @@ MediaDecoderStateMachine::SeekCompleted()
// Ensure timestamps are up to date.
UpdatePlaybackPositionInternal(newCurrentTime);
if (mDecoder->GetDecodedStream()) {
SetSyncPointForMediaStream();
}
// Try to decode another frame to detect if we're at the end...
DECODER_LOG("Seek completed, mCurrentFrameTime=%lld", mCurrentFrameTime);
@ -3083,7 +3032,7 @@ MediaDecoderStateMachine::GetAudioClock() const
// audio sink to ensure that it doesn't get destroyed on the audio sink
// while we're using it.
AssertCurrentThreadInMonitor();
MOZ_ASSERT(HasAudio() && !mAudioCaptured);
MOZ_ASSERT(HasAudio() && !mAudioCompleted);
return mAudioStartTime +
(mAudioSink ? mAudioSink->GetPosition() : 0);
}
@ -3115,17 +3064,20 @@ int64_t MediaDecoderStateMachine::GetClock() const
if (!IsPlaying()) {
clock_time = mPlayDuration + mStartTime;
} else {
if (mDecoder->GetDecodedStream()) {
clock_time = GetCurrentTimeViaMediaStreamSync();
} else if (HasAudio() && !mAudioCompleted && !mAudioCaptured) {
if (mAudioCaptured) {
clock_time = mStartTime + mDecoder->GetDecodedStream()->GetClock();
} else if (HasAudio() && !mAudioCompleted) {
clock_time = GetAudioClock();
} else {
// Audio is disabled on this system. Sync to the system clock.
clock_time = GetVideoStreamPosition();
}
// Ensure the clock can never go backwards.
NS_ASSERTION(GetMediaTime() <= clock_time || mPlaybackRate <= 0,
"Clock should go forwards if the playback rate is > 0.");
// Note we allow the clock to go backwards in capture mode during seeking.
NS_ASSERTION(GetMediaTime() <= clock_time ||
mPlaybackRate <= 0 ||
(mAudioCaptured && mState == DECODER_STATE_SEEKING),
"Clock should go forwards.");
}
return clock_time;
@ -3225,6 +3177,23 @@ void MediaDecoderStateMachine::AdvanceFrame()
MaybeStartPlayback();
}
// Cap the current time to the larger of the audio and video end time.
// This ensures that if we're running off the system clock, we don't
// advance the clock to after the media end time.
if (mVideoFrameEndTime != -1 || mAudioEndTime != -1) {
// These will be non -1 if we've displayed a video frame, or played an audio frame.
int64_t t = std::min(clock_time, std::max(mVideoFrameEndTime, mAudioEndTime));
// FIXME: Bug 1091422 - chained ogg files hit this assertion.
//MOZ_ASSERT(t >= GetMediaTime());
if (t > GetMediaTime()) {
UpdatePlaybackPosition(t);
}
}
// Note we have to update playback position before releasing the monitor.
// Otherwise, MediaDecoder::AddOutputStream could kick in when we are outside
// the monitor and get a stale value from GetCurrentTimeUs(), which hits the
// assertion in GetClock().
if (currentFrame) {
// Decode one frame and display it.
int64_t delta = currentFrame->mTime - clock_time;
@ -3253,19 +3222,6 @@ void MediaDecoderStateMachine::AdvanceFrame()
currentFrame = nullptr;
}
// Cap the current time to the larger of the audio and video end time.
// This ensures that if we're running off the system clock, we don't
// advance the clock to after the media end time.
if (mVideoFrameEndTime != -1 || mAudioEndTime != -1) {
// These will be non -1 if we've displayed a video frame, or played an audio frame.
int64_t t = std::min(clock_time, std::max(mVideoFrameEndTime, mAudioEndTime));
// FIXME: Bug 1091422 - chained ogg files hit this assertion.
//MOZ_ASSERT(t >= GetMediaTime());
if (t > GetMediaTime()) {
UpdatePlaybackPosition(t);
}
}
// If the number of audio/video frames queued has changed, either by
// this function popping and playing a video frame, or by the audio
// thread popping and playing an audio frame, we may need to update our

View File

@ -350,19 +350,6 @@ public:
mDecoder = nullptr;
}
// If we're playing into a MediaStream, record the current point in the
// MediaStream and the current point in our media resource so later we can
// convert MediaStream playback positions to media resource positions. Best to
// call this while we're not playing (while the MediaStream is blocked). Can
// be called on any thread with the decoder monitor held.
void SetSyncPointForMediaStream();
// Called when the decoded stream is destroyed. |mPlayStartTime| and
// |mPlayDuration| are updated to provide a good base for calculating video
// stream time using the system clock.
void ResyncMediaStreamClock();
int64_t GetCurrentTimeViaMediaStreamSync() const;
// Copy queued audio/video data in the reader to any output MediaStreams that
// need it.
void SendStreamData();
@ -804,12 +791,6 @@ protected:
// Accessed only via the state machine thread. Must be set via SetPlayStartTime.
TimeStamp mPlayStartTime;
// When we start writing decoded data to a new DecodedDataStream, or we
// restart writing due to PlaybackStarted(), we record where we are in the
// MediaStream and what that corresponds to in the media.
int64_t mSyncPointInMediaStream; // microseconds
int64_t mSyncPointInDecodedStream; // microseconds
// The amount of time we've already spent playing the media. The current
// playback position is therefore |Now() - mPlayStartTime +
// mPlayDuration|, which must be adjusted by mStartTime if used with media