2010-04-01 20:03:07 -07:00
|
|
|
/* vim:set ts=2 sw=2 sts=2 et cindent: */
|
2012-05-21 04:12:37 -07:00
|
|
|
/* This Source Code Form is subject to the terms of the Mozilla Public
|
|
|
|
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
|
|
|
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
2010-04-01 20:03:07 -07:00
|
|
|
|
2013-05-02 17:39:19 -07:00
|
|
|
#ifdef XP_WIN
|
|
|
|
// Include Windows headers required for enabling high precision timers.
|
2013-05-06 02:33:00 -07:00
|
|
|
#include "windows.h"
|
|
|
|
#include "mmsystem.h"
|
2013-05-02 17:39:19 -07:00
|
|
|
#endif
|
2014-02-17 14:53:53 -08:00
|
|
|
|
2012-12-14 15:58:45 -08:00
|
|
|
#include "mozilla/DebugOnly.h"
|
2013-07-30 07:25:31 -07:00
|
|
|
#include <stdint.h>
|
2012-12-14 15:58:45 -08:00
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
#include "MediaDecoderStateMachine.h"
|
2013-12-19 19:24:42 -08:00
|
|
|
#include "AudioStream.h"
|
2010-04-01 20:03:07 -07:00
|
|
|
#include "nsTArray.h"
|
2012-11-14 11:46:40 -08:00
|
|
|
#include "MediaDecoder.h"
|
|
|
|
#include "MediaDecoderReader.h"
|
2010-04-01 20:03:07 -07:00
|
|
|
#include "mozilla/mozalloc.h"
|
2010-04-27 01:53:44 -07:00
|
|
|
#include "VideoUtils.h"
|
2013-03-02 11:14:44 -08:00
|
|
|
#include "mozilla/dom/TimeRanges.h"
|
2012-01-19 10:30:29 -08:00
|
|
|
#include "nsDeque.h"
|
2012-04-29 20:12:42 -07:00
|
|
|
#include "AudioSegment.h"
|
|
|
|
#include "VideoSegment.h"
|
2012-08-20 21:06:46 -07:00
|
|
|
#include "ImageContainer.h"
|
2013-09-05 13:25:17 -07:00
|
|
|
#include "nsComponentManagerUtils.h"
|
|
|
|
#include "nsITimer.h"
|
2013-12-17 19:59:11 -08:00
|
|
|
#include "nsContentUtils.h"
|
|
|
|
#include "MediaShutdownManager.h"
|
2014-02-17 14:53:52 -08:00
|
|
|
#include "SharedThreadPool.h"
|
|
|
|
#include "MediaTaskQueue.h"
|
2014-02-17 14:53:53 -08:00
|
|
|
#include "nsIEventTarget.h"
|
2012-11-30 05:17:54 -08:00
|
|
|
#include "prenv.h"
|
2011-09-26 17:25:41 -07:00
|
|
|
#include "mozilla/Preferences.h"
|
2014-02-09 00:04:38 -08:00
|
|
|
#include "gfx2DGlue.h"
|
|
|
|
|
2013-01-15 04:22:03 -08:00
|
|
|
#include <algorithm>
|
2010-04-01 20:03:07 -07:00
|
|
|
|
2012-11-14 11:45:33 -08:00
|
|
|
namespace mozilla {
|
|
|
|
|
2012-12-04 02:59:36 -08:00
|
|
|
using namespace mozilla::layers;
|
2012-11-15 19:25:26 -08:00
|
|
|
using namespace mozilla::dom;
|
2014-02-09 00:04:38 -08:00
|
|
|
using namespace mozilla::gfx;
|
2010-04-01 20:03:07 -07:00
|
|
|
|
2014-04-23 02:29:04 -07:00
|
|
|
// avoid redefined macro in unified build
|
|
|
|
#undef DECODER_LOG
|
|
|
|
#undef VERBOSE_LOG
|
|
|
|
|
2010-04-01 20:03:07 -07:00
|
|
|
#ifdef PR_LOGGING
|
2012-11-14 11:46:40 -08:00
|
|
|
extern PRLogModuleInfo* gMediaDecoderLog;
|
2014-04-23 02:29:04 -07:00
|
|
|
#define DECODER_LOG(type, msg, ...) \
|
|
|
|
PR_LOG(gMediaDecoderLog, type, ("Decoder=%p " msg, mDecoder.get(), ##__VA_ARGS__))
|
|
|
|
#define VERBOSE_LOG(msg, ...) \
|
|
|
|
PR_BEGIN_MACRO \
|
|
|
|
if (!PR_GetEnv("MOZ_QUIET")) { \
|
|
|
|
DECODER_LOG(PR_LOG_DEBUG, msg, ##__VA_ARGS__); \
|
|
|
|
} \
|
|
|
|
PR_END_MACRO
|
2010-04-01 20:03:07 -07:00
|
|
|
#else
|
2014-04-23 02:29:04 -07:00
|
|
|
#define DECODER_LOG(type, msg, ...)
|
|
|
|
#define VERBOSE_LOG(msg, ...)
|
2010-04-01 20:03:07 -07:00
|
|
|
#endif
|
|
|
|
|
2014-02-05 15:11:25 -08:00
|
|
|
// GetCurrentTime is defined in winbase.h as zero argument macro forwarding to
|
|
|
|
// GetTickCount() and conflicts with MediaDecoderStateMachine::GetCurrentTime
|
|
|
|
// implementation. With unified builds, putting this in headers is not enough.
|
|
|
|
#ifdef GetCurrentTime
|
|
|
|
#undef GetCurrentTime
|
|
|
|
#endif
|
|
|
|
|
2012-06-06 16:43:25 -07:00
|
|
|
// Wait this number of seconds when buffering, then leave and play
|
2010-04-01 20:03:07 -07:00
|
|
|
// as best as we can if the required amount of data hasn't been
|
|
|
|
// retrieved.
|
2012-08-22 08:56:38 -07:00
|
|
|
static const uint32_t BUFFERING_WAIT_S = 30;
|
2010-04-01 20:03:07 -07:00
|
|
|
|
2011-04-13 15:12:23 -07:00
|
|
|
// If audio queue has less than this many usecs of decoded audio, we won't risk
|
2010-04-27 01:53:44 -07:00
|
|
|
// trying to decode the video, we'll skip decoding video up to the next
|
2010-11-28 12:06:38 -08:00
|
|
|
// keyframe. We may increase this value for an individual decoder if we
|
|
|
|
// encounter video frames which take a long time to decode.
|
2012-08-22 08:56:38 -07:00
|
|
|
static const uint32_t LOW_AUDIO_USECS = 300000;
|
2010-04-27 01:53:44 -07:00
|
|
|
|
2011-04-13 15:12:23 -07:00
|
|
|
// If more than this many usecs of decoded audio is queued, we'll hold off
|
2010-11-28 12:06:38 -08:00
|
|
|
// decoding more audio. If we increase the low audio threshold (see
|
2011-04-13 15:12:23 -07:00
|
|
|
// LOW_AUDIO_USECS above) we'll also increase this value to ensure it's not
|
2010-11-28 12:06:38 -08:00
|
|
|
// less than the low audio threshold.
|
2012-08-22 08:56:38 -07:00
|
|
|
const int64_t AMPLE_AUDIO_USECS = 1000000;
|
2010-05-12 17:59:42 -07:00
|
|
|
|
2014-03-20 15:47:17 -07:00
|
|
|
// When we're only playing audio and we don't have a video stream, we divide
|
|
|
|
// AMPLE_AUDIO_USECS and LOW_AUDIO_USECS by the following value. This reduces
|
|
|
|
// the amount of decoded audio we buffer, reducing our memory usage. We only
|
|
|
|
// need to decode far ahead when we're decoding video using software decoding,
|
|
|
|
// as otherwise a long video decode could cause an audio underrun.
|
|
|
|
const int64_t NO_VIDEO_AMPLE_AUDIO_DIVISOR = 8;
|
|
|
|
|
2013-12-19 19:24:42 -08:00
|
|
|
// Maximum number of bytes we'll allocate and write at once to the audio
|
|
|
|
// hardware when the audio stream contains missing frames and we're
|
|
|
|
// writing silence in order to fill the gap. We limit our silence-writes
|
|
|
|
// to 32KB in order to avoid allocating an impossibly large chunk of
|
|
|
|
// memory if we encounter a large chunk of silence.
|
|
|
|
const uint32_t SILENCE_BYTES_CHUNK = 32 * 1024;
|
|
|
|
|
2010-04-27 01:53:44 -07:00
|
|
|
// If we have fewer than LOW_VIDEO_FRAMES decoded frames, and
|
2014-03-10 20:44:08 -07:00
|
|
|
// we're not "prerolling video", we'll skip the video up to the next keyframe
|
2010-04-27 01:53:44 -07:00
|
|
|
// which is at or after the current playback position.
|
2012-08-22 08:56:38 -07:00
|
|
|
static const uint32_t LOW_VIDEO_FRAMES = 1;
|
2010-04-27 01:53:44 -07:00
|
|
|
|
2010-05-30 21:02:00 -07:00
|
|
|
// Arbitrary "frame duration" when playing only audio.
|
2011-04-13 15:12:23 -07:00
|
|
|
static const int AUDIO_DURATION_USECS = 40000;
|
2010-05-30 21:02:00 -07:00
|
|
|
|
2011-04-13 15:12:23 -07:00
|
|
|
// If we increase our "low audio threshold" (see LOW_AUDIO_USECS above), we
|
2010-11-28 12:06:38 -08:00
|
|
|
// use this as a factor in all our calculations. Increasing this will cause
|
|
|
|
// us to be more likely to increase our low audio threshold, and to
|
|
|
|
// increase it by more.
|
|
|
|
static const int THRESHOLD_FACTOR = 2;
|
|
|
|
|
2011-03-23 15:28:57 -07:00
|
|
|
// If we have less than this much undecoded data available, we'll consider
|
|
|
|
// ourselves to be running low on undecoded data. We determine how much
|
|
|
|
// undecoded data we have remaining using the reader's GetBuffered()
|
|
|
|
// implementation.
|
2012-08-22 08:56:38 -07:00
|
|
|
static const int64_t LOW_DATA_THRESHOLD_USECS = 5000000;
|
2010-11-28 12:06:38 -08:00
|
|
|
|
2011-04-13 15:12:23 -07:00
|
|
|
// LOW_DATA_THRESHOLD_USECS needs to be greater than AMPLE_AUDIO_USECS, otherwise
|
2011-03-23 15:28:57 -07:00
|
|
|
// the skip-to-keyframe logic can activate when we're running low on data.
|
2013-11-11 00:03:59 -08:00
|
|
|
static_assert(LOW_DATA_THRESHOLD_USECS > AMPLE_AUDIO_USECS,
|
|
|
|
"LOW_DATA_THRESHOLD_USECS is too small");
|
2010-11-28 12:06:38 -08:00
|
|
|
|
2011-04-13 15:12:23 -07:00
|
|
|
// Amount of excess usecs of data to add in to the "should we buffer" calculation.
|
2012-08-22 08:56:38 -07:00
|
|
|
static const uint32_t EXHAUSTED_DATA_MARGIN_USECS = 60000;
|
2011-03-23 15:28:57 -07:00
|
|
|
|
2011-04-13 15:12:23 -07:00
|
|
|
// If we enter buffering within QUICK_BUFFER_THRESHOLD_USECS seconds of starting
|
2011-03-23 15:28:57 -07:00
|
|
|
// decoding, we'll enter "quick buffering" mode, which exits a lot sooner than
|
|
|
|
// normal buffering mode. This exists so that if the decode-ahead exhausts the
|
|
|
|
// downloaded data while decode/playback is just starting up (for example
|
|
|
|
// after a seek while the media is still playing, or when playing a media
|
|
|
|
// as soon as it's load started), we won't necessarily stop for 30s and wait
|
|
|
|
// for buffering. We may actually be able to playback in this case, so exit
|
|
|
|
// buffering early and try to play. If it turns out we can't play, we'll fall
|
|
|
|
// back to buffering normally.
|
2012-08-22 08:56:38 -07:00
|
|
|
static const uint32_t QUICK_BUFFER_THRESHOLD_USECS = 2000000;
|
2011-03-23 15:28:57 -07:00
|
|
|
|
|
|
|
// If we're quick buffering, we'll remain in buffering mode while we have less than
|
2011-04-13 15:12:23 -07:00
|
|
|
// QUICK_BUFFERING_LOW_DATA_USECS of decoded data available.
|
2012-08-22 08:56:38 -07:00
|
|
|
static const uint32_t QUICK_BUFFERING_LOW_DATA_USECS = 1000000;
|
2011-03-23 15:28:57 -07:00
|
|
|
|
2011-04-13 15:12:23 -07:00
|
|
|
// If QUICK_BUFFERING_LOW_DATA_USECS is > AMPLE_AUDIO_USECS, we won't exit
|
2011-03-23 15:28:57 -07:00
|
|
|
// quick buffering in a timely fashion, as the decode pauses when it
|
2011-04-13 15:12:23 -07:00
|
|
|
// reaches AMPLE_AUDIO_USECS decoded data, and thus we'll never reach
|
|
|
|
// QUICK_BUFFERING_LOW_DATA_USECS.
|
2013-11-11 00:03:59 -08:00
|
|
|
static_assert(QUICK_BUFFERING_LOW_DATA_USECS <= AMPLE_AUDIO_USECS,
|
|
|
|
"QUICK_BUFFERING_LOW_DATA_USECS is too large");
|
2011-03-23 15:28:57 -07:00
|
|
|
|
2013-12-19 19:24:42 -08:00
|
|
|
// This value has been chosen empirically.
|
|
|
|
static const uint32_t AUDIOSTREAM_MIN_WRITE_BEFORE_START_USECS = 200000;
|
|
|
|
|
2013-09-09 17:45:33 -07:00
|
|
|
// The amount of instability we tollerate in calls to
|
|
|
|
// MediaDecoderStateMachine::UpdateEstimatedDuration(); changes of duration
|
|
|
|
// less than this are ignored, as they're assumed to be the result of
|
|
|
|
// instability in the duration estimation.
|
|
|
|
static const int64_t ESTIMATED_DURATION_FUZZ_FACTOR_USECS = USECS_PER_S / 2;
|
|
|
|
|
2012-08-22 08:56:38 -07:00
|
|
|
// Converts a count of microseconds into a TimeDuration.
static TimeDuration UsecsToDuration(int64_t aUsecs) {
  const double milliseconds = static_cast<double>(aUsecs) / USECS_PER_MS;
  return TimeDuration::FromMilliseconds(milliseconds);
}
|
|
|
|
|
2012-08-22 08:56:38 -07:00
|
|
|
// Converts a TimeDuration into microseconds, truncating toward zero.
static int64_t DurationToUsecs(TimeDuration aDuration) {
  const double usecs = aDuration.ToSeconds() * USECS_PER_S;
  return static_cast<int64_t>(usecs);
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
// Constructs the state machine for aDecoder, using aReader to demux/decode.
// Must be called on the main thread. When aRealTime is true (and the
// "media.realtime_decoder.enabled" pref allows it), buffering and preroll
// thresholds are zeroed so playback starts with minimal latency.
MediaDecoderStateMachine::MediaDecoderStateMachine(MediaDecoder* aDecoder,
                                                   MediaDecoderReader* aReader,
                                                   bool aRealTime) :
  mDecoder(aDecoder),
  mState(DECODER_STATE_DECODING_METADATA),
  mInRunningStateMachine(false),
  mSyncPointInMediaStream(-1),
  mSyncPointInDecodedStream(-1),
  mResetPlayStartTime(false),
  mPlayDuration(0),
  // -1 sentinels mean "not yet known" for all the time fields below.
  mStartTime(-1),
  mEndTime(-1),
  mFragmentEndTime(-1),
  mReader(aReader),
  mCurrentFrameTime(0),
  mAudioStartTime(-1),
  mAudioEndTime(-1),
  mVideoFrameEndTime(-1),
  mVolume(1.0),
  mPlaybackRate(1.0),
  mPreservesPitch(true),
  mBasePosition(0),
  // Overwritten below from the "media.video-queue.default-size" pref.
  mAmpleVideoFrames(2),
  mLowAudioThresholdUsecs(LOW_AUDIO_USECS),
  mAmpleAudioThresholdUsecs(AMPLE_AUDIO_USECS),
  mDispatchedAudioDecodeTask(false),
  mDispatchedVideoDecodeTask(false),
  mAudioCaptured(false),
  mTransportSeekable(true),
  mMediaSeekable(true),
  mPositionChangeQueued(false),
  mAudioCompleted(false),
  mGotDurationFromMetaData(false),
  mDispatchedEventToDecode(false),
  mStopAudioThread(true),
  mQuickBuffering(false),
  mMinimizePreroll(false),
  mDecodeThreadWaiting(false),
  mRealTime(aRealTime),
  mDispatchedDecodeMetadataTask(false),
  mDispatchedDecodeSeekTask(false),
  mLastFrameStatus(MediaDecoderOwner::NEXT_FRAME_UNINITIALIZED),
  mTimerId(0)
{
  MOZ_COUNT_CTOR(MediaDecoderStateMachine);
  NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");

  // Only enable realtime mode when "media.realtime_decoder.enabled" is true.
  if (Preferences::GetBool("media.realtime_decoder.enabled", false) == false)
    mRealTime = false;

  // Pref-configurable video queue size, clamped to at least 3 frames.
  mAmpleVideoFrames =
    std::max<uint32_t>(Preferences::GetUint("media.video-queue.default-size", 10), 3);

  // Realtime playback disables buffering waits and low-data thresholds
  // entirely; otherwise use the compile-time defaults above.
  mBufferingWait = mRealTime ? 0 : BUFFERING_WAIT_S;
  mLowDataThresholdUsecs = mRealTime ? 0 : LOW_DATA_THRESHOLD_USECS;

  // Preroll targets: half the ample video queue, and twice the low-audio
  // threshold of audio, unless we're in realtime mode (no preroll).
  mVideoPrerollFrames = mRealTime ? 0 : mAmpleVideoFrames / 2;
  mAudioPrerollUsecs = mRealTime ? 0 : LOW_AUDIO_USECS * 2;

#ifdef XP_WIN
  // Ensure high precision timers are enabled on Windows, otherwise the state
  // machine thread isn't woken up at reliable intervals to set the next frame,
  // and we drop frames while painting. Note that multiple calls to this
  // function per-process is OK, provided each call is matched by a corresponding
  // timeEndPeriod() call.
  timeBeginPeriod(1);
#endif
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
// Destructor. Must run on the main thread, after SHUTDOWN has released the
// decode task queue and timer; asserts verify that teardown already happened.
MediaDecoderStateMachine::~MediaDecoderStateMachine()
{
  MOZ_ASSERT(NS_IsMainThread(), "Should be on main thread.");
  MOZ_COUNT_DTOR(MediaDecoderStateMachine);
  NS_ASSERTION(!mPendingWakeDecoder.get(),
               "WakeDecoder should have been revoked already");

  MOZ_ASSERT(!mDecodeTaskQueue, "Should be released in SHUTDOWN");
  // No need to cancel the timer here for we've done that in SHUTDOWN.
  MOZ_ASSERT(!mTimer, "Should be released in SHUTDOWN");
  // Drop our reference to the reader explicitly before the member destructors run.
  mReader = nullptr;

#ifdef XP_WIN
  // Matches the timeBeginPeriod(1) in the constructor.
  timeEndPeriod(1);
#endif
}
|
|
|
|
|
2014-04-27 18:12:50 -07:00
|
|
|
bool MediaDecoderStateMachine::HasFutureAudio() {
|
2013-11-03 14:11:09 -08:00
|
|
|
AssertCurrentThreadInMonitor();
|
2010-09-14 16:24:47 -07:00
|
|
|
NS_ASSERTION(HasAudio(), "Should only call HasFutureAudio() when we have audio");
|
|
|
|
// We've got audio ready to play if:
|
|
|
|
// 1. We've not completed playback of audio, and
|
|
|
|
// 2. we either have more than the threshold of decoded audio available, or
|
|
|
|
// we've completely decoded all audio (but not finished playing it yet
|
|
|
|
// as per 1).
|
|
|
|
return !mAudioCompleted &&
|
2014-04-27 18:12:50 -07:00
|
|
|
(AudioDecodedUsecs() > LOW_AUDIO_USECS * mPlaybackRate || AudioQueue().IsFinished());
|
2010-05-12 17:59:42 -07:00
|
|
|
}
|
|
|
|
|
2014-04-27 18:12:50 -07:00
|
|
|
bool MediaDecoderStateMachine::HaveNextFrameData() {
|
2013-11-03 14:11:09 -08:00
|
|
|
AssertCurrentThreadInMonitor();
|
2010-09-14 16:24:47 -07:00
|
|
|
return (!HasAudio() || HasFutureAudio()) &&
|
2014-04-27 18:12:50 -07:00
|
|
|
(!HasVideo() || VideoQueue().GetSize() > 0);
|
2010-05-12 17:59:42 -07:00
|
|
|
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
int64_t MediaDecoderStateMachine::GetDecodedAudioDuration() {
|
2014-03-10 20:44:09 -07:00
|
|
|
NS_ASSERTION(OnDecodeThread() || OnStateMachineThread(),
|
|
|
|
"Should be on decode thread or state machine thread");
|
2013-11-03 14:11:09 -08:00
|
|
|
AssertCurrentThreadInMonitor();
|
2014-04-27 18:12:50 -07:00
|
|
|
int64_t audioDecoded = AudioQueue().Duration();
|
2011-01-12 17:06:15 -08:00
|
|
|
if (mAudioEndTime != -1) {
|
|
|
|
audioDecoded += mAudioEndTime - GetMediaTime();
|
|
|
|
}
|
|
|
|
return audioDecoded;
|
|
|
|
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
// Appends the audio from aAudio to aOutput for capture into aStream's
// MediaStream, writing silence to fill any gap before the packet and
// splitting the first packet if capture starts mid-packet. Packets at or
// before the last processed time are ignored. Caller must hold the decoder
// monitor; runs on the decode or state machine thread.
void MediaDecoderStateMachine::SendStreamAudio(AudioData* aAudio,
                                               DecodedStreamData* aStream,
                                               AudioSegment* aOutput)
{
  NS_ASSERTION(OnDecodeThread() ||
               OnStateMachineThread(), "Should be on decode thread or state machine thread");
  AssertCurrentThreadInMonitor();

  if (aAudio->mTime <= aStream->mLastAudioPacketTime) {
    // ignore packet that we've already processed
    return;
  }
  aStream->mLastAudioPacketTime = aAudio->mTime;
  aStream->mLastAudioPacketEndTime = aAudio->GetEndTime();

  // This logic has to mimic AudioLoop closely to make sure we write
  // the exact same silences
  CheckedInt64 audioWrittenOffset = UsecsToFrames(mInfo.mAudio.mRate,
      aStream->mInitialTime + mStartTime) + aStream->mAudioFramesWritten;
  CheckedInt64 frameOffset = UsecsToFrames(mInfo.mAudio.mRate, aAudio->mTime);
  // Checked arithmetic: bail out silently on overflow rather than writing
  // garbage offsets into the stream.
  if (!audioWrittenOffset.isValid() || !frameOffset.isValid())
    return;
  if (audioWrittenOffset.value() < frameOffset.value()) {
    // Write silence to catch up
    VERBOSE_LOG("writing %d frames of silence to MediaStream",
                int32_t(frameOffset.value() - audioWrittenOffset.value()));
    AudioSegment silence;
    silence.InsertNullDataAtStart(frameOffset.value() - audioWrittenOffset.value());
    aStream->mAudioFramesWritten += silence.GetDuration();
    aOutput->AppendFrom(&silence);
  }

  int64_t offset;
  if (aStream->mAudioFramesWritten == 0) {
    NS_ASSERTION(frameOffset.value() <= audioWrittenOffset.value(),
                 "Otherwise we'd have taken the write-silence path");
    // We're starting in the middle of a packet. Split the packet.
    offset = audioWrittenOffset.value() - frameOffset.value();
  } else {
    // Write the entire packet.
    offset = 0;
  }

  // The whole packet lies before our write position: nothing left to write.
  if (offset >= aAudio->mFrames)
    return;

  aAudio->EnsureAudioBuffer();
  nsRefPtr<SharedBuffer> buffer = aAudio->mAudioBuffer;
  AudioDataValue* bufferData = static_cast<AudioDataValue*>(buffer->Data());
  // Build one pointer per channel into the (planar) sample buffer, skipping
  // `offset` frames at the start of each channel.
  nsAutoTArray<const AudioDataValue*,2> channels;
  for (uint32_t i = 0; i < aAudio->mChannels; ++i) {
    channels.AppendElement(bufferData + i*aAudio->mFrames + offset);
  }
  aOutput->AppendFrames(buffer.forget(), channels, aAudio->mFrames);
  VERBOSE_LOG("writing %d frames of data to MediaStream for AudioData at %lld",
              aAudio->mFrames - int32_t(offset), aAudio->mTime);
  aStream->mAudioFramesWritten += aAudio->mFrames - int32_t(offset);
}
|
|
|
|
|
2012-12-18 20:48:32 -08:00
|
|
|
static void WriteVideoToMediaStream(layers::Image* aImage,
|
2014-02-09 00:04:38 -08:00
|
|
|
int64_t aDuration,
|
|
|
|
const IntSize& aIntrinsicSize,
|
2012-04-29 20:12:42 -07:00
|
|
|
VideoSegment* aOutput)
|
|
|
|
{
|
2012-12-18 20:48:32 -08:00
|
|
|
nsRefPtr<layers::Image> image = aImage;
|
2014-02-09 00:04:38 -08:00
|
|
|
aOutput->AppendFrame(image.forget(), aDuration, aIntrinsicSize);
|
2012-04-29 20:12:42 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
// Track IDs used for the audio and video tracks added to the captured
// SourceMediaStream.
static const TrackID TRACK_AUDIO = 1;
static const TrackID TRACK_VIDEO = 2;
// Video track rate: one tick per microsecond.
static const TrackRate RATE_VIDEO = USECS_PER_S;
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
// Feeds decoded audio and video into the decoder's captured MediaStream:
// lazily initializes the stream's tracks, appends any newly decoded data,
// ends tracks when their queues finish, advances the known-tracks time,
// and finishes the stream once all tracks are done. Also discards audio
// packets that every consuming stream has already read. Caller must hold
// the decoder monitor; runs on the decode or state machine thread.
void MediaDecoderStateMachine::SendStreamData()
{
  NS_ASSERTION(OnDecodeThread() ||
               OnStateMachineThread(), "Should be on decode thread or state machine thread");
  AssertCurrentThreadInMonitor();

  DecodedStreamData* stream = mDecoder->GetDecodedStream();
  if (!stream)
    return;

  // Nothing to send until metadata has been decoded (track info is unknown).
  if (mState == DECODER_STATE_DECODING_METADATA)
    return;

  // If there's still an audio thread alive, then we can't send any stream
  // data yet since both SendStreamData and the audio thread want to be in
  // charge of popping the audio queue. We're waiting for the audio thread
  // to die before sending anything to our stream.
  if (mAudioThread)
    return;

  int64_t minLastAudioPacketTime = INT64_MAX;
  // "finished" == every present track's decode queue has reached end of stream.
  bool finished =
      (!mInfo.HasAudio() || AudioQueue().IsFinished()) &&
      (!mInfo.HasVideo() || VideoQueue().IsFinished());
  if (mDecoder->IsSameOriginMedia()) {
    SourceMediaStream* mediaStream = stream->mStream;
    StreamTime endPosition = 0;

    // One-time setup: create the audio/video tracks and arrange to be woken
    // (via the wake-decoder runnable) when the stream runs low on data.
    if (!stream->mStreamInitialized) {
      if (mInfo.HasAudio()) {
        AudioSegment* audio = new AudioSegment();
        mediaStream->AddTrack(TRACK_AUDIO, mInfo.mAudio.mRate, 0, audio);
        stream->mStream->DispatchWhenNotEnoughBuffered(TRACK_AUDIO,
            GetStateMachineThread(), GetWakeDecoderRunnable());
      }
      if (mInfo.HasVideo()) {
        VideoSegment* video = new VideoSegment();
        mediaStream->AddTrack(TRACK_VIDEO, RATE_VIDEO, 0, video);
        stream->mStream->DispatchWhenNotEnoughBuffered(TRACK_VIDEO,
            GetStateMachineThread(), GetWakeDecoderRunnable());
      }
      stream->mStreamInitialized = true;
    }

    if (mInfo.HasAudio()) {
      nsAutoTArray<AudioData*,10> audio;
      // It's OK to hold references to the AudioData because while audio
      // is captured, only the decoder thread pops from the queue (see below).
      AudioQueue().GetElementsAfter(stream->mLastAudioPacketTime, &audio);
      AudioSegment output;
      for (uint32_t i = 0; i < audio.Length(); ++i) {
        SendStreamAudio(audio[i], stream, &output);
      }
      if (output.GetDuration() > 0) {
        mediaStream->AppendToTrack(TRACK_AUDIO, &output);
      }
      if (AudioQueue().IsFinished() && !stream->mHaveSentFinishAudio) {
        mediaStream->EndTrack(TRACK_AUDIO);
        stream->mHaveSentFinishAudio = true;
      }
      minLastAudioPacketTime = std::min(minLastAudioPacketTime, stream->mLastAudioPacketTime);
      endPosition = std::max(endPosition,
          mediaStream->TicksToTimeRoundDown(mInfo.mAudio.mRate,
                                            stream->mAudioFramesWritten));
    }

    if (mInfo.HasVideo()) {
      nsAutoTArray<VideoData*,10> video;
      // It's OK to hold references to the VideoData only the decoder thread
      // pops from the queue.
      VideoQueue().GetElementsAfter(stream->mNextVideoTime, &video);
      VideoSegment output;
      for (uint32_t i = 0; i < video.Length(); ++i) {
        VideoData* v = video[i];
        if (stream->mNextVideoTime < v->mTime) {
          VERBOSE_LOG("writing last video to MediaStream %p for %lldus",
                      mediaStream, v->mTime - stream->mNextVideoTime);
          // Write last video frame to catch up. mLastVideoImage can be null here
          // which is fine, it just means there's no video.
          WriteVideoToMediaStream(stream->mLastVideoImage,
            v->mTime - stream->mNextVideoTime, stream->mLastVideoImageDisplaySize,
              &output);
          stream->mNextVideoTime = v->mTime;
        }
        if (stream->mNextVideoTime < v->GetEndTime()) {
          VERBOSE_LOG("writing video frame %lldus to MediaStream %p for %lldus",
                      v->mTime, mediaStream, v->GetEndTime() - stream->mNextVideoTime);
          WriteVideoToMediaStream(v->mImage,
              v->GetEndTime() - stream->mNextVideoTime, v->mDisplay,
              &output);
          stream->mNextVideoTime = v->GetEndTime();
          stream->mLastVideoImage = v->mImage;
          stream->mLastVideoImageDisplaySize = v->mDisplay;
        } else {
          // Frame ends at or before the stream's write position: nothing to add.
          VERBOSE_LOG("skipping writing video frame %lldus (end %lldus) to MediaStream",
                      v->mTime, v->GetEndTime());
        }
      }
      if (output.GetDuration() > 0) {
        mediaStream->AppendToTrack(TRACK_VIDEO, &output);
      }
      if (VideoQueue().IsFinished() && !stream->mHaveSentFinishVideo) {
        mediaStream->EndTrack(TRACK_VIDEO);
        stream->mHaveSentFinishVideo = true;
      }
      endPosition = std::max(endPosition,
          mediaStream->TicksToTimeRoundDown(RATE_VIDEO, stream->mNextVideoTime - stream->mInitialTime));
    }

    if (!stream->mHaveSentFinish) {
      stream->mStream->AdvanceKnownTracksTime(endPosition);
    }

    if (finished && !stream->mHaveSentFinish) {
      stream->mHaveSentFinish = true;
      stream->mStream->Finish();
    }
  }

  if (mAudioCaptured) {
    // Discard audio packets that are no longer needed.
    while (true) {
      const AudioData* a = AudioQueue().PeekFront();
      // Packet times are not 100% reliable, so this may discard packets that
      // actually contain data for mCurrentFrameTime. As a result, a newly
      // created output stream might be missing the audio for its very start;
      // that's OK, we'll play silence instead for a brief moment. Seeking to
      // this time would have a similar issue for such badly muxed resources.
      if (!a || a->GetEndTime() >= minLastAudioPacketTime)
        break;
      mAudioEndTime = std::max(mAudioEndTime, a->GetEndTime());
      delete AudioQueue().PopFront();
    }

    if (finished) {
      mAudioCompleted = true;
      UpdateReadyState();
    }
  }
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
// Returns the runnable used to wake the decoder when a captured stream runs
// low on data. The runnable is created lazily on first use and the same
// instance is handed back thereafter (until revoked). Caller must hold the
// decoder monitor.
MediaDecoderStateMachine::WakeDecoderRunnable*
MediaDecoderStateMachine::GetWakeDecoderRunnable()
{
  AssertCurrentThreadInMonitor();

  // Lazily create the shared instance on first request.
  if (mPendingWakeDecoder.get() == nullptr) {
    mPendingWakeDecoder = new WakeDecoderRunnable(this);
  }
  return mPendingWakeDecoder.get();
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
bool MediaDecoderStateMachine::HaveEnoughDecodedAudio(int64_t aAmpleAudioUSecs)
|
2012-04-29 20:12:42 -07:00
|
|
|
{
|
2013-11-03 14:11:09 -08:00
|
|
|
AssertCurrentThreadInMonitor();
|
2012-04-29 20:12:42 -07:00
|
|
|
|
2014-04-27 18:12:50 -07:00
|
|
|
if (AudioQueue().GetSize() == 0 ||
|
2012-04-29 20:12:42 -07:00
|
|
|
GetDecodedAudioDuration() < aAmpleAudioUSecs) {
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
if (!mAudioCaptured) {
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
2012-07-31 05:17:22 -07:00
|
|
|
DecodedStreamData* stream = mDecoder->GetDecodedStream();
|
|
|
|
if (stream && stream->mStreamInitialized && !stream->mHaveSentFinishAudio) {
|
|
|
|
if (!stream->mStream->HaveEnoughBuffered(TRACK_AUDIO)) {
|
2012-04-29 20:12:42 -07:00
|
|
|
return false;
|
|
|
|
}
|
2012-07-31 05:17:22 -07:00
|
|
|
stream->mStream->DispatchWhenNotEnoughBuffered(TRACK_AUDIO,
|
|
|
|
GetStateMachineThread(), GetWakeDecoderRunnable());
|
2012-04-29 20:12:42 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
bool MediaDecoderStateMachine::HaveEnoughDecodedVideo()
|
2012-04-29 20:12:42 -07:00
|
|
|
{
|
2013-11-03 14:11:09 -08:00
|
|
|
AssertCurrentThreadInMonitor();
|
2012-04-29 20:12:42 -07:00
|
|
|
|
2014-04-27 18:12:50 -07:00
|
|
|
if (static_cast<uint32_t>(VideoQueue().GetSize()) < mAmpleVideoFrames * mPlaybackRate) {
|
2012-04-29 20:12:42 -07:00
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
2012-07-31 05:17:22 -07:00
|
|
|
DecodedStreamData* stream = mDecoder->GetDecodedStream();
|
|
|
|
if (stream && stream->mStreamInitialized && !stream->mHaveSentFinishVideo) {
|
|
|
|
if (!stream->mStream->HaveEnoughBuffered(TRACK_VIDEO)) {
|
2012-04-29 20:12:42 -07:00
|
|
|
return false;
|
|
|
|
}
|
2012-07-31 05:17:22 -07:00
|
|
|
stream->mStream->DispatchWhenNotEnoughBuffered(TRACK_VIDEO,
|
|
|
|
GetStateMachineThread(), GetWakeDecoderRunnable());
|
2012-04-29 20:12:42 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
2014-03-10 20:44:09 -07:00
|
|
|
bool
|
|
|
|
MediaDecoderStateMachine::NeedToDecodeVideo()
|
|
|
|
{
|
|
|
|
AssertCurrentThreadInMonitor();
|
2014-03-10 20:44:09 -07:00
|
|
|
NS_ASSERTION(OnStateMachineThread() || OnDecodeThread(),
|
|
|
|
"Should be on state machine or decode thread.");
|
2014-06-13 13:20:37 -07:00
|
|
|
return mIsVideoDecoding &&
|
|
|
|
!mMinimizePreroll &&
|
|
|
|
!HaveEnoughDecodedVideo();
|
2014-03-10 20:44:09 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
// Decodes one video frame on the decode task queue. Handles the video
// preroll/keyframe-skip heuristics, times the decode so slow video decoding
// can raise the low-audio threshold, and re-dispatches decode tasks.
// Runs on the decode thread; takes the decoder monitor, dropping it only
// around the actual reader call.
void
MediaDecoderStateMachine::DecodeVideo()
{
  ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
  NS_ASSERTION(OnDecodeThread(), "Should be on decode thread.");

  // Only DECODING and BUFFERING states want frames; in any other state just
  // clear the dispatched flag so a new task can be scheduled later.
  if (mState != DECODER_STATE_DECODING && mState != DECODER_STATE_BUFFERING) {
    mDispatchedVideoDecodeTask = false;
    return;
  }

  // We don't want to consider skipping to the next keyframe if we've
  // only just started up the decode loop, so wait until we've decoded
  // some frames before enabling the keyframe skip logic on video.
  if (mIsVideoPrerolling &&
      (static_cast<uint32_t>(VideoQueue().GetSize())
       >= mVideoPrerollFrames * mPlaybackRate))
  {
    mIsVideoPrerolling = false;
  }

  // We'll skip the video decode to the nearest keyframe if we're low on
  // audio, or if we're low on video, provided we're not running low on
  // data to decode. If we're running low on downloaded data to decode,
  // we won't start keyframe skipping, as we'll be pausing playback to buffer
  // soon anyway and we'll want to be able to display frames immediately
  // after buffering finishes.
  if (mState == DECODER_STATE_DECODING &&
      !mSkipToNextKeyFrame &&
      mIsVideoDecoding &&
      ((!mIsAudioPrerolling && mIsAudioDecoding &&
        GetDecodedAudioDuration() < mLowAudioThresholdUsecs * mPlaybackRate) ||
       (!mIsVideoPrerolling && mIsVideoDecoding &&
        // don't skip frame when |clock time| <= |mVideoFrameEndTime| for
        // we are still in the safe range without underrunning video frames
        GetClock() > mVideoFrameEndTime &&
        (static_cast<uint32_t>(VideoQueue().GetSize())
         < LOW_VIDEO_FRAMES * mPlaybackRate))) &&
      !HasLowUndecodedData())
  {
    mSkipToNextKeyFrame = true;
    DECODER_LOG(PR_LOG_DEBUG, "Skipping video decode to the next keyframe");
  }

  // Time the video decode, so that if it's slow, we can increase our low
  // audio threshold to reduce the chance of an audio underrun while we're
  // waiting for a video decode to complete.
  TimeDuration decodeTime;
  {
    int64_t currentTime = GetMediaTime();
    // Drop the monitor while decoding: DecodeVideoFrame may block for a
    // while and other threads need the monitor in the meantime.
    ReentrantMonitorAutoExit exitMon(mDecoder->GetReentrantMonitor());
    TimeStamp start = TimeStamp::Now();
    mIsVideoDecoding = mReader->DecodeVideoFrame(mSkipToNextKeyFrame, currentTime);
    decodeTime = TimeStamp::Now() - start;
  }
  if (!mIsVideoDecoding) {
    // Playback ended for this stream, close the sample queue.
    VideoQueue().Finish();
    CheckIfDecodeComplete();
  }

  // Slow decode: bump the low/ample audio thresholds (capped at
  // AMPLE_AUDIO_USECS) so audio is buffered further ahead next time.
  if (THRESHOLD_FACTOR * DurationToUsecs(decodeTime) > mLowAudioThresholdUsecs &&
      !HasLowUndecodedData())
  {
    mLowAudioThresholdUsecs =
      std::min(THRESHOLD_FACTOR * DurationToUsecs(decodeTime), AMPLE_AUDIO_USECS);
    mAmpleAudioThresholdUsecs = std::max(THRESHOLD_FACTOR * mLowAudioThresholdUsecs,
                                         mAmpleAudioThresholdUsecs);
    DECODER_LOG(PR_LOG_DEBUG, "Slow video decode, set mLowAudioThresholdUsecs=%lld mAmpleAudioThresholdUsecs=%lld",
                mLowAudioThresholdUsecs, mAmpleAudioThresholdUsecs);
  }

  SendStreamData();

  // The ready state can change when we've decoded data, so update the
  // ready state, so that DOM events can fire.
  UpdateReadyState();

  mDispatchedVideoDecodeTask = false;
  DispatchDecodeTasksIfNeeded();
}
|
|
|
|
|
|
|
|
bool
|
|
|
|
MediaDecoderStateMachine::NeedToDecodeAudio()
|
|
|
|
{
|
|
|
|
AssertCurrentThreadInMonitor();
|
2014-03-10 20:44:09 -07:00
|
|
|
NS_ASSERTION(OnStateMachineThread() || OnDecodeThread(),
|
|
|
|
"Should be on state machine or decode thread.");
|
2014-06-13 13:20:37 -07:00
|
|
|
return mIsAudioDecoding &&
|
|
|
|
!mMinimizePreroll &&
|
|
|
|
!HaveEnoughDecodedAudio(mAmpleAudioThresholdUsecs * mPlaybackRate);
|
2014-03-10 20:44:09 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
// Decodes one chunk of audio on the decode task queue. Handles the audio
// preroll heuristic, pushes data to any capturing MediaStream, and wakes the
// audio thread. Runs on the decode thread; holds the decoder monitor except
// around the reader call.
void
MediaDecoderStateMachine::DecodeAudio()
{
  ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
  NS_ASSERTION(OnDecodeThread(), "Should be on decode thread.");

  // Only DECODING and BUFFERING states want audio; otherwise just clear the
  // dispatched flag so a new task can be scheduled later.
  if (mState != DECODER_STATE_DECODING && mState != DECODER_STATE_BUFFERING) {
    mDispatchedAudioDecodeTask = false;
    return;
  }

  // We don't want to consider skipping to the next keyframe if we've
  // only just started up the decode loop, so wait until we've decoded
  // some audio data before enabling the keyframe skip logic on audio.
  if (mIsAudioPrerolling &&
      GetDecodedAudioDuration() >= mAudioPrerollUsecs * mPlaybackRate) {
    mIsAudioPrerolling = false;
  }

  {
    // Drop the monitor while decoding; DecodeAudioData may block.
    ReentrantMonitorAutoExit exitMon(mDecoder->GetReentrantMonitor());
    mIsAudioDecoding = mReader->DecodeAudioData();
  }
  if (!mIsAudioDecoding) {
    // Playback ended for this stream, close the sample queue.
    AudioQueue().Finish();
    CheckIfDecodeComplete();
  }

  SendStreamData();

  // Notify to ensure that the AudioLoop() is not waiting, in case it was
  // waiting for more audio to be decoded.
  mDecoder->GetReentrantMonitor().NotifyAll();

  // The ready state can change when we've decoded data, so update the
  // ready state, so that DOM events can fire.
  UpdateReadyState();

  mDispatchedAudioDecodeTask = false;
  DispatchDecodeTasksIfNeeded();
}
|
|
|
|
|
2014-03-10 20:44:09 -07:00
|
|
|
void
|
|
|
|
MediaDecoderStateMachine::CheckIfDecodeComplete()
|
2010-04-01 20:03:07 -07:00
|
|
|
{
|
2014-03-10 20:44:09 -07:00
|
|
|
AssertCurrentThreadInMonitor();
|
|
|
|
if (mState == DECODER_STATE_SHUTDOWN ||
|
2014-03-11 03:06:23 -07:00
|
|
|
mState == DECODER_STATE_SEEKING ||
|
|
|
|
mState == DECODER_STATE_COMPLETED) {
|
2014-03-10 20:44:09 -07:00
|
|
|
// Don't change our state if we've already been shutdown, or we're seeking,
|
|
|
|
// since we don't want to abort the shutdown or seek processes.
|
2014-03-10 20:44:09 -07:00
|
|
|
return;
|
|
|
|
}
|
2014-06-13 13:20:37 -07:00
|
|
|
MOZ_ASSERT(!AudioQueue().IsFinished() || !mIsAudioDecoding);
|
|
|
|
MOZ_ASSERT(!VideoQueue().IsFinished() || !mIsVideoDecoding);
|
|
|
|
if (!mIsVideoDecoding && !mIsAudioDecoding) {
|
2014-03-10 20:44:09 -07:00
|
|
|
// We've finished decoding all active streams,
|
|
|
|
// so move to COMPLETED state.
|
|
|
|
mState = DECODER_STATE_COMPLETED;
|
2014-03-10 20:44:10 -07:00
|
|
|
DispatchDecodeTasksIfNeeded();
|
2014-03-10 20:44:09 -07:00
|
|
|
ScheduleStateMachine();
|
|
|
|
}
|
2014-04-23 02:29:04 -07:00
|
|
|
DECODER_LOG(PR_LOG_DEBUG, "CheckIfDecodeComplete %scompleted",
|
|
|
|
((mState == DECODER_STATE_COMPLETED) ? "" : "NOT "));
|
2010-04-01 20:03:07 -07:00
|
|
|
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
bool MediaDecoderStateMachine::IsPlaying()
|
2010-04-01 20:03:07 -07:00
|
|
|
{
|
2013-11-03 14:11:09 -08:00
|
|
|
AssertCurrentThreadInMonitor();
|
2010-04-01 20:03:07 -07:00
|
|
|
|
|
|
|
return !mPlayStartTime.IsNull();
|
|
|
|
}
|
|
|
|
|
2013-12-19 19:24:42 -08:00
|
|
|
// If we have already written enough frames to the AudioStream, start the
|
|
|
|
// playback.
|
|
|
|
static void
|
|
|
|
StartAudioStreamPlaybackIfNeeded(AudioStream* aStream)
|
|
|
|
{
|
|
|
|
// We want to have enough data in the buffer to start the stream.
|
|
|
|
if (static_cast<double>(aStream->GetWritten()) / aStream->GetRate() >=
|
|
|
|
static_cast<double>(AUDIOSTREAM_MIN_WRITE_BEFORE_START_USECS) / USECS_PER_S) {
|
|
|
|
aStream->Start();
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// Writes aFrames frames of silence to aStream, then starts the stream if
// enough audio has now accumulated for playback to begin.
static void WriteSilence(AudioStream* aStream, uint32_t aFrames)
{
  const uint32_t sampleCount = aFrames * aStream->GetChannels();
  nsAutoTArray<AudioDataValue, 1000> silence;
  silence.SetLength(sampleCount);
  memset(silence.Elements(), 0, sampleCount * sizeof(AudioDataValue));
  aStream->Write(silence.Elements(), aFrames);

  StartAudioStreamPlaybackIfNeeded(aStream);
}
|
|
|
|
|
|
|
|
// Body of the dedicated audio thread. Creates the AudioStream, then loops:
// wait until there is audio to play (pausing the stream while not playing),
// pick up volume/rate/pitch changes, fill timestamp gaps with silence or
// write the next decoded chunk, and track mAudioEndTime. On exit it drains
// the stream (unless seeking), shuts it down, and signals completion.
// The decoder monitor is taken and dropped repeatedly; blocking calls
// (Init, Write via PlayFromAudioQueue/PlaySilence, Drain) happen outside it.
void MediaDecoderStateMachine::AudioLoop()
{
  NS_ASSERTION(OnAudioThread(), "Should be on audio thread.");
  DECODER_LOG(PR_LOG_DEBUG, "Begun audio thread/loop");
  int64_t audioDuration = 0;       // Frames written to the stream so far.
  int64_t audioStartTime = -1;     // Usecs timestamp of the first sample.
  uint32_t channels, rate;
  double volume = -1;
  bool setVolume;
  double playbackRate = -1;
  bool setPlaybackRate;
  bool preservesPitch;
  bool setPreservesPitch;
  AudioChannel audioChannel;

  {
    // Snapshot the stream parameters under the monitor.
    ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
    mAudioCompleted = false;
    audioStartTime = mAudioStartTime;
    NS_ASSERTION(audioStartTime != -1, "Should have audio start time by now");
    channels = mInfo.mAudio.mChannels;
    rate = mInfo.mAudio.mRate;

    audioChannel = mDecoder->GetAudioChannel();
    volume = mVolume;
    preservesPitch = mPreservesPitch;
    playbackRate = mPlaybackRate;
  }

  {
    // AudioStream initialization can block for extended periods in unusual
    // circumstances, so we take care to drop the decoder monitor while
    // initializing.
    RefPtr<AudioStream> audioStream(new AudioStream());
    audioStream->Init(channels, rate, audioChannel, AudioStream::HighLatency);
    audioStream->SetVolume(volume);
    if (audioStream->SetPreservesPitch(preservesPitch) != NS_OK) {
      NS_WARNING("Setting the pitch preservation failed at AudioLoop start.");
    }
    if (playbackRate != 1.0) {
      NS_ASSERTION(playbackRate != 0,
                   "Don't set the playbackRate to 0 on an AudioStream.");
      if (audioStream->SetPlaybackRate(playbackRate) != NS_OK) {
        NS_WARNING("Setting the playback rate failed at AudioLoop start.");
      }
    }

    {
      // Publish the stream only under the monitor.
      ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
      mAudioStream = audioStream.forget();
    }
  }

  while (1) {
    // Wait while we're not playing, and we're not shutting down, or we're
    // playing and we've got no audio to play.
    {
      ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
      NS_ASSERTION(mState != DECODER_STATE_DECODING_METADATA,
                   "Should have meta data before audio started playing.");
      while (mState != DECODER_STATE_SHUTDOWN &&
             !mStopAudioThread &&
             (!IsPlaying() ||
              mState == DECODER_STATE_BUFFERING ||
              (AudioQueue().GetSize() == 0 &&
               !AudioQueue().AtEndOfStream())))
      {
        if (!IsPlaying() && !mAudioStream->IsPaused()) {
          mAudioStream->Pause();
        }
        mon.Wait();
      }

      // If we're shutting down, break out and exit the audio thread.
      // Also break out if audio is being captured.
      if (mState == DECODER_STATE_SHUTDOWN ||
          mStopAudioThread ||
          AudioQueue().AtEndOfStream())
      {
        break;
      }

      // We only want to go to the expense of changing the volume if
      // the volume has changed.
      setVolume = volume != mVolume;
      volume = mVolume;

      // Same for the playbackRate.
      setPlaybackRate = playbackRate != mPlaybackRate;
      playbackRate = mPlaybackRate;

      // Same for the pitch preservation.
      setPreservesPitch = preservesPitch != mPreservesPitch;
      preservesPitch = mPreservesPitch;

      if (IsPlaying() && mAudioStream->IsPaused()) {
        mAudioStream->Resume();
      }
    }

    // Apply any parameter changes outside the monitor.
    if (setVolume) {
      mAudioStream->SetVolume(volume);
    }
    if (setPlaybackRate) {
      NS_ASSERTION(playbackRate != 0,
                   "Don't set the playbackRate to 0 in the AudioStreams");
      if (mAudioStream->SetPlaybackRate(playbackRate) != NS_OK) {
        NS_WARNING("Setting the playback rate failed in AudioLoop.");
      }
    }
    if (setPreservesPitch) {
      if (mAudioStream->SetPreservesPitch(preservesPitch) != NS_OK) {
        NS_WARNING("Setting the pitch preservation failed in AudioLoop.");
      }
    }
    NS_ASSERTION(AudioQueue().GetSize() > 0,
                 "Should have data to play");
    // See if there's a gap in the audio. If there is, push silence into the
    // audio hardware, so we can play across the gap.
    const AudioData* s = AudioQueue().PeekFront();

    // Calculate the number of frames that have been pushed onto the audio
    // hardware.
    CheckedInt64 playedFrames = UsecsToFrames(audioStartTime, rate) +
                                audioDuration;
    // Calculate the timestamp of the next chunk of audio in numbers of
    // samples.
    CheckedInt64 sampleTime = UsecsToFrames(s->mTime, rate);
    CheckedInt64 missingFrames = sampleTime - playedFrames;
    if (!missingFrames.isValid() || !sampleTime.isValid()) {
      NS_WARNING("Int overflow adding in AudioLoop()");
      break;
    }

    int64_t framesWritten = 0;
    if (missingFrames.value() > 0) {
      // The next audio chunk begins some time after the end of the last chunk
      // we pushed to the audio hardware. We must push silence into the audio
      // hardware so that the next audio chunk begins playback at the correct
      // time.
      missingFrames = std::min<int64_t>(UINT32_MAX, missingFrames.value());
      VERBOSE_LOG("playing %d frames of silence", int32_t(missingFrames.value()));
      framesWritten = PlaySilence(static_cast<uint32_t>(missingFrames.value()),
                                  channels, playedFrames.value());
    } else {
      framesWritten = PlayFromAudioQueue(sampleTime.value(), channels);
    }
    audioDuration += framesWritten;
    {
      // Recompute the usecs position of the last written sample.
      ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
      CheckedInt64 playedUsecs = FramesToUsecs(audioDuration, rate) + audioStartTime;
      if (!playedUsecs.isValid()) {
        NS_WARNING("Int overflow calculating audio end time");
        break;
      }
      mAudioEndTime = playedUsecs.value();
    }
  }
  {
    ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
    if (AudioQueue().AtEndOfStream() &&
        mState != DECODER_STATE_SHUTDOWN &&
        !mStopAudioThread)
    {
      // If the media was too short to trigger the start of the audio stream,
      // start it now.
      mAudioStream->Start();
      // Last frame pushed to audio hardware, wait for the audio to finish,
      // before the audio thread terminates.
      bool seeking = false;
      {
        int64_t oldPosition = -1;
        int64_t position = GetMediaTime();
        // Wait in bounded chunks until the clock stops advancing, catches up
        // with mAudioEndTime, or a seek/shutdown intervenes.
        while (oldPosition != position &&
               mAudioEndTime - position > 0 &&
               mState != DECODER_STATE_SEEKING &&
               mState != DECODER_STATE_SHUTDOWN)
        {
          const int64_t DRAIN_BLOCK_USECS = 100000;
          Wait(std::min(mAudioEndTime - position, DRAIN_BLOCK_USECS));
          oldPosition = position;
          position = GetMediaTime();
        }
        seeking = mState == DECODER_STATE_SEEKING;
      }

      if (!seeking && !mAudioStream->IsPaused()) {
        {
          // Drain blocks; release the monitor while it runs.
          ReentrantMonitorAutoExit exit(mDecoder->GetReentrantMonitor());
          mAudioStream->Drain();
        }
      }
    }
  }
  DECODER_LOG(PR_LOG_DEBUG, "Reached audio stream end.");
  {
    // Must hold lock while shutting down and anulling the audio stream to prevent
    // state machine thread trying to use it while we're destroying it.
    ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
    mAudioStream->Shutdown();
    mAudioStream = nullptr;
    if (!mAudioCaptured) {
      mAudioCompleted = true;
      UpdateReadyState();
      // Kick the decode thread; it may be sleeping waiting for this to finish.
      mDecoder->GetReentrantMonitor().NotifyAll();
    }
  }

  DECODER_LOG(PR_LOG_DEBUG, "Audio stream finished playing, audio thread exit");
}
|
|
|
|
|
|
|
|
uint32_t MediaDecoderStateMachine::PlaySilence(uint32_t aFrames,
|
|
|
|
uint32_t aChannels,
|
|
|
|
uint64_t aFrameOffset)
|
|
|
|
|
|
|
|
{
|
|
|
|
NS_ASSERTION(OnAudioThread(), "Only call on audio thread.");
|
|
|
|
NS_ASSERTION(!mAudioStream->IsPaused(), "Don't play when paused");
|
|
|
|
uint32_t maxFrames = SILENCE_BYTES_CHUNK / aChannels / sizeof(AudioDataValue);
|
|
|
|
uint32_t frames = std::min(aFrames, maxFrames);
|
|
|
|
WriteSilence(mAudioStream, frames);
|
|
|
|
return frames;
|
|
|
|
}
|
|
|
|
|
|
|
|
uint32_t MediaDecoderStateMachine::PlayFromAudioQueue(uint64_t aFrameOffset,
|
|
|
|
uint32_t aChannels)
|
|
|
|
{
|
|
|
|
NS_ASSERTION(OnAudioThread(), "Only call on audio thread.");
|
|
|
|
NS_ASSERTION(!mAudioStream->IsPaused(), "Don't play when paused");
|
2014-04-27 18:12:50 -07:00
|
|
|
nsAutoPtr<AudioData> audio(AudioQueue().PopFront());
|
2013-12-19 19:24:42 -08:00
|
|
|
{
|
|
|
|
ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
|
|
|
|
NS_WARN_IF_FALSE(IsPlaying(), "Should be playing");
|
|
|
|
// Awaken the decode loop if it's waiting for space to free up in the
|
|
|
|
// audio queue.
|
|
|
|
mDecoder->GetReentrantMonitor().NotifyAll();
|
|
|
|
}
|
|
|
|
int64_t offset = -1;
|
|
|
|
uint32_t frames = 0;
|
2014-04-23 02:29:04 -07:00
|
|
|
VERBOSE_LOG("playing %d frames of data to stream for AudioData at %lld",
|
|
|
|
audio->mFrames, audio->mTime);
|
2013-12-19 19:24:42 -08:00
|
|
|
mAudioStream->Write(audio->mAudioData,
|
|
|
|
audio->mFrames);
|
|
|
|
|
|
|
|
aChannels = mAudioStream->GetOutChannels();
|
|
|
|
|
|
|
|
StartAudioStreamPlaybackIfNeeded(mAudioStream);
|
|
|
|
|
|
|
|
offset = audio->mOffset;
|
|
|
|
frames = audio->mFrames;
|
|
|
|
|
|
|
|
if (offset != -1) {
|
|
|
|
mDecoder->UpdatePlaybackOffset(offset);
|
|
|
|
}
|
|
|
|
return frames;
|
|
|
|
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
// One-time, main-thread initialization: acquires the shared decode and state
// machine thread pools, creates the decode task queue and the timer used to
// schedule the state machine, then initializes the reader (optionally
// cloning from aCloneDonor's reader).
// @param aCloneDonor  Optional state machine whose reader seeds ours.
// @return NS_OK on success, or an error code from any failed step.
nsresult MediaDecoderStateMachine::Init(MediaDecoderStateMachine* aCloneDonor)
{
  MOZ_ASSERT(NS_IsMainThread());

  // Decode work runs on a pool shared across decoders; size is controlled
  // by the "media.num-decode-threads" pref (default 25).
  RefPtr<SharedThreadPool> decodePool(
    SharedThreadPool::Get(NS_LITERAL_CSTRING("Media Decode"),
                          Preferences::GetUint("media.num-decode-threads", 25)));
  NS_ENSURE_TRUE(decodePool, NS_ERROR_FAILURE);

  // All state machines share a single-thread pool.
  RefPtr<SharedThreadPool> stateMachinePool(
    SharedThreadPool::Get(NS_LITERAL_CSTRING("Media State Machine"), 1));
  NS_ENSURE_TRUE(stateMachinePool, NS_ERROR_FAILURE);

  mDecodeTaskQueue = new MediaTaskQueue(decodePool.forget());
  NS_ENSURE_TRUE(mDecodeTaskQueue, NS_ERROR_FAILURE);

  MediaDecoderReader* cloneReader = nullptr;
  if (aCloneDonor) {
    cloneReader = aCloneDonor->mReader;
  }

  mStateMachineThreadPool = stateMachinePool;

  // Timer callbacks are targeted at the state machine thread.
  nsresult rv;
  mTimer = do_CreateInstance("@mozilla.org/timer;1", &rv);
  NS_ENSURE_SUCCESS(rv, rv);
  rv = mTimer->SetTarget(GetStateMachineThread());
  NS_ENSURE_SUCCESS(rv, rv);

  return mReader->Init(cloneReader);
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
// Pauses playback: records how far we've played, nulls mPlayStartTime (which
// makes IsPlaying() return false), and wakes waiters such as the audio
// thread. Must be called with the decoder monitor held.
void MediaDecoderStateMachine::StopPlayback()
{
  DECODER_LOG(PR_LOG_DEBUG, "StopPlayback()");

  AssertCurrentThreadInMonitor();

  // Notified unconditionally, even if we weren't playing.
  mDecoder->NotifyPlaybackStopped();

  if (IsPlaying()) {
    // Read the clock before clearing mPlayStartTime: GetClock() is only
    // meaningful while still "playing". Assigning a default TimeStamp is
    // what flips IsPlaying() to false.
    mPlayDuration = GetClock();
    mPlayStartTime = TimeStamp();
  }
  // Notify the audio thread, so that it notices that we've stopped playing,
  // so it can pause audio playback.
  mDecoder->GetReentrantMonitor().NotifyAll();
  NS_ASSERTION(!IsPlaying(), "Should report not playing at end of StopPlayback()");
  mDecoder->UpdateStreamBlockingForStateMachinePlaying();

  DispatchDecodeTasksIfNeeded();
}
|
|
|
|
|
2013-12-01 13:09:06 -08:00
|
|
|
void MediaDecoderStateMachine::SetSyncPointForMediaStream()
|
|
|
|
{
|
|
|
|
AssertCurrentThreadInMonitor();
|
|
|
|
|
|
|
|
DecodedStreamData* stream = mDecoder->GetDecodedStream();
|
|
|
|
if (!stream) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
mSyncPointInMediaStream = stream->GetLastOutputTime();
|
|
|
|
mSyncPointInDecodedStream = mStartTime + mPlayDuration;
|
|
|
|
}
|
|
|
|
|
2013-12-12 04:33:00 -08:00
|
|
|
int64_t MediaDecoderStateMachine::GetCurrentTimeViaMediaStreamSync()
|
|
|
|
{
|
|
|
|
AssertCurrentThreadInMonitor();
|
|
|
|
NS_ASSERTION(mSyncPointInDecodedStream >= 0, "Should have set up sync point");
|
|
|
|
DecodedStreamData* stream = mDecoder->GetDecodedStream();
|
2014-06-11 21:44:56 -07:00
|
|
|
int64_t streamDelta = stream->GetLastOutputTime() - mSyncPointInMediaStream;
|
|
|
|
return mSyncPointInDecodedStream + streamDelta;
|
2013-12-12 04:33:00 -08:00
|
|
|
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
// Begins playback: stamps mPlayStartTime (which flips IsPlaying() to true),
// spins up the audio thread, and wakes any waiters. Must be called with the
// decoder monitor held and only while not already playing.
void MediaDecoderStateMachine::StartPlayback()
{
  DECODER_LOG(PR_LOG_DEBUG, "StartPlayback()");

  NS_ASSERTION(!IsPlaying(), "Shouldn't be playing when StartPlayback() is called");
  AssertCurrentThreadInMonitor();

  mDecoder->NotifyPlaybackStarted();
  // A non-null mPlayStartTime is what makes IsPlaying() return true.
  mPlayStartTime = TimeStamp::Now();

  NS_ASSERTION(IsPlaying(), "Should report playing by end of StartPlayback()");
  // Audio thread creation failure is non-fatal; playback proceeds without it.
  if (NS_FAILED(StartAudioThread())) {
    NS_WARNING("Failed to create audio thread");
  }
  mDecoder->GetReentrantMonitor().NotifyAll();
  mDecoder->UpdateStreamBlockingForStateMachinePlaying();
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
void MediaDecoderStateMachine::UpdatePlaybackPositionInternal(int64_t aTime)
|
2010-04-01 20:03:07 -07:00
|
|
|
{
|
2011-07-11 20:39:25 -07:00
|
|
|
NS_ASSERTION(OnStateMachineThread() || OnDecodeThread(),
|
2010-04-01 20:03:07 -07:00
|
|
|
"Should be on state machine thread.");
|
2013-11-03 14:11:09 -08:00
|
|
|
AssertCurrentThreadInMonitor();
|
2010-04-01 20:03:07 -07:00
|
|
|
|
|
|
|
NS_ASSERTION(mStartTime >= 0, "Should have positive mStartTime");
|
|
|
|
mCurrentFrameTime = aTime - mStartTime;
|
|
|
|
NS_ASSERTION(mCurrentFrameTime >= 0, "CurrentTime should be positive!");
|
|
|
|
if (aTime > mEndTime) {
|
|
|
|
NS_ASSERTION(mCurrentFrameTime > GetDuration(),
|
|
|
|
"CurrentTime must be after duration if aTime > endTime!");
|
|
|
|
mEndTime = aTime;
|
|
|
|
nsCOMPtr<nsIRunnable> event =
|
2012-11-14 11:46:40 -08:00
|
|
|
NS_NewRunnableMethod(mDecoder, &MediaDecoder::DurationChanged);
|
2014-05-23 12:53:17 -07:00
|
|
|
NS_DispatchToMainThread(event);
|
2010-04-01 20:03:07 -07:00
|
|
|
}
|
2011-01-31 18:57:13 -08:00
|
|
|
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
void MediaDecoderStateMachine::UpdatePlaybackPosition(int64_t aTime)
|
2011-01-31 18:57:13 -08:00
|
|
|
{
|
|
|
|
UpdatePlaybackPositionInternal(aTime);
|
|
|
|
|
2011-09-28 23:19:26 -07:00
|
|
|
bool fragmentEnded = mFragmentEndTime >= 0 && GetMediaTime() >= mFragmentEndTime;
|
2011-08-24 16:42:23 -07:00
|
|
|
if (!mPositionChangeQueued || fragmentEnded) {
|
2011-09-29 16:34:37 -07:00
|
|
|
mPositionChangeQueued = true;
|
2010-04-01 20:03:07 -07:00
|
|
|
nsCOMPtr<nsIRunnable> event =
|
2012-11-14 11:46:40 -08:00
|
|
|
NS_NewRunnableMethod(mDecoder, &MediaDecoder::PlaybackPositionChanged);
|
2014-05-23 12:53:17 -07:00
|
|
|
NS_DispatchToMainThread(event);
|
2010-04-01 20:03:07 -07:00
|
|
|
}
|
2010-08-25 06:10:00 -07:00
|
|
|
|
2012-11-30 05:17:54 -08:00
|
|
|
mMetadataManager.DispatchMetadataIfNeeded(mDecoder, aTime);
|
|
|
|
|
2011-08-24 16:42:23 -07:00
|
|
|
if (fragmentEnded) {
|
|
|
|
StopPlayback();
|
|
|
|
}
|
2010-04-01 20:03:07 -07:00
|
|
|
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
void MediaDecoderStateMachine::ClearPositionChangeFlag()
|
2010-04-01 20:03:07 -07:00
|
|
|
{
|
|
|
|
NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
|
2013-11-03 14:11:09 -08:00
|
|
|
AssertCurrentThreadInMonitor();
|
2010-04-01 20:03:07 -07:00
|
|
|
|
2011-09-29 16:34:37 -07:00
|
|
|
mPositionChangeQueued = false;
|
2010-04-01 20:03:07 -07:00
|
|
|
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
// Classifies next-frame availability for readiness reporting: buffering or
// seeking wins over everything, then data availability decides.
MediaDecoderOwner::NextFrameStatus MediaDecoderStateMachine::GetNextFrameStatus()
{
  ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
  if (IsBuffering() || IsSeeking()) {
    return MediaDecoderOwner::NEXT_FRAME_UNAVAILABLE_BUFFERING;
  }
  if (HaveNextFrameData()) {
    return MediaDecoderOwner::NEXT_FRAME_AVAILABLE;
  }
  return MediaDecoderOwner::NEXT_FRAME_UNAVAILABLE;
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
void MediaDecoderStateMachine::SetVolume(double volume)
|
2010-04-01 20:03:07 -07:00
|
|
|
{
|
|
|
|
NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
|
2011-04-29 12:21:57 -07:00
|
|
|
ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
|
2010-09-05 19:14:50 -07:00
|
|
|
mVolume = volume;
|
2010-04-01 20:03:07 -07:00
|
|
|
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
void MediaDecoderStateMachine::SetAudioCaptured(bool aCaptured)
|
2012-04-29 20:12:42 -07:00
|
|
|
{
|
|
|
|
NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
|
|
|
|
ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
|
2012-12-04 02:59:36 -08:00
|
|
|
if (!mAudioCaptured && aCaptured && !mStopAudioThread) {
|
|
|
|
// Make sure the state machine runs as soon as possible. That will
|
2013-12-19 19:24:42 -08:00
|
|
|
// stop the audio thread.
|
|
|
|
// If mStopAudioThread is true then we're already stopping the audio thread
|
2012-12-04 02:59:36 -08:00
|
|
|
// and since we set mAudioCaptured to true, nothing can start it again.
|
|
|
|
ScheduleStateMachine();
|
2012-04-29 20:12:42 -07:00
|
|
|
}
|
|
|
|
mAudioCaptured = aCaptured;
|
|
|
|
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
double MediaDecoderStateMachine::GetCurrentTime() const
|
2010-04-01 20:03:07 -07:00
|
|
|
{
|
2011-03-23 15:28:57 -07:00
|
|
|
NS_ASSERTION(NS_IsMainThread() ||
|
2011-07-11 20:39:23 -07:00
|
|
|
OnStateMachineThread() ||
|
2011-03-23 15:28:57 -07:00
|
|
|
OnDecodeThread(),
|
|
|
|
"Should be on main, decode, or state machine thread.");
|
2010-04-01 20:03:07 -07:00
|
|
|
|
2011-04-13 15:12:23 -07:00
|
|
|
return static_cast<double>(mCurrentFrameTime) / static_cast<double>(USECS_PER_S);
|
2010-04-01 20:03:07 -07:00
|
|
|
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
int64_t MediaDecoderStateMachine::GetDuration()
|
2010-04-01 20:03:07 -07:00
|
|
|
{
|
2013-11-03 14:11:09 -08:00
|
|
|
AssertCurrentThreadInMonitor();
|
2010-04-01 20:03:07 -07:00
|
|
|
|
|
|
|
if (mEndTime == -1 || mStartTime == -1)
|
|
|
|
return -1;
|
|
|
|
return mEndTime - mStartTime;
|
|
|
|
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
void MediaDecoderStateMachine::SetDuration(int64_t aDuration)
|
2010-04-01 20:03:07 -07:00
|
|
|
{
|
2011-07-11 20:39:23 -07:00
|
|
|
NS_ASSERTION(NS_IsMainThread() || OnDecodeThread(),
|
|
|
|
"Should be on main or decode thread.");
|
2013-11-03 14:11:09 -08:00
|
|
|
AssertCurrentThreadInMonitor();
|
2010-04-01 20:03:07 -07:00
|
|
|
|
2011-05-08 14:10:28 -07:00
|
|
|
if (aDuration == -1) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2010-04-01 20:03:07 -07:00
|
|
|
if (mStartTime != -1) {
|
|
|
|
mEndTime = mStartTime + aDuration;
|
|
|
|
} else {
|
|
|
|
mStartTime = 0;
|
|
|
|
mEndTime = aDuration;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2013-09-09 17:45:33 -07:00
|
|
|
void MediaDecoderStateMachine::UpdateEstimatedDuration(int64_t aDuration)
|
2013-05-03 00:48:37 -07:00
|
|
|
{
|
2013-11-03 14:11:09 -08:00
|
|
|
AssertCurrentThreadInMonitor();
|
2013-09-09 17:45:33 -07:00
|
|
|
int64_t duration = GetDuration();
|
|
|
|
if (aDuration != duration &&
|
|
|
|
abs(aDuration - duration) > ESTIMATED_DURATION_FUZZ_FACTOR_USECS) {
|
2013-05-03 00:48:37 -07:00
|
|
|
SetDuration(aDuration);
|
|
|
|
nsCOMPtr<nsIRunnable> event =
|
|
|
|
NS_NewRunnableMethod(mDecoder, &MediaDecoder::DurationChanged);
|
2014-05-23 12:53:17 -07:00
|
|
|
NS_DispatchToMainThread(event);
|
2013-05-03 00:48:37 -07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2012-11-19 07:11:21 -08:00
|
|
|
void MediaDecoderStateMachine::SetMediaEndTime(int64_t aEndTime)
|
2011-05-08 14:10:28 -07:00
|
|
|
{
|
2011-07-11 20:39:23 -07:00
|
|
|
NS_ASSERTION(OnDecodeThread(), "Should be on decode thread");
|
2013-11-03 14:11:09 -08:00
|
|
|
AssertCurrentThreadInMonitor();
|
2011-05-08 14:10:28 -07:00
|
|
|
|
|
|
|
mEndTime = aEndTime;
|
|
|
|
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
void MediaDecoderStateMachine::SetFragmentEndTime(int64_t aEndTime)
|
2011-08-24 16:42:23 -07:00
|
|
|
{
|
2013-11-03 14:11:09 -08:00
|
|
|
AssertCurrentThreadInMonitor();
|
2011-08-24 16:42:23 -07:00
|
|
|
|
|
|
|
mFragmentEndTime = aEndTime < 0 ? aEndTime : aEndTime + mStartTime;
|
|
|
|
}
|
|
|
|
|
2012-11-30 05:17:54 -08:00
|
|
|
// Records whether the underlying transport layer supports seeking.
// Callable from the main thread or the decode thread, with the decoder
// monitor held.
void MediaDecoderStateMachine::SetTransportSeekable(bool aTransportSeekable)
{
  NS_ASSERTION(NS_IsMainThread() || OnDecodeThread(),
               "Should be on main thread or the decoder thread.");
  AssertCurrentThreadInMonitor();

  mTransportSeekable = aTransportSeekable;
}
|
|
|
|
|
|
|
|
// Records whether the media itself (its format/container) supports seeking.
// NOTE(review): unlike SetTransportSeekable(), there is no
// AssertCurrentThreadInMonitor() here — confirm whether callers are
// required to hold the decoder monitor.
void MediaDecoderStateMachine::SetMediaSeekable(bool aMediaSeekable)
{
  NS_ASSERTION(NS_IsMainThread() || OnDecodeThread(),
               "Should be on main thread or the decoder thread.");

  mMediaSeekable = aMediaSeekable;
}
|
|
|
|
|
2013-06-10 05:22:05 -07:00
|
|
|
bool MediaDecoderStateMachine::IsDormantNeeded()
|
|
|
|
{
|
|
|
|
return mReader->IsDormantNeeded();
|
|
|
|
}
|
|
|
|
|
|
|
|
void MediaDecoderStateMachine::SetDormant(bool aDormant)
|
|
|
|
{
|
|
|
|
NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
|
2013-11-03 14:11:09 -08:00
|
|
|
AssertCurrentThreadInMonitor();
|
2013-06-10 05:22:05 -07:00
|
|
|
|
|
|
|
if (!mReader) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (aDormant) {
|
|
|
|
ScheduleStateMachine();
|
|
|
|
mState = DECODER_STATE_DORMANT;
|
|
|
|
mDecoder->GetReentrantMonitor().NotifyAll();
|
|
|
|
} else if ((aDormant != true) && (mState == DECODER_STATE_DORMANT)) {
|
|
|
|
ScheduleStateMachine();
|
|
|
|
mStartTime = 0;
|
|
|
|
mCurrentFrameTime = 0;
|
|
|
|
mState = DECODER_STATE_DECODING_METADATA;
|
|
|
|
mDecoder->GetReentrantMonitor().NotifyAll();
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
// Begins shutdown of the state machine. Transitions to
// DECODER_STATE_SHUTDOWN and wakes all waiters; the actual teardown happens
// when the state machine and decode threads observe the new state.
void MediaDecoderStateMachine::Shutdown()
{
  NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");

  // Once we've entered the shutdown state here there's no going back.
  ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());

  // Change state before issuing shutdown request to threads so those
  // threads can start exiting cleanly during the Shutdown call.
  DECODER_LOG(PR_LOG_DEBUG, "Changed state to SHUTDOWN");
  ScheduleStateMachine();
  mState = DECODER_STATE_SHUTDOWN;
  // Wake any thread blocked on the monitor so it can observe the new state.
  mDecoder->GetReentrantMonitor().NotifyAll();
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
// Transitions the state machine into DECODER_STATE_DECODING, resets
// per-decode bookkeeping, and ensures decode tasks are queued.
// No-op if we are already decoding; may transition straight to COMPLETED
// when both streams have already finished decoding.
void MediaDecoderStateMachine::StartDecoding()
{
  NS_ASSERTION(OnStateMachineThread() || OnDecodeThread(),
               "Should be on state machine or decode thread.");
  ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
  if (mState == DECODER_STATE_DECODING) {
    return;
  }
  mState = DECODER_STATE_DECODING;

  mDecodeStartTime = TimeStamp::Now();

  // Reset our "stream finished decoding" flags, so we try to decode all
  // streams that we have when we start decoding.
  mIsVideoDecoding = HasVideo() && !VideoQueue().IsFinished();
  mIsAudioDecoding = HasAudio() && !AudioQueue().IsFinished();

  // If both streams are already finished, CheckIfDecodeComplete() moves us
  // to COMPLETED and there is nothing further to set up.
  CheckIfDecodeComplete();
  if (mState == DECODER_STATE_COMPLETED) {
    return;
  }

  // Reset other state to pristine values before starting decode.
  mSkipToNextKeyFrame = false;
  mIsAudioPrerolling = true;
  mIsVideoPrerolling = true;

  // Ensure that we've got tasks enqueued to decode data if we need to.
  DispatchDecodeTasksIfNeeded();

  ScheduleStateMachine();
}
|
|
|
|
|
2013-06-10 05:22:05 -07:00
|
|
|
// Parks the state machine in DECODER_STATE_WAIT_FOR_RESOURCES until the
// reader's media resources (e.g. a hardware decoder) become available;
// NotifyWaitingForResourcesStatusChanged() resumes from this state.
void MediaDecoderStateMachine::StartWaitForResources()
{
  NS_ASSERTION(OnStateMachineThread() || OnDecodeThread(),
               "Should be on state machine or decode thread.");
  AssertCurrentThreadInMonitor();
  mState = DECODER_STATE_WAIT_FOR_RESOURCES;
}
|
|
|
|
|
2014-03-10 20:44:09 -07:00
|
|
|
// Called when the reader's resource-waiting status may have changed.
// If we were parked in WAIT_FOR_RESOURCES and the reader is no longer
// waiting, resume the pipeline by re-entering metadata decoding.
void MediaDecoderStateMachine::NotifyWaitingForResourcesStatusChanged()
{
  AssertCurrentThreadInMonitor();
  if (mState != DECODER_STATE_WAIT_FOR_RESOURCES ||
      mReader->IsWaitingMediaResources()) {
    return;
  }
  // The reader is no longer waiting for resources (say a hardware decoder),
  // we can now proceed to decode metadata.
  mState = DECODER_STATE_DECODING_METADATA;
  EnqueueDecodeMetadataTask();
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
// Called (on the main thread) when playback is requested.
void MediaDecoderStateMachine::Play()
{
  NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
  // When asked to play, switch to decoding state only if
  // we are currently buffering. In other cases, we'll start playing anyway
  // when the state machine notices the decoder's state change to PLAYING.
  ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
  if (mState == DECODER_STATE_BUFFERING) {
    DECODER_LOG(PR_LOG_DEBUG, "Changed state from BUFFERING to DECODING");
    mState = DECODER_STATE_DECODING;
    mDecodeStartTime = TimeStamp::Now();
  }
  // Once we start playing, we don't want to minimize our prerolling, as we
  // assume the user is likely to want to keep playing in future.
  mMinimizePreroll = false;
  ScheduleStateMachine();
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
// Resets audio/video playback bookkeeping to "unknown" (-1) ahead of a
// playback discontinuity such as a seek. Must run on the decode thread.
void MediaDecoderStateMachine::ResetPlayback()
{
  NS_ASSERTION(OnDecodeThread(), "Should be on decode thread.");
  mVideoFrameEndTime = -1;
  mAudioStartTime = -1;
  mAudioEndTime = -1;
  mAudioCompleted = false;
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
// Called on the main thread when new media data has been downloaded.
// Forwards the bytes to the reader and, for infinite streams, extends
// mEndTime to cover the newly buffered range.
//
// @param aBuffer the downloaded bytes
// @param aLength number of bytes in aBuffer
// @param aOffset byte offset of the data within the resource
void MediaDecoderStateMachine::NotifyDataArrived(const char* aBuffer,
                                                 uint32_t aLength,
                                                 int64_t aOffset)
{
  NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
  mReader->NotifyDataArrived(aBuffer, aLength, aOffset);

  // While playing an unseekable stream of unknown duration, mEndTime is
  // updated (in AdvanceFrame()) as we play. But if data is being downloaded
  // faster than played, mEndTime won't reflect the end of playable data
  // since we haven't played the frame at the end of buffered data. So update
  // mEndTime here as new data is downloaded to prevent such a lag.
  dom::TimeRanges buffered;
  if (mDecoder->IsInfinite() &&
      NS_SUCCEEDED(mDecoder->GetBuffered(&buffered)))
  {
    uint32_t length = 0;
    buffered.GetLength(&length);
    if (length) {
      // Use the end of the last buffered range (seconds -> microseconds).
      double end = 0;
      buffered.End(length - 1, &end);
      ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
      mEndTime = std::max<int64_t>(mEndTime, end * USECS_PER_S);
    }
  }
}
|
|
|
|
|
2014-03-31 20:39:04 -07:00
|
|
|
// Initiates a seek to aTarget (a media-relative time). Clamps the target
// into [mStartTime, mEndTime], records it in mSeekTarget and transitions
// to DECODER_STATE_SEEKING; the actual seek runs later in DecodeSeek().
// Must be called on the main thread.
void MediaDecoderStateMachine::Seek(const SeekTarget& aTarget)
{
  NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
  ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());

  // We need to be able to seek both at a transport level and at a media level
  // to seek.
  if (!mMediaSeekable) {
    return;
  }
  // MediaDecoder::mPlayState should be SEEKING while we seek, and
  // in that case MediaDecoder shouldn't be calling us.
  NS_ASSERTION(mState != DECODER_STATE_SEEKING,
               "We shouldn't already be seeking");
  NS_ASSERTION(mState >= DECODER_STATE_DECODING,
               "We should have loaded metadata");

  // Bound the seek time to be inside the media range.
  NS_ASSERTION(mStartTime != -1, "Should know start time by now");
  NS_ASSERTION(mEndTime != -1, "Should know end time by now");
  int64_t seekTime = aTarget.mTime + mStartTime;
  seekTime = std::min(seekTime, mEndTime);
  seekTime = std::max(mStartTime, seekTime);
  NS_ASSERTION(seekTime >= mStartTime && seekTime <= mEndTime,
               "Can only seek in range [0,duration]");
  mSeekTarget = SeekTarget(seekTime, aTarget.mType);

  DECODER_LOG(PR_LOG_DEBUG, "Changed state to SEEKING (to %lld)", mSeekTarget.mTime);
  mState = DECODER_STATE_SEEKING;
  if (mDecoder->GetDecodedStream()) {
    // Rebuild the output MediaStream at the new (media-relative) position.
    mDecoder->RecreateDecodedStream(seekTime - mStartTime);
  }
  ScheduleStateMachine();
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
// Requests the audio thread to stop, synchronously shuts it down if one
// exists, then pushes any pending data to the output MediaStream(s).
// Must be called with the decoder monitor held, on the decode or state
// machine thread.
void MediaDecoderStateMachine::StopAudioThread()
{
  NS_ASSERTION(OnDecodeThread() ||
               OnStateMachineThread(), "Should be on decode thread or state machine thread");
  AssertCurrentThreadInMonitor();

  if (mStopAudioThread) {
    // Nothing to do, since the thread is already stopping
    return;
  }

  mStopAudioThread = true;
  // Wake AudioLoop() (which waits on the monitor) so it sees the stop flag.
  mDecoder->GetReentrantMonitor().NotifyAll();
  if (mAudioThread) {
    DECODER_LOG(PR_LOG_DEBUG, "Shutdown audio thread");
    {
      // Drop the monitor while joining the audio thread; the audio loop may
      // need to acquire it before it can exit.
      ReentrantMonitorAutoExit exitMon(mDecoder->GetReentrantMonitor());
      mAudioThread->Shutdown();
    }
    mAudioThread = nullptr;
    // Now that the audio thread is dead, try sending data to our MediaStream(s).
    // That may have been waiting for the audio thread to stop.
    SendStreamData();
  }
}
|
|
|
|
|
|
|
|
// Dispatches a task to the decode task queue to read the media's metadata,
// unless one is already in flight or we have left DECODING_METADATA.
// Returns the dispatch error on failure, NS_OK otherwise.
nsresult
MediaDecoderStateMachine::EnqueueDecodeMetadataTask()
{
  AssertCurrentThreadInMonitor();

  if (mState != DECODER_STATE_DECODING_METADATA ||
      mDispatchedDecodeMetadataTask) {
    return NS_OK;
  }
  nsresult rv = mDecodeTaskQueue->Dispatch(
    NS_NewRunnableMethod(this, &MediaDecoderStateMachine::CallDecodeMetadata));
  if (NS_SUCCEEDED(rv)) {
    // Flag is cleared again by CallDecodeMetadata() when the task runs.
    mDispatchedDecodeMetadataTask = true;
  } else {
    NS_WARNING("Dispatch ReadMetadata task failed.");
    return rv;
  }

  return NS_OK;
}
|
|
|
|
|
2014-03-10 20:44:10 -07:00
|
|
|
// Runs on the decode task queue: tells the reader it may release decoding
// resources while the pipeline is idle.
void
MediaDecoderStateMachine::SetReaderIdle()
{
#ifdef PR_LOGGING
  {
    // Take the monitor only for the duration of the log; the queue
    // durations read below are protected by it.
    ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
    DECODER_LOG(PR_LOG_DEBUG, "SetReaderIdle() audioQueue=%lld videoQueue=%lld",
                GetDecodedAudioDuration(),
                VideoQueue().Duration());
  }
#endif
  MOZ_ASSERT(OnDecodeThread());
  mReader->SetIdle();
}
|
|
|
|
|
|
|
|
// Ensures audio/video decode tasks are queued when more decoded data is
// needed, and dispatches a SetReaderIdle() task when nothing needs to be
// decoded and playback is logically stopped.
void
MediaDecoderStateMachine::DispatchDecodeTasksIfNeeded()
{
  AssertCurrentThreadInMonitor();

  // NeedToDecodeAudio() can go from false to true while we hold the
  // monitor, but it can't go from true to false. This can happen because
  // NeedToDecodeAudio() takes into account the amount of decoded audio
  // that's been written to the AudioStream but not played yet. So if we
  // were calling NeedToDecodeAudio() twice and we thread-context switch
  // between the calls, audio can play, which can affect the return value
  // of NeedToDecodeAudio() giving inconsistent results. So we cache the
  // value returned by NeedToDecodeAudio(), and make decisions
  // based on the cached value. If NeedToDecodeAudio() has
  // returned false, and then subsequently returns true and we're not
  // playing, it will probably be OK since we don't need to consume data
  // anyway.

  const bool needToDecodeAudio = NeedToDecodeAudio();
  const bool needToDecodeVideo = NeedToDecodeVideo();

  // If we're in completed state, we should not need to decode anything else.
  MOZ_ASSERT(mState != DECODER_STATE_COMPLETED ||
             (!needToDecodeAudio && !needToDecodeVideo));

  // The reader can go idle only when nothing is playing, nothing needs
  // decoding, and we're not mid-seek.
  bool needIdle = !mDecoder->IsLogicallyPlaying() &&
                  mState != DECODER_STATE_SEEKING &&
                  !needToDecodeAudio &&
                  !needToDecodeVideo &&
                  !IsPlaying();

  if (needToDecodeAudio) {
    EnsureAudioDecodeTaskQueued();
  }
  if (needToDecodeVideo) {
    EnsureVideoDecodeTaskQueued();
  }

  if (needIdle) {
    RefPtr<nsIRunnable> event = NS_NewRunnableMethod(
        this, &MediaDecoderStateMachine::SetReaderIdle);
    nsresult rv = mDecodeTaskQueue->Dispatch(event.forget());
    if (NS_FAILED(rv) && mState != DECODER_STATE_SHUTDOWN) {
      // Dispatch failure during shutdown is expected and not warned about.
      NS_WARNING("Failed to dispatch event to set decoder idle state");
    }
  }
}
|
|
|
|
|
2014-03-10 20:44:09 -07:00
|
|
|
// Dispatches a task to the decode task queue to carry out the pending
// seek, unless a seek task is already in flight or we have left SEEKING.
// Returns the dispatch result.
nsresult
MediaDecoderStateMachine::EnqueueDecodeSeekTask()
{
  NS_ASSERTION(OnStateMachineThread() || OnDecodeThread(),
               "Should be on state machine or decode thread.");
  AssertCurrentThreadInMonitor();

  if (mState != DECODER_STATE_SEEKING ||
      mDispatchedDecodeSeekTask) {
    return NS_OK;
  }
  nsresult rv = mDecodeTaskQueue->Dispatch(
    NS_NewRunnableMethod(this, &MediaDecoderStateMachine::DecodeSeek));
  if (NS_SUCCEEDED(rv)) {
    // Flag is cleared again by DecodeSeek() when the task runs.
    mDispatchedDecodeSeekTask = true;
  } else {
    NS_WARNING("Dispatch DecodeSeek task failed.");
  }
  return rv;
}
|
|
|
|
|
2014-03-10 20:44:10 -07:00
|
|
|
// Queues an audio decode task when (and only when) more decoded audio is
// needed. Safe to call from the state machine or decode thread.
nsresult
MediaDecoderStateMachine::DispatchAudioDecodeTaskIfNeeded()
{
  ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
  NS_ASSERTION(OnStateMachineThread() || OnDecodeThread(),
               "Should be on state machine or decode thread.");

  // Nothing to do unless the audio pipeline is running low.
  return NeedToDecodeAudio() ? EnsureAudioDecodeTaskQueued() : NS_OK;
}
|
|
|
|
|
2014-03-10 20:44:09 -07:00
|
|
|
// Queues a DecodeAudio task on the decode task queue, unless one is
// already pending, audio decoding has finished, or we've reached (or
// passed) the COMPLETED state.
nsresult
MediaDecoderStateMachine::EnsureAudioDecodeTaskQueued()
{
  ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
  NS_ASSERTION(OnStateMachineThread() || OnDecodeThread(),
               "Should be on state machine or decode thread.");

  if (mState >= DECODER_STATE_COMPLETED) {
    return NS_OK;
  }

  MOZ_ASSERT(mState > DECODER_STATE_DECODING_METADATA);

  if (mIsAudioDecoding && !mDispatchedAudioDecodeTask) {
    nsresult rv = mDecodeTaskQueue->Dispatch(
      NS_NewRunnableMethod(this, &MediaDecoderStateMachine::DecodeAudio));
    if (NS_SUCCEEDED(rv)) {
      mDispatchedAudioDecodeTask = true;
    } else {
      NS_WARNING("Failed to dispatch task to decode audio");
    }
  }

  return NS_OK;
}
|
2014-03-10 20:44:09 -07:00
|
|
|
|
2014-03-10 20:44:10 -07:00
|
|
|
// Queues a video decode task when (and only when) more decoded video is
// needed. Safe to call from the state machine or decode thread.
nsresult
MediaDecoderStateMachine::DispatchVideoDecodeTaskIfNeeded()
{
  ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
  NS_ASSERTION(OnStateMachineThread() || OnDecodeThread(),
               "Should be on state machine or decode thread.");

  // Nothing to do unless the video pipeline is running low.
  return NeedToDecodeVideo() ? EnsureVideoDecodeTaskQueued() : NS_OK;
}
|
|
|
|
|
2014-03-10 20:44:09 -07:00
|
|
|
// Queues a DecodeVideo task on the decode task queue, unless one is
// already pending, video decoding has finished, or we've reached (or
// passed) the COMPLETED state.
nsresult
MediaDecoderStateMachine::EnsureVideoDecodeTaskQueued()
{
  ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
  NS_ASSERTION(OnStateMachineThread() || OnDecodeThread(),
               "Should be on state machine or decode thread.");

  if (mState >= DECODER_STATE_COMPLETED) {
    return NS_OK;
  }

  MOZ_ASSERT(mState > DECODER_STATE_DECODING_METADATA);

  if (mIsVideoDecoding && !mDispatchedVideoDecodeTask) {
    nsresult rv = mDecodeTaskQueue->Dispatch(
      NS_NewRunnableMethod(this, &MediaDecoderStateMachine::DecodeVideo));
    if (NS_SUCCEEDED(rv)) {
      mDispatchedVideoDecodeTask = true;
    } else {
      NS_WARNING("Failed to dispatch task to decode video");
    }
  }

  return NS_OK;
}
|
2011-11-07 17:38:17 -08:00
|
|
|
|
2011-07-11 20:39:10 -07:00
|
|
|
// Creates the "Media Audio" thread (when audio is present and is not being
// captured into a MediaStream) and starts AudioLoop() on it.
// On thread-creation failure the whole state machine is moved to SHUTDOWN.
nsresult
MediaDecoderStateMachine::StartAudioThread()
{
  NS_ASSERTION(OnStateMachineThread() || OnDecodeThread(),
               "Should be on state machine or decode thread.");
  AssertCurrentThreadInMonitor();
  if (mAudioCaptured) {
    // Captured audio goes to MediaStreams, not to a dedicated audio thread.
    NS_ASSERTION(mStopAudioThread, "mStopAudioThread must always be true if audio is captured");
    return NS_OK;
  }

  mStopAudioThread = false;
  if (HasAudio() && !mAudioThread) {
    nsresult rv = NS_NewNamedThread("Media Audio",
                                    getter_AddRefs(mAudioThread),
                                    nullptr,
                                    MEDIA_THREAD_STACK_SIZE);
    if (NS_FAILED(rv)) {
      DECODER_LOG(PR_LOG_WARNING, "Changed state to SHUTDOWN because failed to create audio thread");
      mState = DECODER_STATE_SHUTDOWN;
      return rv;
    }

    // NOTE(review): the Dispatch() result is ignored here — confirm
    // whether a failed dispatch should also tear the thread down.
    nsCOMPtr<nsIRunnable> event =
      NS_NewRunnableMethod(this, &MediaDecoderStateMachine::AudioLoop);
    mAudioThread->Dispatch(event, NS_DISPATCH_NORMAL);
  }
  return NS_OK;
}
|
|
|
|
|
2014-04-27 18:12:50 -07:00
|
|
|
// Returns the number of microseconds of decoded audio available ahead of
// the current playback position.
int64_t MediaDecoderStateMachine::AudioDecodedUsecs()
{
  NS_ASSERTION(HasAudio(),
               "Should only call AudioDecodedUsecs() when we have audio");
  // The amount of audio we have decoded is the amount of audio data we've
  // already decoded and pushed to the hardware, plus the amount of audio
  // data waiting to be pushed to the hardware.
  int64_t pushed = (mAudioEndTime != -1) ? (mAudioEndTime - GetMediaTime()) : 0;
  return pushed + AudioQueue().Duration();
}
|
|
|
|
|
2014-04-27 18:12:50 -07:00
|
|
|
// Returns true when the decoded-data buffers are running low: less than
// aAudioUsecs of decoded audio, or fewer than LOW_VIDEO_FRAMES decoded
// video frames — ignoring streams that have already decoded to their end.
bool MediaDecoderStateMachine::HasLowDecodedData(int64_t aAudioUsecs)
{
  AssertCurrentThreadInMonitor();
  // We consider ourselves low on decoded data if we're low on audio,
  // provided we've not decoded to the end of the audio stream, or
  // if we're low on video frames, provided
  // we've not decoded to the end of the video stream.
  return ((HasAudio() &&
           !AudioQueue().IsFinished() &&
           AudioDecodedUsecs() < aAudioUsecs)
          ||
          (HasVideo() &&
           !VideoQueue().IsFinished() &&
           static_cast<uint32_t>(VideoQueue().GetSize()) < LOW_VIDEO_FRAMES));
}
|
2010-09-14 16:24:47 -07:00
|
|
|
|
2014-04-27 18:12:50 -07:00
|
|
|
// Convenience overload: checks for low undecoded data using the default
// low-data threshold.
bool MediaDecoderStateMachine::HasLowUndecodedData()
{
  return HasLowUndecodedData(mLowDataThresholdUsecs);
}
|
|
|
|
|
2014-04-27 18:12:50 -07:00
|
|
|
// Returns true when the cached (downloaded but not yet decoded) data does
// not extend far enough past the current read position to cover aUsecs of
// playback at the estimated playback byte rate.
bool MediaDecoderStateMachine::HasLowUndecodedData(double aUsecs)
{
  AssertCurrentThreadInMonitor();
  NS_ASSERTION(mState > DECODER_STATE_DECODING_METADATA,
               "Must have loaded metadata for GetBuffered() to work");

  bool reliable;
  double bytesPerSecond = mDecoder->ComputePlaybackRate(&reliable);
  if (!reliable) {
    // Default to assuming we have enough
    return false;
  }

  // Byte position the cache must reach to cover aUsecs of playback,
  // clamped to the resource length when known.
  MediaResource* stream = mDecoder->GetResource();
  int64_t currentPos = stream->Tell();
  int64_t requiredPos = currentPos + int64_t((aUsecs/1000000.0)*bytesPerSecond);
  int64_t length = stream->GetLength();
  if (length >= 0) {
    requiredPos = std::min(requiredPos, length);
  }

  return stream->GetCachedDataEnd(currentPos) < requiredPos;
}
|
|
|
|
|
2014-03-10 20:44:09 -07:00
|
|
|
// Handles a fatal decode error on the decode thread: moves the state
// machine to SHUTDOWN, then synchronously notifies MediaDecoder on the
// main thread.
void
MediaDecoderStateMachine::DecodeError()
{
  AssertCurrentThreadInMonitor();
  NS_ASSERTION(OnDecodeThread(), "Should be on decode thread.");

  // Change state to shutdown before sending error report to MediaDecoder
  // and the HTMLMediaElement, so that our pipeline can start exiting
  // cleanly during the sync dispatch below.
  DECODER_LOG(PR_LOG_WARNING, "Decode error, changed state to SHUTDOWN");
  ScheduleStateMachine();
  mState = DECODER_STATE_SHUTDOWN;
  mDecoder->GetReentrantMonitor().NotifyAll();

  // Dispatch the event to call DecodeError synchronously. This ensures
  // we're in shutdown state by the time we exit the decode thread.
  // If we just moved to shutdown state here on the decode thread, we may
  // cause the state machine to shutdown/free memory without closing its
  // media stream properly, and we'll get callbacks from the media stream
  // causing a crash.
  {
    nsCOMPtr<nsIRunnable> event =
      NS_NewRunnableMethod(mDecoder, &MediaDecoder::DecodeError);
    // Drop the monitor around the synchronous dispatch to avoid deadlock
    // with main-thread code that takes it.
    ReentrantMonitorAutoExit exitMon(mDecoder->GetReentrantMonitor());
    NS_DispatchToMainThread(event, NS_DISPATCH_SYNC);
  }
}
|
|
|
|
|
2014-03-10 20:44:09 -07:00
|
|
|
// Decode-task-queue entry point for metadata decoding. Clears the
// "metadata task dispatched" flag on scope exit and shuts the decoder
// down if DecodeMetadata() fails.
void
MediaDecoderStateMachine::CallDecodeMetadata()
{
  ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
  AutoSetOnScopeExit<bool> unsetOnExit(mDispatchedDecodeMetadataTask, false);
  if (mState != DECODER_STATE_DECODING_METADATA) {
    // State changed (e.g. shutdown/seek) before the task ran; do nothing.
    return;
  }
  if (NS_FAILED(DecodeMetadata())) {
    DECODER_LOG(PR_LOG_WARNING, "Decode metadata failed, shutting down decoder");
    DecodeError();
  }
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
// Reads the media's metadata on the decode thread: fills mInfo, finds the
// start time (rendering the first video frame if any), notifies the main
// thread that metadata loaded, installs pop listeners that re-trigger
// decoding, and transitions into DECODING. May instead park in
// WAIT_FOR_RESOURCES if the reader is waiting on a hardware resource.
// Returns NS_ERROR_FAILURE on read failure, invalid media, or shutdown.
nsresult MediaDecoderStateMachine::DecodeMetadata()
{
  AssertCurrentThreadInMonitor();
  NS_ASSERTION(OnDecodeThread(), "Should be on decode thread.");
  DECODER_LOG(PR_LOG_DEBUG, "Decoding Media Headers");
  if (mState != DECODER_STATE_DECODING_METADATA) {
    return NS_ERROR_FAILURE;
  }

  nsresult res;
  MediaInfo info;
  MetadataTags* tags;
  {
    // Drop the monitor while reading: ReadMetadata() can block on I/O.
    ReentrantMonitorAutoExit exitMon(mDecoder->GetReentrantMonitor());
    res = mReader->ReadMetadata(&info, &tags);
  }
  if (NS_SUCCEEDED(res) &&
      mState == DECODER_STATE_DECODING_METADATA &&
      mReader->IsWaitingMediaResources()) {
    // change state to DECODER_STATE_WAIT_FOR_RESOURCES
    StartWaitForResources();
    // NOTE(review): |tags| appears not to be released on this early-return
    // path (nor on the failure path below) — confirm ownership contract.
    return NS_OK;
  }

  mInfo = info;

  if (NS_FAILED(res) || (!info.HasValidMedia())) {
    return NS_ERROR_FAILURE;
  }
  mDecoder->StartProgressUpdates();
  mGotDurationFromMetaData = (GetDuration() != -1);

  // Locate the media's start time; for video this decodes (and renders)
  // the first frame.
  VideoData* videoData = FindStartTime();
  if (videoData) {
    ReentrantMonitorAutoExit exitMon(mDecoder->GetReentrantMonitor());
    RenderVideoFrame(videoData, TimeStamp::Now());
  }

  if (mState == DECODER_STATE_SHUTDOWN) {
    return NS_ERROR_FAILURE;
  }

  NS_ASSERTION(mStartTime != -1, "Must have start time");
  MOZ_ASSERT((!HasVideo() && !HasAudio()) ||
             !(mMediaSeekable && mTransportSeekable) || mEndTime != -1,
             "Active seekable media should have end time");
  MOZ_ASSERT(!(mMediaSeekable && mTransportSeekable) ||
             GetDuration() != -1, "Seekable media should have duration");
  DECODER_LOG(PR_LOG_DEBUG, "Media goes from %lld to %lld (duration %lld) "
              "transportSeekable=%d, mediaSeekable=%d",
              mStartTime, mEndTime, GetDuration(), mTransportSeekable, mMediaSeekable);

  if (HasAudio() && !HasVideo()) {
    // We're playing audio only. We don't need to worry about slow video
    // decodes causing audio underruns, so don't buffer so much audio in
    // order to reduce memory usage.
    mAmpleAudioThresholdUsecs /= NO_VIDEO_AMPLE_AUDIO_DIVISOR;
    mLowAudioThresholdUsecs /= NO_VIDEO_AMPLE_AUDIO_DIVISOR;
  }

  // Inform the element that we've loaded the metadata and the first frame.
  nsCOMPtr<nsIRunnable> metadataLoadedEvent =
    new AudioMetadataEventRunner(mDecoder,
                                 mInfo.mAudio.mChannels,
                                 mInfo.mAudio.mRate,
                                 HasAudio(),
                                 HasVideo(),
                                 tags);
  NS_DispatchToMainThread(metadataLoadedEvent);

  // When samples are popped from a queue, re-evaluate whether more data
  // needs decoding; the listeners run on the decode task queue.
  if (HasAudio()) {
    RefPtr<nsIRunnable> decodeTask(
      NS_NewRunnableMethod(this, &MediaDecoderStateMachine::DispatchAudioDecodeTaskIfNeeded));
    AudioQueue().AddPopListener(decodeTask, mDecodeTaskQueue);
  }
  if (HasVideo()) {
    RefPtr<nsIRunnable> decodeTask(
      NS_NewRunnableMethod(this, &MediaDecoderStateMachine::DispatchVideoDecodeTaskIfNeeded));
    VideoQueue().AddPopListener(decodeTask, mDecodeTaskQueue);
  }

  if (mState == DECODER_STATE_DECODING_METADATA) {
    DECODER_LOG(PR_LOG_DEBUG, "Changed state from DECODING_METADATA to DECODING");
    StartDecoding();
  }

  // For very short media FindStartTime() can decode the entire media.
  // So we need to check if this has occurred, else our decode pipeline won't
  // run (since it doesn't need to) and we won't detect end of stream.
  CheckIfDecodeComplete();

  if ((mState == DECODER_STATE_DECODING || mState == DECODER_STATE_COMPLETED) &&
      mDecoder->GetState() == MediaDecoder::PLAY_STATE_PLAYING &&
      !IsPlaying())
  {
    StartPlayback();
  }

  return NS_OK;
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
// Performs the seek requested in mSeekTarget on the decode thread: stops
// playback, asks the reader to seek (and optionally decode up to the exact
// target), re-primes the first audio/video frames at the new position, and
// transitions back to DECODING (or COMPLETED when a non-live stream was
// seeked exactly to its end). Entered holding no lock; acquires the decoder
// monitor and deliberately drops it around every blocking operation.
void MediaDecoderStateMachine::DecodeSeek()
{
  ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
  NS_ASSERTION(OnDecodeThread(), "Should be on decode thread.");
  // Clear the "a seek task is in flight" flag when we leave this function,
  // however we leave it, so another seek task can be dispatched.
  AutoSetOnScopeExit<bool> unsetOnExit(mDispatchedDecodeSeekTask, false);
  // The seek may have been cancelled (state changed) between dispatch of the
  // task and it running; bail out if so.
  if (mState != DECODER_STATE_SEEKING) {
    return;
  }

  // During the seek, don't have a lock on the decoder state,
  // otherwise long seek operations can block the main thread.
  // The events dispatched to the main thread are SYNC calls.
  // These calls are made outside of the decode monitor lock so
  // it is safe for the main thread to makes calls that acquire
  // the lock since it won't deadlock. We check the state when
  // acquiring the lock again in case shutdown has occurred
  // during the time when we didn't have the lock.
  int64_t seekTime = mSeekTarget.mTime;
  // Suppress progress (download) notifications while the position jumps.
  mDecoder->StopProgressUpdates();

  bool currentTimeChanged = false;
  const int64_t mediaTime = GetMediaTime();
  if (mediaTime != seekTime) {
    currentTimeChanged = true;
    // Stop playback now to ensure that while we're outside the monitor
    // dispatching SeekingStarted, playback doesn't advance and mess with
    // mCurrentFrameTime that we've setting to seekTime here.
    StopPlayback();
    UpdatePlaybackPositionInternal(seekTime);
  }

  // Update mBasePosition only after StopPlayback() which will call GetClock()
  // which will call GetVideoStreamPosition() which will read mBasePosition.
  // If we update mBasePosition too early in Seek(), |pos -= mBasePosition|
  // will be wrong and assertion will fail in GetVideoStreamPosition().
  mBasePosition = seekTime - mStartTime;

  // SeekingStarted will do a UpdateReadyStateForData which will
  // inform the element and its users that we have no frames
  // to display
  {
    ReentrantMonitorAutoExit exitMon(mDecoder->GetReentrantMonitor());
    nsCOMPtr<nsIRunnable> startEvent =
      NS_NewRunnableMethod(mDecoder, &MediaDecoder::SeekingStarted);
    // SYNC dispatch: safe because we dropped the monitor above.
    NS_DispatchToMainThread(startEvent, NS_DISPATCH_SYNC);
  }

  int64_t newCurrentTime = seekTime;
  if (currentTimeChanged) {
    // The seek target is different than the current playback position,
    // we'll need to seek the playback position, so shutdown our decode
    // and audio threads.
    StopAudioThread();
    ResetPlayback();
    nsresult res;
    {
      ReentrantMonitorAutoExit exitMon(mDecoder->GetReentrantMonitor());
      // Now perform the seek. We must not hold the state machine monitor
      // while we seek, since the seek reads, which could block on I/O.
      res = mReader->Seek(seekTime,
                          mStartTime,
                          mEndTime,
                          mediaTime);

      // For an "Accurate" seek, decode forward from the preceding keyframe
      // to the exact target time.
      if (NS_SUCCEEDED(res) && mSeekTarget.mType == SeekTarget::Accurate) {
        res = mReader->DecodeToTarget(seekTime);
      }
    }

    if (NS_SUCCEEDED(res)) {
      int64_t nextSampleStartTime = 0;
      VideoData* video = nullptr;
      {
        ReentrantMonitorAutoExit exitMon(mDecoder->GetReentrantMonitor());
        video = mReader->FindStartTime(nextSampleStartTime);
      }

      // Setup timestamp state.
      if (seekTime == mEndTime) {
        newCurrentTime = mAudioStartTime = seekTime;
      } else if (HasAudio()) {
        // Prefer the first decoded audio frame's time as the new position,
        // since the audio clock drives playback when audio is present.
        AudioData* audio = AudioQueue().PeekFront();
        newCurrentTime = mAudioStartTime = audio ? audio->mTime : seekTime;
      } else {
        newCurrentTime = video ? video->mTime : seekTime;
      }
      mPlayDuration = newCurrentTime - mStartTime;

      if (HasVideo()) {
        if (video) {
          {
            // RenderVideoFrame asserts the monitor is NOT held.
            ReentrantMonitorAutoExit exitMon(mDecoder->GetReentrantMonitor());
            RenderVideoFrame(video, TimeStamp::Now());
          }
          nsCOMPtr<nsIRunnable> event =
            NS_NewRunnableMethod(mDecoder, &MediaDecoder::Invalidate);
          NS_DispatchToMainThread(event);
        }
      }
    } else {
      // The reader failed to seek or decode; put the decoder in error state.
      DecodeError();
    }
  }
  mDecoder->StartProgressUpdates();
  // The state can have changed while we were outside the monitor (e.g.
  // shutdown, or a transition into dormant); don't proceed in that case.
  if (mState == DECODER_STATE_DECODING_METADATA ||
      mState == DECODER_STATE_DORMANT ||
      mState == DECODER_STATE_SHUTDOWN) {
    return;
  }

  // Change state to DECODING or COMPLETED now. SeekingStopped will
  // call MediaDecoderStateMachine::Seek to reset our state to SEEKING
  // if we need to seek again.
  nsCOMPtr<nsIRunnable> stopEvent;
  bool isLiveStream = mDecoder->GetResource()->GetLength() == -1;
  if (GetMediaTime() == mEndTime && !isLiveStream) {
    // Seeked to end of media, move to COMPLETED state. Note we don't do
    // this if we're playing a live stream, since the end of media will advance
    // once we download more data!
    DECODER_LOG(PR_LOG_DEBUG, "Changed state from SEEKING (to %lld) to COMPLETED", seekTime);
    stopEvent = NS_NewRunnableMethod(mDecoder, &MediaDecoder::SeekingStoppedAtEnd);
    // Explicitly set our state so we don't decode further, and so
    // we report playback ended to the media element.
    mState = DECODER_STATE_COMPLETED;
    mIsAudioDecoding = false;
    mIsVideoDecoding = false;
    DispatchDecodeTasksIfNeeded();
  } else {
    DECODER_LOG(PR_LOG_DEBUG, "Changed state from SEEKING (to %lld) to DECODING", seekTime);
    stopEvent = NS_NewRunnableMethod(mDecoder, &MediaDecoder::SeekingStopped);
    StartDecoding();
  }

  if (newCurrentTime != mediaTime) {
    UpdatePlaybackPositionInternal(newCurrentTime);
    if (mDecoder->GetDecodedStream()) {
      SetSyncPointForMediaStream();
    }
  }

  // Try to decode another frame to detect if we're at the end...
  DECODER_LOG(PR_LOG_DEBUG, "Seek completed, mCurrentFrameTime=%lld", mCurrentFrameTime);

  {
    ReentrantMonitorAutoExit exitMon(mDecoder->GetReentrantMonitor());
    NS_DispatchToMainThread(stopEvent, NS_DISPATCH_SYNC);
  }

  // Reset quick buffering status. This ensures that if we began the
  // seek while quick-buffering, we won't bypass quick buffering mode
  // if we need to buffer after the seek.
  mQuickBuffering = false;

  ScheduleStateMachine();
}
|
|
|
|
|
2011-07-11 20:39:34 -07:00
|
|
|
// Runnable to dispose of the decoder and state machine on the main thread.
|
|
|
|
class nsDecoderDisposeEvent : public nsRunnable {
|
|
|
|
public:
|
2012-11-14 11:46:40 -08:00
|
|
|
nsDecoderDisposeEvent(already_AddRefed<MediaDecoder> aDecoder,
|
|
|
|
already_AddRefed<MediaDecoderStateMachine> aStateMachine)
|
2011-09-21 00:01:00 -07:00
|
|
|
: mDecoder(aDecoder), mStateMachine(aStateMachine) {}
|
2011-07-11 20:39:34 -07:00
|
|
|
NS_IMETHOD Run() {
|
|
|
|
NS_ASSERTION(NS_IsMainThread(), "Must be on main thread.");
|
2014-06-13 13:20:37 -07:00
|
|
|
mStateMachine->ReleaseDecoder();
|
|
|
|
mDecoder->ReleaseStateMachine();
|
2012-07-30 07:20:58 -07:00
|
|
|
mStateMachine = nullptr;
|
|
|
|
mDecoder = nullptr;
|
2011-07-11 20:39:34 -07:00
|
|
|
return NS_OK;
|
|
|
|
}
|
|
|
|
private:
|
2012-11-14 11:46:40 -08:00
|
|
|
nsRefPtr<MediaDecoder> mDecoder;
|
2014-03-17 17:23:03 -07:00
|
|
|
nsRefPtr<MediaDecoderStateMachine> mStateMachine;
|
2011-07-11 20:39:34 -07:00
|
|
|
};
|
|
|
|
|
|
|
|
// Runnable which dispatches an event to the main thread to dispose of the
|
|
|
|
// decoder and state machine. This runs on the state machine thread after
|
|
|
|
// the state machine has shutdown, and all events for that state machine have
|
|
|
|
// finished running.
|
|
|
|
class nsDispatchDisposeEvent : public nsRunnable {
|
|
|
|
public:
|
2012-11-14 11:46:40 -08:00
|
|
|
nsDispatchDisposeEvent(MediaDecoder* aDecoder,
|
|
|
|
MediaDecoderStateMachine* aStateMachine)
|
2011-09-21 00:01:00 -07:00
|
|
|
: mDecoder(aDecoder), mStateMachine(aStateMachine) {}
|
2011-07-11 20:39:34 -07:00
|
|
|
NS_IMETHOD Run() {
|
2011-09-21 00:01:00 -07:00
|
|
|
NS_DispatchToMainThread(new nsDecoderDisposeEvent(mDecoder.forget(),
|
|
|
|
mStateMachine.forget()));
|
2011-07-11 20:39:34 -07:00
|
|
|
return NS_OK;
|
|
|
|
}
|
|
|
|
private:
|
2012-11-14 11:46:40 -08:00
|
|
|
nsRefPtr<MediaDecoder> mDecoder;
|
2014-03-17 17:23:03 -07:00
|
|
|
nsRefPtr<MediaDecoderStateMachine> mStateMachine;
|
2011-07-11 20:39:34 -07:00
|
|
|
};
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
// Runs one iteration of the state machine, dispatching on mState. Called on
// the state machine thread with the decoder monitor held (asserted below).
// Each case either completes its work synchronously or schedules/enqueues
// follow-up work and returns.
nsresult MediaDecoderStateMachine::RunStateMachine()
{
  AssertCurrentThreadInMonitor();

  MediaResource* resource = mDecoder->GetResource();
  NS_ENSURE_TRUE(resource, NS_ERROR_NULL_POINTER);

  switch (mState) {
    case DECODER_STATE_SHUTDOWN: {
      if (IsPlaying()) {
        StopPlayback();
      }
      StopAudioThread();
      // If mAudioThread is non-null after StopAudioThread completes, we are
      // running in a nested event loop waiting for Shutdown() on
      // mAudioThread to complete.  Return to the event loop and let it
      // finish processing before continuing with shutdown.
      if (mAudioThread) {
        MOZ_ASSERT(mStopAudioThread);
        return NS_OK;
      }

      // The reader's listeners hold references to the state machine,
      // creating a cycle which keeps the state machine and its shared
      // thread pools alive. So break it here.
      AudioQueue().ClearListeners();
      VideoQueue().ClearListeners();

      {
        // Drop the monitor while blocking on decode-task-queue shutdown so
        // decode tasks that need the monitor can finish.
        ReentrantMonitorAutoExit exitMon(mDecoder->GetReentrantMonitor());
        // Wait for the thread decoding to exit.
        mDecodeTaskQueue->Shutdown();
        mDecodeTaskQueue = nullptr;
        mReader->ReleaseMediaResources();
      }
      // Now that those threads are stopped, there's no possibility of
      // mPendingWakeDecoder being needed again. Revoke it.
      mPendingWakeDecoder = nullptr;

      MOZ_ASSERT(mState == DECODER_STATE_SHUTDOWN,
                 "How did we escape from the shutdown state?");
      // We must daisy-chain these events to destroy the decoder. We must
      // destroy the decoder on the main thread, but we can't destroy the
      // decoder while this thread holds the decoder monitor. We can't
      // dispatch an event to the main thread to destroy the decoder from
      // here, as the event may run before the dispatch returns, and we
      // hold the decoder monitor here. We also want to guarantee that the
      // state machine is destroyed on the main thread, and so the
      // event runner running this function (which holds a reference to the
      // state machine) needs to finish and be released in order to allow
      // that. So we dispatch an event to run after this event runner has
      // finished and released its monitor/references. That event then will
      // dispatch an event to the main thread to release the decoder and
      // state machine.
      GetStateMachineThread()->Dispatch(
        new nsDispatchDisposeEvent(mDecoder, this), NS_DISPATCH_NORMAL);

      mTimer->Cancel();
      mTimer = nullptr;
      return NS_OK;
    }

    case DECODER_STATE_DORMANT: {
      // Dormant: keep the machine alive but release decoder resources.
      if (IsPlaying()) {
        StopPlayback();
      }
      StopAudioThread();
      // Now that those threads are stopped, there's no possibility of
      // mPendingWakeDecoder being needed again. Revoke it.
      mPendingWakeDecoder = nullptr;
      {
        ReentrantMonitorAutoExit exitMon(mDecoder->GetReentrantMonitor());
        // Wait for the thread decoding, if any, to exit.
        mDecodeTaskQueue->AwaitIdle();
        mReader->ReleaseMediaResources();
      }
      return NS_OK;
    }

    case DECODER_STATE_WAIT_FOR_RESOURCES: {
      // Nothing to do until resources become available; an external event
      // moves us out of this state.
      return NS_OK;
    }

    case DECODER_STATE_DECODING_METADATA: {
      // Ensure we have a decode thread to decode metadata.
      return EnqueueDecodeMetadataTask();
    }

    case DECODER_STATE_DECODING: {
      if (mDecoder->GetState() != MediaDecoder::PLAY_STATE_PLAYING &&
          IsPlaying())
      {
        // We're playing, but the element/decoder is in paused state. Stop
        // playing!
        StopPlayback();
      }

      if (mDecoder->GetState() == MediaDecoder::PLAY_STATE_PLAYING &&
          !IsPlaying()) {
        // We are playing, but the state machine does not know it yet. Tell it
        // that it is, so that the clock can be properly queried.
        StartPlayback();
      }

      AdvanceFrame();
      NS_ASSERTION(mDecoder->GetState() != MediaDecoder::PLAY_STATE_PLAYING ||
                   IsStateMachineScheduled() ||
                   mPlaybackRate == 0.0, "Must have timer scheduled");
      return NS_OK;
    }

    case DECODER_STATE_BUFFERING: {
      TimeStamp now = TimeStamp::Now();
      NS_ASSERTION(!mBufferingStart.IsNull(), "Must know buffering start time.");

      // We will remain in the buffering state if we've not decoded enough
      // data to begin playback, or if we've not downloaded a reasonable
      // amount of data inside our buffering time.
      TimeDuration elapsed = now - mBufferingStart;
      bool isLiveStream = resource->GetLength() == -1;
      if ((isLiveStream || !mDecoder->CanPlayThrough()) &&
          elapsed < TimeDuration::FromSeconds(mBufferingWait * mPlaybackRate) &&
          (mQuickBuffering ? HasLowDecodedData(QUICK_BUFFERING_LOW_DATA_USECS)
                           : HasLowUndecodedData(mBufferingWait * USECS_PER_S)) &&
          !mDecoder->IsDataCachedToEndOfResource() &&
          !resource->IsSuspended())
      {
        DECODER_LOG(PR_LOG_DEBUG, "Buffering: wait %ds, timeout in %.3lfs %s",
                    mBufferingWait, mBufferingWait - elapsed.ToSeconds(),
                    (mQuickBuffering ? "(quick exit)" : ""));
        // Still buffering: re-check again in one second.
        ScheduleStateMachine(USECS_PER_S);
        return NS_OK;
      } else {
        DECODER_LOG(PR_LOG_DEBUG, "Changed state from BUFFERING to DECODING");
        DECODER_LOG(PR_LOG_DEBUG, "Buffered for %.3lfs", (now - mBufferingStart).ToSeconds());
        StartDecoding();
      }

      // Notify to allow blocked decoder thread to continue
      mDecoder->GetReentrantMonitor().NotifyAll();
      UpdateReadyState();
      if (mDecoder->GetState() == MediaDecoder::PLAY_STATE_PLAYING &&
          !IsPlaying())
      {
        StartPlayback();
      }
      NS_ASSERTION(IsStateMachineScheduled(), "Must have timer scheduled");
      return NS_OK;
    }

    case DECODER_STATE_SEEKING: {
      // Ensure we have a decode thread to perform the seek.
      return EnqueueDecodeSeekTask();
    }

    case DECODER_STATE_COMPLETED: {
      // Play the remaining media. We want to run AdvanceFrame() at least
      // once to ensure the current playback position is advanced to the
      // end of the media, and so that we update the readyState.
      if (VideoQueue().GetSize() > 0 ||
          (HasAudio() && !mAudioCompleted) ||
          (mDecoder->GetDecodedStream() && !mDecoder->GetDecodedStream()->IsFinished()))
      {
        AdvanceFrame();
        NS_ASSERTION(mDecoder->GetState() != MediaDecoder::PLAY_STATE_PLAYING ||
                     mPlaybackRate == 0 ||
                     IsStateMachineScheduled(),
                     "Must have timer scheduled");
        return NS_OK;
      }

      // StopPlayback in order to reset the IsPlaying() state so audio
      // is restarted correctly.
      StopPlayback();

      if (mState != DECODER_STATE_COMPLETED) {
        // While we're presenting a frame we can change state. Whatever changed
        // our state should have scheduled another state machine run.
        NS_ASSERTION(IsStateMachineScheduled(), "Must have timer scheduled");
        return NS_OK;
      }

      StopAudioThread();
      // When we're decoding to a stream, the stream's main-thread finish signal
      // will take care of calling MediaDecoder::PlaybackEnded.
      if (mDecoder->GetState() == MediaDecoder::PLAY_STATE_PLAYING &&
          !mDecoder->GetDecodedStream()) {
        // Snap the final position to the furthest of the end time, last
        // video frame end, and the audio clock.
        int64_t videoTime = HasVideo() ? mVideoFrameEndTime : 0;
        int64_t clockTime = std::max(mEndTime, std::max(videoTime, GetAudioClock()));
        UpdatePlaybackPosition(clockTime);

        {
          // Wait for the state change is completed in the main thread,
          // otherwise we might see |mDecoder->GetState() == MediaDecoder::PLAY_STATE_PLAYING|
          // in next loop and send |MediaDecoder::PlaybackEnded| again to trigger 'ended'
          // event twice in the media element.
          ReentrantMonitorAutoExit exitMon(mDecoder->GetReentrantMonitor());
          nsCOMPtr<nsIRunnable> event =
            NS_NewRunnableMethod(mDecoder, &MediaDecoder::PlaybackEnded);
          NS_DispatchToMainThread(event, NS_DISPATCH_SYNC);
        }
      }
      return NS_OK;
    }
  }

  return NS_OK;
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
void MediaDecoderStateMachine::RenderVideoFrame(VideoData* aData,
|
2014-03-10 20:44:09 -07:00
|
|
|
TimeStamp aTarget)
|
2010-04-01 20:03:07 -07:00
|
|
|
{
|
2011-07-11 20:39:23 -07:00
|
|
|
NS_ASSERTION(OnStateMachineThread() || OnDecodeThread(),
|
|
|
|
"Should be on state machine or decode thread.");
|
2011-04-29 12:21:57 -07:00
|
|
|
mDecoder->GetReentrantMonitor().AssertNotCurrentThreadIn();
|
2010-04-01 20:03:07 -07:00
|
|
|
|
|
|
|
if (aData->mDuplicate) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2014-04-23 02:29:04 -07:00
|
|
|
VERBOSE_LOG("playing video frame %lld", aData->mTime);
|
2012-04-29 20:12:42 -07:00
|
|
|
|
2012-02-14 20:35:01 -08:00
|
|
|
VideoFrameContainer* container = mDecoder->GetVideoFrameContainer();
|
|
|
|
if (container) {
|
2014-02-09 00:04:38 -08:00
|
|
|
container->SetCurrentFrame(ThebesIntSize(aData->mDisplay), aData->mImage,
|
|
|
|
aTarget);
|
2010-04-01 20:03:07 -07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2012-08-22 08:56:38 -07:00
|
|
|
int64_t
|
2012-11-14 11:46:40 -08:00
|
|
|
MediaDecoderStateMachine::GetAudioClock()
|
2010-04-01 20:03:07 -07:00
|
|
|
{
|
2011-07-11 20:40:38 -07:00
|
|
|
// We must hold the decoder monitor while using the audio stream off the
|
2013-12-19 19:24:42 -08:00
|
|
|
// audio thread to ensure that it doesn't get destroyed on the audio thread
|
2011-07-11 20:40:38 -07:00
|
|
|
// while we're using it.
|
2013-11-03 14:11:09 -08:00
|
|
|
AssertCurrentThreadInMonitor();
|
2012-11-19 18:22:42 -08:00
|
|
|
if (!HasAudio() || mAudioCaptured)
|
|
|
|
return -1;
|
2013-12-19 19:24:42 -08:00
|
|
|
if (!mAudioStream) {
|
|
|
|
// Audio thread hasn't played any data yet.
|
2011-07-11 20:39:37 -07:00
|
|
|
return mAudioStartTime;
|
|
|
|
}
|
2013-12-19 19:24:42 -08:00
|
|
|
int64_t t = mAudioStream->GetPosition();
|
2010-04-01 20:03:07 -07:00
|
|
|
return (t == -1) ? -1 : t + mAudioStartTime;
|
|
|
|
}
|
|
|
|
|
2012-11-22 02:38:28 -08:00
|
|
|
// Estimates the playback position (usecs, media time) from the system clock
// when there is no audio clock to follow: elapsed wall-clock time since
// mPlayStartTime, scaled by mPlaybackRate from the mBasePosition anchor.
// Mutates mPlayStartTime/mResetPlayStartTime when a rate change is pending,
// so callers must hold the decoder monitor.
int64_t MediaDecoderStateMachine::GetVideoStreamPosition()
{
  AssertCurrentThreadInMonitor();

  if (!IsPlaying()) {
    // Paused: position is frozen at the accumulated play duration.
    return mPlayDuration + mStartTime;
  }

  // The playbackRate has been just been changed, reset the playstartTime.
  if (mResetPlayStartTime) {
    mPlayStartTime = TimeStamp::Now();
    mResetPlayStartTime = false;
  }

  // Position if playback had continued at 1x since mPlayStartTime...
  int64_t pos = DurationToUsecs(TimeStamp::Now() - mPlayStartTime) + mPlayDuration;
  // ...then re-anchor at mBasePosition (the last rate-change/seek point) so
  // only the time elapsed since that anchor is scaled by the playback rate.
  // See the matching comment in DecodeSeek() about when mBasePosition is set.
  pos -= mBasePosition;
  NS_ASSERTION(pos >= 0, "Video stream position should be positive.");
  return mBasePosition + pos * mPlaybackRate + mStartTime;
}
|
|
|
|
|
2013-12-12 04:33:01 -08:00
|
|
|
int64_t MediaDecoderStateMachine::GetClock()
|
|
|
|
{
|
2013-11-03 14:11:09 -08:00
|
|
|
AssertCurrentThreadInMonitor();
|
2012-11-22 02:38:28 -08:00
|
|
|
|
2011-07-11 20:39:32 -07:00
|
|
|
// Determine the clock time. If we've got audio, and we've not reached
|
|
|
|
// the end of the audio, use the audio clock. However if we've finished
|
2013-12-01 13:09:06 -08:00
|
|
|
// audio, or don't have audio, use the system clock. If our output is being
|
|
|
|
// fed to a MediaStream, use that stream as the source of the clock.
|
2012-08-22 08:56:38 -07:00
|
|
|
int64_t clock_time = -1;
|
2013-12-01 13:09:06 -08:00
|
|
|
DecodedStreamData* stream = mDecoder->GetDecodedStream();
|
2011-07-11 20:39:32 -07:00
|
|
|
if (!IsPlaying()) {
|
|
|
|
clock_time = mPlayDuration + mStartTime;
|
2013-12-01 13:09:06 -08:00
|
|
|
} else if (stream) {
|
2013-12-12 04:33:00 -08:00
|
|
|
clock_time = GetCurrentTimeViaMediaStreamSync();
|
2011-07-11 20:39:32 -07:00
|
|
|
} else {
|
2012-08-22 08:56:38 -07:00
|
|
|
int64_t audio_time = GetAudioClock();
|
2011-07-11 20:39:32 -07:00
|
|
|
if (HasAudio() && !mAudioCompleted && audio_time != -1) {
|
|
|
|
clock_time = audio_time;
|
|
|
|
// Resync against the audio clock, while we're trusting the
|
|
|
|
// audio clock. This ensures no "drift", particularly on Linux.
|
|
|
|
mPlayDuration = clock_time - mStartTime;
|
2013-12-19 19:24:42 -08:00
|
|
|
mPlayStartTime = TimeStamp::Now();
|
2011-07-11 20:39:32 -07:00
|
|
|
} else {
|
2011-09-26 20:31:18 -07:00
|
|
|
// Audio is disabled on this system. Sync to the system clock.
|
2012-11-22 02:38:28 -08:00
|
|
|
clock_time = GetVideoStreamPosition();
|
2011-07-11 20:39:32 -07:00
|
|
|
// Ensure the clock can never go backwards.
|
2012-11-22 02:38:28 -08:00
|
|
|
NS_ASSERTION(mCurrentFrameTime <= clock_time || mPlaybackRate <= 0,
|
|
|
|
"Clock should go forwards if the playback rate is > 0.");
|
2011-03-23 15:28:57 -07:00
|
|
|
}
|
2011-07-11 20:39:32 -07:00
|
|
|
}
|
2012-11-22 02:38:28 -08:00
|
|
|
return clock_time;
|
|
|
|
}
|
|
|
|
|
|
|
|
void MediaDecoderStateMachine::AdvanceFrame()
|
|
|
|
{
|
|
|
|
NS_ASSERTION(OnStateMachineThread(), "Should be on state machine thread.");
|
2013-11-03 14:11:09 -08:00
|
|
|
AssertCurrentThreadInMonitor();
|
2012-11-22 02:38:28 -08:00
|
|
|
NS_ASSERTION(!HasAudio() || mAudioStartTime != -1,
|
|
|
|
"Should know audio start time if we have audio.");
|
2011-03-23 15:28:57 -07:00
|
|
|
|
2012-11-22 02:38:28 -08:00
|
|
|
if (mDecoder->GetState() != MediaDecoder::PLAY_STATE_PLAYING) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
// If playbackRate is 0.0, we should stop the progress, but not be in paused
|
|
|
|
// state, per spec.
|
|
|
|
if (mPlaybackRate == 0.0) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
int64_t clock_time = GetClock();
|
2011-07-11 20:39:32 -07:00
|
|
|
// Skip frames up to the frame at the playback position, and figure out
|
|
|
|
// the time remaining until it's time to display the next frame.
|
2012-08-22 08:56:38 -07:00
|
|
|
int64_t remainingTime = AUDIO_DURATION_USECS;
|
2011-07-11 20:39:32 -07:00
|
|
|
NS_ASSERTION(clock_time >= mStartTime, "Should have positive clock time.");
|
|
|
|
nsAutoPtr<VideoData> currentFrame;
|
2012-09-18 11:27:32 -07:00
|
|
|
#ifdef PR_LOGGING
|
|
|
|
int32_t droppedFrames = 0;
|
|
|
|
#endif
|
2014-04-27 18:12:50 -07:00
|
|
|
if (VideoQueue().GetSize() > 0) {
|
|
|
|
VideoData* frame = VideoQueue().PeekFront();
|
2011-09-26 17:25:41 -07:00
|
|
|
while (mRealTime || clock_time >= frame->mTime) {
|
2013-10-24 19:44:58 -07:00
|
|
|
mVideoFrameEndTime = frame->GetEndTime();
|
2011-07-11 20:39:32 -07:00
|
|
|
currentFrame = frame;
|
2012-09-18 11:27:32 -07:00
|
|
|
#ifdef PR_LOGGING
|
2014-04-23 02:29:04 -07:00
|
|
|
VERBOSE_LOG("discarding video frame %lld", frame->mTime);
|
|
|
|
if (droppedFrames++) {
|
|
|
|
VERBOSE_LOG("discarding video frame %lld (%d so far)", frame->mTime, droppedFrames-1);
|
2012-09-18 11:27:32 -07:00
|
|
|
}
|
|
|
|
#endif
|
2014-04-27 18:12:50 -07:00
|
|
|
VideoQueue().PopFront();
|
2011-07-11 20:39:32 -07:00
|
|
|
// Notify the decode thread that the video queue's buffers may have
|
|
|
|
// free'd up space for more frames.
|
2011-04-29 12:21:57 -07:00
|
|
|
mDecoder->GetReentrantMonitor().NotifyAll();
|
2013-12-19 19:24:42 -08:00
|
|
|
mDecoder->UpdatePlaybackOffset(frame->mOffset);
|
2014-04-27 18:12:50 -07:00
|
|
|
if (VideoQueue().GetSize() == 0)
|
2011-07-11 20:39:32 -07:00
|
|
|
break;
|
2014-04-27 18:12:50 -07:00
|
|
|
frame = VideoQueue().PeekFront();
|
2011-03-23 15:28:57 -07:00
|
|
|
}
|
2011-07-11 20:39:32 -07:00
|
|
|
// Current frame has already been presented, wait until it's time to
|
|
|
|
// present the next frame.
|
|
|
|
if (frame && !currentFrame) {
|
Backout b3a8618f901c (bug 829042), 34a9ef8f929d (bug 822933), 4c1215cefbab (bug 826349), 70bb7f775178 (bug 825325), e9c8447fb197 (bug 828713), eb6ebf01eafe (bug 828901), f1f3ef647920 (bug 825329), f9d7b5722d4f (bug 825329), 5add564d4546 (bug 819377), 55e93d1fa972 (bug 804875), f14639a3461e (bug 804875), 23456fc21052 (bug 814308) for Windows pgo-only mochitest-1 media test timeouts on a CLOSED TREE
2013-01-16 07:16:23 -08:00
|
|
|
int64_t now = IsPlaying() ? clock_time : mPlayDuration;
|
2012-11-22 02:38:28 -08:00
|
|
|
|
2013-01-24 11:28:48 -08:00
|
|
|
remainingTime = frame->mTime - now;
|
2011-07-11 20:39:32 -07:00
|
|
|
}
|
|
|
|
}
|
2011-03-23 15:28:57 -07:00
|
|
|
|
2011-07-11 20:39:32 -07:00
|
|
|
// Check to see if we don't have enough data to play up to the next frame.
|
|
|
|
// If we don't, switch to buffering mode.
|
2012-02-14 20:35:01 -08:00
|
|
|
MediaResource* resource = mDecoder->GetResource();
|
2011-07-11 20:39:32 -07:00
|
|
|
if (mState == DECODER_STATE_DECODING &&
|
2012-11-14 11:46:40 -08:00
|
|
|
mDecoder->GetState() == MediaDecoder::PLAY_STATE_PLAYING &&
|
2011-07-11 20:39:32 -07:00
|
|
|
HasLowDecodedData(remainingTime + EXHAUSTED_DATA_MARGIN_USECS) &&
|
2013-09-03 20:08:10 -07:00
|
|
|
!mDecoder->IsDataCachedToEndOfResource() &&
|
2013-11-19 06:01:14 -08:00
|
|
|
!resource->IsSuspended()) {
|
|
|
|
if (JustExitedQuickBuffering() || HasLowUndecodedData()) {
|
|
|
|
if (currentFrame) {
|
2014-04-27 18:12:50 -07:00
|
|
|
VideoQueue().PushFront(currentFrame.forget());
|
2013-11-19 06:01:14 -08:00
|
|
|
}
|
|
|
|
StartBuffering();
|
2014-03-24 04:58:11 -07:00
|
|
|
// Don't go straight back to the state machine loop since that might
|
|
|
|
// cause us to start decoding again and we could flip-flop between
|
|
|
|
// decoding and quick-buffering.
|
|
|
|
ScheduleStateMachine(USECS_PER_S);
|
2013-11-19 06:01:14 -08:00
|
|
|
return;
|
2010-04-01 20:03:07 -07:00
|
|
|
}
|
2011-07-11 20:39:32 -07:00
|
|
|
}
|
2010-04-01 20:03:07 -07:00
|
|
|
|
2011-07-11 20:39:32 -07:00
|
|
|
// We've got enough data to keep playing until at least the next frame.
|
|
|
|
// Start playing now if need be.
|
2011-08-24 16:42:23 -07:00
|
|
|
if (!IsPlaying() && ((mFragmentEndTime >= 0 && clock_time < mFragmentEndTime) || mFragmentEndTime < 0)) {
|
2011-07-11 20:39:32 -07:00
|
|
|
StartPlayback();
|
|
|
|
}
|
2010-04-01 20:03:07 -07:00
|
|
|
|
2011-07-11 20:39:32 -07:00
|
|
|
if (currentFrame) {
|
|
|
|
// Decode one frame and display it.
|
|
|
|
TimeStamp presTime = mPlayStartTime - UsecsToDuration(mPlayDuration) +
|
|
|
|
UsecsToDuration(currentFrame->mTime - mStartTime);
|
|
|
|
NS_ASSERTION(currentFrame->mTime >= mStartTime, "Should have positive frame time");
|
2014-03-17 23:30:51 -07:00
|
|
|
// Filter out invalid frames by checking the frame time. FrameTime could be
|
|
|
|
// zero if it's a initial frame.
|
|
|
|
int64_t frameTime = currentFrame->mTime - mStartTime;
|
|
|
|
if (frameTime > 0 || (frameTime == 0 && mPlayDuration == 0)) {
|
2011-07-11 20:39:32 -07:00
|
|
|
ReentrantMonitorAutoExit exitMon(mDecoder->GetReentrantMonitor());
|
|
|
|
// If we have video, we want to increment the clock in steps of the frame
|
|
|
|
// duration.
|
|
|
|
RenderVideoFrame(currentFrame, presTime);
|
|
|
|
}
|
2012-03-04 18:27:49 -08:00
|
|
|
// If we're no longer playing after dropping and reacquiring the lock,
|
|
|
|
// playback must've been stopped on the decode thread (by a seek, for
|
|
|
|
// example). In that case, the current frame is probably out of date.
|
|
|
|
if (!IsPlaying()) {
|
|
|
|
ScheduleStateMachine();
|
|
|
|
return;
|
|
|
|
}
|
2013-06-20 20:14:18 -07:00
|
|
|
MediaDecoder::FrameStatistics& frameStats = mDecoder->GetFrameStatistics();
|
|
|
|
frameStats.NotifyPresentedFrame();
|
2013-10-24 19:44:58 -07:00
|
|
|
remainingTime = currentFrame->GetEndTime() - clock_time;
|
2012-07-30 07:20:58 -07:00
|
|
|
currentFrame = nullptr;
|
2011-07-11 20:39:32 -07:00
|
|
|
}
|
2010-04-01 20:03:07 -07:00
|
|
|
|
2011-07-11 20:39:32 -07:00
|
|
|
// Cap the current time to the larger of the audio and video end time.
|
|
|
|
// This ensures that if we're running off the system clock, we don't
|
|
|
|
// advance the clock to after the media end time.
|
|
|
|
if (mVideoFrameEndTime != -1 || mAudioEndTime != -1) {
|
2011-09-26 20:31:18 -07:00
|
|
|
// These will be non -1 if we've displayed a video frame, or played an audio frame.
|
2013-01-15 04:22:03 -08:00
|
|
|
clock_time = std::min(clock_time, std::max(mVideoFrameEndTime, mAudioEndTime));
|
2011-07-11 20:39:32 -07:00
|
|
|
if (clock_time > GetMediaTime()) {
|
|
|
|
// Only update the playback position if the clock time is greater
|
|
|
|
// than the previous playback position. The audio clock can
|
|
|
|
// sometimes report a time less than its previously reported in
|
|
|
|
// some situations, and we need to gracefully handle that.
|
|
|
|
UpdatePlaybackPosition(clock_time);
|
2011-02-17 18:30:33 -08:00
|
|
|
}
|
2010-04-01 20:03:07 -07:00
|
|
|
}
|
2011-07-11 20:39:32 -07:00
|
|
|
|
2011-09-26 20:31:18 -07:00
|
|
|
// If the number of audio/video frames queued has changed, either by
|
|
|
|
// this function popping and playing a video frame, or by the audio
|
|
|
|
// thread popping and playing an audio frame, we may need to update our
|
2011-07-11 20:39:32 -07:00
|
|
|
// ready state. Post an update to do so.
|
|
|
|
UpdateReadyState();
|
|
|
|
|
|
|
|
ScheduleStateMachine(remainingTime);
|
2010-04-01 20:03:07 -07:00
|
|
|
}
|
|
|
|
|
2013-12-19 19:24:42 -08:00
|
|
|
void MediaDecoderStateMachine::Wait(int64_t aUsecs) {
|
|
|
|
NS_ASSERTION(OnAudioThread(), "Only call on the audio thread");
|
|
|
|
AssertCurrentThreadInMonitor();
|
|
|
|
TimeStamp end = TimeStamp::Now() + UsecsToDuration(std::max<int64_t>(USECS_PER_MS, aUsecs));
|
|
|
|
TimeStamp now;
|
|
|
|
while ((now = TimeStamp::Now()) < end &&
|
|
|
|
mState != DECODER_STATE_SHUTDOWN &&
|
|
|
|
mState != DECODER_STATE_SEEKING &&
|
|
|
|
!mStopAudioThread &&
|
|
|
|
IsPlaying())
|
|
|
|
{
|
|
|
|
int64_t ms = static_cast<int64_t>(NS_round((end - now).ToSeconds() * 1000));
|
|
|
|
if (ms == 0 || ms > UINT32_MAX) {
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
mDecoder->GetReentrantMonitor().Wait(PR_MillisecondsToInterval(static_cast<uint32_t>(ms)));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2014-06-13 13:20:37 -07:00
|
|
|
// Asks the reader for the media's start time, records it in mStartTime /
// mAudioStartTime, and adjusts mEndTime when the duration came from metadata
// (e.g. a Content-Duration HTTP header). Returns the first video frame the
// reader decoded while probing, if any.
VideoData* MediaDecoderStateMachine::FindStartTime()
{
  NS_ASSERTION(OnDecodeThread(), "Should be on decode thread.");
  AssertCurrentThreadInMonitor();
  int64_t readerStartTime = 0;
  mStartTime = 0;
  VideoData* firstVideoFrame = nullptr;
  {
    // Drop the monitor while the reader does (potentially blocking) I/O.
    ReentrantMonitorAutoExit exitMon(mDecoder->GetReentrantMonitor());
    firstVideoFrame = mReader->FindStartTime(readerStartTime);
  }
  if (readerStartTime != 0) {
    mStartTime = readerStartTime;
    if (mGotDurationFromMetaData) {
      NS_ASSERTION(mEndTime != -1,
                   "We should have mEndTime as supplied duration here");
      // We were specified a duration from a Content-Duration HTTP header.
      // Adjust mEndTime so that mEndTime-mStartTime matches the specified
      // duration.
      mEndTime = mStartTime + mEndTime;
    }
  }
  // Set the audio start time to be start of media. If this lies before the
  // first actual audio frame we have, we'll inject silence during playback
  // to ensure the audio starts at the correct time.
  mAudioStartTime = mStartTime;
  DECODER_LOG(PR_LOG_DEBUG, "Media start time is %lld", mStartTime);
  return firstVideoFrame;
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
void MediaDecoderStateMachine::UpdateReadyState() {
|
2013-11-03 14:11:09 -08:00
|
|
|
AssertCurrentThreadInMonitor();
|
2010-04-01 20:03:07 -07:00
|
|
|
|
2012-12-18 20:48:32 -08:00
|
|
|
MediaDecoderOwner::NextFrameStatus nextFrameStatus = GetNextFrameStatus();
|
|
|
|
if (nextFrameStatus == mLastFrameStatus) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
mLastFrameStatus = nextFrameStatus;
|
|
|
|
|
2013-06-16 22:15:32 -07:00
|
|
|
/* This is a bit tricky. MediaDecoder::UpdateReadyStateForData will run on
|
|
|
|
* the main thread and re-evaluate GetNextFrameStatus there, passing it to
|
|
|
|
* HTMLMediaElement::UpdateReadyStateForData. It doesn't use the value of
|
|
|
|
* GetNextFrameStatus we computed here, because what we're computing here
|
|
|
|
* could be stale by the time MediaDecoder::UpdateReadyStateForData runs.
|
|
|
|
* We only compute GetNextFrameStatus here to avoid posting runnables to the main
|
|
|
|
* thread unnecessarily.
|
|
|
|
*/
|
2010-04-01 20:03:07 -07:00
|
|
|
nsCOMPtr<nsIRunnable> event;
|
2013-06-16 22:15:32 -07:00
|
|
|
event = NS_NewRunnableMethod(mDecoder, &MediaDecoder::UpdateReadyStateForData);
|
2014-05-23 12:53:17 -07:00
|
|
|
NS_DispatchToMainThread(event);
|
2010-04-01 20:03:07 -07:00
|
|
|
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
bool MediaDecoderStateMachine::JustExitedQuickBuffering()
|
2011-03-23 15:28:57 -07:00
|
|
|
{
|
|
|
|
return !mDecodeStartTime.IsNull() &&
|
|
|
|
mQuickBuffering &&
|
2012-09-18 11:23:59 -07:00
|
|
|
(TimeStamp::Now() - mDecodeStartTime) < TimeDuration::FromMicroseconds(QUICK_BUFFER_THRESHOLD_USECS);
|
2011-03-23 15:28:57 -07:00
|
|
|
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
void MediaDecoderStateMachine::StartBuffering()
|
2010-07-22 15:48:32 -07:00
|
|
|
{
|
2013-11-03 14:11:09 -08:00
|
|
|
AssertCurrentThreadInMonitor();
|
2014-03-02 17:55:33 -08:00
|
|
|
|
2014-03-02 17:49:52 -08:00
|
|
|
if (mState != DECODER_STATE_DECODING) {
|
|
|
|
// We only move into BUFFERING state if we're actually decoding.
|
|
|
|
// If we're currently doing something else, we don't need to buffer,
|
|
|
|
// and more importantly, we shouldn't overwrite mState to interrupt
|
|
|
|
// the current operation, as that could leave us in an inconsistent
|
|
|
|
// state!
|
|
|
|
return;
|
|
|
|
}
|
2010-07-22 15:48:32 -07:00
|
|
|
|
2011-07-11 20:39:37 -07:00
|
|
|
if (IsPlaying()) {
|
|
|
|
StopPlayback();
|
|
|
|
}
|
|
|
|
|
2011-03-23 15:28:57 -07:00
|
|
|
TimeDuration decodeDuration = TimeStamp::Now() - mDecodeStartTime;
|
|
|
|
// Go into quick buffering mode provided we've not just left buffering using
|
|
|
|
// a "quick exit". This stops us flip-flopping between playing and buffering
|
|
|
|
// when the download speed is similar to the decode speed.
|
|
|
|
mQuickBuffering =
|
|
|
|
!JustExitedQuickBuffering() &&
|
2011-04-13 15:12:23 -07:00
|
|
|
decodeDuration < UsecsToDuration(QUICK_BUFFER_THRESHOLD_USECS);
|
2011-03-23 15:28:57 -07:00
|
|
|
mBufferingStart = TimeStamp::Now();
|
|
|
|
|
2010-07-22 15:48:32 -07:00
|
|
|
// We need to tell the element that buffering has started.
|
|
|
|
// We can't just directly send an asynchronous runnable that
|
|
|
|
// eventually fires the "waiting" event. The problem is that
|
|
|
|
// there might be pending main-thread events, such as "data
|
|
|
|
// received" notifications, that mean we're not actually still
|
|
|
|
// buffering by the time this runnable executes. So instead
|
|
|
|
// we just trigger UpdateReadyStateForData; when it runs, it
|
|
|
|
// will check the current state and decide whether to tell
|
|
|
|
// the element we're buffering or not.
|
|
|
|
UpdateReadyState();
|
|
|
|
mState = DECODER_STATE_BUFFERING;
|
2014-04-23 02:29:04 -07:00
|
|
|
DECODER_LOG(PR_LOG_DEBUG, "Changed state from DECODING to BUFFERING, decoded for %.3lfs",
|
|
|
|
decodeDuration.ToSeconds());
|
2012-03-28 06:14:33 -07:00
|
|
|
#ifdef PR_LOGGING
|
2012-11-14 11:46:40 -08:00
|
|
|
MediaDecoder::Statistics stats = mDecoder->GetStatistics();
|
2014-04-23 02:29:04 -07:00
|
|
|
DECODER_LOG(PR_LOG_DEBUG, "Playback rate: %.1lfKB/s%s download rate: %.1lfKB/s%s",
|
2013-11-20 19:02:42 -08:00
|
|
|
stats.mPlaybackRate/1024, stats.mPlaybackRateReliable ? "" : " (unreliable)",
|
2014-04-23 02:29:04 -07:00
|
|
|
stats.mDownloadRate/1024, stats.mDownloadRateReliable ? "" : " (unreliable)");
|
2014-03-10 20:44:10 -07:00
|
|
|
#endif
|
2010-07-22 15:48:32 -07:00
|
|
|
}
|
2011-03-23 15:28:58 -07:00
|
|
|
|
2013-11-17 20:22:47 -08:00
|
|
|
// Fills aBuffered with the time ranges the reader considers buffered.
// The resource is pinned for the duration of the query so cached data
// can't be evicted while the reader inspects it.
nsresult MediaDecoderStateMachine::GetBuffered(dom::TimeRanges* aBuffered) {
  MediaResource* resource = mDecoder->GetResource();
  NS_ENSURE_TRUE(resource, NS_ERROR_FAILURE);
  resource->Pin();
  nsresult rv = mReader->GetBuffered(aBuffered, mStartTime);
  resource->Unpin();
  return rv;
}
|
2011-07-11 20:39:32 -07:00
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
// Runs one cycle of the state machine on the state machine thread,
// guarding against re-entrant cycles and clearing the pending timeout
// so a new one can be scheduled.
nsresult MediaDecoderStateMachine::CallRunStateMachine()
{
  AssertCurrentThreadInMonitor();
  NS_ASSERTION(OnStateMachineThread(), "Should be on state machine thread.");

  // If audio is being captured, stop the audio thread if it's running.
  if (mAudioCaptured) {
    StopAudioThread();
  }

  MOZ_ASSERT(!mInRunningStateMachine,
             "State machine cycles must run in sequence!");
  mTimeout = TimeStamp();
  mInRunningStateMachine = true;
  nsresult rv = RunStateMachine();
  mInRunningStateMachine = false;
  return rv;
}
|
|
|
|
|
2014-04-23 02:29:14 -07:00
|
|
|
// Invoked by TimerEvent when its timer fires (or its runnable runs).
// A stale aTimerId (one superseded by a later ScheduleStateMachine call)
// is ignored; only the most recently issued id runs the state machine.
nsresult MediaDecoderStateMachine::TimeoutExpired(int aTimerId)
{
  ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
  NS_ASSERTION(OnStateMachineThread(), "Must be on state machine thread");
  mTimer->Cancel();
  if (mTimerId != aTimerId) {
    // This event was superseded; nothing to do.
    return NS_OK;
  }
  return CallRunStateMachine();
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
void MediaDecoderStateMachine::ScheduleStateMachineWithLockAndWakeDecoder() {
|
2012-04-29 20:12:42 -07:00
|
|
|
ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
|
2014-03-10 20:44:10 -07:00
|
|
|
DispatchAudioDecodeTaskIfNeeded();
|
|
|
|
DispatchVideoDecodeTaskIfNeeded();
|
2012-04-29 20:12:42 -07:00
|
|
|
}
|
|
|
|
|
2014-04-23 02:29:14 -07:00
|
|
|
class TimerEvent : public nsITimerCallback, public nsRunnable {
|
|
|
|
NS_DECL_THREADSAFE_ISUPPORTS
|
|
|
|
public:
|
|
|
|
TimerEvent(MediaDecoderStateMachine* aStateMachine, int aTimerId)
|
|
|
|
: mStateMachine(aStateMachine), mTimerId(aTimerId) {}
|
|
|
|
|
|
|
|
NS_IMETHOD Run() MOZ_OVERRIDE {
|
|
|
|
return mStateMachine->TimeoutExpired(mTimerId);
|
|
|
|
}
|
|
|
|
|
|
|
|
NS_IMETHOD Notify(nsITimer* aTimer) {
|
|
|
|
return mStateMachine->TimeoutExpired(mTimerId);
|
|
|
|
}
|
|
|
|
private:
|
|
|
|
const nsRefPtr<MediaDecoderStateMachine> mStateMachine;
|
|
|
|
int mTimerId;
|
|
|
|
};
|
|
|
|
|
2014-04-27 00:06:00 -07:00
|
|
|
NS_IMPL_ISUPPORTS(TimerEvent, nsITimerCallback, nsIRunnable);
|
2014-04-23 02:29:14 -07:00
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
// Arranges for the state machine to run again in approximately aUsecs
// microseconds: either by dispatching a runnable immediately (zero delay)
// or by arming a one-shot timer on the state machine thread. Redundant
// requests (a timeout already pending at or before the requested time)
// are coalesced.
nsresult MediaDecoderStateMachine::ScheduleStateMachine(int64_t aUsecs) {
  AssertCurrentThreadInMonitor();
  NS_ABORT_IF_FALSE(GetStateMachineThread(),
                    "Must have a state machine thread to schedule");

  if (mState == DECODER_STATE_SHUTDOWN) {
    return NS_ERROR_FAILURE;
  }
  aUsecs = std::max<int64_t>(aUsecs, 0);

  TimeStamp timeout = TimeStamp::Now() + UsecsToDuration(aUsecs);
  if (!mTimeout.IsNull() && timeout >= mTimeout) {
    // We've already scheduled a timer set to expire at or before this time,
    // or have an event dispatched to run the state machine.
    return NS_OK;
  }

  uint32_t delayMs =
    static_cast<uint32_t>((aUsecs / USECS_PER_MS) & 0xFFFFFFFF);
  if (mRealTime && delayMs > 40) {
    // Real-time streams poll at least every 40ms.
    delayMs = 40;
  }

  // Don't cancel the timer here, as this function can be called from
  // different threads.

  nsresult rv = NS_ERROR_FAILURE;
  nsRefPtr<TimerEvent> event = new TimerEvent(this, mTimerId+1);

  if (delayMs == 0) {
    // Dispatch a runnable to the state machine thread when delay is 0.
    // This has less latency than dispatching a runnable to the state
    // machine thread which would then schedule a zero-delay timer.
    rv = GetStateMachineThread()->Dispatch(event, NS_DISPATCH_NORMAL);
  } else if (OnStateMachineThread()) {
    rv = mTimer->InitWithCallback(event, delayMs, nsITimer::TYPE_ONE_SHOT);
  } else {
    MOZ_ASSERT(false,
               "non-zero delay timer should be only scheduled in state machine thread");
  }

  if (NS_SUCCEEDED(rv)) {
    mTimeout = timeout;
    // Bump the id so any previously-issued TimerEvent becomes stale.
    ++mTimerId;
  } else {
    NS_WARNING("Failed to schedule state machine");
  }

  return rv;
}
|
2011-11-07 17:38:17 -08:00
|
|
|
|
2014-02-17 14:53:52 -08:00
|
|
|
bool MediaDecoderStateMachine::OnDecodeThread() const
|
|
|
|
{
|
|
|
|
return mDecodeTaskQueue->IsCurrentThreadIn();
|
|
|
|
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
bool MediaDecoderStateMachine::OnStateMachineThread() const
|
2011-11-07 17:38:17 -08:00
|
|
|
{
|
2014-02-17 14:53:53 -08:00
|
|
|
bool rv = false;
|
|
|
|
mStateMachineThreadPool->IsOnCurrentThread(&rv);
|
|
|
|
return rv;
|
2011-11-07 17:38:17 -08:00
|
|
|
}
|
2012-11-14 11:45:33 -08:00
|
|
|
|
2014-02-17 14:53:53 -08:00
|
|
|
// Returns the event target used to dispatch state machine work.
nsIEventTarget* MediaDecoderStateMachine::GetStateMachineThread()
{
  return mStateMachineThreadPool->GetEventTarget();
}
|
|
|
|
|
2012-11-22 02:38:28 -08:00
|
|
|
void MediaDecoderStateMachine::SetPlaybackRate(double aPlaybackRate)
|
|
|
|
{
|
|
|
|
NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
|
|
|
|
NS_ASSERTION(aPlaybackRate != 0,
|
|
|
|
"PlaybackRate == 0 should be handled before this function.");
|
|
|
|
ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
|
|
|
|
|
|
|
|
if (mPlaybackRate == aPlaybackRate) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
// Get position of the last time we changed the rate.
|
|
|
|
if (!HasAudio()) {
|
|
|
|
// mBasePosition is a position in the video stream, not an absolute time.
|
2013-01-10 03:26:18 -08:00
|
|
|
if (mState == DECODER_STATE_SEEKING) {
|
2014-06-13 13:20:37 -07:00
|
|
|
mBasePosition = mSeekTarget.mTime - mStartTime;
|
2013-01-10 03:26:18 -08:00
|
|
|
} else {
|
|
|
|
mBasePosition = GetVideoStreamPosition();
|
2012-11-22 02:38:28 -08:00
|
|
|
}
|
2013-02-28 07:05:50 -08:00
|
|
|
mPlayDuration = mBasePosition;
|
2013-01-10 03:26:18 -08:00
|
|
|
mResetPlayStartTime = true;
|
2013-12-19 19:24:42 -08:00
|
|
|
mPlayStartTime = TimeStamp::Now();
|
2012-11-22 02:38:28 -08:00
|
|
|
}
|
|
|
|
|
|
|
|
mPlaybackRate = aPlaybackRate;
|
|
|
|
}
|
|
|
|
|
|
|
|
void MediaDecoderStateMachine::SetPreservesPitch(bool aPreservesPitch)
|
|
|
|
{
|
|
|
|
NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
|
|
|
|
ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
|
|
|
|
|
|
|
|
mPreservesPitch = aPreservesPitch;
|
2014-03-31 20:43:57 -07:00
|
|
|
}
|
2013-12-19 19:24:42 -08:00
|
|
|
|
2014-03-31 20:43:57 -07:00
|
|
|
void
|
|
|
|
MediaDecoderStateMachine::SetMinimizePrerollUntilPlaybackStarts()
|
|
|
|
{
|
|
|
|
AssertCurrentThreadInMonitor();
|
|
|
|
mMinimizePreroll = true;
|
2012-11-22 02:38:28 -08:00
|
|
|
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
bool MediaDecoderStateMachine::IsShutdown()
|
2012-11-06 14:33:01 -08:00
|
|
|
{
|
2013-11-03 14:11:09 -08:00
|
|
|
AssertCurrentThreadInMonitor();
|
2012-11-06 14:33:01 -08:00
|
|
|
return GetState() == DECODER_STATE_SHUTDOWN;
|
|
|
|
}
|
|
|
|
|
2012-12-27 07:21:30 -08:00
|
|
|
void MediaDecoderStateMachine::QueueMetadata(int64_t aPublishTime,
|
|
|
|
int aChannels,
|
|
|
|
int aRate,
|
|
|
|
bool aHasAudio,
|
|
|
|
bool aHasVideo,
|
|
|
|
MetadataTags* aTags)
|
2012-11-30 05:17:54 -08:00
|
|
|
{
|
|
|
|
NS_ASSERTION(OnDecodeThread(), "Should be on decode thread.");
|
2013-11-03 14:11:09 -08:00
|
|
|
AssertCurrentThreadInMonitor();
|
2012-11-30 05:17:54 -08:00
|
|
|
TimedMetadata* metadata = new TimedMetadata;
|
|
|
|
metadata->mPublishTime = aPublishTime;
|
|
|
|
metadata->mChannels = aChannels;
|
|
|
|
metadata->mRate = aRate;
|
|
|
|
metadata->mHasAudio = aHasAudio;
|
2013-08-16 02:57:17 -07:00
|
|
|
metadata->mHasVideo = aHasVideo;
|
2012-11-30 05:17:54 -08:00
|
|
|
metadata->mTags = aTags;
|
|
|
|
mMetadataManager.QueueMetadata(metadata);
|
|
|
|
}
|
|
|
|
|
2012-11-14 11:45:33 -08:00
|
|
|
} // namespace mozilla
|
|
|
|
|
2014-04-23 02:29:04 -07:00
|
|
|
// avoid redefined macro in unified build
|
|
|
|
#undef DECODER_LOG
|
|
|
|
#undef VERBOSE_LOG
|