2010-04-01 20:03:07 -07:00
|
|
|
/* vim:set ts=2 sw=2 sts=2 et cindent: */
|
2012-05-21 04:12:37 -07:00
|
|
|
/* This Source Code Form is subject to the terms of the Mozilla Public
|
|
|
|
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
|
|
|
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
2010-04-01 20:03:07 -07:00
|
|
|
|
2013-05-02 17:39:19 -07:00
|
|
|
#ifdef XP_WIN
|
|
|
|
// Include Windows headers required for enabling high precision timers.
|
2013-05-06 02:33:00 -07:00
|
|
|
#include "windows.h"
|
|
|
|
#include "mmsystem.h"
|
2013-05-02 17:39:19 -07:00
|
|
|
#endif
|
|
|
|
|
2012-12-14 15:58:45 -08:00
|
|
|
#include "mozilla/DebugOnly.h"
|
|
|
|
#include "mozilla/StandardInteger.h"
|
|
|
|
#include "mozilla/Util.h"
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
#include "MediaDecoderStateMachine.h"
|
2010-04-01 20:03:07 -07:00
|
|
|
#include <limits>
|
2012-11-14 11:46:40 -08:00
|
|
|
#include "AudioStream.h"
|
2010-04-01 20:03:07 -07:00
|
|
|
#include "nsTArray.h"
|
2012-11-14 11:46:40 -08:00
|
|
|
#include "MediaDecoder.h"
|
|
|
|
#include "MediaDecoderReader.h"
|
2010-04-01 20:03:07 -07:00
|
|
|
#include "mozilla/mozalloc.h"
|
2010-04-27 01:53:44 -07:00
|
|
|
#include "VideoUtils.h"
|
2013-03-02 11:14:44 -08:00
|
|
|
#include "mozilla/dom/TimeRanges.h"
|
2012-01-19 10:30:29 -08:00
|
|
|
#include "nsDeque.h"
|
2012-04-29 20:12:42 -07:00
|
|
|
#include "AudioSegment.h"
|
|
|
|
#include "VideoSegment.h"
|
2012-08-20 21:06:46 -07:00
|
|
|
#include "ImageContainer.h"
|
2012-01-11 00:23:07 -08:00
|
|
|
|
2012-11-30 05:17:54 -08:00
|
|
|
#include "prenv.h"
|
2011-09-26 17:25:41 -07:00
|
|
|
#include "mozilla/Preferences.h"
|
2013-01-15 04:22:03 -08:00
|
|
|
#include <algorithm>
|
2010-04-01 20:03:07 -07:00
|
|
|
|
2012-11-14 11:45:33 -08:00
|
|
|
namespace mozilla {
|
|
|
|
|
2012-12-04 02:59:36 -08:00
|
|
|
using namespace mozilla::layers;
|
2012-11-15 19:25:26 -08:00
|
|
|
using namespace mozilla::dom;
|
2010-04-01 20:03:07 -07:00
|
|
|
|
|
|
|
#ifdef PR_LOGGING
|
2012-11-14 11:46:40 -08:00
|
|
|
extern PRLogModuleInfo* gMediaDecoderLog;
|
|
|
|
#define LOG(type, msg) PR_LOG(gMediaDecoderLog, type, msg)
|
2010-04-01 20:03:07 -07:00
|
|
|
#else
|
|
|
|
#define LOG(type, msg)
|
|
|
|
#endif
|
|
|
|
|
2012-06-06 16:43:25 -07:00
|
|
|
// Wait this number of seconds when buffering, then leave and play
// as best as we can if the required amount of data hasn't been
// retrieved.
static const uint32_t BUFFERING_WAIT_S = 30;

// If audio queue has less than this many usecs of decoded audio, we won't risk
// trying to decode the video, we'll skip decoding video up to the next
// keyframe. We may increase this value for an individual decoder if we
// encounter video frames which take a long time to decode.
static const uint32_t LOW_AUDIO_USECS = 300000;

// If more than this many usecs of decoded audio is queued, we'll hold off
// decoding more audio. If we increase the low audio threshold (see
// LOW_AUDIO_USECS above) we'll also increase this value to ensure it's not
// less than the low audio threshold.
const int64_t AMPLE_AUDIO_USECS = 1000000;

// Maximum number of bytes we'll allocate and write at once to the audio
// hardware when the audio stream contains missing frames and we're
// writing silence in order to fill the gap. We limit our silence-writes
// to 32KB in order to avoid allocating an impossibly large chunk of
// memory if we encounter a large chunk of silence.
const uint32_t SILENCE_BYTES_CHUNK = 32 * 1024;

// If we have fewer than LOW_VIDEO_FRAMES decoded frames, and
// we're not "pumping video", we'll skip the video up to the next keyframe
// which is at or after the current playback position.
static const uint32_t LOW_VIDEO_FRAMES = 1;

// Arbitrary "frame duration" (in usecs) when playing only audio; used to
// pace the state machine when there are no video frames to schedule by.
static const int AUDIO_DURATION_USECS = 40000;

// If we increase our "low audio threshold" (see LOW_AUDIO_USECS above), we
// use this as a factor in all our calculations. Increasing this will cause
// us to be more likely to increase our low audio threshold, and to
// increase it by more.
static const int THRESHOLD_FACTOR = 2;

// If we have less than this much undecoded data available, we'll consider
// ourselves to be running low on undecoded data. We determine how much
// undecoded data we have remaining using the reader's GetBuffered()
// implementation.
static const int64_t LOW_DATA_THRESHOLD_USECS = 5000000;

// LOW_DATA_THRESHOLD_USECS needs to be greater than AMPLE_AUDIO_USECS, otherwise
// the skip-to-keyframe logic can activate when we're running low on data.
PR_STATIC_ASSERT(LOW_DATA_THRESHOLD_USECS > AMPLE_AUDIO_USECS);

// Amount of excess usecs of data to add in to the "should we buffer" calculation.
static const uint32_t EXHAUSTED_DATA_MARGIN_USECS = 60000;

// If we enter buffering within QUICK_BUFFER_THRESHOLD_USECS seconds of starting
// decoding, we'll enter "quick buffering" mode, which exits a lot sooner than
// normal buffering mode. This exists so that if the decode-ahead exhausts the
// downloaded data while decode/playback is just starting up (for example
// after a seek while the media is still playing, or when playing a media
// as soon as it's load started), we won't necessarily stop for 30s and wait
// for buffering. We may actually be able to playback in this case, so exit
// buffering early and try to play. If it turns out we can't play, we'll fall
// back to buffering normally.
static const uint32_t QUICK_BUFFER_THRESHOLD_USECS = 2000000;

// If we're quick buffering, we'll remain in buffering mode while we have less than
// QUICK_BUFFERING_LOW_DATA_USECS of decoded data available.
static const uint32_t QUICK_BUFFERING_LOW_DATA_USECS = 1000000;

// If QUICK_BUFFERING_LOW_DATA_USECS is > AMPLE_AUDIO_USECS, we won't exit
// quick buffering in a timely fashion, as the decode pauses when it
// reaches AMPLE_AUDIO_USECS decoded data, and thus we'll never reach
// QUICK_BUFFERING_LOW_DATA_USECS.
PR_STATIC_ASSERT(QUICK_BUFFERING_LOW_DATA_USECS <= AMPLE_AUDIO_USECS);

// Minimum number of usecs of audio to buffer in the AudioStream before
// starting it. This value has been chosen empirically.
static const uint32_t AUDIOSTREAM_MIN_WRITE_BEFORE_START_USECS = 200000;
|
|
|
|
|
2012-08-22 08:56:38 -07:00
|
|
|
// Converts a count of microseconds into a TimeDuration.
static TimeDuration UsecsToDuration(int64_t aUsecs) {
  const double milliseconds = static_cast<double>(aUsecs) / USECS_PER_MS;
  return TimeDuration::FromMilliseconds(milliseconds);
}
|
|
|
|
|
2012-08-22 08:56:38 -07:00
|
|
|
// Converts a TimeDuration into a (truncated) count of microseconds.
static int64_t DurationToUsecs(TimeDuration aDuration) {
  const double seconds = aDuration.ToSeconds();
  return static_cast<int64_t>(seconds * USECS_PER_S);
}
|
|
|
|
|
2011-11-07 17:38:17 -08:00
|
|
|
// Owns the global state machine thread and counts of
|
|
|
|
// state machine and decoder threads. There should
|
|
|
|
// only be one instance of this class.
|
|
|
|
// Owns the global state machine thread and counts of
// state machine and decoder threads. There should
// only be one instance of this class.
//
// Lifetime: created lazily by Instance() on the main thread, and deletes
// itself in CleanupGlobalStateMachine() when the last state machine goes
// away (see the .cpp definitions below).
class StateMachineTracker
{
private:
  // Private: construction happens only through Instance(), on the main thread.
  StateMachineTracker() :
    mMonitor("media.statemachinetracker"),
    mStateMachineCount(0),
    mDecodeThreadCount(0),
    mStateMachineThread(nullptr)
  {
    MOZ_COUNT_CTOR(StateMachineTracker);
    NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
  }

  ~StateMachineTracker()
  {
    NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");

    MOZ_COUNT_DTOR(StateMachineTracker);
  }

public:
  // Access singleton instance. This is initially called on the main
  // thread in the MediaDecoderStateMachine constructor resulting
  // in the global object being created lazily. Non-main thread
  // access always occurs after this and uses the monitor to
  // safely access the decode thread counts.
  static StateMachineTracker& Instance();

  // Instantiate the global state machine thread if required.
  // Call on main thread only.
  void EnsureGlobalStateMachine();

  // Destroy global state machine thread if required.
  // Call on main thread only.
  void CleanupGlobalStateMachine();

  // Return the global state machine thread. Call from any thread.
  nsIThread* GetGlobalStateMachineThread()
  {
    ReentrantMonitorAutoEnter mon(mMonitor);
    NS_ASSERTION(mStateMachineThread, "Should have non-null state machine thread!");
    return mStateMachineThread;
  }

  // Requests that a decode thread be created for aStateMachine. The thread
  // may be created immediately, or after some delay, once a thread becomes
  // available. The request can be cancelled using CancelCreateDecodeThread().
  // It's the callers responsibility to not call this more than once for any
  // given state machine.
  nsresult RequestCreateDecodeThread(MediaDecoderStateMachine* aStateMachine);

  // Cancels a request made by RequestCreateDecodeThread to create a decode
  // thread for aStateMachine.
  nsresult CancelCreateDecodeThread(MediaDecoderStateMachine* aStateMachine);

  // Maximum number of active decode threads allowed. When more
  // than this number are active the thread creation will fail.
  static const uint32_t MAX_DECODE_THREADS = 25;

  // Returns the number of active decode threads.
  // Call on any thread. Holds the internal monitor so don't
  // call with any other monitor held to avoid deadlock.
  uint32_t GetDecodeThreadCount();

  // Keep track of the fact that a decode thread was destroyed.
  // Call on any thread. Holds the internal monitor so don't
  // call with any other monitor held to avoid deadlock.
  void NoteDecodeThreadDestroyed();

#ifdef DEBUG
  // Returns true if aStateMachine has a pending request for a
  // decode thread.
  bool IsQueued(MediaDecoderStateMachine* aStateMachine);
#endif

private:
  // Holds global instance of StateMachineTracker.
  // Writable on main thread only.
  static StateMachineTracker* sInstance;

  // Reentrant monitor that must be obtained to access
  // the decode thread count member and methods.
  ReentrantMonitor mMonitor;

  // Number of instances of MediaDecoderStateMachine
  // that are currently instantiated. Access on the
  // main thread only.
  uint32_t mStateMachineCount;

  // Number of instances of decoder threads that are
  // currently instantiated. Access only with the
  // mMonitor lock held. Can be used from any thread.
  uint32_t mDecodeThreadCount;

  // Global state machine thread. Write on the main thread
  // only, read from the decoder threads. Synchronized via
  // the mMonitor.
  nsIThread* mStateMachineThread;

  // Queue of state machines waiting for decode threads. Entries at the front
  // get their threads first. Stores raw MediaDecoderStateMachine* (nsDeque
  // holds void*); entries are non-owning.
  nsDeque mPending;
};
|
|
|
|
|
2012-07-30 07:20:58 -07:00
|
|
|
// The one-and-only tracker; created lazily in Instance(), reset to null in
// CleanupGlobalStateMachine() when the last state machine is destroyed.
StateMachineTracker* StateMachineTracker::sInstance = nullptr;

// Returns the singleton, creating it on first use. Creation is only legal
// on the main thread (asserted); later calls may come from any thread once
// the instance exists.
StateMachineTracker& StateMachineTracker::Instance()
{
  if (!sInstance) {
    NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
    sInstance = new StateMachineTracker();
  }
  return *sInstance;
}
|
|
|
|
|
|
|
|
// Registers one more state machine with the tracker, creating the shared
// "Media State" thread if this is the first registration. Main thread only.
// Each call must eventually be balanced by CleanupGlobalStateMachine().
void StateMachineTracker::EnsureGlobalStateMachine()
{
  NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
  ReentrantMonitorAutoEnter mon(mMonitor);
  if (mStateMachineCount == 0) {
    NS_ASSERTION(!mStateMachineThread, "Should have null state machine thread!");
    // NS_NewNamedThread addrefs into mStateMachineThread; the matching
    // NS_RELEASE happens in CleanupGlobalStateMachine().
    DebugOnly<nsresult> rv = NS_NewNamedThread("Media State", &mStateMachineThread, nullptr);
    NS_ABORT_IF_FALSE(NS_SUCCEEDED(rv), "Can't create media state machine thread");
  }
  mStateMachineCount++;
}
|
2012-01-19 10:30:29 -08:00
|
|
|
|
|
|
|
#ifdef DEBUG
// Debug-only helper: reports whether aStateMachine is still waiting in the
// pending queue for a decode thread. Takes the tracker monitor.
bool StateMachineTracker::IsQueued(MediaDecoderStateMachine* aStateMachine)
{
  ReentrantMonitorAutoEnter mon(mMonitor);
  const int32_t pendingCount = mPending.GetSize();
  for (int32_t index = 0; index < pendingCount; ++index) {
    MediaDecoderStateMachine* candidate =
      static_cast<MediaDecoderStateMachine*>(mPending.ObjectAt(index));
    if (candidate == aStateMachine) {
      return true;
    }
  }
  return false;
}
#endif
|
|
|
|
|
2012-11-22 02:38:28 -08:00
|
|
|
// Deregisters one state machine; when the last one goes away, shuts down the
// shared state machine thread and destroys the singleton (note: `delete this`
// at the end — the tracker must not be touched after this call returns when
// the count reaches zero). Main thread only.
void StateMachineTracker::CleanupGlobalStateMachine()
{
  NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
  NS_ABORT_IF_FALSE(mStateMachineCount > 0,
    "State machine ref count must be > 0");
  mStateMachineCount--;
  if (mStateMachineCount == 0) {
    LOG(PR_LOG_DEBUG, ("Destroying media state machine thread"));
    NS_ASSERTION(mPending.GetSize() == 0, "Shouldn't all requests be handled by now?");
    {
      ReentrantMonitorAutoEnter mon(mMonitor);
      // Shut the thread down asynchronously from the main thread; release
      // our reference before dispatch so the event owns the remaining work.
      nsCOMPtr<nsIRunnable> event = new ShutdownThreadEvent(mStateMachineThread);
      NS_RELEASE(mStateMachineThread);
      mStateMachineThread = nullptr;
      NS_DispatchToMainThread(event);

      NS_ASSERTION(mDecodeThreadCount == 0, "Decode thread count must be zero.");
      // Clear the singleton pointer while still under the monitor so a
      // racing Instance() caller can't observe a half-destroyed tracker.
      sInstance = nullptr;
    }
    delete this;
  }
}
|
|
|
|
|
2012-01-18 14:56:54 -08:00
|
|
|
// Records that a decode thread has exited, then hands the freed slot(s) to
// state machines waiting in mPending (front of the queue first).
// Callable from any thread; takes the tracker monitor.
void StateMachineTracker::NoteDecodeThreadDestroyed()
{
  ReentrantMonitorAutoEnter mon(mMonitor);
  --mDecodeThreadCount;
  while (mDecodeThreadCount < MAX_DECODE_THREADS && mPending.GetSize() > 0) {
    MediaDecoderStateMachine* m =
      static_cast<MediaDecoderStateMachine*>(mPending.PopFront());
    nsresult rv;
    {
      // Drop the monitor while starting the thread to avoid lock-order
      // problems with the state machine's own monitor.
      ReentrantMonitorAutoExit exitMon(mMonitor);
      rv = m->StartDecodeThread();
    }
    if (NS_SUCCEEDED(rv)) {
      ++mDecodeThreadCount;
    }
    // NOTE(review): if StartDecodeThread fails, the popped state machine is
    // silently dropped from the queue — presumably the failure path is
    // handled elsewhere; confirm against StartDecodeThread's contract.
  }
}
|
|
|
|
|
2012-08-22 08:56:38 -07:00
|
|
|
// Returns the current number of live decode threads. Takes the tracker
// monitor, so must not be called with any other monitor held (deadlock risk).
uint32_t StateMachineTracker::GetDecodeThreadCount()
{
  ReentrantMonitorAutoEnter mon(mMonitor);
  return mDecodeThreadCount;
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
// Removes aStateMachine's pending decode-thread request, if one is queued.
// Safe to call when no request is queued (the loop simply finds nothing).
// Always returns NS_OK.
nsresult StateMachineTracker::CancelCreateDecodeThread(MediaDecoderStateMachine* aStateMachine) {
  ReentrantMonitorAutoEnter mon(mMonitor);
  int32_t size = mPending.GetSize();
  for (int32_t i = 0; i < size; ++i) {
    // Keep the typed pointer rather than immediately discarding the
    // static_cast result into a void* (matches IsQueued above).
    MediaDecoderStateMachine* m =
      static_cast<MediaDecoderStateMachine*>(mPending.ObjectAt(i));
    if (m == aStateMachine) {
      mPending.RemoveObjectAt(i);
      break;
    }
  }
  NS_ASSERTION(!IsQueued(aStateMachine), "State machine should no longer have queued request.");
  return NS_OK;
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
// Starts a decode thread for aStateMachine immediately if a slot is free,
// otherwise queues the request (FIFO) to be serviced when a decode thread
// exits (see NoteDecodeThreadDestroyed). Returns NS_OK whether the thread
// was started or merely queued; NS_ERROR_UNEXPECTED if aStateMachine is null.
nsresult StateMachineTracker::RequestCreateDecodeThread(MediaDecoderStateMachine* aStateMachine)
{
  NS_ENSURE_STATE(aStateMachine);
  ReentrantMonitorAutoEnter mon(mMonitor);
  // NOTE(review): `mDecodeThreadCount + 1 >= MAX_DECODE_THREADS` queues once
  // the count reaches MAX-1, i.e. at most MAX-1 threads ever run concurrently;
  // confirm whether the off-by-one is intentional headroom.
  if (mPending.GetSize() > 0 || mDecodeThreadCount + 1 >= MAX_DECODE_THREADS) {
    // If there's already state machines in the queue, or we've exceeded the
    // limit, append the state machine to the queue of state machines waiting
    // for a decode thread. This ensures state machines already waiting get
    // their threads first.
    mPending.Push(aStateMachine);
    return NS_OK;
  }
  nsresult rv;
  {
    // Drop the monitor while starting the thread to avoid lock-order issues.
    ReentrantMonitorAutoExit exitMon(mMonitor);
    rv = aStateMachine->StartDecodeThread();
  }
  if (NS_SUCCEEDED(rv)) {
    ++mDecodeThreadCount;
  }
  NS_ASSERTION(mDecodeThreadCount <= MAX_DECODE_THREADS,
               "Should keep to thread limit!");
  return NS_OK;
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
// Constructor. Runs on the main thread (asserted). Registers with the global
// StateMachineTracker (ensuring the shared state machine thread exists),
// applies realtime/buffering preferences, and sizes the decoded-video queue.
// -1 sentinels on the time members mean "not yet known/started".
MediaDecoderStateMachine::MediaDecoderStateMachine(MediaDecoder* aDecoder,
                                                   MediaDecoderReader* aReader,
                                                   bool aRealTime) :
  mDecoder(aDecoder),
  mState(DECODER_STATE_DECODING_METADATA),
  mResetPlayStartTime(false),
  mPlayDuration(0),
  mStartTime(-1),
  mEndTime(-1),
  mSeekTime(0),
  mFragmentEndTime(-1),
  mReader(aReader),
  mCurrentFrameTime(0),
  mAudioStartTime(-1),
  mAudioEndTime(-1),
  mVideoFrameEndTime(-1),
  mVolume(1.0),
  mPlaybackRate(1.0),
  mPreservesPitch(true),
  mBasePosition(0),
  mAudioCaptured(false),
  mTransportSeekable(true),
  mMediaSeekable(true),
  mPositionChangeQueued(false),
  mAudioCompleted(false),
  mGotDurationFromMetaData(false),
  mStopDecodeThread(true),
  mDecodeThreadIdle(false),
  mStopAudioThread(true),
  mQuickBuffering(false),
  mIsRunning(false),
  mRunAgain(false),
  mDispatchedRunEvent(false),
  mDecodeThreadWaiting(false),
  mRealTime(aRealTime),
  mDidThrottleAudioDecoding(false),
  mDidThrottleVideoDecoding(false),
  mRequestedNewDecodeThread(false),
  mEventManager(aDecoder),
  mLastFrameStatus(MediaDecoderOwner::NEXT_FRAME_UNINITIALIZED)
{
  MOZ_COUNT_CTOR(MediaDecoderStateMachine);
  NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");

  // Balanced by CleanupGlobalStateMachine() in the destructor.
  StateMachineTracker::Instance().EnsureGlobalStateMachine();

  // only enable realtime mode when "media.realtime_decoder.enabled" is true.
  if (Preferences::GetBool("media.realtime_decoder.enabled", false) == false)
    mRealTime = false;

  // Realtime decoders never wait for buffering and use no low-data threshold.
  mBufferingWait = mRealTime ? 0 : BUFFERING_WAIT_S;
  mLowDataThresholdUsecs = mRealTime ? 0 : LOW_DATA_THRESHOLD_USECS;

  // If we've got more than mAmpleVideoFrames decoded video frames waiting in
  // the video queue, we will not decode any more video frames until some have
  // been consumed by the play state machine thread.
#if defined(MOZ_WIDGET_GONK) || defined(MOZ_MEDIA_PLUGINS)
  // On B2G and Android this is decided by a similar value which varies for
  // each OMX decoder |OMX_PARAM_PORTDEFINITIONTYPE::nBufferCountMin|. This
  // number must be less than the OMX equivalent or gecko will think it is
  // chronically starved of video frames. All decoders seen so far have a value
  // of at least 4.
  mAmpleVideoFrames = Preferences::GetUint("media.video-queue.default-size", 3);
#else
  mAmpleVideoFrames = Preferences::GetUint("media.video-queue.default-size", 10);
#endif
  // Clamp to at least 2 so we always have a current and a next frame.
  if (mAmpleVideoFrames < 2) {
    mAmpleVideoFrames = 2;
  }
#ifdef XP_WIN
  // Ensure high precision timers are enabled on Windows, otherwise the state
  // machine thread isn't woken up at reliable intervals to set the next frame,
  // and we drop frames while painting. Note that multiple calls to this
  // function per-process is OK, provided each call is matched by a corresponding
  // timeEndPeriod() call.
  timeBeginPeriod(1);
#endif
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
// Destructor. Runs on the main thread (asserted). Requires that all pending
// decode-thread requests were cancelled beforehand; cancels the timer,
// releases the reader, deregisters from the global tracker, and restores the
// Windows timer resolution raised in the constructor.
MediaDecoderStateMachine::~MediaDecoderStateMachine()
{
  NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
  MOZ_COUNT_DTOR(MediaDecoderStateMachine);
  NS_ASSERTION(!mPendingWakeDecoder.get(),
               "WakeDecoder should have been revoked already");
  NS_ASSERTION(!StateMachineTracker::Instance().IsQueued(this),
               "Should not have a pending request for a new decode thread");
  NS_ASSERTION(!mRequestedNewDecodeThread,
               "Should not have (or flagged) a pending request for a new decode thread");
  if (mTimer)
    mTimer->Cancel();
  mTimer = nullptr;
  mReader = nullptr;

  // Balances EnsureGlobalStateMachine() from the constructor; may destroy
  // the shared state machine thread if we were the last state machine.
  StateMachineTracker::Instance().CleanupGlobalStateMachine();
#ifdef XP_WIN
  // Matches the timeBeginPeriod(1) in the constructor.
  timeEndPeriod(1);
#endif
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
bool MediaDecoderStateMachine::HasFutureAudio() const {
|
2011-04-29 12:21:57 -07:00
|
|
|
mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
|
2010-09-14 16:24:47 -07:00
|
|
|
NS_ASSERTION(HasAudio(), "Should only call HasFutureAudio() when we have audio");
|
|
|
|
// We've got audio ready to play if:
|
|
|
|
// 1. We've not completed playback of audio, and
|
|
|
|
// 2. we either have more than the threshold of decoded audio available, or
|
|
|
|
// we've completely decoded all audio (but not finished playing it yet
|
|
|
|
// as per 1).
|
|
|
|
return !mAudioCompleted &&
|
2012-11-22 02:38:28 -08:00
|
|
|
(AudioDecodedUsecs() > LOW_AUDIO_USECS * mPlaybackRate || mReader->AudioQueue().IsFinished());
|
2010-05-12 17:59:42 -07:00
|
|
|
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
bool MediaDecoderStateMachine::HaveNextFrameData() const {
|
2011-04-29 12:21:57 -07:00
|
|
|
mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
|
2010-09-14 16:24:47 -07:00
|
|
|
return (!HasAudio() || HasFutureAudio()) &&
|
2012-09-17 13:45:38 -07:00
|
|
|
(!HasVideo() || mReader->VideoQueue().GetSize() > 0);
|
2010-05-12 17:59:42 -07:00
|
|
|
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
// Returns the total usecs of decoded-but-unplayed audio: everything in the
// audio queue, plus — once any audio has been written (mAudioEndTime != -1) —
// the span between the current media time and the end of written audio.
// Decode thread only; caller must hold the decoder monitor.
int64_t MediaDecoderStateMachine::GetDecodedAudioDuration() {
  NS_ASSERTION(OnDecodeThread(), "Should be on decode thread.");
  mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
  int64_t total = mReader->AudioQueue().Duration();
  if (mAudioEndTime != -1) {
    total += mAudioEndTime - GetMediaTime();
  }
  return total;
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
// Main entry point of the decode thread. Loads metadata first (failure forces
// SHUTDOWN), then loops running DecodeLoop()/DecodeSeek() according to the
// current state until shutdown, completion, or an explicit stop request.
// Holds the decoder monitor for the whole state loop; DecodeLoop/DecodeSeek
// presumably release it internally while doing blocking work — confirm there.
void MediaDecoderStateMachine::DecodeThreadRun()
{
  NS_ASSERTION(OnDecodeThread(), "Should be on decode thread.");
  mReader->OnDecodeThreadStart();

  {
    ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());

    if (mState == DECODER_STATE_DECODING_METADATA &&
        NS_FAILED(DecodeMetadata())) {
      NS_ASSERTION(mState == DECODER_STATE_SHUTDOWN,
                   "Should be in shutdown state if metadata loading fails.");
      LOG(PR_LOG_DEBUG, ("Decode metadata failed, shutting down decode thread"));
    }

    while (mState != DECODER_STATE_SHUTDOWN &&
           mState != DECODER_STATE_COMPLETED &&
           !mStopDecodeThread)
    {
      if (mState == DECODER_STATE_DECODING || mState == DECODER_STATE_BUFFERING) {
        DecodeLoop();
      } else if (mState == DECODER_STATE_SEEKING) {
        DecodeSeek();
      }
    }

    // Mark ourselves idle while still under the monitor so observers see a
    // consistent state before the thread winds down.
    mDecodeThreadIdle = true;
    LOG(PR_LOG_DEBUG, ("%p Decode thread finished", mDecoder.get()));
  }

  mReader->OnDecodeThreadFinish();
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
// Appends one decoded audio packet to the output AudioSegment destined for a
// captured MediaStream, inserting silence for any gap and splitting the first
// packet if the stream starts mid-packet. Must mirror AudioLoop's silence
// accounting exactly so captured and direct playback stay in sync.
// Decode or state machine thread; caller must hold the decoder monitor.
void MediaDecoderStateMachine::SendStreamAudio(AudioData* aAudio,
                                               DecodedStreamData* aStream,
                                               AudioSegment* aOutput)
{
  NS_ASSERTION(OnDecodeThread() ||
               OnStateMachineThread(), "Should be on decode thread or state machine thread");
  mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();

  if (aAudio->mTime <= aStream->mLastAudioPacketTime) {
    // ignore packet that we've already processed
    return;
  }
  aStream->mLastAudioPacketTime = aAudio->mTime;
  aStream->mLastAudioPacketEndTime = aAudio->GetEnd();

  // This logic has to mimic AudioLoop closely to make sure we write
  // the exact same silences
  CheckedInt64 audioWrittenOffset = UsecsToFrames(mInfo.mAudioRate,
      aStream->mInitialTime + mStartTime) + aStream->mAudioFramesWritten;
  CheckedInt64 frameOffset = UsecsToFrames(mInfo.mAudioRate, aAudio->mTime);
  // Bail on arithmetic overflow in either frame-offset computation.
  if (!audioWrittenOffset.isValid() || !frameOffset.isValid())
    return;
  if (audioWrittenOffset.value() < frameOffset.value()) {
    // Write silence to catch up
    LOG(PR_LOG_DEBUG, ("%p Decoder writing %d frames of silence to MediaStream",
                       mDecoder.get(), int32_t(frameOffset.value() - audioWrittenOffset.value())));
    AudioSegment silence;
    silence.InsertNullDataAtStart(frameOffset.value() - audioWrittenOffset.value());
    aStream->mAudioFramesWritten += silence.GetDuration();
    aOutput->AppendFrom(&silence);
  }

  // Number of leading frames of this packet to skip.
  int64_t offset;
  if (aStream->mAudioFramesWritten == 0) {
    NS_ASSERTION(frameOffset.value() <= audioWrittenOffset.value(),
                 "Otherwise we'd have taken the write-silence path");
    // We're starting in the middle of a packet. Split the packet.
    offset = audioWrittenOffset.value() - frameOffset.value();
  } else {
    // Write the entire packet.
    offset = 0;
  }

  // Entire packet lies before the stream start: nothing to write.
  if (offset >= aAudio->mFrames)
    return;

  aAudio->EnsureAudioBuffer();
  nsRefPtr<SharedBuffer> buffer = aAudio->mAudioBuffer;
  // Channels are stored planar: channel i occupies frames [i*mFrames, (i+1)*mFrames).
  AudioDataValue* bufferData = static_cast<AudioDataValue*>(buffer->Data());
  nsAutoTArray<const AudioDataValue*,2> channels;
  for (uint32_t i = 0; i < aAudio->mChannels; ++i) {
    channels.AppendElement(bufferData + i*aAudio->mFrames + offset);
  }
  // NOTE(review): AppendFrames is passed aAudio->mFrames even when offset > 0,
  // while the bookkeeping below adds mFrames - offset — confirm against
  // AudioSegment::AppendFrames' expected frame count.
  aOutput->AppendFrames(buffer.forget(), channels, aAudio->mFrames);
  LOG(PR_LOG_DEBUG, ("%p Decoder writing %d frames of data to MediaStream for AudioData at %lld",
                     mDecoder.get(), aAudio->mFrames - int32_t(offset), aAudio->mTime));
  aStream->mAudioFramesWritten += aAudio->mFrames - int32_t(offset);
}
|
|
|
|
|
2012-12-18 20:48:32 -08:00
|
|
|
static void WriteVideoToMediaStream(layers::Image* aImage,
|
2012-08-22 08:56:38 -07:00
|
|
|
int64_t aDuration, const gfxIntSize& aIntrinsicSize,
|
2012-04-29 20:12:42 -07:00
|
|
|
VideoSegment* aOutput)
|
|
|
|
{
|
2012-12-18 20:48:32 -08:00
|
|
|
nsRefPtr<layers::Image> image = aImage;
|
2012-04-29 20:12:42 -07:00
|
|
|
aOutput->AppendFrame(image.forget(), aDuration, aIntrinsicSize);
|
|
|
|
}
|
|
|
|
|
|
|
|
// Track IDs used when adding decoded audio/video tracks to a captured
// SourceMediaStream (see SendStreamData).
static const TrackID TRACK_AUDIO = 1;
static const TrackID TRACK_VIDEO = 2;
// Video track rate: USECS_PER_S ticks per second, i.e. one tick per usec.
static const TrackRate RATE_VIDEO = USECS_PER_S;
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
// Pushes decoded audio and video from the reader's queues into the decoder's
// captured output SourceMediaStream (if any). Initializes the stream's tracks
// on first use, appends any newly decoded data, ends tracks when their queues
// finish, and (when audio is captured) discards audio packets that have
// already been consumed. Must be called with the decoder monitor held, on
// the decode thread or state machine thread.
void MediaDecoderStateMachine::SendStreamData()
{
  NS_ASSERTION(OnDecodeThread() ||
               OnStateMachineThread(), "Should be on decode thread or state machine thread");
  mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();

  // Nothing to do unless this decoder's output is being captured.
  DecodedStreamData* stream = mDecoder->GetDecodedStream();
  if (!stream)
    return;

  // Track setup below needs mInfo, which isn't valid until metadata is read.
  if (mState == DECODER_STATE_DECODING_METADATA)
    return;

  // If there's still an audio thread alive, then we can't send any stream
  // data yet since both SendStreamData and the audio thread want to be in
  // charge of popping the audio queue. We're waiting for the audio thread
  // to die before sending anything to our stream.
  if (mAudioThread)
    return;

  int64_t minLastAudioPacketTime = INT64_MAX;
  SourceMediaStream* mediaStream = stream->mStream;
  StreamTime endPosition = 0;

  // Lazily create the output tracks the first time we get this far.
  if (!stream->mStreamInitialized) {
    if (mInfo.mHasAudio) {
      AudioSegment* audio = new AudioSegment();
      mediaStream->AddTrack(TRACK_AUDIO, mInfo.mAudioRate, 0, audio);
    }
    if (mInfo.mHasVideo) {
      VideoSegment* video = new VideoSegment();
      mediaStream->AddTrack(TRACK_VIDEO, RATE_VIDEO, 0, video);
    }
    stream->mStreamInitialized = true;
  }

  if (mInfo.mHasAudio) {
    nsAutoTArray<AudioData*,10> audio;
    // It's OK to hold references to the AudioData because while audio
    // is captured, only the decoder thread pops from the queue (see below).
    mReader->AudioQueue().GetElementsAfter(stream->mLastAudioPacketTime, &audio);
    AudioSegment output;
    for (uint32_t i = 0; i < audio.Length(); ++i) {
      SendStreamAudio(audio[i], stream, &output);
    }
    if (output.GetDuration() > 0) {
      mediaStream->AppendToTrack(TRACK_AUDIO, &output);
    }
    if (mReader->AudioQueue().IsFinished() && !stream->mHaveSentFinishAudio) {
      mediaStream->EndTrack(TRACK_AUDIO);
      stream->mHaveSentFinishAudio = true;
    }
    minLastAudioPacketTime = std::min(minLastAudioPacketTime, stream->mLastAudioPacketTime);
    endPosition = std::max(endPosition,
        TicksToTimeRoundDown(mInfo.mAudioRate, stream->mAudioFramesWritten));
  }

  if (mInfo.mHasVideo) {
    nsAutoTArray<VideoData*,10> video;
    // It's OK to hold references to the VideoData only the decoder thread
    // pops from the queue.
    mReader->VideoQueue().GetElementsAfter(stream->mNextVideoTime + mStartTime, &video);
    VideoSegment output;
    for (uint32_t i = 0; i < video.Length(); ++i) {
      VideoData* v = video[i];
      if (stream->mNextVideoTime + mStartTime < v->mTime) {
        LOG(PR_LOG_DEBUG, ("%p Decoder writing last video to MediaStream %p for %lld ms",
                           mDecoder.get(), mediaStream,
                           v->mTime - (stream->mNextVideoTime + mStartTime)));
        // There's a gap before this frame's start time: write the last video
        // frame again to catch up. mLastVideoImage can be null here which is
        // fine, it just means there's no video.
        WriteVideoToMediaStream(stream->mLastVideoImage,
            v->mTime - (stream->mNextVideoTime + mStartTime), stream->mLastVideoImageDisplaySize,
            &output);
        stream->mNextVideoTime = v->mTime - mStartTime;
      }
      if (stream->mNextVideoTime + mStartTime < v->mEndTime) {
        LOG(PR_LOG_DEBUG, ("%p Decoder writing video frame %lld to MediaStream %p for %lld ms",
                           mDecoder.get(), v->mTime, mediaStream,
                           v->mEndTime - (stream->mNextVideoTime + mStartTime)));
        WriteVideoToMediaStream(v->mImage,
            v->mEndTime - (stream->mNextVideoTime + mStartTime), v->mDisplay,
            &output);
        stream->mNextVideoTime = v->mEndTime - mStartTime;
        stream->mLastVideoImage = v->mImage;
        stream->mLastVideoImageDisplaySize = v->mDisplay;
      } else {
        // Frame ends at or before the stream's current video time; skip it.
        LOG(PR_LOG_DEBUG, ("%p Decoder skipping writing video frame %lld to MediaStream",
                           mDecoder.get(), v->mTime));
      }
    }
    if (output.GetDuration() > 0) {
      mediaStream->AppendToTrack(TRACK_VIDEO, &output);
    }
    if (mReader->VideoQueue().IsFinished() && !stream->mHaveSentFinishVideo) {
      mediaStream->EndTrack(TRACK_VIDEO);
      stream->mHaveSentFinishVideo = true;
    }
    endPosition = std::max(endPosition,
        TicksToTimeRoundDown(RATE_VIDEO, stream->mNextVideoTime - stream->mInitialTime));
  }

  if (!stream->mHaveSentFinish) {
    stream->mStream->AdvanceKnownTracksTime(endPosition);
  }

  // We're done once every track we have has drained its queue.
  bool finished =
      (!mInfo.mHasAudio || mReader->AudioQueue().IsFinished()) &&
      (!mInfo.mHasVideo || mReader->VideoQueue().IsFinished());
  if (finished && !stream->mHaveSentFinish) {
    stream->mHaveSentFinish = true;
    stream->mStream->Finish();
  }

  if (mAudioCaptured) {
    // Discard audio packets that are no longer needed: everything that ends
    // before both the stream's write position and the current playback time.
    int64_t audioPacketTimeToDiscard =
        std::min(minLastAudioPacketTime, mStartTime + mCurrentFrameTime);
    while (true) {
      nsAutoPtr<AudioData> a(mReader->AudioQueue().PopFront());
      if (!a)
        break;
      // Packet times are not 100% reliable, so this may discard packets that
      // actually contain data for mCurrentFrameTime. If a new output stream
      // is created later, the audio for the very start may then be missing
      // and silence is played instead for a brief moment. That's OK; seeking
      // to this time would have a similar issue for such badly muxed
      // resources.
      if (a->GetEnd() >= audioPacketTimeToDiscard) {
        // Still needed: put it back and stop discarding.
        mReader->AudioQueue().PushFront(a.forget());
        break;
      }
    }

    if (finished) {
      mAudioCompleted = true;
      UpdateReadyState();
    }
  }
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
// Returns the singleton WakeDecoderRunnable for this state machine,
// creating it on first use. Caller must hold the decoder monitor.
MediaDecoderStateMachine::WakeDecoderRunnable*
MediaDecoderStateMachine::GetWakeDecoderRunnable()
{
  mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();

  WakeDecoderRunnable* runnable = mPendingWakeDecoder.get();
  if (!runnable) {
    mPendingWakeDecoder = new WakeDecoderRunnable(this);
    runnable = mPendingWakeDecoder.get();
  }
  return runnable;
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
bool MediaDecoderStateMachine::HaveEnoughDecodedAudio(int64_t aAmpleAudioUSecs)
|
2012-04-29 20:12:42 -07:00
|
|
|
{
|
|
|
|
mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
|
|
|
|
|
2012-09-17 13:45:38 -07:00
|
|
|
if (mReader->AudioQueue().GetSize() == 0 ||
|
2012-04-29 20:12:42 -07:00
|
|
|
GetDecodedAudioDuration() < aAmpleAudioUSecs) {
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
if (!mAudioCaptured) {
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
2012-07-31 05:17:22 -07:00
|
|
|
DecodedStreamData* stream = mDecoder->GetDecodedStream();
|
|
|
|
if (stream && stream->mStreamInitialized && !stream->mHaveSentFinishAudio) {
|
|
|
|
if (!stream->mStream->HaveEnoughBuffered(TRACK_AUDIO)) {
|
2012-04-29 20:12:42 -07:00
|
|
|
return false;
|
|
|
|
}
|
2012-07-31 05:17:22 -07:00
|
|
|
stream->mStream->DispatchWhenNotEnoughBuffered(TRACK_AUDIO,
|
|
|
|
GetStateMachineThread(), GetWakeDecoderRunnable());
|
2012-04-29 20:12:42 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
bool MediaDecoderStateMachine::HaveEnoughDecodedVideo()
|
2012-04-29 20:12:42 -07:00
|
|
|
{
|
|
|
|
mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
|
|
|
|
|
2012-11-27 18:34:53 -08:00
|
|
|
if (static_cast<uint32_t>(mReader->VideoQueue().GetSize()) < GetAmpleVideoFrames() * mPlaybackRate) {
|
2012-04-29 20:12:42 -07:00
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
2012-07-31 05:17:22 -07:00
|
|
|
DecodedStreamData* stream = mDecoder->GetDecodedStream();
|
|
|
|
if (stream && stream->mStreamInitialized && !stream->mHaveSentFinishVideo) {
|
|
|
|
if (!stream->mStream->HaveEnoughBuffered(TRACK_VIDEO)) {
|
2012-04-29 20:12:42 -07:00
|
|
|
return false;
|
|
|
|
}
|
2012-07-31 05:17:22 -07:00
|
|
|
stream->mStream->DispatchWhenNotEnoughBuffered(TRACK_VIDEO,
|
|
|
|
GetStateMachineThread(), GetWakeDecoderRunnable());
|
2012-04-29 20:12:42 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
// Main loop of the decode thread. Alternately decodes audio and video,
// throttling each once "ample" amounts are queued, skipping video to the
// next keyframe when decode falls behind, and sleeping on the decoder
// monitor when both streams are ample. Exits when decoding is stopped,
// the state changes away from DECODING/BUFFERING, or both streams end;
// on a normal end it transitions the state machine to COMPLETED.
// Runs with the decoder monitor held (released around blocking decodes).
void MediaDecoderStateMachine::DecodeLoop()
{
  LOG(PR_LOG_DEBUG, ("%p Start DecodeLoop()", mDecoder.get()));

  mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
  NS_ASSERTION(OnDecodeThread(), "Should be on decode thread.");

  // We want to "pump" the decode until we've got a few frames decoded
  // before we consider whether decode is falling behind.
  bool audioPump = true;
  bool videoPump = true;

  // If the video decode is falling behind the audio, we'll start dropping the
  // inter-frames up until the next keyframe which is at or before the current
  // playback position. skipToNextKeyframe is true if we're currently
  // skipping up to the next keyframe.
  bool skipToNextKeyframe = false;

  // Once we've decoded more than videoPumpThreshold video frames, we'll
  // no longer be considered to be "pumping video".
  const unsigned videoPumpThreshold = mRealTime ? 0 : GetAmpleVideoFrames() / 2;

  // After the audio decode fills with more than audioPumpThreshold usecs
  // of decoded audio, we'll start to check whether the audio or video decode
  // is falling behind.
  const unsigned audioPumpThreshold = mRealTime ? 0 : LOW_AUDIO_USECS * 2;

  // Our local low audio threshold. We may increase this if we're slow to
  // decode video frames, in order to reduce the chance of audio underruns.
  int64_t lowAudioThreshold = LOW_AUDIO_USECS;

  // Our local ample audio threshold. If we increase lowAudioThreshold, we'll
  // also increase this too appropriately (we don't want lowAudioThreshold to
  // be greater than ampleAudioThreshold, else we'd stop decoding!).
  int64_t ampleAudioThreshold = AMPLE_AUDIO_USECS;

  // Main decode loop.
  bool videoPlaying = HasVideo();
  bool audioPlaying = HasAudio();
  while ((mState == DECODER_STATE_DECODING || mState == DECODER_STATE_BUFFERING) &&
         !mStopDecodeThread &&
         (videoPlaying || audioPlaying))
  {
#ifdef MOZ_DASH
    mReader->PrepareToDecode();
#endif

    // We don't want to consider skipping to the next keyframe if we've
    // only just started up the decode loop, so wait until we've decoded
    // some frames before enabling the keyframe skip logic on video.
    if (videoPump &&
        (static_cast<uint32_t>(mReader->VideoQueue().GetSize())
         >= videoPumpThreshold * mPlaybackRate))
    {
      videoPump = false;
    }

    // We don't want to consider skipping to the next keyframe if we've
    // only just started up the decode loop, so wait until we've decoded
    // some audio data before enabling the keyframe skip logic on audio.
    if (audioPump && GetDecodedAudioDuration() >= audioPumpThreshold * mPlaybackRate) {
      audioPump = false;
    }

    // We'll skip the video decode to the nearest keyframe if we're low on
    // audio, or if we're low on video, provided we're not running low on
    // data to decode. If we're running low on downloaded data to decode,
    // we won't start keyframe skipping, as we'll be pausing playback to buffer
    // soon anyway and we'll want to be able to display frames immediately
    // after buffering finishes.
    if (mState == DECODER_STATE_DECODING &&
        !skipToNextKeyframe &&
        videoPlaying &&
        ((!audioPump && audioPlaying && !mDidThrottleAudioDecoding &&
          GetDecodedAudioDuration() < lowAudioThreshold * mPlaybackRate) ||
         (!videoPump && videoPlaying && !mDidThrottleVideoDecoding &&
          (static_cast<uint32_t>(mReader->VideoQueue().GetSize())
           < LOW_VIDEO_FRAMES * mPlaybackRate))) &&
        !HasLowUndecodedData())
    {
      skipToNextKeyframe = true;
      LOG(PR_LOG_DEBUG, ("%p Skipping video decode to the next keyframe", mDecoder.get()));
    }

    // Video decode.
    bool throttleVideoDecoding = !videoPlaying || HaveEnoughDecodedVideo();
    if (mDidThrottleVideoDecoding && !throttleVideoDecoding) {
      // Just came off a throttle: pump again before judging fall-behind.
      videoPump = true;
    }
    mDidThrottleVideoDecoding = throttleVideoDecoding;
    if (!throttleVideoDecoding)
    {
      // Time the video decode, so that if it's slow, we can increase our low
      // audio threshold to reduce the chance of an audio underrun while we're
      // waiting for a video decode to complete.
      TimeDuration decodeTime;
      {
        int64_t currentTime = GetMediaTime();
        // Drop the monitor while the (possibly slow) decode runs.
        ReentrantMonitorAutoExit exitMon(mDecoder->GetReentrantMonitor());
        TimeStamp start = TimeStamp::Now();
        videoPlaying = mReader->DecodeVideoFrame(skipToNextKeyframe, currentTime);
        decodeTime = TimeStamp::Now() - start;
      }
      if (THRESHOLD_FACTOR * DurationToUsecs(decodeTime) > lowAudioThreshold &&
          !HasLowUndecodedData())
      {
        lowAudioThreshold =
          std::min(THRESHOLD_FACTOR * DurationToUsecs(decodeTime), AMPLE_AUDIO_USECS);
        ampleAudioThreshold = std::max(THRESHOLD_FACTOR * lowAudioThreshold,
                                       ampleAudioThreshold);
        LOG(PR_LOG_DEBUG,
            ("Slow video decode, set lowAudioThreshold=%lld ampleAudioThreshold=%lld",
             lowAudioThreshold, ampleAudioThreshold));
      }
    }

    // Audio decode.
    bool throttleAudioDecoding = !audioPlaying || HaveEnoughDecodedAudio(ampleAudioThreshold * mPlaybackRate);
    if (mDidThrottleAudioDecoding && !throttleAudioDecoding) {
      // Just came off a throttle: pump again before judging fall-behind.
      audioPump = true;
    }
    mDidThrottleAudioDecoding = throttleAudioDecoding;
    // mDidThrottleAudioDecoding was just assigned throttleAudioDecoding, so
    // this decodes exactly when audio is not throttled.
    if (!mDidThrottleAudioDecoding) {
      // Drop the monitor while the (possibly slow) decode runs.
      ReentrantMonitorAutoExit exitMon(mDecoder->GetReentrantMonitor());
      audioPlaying = mReader->DecodeAudioData();
    }

    // Forward newly decoded data to any captured output MediaStream.
    SendStreamData();

    // Notify to ensure that the AudioLoop() is not waiting, in case it was
    // waiting for more audio to be decoded.
    mDecoder->GetReentrantMonitor().NotifyAll();

    // The ready state can change when we've decoded data, so update the
    // ready state, so that DOM events can fire.
    UpdateReadyState();

    if ((mState == DECODER_STATE_DECODING || mState == DECODER_STATE_BUFFERING) &&
        !mStopDecodeThread &&
        (videoPlaying || audioPlaying) &&
        throttleAudioDecoding && throttleVideoDecoding)
    {
      // All active bitstreams' decode is well ahead of the playback
      // position, we may as well wait for the playback to catch up. Note the
      // audio push thread acquires and notifies the decoder monitor every time
      // it pops AudioData off the audio queue. So if the audio push thread pops
      // the last AudioData off the audio queue right after that queue reported
      // it was non-empty here, we'll receive a notification on the decoder
      // monitor which will wake us up shortly after we sleep, thus preventing
      // both the decode and audio push threads waiting at the same time.
      // See bug 620326.
      mDecodeThreadWaiting = true;
      if (mDecoder->GetState() != MediaDecoder::PLAY_STATE_PLAYING) {
        // We're not playing, and the decode is about to wait. This means
        // the decode thread may not be needed in future. Signal the state
        // machine thread to run, so it can decide whether to shutdown the
        // decode thread.
        ScheduleStateMachine();
      }
      mDecoder->GetReentrantMonitor().Wait();
      mDecodeThreadWaiting = false;
    }

  } // End decode loop.

  // Both streams ended naturally (not stopped, not seeking): report
  // completion to the state machine.
  if (!mStopDecodeThread &&
      mState != DECODER_STATE_SHUTDOWN &&
      mState != DECODER_STATE_SEEKING)
  {
    mState = DECODER_STATE_COMPLETED;
    ScheduleStateMachine();
  }

  LOG(PR_LOG_DEBUG, ("%p Exiting DecodeLoop", mDecoder.get()));
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
bool MediaDecoderStateMachine::IsPlaying()
|
2010-04-01 20:03:07 -07:00
|
|
|
{
|
2011-04-29 12:21:57 -07:00
|
|
|
mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
|
2010-04-01 20:03:07 -07:00
|
|
|
|
|
|
|
return !mPlayStartTime.IsNull();
|
|
|
|
}
|
|
|
|
|
2012-11-26 06:13:08 -08:00
|
|
|
// If we have already written enough frames to the AudioStream, start the
|
|
|
|
// playback.
|
|
|
|
static void
|
|
|
|
StartAudioStreamPlaybackIfNeeded(AudioStream* aStream)
|
|
|
|
{
|
|
|
|
// We want to have enough data in the buffer to start the stream.
|
2013-01-22 21:53:10 -08:00
|
|
|
if (static_cast<double>(aStream->GetWritten()) / aStream->GetRate() >=
|
2012-11-26 06:13:08 -08:00
|
|
|
static_cast<double>(AUDIOSTREAM_MIN_WRITE_BEFORE_START_USECS) / USECS_PER_S) {
|
|
|
|
aStream->Start();
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
// Writes aFrames frames of silence to the audio stream, then starts the
// stream if enough audio has now been written.
static void WriteSilence(AudioStream* aStream, uint32_t aFrames)
{
  const uint32_t sampleCount = aFrames * aStream->GetChannels();
  nsAutoTArray<AudioDataValue, 1000> silence;
  silence.SetLength(sampleCount);
  // SetLength doesn't zero the elements; clear them explicitly.
  memset(silence.Elements(), 0, sampleCount * sizeof(AudioDataValue));
  aStream->Write(silence.Elements(), aFrames);

  StartAudioStreamPlaybackIfNeeded(aStream);
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
// Main loop of the audio thread. Creates and configures the AudioStream,
// then repeatedly pulls AudioData off the reader's queue and pushes it (or
// gap-filling silence) to the audio hardware, tracking the audio end time.
// On end of stream it drains the hardware; on exit it shuts down and nulls
// mAudioStream under the decoder monitor.
void MediaDecoderStateMachine::AudioLoop()
{
  NS_ASSERTION(OnAudioThread(), "Should be on audio thread.");
  LOG(PR_LOG_DEBUG, ("%p Begun audio thread/loop", mDecoder.get()));
  // Total frames written to the hardware so far (data plus silence).
  int64_t audioDuration = 0;
  int64_t audioStartTime = -1;
  uint32_t channels, rate;
  double volume = -1;
  bool setVolume;
  double playbackRate = -1;
  bool setPlaybackRate;
  bool preservesPitch;
  bool setPreservesPitch;
  AudioChannelType audioChannelType;

  // Snapshot the stream parameters under the monitor.
  {
    ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
    mAudioCompleted = false;
    audioStartTime = mAudioStartTime;
    NS_ASSERTION(audioStartTime != -1, "Should have audio start time by now");
    channels = mInfo.mAudioChannels;
    rate = mInfo.mAudioRate;

    audioChannelType = mDecoder->GetAudioChannelType();
    volume = mVolume;
    preservesPitch = mPreservesPitch;
    playbackRate = mPlaybackRate;
  }

  {
    // AudioStream initialization can block for extended periods in unusual
    // circumstances, so we take care to drop the decoder monitor while
    // initializing.
    nsAutoPtr<AudioStream> audioStream(AudioStream::AllocateStream());
    audioStream->Init(channels, rate, audioChannelType);
    audioStream->SetVolume(volume);
    if (audioStream->SetPreservesPitch(preservesPitch) != NS_OK) {
      NS_WARNING("Setting the pitch preservation failed at AudioLoop start.");
    }
    if (playbackRate != 1.0) {
      NS_ASSERTION(playbackRate != 0,
                   "Don't set the playbackRate to 0 on an AudioStream.");
      if (audioStream->SetPlaybackRate(playbackRate) != NS_OK) {
        NS_WARNING("Setting the playback rate failed at AudioLoop start.");
      }
    }

    // Publish the initialized stream under the monitor.
    {
      ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
      mAudioStream = audioStream;
    }
  }

  while (1) {
    // Wait while we're not playing, and we're not shutting down, or we're
    // playing and we've got no audio to play.
    {
      ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
      NS_ASSERTION(mState != DECODER_STATE_DECODING_METADATA,
                   "Should have meta data before audio started playing.");
      while (mState != DECODER_STATE_SHUTDOWN &&
             !mStopAudioThread &&
             (!IsPlaying() ||
              mState == DECODER_STATE_BUFFERING ||
              (mReader->AudioQueue().GetSize() == 0 &&
               !mReader->AudioQueue().AtEndOfStream())))
      {
        if (!IsPlaying() && !mAudioStream->IsPaused()) {
          mAudioStream->Pause();
        }
        mon.Wait();
      }

      // If we're shutting down, break out and exit the audio thread.
      // Also break out if audio is being captured.
      if (mState == DECODER_STATE_SHUTDOWN ||
          mStopAudioThread ||
          mReader->AudioQueue().AtEndOfStream())
      {
        break;
      }

      // We only want to go to the expense of changing the volume if
      // the volume has changed.
      setVolume = volume != mVolume;
      volume = mVolume;

      // Same for the playbackRate.
      setPlaybackRate = playbackRate != mPlaybackRate;
      playbackRate = mPlaybackRate;

      // Same for the pitch preservation.
      setPreservesPitch = preservesPitch != mPreservesPitch;
      preservesPitch = mPreservesPitch;

      if (IsPlaying() && mAudioStream->IsPaused()) {
        mAudioStream->Resume();
      }
    }

    // Apply any parameter changes outside the monitor.
    if (setVolume) {
      mAudioStream->SetVolume(volume);
    }
    if (setPlaybackRate) {
      NS_ASSERTION(playbackRate != 0,
                   "Don't set the playbackRate to 0 in the AudioStreams");
      if (mAudioStream->SetPlaybackRate(playbackRate) != NS_OK) {
        NS_WARNING("Setting the playback rate failed in AudioLoop.");
      }
    }
    if (setPreservesPitch) {
      if (mAudioStream->SetPreservesPitch(preservesPitch) != NS_OK) {
        NS_WARNING("Setting the pitch preservation failed in AudioLoop.");
      }
    }
    NS_ASSERTION(mReader->AudioQueue().GetSize() > 0,
                 "Should have data to play");
    // See if there's a gap in the audio. If there is, push silence into the
    // audio hardware, so we can play across the gap.
    const AudioData* s = mReader->AudioQueue().PeekFront();

    // Calculate the number of frames that have been pushed onto the audio
    // hardware.
    CheckedInt64 playedFrames = UsecsToFrames(audioStartTime, rate) +
                                audioDuration;
    // Calculate the timestamp of the next chunk of audio in numbers of
    // samples.
    CheckedInt64 sampleTime = UsecsToFrames(s->mTime, rate);
    CheckedInt64 missingFrames = sampleTime - playedFrames;
    if (!missingFrames.isValid() || !sampleTime.isValid()) {
      NS_WARNING("Int overflow adding in AudioLoop()");
      break;
    }

    int64_t framesWritten = 0;
    if (missingFrames.value() > 0) {
      // The next audio chunk begins some time after the end of the last chunk
      // we pushed to the audio hardware. We must push silence into the audio
      // hardware so that the next audio chunk begins playback at the correct
      // time.
      missingFrames = std::min<int64_t>(UINT32_MAX, missingFrames.value());
      LOG(PR_LOG_DEBUG, ("%p Decoder playing %d frames of silence",
                         mDecoder.get(), int32_t(missingFrames.value())));
      framesWritten = PlaySilence(static_cast<uint32_t>(missingFrames.value()),
                                  channels, playedFrames.value());
    } else {
      framesWritten = PlayFromAudioQueue(sampleTime.value(), channels);
    }
    audioDuration += framesWritten;
    // Update the audio end time (in usecs) under the monitor.
    {
      ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
      CheckedInt64 playedUsecs = FramesToUsecs(audioDuration, rate) + audioStartTime;
      if (!playedUsecs.isValid()) {
        NS_WARNING("Int overflow calculating audio end time");
        break;
      }
      mAudioEndTime = playedUsecs.value();
    }
  }
  {
    ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
    if (mReader->AudioQueue().AtEndOfStream() &&
        mState != DECODER_STATE_SHUTDOWN &&
        !mStopAudioThread)
    {
      // If the media was too short to trigger the start of the audio stream,
      // start it now.
      mAudioStream->Start();
      // Last frame pushed to audio hardware, wait for the audio to finish,
      // before the audio thread terminates.
      bool seeking = false;
      {
        int64_t oldPosition = -1;
        int64_t position = GetMediaTime();
        // Sleep in short blocks until playback stops advancing, the audio
        // end time is reached, or a seek/shutdown interrupts us.
        while (oldPosition != position &&
               mAudioEndTime - position > 0 &&
               mState != DECODER_STATE_SEEKING &&
               mState != DECODER_STATE_SHUTDOWN)
        {
          const int64_t DRAIN_BLOCK_USECS = 100000;
          Wait(std::min(mAudioEndTime - position, DRAIN_BLOCK_USECS));
          oldPosition = position;
          position = GetMediaTime();
        }
        seeking = mState == DECODER_STATE_SEEKING;
      }

      if (!seeking && !mAudioStream->IsPaused()) {
        {
          // Drain blocks; drop the monitor while it runs.
          ReentrantMonitorAutoExit exit(mDecoder->GetReentrantMonitor());
          mAudioStream->Drain();
        }
        // Fire one last event for any extra frames that didn't fill a framebuffer.
        mEventManager.Drain(mAudioEndTime);
      }
    }
  }
  LOG(PR_LOG_DEBUG, ("%p Reached audio stream end.", mDecoder.get()));
  {
    // Must hold lock while shutting down and nulling the audio stream to
    // prevent the state machine thread trying to use it while we're
    // destroying it.
    ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
    mAudioStream->Shutdown();
    mAudioStream = nullptr;
    mEventManager.Clear();
    if (!mAudioCaptured) {
      mAudioCompleted = true;
      UpdateReadyState();
      // Kick the decode thread; it may be sleeping waiting for this to finish.
      mDecoder->GetReentrantMonitor().NotifyAll();
    }
  }

  LOG(PR_LOG_DEBUG, ("%p Audio stream finished playing, audio thread exit", mDecoder.get()));
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
uint32_t MediaDecoderStateMachine::PlaySilence(uint32_t aFrames,
                                               uint32_t aChannels,
                                               uint64_t aFrameOffset)
{
  // Pushes up to aFrames frames of silence into the audio stream, reports
  // the write to the event manager, and returns the frame count written.
  NS_ASSERTION(OnAudioThread(), "Only call on audio thread.");
  NS_ASSERTION(!mAudioStream->IsPaused(), "Don't play when paused");

  // Each call writes at most SILENCE_BYTES_CHUNK bytes worth of frames.
  uint32_t chunkFrames = SILENCE_BYTES_CHUNK / aChannels / sizeof(AudioDataValue);
  uint32_t framesToWrite = std::min(aFrames, chunkFrames);
  WriteSilence(mAudioStream, framesToWrite);

  // Dispatch events to the DOM for the audio just written.
  mEventManager.QueueWrittenAudioData(nullptr, framesToWrite * aChannels,
                                      (aFrameOffset + framesToWrite) * aChannels);
  return framesToWrite;
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
// Pops the next decoded AudioData chunk off the reader's audio queue, writes
// it to the audio stream, queues the corresponding DOM audio events, and
// updates the decoder's playback byte offset. Returns frames written.
// Audio-thread only.
uint32_t MediaDecoderStateMachine::PlayFromAudioQueue(uint64_t aFrameOffset,
                                                      uint32_t aChannels)
{
  NS_ASSERTION(OnAudioThread(), "Only call on audio thread.");
  NS_ASSERTION(!mAudioStream->IsPaused(), "Don't play when paused");
  nsAutoPtr<AudioData> audio(mReader->AudioQueue().PopFront());
  {
    ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
    NS_WARN_IF_FALSE(IsPlaying(), "Should be playing");
    // Awaken the decode loop if it's waiting for space to free up in the
    // audio queue.
    mDecoder->GetReentrantMonitor().NotifyAll();
  }
  int64_t offset = -1;
  uint32_t frames = 0;
  // MOZ_QUIET suppresses the per-chunk debug logging.
  if (!PR_GetEnv("MOZ_QUIET")) {
    LOG(PR_LOG_DEBUG, ("%p Decoder playing %d frames of data to stream for AudioData at %lld",
                       mDecoder.get(), audio->mFrames, audio->mTime));
  }
  // Write outside the monitor; the stream may block until space is available.
  mAudioStream->Write(audio->mAudioData,
                      audio->mFrames);

  StartAudioStreamPlaybackIfNeeded(mAudioStream);

  offset = audio->mOffset;
  frames = audio->mFrames;

  // Dispatch events to the DOM for the audio just written.
  mEventManager.QueueWrittenAudioData(audio->mAudioData.get(),
                                      audio->mFrames * aChannels,
                                      (aFrameOffset + frames) * aChannels);
  if (offset != -1) {
    mDecoder->UpdatePlaybackOffset(offset);
  }
  return frames;
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
nsresult MediaDecoderStateMachine::Init(MediaDecoderStateMachine* aCloneDonor)
{
  // Initialize our reader, optionally cloning decode state from the donor
  // state machine's reader when one is supplied.
  MediaDecoderReader* cloneReader = nullptr;
  if (aCloneDonor) {
    cloneReader = static_cast<MediaDecoderStateMachine*>(aCloneDonor)->mReader;
  }
  return mReader->Init(cloneReader);
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
void MediaDecoderStateMachine::StopPlayback()
|
2010-04-01 20:03:07 -07:00
|
|
|
{
|
2011-07-11 20:39:37 -07:00
|
|
|
LOG(PR_LOG_DEBUG, ("%p StopPlayback()", mDecoder.get()));
|
|
|
|
|
2011-07-11 20:39:25 -07:00
|
|
|
NS_ASSERTION(OnStateMachineThread() || OnDecodeThread(),
|
2012-11-22 02:38:28 -08:00
|
|
|
"Should be on state machine thread or the decoder thread.");
|
2011-04-29 12:21:57 -07:00
|
|
|
mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
|
2010-04-01 20:03:07 -07:00
|
|
|
|
2012-12-13 11:42:45 -08:00
|
|
|
mDecoder->NotifyPlaybackStopped();
|
2011-01-17 16:53:18 -08:00
|
|
|
|
2010-04-01 20:03:07 -07:00
|
|
|
if (IsPlaying()) {
|
2011-04-13 15:12:23 -07:00
|
|
|
mPlayDuration += DurationToUsecs(TimeStamp::Now() - mPlayStartTime);
|
2010-04-01 20:03:07 -07:00
|
|
|
mPlayStartTime = TimeStamp();
|
|
|
|
}
|
2011-07-11 20:39:37 -07:00
|
|
|
// Notify the audio thread, so that it notices that we've stopped playing,
|
|
|
|
// so it can pause audio playback.
|
|
|
|
mDecoder->GetReentrantMonitor().NotifyAll();
|
2011-07-11 20:39:30 -07:00
|
|
|
NS_ASSERTION(!IsPlaying(), "Should report not playing at end of StopPlayback()");
|
2010-04-01 20:03:07 -07:00
|
|
|
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
void MediaDecoderStateMachine::StartPlayback()
|
2010-04-01 20:03:07 -07:00
|
|
|
{
|
2011-07-11 20:39:37 -07:00
|
|
|
LOG(PR_LOG_DEBUG, ("%p StartPlayback()", mDecoder.get()));
|
|
|
|
|
2010-04-01 20:03:07 -07:00
|
|
|
NS_ASSERTION(!IsPlaying(), "Shouldn't be playing when StartPlayback() is called");
|
2011-04-29 12:21:57 -07:00
|
|
|
mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
|
2013-01-13 21:25:02 -08:00
|
|
|
|
2012-12-13 11:42:45 -08:00
|
|
|
mDecoder->NotifyPlaybackStarted();
|
2010-04-01 20:03:07 -07:00
|
|
|
mPlayStartTime = TimeStamp::Now();
|
2011-07-11 20:39:37 -07:00
|
|
|
|
2011-07-11 20:39:30 -07:00
|
|
|
NS_ASSERTION(IsPlaying(), "Should report playing by end of StartPlayback()");
|
2011-07-11 20:39:37 -07:00
|
|
|
if (NS_FAILED(StartAudioThread())) {
|
|
|
|
NS_WARNING("Failed to create audio thread");
|
|
|
|
}
|
2011-04-29 12:21:57 -07:00
|
|
|
mDecoder->GetReentrantMonitor().NotifyAll();
|
2010-04-01 20:03:07 -07:00
|
|
|
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
void MediaDecoderStateMachine::UpdatePlaybackPositionInternal(int64_t aTime)
{
  // Updates mCurrentFrameTime from an absolute media time, extending
  // mEndTime (and notifying the main thread of a duration change) when
  // playback runs past the currently known end time.
  //
  // Fix: the assertion condition accepts both the state machine thread and
  // the decode thread, but the old message claimed state-machine-thread
  // only; the message now matches the condition (and the wording used by
  // StopPlayback/StartDecoding).
  NS_ASSERTION(OnStateMachineThread() || OnDecodeThread(),
               "Should be on state machine or decode thread.");
  mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();

  NS_ASSERTION(mStartTime >= 0, "Should have positive mStartTime");
  mCurrentFrameTime = aTime - mStartTime;
  NS_ASSERTION(mCurrentFrameTime >= 0, "CurrentTime should be positive!");
  if (aTime > mEndTime) {
    NS_ASSERTION(mCurrentFrameTime > GetDuration(),
                 "CurrentTime must be after duration if aTime > endTime!");
    mEndTime = aTime;
    // Duration grew; tell the main thread so the DOM can observe it.
    nsCOMPtr<nsIRunnable> event =
      NS_NewRunnableMethod(mDecoder, &MediaDecoder::DurationChanged);
    NS_DispatchToMainThread(event, NS_DISPATCH_NORMAL);
  }
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
void MediaDecoderStateMachine::UpdatePlaybackPosition(int64_t aTime)
{
  // Advances the logical playback position and fans out the side effects:
  // position-change notification, pending audioavailable events, timed
  // metadata dispatch, and media-fragment end handling.
  UpdatePlaybackPositionInternal(aTime);

  const bool fragmentEnded =
    mFragmentEndTime >= 0 && GetMediaTime() >= mFragmentEndTime;
  if (!mPositionChangeQueued || fragmentEnded) {
    mPositionChangeQueued = true;
    nsCOMPtr<nsIRunnable> event =
      NS_NewRunnableMethod(mDecoder, &MediaDecoder::PlaybackPositionChanged);
    NS_DispatchToMainThread(event, NS_DISPATCH_NORMAL);
  }

  // Notify DOM of any queued up audioavailable events
  mEventManager.DispatchPendingEvents(GetMediaTime());

  mMetadataManager.DispatchMetadataIfNeeded(mDecoder, aTime);

  if (fragmentEnded) {
    StopPlayback();
  }
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
void MediaDecoderStateMachine::ClearPositionChangeFlag()
|
2010-04-01 20:03:07 -07:00
|
|
|
{
|
|
|
|
NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
|
2011-04-29 12:21:57 -07:00
|
|
|
mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
|
2010-04-01 20:03:07 -07:00
|
|
|
|
2011-09-29 16:34:37 -07:00
|
|
|
mPositionChangeQueued = false;
|
2010-04-01 20:03:07 -07:00
|
|
|
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
MediaDecoderOwner::NextFrameStatus MediaDecoderStateMachine::GetNextFrameStatus()
{
  // Reports whether the next frame of media data is ready for presentation,
  // or why it is not.
  ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
  if (IsBuffering() || IsSeeking()) {
    return MediaDecoderOwner::NEXT_FRAME_UNAVAILABLE_BUFFERING;
  }
  if (HaveNextFrameData()) {
    return MediaDecoderOwner::NEXT_FRAME_AVAILABLE;
  }
  return MediaDecoderOwner::NEXT_FRAME_UNAVAILABLE;
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
void MediaDecoderStateMachine::SetVolume(double volume)
|
2010-04-01 20:03:07 -07:00
|
|
|
{
|
|
|
|
NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
|
2011-04-29 12:21:57 -07:00
|
|
|
ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
|
2010-09-05 19:14:50 -07:00
|
|
|
mVolume = volume;
|
2010-04-01 20:03:07 -07:00
|
|
|
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
void MediaDecoderStateMachine::SetAudioCaptured(bool aCaptured)
|
2012-04-29 20:12:42 -07:00
|
|
|
{
|
|
|
|
NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
|
|
|
|
ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
|
2012-12-04 02:59:36 -08:00
|
|
|
if (!mAudioCaptured && aCaptured && !mStopAudioThread) {
|
|
|
|
// Make sure the state machine runs as soon as possible. That will
|
|
|
|
// stop the audio thread.
|
|
|
|
// If mStopAudioThread is true then we're already stopping the audio thread
|
|
|
|
// and since we set mAudioCaptured to true, nothing can start it again.
|
|
|
|
ScheduleStateMachine();
|
2012-04-29 20:12:42 -07:00
|
|
|
}
|
|
|
|
mAudioCaptured = aCaptured;
|
|
|
|
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
double MediaDecoderStateMachine::GetCurrentTime() const
|
2010-04-01 20:03:07 -07:00
|
|
|
{
|
2011-03-23 15:28:57 -07:00
|
|
|
NS_ASSERTION(NS_IsMainThread() ||
|
2011-07-11 20:39:23 -07:00
|
|
|
OnStateMachineThread() ||
|
2011-03-23 15:28:57 -07:00
|
|
|
OnDecodeThread(),
|
|
|
|
"Should be on main, decode, or state machine thread.");
|
2010-04-01 20:03:07 -07:00
|
|
|
|
2011-04-13 15:12:23 -07:00
|
|
|
return static_cast<double>(mCurrentFrameTime) / static_cast<double>(USECS_PER_S);
|
2010-04-01 20:03:07 -07:00
|
|
|
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
int64_t MediaDecoderStateMachine::GetDuration()
{
  // Duration in usecs, or -1 until both media endpoints are known.
  mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();

  if (mStartTime == -1 || mEndTime == -1) {
    return -1;
  }
  return mEndTime - mStartTime;
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
void MediaDecoderStateMachine::SetDuration(int64_t aDuration)
{
  // Sets the media duration. If the start time is already known the end
  // time is placed relative to it; otherwise the start is pinned to zero.
  // A -1 duration (unknown) is ignored.
  NS_ASSERTION(NS_IsMainThread() || OnDecodeThread(),
               "Should be on main or decode thread.");
  mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();

  if (aDuration == -1) {
    return;
  }

  if (mStartTime == -1) {
    mStartTime = 0;
    mEndTime = aDuration;
  } else {
    mEndTime = mStartTime + aDuration;
  }
}
|
|
|
|
|
2012-11-19 07:11:21 -08:00
|
|
|
void MediaDecoderStateMachine::SetMediaEndTime(int64_t aEndTime)
{
  // Directly overwrites the media end time. Decode-thread only; monitor
  // must be held.
  NS_ASSERTION(OnDecodeThread(), "Should be on decode thread");
  mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();

  mEndTime = aEndTime;
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
void MediaDecoderStateMachine::SetFragmentEndTime(int64_t aEndTime)
{
  // Records the media-fragment end time, translated into absolute media
  // time; negative values (no fragment end) are stored unchanged.
  mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();

  mFragmentEndTime = aEndTime < 0 ? aEndTime : aEndTime + mStartTime;
}
|
|
|
|
|
2012-11-30 05:17:54 -08:00
|
|
|
void MediaDecoderStateMachine::SetTransportSeekable(bool aTransportSeekable)
|
2010-04-01 20:03:07 -07:00
|
|
|
{
|
2012-11-30 05:17:54 -08:00
|
|
|
NS_ASSERTION(NS_IsMainThread() || OnDecodeThread(),
|
|
|
|
"Should be on main thread or the decoder thread.");
|
2011-04-29 12:21:57 -07:00
|
|
|
mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
|
2010-04-01 20:03:07 -07:00
|
|
|
|
2012-11-30 05:17:54 -08:00
|
|
|
mTransportSeekable = aTransportSeekable;
|
|
|
|
}
|
|
|
|
|
|
|
|
// Records whether the media format itself supports seeking.
void MediaDecoderStateMachine::SetMediaSeekable(bool aMediaSeekable)
{
  NS_ASSERTION(NS_IsMainThread() || OnDecodeThread(),
               "Should be on main thread or the decoder thread.");

  // NOTE(review): unlike SetTransportSeekable, this setter does not assert
  // that the decoder monitor is held — confirm whether callers hold it and
  // whether the assert was omitted intentionally.
  mMediaSeekable = aMediaSeekable;
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
void MediaDecoderStateMachine::Shutdown()
|
2010-04-01 20:03:07 -07:00
|
|
|
{
|
|
|
|
NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
|
|
|
|
|
|
|
|
// Once we've entered the shutdown state here there's no going back.
|
2011-04-29 12:21:57 -07:00
|
|
|
ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
|
2010-04-01 20:03:07 -07:00
|
|
|
|
|
|
|
// Change state before issuing shutdown request to threads so those
|
|
|
|
// threads can start exiting cleanly during the Shutdown call.
|
2011-07-11 20:39:34 -07:00
|
|
|
LOG(PR_LOG_DEBUG, ("%p Changed state to SHUTDOWN", mDecoder.get()));
|
2011-07-11 20:39:32 -07:00
|
|
|
ScheduleStateMachine();
|
2010-04-01 20:03:07 -07:00
|
|
|
mState = DECODER_STATE_SHUTDOWN;
|
2011-04-29 12:21:57 -07:00
|
|
|
mDecoder->GetReentrantMonitor().NotifyAll();
|
2010-04-01 20:03:07 -07:00
|
|
|
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
void MediaDecoderStateMachine::StartDecoding()
|
2011-03-23 15:28:57 -07:00
|
|
|
{
|
2011-07-11 20:39:23 -07:00
|
|
|
NS_ASSERTION(OnStateMachineThread() || OnDecodeThread(),
|
|
|
|
"Should be on state machine or decode thread.");
|
2011-04-29 12:21:57 -07:00
|
|
|
ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
|
2011-03-23 15:28:57 -07:00
|
|
|
if (mState != DECODER_STATE_DECODING) {
|
|
|
|
mDecodeStartTime = TimeStamp::Now();
|
|
|
|
}
|
|
|
|
mState = DECODER_STATE_DECODING;
|
2011-07-11 20:39:32 -07:00
|
|
|
ScheduleStateMachine();
|
2011-03-23 15:28:57 -07:00
|
|
|
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
void MediaDecoderStateMachine::Play()
|
2010-04-01 20:03:07 -07:00
|
|
|
{
|
|
|
|
NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
|
2011-03-23 15:28:57 -07:00
|
|
|
// When asked to play, switch to decoding state only if
|
|
|
|
// we are currently buffering. In other cases, we'll start playing anyway
|
|
|
|
// when the state machine notices the decoder's state change to PLAYING.
|
2011-04-29 12:21:57 -07:00
|
|
|
ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
|
2010-04-01 20:03:07 -07:00
|
|
|
if (mState == DECODER_STATE_BUFFERING) {
|
2011-07-11 20:39:34 -07:00
|
|
|
LOG(PR_LOG_DEBUG, ("%p Changed state from BUFFERING to DECODING", mDecoder.get()));
|
2010-04-01 20:03:07 -07:00
|
|
|
mState = DECODER_STATE_DECODING;
|
2011-03-23 15:28:57 -07:00
|
|
|
mDecodeStartTime = TimeStamp::Now();
|
2010-04-01 20:03:07 -07:00
|
|
|
}
|
2011-07-11 20:39:32 -07:00
|
|
|
ScheduleStateMachine();
|
2010-04-01 20:03:07 -07:00
|
|
|
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
void MediaDecoderStateMachine::ResetPlayback()
|
2010-04-01 20:03:07 -07:00
|
|
|
{
|
2011-07-11 20:39:25 -07:00
|
|
|
NS_ASSERTION(OnDecodeThread(), "Should be on decode thread.");
|
2010-05-30 21:02:00 -07:00
|
|
|
mVideoFrameEndTime = -1;
|
2010-04-01 20:03:07 -07:00
|
|
|
mAudioStartTime = -1;
|
|
|
|
mAudioEndTime = -1;
|
2011-09-29 16:34:37 -07:00
|
|
|
mAudioCompleted = false;
|
2010-04-01 20:03:07 -07:00
|
|
|
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
void MediaDecoderStateMachine::NotifyDataArrived(const char* aBuffer,
|
2012-08-22 08:56:38 -07:00
|
|
|
uint32_t aLength,
|
|
|
|
int64_t aOffset)
|
2012-01-05 22:40:51 -08:00
|
|
|
{
|
|
|
|
NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
|
|
|
|
mReader->NotifyDataArrived(aBuffer, aLength, aOffset);
|
|
|
|
|
|
|
|
// While playing an unseekable stream of unknown duration, mEndTime is
|
|
|
|
// updated (in AdvanceFrame()) as we play. But if data is being downloaded
|
|
|
|
// faster than played, mEndTime won't reflect the end of playable data
|
|
|
|
// since we haven't played the frame at the end of buffered data. So update
|
|
|
|
// mEndTime here as new data is downloaded to prevent such a lag.
|
2013-03-02 11:14:44 -08:00
|
|
|
TimeRanges buffered;
|
2012-01-05 22:40:51 -08:00
|
|
|
if (mDecoder->IsInfinite() &&
|
|
|
|
NS_SUCCEEDED(mDecoder->GetBuffered(&buffered)))
|
|
|
|
{
|
2012-08-22 08:56:38 -07:00
|
|
|
uint32_t length = 0;
|
2012-01-05 22:40:51 -08:00
|
|
|
buffered.GetLength(&length);
|
|
|
|
if (length) {
|
|
|
|
double end = 0;
|
|
|
|
buffered.End(length - 1, &end);
|
|
|
|
ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
|
2013-01-15 04:22:03 -08:00
|
|
|
mEndTime = std::max<int64_t>(mEndTime, end * USECS_PER_S);
|
2012-01-05 22:40:51 -08:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
// Begins a seek to aTime (seconds). Converts to usecs, clamps into the
// known media range, switches to DECODER_STATE_SEEKING, and schedules the
// state machine to perform the actual seek. Main-thread only.
void MediaDecoderStateMachine::Seek(double aTime)
{
  NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
  ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());

  // We need to be able to seek both at a transport level and at a media level
  // to seek.
  if (!mMediaSeekable) {
    return;
  }
  // MediaDecoder::mPlayState should be SEEKING while we seek, and
  // in that case MediaDecoder shouldn't be calling us.
  NS_ASSERTION(mState != DECODER_STATE_SEEKING,
               "We shouldn't already be seeking");
  NS_ASSERTION(mState >= DECODER_STATE_DECODING,
               "We should have loaded metadata");
  double t = aTime * static_cast<double>(USECS_PER_S);
  if (t > INT64_MAX) {
    // Prevent integer overflow.
    return;
  }

  mSeekTime = static_cast<int64_t>(t) + mStartTime;
  NS_ASSERTION(mSeekTime >= mStartTime && mSeekTime <= mEndTime,
               "Can only seek in range [0,duration]");

  // Bound the seek time to be inside the media range.
  NS_ASSERTION(mStartTime != -1, "Should know start time by now");
  NS_ASSERTION(mEndTime != -1, "Should know end time by now");
  mSeekTime = std::min(mSeekTime, mEndTime);
  mSeekTime = std::max(mStartTime, mSeekTime);
  // mBasePosition is relative to mStartTime.
  mBasePosition = mSeekTime - mStartTime;
  LOG(PR_LOG_DEBUG, ("%p Changed state to SEEKING (to %f)", mDecoder.get(), aTime));
  mState = DECODER_STATE_SEEKING;
  if (mDecoder->GetDecodedStream()) {
    // Captured-output streams must be rebuilt at the new position.
    mDecoder->RecreateDecodedStream(mSeekTime - mStartTime);
  }
  ScheduleStateMachine();
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
// Stops (or cancels the pending creation of) the decode thread and blocks
// until it has shut down. The decoder monitor is exited around the
// Shutdown() call so the decode thread can finish any work that needs the
// monitor. State-machine-thread only.
void MediaDecoderStateMachine::StopDecodeThread()
{
  NS_ASSERTION(OnStateMachineThread(), "Should be on state machine thread.");
  mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
  if (mRequestedNewDecodeThread) {
    // We've requested that the decode be created, but it hasn't been yet.
    // Cancel that request.
    NS_ASSERTION(!mDecodeThread,
      "Shouldn't have a decode thread until after request processed");
    StateMachineTracker::Instance().CancelCreateDecodeThread(this);
    mRequestedNewDecodeThread = false;
  }
  // Signal the decode loop to exit, then wake it.
  mStopDecodeThread = true;
  mDecoder->GetReentrantMonitor().NotifyAll();
  if (mDecodeThread) {
    LOG(PR_LOG_DEBUG, ("%p Shutdown decode thread", mDecoder.get()));
    {
      // Drop the monitor while joining, to avoid deadlocking with the
      // decode thread if it needs the monitor to finish.
      ReentrantMonitorAutoExit exitMon(mDecoder->GetReentrantMonitor());
      mDecodeThread->Shutdown();
      StateMachineTracker::Instance().NoteDecodeThreadDestroyed();
    }
    mDecodeThread = nullptr;
    mDecodeThreadIdle = false;
  }
  NS_ASSERTION(!mRequestedNewDecodeThread,
    "Any pending requests for decode threads must be canceled and unflagged");
  NS_ASSERTION(!StateMachineTracker::Instance().IsQueued(this),
    "Any pending requests for decode threads must be canceled");
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
// Stops the audio thread and blocks until it has shut down, then pushes any
// pending data to capturing MediaStreams. Idempotent: returns immediately
// if a stop is already in progress. Monitor must be held.
void MediaDecoderStateMachine::StopAudioThread()
{
  NS_ASSERTION(OnDecodeThread() ||
               OnStateMachineThread(), "Should be on decode thread or state machine thread");
  mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();

  if (mStopAudioThread) {
    // Nothing to do, since the thread is already stopping
    return;
  }

  // Signal the audio loop to exit, then wake it.
  mStopAudioThread = true;
  mDecoder->GetReentrantMonitor().NotifyAll();
  if (mAudioThread) {
    LOG(PR_LOG_DEBUG, ("%p Shutdown audio thread", mDecoder.get()));
    {
      // Drop the monitor while joining so the audio thread can finish.
      ReentrantMonitorAutoExit exitMon(mDecoder->GetReentrantMonitor());
      mAudioThread->Shutdown();
    }
    mAudioThread = nullptr;
    // Now that the audio thread is dead, try sending data to our MediaStream(s).
    // That may have been waiting for the audio thread to stop.
    SendStreamData();
  }
}
|
|
|
|
|
|
|
|
// Ensures decoding will proceed: wakes an existing idle decode thread, or
// asks the StateMachineTracker to create one (the tracker enforces the
// global decode-thread limit). No-op once playback has COMPLETED.
// State-machine-thread only; monitor must be held.
nsresult
MediaDecoderStateMachine::ScheduleDecodeThread()
{
  NS_ASSERTION(OnStateMachineThread(), "Should be on state machine thread.");
  mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();

  mStopDecodeThread = false;
  if (mState >= DECODER_STATE_COMPLETED) {
    return NS_OK;
  }
  if (mDecodeThread) {
    NS_ASSERTION(!mRequestedNewDecodeThread,
      "Shouldn't have requested new decode thread when we have a decode thread");
    // We already have a decode thread...
    if (mDecodeThreadIdle) {
      // ... and it's not been shutdown yet, wake it up.
      nsCOMPtr<nsIRunnable> event =
        NS_NewRunnableMethod(this, &MediaDecoderStateMachine::DecodeThreadRun);
      mDecodeThread->Dispatch(event, NS_DISPATCH_NORMAL);
      mDecodeThreadIdle = false;
    }
    return NS_OK;
  } else if (!mRequestedNewDecodeThread) {
    // We don't already have a decode thread, request a new one.
    mRequestedNewDecodeThread = true;
    // Drop the monitor while calling into the tracker to avoid lock-order
    // issues with its internal locking.
    ReentrantMonitorAutoExit mon(mDecoder->GetReentrantMonitor());
    StateMachineTracker::Instance().RequestCreateDecodeThread(this);
  }
  return NS_OK;
}
|
2011-11-07 17:38:17 -08:00
|
|
|
|
2012-01-19 10:30:29 -08:00
|
|
|
// Actually creates the decode thread and dispatches DecodeThreadRun to it.
// Called (via the StateMachineTracker) only after a create request made by
// ScheduleDecodeThread has been granted. On thread-creation failure the
// media element is notified of a decode error.
nsresult
MediaDecoderStateMachine::StartDecodeThread()
{
  NS_ASSERTION(StateMachineTracker::Instance().GetDecodeThreadCount() <
               StateMachineTracker::MAX_DECODE_THREADS,
               "Should not have reached decode thread limit");

  ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
  NS_ASSERTION(!StateMachineTracker::Instance().IsQueued(this),
               "Should not already have a pending request for a new decode thread.");
  NS_ASSERTION(OnStateMachineThread(), "Should be on state machine thread.");
  NS_ASSERTION(!mDecodeThread, "Should not have decode thread yet");
  NS_ASSERTION(mRequestedNewDecodeThread, "Should have requested this...");

  // The request is being fulfilled now; clear the pending flag.
  mRequestedNewDecodeThread = false;

  nsresult rv = NS_NewNamedThread("Media Decode",
                                  getter_AddRefs(mDecodeThread),
                                  nullptr,
                                  MEDIA_THREAD_STACK_SIZE);
  if (NS_FAILED(rv)) {
    // Give up, report error to media element.
    nsCOMPtr<nsIRunnable> event =
      NS_NewRunnableMethod(mDecoder, &MediaDecoder::DecodeError);
    NS_DispatchToMainThread(event, NS_DISPATCH_NORMAL);
    return rv;
  }

  nsCOMPtr<nsIRunnable> event =
    NS_NewRunnableMethod(this, &MediaDecoderStateMachine::DecodeThreadRun);
  mDecodeThread->Dispatch(event, NS_DISPATCH_NORMAL);
  mDecodeThreadIdle = false;

  return NS_OK;
}
|
|
|
|
|
|
|
|
// Starts the audio playback thread running AudioLoop, unless audio is being
// captured to a MediaStream (then no hardware audio thread is wanted) or
// there is no audio track. On thread-creation failure the state machine is
// put into SHUTDOWN. Monitor must be held.
nsresult
MediaDecoderStateMachine::StartAudioThread()
{
  NS_ASSERTION(OnStateMachineThread() || OnDecodeThread(),
               "Should be on state machine or decode thread.");
  mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
  if (mAudioCaptured) {
    NS_ASSERTION(mStopAudioThread, "mStopAudioThread must always be true if audio is captured");
    return NS_OK;
  }

  mStopAudioThread = false;
  if (HasAudio() && !mAudioThread) {
    nsresult rv = NS_NewNamedThread("Media Audio",
                                    getter_AddRefs(mAudioThread),
                                    nullptr,
                                    MEDIA_THREAD_STACK_SIZE);
    if (NS_FAILED(rv)) {
      LOG(PR_LOG_DEBUG, ("%p Changed state to SHUTDOWN because failed to create audio thread", mDecoder.get()));
      mState = DECODER_STATE_SHUTDOWN;
      return rv;
    }

    nsCOMPtr<nsIRunnable> event =
      NS_NewRunnableMethod(this, &MediaDecoderStateMachine::AudioLoop);
    mAudioThread->Dispatch(event, NS_DISPATCH_NORMAL);
  }
  return NS_OK;
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
int64_t MediaDecoderStateMachine::AudioDecodedUsecs() const
{
  NS_ASSERTION(HasAudio(),
               "Should only call AudioDecodedUsecs() when we have audio");
  // The amount of audio we have decoded is the amount of audio data we've
  // already decoded and pushed to the hardware, plus the amount of audio
  // data waiting to be pushed to the hardware.
  const int64_t pushed =
    (mAudioEndTime != -1) ? (mAudioEndTime - GetMediaTime()) : 0;
  return pushed + mReader->AudioQueue().Duration();
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
bool MediaDecoderStateMachine::HasLowDecodedData(int64_t aAudioUsecs) const
{
  mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
  // We consider ourselves low on decoded data if we're low on audio,
  // provided we've not decoded to the end of the audio stream, or
  // if we're only playing video and we're low on video frames, provided
  // we've not decoded to the end of the video stream.
  if (HasAudio() &&
      !mReader->AudioQueue().IsFinished() &&
      AudioDecodedUsecs() < aAudioUsecs) {
    return true;
  }
  return !HasAudio() &&
         HasVideo() &&
         !mReader->VideoQueue().IsFinished() &&
         static_cast<uint32_t>(mReader->VideoQueue().GetSize()) < LOW_VIDEO_FRAMES;
}
|
2010-09-14 16:24:47 -07:00
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
bool MediaDecoderStateMachine::HasLowUndecodedData() const
|
2011-03-23 15:28:57 -07:00
|
|
|
{
|
2011-09-26 17:25:41 -07:00
|
|
|
return GetUndecodedData() < mLowDataThresholdUsecs;
|
2011-03-23 15:28:57 -07:00
|
|
|
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
int64_t MediaDecoderStateMachine::GetUndecodedData() const
|
2011-03-23 15:28:57 -07:00
|
|
|
{
|
2011-04-29 12:21:57 -07:00
|
|
|
mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
|
2011-03-23 15:28:57 -07:00
|
|
|
NS_ASSERTION(mState > DECODER_STATE_DECODING_METADATA,
|
|
|
|
"Must have loaded metadata for GetBuffered() to work");
|
2013-03-02 11:14:44 -08:00
|
|
|
TimeRanges buffered;
|
2011-03-23 15:28:57 -07:00
|
|
|
|
|
|
|
nsresult res = mDecoder->GetBuffered(&buffered);
|
|
|
|
NS_ENSURE_SUCCESS(res, 0);
|
|
|
|
double currentTime = GetCurrentTime();
|
|
|
|
|
|
|
|
nsIDOMTimeRanges* r = static_cast<nsIDOMTimeRanges*>(&buffered);
|
2012-08-22 08:56:38 -07:00
|
|
|
uint32_t length = 0;
|
2011-03-23 15:28:57 -07:00
|
|
|
res = r->GetLength(&length);
|
|
|
|
NS_ENSURE_SUCCESS(res, 0);
|
|
|
|
|
2012-08-22 08:56:38 -07:00
|
|
|
for (uint32_t index = 0; index < length; ++index) {
|
2011-03-23 15:28:57 -07:00
|
|
|
double start, end;
|
|
|
|
res = r->Start(index, &start);
|
|
|
|
NS_ENSURE_SUCCESS(res, 0);
|
|
|
|
|
|
|
|
res = r->End(index, &end);
|
|
|
|
NS_ENSURE_SUCCESS(res, 0);
|
|
|
|
|
|
|
|
if (start <= currentTime && end >= currentTime) {
|
2012-08-22 08:56:38 -07:00
|
|
|
return static_cast<int64_t>((end - currentTime) * USECS_PER_S);
|
2011-03-23 15:28:57 -07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
return 0;
|
|
|
|
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
void MediaDecoderStateMachine::SetFrameBufferLength(uint32_t aLength)
|
2011-04-11 14:15:45 -07:00
|
|
|
{
|
|
|
|
NS_ASSERTION(aLength >= 512 && aLength <= 16384,
|
|
|
|
"The length must be between 512 and 16384");
|
2011-04-29 12:21:57 -07:00
|
|
|
mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
|
2011-04-11 14:15:45 -07:00
|
|
|
mEventManager.SetSignalBufferLength(aLength);
|
|
|
|
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
// Reads the stream metadata on the decode thread, renders the first video
// frame if one is found, notifies the main thread that metadata has loaded,
// and transitions the state machine from DECODING_METADATA to DECODING.
// Returns NS_ERROR_FAILURE if the metadata read fails, the stream has
// neither audio nor video, or we've been shut down meanwhile.
nsresult MediaDecoderStateMachine::DecodeMetadata()
{
  NS_ASSERTION(OnDecodeThread(), "Should be on decode thread.");
  mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
  NS_ASSERTION(mState == DECODER_STATE_DECODING_METADATA,
               "Only call when in metadata decoding state");

  LOG(PR_LOG_DEBUG, ("%p Decoding Media Headers", mDecoder.get()));
  nsresult res;
  VideoInfo info;
  MetadataTags* tags;
  {
    // Drop the monitor while reading: ReadMetadata can block on I/O and
    // must not stall threads waiting on the decoder monitor.
    ReentrantMonitorAutoExit exitMon(mDecoder->GetReentrantMonitor());
    res = mReader->ReadMetadata(&info, &tags);
  }
  mInfo = info;

  if (NS_FAILED(res) || (!info.mHasVideo && !info.mHasAudio)) {
    // Dispatch the event to call DecodeError synchronously. This ensures
    // we're in shutdown state by the time we exit the decode thread.
    // If we just moved to shutdown state here on the decode thread, we may
    // cause the state machine to shutdown/free memory without closing its
    // media stream properly, and we'll get callbacks from the media stream
    // causing a crash. Note the state machine shutdown joins this decode
    // thread during shutdown (and other state machines can run on the state
    // machine thread while the join is waiting), so it's safe to do this
    // synchronously.
    nsCOMPtr<nsIRunnable> event =
      NS_NewRunnableMethod(mDecoder, &MediaDecoder::DecodeError);
    ReentrantMonitorAutoExit exitMon(mDecoder->GetReentrantMonitor());
    NS_DispatchToMainThread(event, NS_DISPATCH_SYNC);
    return NS_ERROR_FAILURE;
  }
  mDecoder->StartProgressUpdates();
  // Remember whether the container itself declared a duration; used later
  // to decide whether durations discovered during playback are news.
  mGotDurationFromMetaData = (GetDuration() != -1);

  // FindStartTime may return the first video frame it decoded while
  // establishing the stream's start time; paint it immediately so the
  // element has a frame to show (monitor dropped: rendering asserts the
  // monitor is NOT held).
  VideoData* videoData = FindStartTime();
  if (videoData) {
    ReentrantMonitorAutoExit exitMon(mDecoder->GetReentrantMonitor());
    RenderVideoFrame(videoData, TimeStamp::Now());
  }

  // State may have changed while the monitor was dropped above.
  if (mState == DECODER_STATE_SHUTDOWN) {
    return NS_ERROR_FAILURE;
  }

  NS_ASSERTION(mStartTime != -1, "Must have start time");
  MOZ_ASSERT((!HasVideo() && !HasAudio()) ||
             !(mMediaSeekable && mTransportSeekable) || mEndTime != -1,
             "Active seekable media should have end time");
  MOZ_ASSERT(!(mMediaSeekable && mTransportSeekable) ||
             GetDuration() != -1, "Seekable media should have duration");
  LOG(PR_LOG_DEBUG, ("%p Media goes from %lld to %lld (duration %lld)"
                     " transportSeekable=%d, mediaSeekable=%d",
                     mDecoder.get(), mStartTime, mEndTime, GetDuration(),
                     mTransportSeekable, mMediaSeekable));

  // Inform the element that we've loaded the metadata and the first frame,
  // setting the default framebuffer size for audioavailable events. Also,
  // if there is audio, let the MozAudioAvailable event manager know about
  // the metadata.
  if (HasAudio()) {
    mEventManager.Init(mInfo.mAudioChannels, mInfo.mAudioRate);
    // Set the buffer length at the decoder level so the value can be
    // retrieved via the media element method. RequestFrameBufferLength
    // will call MediaDecoderStateMachine::SetFrameBufferLength().
    uint32_t frameBufferLength = mInfo.mAudioChannels * FRAMEBUFFER_LENGTH_PER_CHANNEL;
    mDecoder->RequestFrameBufferLength(frameBufferLength);
  }

  // Notify the main thread; the runner takes ownership of |tags|.
  nsCOMPtr<nsIRunnable> metadataLoadedEvent =
    new AudioMetadataEventRunner(mDecoder,
                                 mInfo.mAudioChannels,
                                 mInfo.mAudioRate,
                                 HasAudio(),
                                 HasVideo(),
                                 tags);
  NS_DispatchToMainThread(metadataLoadedEvent, NS_DISPATCH_NORMAL);

  // Only advance to DECODING if nothing (e.g. a seek or shutdown) changed
  // our state while we were working.
  if (mState == DECODER_STATE_DECODING_METADATA) {
    LOG(PR_LOG_DEBUG, ("%p Changed state from DECODING_METADATA to DECODING", mDecoder.get()));
    StartDecoding();
  }

  // If the element was already told to play before metadata finished
  // loading, begin playback now.
  if ((mState == DECODER_STATE_DECODING || mState == DECODER_STATE_COMPLETED) &&
      mDecoder->GetState() == MediaDecoder::PLAY_STATE_PLAYING &&
      !IsPlaying())
  {
    StartPlayback();
  }

  return NS_OK;
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
// Performs a seek on the decode thread: stops playback, asks the reader to
// seek to mSeekTime, repositions the audio/video start times from the first
// decoded samples after the seek, notifies the main thread (SeekingStarted /
// SeekingStopped[AtEnd]) and transitions to DECODING or COMPLETED.
void MediaDecoderStateMachine::DecodeSeek()
{
  NS_ASSERTION(OnDecodeThread(), "Should be on decode thread.");
  mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
  NS_ASSERTION(mState == DECODER_STATE_SEEKING,
               "Only call when in seeking state");

  // A seek invalidates any decode-throttling decisions made earlier.
  mDidThrottleAudioDecoding = false;
  mDidThrottleVideoDecoding = false;

  // During the seek, don't have a lock on the decoder state,
  // otherwise long seek operations can block the main thread.
  // The events dispatched to the main thread are SYNC calls.
  // These calls are made outside of the decode monitor lock so
  // it is safe for the main thread to makes calls that acquire
  // the lock since it won't deadlock. We check the state when
  // acquiring the lock again in case shutdown has occurred
  // during the time when we didn't have the lock.
  int64_t seekTime = mSeekTime;
  mDecoder->StopProgressUpdates();

  bool currentTimeChanged = false;
  int64_t mediaTime = GetMediaTime();
  if (mediaTime != seekTime) {
    currentTimeChanged = true;
    // Stop playback now to ensure that while we're outside the monitor
    // dispatching SeekingStarted, playback doesn't advance and mess with
    // mCurrentFrameTime that we've setting to seekTime here.
    StopPlayback();
    UpdatePlaybackPositionInternal(seekTime);
  }

  // SeekingStarted will do a UpdateReadyStateForData which will
  // inform the element and its users that we have no frames
  // to display
  {
    ReentrantMonitorAutoExit exitMon(mDecoder->GetReentrantMonitor());
    nsCOMPtr<nsIRunnable> startEvent =
      NS_NewRunnableMethod(mDecoder, &MediaDecoder::SeekingStarted);
    NS_DispatchToMainThread(startEvent, NS_DISPATCH_SYNC);
  }

  if (currentTimeChanged) {
    // The seek target is different than the current playback position,
    // we'll need to seek the playback position, so shutdown our decode
    // and audio threads.
    StopAudioThread();
    ResetPlayback();
    nsresult res;
    {
      ReentrantMonitorAutoExit exitMon(mDecoder->GetReentrantMonitor());
      // Now perform the seek. We must not hold the state machine monitor
      // while we seek, since the seek reads, which could block on I/O.
      res = mReader->Seek(seekTime,
                          mStartTime,
                          mEndTime,
                          mediaTime);
    }
    if (NS_SUCCEEDED(res)) {
      // Re-derive the audio start position from the first audio block the
      // reader decoded after the seek; it may begin slightly before the
      // seek target.
      AudioData* audio = HasAudio() ? mReader->AudioQueue().PeekFront() : nullptr;
      NS_ASSERTION(!audio || (audio->mTime <= seekTime &&
                              seekTime <= audio->mTime + audio->mDuration),
                   "Seek target should lie inside the first audio block after seek");
      int64_t startTime = (audio && audio->mTime < seekTime) ? audio->mTime : seekTime;
      mAudioStartTime = startTime;
      mPlayDuration = startTime - mStartTime;
      if (HasVideo()) {
        VideoData* video = mReader->VideoQueue().PeekFront();
        if (video) {
          NS_ASSERTION((video->mTime <= seekTime && seekTime <= video->mEndTime) ||
                       mReader->VideoQueue().IsFinished(),
                       "Seek target should lie inside the first frame after seek, unless it's the last frame.");
          {
            // Paint the first post-seek frame (monitor must be dropped:
            // RenderVideoFrame asserts it is not held).
            ReentrantMonitorAutoExit exitMon(mDecoder->GetReentrantMonitor());
            RenderVideoFrame(video, TimeStamp::Now());
          }
          nsCOMPtr<nsIRunnable> event =
            NS_NewRunnableMethod(mDecoder, &MediaDecoder::Invalidate);
          NS_DispatchToMainThread(event, NS_DISPATCH_NORMAL);
        }
      }
    }
  }
  mDecoder->StartProgressUpdates();
  // Shutdown may have happened while the monitor was dropped above.
  if (mState == DECODER_STATE_SHUTDOWN)
    return;

  // Try to decode another frame to detect if we're at the end...
  LOG(PR_LOG_DEBUG, ("%p Seek completed, mCurrentFrameTime=%lld\n",
                     mDecoder.get(), mCurrentFrameTime));

  // Change state to DECODING or COMPLETED now. SeekingStopped will
  // call MediaDecoderStateMachine::Seek to reset our state to SEEKING
  // if we need to seek again.

  nsCOMPtr<nsIRunnable> stopEvent;
  // A resource of unknown length (-1) is treated as a live stream.
  bool isLiveStream = mDecoder->GetResource()->GetLength() == -1;
  if (GetMediaTime() == mEndTime && !isLiveStream) {
    // Seeked to end of media, move to COMPLETED state. Note we don't do
    // this if we're playing a live stream, since the end of media will advance
    // once we download more data!
    LOG(PR_LOG_DEBUG, ("%p Changed state from SEEKING (to %lld) to COMPLETED",
                       mDecoder.get(), seekTime));
    stopEvent = NS_NewRunnableMethod(mDecoder, &MediaDecoder::SeekingStoppedAtEnd);
    mState = DECODER_STATE_COMPLETED;
  } else {
    LOG(PR_LOG_DEBUG, ("%p Changed state from SEEKING (to %lld) to DECODING",
                       mDecoder.get(), seekTime));
    stopEvent = NS_NewRunnableMethod(mDecoder, &MediaDecoder::SeekingStopped);
    StartDecoding();
  }
  {
    // Notify the main thread synchronously, with the monitor dropped so
    // the main thread can re-enter us without deadlocking.
    ReentrantMonitorAutoExit exitMon(mDecoder->GetReentrantMonitor());
    NS_DispatchToMainThread(stopEvent, NS_DISPATCH_SYNC);
  }

  // Reset quick buffering status. This ensures that if we began the
  // seek while quick-buffering, we won't bypass quick buffering mode
  // if we need to buffer after the seek.
  mQuickBuffering = false;

  ScheduleStateMachine();
}
|
|
|
|
|
2011-07-11 20:39:34 -07:00
|
|
|
// Runnable to dispose of the decoder and state machine on the main thread.
|
|
|
|
// Runnable that breaks the decoder <-> state machine reference cycle and
// releases both objects on the main thread. Constructed with ownership
// transferred in (already_AddRefed) so no refcounting happens off-main-thread.
class nsDecoderDisposeEvent : public nsRunnable {
public:
  nsDecoderDisposeEvent(already_AddRefed<MediaDecoder> aDecoder,
                        already_AddRefed<MediaDecoderStateMachine> aStateMachine)
    : mDecoder(aDecoder), mStateMachine(aStateMachine) {}
  NS_IMETHOD Run() {
    NS_ASSERTION(NS_IsMainThread(), "Must be on main thread.");
    // Drop each side's reference to the other before releasing our own
    // references, so destruction happens here on the main thread.
    mStateMachine->ReleaseDecoder();
    mDecoder->ReleaseStateMachine();
    mStateMachine = nullptr;
    mDecoder = nullptr;
    return NS_OK;
  }
private:
  nsRefPtr<MediaDecoder> mDecoder;
  nsCOMPtr<MediaDecoderStateMachine> mStateMachine;
};
|
|
|
|
|
|
|
|
// Runnable which dispatches an event to the main thread to dispose of the
|
|
|
|
// decoder and state machine. This runs on the state machine thread after
|
|
|
|
// the state machine has shutdown, and all events for that state machine have
|
|
|
|
// finished running.
|
|
|
|
// Runnable which dispatches an event to the main thread to dispose of the
// decoder and state machine. This runs on the state machine thread after
// the state machine has shutdown, and all events for that state machine have
// finished running.
class nsDispatchDisposeEvent : public nsRunnable {
public:
  nsDispatchDisposeEvent(MediaDecoder* aDecoder,
                         MediaDecoderStateMachine* aStateMachine)
    : mDecoder(aDecoder), mStateMachine(aStateMachine) {}
  NS_IMETHOD Run() {
    // Hand our references over (forget()) to the dispose event so the
    // actual releases happen on the main thread, not here.
    NS_DispatchToMainThread(new nsDecoderDisposeEvent(mDecoder.forget(),
                                                      mStateMachine.forget()));
    return NS_OK;
  }
private:
  nsRefPtr<MediaDecoder> mDecoder;
  nsCOMPtr<MediaDecoderStateMachine> mStateMachine;
};
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
// The core state-machine step, run with the decoder monitor held. Inspects
// mState and performs one iteration of work for that state (thread
// management, playback start/stop, buffering checks, frame advancement).
// Returns NS_OK unless the decode thread could not be scheduled.
nsresult MediaDecoderStateMachine::RunStateMachine()
{
  mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();

  MediaResource* resource = mDecoder->GetResource();
  NS_ENSURE_TRUE(resource, NS_ERROR_NULL_POINTER);

  switch (mState) {
    case DECODER_STATE_SHUTDOWN: {
      if (IsPlaying()) {
        StopPlayback();
      }
      StopAudioThread();
      // If mAudioThread is non-null after StopAudioThread completes, we are
      // running in a nested event loop waiting for Shutdown() on
      // mAudioThread to complete. Return to the event loop and let it
      // finish processing before continuing with shutdown.
      if (mAudioThread) {
        MOZ_ASSERT(mStopAudioThread);
        return NS_OK;
      }
      StopDecodeThread();
      // Now that those threads are stopped, there's no possibility of
      // mPendingWakeDecoder being needed again. Revoke it.
      mPendingWakeDecoder = nullptr;
      NS_ASSERTION(mState == DECODER_STATE_SHUTDOWN,
                   "How did we escape from the shutdown state?");
      // We must daisy-chain these events to destroy the decoder. We must
      // destroy the decoder on the main thread, but we can't destroy the
      // decoder while this thread holds the decoder monitor. We can't
      // dispatch an event to the main thread to destroy the decoder from
      // here, as the event may run before the dispatch returns, and we
      // hold the decoder monitor here. We also want to guarantee that the
      // state machine is destroyed on the main thread, and so the
      // event runner running this function (which holds a reference to the
      // state machine) needs to finish and be released in order to allow
      // that. So we dispatch an event to run after this event runner has
      // finished and released its monitor/references. That event then will
      // dispatch an event to the main thread to release the decoder and
      // state machine.
      NS_DispatchToCurrentThread(new nsDispatchDisposeEvent(mDecoder, this));
      return NS_OK;
    }

    case DECODER_STATE_DECODING_METADATA: {
      // Ensure we have a decode thread to decode metadata.
      return ScheduleDecodeThread();
    }

    case DECODER_STATE_DECODING: {
      if (mDecoder->GetState() != MediaDecoder::PLAY_STATE_PLAYING &&
          IsPlaying())
      {
        // We're playing, but the element/decoder is in paused state. Stop
        // playing! Note we do this before StopDecodeThread() below because
        // that blocks this state machine's execution, and can cause a
        // perceptible delay between the pause command, and playback actually
        // pausing.
        StopPlayback();
      }

      if (mDecoder->GetState() == MediaDecoder::PLAY_STATE_PLAYING &&
          !IsPlaying()) {
        // We are playing, but the state machine does not know it yet. Tell it
        // that it is, so that the clock can be properly queried.
        StartPlayback();
      }

      if (IsPausedAndDecoderWaiting()) {
        // The decode buffers are full, and playback is paused. Shutdown the
        // decode thread.
        StopDecodeThread();
        return NS_OK;
      }

      // We're playing and/or our decode buffers aren't full. Ensure we have
      // an active decode thread.
      if (NS_FAILED(ScheduleDecodeThread())) {
        NS_WARNING("Failed to start media decode thread!");
        return NS_ERROR_FAILURE;
      }

      AdvanceFrame();
      NS_ASSERTION(mDecoder->GetState() != MediaDecoder::PLAY_STATE_PLAYING ||
                   IsStateMachineScheduled() ||
                   mPlaybackRate == 0.0, "Must have timer scheduled");
      return NS_OK;
    }

    case DECODER_STATE_BUFFERING: {
      if (IsPausedAndDecoderWaiting()) {
        // The decode buffers are full, and playback is paused. Shutdown the
        // decode thread.
        StopDecodeThread();
        return NS_OK;
      }

      TimeStamp now = TimeStamp::Now();
      NS_ASSERTION(!mBufferingStart.IsNull(), "Must know buffering start time.");

      // We will remain in the buffering state if we've not decoded enough
      // data to begin playback, or if we've not downloaded a reasonable
      // amount of data inside our buffering time.
      TimeDuration elapsed = now - mBufferingStart;
      bool isLiveStream = mDecoder->GetResource()->GetLength() == -1;
      if ((isLiveStream || !mDecoder->CanPlayThrough()) &&
          elapsed < TimeDuration::FromSeconds(mBufferingWait * mPlaybackRate) &&
          (mQuickBuffering ? HasLowDecodedData(QUICK_BUFFERING_LOW_DATA_USECS)
                           : (GetUndecodedData() < mBufferingWait * mPlaybackRate * USECS_PER_S)) &&
          !resource->IsDataCachedToEndOfResource(mDecoder->mDecoderPosition) &&
          !resource->IsSuspended())
      {
        // Still buffering: log progress and re-run this state in a second.
        LOG(PR_LOG_DEBUG,
            ("%p Buffering: %.3lfs/%ds, timeout in %.3lfs %s",
             mDecoder.get(),
             GetUndecodedData() / static_cast<double>(USECS_PER_S),
             mBufferingWait,
             mBufferingWait - elapsed.ToSeconds(),
             (mQuickBuffering ? "(quick exit)" : "")));
        ScheduleStateMachine(USECS_PER_S);
        return NS_OK;
      } else {
        LOG(PR_LOG_DEBUG, ("%p Changed state from BUFFERING to DECODING", mDecoder.get()));
        LOG(PR_LOG_DEBUG, ("%p Buffered for %.3lfs",
                           mDecoder.get(),
                           (now - mBufferingStart).ToSeconds()));
        StartDecoding();
      }

      // Notify to allow blocked decoder thread to continue
      mDecoder->GetReentrantMonitor().NotifyAll();
      UpdateReadyState();
      if (mDecoder->GetState() == MediaDecoder::PLAY_STATE_PLAYING &&
          !IsPlaying())
      {
        StartPlayback();
      }
      NS_ASSERTION(IsStateMachineScheduled(), "Must have timer scheduled");
      return NS_OK;
    }

    case DECODER_STATE_SEEKING: {
      // Ensure we have a decode thread to perform the seek.
      return ScheduleDecodeThread();
    }

    case DECODER_STATE_COMPLETED: {
      StopDecodeThread();

      if (mState != DECODER_STATE_COMPLETED) {
        // While we're waiting for the decode thread to shutdown, we can
        // change state, for example to seeking or shutdown state.
        // Whatever changed our state should have scheduled another state
        // machine run.
        NS_ASSERTION(IsStateMachineScheduled(), "Must have timer scheduled");
        return NS_OK;
      }

      // Play the remaining media. We want to run AdvanceFrame() at least
      // once to ensure the current playback position is advanced to the
      // end of the media, and so that we update the readyState.
      if (mState == DECODER_STATE_COMPLETED &&
          (mReader->VideoQueue().GetSize() > 0 ||
           (HasAudio() && !mAudioCompleted)))
      {
        AdvanceFrame();
        NS_ASSERTION(mDecoder->GetState() != MediaDecoder::PLAY_STATE_PLAYING ||
                     mPlaybackRate == 0 ||
                     IsStateMachineScheduled(),
                     "Must have timer scheduled");
        return NS_OK;
      }

      // StopPlayback in order to reset the IsPlaying() state so audio
      // is restarted correctly.
      StopPlayback();

      if (mState != DECODER_STATE_COMPLETED) {
        // While we're presenting a frame we can change state. Whatever changed
        // our state should have scheduled another state machine run.
        NS_ASSERTION(IsStateMachineScheduled(), "Must have timer scheduled");
        return NS_OK;
      }

      StopAudioThread();
      if (mDecoder->GetState() == MediaDecoder::PLAY_STATE_PLAYING) {
        // Snap the playback position to the furthest of the declared end
        // time, the last video frame's end, and the audio clock, then tell
        // the main thread playback has ended.
        int64_t videoTime = HasVideo() ? mVideoFrameEndTime : 0;
        int64_t clockTime = std::max(mEndTime, std::max(videoTime, GetAudioClock()));
        UpdatePlaybackPosition(clockTime);
        nsCOMPtr<nsIRunnable> event =
          NS_NewRunnableMethod(mDecoder, &MediaDecoder::PlaybackEnded);
        NS_DispatchToMainThread(event, NS_DISPATCH_NORMAL);
      }
      return NS_OK;
    }
  }

  return NS_OK;
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
void MediaDecoderStateMachine::RenderVideoFrame(VideoData* aData,
|
2011-06-23 15:08:54 -07:00
|
|
|
TimeStamp aTarget)
|
2010-04-01 20:03:07 -07:00
|
|
|
{
|
2011-07-11 20:39:23 -07:00
|
|
|
NS_ASSERTION(OnStateMachineThread() || OnDecodeThread(),
|
|
|
|
"Should be on state machine or decode thread.");
|
2011-04-29 12:21:57 -07:00
|
|
|
mDecoder->GetReentrantMonitor().AssertNotCurrentThreadIn();
|
2010-04-01 20:03:07 -07:00
|
|
|
|
|
|
|
if (aData->mDuplicate) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2012-11-30 05:17:54 -08:00
|
|
|
if (!PR_GetEnv("MOZ_QUIET")) {
|
|
|
|
LOG(PR_LOG_DEBUG, ("%p Decoder playing video frame %lld",
|
|
|
|
mDecoder.get(), aData->mTime));
|
|
|
|
}
|
2012-04-29 20:12:42 -07:00
|
|
|
|
2012-02-14 20:35:01 -08:00
|
|
|
VideoFrameContainer* container = mDecoder->GetVideoFrameContainer();
|
|
|
|
if (container) {
|
|
|
|
container->SetCurrentFrame(aData->mDisplay, aData->mImage, aTarget);
|
2010-04-01 20:03:07 -07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2012-08-22 08:56:38 -07:00
|
|
|
int64_t
|
2012-11-14 11:46:40 -08:00
|
|
|
MediaDecoderStateMachine::GetAudioClock()
|
2010-04-01 20:03:07 -07:00
|
|
|
{
|
2011-07-11 20:39:34 -07:00
|
|
|
NS_ASSERTION(OnStateMachineThread(), "Should be on state machine thread.");
|
2011-07-11 20:40:38 -07:00
|
|
|
// We must hold the decoder monitor while using the audio stream off the
|
|
|
|
// audio thread to ensure that it doesn't get destroyed on the audio thread
|
|
|
|
// while we're using it.
|
2012-11-19 18:22:42 -08:00
|
|
|
mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
|
|
|
|
if (!HasAudio() || mAudioCaptured)
|
|
|
|
return -1;
|
2011-07-11 20:39:37 -07:00
|
|
|
if (!mAudioStream) {
|
|
|
|
// Audio thread hasn't played any data yet.
|
|
|
|
return mAudioStartTime;
|
|
|
|
}
|
2012-08-22 08:56:38 -07:00
|
|
|
int64_t t = mAudioStream->GetPosition();
|
2010-04-01 20:03:07 -07:00
|
|
|
return (t == -1) ? -1 : t + mAudioStartTime;
|
|
|
|
}
|
|
|
|
|
2012-11-22 02:38:28 -08:00
|
|
|
int64_t MediaDecoderStateMachine::GetVideoStreamPosition()
|
2010-04-01 20:03:07 -07:00
|
|
|
{
|
2011-04-29 12:21:57 -07:00
|
|
|
mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
|
2010-04-01 20:03:07 -07:00
|
|
|
|
2012-11-22 02:38:28 -08:00
|
|
|
if (!IsPlaying()) {
|
|
|
|
return mPlayDuration + mStartTime;
|
|
|
|
}
|
|
|
|
|
|
|
|
// The playbackRate has been just been changed, reset the playstartTime.
|
|
|
|
if (mResetPlayStartTime) {
|
|
|
|
mPlayStartTime = TimeStamp::Now();
|
|
|
|
mResetPlayStartTime = false;
|
2011-07-11 20:39:32 -07:00
|
|
|
}
|
2010-04-01 20:03:07 -07:00
|
|
|
|
2012-11-22 02:38:28 -08:00
|
|
|
int64_t pos = DurationToUsecs(TimeStamp::Now() - mPlayStartTime) + mPlayDuration;
|
|
|
|
pos -= mBasePosition;
|
2013-01-10 03:26:18 -08:00
|
|
|
NS_ASSERTION(pos >= 0, "Video stream position should be positive.");
|
|
|
|
return mBasePosition + pos * mPlaybackRate + mStartTime;
|
2012-11-22 02:38:28 -08:00
|
|
|
}
|
|
|
|
|
|
|
|
// Returns the current playback clock in microseconds. Prefers the audio
// hardware clock while audio is playing; otherwise falls back to the
// system-clock-based video stream position. NOTE: while trusting the audio
// clock this also resyncs mPlayDuration/mPlayStartTime as a side effect.
int64_t MediaDecoderStateMachine::GetClock() {
  NS_ASSERTION(OnStateMachineThread(), "Should be on state machine thread.");
  mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();

  // Determine the clock time. If we've got audio, and we've not reached
  // the end of the audio, use the audio clock. However if we've finished
  // audio, or don't have audio, use the system clock.
  int64_t clock_time = -1;
  if (!IsPlaying()) {
    // Paused: the clock is frozen at the accumulated play duration.
    clock_time = mPlayDuration + mStartTime;
  } else {
    int64_t audio_time = GetAudioClock();
    if (HasAudio() && !mAudioCompleted && audio_time != -1) {
      clock_time = audio_time;
      // Resync against the audio clock, while we're trusting the
      // audio clock. This ensures no "drift", particularly on Linux.
      mPlayDuration = clock_time - mStartTime;
      mPlayStartTime = TimeStamp::Now();
    } else {
      // Audio is disabled on this system. Sync to the system clock.
      clock_time = GetVideoStreamPosition();
      // Ensure the clock can never go backwards.
      NS_ASSERTION(mCurrentFrameTime <= clock_time || mPlaybackRate <= 0,
                   "Clock should go forwards if the playback rate is > 0.");
    }
  }
  return clock_time;
}
|
|
|
|
|
|
|
|
// Advances playback of the media: pops and discards video frames whose
// presentation time has already passed, renders the frame at the current
// playback position, updates the reported playback position, and schedules
// the next run of the state machine. Runs on the state machine thread with
// the decoder monitor held. Returns early (without scheduling) when the
// decoder is not in PLAY_STATE_PLAYING or when playbackRate is 0.
void MediaDecoderStateMachine::AdvanceFrame()
{
  NS_ASSERTION(OnStateMachineThread(), "Should be on state machine thread.");
  mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
  NS_ASSERTION(!HasAudio() || mAudioStartTime != -1,
               "Should know audio start time if we have audio.");

  if (mDecoder->GetState() != MediaDecoder::PLAY_STATE_PLAYING) {
    return;
  }

  // If playbackRate is 0.0, we should stop the progress, but not be in paused
  // state, per spec.
  if (mPlaybackRate == 0.0) {
    return;
  }

  int64_t clock_time = GetClock();
  // Skip frames up to the frame at the playback position, and figure out
  // the time remaining until it's time to display the next frame.
  int64_t remainingTime = AUDIO_DURATION_USECS;
  NS_ASSERTION(clock_time >= mStartTime, "Should have positive clock time.");
  nsAutoPtr<VideoData> currentFrame;
#ifdef PR_LOGGING
  int32_t droppedFrames = 0;
#endif
  if (mReader->VideoQueue().GetSize() > 0) {
    VideoData* frame = mReader->VideoQueue().PeekFront();
    // In real-time mode every queued frame is consumed immediately,
    // regardless of the clock; otherwise only frames whose start time has
    // been reached are consumed.
    while (mRealTime || clock_time >= frame->mTime) {
      mVideoFrameEndTime = frame->mEndTime;
      // nsAutoPtr takes ownership; the last frame popped in this loop is the
      // one rendered below, earlier ones are freed (i.e. dropped).
      currentFrame = frame;
      LOG(PR_LOG_DEBUG, ("%p Decoder discarding video frame %lld", mDecoder.get(), frame->mTime));
#ifdef PR_LOGGING
      // droppedFrames++ is 0 (false) on the first iteration, so the running
      // count is only logged from the second discarded frame onwards.
      // NOTE(review): this logs in addition to the unconditional LOG above,
      // so the discard is reported twice per frame — confirm intended.
      if (droppedFrames++) {
        LOG(PR_LOG_DEBUG, ("%p Decoder discarding video frame %lld (%d so far)",
              mDecoder.get(), frame->mTime, droppedFrames - 1));
      }
#endif
      mReader->VideoQueue().PopFront();
      // Notify the decode thread that the video queue's buffers may have
      // free'd up space for more frames.
      mDecoder->GetReentrantMonitor().NotifyAll();
      mDecoder->UpdatePlaybackOffset(frame->mOffset);
      if (mReader->VideoQueue().GetSize() == 0)
        break;
      frame = mReader->VideoQueue().PeekFront();
    }
    // Current frame has already been presented, wait until it's time to
    // present the next frame.
    if (frame && !currentFrame) {
      int64_t now = IsPlaying() ? clock_time : mPlayDuration;

      remainingTime = frame->mTime - now;
    }
  }

  // Check to see if we don't have enough data to play up to the next frame.
  // If we don't, switch to buffering mode.
  MediaResource* resource = mDecoder->GetResource();
  if (mState == DECODER_STATE_DECODING &&
      mDecoder->GetState() == MediaDecoder::PLAY_STATE_PLAYING &&
      HasLowDecodedData(remainingTime + EXHAUSTED_DATA_MARGIN_USECS) &&
      !resource->IsDataCachedToEndOfResource(mDecoder->mDecoderPosition) &&
      !resource->IsSuspended() &&
      (JustExitedQuickBuffering() || HasLowUndecodedData()))
  {
    if (currentFrame) {
      // Put the frame we popped back so it is rendered once buffering ends.
      mReader->VideoQueue().PushFront(currentFrame.forget());
    }
    StartBuffering();
    ScheduleStateMachine();
    return;
  }

  // We've got enough data to keep playing until at least the next frame.
  // Start playing now if need be.
  if (!IsPlaying() && ((mFragmentEndTime >= 0 && clock_time < mFragmentEndTime) || mFragmentEndTime < 0)) {
    StartPlayback();
  }

  if (currentFrame) {
    // Decode one frame and display it.
    // Presentation timestamp is derived from when playback started plus the
    // frame's offset from the media start time.
    TimeStamp presTime = mPlayStartTime - UsecsToDuration(mPlayDuration) +
                         UsecsToDuration(currentFrame->mTime - mStartTime);
    NS_ASSERTION(currentFrame->mTime >= mStartTime, "Should have positive frame time");
    {
      // Drop the monitor while rendering so we don't block other threads
      // (e.g. the decode thread) during the potentially slow composite.
      ReentrantMonitorAutoExit exitMon(mDecoder->GetReentrantMonitor());
      // If we have video, we want to increment the clock in steps of the frame
      // duration.
      RenderVideoFrame(currentFrame, presTime);
    }
    // If we're no longer playing after dropping and reacquiring the lock,
    // playback must've been stopped on the decode thread (by a seek, for
    // example). In that case, the current frame is probably out of date.
    if (!IsPlaying()) {
      ScheduleStateMachine();
      return;
    }
    mDecoder->GetFrameStatistics().NotifyPresentedFrame();
    remainingTime = currentFrame->mEndTime - clock_time;
    currentFrame = nullptr;
  }

  // Cap the current time to the larger of the audio and video end time.
  // This ensures that if we're running off the system clock, we don't
  // advance the clock to after the media end time.
  if (mVideoFrameEndTime != -1 || mAudioEndTime != -1) {
    // These will be non -1 if we've displayed a video frame, or played an audio frame.
    clock_time = std::min(clock_time, std::max(mVideoFrameEndTime, mAudioEndTime));
    if (clock_time > GetMediaTime()) {
      // Only update the playback position if the clock time is greater
      // than the previous playback position. The audio clock can
      // sometimes report a time less than its previously reported in
      // some situations, and we need to gracefully handle that.
      UpdatePlaybackPosition(clock_time);
    }
  }

  // If the number of audio/video frames queued has changed, either by
  // this function popping and playing a video frame, or by the audio
  // thread popping and playing an audio frame, we may need to update our
  // ready state. Post an update to do so.
  UpdateReadyState();

  ScheduleStateMachine(remainingTime);
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
// Blocks the audio thread for approximately aUsecs microseconds (at least
// one millisecond) by repeatedly waiting on the decoder monitor. Wakes up
// early if the decoder shuts down, a seek starts, the audio thread is asked
// to stop, or playback stops. Must be called on the audio thread with the
// decoder monitor held; the monitor is released while waiting.
void MediaDecoderStateMachine::Wait(int64_t aUsecs) {
  NS_ASSERTION(OnAudioThread(), "Only call on the audio thread");
  mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
  // Clamp the wait to at least one millisecond so we never busy-spin on
  // sub-millisecond requests.
  TimeStamp end = TimeStamp::Now() + UsecsToDuration(std::max<int64_t>(USECS_PER_MS, aUsecs));
  TimeStamp now;
  while ((now = TimeStamp::Now()) < end &&
         mState != DECODER_STATE_SHUTDOWN &&
         mState != DECODER_STATE_SEEKING &&
         !mStopAudioThread &&
         IsPlaying())
  {
    // Wait in millisecond granularity; monitor notifications re-check the
    // loop conditions so state changes interrupt the wait promptly.
    int64_t ms = static_cast<int64_t>(NS_round((end - now).ToSeconds() * 1000));
    if (ms == 0 || ms > UINT32_MAX) {
      break;
    }
    mDecoder->GetReentrantMonitor().Wait(PR_MillisecondsToInterval(static_cast<uint32_t>(ms)));
  }
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
// Queries the reader for the media's start time, records it in mStartTime,
// and adjusts mEndTime when the duration came from metadata (e.g. a
// Content-Duration HTTP header) so that mEndTime - mStartTime matches the
// supplied duration. Returns the first video frame found while seeking to
// the start (ownership per mReader->FindStartTime's contract — TODO confirm
// against the reader implementation). Runs on the decode thread with the
// decoder monitor held; the monitor is released around the reader call.
VideoData* MediaDecoderStateMachine::FindStartTime()
{
  NS_ASSERTION(OnDecodeThread(), "Should be on decode thread.");
  mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
  int64_t startTime = 0;
  mStartTime = 0;
  VideoData* v = nullptr;
  {
    // The reader may block on I/O; don't hold the monitor while it works.
    ReentrantMonitorAutoExit exitMon(mDecoder->GetReentrantMonitor());
    v = mReader->FindStartTime(startTime);
  }
  if (startTime != 0) {
    mStartTime = startTime;
    if (mGotDurationFromMetaData) {
      NS_ASSERTION(mEndTime != -1,
                   "We should have mEndTime as supplied duration here");
      // We were specified a duration from a Content-Duration HTTP header.
      // Adjust mEndTime so that mEndTime-mStartTime matches the specified
      // duration.
      mEndTime = mStartTime + mEndTime;
    }
  }
  // Set the audio start time to be start of media. If this lies before the
  // first actual audio frame we have, we'll inject silence during playback
  // to ensure the audio starts at the correct time.
  mAudioStartTime = mStartTime;
  LOG(PR_LOG_DEBUG, ("%p Media start time is %lld", mDecoder.get(), mStartTime));
  return v;
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
void MediaDecoderStateMachine::UpdateReadyState() {
|
2011-04-29 12:21:57 -07:00
|
|
|
mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
|
2010-04-01 20:03:07 -07:00
|
|
|
|
2012-12-18 20:48:32 -08:00
|
|
|
MediaDecoderOwner::NextFrameStatus nextFrameStatus = GetNextFrameStatus();
|
|
|
|
if (nextFrameStatus == mLastFrameStatus) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
mLastFrameStatus = nextFrameStatus;
|
|
|
|
|
2010-04-01 20:03:07 -07:00
|
|
|
nsCOMPtr<nsIRunnable> event;
|
2012-12-18 20:48:32 -08:00
|
|
|
switch (nextFrameStatus) {
|
2012-11-14 11:45:31 -08:00
|
|
|
case MediaDecoderOwner::NEXT_FRAME_UNAVAILABLE_BUFFERING:
|
2012-11-14 11:46:40 -08:00
|
|
|
event = NS_NewRunnableMethod(mDecoder, &MediaDecoder::NextFrameUnavailableBuffering);
|
2010-04-01 20:03:07 -07:00
|
|
|
break;
|
2012-11-14 11:45:31 -08:00
|
|
|
case MediaDecoderOwner::NEXT_FRAME_AVAILABLE:
|
2012-11-14 11:46:40 -08:00
|
|
|
event = NS_NewRunnableMethod(mDecoder, &MediaDecoder::NextFrameAvailable);
|
2010-04-01 20:03:07 -07:00
|
|
|
break;
|
2012-11-14 11:45:31 -08:00
|
|
|
case MediaDecoderOwner::NEXT_FRAME_UNAVAILABLE:
|
2012-11-14 11:46:40 -08:00
|
|
|
event = NS_NewRunnableMethod(mDecoder, &MediaDecoder::NextFrameUnavailable);
|
2010-04-01 20:03:07 -07:00
|
|
|
break;
|
|
|
|
default:
|
|
|
|
PR_NOT_REACHED("unhandled frame state");
|
|
|
|
}
|
|
|
|
|
|
|
|
NS_DispatchToMainThread(event, NS_DISPATCH_NORMAL);
|
|
|
|
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
bool MediaDecoderStateMachine::JustExitedQuickBuffering()
|
2011-03-23 15:28:57 -07:00
|
|
|
{
|
|
|
|
return !mDecodeStartTime.IsNull() &&
|
|
|
|
mQuickBuffering &&
|
2012-09-18 11:23:59 -07:00
|
|
|
(TimeStamp::Now() - mDecodeStartTime) < TimeDuration::FromMicroseconds(QUICK_BUFFER_THRESHOLD_USECS);
|
2011-03-23 15:28:57 -07:00
|
|
|
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
// Transitions the state machine into DECODER_STATE_BUFFERING: stops
// playback if needed, decides whether this is a "quick" buffering pass,
// records the buffering start time, and notifies the element via the
// ready-state machinery. Requires the decoder monitor to be held.
void MediaDecoderStateMachine::StartBuffering()
{
  mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();

  if (IsPlaying()) {
    StopPlayback();
  }

  TimeDuration decodeDuration = TimeStamp::Now() - mDecodeStartTime;
  // Go into quick buffering mode provided we've not just left buffering using
  // a "quick exit". This stops us flip-flopping between playing and buffering
  // when the download speed is similar to the decode speed.
  mQuickBuffering =
    !JustExitedQuickBuffering() &&
    decodeDuration < UsecsToDuration(QUICK_BUFFER_THRESHOLD_USECS);
  mBufferingStart = TimeStamp::Now();

  // We need to tell the element that buffering has started.
  // We can't just directly send an asynchronous runnable that
  // eventually fires the "waiting" event. The problem is that
  // there might be pending main-thread events, such as "data
  // received" notifications, that mean we're not actually still
  // buffering by the time this runnable executes. So instead
  // we just trigger UpdateReadyStateForData; when it runs, it
  // will check the current state and decide whether to tell
  // the element we're buffering or not.
  UpdateReadyState();
  mState = DECODER_STATE_BUFFERING;
  LOG(PR_LOG_DEBUG, ("%p Changed state from DECODING to BUFFERING, decoded for %.3lfs",
                     mDecoder.get(), decodeDuration.ToSeconds()));
#ifdef PR_LOGGING
  // Only fetched for the diagnostic log below; guarded so non-logging
  // builds don't pay for (or warn about) the unused statistics.
  MediaDecoder::Statistics stats = mDecoder->GetStatistics();
#endif
  LOG(PR_LOG_DEBUG, ("%p Playback rate: %.1lfKB/s%s download rate: %.1lfKB/s%s",
                     mDecoder.get(),
                     stats.mPlaybackRate/1024, stats.mPlaybackRateReliable ? "" : " (unreliable)",
                     stats.mDownloadRate/1024, stats.mDownloadRateReliable ? "" : " (unreliable)"));
}
|
2011-03-23 15:28:58 -07:00
|
|
|
|
2013-03-02 11:14:44 -08:00
|
|
|
// Fills aBuffered with the time ranges the reader considers buffered,
// offset by the media start time. Fails if the decoder has no resource.
nsresult MediaDecoderStateMachine::GetBuffered(TimeRanges* aBuffered) {
  MediaResource* resource = mDecoder->GetResource();
  NS_ENSURE_TRUE(resource, NS_ERROR_FAILURE);

  // Pin the resource so its cached data cannot be evicted while the
  // reader computes the buffered ranges, then unpin before returning.
  resource->Pin();
  nsresult rv = mReader->GetBuffered(aBuffered, mStartTime);
  resource->Unpin();

  return rv;
}
|
2011-07-11 20:39:32 -07:00
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
bool MediaDecoderStateMachine::IsPausedAndDecoderWaiting() {
|
2011-07-11 20:39:37 -07:00
|
|
|
mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
|
|
|
|
NS_ASSERTION(OnStateMachineThread(), "Should be on state machine thread.");
|
|
|
|
|
|
|
|
return
|
|
|
|
mDecodeThreadWaiting &&
|
2012-11-14 11:46:40 -08:00
|
|
|
mDecoder->GetState() != MediaDecoder::PLAY_STATE_PLAYING &&
|
2011-07-11 20:39:37 -07:00
|
|
|
(mState == DECODER_STATE_DECODING || mState == DECODER_STATE_BUFFERING);
|
|
|
|
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
// nsIRunnable entry point: takes the decoder monitor and runs one cycle of
// the state machine on the state machine thread.
nsresult MediaDecoderStateMachine::Run()
{
  ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
  NS_ASSERTION(OnStateMachineThread(), "Should be on state machine thread.");

  return CallRunStateMachine();
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
// Runs one cycle of RunStateMachine() with the re-entry bookkeeping flags
// (mRunAgain, mDispatchedRunEvent, mIsRunning) managed around it. If
// ScheduleStateMachine() was called during the cycle, re-dispatches this
// runnable so another cycle runs as soon as possible. Must be called on
// the state machine thread with the decoder monitor held.
nsresult MediaDecoderStateMachine::CallRunStateMachine()
{
  mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
  NS_ASSERTION(OnStateMachineThread(), "Should be on state machine thread.");
  // This will be set to true by ScheduleStateMachine() if it's called
  // while we're in RunStateMachine().
  mRunAgain = false;

  // Set to true whenever we dispatch an event to run this state machine.
  // This flag prevents us from dispatching a second run event while one
  // is already pending.
  mDispatchedRunEvent = false;

  // If audio is being captured, stop the audio thread if it's running
  if (mAudioCaptured) {
    StopAudioThread();
  }

  // Clear the pending-timer deadline; this cycle satisfies it.
  mTimeout = TimeStamp();

  mIsRunning = true;
  nsresult res = RunStateMachine();
  mIsRunning = false;

  // A re-run was requested mid-cycle and no run event is pending: dispatch
  // ourselves again rather than looping, so other events get a turn.
  if (mRunAgain && !mDispatchedRunEvent) {
    mDispatchedRunEvent = true;
    return NS_DispatchToCurrentThread(this);
  }

  return res;
}
|
|
|
|
|
|
|
|
// nsITimer callback trampoline: recovers the state machine instance from
// the closure pointer and forwards to its TimeoutExpired() method.
static void TimeoutExpired(nsITimer *aTimer, void *aClosure) {
  MediaDecoderStateMachine *machine =
    static_cast<MediaDecoderStateMachine*>(aClosure);
  NS_ASSERTION(machine, "Must have been passed state machine");
  machine->TimeoutExpired();
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
void MediaDecoderStateMachine::TimeoutExpired()
|
2011-07-11 20:39:34 -07:00
|
|
|
{
|
|
|
|
ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
|
|
|
|
NS_ASSERTION(OnStateMachineThread(), "Must be on state machine thread");
|
|
|
|
if (mIsRunning) {
|
2011-09-29 16:34:37 -07:00
|
|
|
mRunAgain = true;
|
2011-07-11 20:39:34 -07:00
|
|
|
} else if (!mDispatchedRunEvent) {
|
2011-09-26 17:25:41 -07:00
|
|
|
// We don't have an event dispatched to run the state machine, so we
|
|
|
|
// can just run it from here.
|
|
|
|
CallRunStateMachine();
|
2011-07-11 20:39:34 -07:00
|
|
|
}
|
|
|
|
// Otherwise, an event has already been dispatched to run the state machine
|
|
|
|
// as soon as possible. Nothing else needed to do, the state machine is
|
|
|
|
// going to run anyway.
|
2011-07-11 20:39:32 -07:00
|
|
|
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
// Takes the decoder monitor, wakes any threads waiting on it (e.g. the
// decode thread), and schedules an immediate state machine run. Safe to
// call from threads that do not already hold the monitor.
void MediaDecoderStateMachine::ScheduleStateMachineWithLockAndWakeDecoder() {
  ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
  mon.NotifyAll();
  ScheduleStateMachine();
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
// Schedules the state machine to run in approximately aUsecs microseconds
// (clamped to >= 0; capped at 40ms in real-time mode). Coalesces with any
// already-scheduled earlier run; a zero delay is serviced by flagging the
// running cycle or dispatching a run event instead of arming a timer.
// Returns NS_ERROR_FAILURE if the decoder is shutting down. Requires the
// decoder monitor to be held.
nsresult MediaDecoderStateMachine::ScheduleStateMachine(int64_t aUsecs) {
  mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
  NS_ABORT_IF_FALSE(GetStateMachineThread(),
                    "Must have a state machine thread to schedule");

  if (mState == DECODER_STATE_SHUTDOWN) {
    return NS_ERROR_FAILURE;
  }
  // Negative delays are treated as "run as soon as possible".
  aUsecs = std::max<int64_t>(aUsecs, 0);

  TimeStamp timeout = TimeStamp::Now() + UsecsToDuration(aUsecs);
  if (!mTimeout.IsNull()) {
    if (timeout >= mTimeout) {
      // We've already scheduled a timer set to expire at or before this time,
      // or have an event dispatched to run the state machine.
      return NS_OK;
    }
    if (mTimer) {
      // We've been asked to schedule a timer to run before an existing timer.
      // Cancel the existing timer.
      mTimer->Cancel();
    }
  }

  uint32_t ms = static_cast<uint32_t>((aUsecs / USECS_PER_MS) & 0xFFFFFFFF);
  // In real-time mode, never sleep longer than 40ms so we keep up with the
  // live stream.
  if (mRealTime && ms > 40)
    ms = 40;
  if (ms == 0) {
    if (mIsRunning) {
      // We're currently running this state machine on the state machine
      // thread. Signal it to run again once it finishes its current cycle.
      mRunAgain = true;
      return NS_OK;
    } else if (!mDispatchedRunEvent) {
      // We're not currently running this state machine on the state machine
      // thread. Dispatch an event to run one cycle of the state machine.
      mDispatchedRunEvent = true;
      return GetStateMachineThread()->Dispatch(this, NS_DISPATCH_NORMAL);
    }
    // We're not currently running this state machine on the state machine
    // thread, but something has already dispatched an event to run it again,
    // so just exit; it's going to run real soon.
    return NS_OK;
  }

  mTimeout = timeout;

  nsresult res;
  if (!mTimer) {
    // Lazily create the one-shot timer and target it at the state machine
    // thread so its callback runs there.
    mTimer = do_CreateInstance("@mozilla.org/timer;1", &res);
    if (NS_FAILED(res)) return res;
    mTimer->SetTarget(GetStateMachineThread());
  }

  res = mTimer->InitWithFuncCallback(mozilla::TimeoutExpired,
                                     this,
                                     ms,
                                     nsITimer::TYPE_ONE_SHOT);
  return res;
}
|
2011-11-07 17:38:17 -08:00
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
// Returns true if the calling thread is the shared state machine thread.
bool MediaDecoderStateMachine::OnStateMachineThread() const
{
  return IsCurrentThread(GetStateMachineThread());
}
|
2012-11-14 11:45:33 -08:00
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
// Returns the global state machine thread shared by all decoders, owned by
// the StateMachineTracker singleton (no ownership is transferred here).
nsIThread* MediaDecoderStateMachine::GetStateMachineThread()
{
  return StateMachineTracker::Instance().GetGlobalStateMachineThread();
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
// Forwards the audio-available listener notification to the event manager.
// Requires the decoder monitor to be held.
void MediaDecoderStateMachine::NotifyAudioAvailableListener()
{
  mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
  mEventManager.NotifyAudioAvailableListener();
}
|
2012-11-06 14:33:01 -08:00
|
|
|
|
2012-11-22 02:38:28 -08:00
|
|
|
// Updates the playback rate. When there is no audio stream (so the video
// clock drives playback), re-bases the stream position bookkeeping
// (mBasePosition, mPlayDuration, mPlayStartTime) at the moment of the rate
// change so GetVideoStreamPosition() stays continuous. Rate changes are
// refused for audio streams with more than two channels. Main thread only;
// aPlaybackRate must be non-zero (rate 0 is handled by the caller).
void MediaDecoderStateMachine::SetPlaybackRate(double aPlaybackRate)
{
  NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
  NS_ASSERTION(aPlaybackRate != 0,
               "PlaybackRate == 0 should be handled before this function.");
  ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());

  // We don't currently support more than two channels when changing playback
  // rate.
  if (mAudioStream && mAudioStream->GetChannels() > 2) {
    return;
  }

  if (mPlaybackRate == aPlaybackRate) {
    return;
  }

  // Get position of the last time we changed the rate.
  if (!HasAudio()) {
    // mBasePosition is a position in the video stream, not an absolute time.
    if (mState == DECODER_STATE_SEEKING) {
      // Mid-seek: the seek target, not the current stream position, is
      // where playback will resume.
      mBasePosition = mSeekTime - mStartTime;
    } else {
      mBasePosition = GetVideoStreamPosition();
    }
    mPlayDuration = mBasePosition;
    mResetPlayStartTime = true;
    mPlayStartTime = TimeStamp::Now();
  }

  mPlaybackRate = aPlaybackRate;
}
|
|
|
|
|
|
|
|
void MediaDecoderStateMachine::SetPreservesPitch(bool aPreservesPitch)
|
|
|
|
{
|
|
|
|
NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
|
|
|
|
ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
|
|
|
|
|
|
|
|
mPreservesPitch = aPreservesPitch;
|
|
|
|
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2012-11-14 11:46:40 -08:00
|
|
|
// Returns true once the state machine has entered its terminal SHUTDOWN
// state. Requires the decoder monitor to be held.
bool MediaDecoderStateMachine::IsShutdown()
{
  mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
  return GetState() == DECODER_STATE_SHUTDOWN;
}
|
|
|
|
|
2012-12-27 07:21:30 -08:00
|
|
|
// Packages the stream parameters discovered at aPublishTime into a
// TimedMetadata record and hands it to the metadata manager (which
// presumably takes ownership of the heap-allocated record and of aTags —
// TODO confirm against MetadataManager). Decode thread only, with the
// decoder monitor held.
void MediaDecoderStateMachine::QueueMetadata(int64_t aPublishTime,
                                             int aChannels,
                                             int aRate,
                                             bool aHasAudio,
                                             bool aHasVideo,
                                             MetadataTags* aTags)
{
  NS_ASSERTION(OnDecodeThread(), "Should be on decode thread.");
  mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
  TimedMetadata* metadata = new TimedMetadata;
  metadata->mPublishTime = aPublishTime;
  metadata->mChannels = aChannels;
  metadata->mRate = aRate;
  metadata->mHasAudio = aHasAudio;
  // NOTE(review): aHasVideo is accepted but never stored on the queued
  // record — confirm whether TimedMetadata should carry it or the
  // parameter should be dropped.
  metadata->mTags = aTags;
  mMetadataManager.QueueMetadata(metadata);
}
|
|
|
|
|
2012-11-14 11:45:33 -08:00
|
|
|
} // namespace mozilla
|
|
|
|
|