/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-*/
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this file,
 * You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "MediaStreamGraphImpl.h"
#include "mozilla/LinkedList.h"
#include "mozilla/MathAlgorithms.h"
#include "mozilla/unused.h"

#include "AudioSegment.h"
#include "VideoSegment.h"
#include "nsContentUtils.h"
#include "nsIAppShell.h"
#include "nsIObserver.h"
#include "nsPrintfCString.h"
#include "nsServiceManagerUtils.h"
#include "nsWidgetsCID.h"
#include "prerror.h"
#include "prlog.h"
#include "mozilla/Attributes.h"
#include "TrackUnionStream.h"
#include "ImageContainer.h"
#include "AudioChannelService.h"
#include "AudioNodeEngine.h"
#include "AudioNodeStream.h"
#include "AudioNodeExternalInputStream.h"
#include <algorithm>
#include "DOMMediaStream.h"
#include "GeckoProfiler.h"
#include "mozilla/unused.h"
#include "speex/speex_resampler.h"
#ifdef MOZ_WEBRTC
#include "AudioOutputObserver.h"
#endif

using namespace mozilla::layers;
using namespace mozilla::dom;
using namespace mozilla::gfx;

namespace mozilla {

#ifdef PR_LOGGING
PRLogModuleInfo* gMediaStreamGraphLog;
#define STREAM_LOG(type, msg) PR_LOG(gMediaStreamGraphLog, type, msg)
#else
#define STREAM_LOG(type, msg)
#endif

/**
 * The singleton graph instance.
 */
static MediaStreamGraphImpl* gGraph;

MediaStreamGraphImpl::~MediaStreamGraphImpl()
{
  NS_ASSERTION(IsEmpty(),
               "All streams should have been destroyed by messages from the main thread");
  STREAM_LOG(PR_LOG_DEBUG, ("MediaStreamGraph %p destroyed", this));
}

StreamTime
MediaStreamGraphImpl::GetDesiredBufferEnd(MediaStream* aStream)
{
  StreamTime current = mCurrentTime - aStream->mBufferStartTime;
  // When waking up media decoders, we need a longer safety margin, as it can
  // take more time to get new samples. A factor of two seems to work.
  return current +
      2 * MillisecondsToMediaTime(std::max(AUDIO_TARGET_MS, VIDEO_TARGET_MS));
}

void
MediaStreamGraphImpl::FinishStream(MediaStream* aStream)
{
  if (aStream->mFinished)
    return;
  STREAM_LOG(PR_LOG_DEBUG, ("MediaStream %p will finish", aStream));
  aStream->mFinished = true;
  // Force at least one more iteration of the control loop, since we rely
  // on UpdateCurrentTime to notify our listeners once the stream end
  // has been reached.
  EnsureNextIteration();

  SetStreamOrderDirty();
}

void
MediaStreamGraphImpl::AddStream(MediaStream* aStream)
{
  aStream->mBufferStartTime = mCurrentTime;
  *mStreams.AppendElement() = already_AddRefed<MediaStream>(aStream);
  STREAM_LOG(PR_LOG_DEBUG, ("Adding media stream %p to the graph", aStream));

  SetStreamOrderDirty();
}

void
MediaStreamGraphImpl::RemoveStream(MediaStream* aStream)
{
  // Remove references in mStreamUpdates before we allow aStream to die.
  // Pending updates are not needed (since the main thread has already given
  // up the stream) so we will just drop them.
  {
    MonitorAutoLock lock(mMonitor);
    for (uint32_t i = 0; i < mStreamUpdates.Length(); ++i) {
      if (mStreamUpdates[i].mStream == aStream) {
        mStreamUpdates[i].mStream = nullptr;
      }
    }
  }

  SetStreamOrderDirty();

  // This unrefs the stream, probably destroying it
  mStreams.RemoveElement(aStream);

  STREAM_LOG(PR_LOG_DEBUG, ("Removing media stream %p from the graph", aStream));
}

void
MediaStreamGraphImpl::UpdateConsumptionState(SourceMediaStream* aStream)
{
  MediaStreamListener::Consumption state =
      aStream->mIsConsumed ? MediaStreamListener::CONSUMED
                           : MediaStreamListener::NOT_CONSUMED;
  if (state != aStream->mLastConsumptionState) {
    aStream->mLastConsumptionState = state;
    for (uint32_t j = 0; j < aStream->mListeners.Length(); ++j) {
      MediaStreamListener* l = aStream->mListeners[j];
      l->NotifyConsumptionChanged(this, state);
    }
  }
}

void
MediaStreamGraphImpl::ExtractPendingInput(SourceMediaStream* aStream,
                                          GraphTime aDesiredUpToTime,
                                          bool* aEnsureNextIteration)
{
  bool finished;
  {
    MutexAutoLock lock(aStream->mMutex);
    if (aStream->mPullEnabled && !aStream->mFinished &&
        !aStream->mListeners.IsEmpty()) {
      // Compute how much stream time we'll need assuming we don't block
      // the stream at all between mStateComputedTime and aDesiredUpToTime.
      StreamTime t =
        GraphTimeToStreamTime(aStream, mStateComputedTime) +
        (aDesiredUpToTime - mStateComputedTime);
      STREAM_LOG(PR_LOG_DEBUG+1, ("Calling NotifyPull aStream=%p t=%f current end=%f", aStream,
                                  MediaTimeToSeconds(t),
                                  MediaTimeToSeconds(aStream->mBuffer.GetEnd())));
      if (t > aStream->mBuffer.GetEnd()) {
        *aEnsureNextIteration = true;
#ifdef DEBUG
        if (aStream->mListeners.Length() == 0) {
          STREAM_LOG(PR_LOG_ERROR, ("No listeners in NotifyPull aStream=%p desired=%f current end=%f",
                                    aStream, MediaTimeToSeconds(t),
                                    MediaTimeToSeconds(aStream->mBuffer.GetEnd())));
          aStream->DumpTrackInfo();
        }
#endif
        for (uint32_t j = 0; j < aStream->mListeners.Length(); ++j) {
          MediaStreamListener* l = aStream->mListeners[j];
          {
            MutexAutoUnlock unlock(aStream->mMutex);
            l->NotifyPull(this, t);
          }
        }
      }
    }
    finished = aStream->mUpdateFinished;
    for (int32_t i = aStream->mUpdateTracks.Length() - 1; i >= 0; --i) {
      SourceMediaStream::TrackData* data = &aStream->mUpdateTracks[i];
      aStream->ApplyTrackDisabling(data->mID, data->mData);
      for (uint32_t j = 0; j < aStream->mListeners.Length(); ++j) {
        MediaStreamListener* l = aStream->mListeners[j];
        TrackTicks offset = (data->mCommands & SourceMediaStream::TRACK_CREATE)
            ? data->mStart : aStream->mBuffer.FindTrack(data->mID)->GetSegment()->GetDuration();
        l->NotifyQueuedTrackChanges(this, data->mID, data->mOutputRate,
                                    offset, data->mCommands, *data->mData);
      }
      if (data->mCommands & SourceMediaStream::TRACK_CREATE) {
        MediaSegment* segment = data->mData.forget();
        STREAM_LOG(PR_LOG_DEBUG, ("SourceMediaStream %p creating track %d, rate %d, start %lld, initial end %lld",
                                  aStream, data->mID, data->mOutputRate, int64_t(data->mStart),
                                  int64_t(segment->GetDuration())));

        aStream->mBuffer.AddTrack(data->mID, data->mOutputRate, data->mStart, segment);
        // The track has taken ownership of data->mData, so let's replace
        // data->mData with an empty clone.
        data->mData = segment->CreateEmptyClone();
        data->mCommands &= ~SourceMediaStream::TRACK_CREATE;
      } else if (data->mData->GetDuration() > 0) {
        MediaSegment* dest = aStream->mBuffer.FindTrack(data->mID)->GetSegment();
        STREAM_LOG(PR_LOG_DEBUG+1, ("SourceMediaStream %p track %d, advancing end from %lld to %lld",
                                    aStream, data->mID,
                                    int64_t(dest->GetDuration()),
                                    int64_t(dest->GetDuration() + data->mData->GetDuration())));
        dest->AppendFrom(data->mData);
      }
      if (data->mCommands & SourceMediaStream::TRACK_END) {
        aStream->mBuffer.FindTrack(data->mID)->SetEnded();
        aStream->mUpdateTracks.RemoveElementAt(i);
      }
    }
    aStream->mBuffer.AdvanceKnownTracksTime(aStream->mUpdateKnownTracksTime);
  }
  if (aStream->mBuffer.GetEnd() > 0) {
    aStream->mHasCurrentData = true;
  }
  if (finished) {
    FinishStream(aStream);
  }
}

void
MediaStreamGraphImpl::UpdateBufferSufficiencyState(SourceMediaStream* aStream)
{
  StreamTime desiredEnd = GetDesiredBufferEnd(aStream);
  nsTArray<SourceMediaStream::ThreadAndRunnable> runnables;

  {
    MutexAutoLock lock(aStream->mMutex);
    for (uint32_t i = 0; i < aStream->mUpdateTracks.Length(); ++i) {
      SourceMediaStream::TrackData* data = &aStream->mUpdateTracks[i];
      if (data->mCommands & SourceMediaStream::TRACK_CREATE) {
        // This track hasn't been created yet, so we have no sufficiency
        // data. The track will be created in the next iteration of the
        // control loop and then we'll fire insufficiency notifications
        // if necessary.
        continue;
      }
      if (data->mCommands & SourceMediaStream::TRACK_END) {
        // This track will end, so no point in firing not-enough-data
        // callbacks.
        continue;
      }
      StreamBuffer::Track* track = aStream->mBuffer.FindTrack(data->mID);
      // Note that track->IsEnded() must be false, otherwise we would have
      // removed the track from mUpdateTracks already.
      NS_ASSERTION(!track->IsEnded(), "What is this track doing here?");
      data->mHaveEnough = track->GetEndTimeRoundDown() >= desiredEnd;
      if (!data->mHaveEnough) {
        runnables.MoveElementsFrom(data->mDispatchWhenNotEnough);
      }
    }
  }

  for (uint32_t i = 0; i < runnables.Length(); ++i) {
    runnables[i].mTarget->Dispatch(runnables[i].mRunnable, 0);
  }
}

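// Convert a GraphTime to the corresponding StreamTime for aStream, counting
// only the intervals during which the stream was not blocked.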
StreamTime
MediaStreamGraphImpl::GraphTimeToStreamTime(MediaStream* aStream,
                                            GraphTime aTime)
{
  NS_ASSERTION(aTime <= mStateComputedTime,
               "Don't ask about times where we haven't made blocking decisions yet");
  if (aTime <= mCurrentTime) {
    return std::max<StreamTime>(0, aTime - aStream->mBufferStartTime);
  }
  GraphTime t = mCurrentTime;
  StreamTime s = t - aStream->mBufferStartTime;
  while (t < aTime) {
    GraphTime end;
    if (!aStream->mBlocked.GetAt(t, &end)) {
      s += std::min(aTime, end) - t;
    }
    t = end;
  }
  return std::max<StreamTime>(0, s);
}

StreamTime
MediaStreamGraphImpl::GraphTimeToStreamTimeOptimistic(MediaStream* aStream,
                                                      GraphTime aTime)
{
  GraphTime computedUpToTime = std::min(mStateComputedTime, aTime);
  StreamTime s = GraphTimeToStreamTime(aStream, computedUpToTime);
  return s + (aTime - computedUpToTime);
}

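// Convert a StreamTime for aStream back to GraphTime by walking forward over
// the stream's blocked intervals; aFlags controls whether a trailing blocked
// interval is included.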
GraphTime
MediaStreamGraphImpl::StreamTimeToGraphTime(MediaStream* aStream,
                                            StreamTime aTime, uint32_t aFlags)
{
  if (aTime >= STREAM_TIME_MAX) {
    return GRAPH_TIME_MAX;
  }
  MediaTime bufferElapsedToCurrentTime = mCurrentTime - aStream->mBufferStartTime;
  if (aTime < bufferElapsedToCurrentTime ||
      (aTime == bufferElapsedToCurrentTime && !(aFlags & INCLUDE_TRAILING_BLOCKED_INTERVAL))) {
    return aTime + aStream->mBufferStartTime;
  }

  MediaTime streamAmount = aTime - bufferElapsedToCurrentTime;
  NS_ASSERTION(streamAmount >= 0, "Can't answer queries before current time");

  GraphTime t = mCurrentTime;
  while (t < GRAPH_TIME_MAX) {
    if (!(aFlags & INCLUDE_TRAILING_BLOCKED_INTERVAL) && streamAmount == 0) {
      return t;
    }
    bool blocked;
    GraphTime end;
    if (t < mStateComputedTime) {
      blocked = aStream->mBlocked.GetAt(t, &end);
      end = std::min(end, mStateComputedTime);
    } else {
      blocked = false;
      end = GRAPH_TIME_MAX;
    }
    if (blocked) {
      t = end;
    } else {
      if (streamAmount == 0) {
        // No more stream time to consume at time t, so we're done.
        break;
      }
      MediaTime consume = std::min(end - t, streamAmount);
      streamAmount -= consume;
      t += consume;
    }
  }
  return t;
}

GraphTime
MediaStreamGraphImpl::GetAudioPosition(MediaStream* aStream)
{
  if (aStream->mAudioOutputStreams.IsEmpty()) {
    return mCurrentTime;
  }
  int64_t positionInFrames = aStream->mAudioOutputStreams[0].mStream->GetPositionInFrames();
  if (positionInFrames < 0) {
    return mCurrentTime;
  }
  return aStream->mAudioOutputStreams[0].mAudioPlaybackStartTime +
      TicksToTimeRoundDown(IdealAudioRate(),
                           positionInFrames);
}

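// Advance mCurrentTime (from the wall clock in realtime mode, or by a fixed
// period offline), notify listeners of blocking changes and output, and mark
// streams whose data has been fully played out as finished.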
void
MediaStreamGraphImpl::UpdateCurrentTime()
{
  GraphTime prevCurrentTime, nextCurrentTime;
  if (mRealtime) {
    TimeStamp now = TimeStamp::Now();
    prevCurrentTime = mCurrentTime;
    nextCurrentTime =
      SecondsToMediaTime((now - mCurrentTimeStamp).ToSeconds()) + mCurrentTime;

    mCurrentTimeStamp = now;
    STREAM_LOG(PR_LOG_DEBUG+1, ("Updating current time to %f (real %f, mStateComputedTime %f)",
                                MediaTimeToSeconds(nextCurrentTime),
                                (now - mInitialTimeStamp).ToSeconds(),
                                MediaTimeToSeconds(mStateComputedTime)));
  } else {
    prevCurrentTime = mCurrentTime;
    nextCurrentTime = mCurrentTime + MillisecondsToMediaTime(MEDIA_GRAPH_TARGET_PERIOD_MS);
    STREAM_LOG(PR_LOG_DEBUG+1, ("Updating offline current time to %f (mStateComputedTime %f)",
                                MediaTimeToSeconds(nextCurrentTime),
                                MediaTimeToSeconds(mStateComputedTime)));
  }

  if (mStateComputedTime < nextCurrentTime) {
    STREAM_LOG(PR_LOG_WARNING, ("Media graph global underrun detected"));
    nextCurrentTime = mStateComputedTime;
  }

  if (prevCurrentTime >= nextCurrentTime) {
    NS_ASSERTION(prevCurrentTime == nextCurrentTime, "Time can't go backwards!");
    // This could happen due to low clock resolution, maybe?
    STREAM_LOG(PR_LOG_DEBUG, ("Time did not advance"));
    // There's not much left to do here, but the code below that notifies
    // listeners that streams have ended still needs to run.
  }

  nsTArray<MediaStream*> streamsReadyToFinish;
  nsAutoTArray<bool,800> streamHasOutput;
  streamHasOutput.SetLength(mStreams.Length());
  for (uint32_t i = 0; i < mStreams.Length(); ++i) {
    MediaStream* stream = mStreams[i];

    // Calculate blocked time and fire Blocked/Unblocked events
    GraphTime blockedTime = 0;
    GraphTime t = prevCurrentTime;
    while (t < nextCurrentTime) {
      GraphTime end;
      bool blocked = stream->mBlocked.GetAt(t, &end);
      if (blocked) {
        blockedTime += std::min(end, nextCurrentTime) - t;
      }
      if (blocked != stream->mNotifiedBlocked) {
        for (uint32_t j = 0; j < stream->mListeners.Length(); ++j) {
          MediaStreamListener* l = stream->mListeners[j];
          l->NotifyBlockingChanged(this,
              blocked ? MediaStreamListener::BLOCKED : MediaStreamListener::UNBLOCKED);
        }
        stream->mNotifiedBlocked = blocked;
      }
      t = end;
    }

    stream->AdvanceTimeVaryingValuesToCurrentTime(nextCurrentTime, blockedTime);
    // Advance mBlocked last so that implementations of
    // AdvanceTimeVaryingValuesToCurrentTime can rely on the value of mBlocked.
    stream->mBlocked.AdvanceCurrentTime(nextCurrentTime);

    streamHasOutput[i] = blockedTime < nextCurrentTime - prevCurrentTime;
    // Make this an assertion when bug 957832 is fixed.
    NS_WARN_IF_FALSE(!streamHasOutput[i] || !stream->mNotifiedFinished,
      "Shouldn't have already notified of finish *and* have output!");

    if (stream->mFinished && !stream->mNotifiedFinished) {
      streamsReadyToFinish.AppendElement(stream);
    }
    STREAM_LOG(PR_LOG_DEBUG+1, ("MediaStream %p bufferStartTime=%f blockedTime=%f",
                                stream, MediaTimeToSeconds(stream->mBufferStartTime),
                                MediaTimeToSeconds(blockedTime)));
  }

  mCurrentTime = nextCurrentTime;

  // Do these after setting mCurrentTime so that StreamTimeToGraphTime works properly.
  for (uint32_t i = 0; i < streamHasOutput.Length(); ++i) {
    if (!streamHasOutput[i]) {
      continue;
    }
    MediaStream* stream = mStreams[i];
    for (uint32_t j = 0; j < stream->mListeners.Length(); ++j) {
      MediaStreamListener* l = stream->mListeners[j];
      l->NotifyOutput(this, mCurrentTime);
    }
  }

  for (uint32_t i = 0; i < streamsReadyToFinish.Length(); ++i) {
    MediaStream* stream = streamsReadyToFinish[i];
    // The stream is fully finished when all of its track data has been played
    // out.
    if (mCurrentTime >=
        stream->StreamTimeToGraphTime(stream->GetStreamBuffer().GetAllTracksEnd())) {
      stream->mNotifiedFinished = true;
      stream->mLastPlayedVideoFrame.SetNull();
      SetStreamOrderDirty();
      for (uint32_t j = 0; j < stream->mListeners.Length(); ++j) {
        MediaStreamListener* l = stream->mListeners[j];
        l->NotifyFinished(this);
      }
    }
  }
}

bool
MediaStreamGraphImpl::WillUnderrun(MediaStream* aStream, GraphTime aTime,
                                   GraphTime aEndBlockingDecisions, GraphTime* aEnd)
{
  // Finished streams can't underrun. ProcessedMediaStreams also can't cause
  // underrun currently, since we'll always be able to produce data for them
  // unless they block on some other stream.
  if (aStream->mFinished || aStream->AsProcessedStream()) {
    return false;
  }
  GraphTime bufferEnd =
    StreamTimeToGraphTime(aStream, aStream->GetBufferEnd(),
                          INCLUDE_TRAILING_BLOCKED_INTERVAL);
#ifdef DEBUG
  if (bufferEnd < mCurrentTime) {
    STREAM_LOG(PR_LOG_ERROR, ("MediaStream %p underrun, "
                              "bufferEnd %f < mCurrentTime %f (%lld < %lld), Streamtime %lld",
                              aStream, MediaTimeToSeconds(bufferEnd), MediaTimeToSeconds(mCurrentTime),
                              bufferEnd, mCurrentTime, aStream->GetBufferEnd()));
    aStream->DumpTrackInfo();
    NS_ASSERTION(bufferEnd >= mCurrentTime, "Buffer underran");
  }
#endif
  // We should block after bufferEnd.
  if (bufferEnd <= aTime) {
    STREAM_LOG(PR_LOG_DEBUG+1, ("MediaStream %p will block due to data underrun, "
                                "bufferEnd %f",
                                aStream, MediaTimeToSeconds(bufferEnd)));
    return true;
  }
  // We should keep blocking if we're currently blocked and we don't have
  // data all the way through to aEndBlockingDecisions. If we don't have
  // data all the way through to aEndBlockingDecisions, we'll block soon,
  // but we might as well remain unblocked and play the data we've got while
  // we can.
  if (bufferEnd <= aEndBlockingDecisions && aStream->mBlocked.GetBefore(aTime)) {
    STREAM_LOG(PR_LOG_DEBUG+1, ("MediaStream %p will block due to speculative data underrun, "
                                "bufferEnd %f",
                                aStream, MediaTimeToSeconds(bufferEnd)));
    return true;
  }
  // Reconsider decisions at bufferEnd
  *aEnd = std::min(*aEnd, bufferEnd);
  return false;
}

void
MediaStreamGraphImpl::MarkConsumed(MediaStream* aStream)
{
  if (aStream->mIsConsumed) {
    return;
  }
  aStream->mIsConsumed = true;

  ProcessedMediaStream* ps = aStream->AsProcessedStream();
  if (!ps) {
    return;
  }
  // Mark all the inputs to this stream as consumed
  for (uint32_t i = 0; i < ps->mInputs.Length(); ++i) {
    MarkConsumed(ps->mInputs[i]->mSource);
  }
}

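// Depth-first visit of aStream and its inputs to build a topological order in
// mStreams. Finding a stream that is still on the ordering stack closes a
// cycle; cycles without a DelayNode are muted.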
void
MediaStreamGraphImpl::UpdateStreamOrderForStream(mozilla::LinkedList<MediaStream>* aStack,
                                                 already_AddRefed<MediaStream> aStream)
{
  nsRefPtr<MediaStream> stream = aStream;
  NS_ASSERTION(!stream->mHasBeenOrdered, "stream should not have already been ordered");
  if (stream->mIsOnOrderingStack) {
    MediaStream* iter = aStack->getLast();
    AudioNodeStream* ns = stream->AsAudioNodeStream();
    bool delayNodePresent = ns ? ns->Engine()->AsDelayNodeEngine() != nullptr : false;
    bool cycleFound = false;
    if (iter) {
      do {
        cycleFound = true;
        iter->AsProcessedStream()->mInCycle = true;
        AudioNodeStream* ns = iter->AsAudioNodeStream();
        if (ns && ns->Engine()->AsDelayNodeEngine()) {
          delayNodePresent = true;
        }
        iter = iter->getPrevious();
      } while (iter && iter != stream);
    }
    if (cycleFound && !delayNodePresent) {
      // If we have detected a cycle, the previous loop should exit with stream
      // == iter, or the node is connected to itself. Go back in the cycle and
      // mute all nodes we find, or just mute the node itself.
      if (!iter) {
        // The node is connected to itself.
        // There can't be a non-AudioNodeStream here, because only AudioNodes
        // can be self-connected.
        iter = aStack->getLast();
        MOZ_ASSERT(iter->AsAudioNodeStream());
        iter->AsAudioNodeStream()->Mute();
      } else {
        MOZ_ASSERT(iter);
        do {
          AudioNodeStream* nodeStream = iter->AsAudioNodeStream();
          if (nodeStream) {
            nodeStream->Mute();
          }
        } while ((iter = iter->getNext()));
      }
    }
    return;
  }
  ProcessedMediaStream* ps = stream->AsProcessedStream();
  if (ps) {
    aStack->insertBack(stream);
    stream->mIsOnOrderingStack = true;
    for (uint32_t i = 0; i < ps->mInputs.Length(); ++i) {
      MediaStream* source = ps->mInputs[i]->mSource;
      if (!source->mHasBeenOrdered) {
        nsRefPtr<MediaStream> s = source;
        UpdateStreamOrderForStream(aStack, s.forget());
      }
    }
    aStack->popLast();
    stream->mIsOnOrderingStack = false;
  }

  stream->mHasBeenOrdered = true;
  *mStreams.AppendElement() = stream.forget();
}

static void AudioMixerCallback(AudioDataValue* aMixedBuffer,
                               AudioSampleFormat aFormat,
                               uint32_t aChannels,
                               uint32_t aFrames)
{
  // Need an api to register mixer callbacks, bug 989921
#ifdef MOZ_WEBRTC
  if (aFrames > 0 && aChannels > 0) {
    // XXX need Observer base class and registration API
    if (gFarendObserver) {
      gFarendObserver->InsertFarEnd(aMixedBuffer, aFrames, false,
                                    IdealAudioRate(), aChannels, aFormat);
    }
  }
#endif
}

void
MediaStreamGraphImpl::UpdateStreamOrder()
{
  mOldStreams.SwapElements(mStreams);
  mStreams.ClearAndRetainStorage();
  bool shouldMix = false;
  for (uint32_t i = 0; i < mOldStreams.Length(); ++i) {
    MediaStream* stream = mOldStreams[i];
    stream->mHasBeenOrdered = false;
    stream->mIsConsumed = false;
    stream->mIsOnOrderingStack = false;
    stream->mInBlockingSet = false;
    if (stream->AsSourceStream() &&
        stream->AsSourceStream()->NeedsMixing()) {
      shouldMix = true;
    }
    ProcessedMediaStream* ps = stream->AsProcessedStream();
    if (ps) {
      ps->mInCycle = false;
      AudioNodeStream* ns = ps->AsAudioNodeStream();
      if (ns) {
        ns->Unmute();
      }
    }
  }

  if (!mMixer && shouldMix) {
    mMixer = new AudioMixer(AudioMixerCallback);
  } else if (mMixer && !shouldMix) {
    mMixer = nullptr;
  }

  mozilla::LinkedList<MediaStream> stack;
  for (uint32_t i = 0; i < mOldStreams.Length(); ++i) {
    nsRefPtr<MediaStream>& s = mOldStreams[i];
    if (s->IsIntrinsicallyConsumed()) {
      MarkConsumed(s);
    }
    if (!s->mHasBeenOrdered) {
      UpdateStreamOrderForStream(&stack, s.forget());
    }
  }
}

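// Recompute the blocked status of every stream from mStateComputedTime up to
// aEndBlockingDecisions, working on independent partitions of mutually
// dependent streams.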
void
MediaStreamGraphImpl::RecomputeBlocking(GraphTime aEndBlockingDecisions)
{
  bool blockingDecisionsWillChange = false;

  STREAM_LOG(PR_LOG_DEBUG+1, ("Media graph %p computing blocking for time %f",
                              this, MediaTimeToSeconds(mStateComputedTime)));
  for (uint32_t i = 0; i < mStreams.Length(); ++i) {
    MediaStream* stream = mStreams[i];
    if (!stream->mInBlockingSet) {
      // Compute a partition of the streams containing 'stream' such that we can
      // compute the blocking status of each subset independently.
      nsAutoTArray<MediaStream*,10> streamSet;
      AddBlockingRelatedStreamsToSet(&streamSet, stream);

      GraphTime end;
      for (GraphTime t = mStateComputedTime;
           t < aEndBlockingDecisions; t = end) {
        end = GRAPH_TIME_MAX;
        RecomputeBlockingAt(streamSet, t, aEndBlockingDecisions, &end);
        if (end < GRAPH_TIME_MAX) {
          blockingDecisionsWillChange = true;
        }
      }
    }

    GraphTime end;
    stream->mBlocked.GetAt(mCurrentTime, &end);
    if (end < GRAPH_TIME_MAX) {
      blockingDecisionsWillChange = true;
    }
  }
  STREAM_LOG(PR_LOG_DEBUG+1, ("Media graph %p computed blocking for interval %f to %f",
                              this, MediaTimeToSeconds(mStateComputedTime),
                              MediaTimeToSeconds(aEndBlockingDecisions)));
  mStateComputedTime = aEndBlockingDecisions;

  if (blockingDecisionsWillChange) {
    // Make sure we wake up to notify listeners about these changes.
    EnsureNextIteration();
  }
}

void
MediaStreamGraphImpl::AddBlockingRelatedStreamsToSet(nsTArray<MediaStream*>* aStreams,
                                                     MediaStream* aStream)
{
  if (aStream->mInBlockingSet)
    return;
  aStream->mInBlockingSet = true;
  aStreams->AppendElement(aStream);
  for (uint32_t i = 0; i < aStream->mConsumers.Length(); ++i) {
    MediaInputPort* port = aStream->mConsumers[i];
    if (port->mFlags & (MediaInputPort::FLAG_BLOCK_INPUT | MediaInputPort::FLAG_BLOCK_OUTPUT)) {
      AddBlockingRelatedStreamsToSet(aStreams, port->mDest);
    }
  }
  ProcessedMediaStream* ps = aStream->AsProcessedStream();
  if (ps) {
    for (uint32_t i = 0; i < ps->mInputs.Length(); ++i) {
      MediaInputPort* port = ps->mInputs[i];
      if (port->mFlags & (MediaInputPort::FLAG_BLOCK_INPUT | MediaInputPort::FLAG_BLOCK_OUTPUT)) {
        AddBlockingRelatedStreamsToSet(aStreams, port->mSource);
      }
    }
  }
}

void
MediaStreamGraphImpl::MarkStreamBlocking(MediaStream* aStream)
{
  if (aStream->mBlockInThisPhase)
    return;
  aStream->mBlockInThisPhase = true;
  for (uint32_t i = 0; i < aStream->mConsumers.Length(); ++i) {
    MediaInputPort* port = aStream->mConsumers[i];
    if (port->mFlags & MediaInputPort::FLAG_BLOCK_OUTPUT) {
      MarkStreamBlocking(port->mDest);
    }
  }
  ProcessedMediaStream* ps = aStream->AsProcessedStream();
  if (ps) {
    for (uint32_t i = 0; i < ps->mInputs.Length(); ++i) {
      MediaInputPort* port = ps->mInputs[i];
      if (port->mFlags & MediaInputPort::FLAG_BLOCK_INPUT) {
        MarkStreamBlocking(port->mSource);
      }
    }
  }
}

void
MediaStreamGraphImpl::RecomputeBlockingAt(const nsTArray<MediaStream*>& aStreams,
                                          GraphTime aTime,
                                          GraphTime aEndBlockingDecisions,
                                          GraphTime* aEnd)
{
  for (uint32_t i = 0; i < aStreams.Length(); ++i) {
    MediaStream* stream = aStreams[i];
    stream->mBlockInThisPhase = false;
  }

  for (uint32_t i = 0; i < aStreams.Length(); ++i) {
    MediaStream* stream = aStreams[i];

    if (stream->mFinished) {
      GraphTime endTime = StreamTimeToGraphTime(stream,
          stream->GetStreamBuffer().GetAllTracksEnd());
      if (endTime <= aTime) {
        STREAM_LOG(PR_LOG_DEBUG+1, ("MediaStream %p is blocked due to being finished", stream));
        // We'll block indefinitely
        MarkStreamBlocking(stream);
        *aEnd = std::min(*aEnd, aEndBlockingDecisions);
        continue;
      } else {
        STREAM_LOG(PR_LOG_DEBUG+1, ("MediaStream %p is finished, but not blocked yet (end at %f, with blocking at %f)",
                                    stream, MediaTimeToSeconds(stream->GetBufferEnd()),
                                    MediaTimeToSeconds(endTime)));
        *aEnd = std::min(*aEnd, endTime);
      }
    }

    GraphTime end;
    bool explicitBlock = stream->mExplicitBlockerCount.GetAt(aTime, &end) > 0;
    *aEnd = std::min(*aEnd, end);
    if (explicitBlock) {
      STREAM_LOG(PR_LOG_DEBUG+1, ("MediaStream %p is blocked due to explicit blocker", stream));
      MarkStreamBlocking(stream);
      continue;
    }

    bool underrun = WillUnderrun(stream, aTime, aEndBlockingDecisions, aEnd);
    if (underrun) {
      // We'll block indefinitely
      MarkStreamBlocking(stream);
      *aEnd = std::min(*aEnd, aEndBlockingDecisions);
      continue;
    }
  }
  NS_ASSERTION(*aEnd > aTime, "Failed to advance!");

  for (uint32_t i = 0; i < aStreams.Length(); ++i) {
    MediaStream* stream = aStreams[i];
    stream->mBlocked.SetAtAndAfter(aTime, stream->mBlockInThisPhase);
  }
}

void
MediaStreamGraphImpl::NotifyHasCurrentData(MediaStream* aStream)
{
  if (!aStream->mNotifiedHasCurrentData && aStream->mHasCurrentData) {
    for (uint32_t j = 0; j < aStream->mListeners.Length(); ++j) {
      MediaStreamListener* l = aStream->mListeners[j];
      l->NotifyHasCurrentData(this);
    }
    aStream->mNotifiedHasCurrentData = true;
  }
}

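// Ensure aStream has one AudioOutputStream per audio track it should be
// playing: create streams for newly started tracks and shut down streams
// whose tracks are no longer present.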
void
MediaStreamGraphImpl::CreateOrDestroyAudioStreams(GraphTime aAudioOutputStartTime,
                                                  MediaStream* aStream)
{
  MOZ_ASSERT(mRealtime, "Should only attempt to create audio streams in real-time mode");

  nsAutoTArray<bool,2> audioOutputStreamsFound;
  for (uint32_t i = 0; i < aStream->mAudioOutputStreams.Length(); ++i) {
    audioOutputStreamsFound.AppendElement(false);
  }

  if (!aStream->mAudioOutputs.IsEmpty()) {
    for (StreamBuffer::TrackIter tracks(aStream->GetStreamBuffer(), MediaSegment::AUDIO);
         !tracks.IsEnded(); tracks.Next()) {
      uint32_t i;
      for (i = 0; i < audioOutputStreamsFound.Length(); ++i) {
        if (aStream->mAudioOutputStreams[i].mTrackID == tracks->GetID()) {
          break;
        }
      }
      if (i < audioOutputStreamsFound.Length()) {
        audioOutputStreamsFound[i] = true;
      } else {
        // No output stream created for this track yet. Check if it's time to
        // create one.
        GraphTime startTime =
          StreamTimeToGraphTime(aStream, tracks->GetStartTimeRoundDown(),
                                INCLUDE_TRAILING_BLOCKED_INTERVAL);
        if (startTime >= mStateComputedTime) {
          // The stream wants to play audio, but nothing will play for the
          // foreseeable future, so don't create the stream.
          continue;
        }

        // Allocating an AudioStream would be slow, so we finish the Init async
        MediaStream::AudioOutputStream* audioOutputStream =
          aStream->mAudioOutputStreams.AppendElement();
        audioOutputStream->mAudioPlaybackStartTime = aAudioOutputStartTime;
        audioOutputStream->mBlockedAudioTime = 0;
        audioOutputStream->mLastTickWritten = 0;
        audioOutputStream->mStream = new AudioStream();
        // XXX for now, allocate stereo output. But we need to fix this to
        // match the system's ideal channel configuration.
        // NOTE: we presume this is either fast or async-under-the-covers
        audioOutputStream->mStream->Init(2, IdealAudioRate(),
                                         AudioChannel::Normal,
                                         AudioStream::LowLatency);
        audioOutputStream->mTrackID = tracks->GetID();

        LogLatency(AsyncLatencyLogger::AudioStreamCreate,
                   reinterpret_cast<uint64_t>(aStream),
                   reinterpret_cast<int64_t>(audioOutputStream->mStream.get()));
      }
    }
  }

  for (int32_t i = audioOutputStreamsFound.Length() - 1; i >= 0; --i) {
    if (!audioOutputStreamsFound[i]) {
      aStream->mAudioOutputStreams[i].mStream->Shutdown();
      aStream->mAudioOutputStreams.RemoveElementAt(i);
    }
  }
}

TrackTicks
MediaStreamGraphImpl::PlayAudio(MediaStream* aStream,
                                GraphTime aFrom, GraphTime aTo)
{
  MOZ_ASSERT(mRealtime, "Should only attempt to play audio in realtime mode");

  TrackTicks ticksWritten = 0;
  // We compute the number of needed ticks by converting a difference of graph
  // time rather than by subtracting two converted stream times, to ensure that
  // the rounding between {Graph,Stream}Time and track ticks is not dependent
  // on the absolute value of the {Graph,Stream}Time, and so that the number of
  // ticks to play is the same for each cycle.
  TrackTicks ticksNeeded = TimeToTicksRoundDown(IdealAudioRate(), aTo) - TimeToTicksRoundDown(IdealAudioRate(), aFrom);

  if (aStream->mAudioOutputStreams.IsEmpty()) {
    return 0;
  }

  // When we're playing multiple copies of this stream at the same time, they're
  // perfectly correlated so adding volumes is the right thing to do.
  float volume = 0.0f;
  for (uint32_t i = 0; i < aStream->mAudioOutputs.Length(); ++i) {
    volume += aStream->mAudioOutputs[i].mVolume;
  }

  for (uint32_t i = 0; i < aStream->mAudioOutputStreams.Length(); ++i) {
    MediaStream::AudioOutputStream& audioOutput = aStream->mAudioOutputStreams[i];
    StreamBuffer::Track* track = aStream->mBuffer.FindTrack(audioOutput.mTrackID);
    AudioSegment* audio = track->Get<AudioSegment>();
    AudioSegment output;
    MOZ_ASSERT(track->GetRate() == IdealAudioRate());

    // offset and audioOutput.mLastTickWritten can differ by at most one sample,
    // because of the rounding issue. We track that to ensure we don't skip a
    // sample, or play a sample twice.
    TrackTicks offset = track->TimeToTicksRoundDown(GraphTimeToStreamTime(aStream, aFrom));
    if (!audioOutput.mLastTickWritten) {
      audioOutput.mLastTickWritten = offset;
    }
    if (audioOutput.mLastTickWritten != offset) {
      // If there is a global underrun of the MSG, this property won't hold, and
      // we reset the sample count tracking.
      if (mozilla::Abs(audioOutput.mLastTickWritten - offset) != 1) {
        audioOutput.mLastTickWritten = offset;
      } else {
        offset = audioOutput.mLastTickWritten;
      }
    }

    // We don't update aStream->mBufferStartTime here to account for
    // time spent blocked. Instead, we'll update it in UpdateCurrentTime after the
    // blocked period has completed. But we do need to make sure we play from the
    // right offsets in the stream buffer, even if we've already written silence for
    // some amount of blocked time after the current time.
    GraphTime t = aFrom;
    while (ticksNeeded) {
      GraphTime end;
      bool blocked = aStream->mBlocked.GetAt(t, &end);
      end = std::min(end, aTo);

      // Check how many ticks of sound we can provide if we are blocked some
      // time in the middle of this cycle.
      TrackTicks toWrite = 0;
      if (end >= aTo) {
        toWrite = ticksNeeded;
      } else {
        toWrite = TimeToTicksRoundDown(IdealAudioRate(), end - aFrom);
      }

      if (blocked) {
        output.InsertNullDataAtStart(toWrite);
        STREAM_LOG(PR_LOG_DEBUG+1, ("MediaStream %p writing %ld blocking-silence samples for %f to %f (%ld to %ld)\n",
                                    aStream, toWrite, MediaTimeToSeconds(t), MediaTimeToSeconds(end),
                                    offset, offset + toWrite));
        ticksNeeded -= toWrite;
      } else {
        TrackTicks endTicksNeeded = offset + toWrite;
        TrackTicks endTicksAvailable = audio->GetDuration();
        if (endTicksNeeded <= endTicksAvailable) {
          output.AppendSlice(*audio, offset, endTicksNeeded);
        } else {
          MOZ_ASSERT(track->IsEnded(), "Not enough data, and track not ended.");
          // If we are at the end of the track, maybe write the remaining
          // samples, and pad with/output silence.
          if (endTicksNeeded > endTicksAvailable &&
              offset < endTicksAvailable) {
            output.AppendSlice(*audio, offset, endTicksAvailable);
            ticksNeeded -= endTicksAvailable - offset;
            toWrite -= endTicksAvailable - offset;
          }
          output.AppendNullData(toWrite);
        }
        output.ApplyVolume(volume);
        STREAM_LOG(PR_LOG_DEBUG+1, ("MediaStream %p writing %ld samples for %f to %f (samples %ld to %ld)\n",
                                    aStream, toWrite, MediaTimeToSeconds(t), MediaTimeToSeconds(end),
                                    offset, endTicksNeeded));
        ticksNeeded -= toWrite;
      }
      t = end;
      offset += toWrite;
      audioOutput.mLastTickWritten += toWrite;
    }

    // Need unique id for stream & track - and we want it to match the inserter
    output.WriteTo(LATENCY_STREAM_ID(aStream, track->GetID()),
                   audioOutput.mStream, mMixer);
  }
  return ticksWritten;
}

static void
SetImageToBlackPixel(PlanarYCbCrImage* aImage)
{
  uint8_t blackPixel[] = { 0x10, 0x80, 0x80 };

  PlanarYCbCrData data;
  data.mYChannel = blackPixel;
  data.mCbChannel = blackPixel + 1;
  data.mCrChannel = blackPixel + 2;
  data.mYStride = data.mCbCrStride = 1;
  data.mPicSize = data.mYSize = data.mCbCrSize = IntSize(1, 1);
  aImage->SetData(data);
}

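// Select the video frame that should be visible slightly ahead of
// mCurrentTime and push it to each of aStream's VideoFrameContainer outputs.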
void
MediaStreamGraphImpl::PlayVideo(MediaStream* aStream)
{
  MOZ_ASSERT(mRealtime, "Should only attempt to play video in realtime mode");

  if (aStream->mVideoOutputs.IsEmpty())
    return;

  // Display the next frame a bit early. This is better than letting the current
  // frame be displayed for too long.
  GraphTime framePosition = mCurrentTime + MEDIA_GRAPH_TARGET_PERIOD_MS;
  NS_ASSERTION(framePosition >= aStream->mBufferStartTime, "frame position before buffer?");
  StreamTime frameBufferTime = GraphTimeToStreamTime(aStream, framePosition);

  TrackTicks start;
  const VideoFrame* frame = nullptr;
  StreamBuffer::Track* track;
  for (StreamBuffer::TrackIter tracks(aStream->GetStreamBuffer(), MediaSegment::VIDEO);
       !tracks.IsEnded(); tracks.Next()) {
    VideoSegment* segment = tracks->Get<VideoSegment>();
    TrackTicks thisStart;
    const VideoFrame* thisFrame =
      segment->GetFrameAt(tracks->TimeToTicksRoundDown(frameBufferTime), &thisStart);
    if (thisFrame && thisFrame->GetImage()) {
      start = thisStart;
      frame = thisFrame;
      track = tracks.get();
    }
  }
  if (!frame || *frame == aStream->mLastPlayedVideoFrame)
    return;

  STREAM_LOG(PR_LOG_DEBUG+1, ("MediaStream %p writing video frame %p (%dx%d)",
                              aStream, frame->GetImage(), frame->GetIntrinsicSize().width,
                              frame->GetIntrinsicSize().height));
  GraphTime startTime = StreamTimeToGraphTime(aStream,
      track->TicksToTimeRoundDown(start), INCLUDE_TRAILING_BLOCKED_INTERVAL);
  TimeStamp targetTime = mCurrentTimeStamp +
      TimeDuration::FromMilliseconds(double(startTime - mCurrentTime));
  for (uint32_t i = 0; i < aStream->mVideoOutputs.Length(); ++i) {
    VideoFrameContainer* output = aStream->mVideoOutputs[i];

    if (frame->GetForceBlack()) {
      nsRefPtr<Image> image =
        output->GetImageContainer()->CreateImage(ImageFormat::PLANAR_YCBCR);
      if (image) {
        // Sets the image to a single black pixel, which will be scaled to fill
        // the rendered size.
        SetImageToBlackPixel(static_cast<PlanarYCbCrImage*>(image.get()));
      }
      output->SetCurrentFrame(frame->GetIntrinsicSize(), image,
                              targetTime);
    } else {
      output->SetCurrentFrame(frame->GetIntrinsicSize(), frame->GetImage(),
                              targetTime);
    }

    nsCOMPtr<nsIRunnable> event =
      NS_NewRunnableMethod(output, &VideoFrameContainer::Invalidate);
    NS_DispatchToMainThread(event, NS_DISPATCH_NORMAL);
  }
  if (!aStream->mNotifiedFinished) {
    aStream->mLastPlayedVideoFrame = *frame;
  }
}

bool
MediaStreamGraphImpl::ShouldUpdateMainThread()
{
  if (mRealtime) {
    return true;
  }

  TimeStamp now = TimeStamp::Now();
  if ((now - mLastMainThreadUpdate).ToMilliseconds() > MEDIA_GRAPH_TARGET_PERIOD_MS) {
    mLastMainThreadUpdate = now;
    return true;
  }
  return false;
}

void
MediaStreamGraphImpl::PrepareUpdatesToMainThreadState(bool aFinalUpdate)
{
  mMonitor.AssertCurrentThreadOwns();

  // We don't want to send timing updates to the main thread too frequently
  // when we are not running in realtime.
  if (aFinalUpdate || ShouldUpdateMainThread()) {
    mStreamUpdates.SetCapacity(mStreamUpdates.Length() + mStreams.Length());
    for (uint32_t i = 0; i < mStreams.Length(); ++i) {
      MediaStream* stream = mStreams[i];
      if (!stream->MainThreadNeedsUpdates()) {
        continue;
      }
      StreamUpdate* update = mStreamUpdates.AppendElement();
      update->mGraphUpdateIndex = stream->mGraphUpdateIndices.GetAt(mCurrentTime);
      update->mStream = stream;
      update->mNextMainThreadCurrentTime =
        GraphTimeToStreamTime(stream, mCurrentTime);
      update->mNextMainThreadFinished = stream->mNotifiedFinished;
    }
    if (!mPendingUpdateRunnables.IsEmpty()) {
      mUpdateRunnables.MoveElementsFrom(mPendingUpdateRunnables);
    }
  }

  // Don't send the message to the main thread if it's not going to have
  // any work to do.
  if (aFinalUpdate ||
      !mUpdateRunnables.IsEmpty() ||
      !mStreamUpdates.IsEmpty()) {
    EnsureStableStateEventPosted();
  }
}

void
MediaStreamGraphImpl::EnsureImmediateWakeUpLocked(MonitorAutoLock& aLock)
{
  if (mWaitState == WAITSTATE_WAITING_FOR_NEXT_ITERATION ||
      mWaitState == WAITSTATE_WAITING_INDEFINITELY) {
    mWaitState = WAITSTATE_WAKING_UP;
    aLock.Notify();
  }
}

void
MediaStreamGraphImpl::EnsureNextIteration()
{
  MonitorAutoLock lock(mMonitor);
  EnsureNextIterationLocked(lock);
}

void
MediaStreamGraphImpl::EnsureNextIterationLocked(MonitorAutoLock& aLock)
{
  if (mNeedAnotherIteration)
    return;
  mNeedAnotherIteration = true;
  if (mWaitState == WAITSTATE_WAITING_INDEFINITELY) {
    mWaitState = WAITSTATE_WAKING_UP;
    aLock.Notify();
  }
}

/**
 * Returns smallest value of t such that
 * TimeToTicksRoundUp(aSampleRate, t) is a multiple of WEBAUDIO_BLOCK_SIZE
 * and floor(TimeToTicksRoundUp(aSampleRate, t)/WEBAUDIO_BLOCK_SIZE) >
 * floor(TimeToTicksRoundUp(aSampleRate, aTime)/WEBAUDIO_BLOCK_SIZE).
 */
static GraphTime
RoundUpToNextAudioBlock(TrackRate aSampleRate, GraphTime aTime)
{
  TrackTicks ticks = TimeToTicksRoundUp(aSampleRate, aTime);
  uint64_t block = ticks >> WEBAUDIO_BLOCK_SIZE_BITS;
  uint64_t nextBlock = block + 1;
  TrackTicks nextTicks = nextBlock << WEBAUDIO_BLOCK_SIZE_BITS;
  // Find the smallest time t such that TimeToTicksRoundUp(aSampleRate,t) == nextTicks
  // That's the smallest integer t such that
  //   t*aSampleRate > ((nextTicks - 1) << MEDIA_TIME_FRAC_BITS)
  // Both sides are integers, so this is equivalent to
  //   t*aSampleRate >= ((nextTicks - 1) << MEDIA_TIME_FRAC_BITS) + 1
  //   t >= (((nextTicks - 1) << MEDIA_TIME_FRAC_BITS) + 1)/aSampleRate
  //   t = ceil((((nextTicks - 1) << MEDIA_TIME_FRAC_BITS) + 1)/aSampleRate)
  // Using integer division, that's
  //   t = (((nextTicks - 1) << MEDIA_TIME_FRAC_BITS) + 1 + aSampleRate - 1)/aSampleRate
  //     = ((nextTicks - 1) << MEDIA_TIME_FRAC_BITS)/aSampleRate + 1
  return ((nextTicks - 1) << MEDIA_TIME_FRAC_BITS)/aSampleRate + 1;
}

void
MediaStreamGraphImpl::ProduceDataForStreamsBlockByBlock(uint32_t aStreamIndex,
                                                        TrackRate aSampleRate,
                                                        GraphTime aFrom,
                                                        GraphTime aTo)
{
  GraphTime t = aFrom;
  while (t < aTo) {
    GraphTime next = RoundUpToNextAudioBlock(aSampleRate, t);
    for (uint32_t i = aStreamIndex; i < mStreams.Length(); ++i) {
      ProcessedMediaStream* ps = mStreams[i]->AsProcessedStream();
      if (ps) {
        ps->ProcessInput(t, next, (next == aTo) ? ProcessedMediaStream::ALLOW_FINISH : 0);
      }
    }
    t = next;
  }
  NS_ASSERTION(t == aTo, "Something went wrong with rounding to block boundaries");
}

bool
|
|
|
|
MediaStreamGraphImpl::AllFinishedStreamsNotified()
|
|
|
|
{
|
|
|
|
for (uint32_t i = 0; i < mStreams.Length(); ++i) {
|
|
|
|
MediaStream* s = mStreams[i];
|
|
|
|
if (s->mFinished && !s->mNotifiedFinished) {
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
2014-01-15 03:13:07 -08:00
|
|
|
void
|
|
|
|
MediaStreamGraphImpl::PauseAllAudioOutputs()
|
|
|
|
{
|
|
|
|
for (uint32_t i = 0; i < mStreams.Length(); ++i) {
|
|
|
|
MediaStream* s = mStreams[i];
|
|
|
|
for (uint32_t j = 0; j < s->mAudioOutputStreams.Length(); ++j) {
|
|
|
|
s->mAudioOutputStreams[j].mStream->Pause();
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
void
|
|
|
|
MediaStreamGraphImpl::ResumeAllAudioOutputs()
|
|
|
|
{
|
|
|
|
for (uint32_t i = 0; i < mStreams.Length(); ++i) {
|
|
|
|
MediaStream* s = mStreams[i];
|
|
|
|
for (uint32_t j = 0; j < s->mAudioOutputStreams.Length(); ++j) {
|
|
|
|
s->mAudioOutputStreams[j].mStream->Resume();
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2014-03-11 16:06:57 -07:00
|
|
|
struct AutoProfilerUnregisterThread
|
|
|
|
{
|
|
|
|
// The empty ctor is used to silence a pre-4.8.0 GCC unused variable warning.
|
|
|
|
AutoProfilerUnregisterThread()
|
|
|
|
{
|
|
|
|
}
|
|
|
|
|
|
|
|
~AutoProfilerUnregisterThread()
|
|
|
|
{
|
|
|
|
profiler_unregister_thread();
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
2012-04-29 20:11:26 -07:00
|
|
|
void
|
|
|
|
MediaStreamGraphImpl::RunThread()
|
|
|
|
{
|
|
|
|
nsTArray<MessageBlock> messageQueue;
|
|
|
|
{
|
|
|
|
MonitorAutoLock lock(mMonitor);
|
|
|
|
messageQueue.SwapElements(mMessageQueue);
|
|
|
|
}
|
|
|
|
NS_ASSERTION(!messageQueue.IsEmpty(),
|
|
|
|
"Shouldn't have started a graph with empty message queue!");
|
|
|
|
|
2013-05-16 16:30:41 -07:00
|
|
|
uint32_t ticksProcessed = 0;
|
2014-03-11 16:06:57 -07:00
|
|
|
AutoProfilerUnregisterThread autoUnregister;
|
2013-05-16 16:30:41 -07:00
|
|
|
|
2012-04-29 20:11:26 -07:00
|
|
|
for (;;) {
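// One iteration of the graph control loop: service memory report requests,
// advance mCurrentTime, run queued control messages, recompute blocking,
// produce and play stream data, then send updates to the main thread and
// wait (or shut down) before the next iteration.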
|
2014-04-13 11:08:10 -07:00
|
|
|
// Check if a memory report has been requested.
|
|
|
|
{
|
|
|
|
MonitorAutoLock lock(mMemoryReportMonitor);
|
|
|
|
if (mNeedsMemoryReport) {
|
|
|
|
mNeedsMemoryReport = false;
|
|
|
|
|
|
|
|
for (uint32_t i = 0; i < mStreams.Length(); ++i) {
|
|
|
|
AudioNodeStream* stream = mStreams[i]->AsAudioNodeStream();
|
|
|
|
if (stream) {
|
|
|
|
AudioNodeSizes usage;
|
|
|
|
stream->SizeOfAudioNodesIncludingThis(MallocSizeOf, usage);
|
|
|
|
mAudioStreamSizes.AppendElement(usage);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
lock.Notify();
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2012-04-29 20:11:26 -07:00
|
|
|
// Update mCurrentTime to the min of the playing audio times, or advance it
// by the wall-clock time change if no audio is playing.
|
|
|
|
UpdateCurrentTime();
|
|
|
|
|
|
|
|
// Calculate independent action times for each batch of messages (each
|
|
|
|
// batch corresponding to an event loop task). This isolates the performance
|
|
|
|
// of different scripts to some extent.
|
2012-08-22 08:56:38 -07:00
|
|
|
for (uint32_t i = 0; i < messageQueue.Length(); ++i) {
|
2012-04-29 20:11:26 -07:00
|
|
|
mProcessingGraphUpdateIndex = messageQueue[i].mGraphUpdateIndex;
|
|
|
|
nsTArray<nsAutoPtr<ControlMessage> >& messages = messageQueue[i].mMessages;
|
|
|
|
|
2012-08-22 08:56:38 -07:00
|
|
|
for (uint32_t j = 0; j < messages.Length(); ++j) {
|
2012-07-31 05:17:21 -07:00
|
|
|
messages[j]->Run();
|
2012-04-29 20:11:26 -07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
messageQueue.Clear();
|
|
|
|
|
2014-01-29 05:34:35 -08:00
|
|
|
if (mStreamOrderDirty) {
|
|
|
|
UpdateStreamOrder();
|
|
|
|
}
|
2012-07-31 05:17:21 -07:00
|
|
|
|
2014-04-17 02:15:47 -07:00
|
|
|
TrackRate sampleRate;
|
2013-05-24 10:09:29 -07:00
|
|
|
// Find the sampling rate that we need to use for non-realtime graphs.
|
|
|
|
if (!mRealtime) {
|
|
|
|
for (uint32_t i = 0; i < mStreams.Length(); ++i) {
|
|
|
|
AudioNodeStream* n = mStreams[i]->AsAudioNodeStream();
|
|
|
|
if (n) {
|
|
|
|
// We know that the rest of the streams will run at the same rate.
|
|
|
|
sampleRate = n->SampleRate();
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
2014-04-17 02:15:47 -07:00
|
|
|
} else {
|
|
|
|
sampleRate = IdealAudioRate();
|
2013-05-24 10:09:29 -07:00
|
|
|
}
|
|
|
|
|
2012-07-20 12:36:03 -07:00
|
|
|
GraphTime endBlockingDecisions =
|
2013-08-02 02:27:58 -07:00
|
|
|
RoundUpToNextAudioBlock(sampleRate, mCurrentTime + MillisecondsToMediaTime(AUDIO_TARGET_MS));
|
2012-07-20 12:36:03 -07:00
|
|
|
bool ensureNextIteration = false;
|
2012-07-31 05:17:21 -07:00
|
|
|
|
|
|
|
// Grab pending stream input.
|
2012-08-22 08:56:38 -07:00
|
|
|
for (uint32_t i = 0; i < mStreams.Length(); ++i) {
|
2012-04-29 20:11:40 -07:00
|
|
|
SourceMediaStream* is = mStreams[i]->AsSourceStream();
|
|
|
|
if (is) {
|
2012-05-31 23:26:17 -07:00
|
|
|
UpdateConsumptionState(is);
|
2012-07-20 12:36:03 -07:00
|
|
|
ExtractPendingInput(is, endBlockingDecisions, &ensureNextIteration);
|
2012-04-29 20:11:40 -07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2014-03-04 15:15:41 -08:00
|
|
|
// If the loop was woken up so soon that mCurrentTime barely advances, we
// end up with endBlockingDecisions == mStateComputedTime.
// Since stream blocking is computed over the interval
// [mStateComputedTime, endBlockingDecisions), it won't be computed at all.
// Ensure another iteration so that pending blocking changes are computed
// in the next loop.
|
|
|
|
if (endBlockingDecisions == mStateComputedTime) {
|
|
|
|
ensureNextIteration = true;
|
|
|
|
}
|
|
|
|
|
2012-07-31 05:17:21 -07:00
|
|
|
// Figure out which streams are blocked and when.
|
2012-07-31 05:17:21 -07:00
|
|
|
GraphTime prevComputedTime = mStateComputedTime;
|
2012-07-20 12:36:03 -07:00
|
|
|
RecomputeBlocking(endBlockingDecisions);
|
2012-04-29 20:11:26 -07:00
|
|
|
|
2012-07-31 05:17:21 -07:00
|
|
|
// Play stream contents.
|
2012-04-29 20:11:26 -07:00
|
|
|
bool allBlockedForever = true;
|
2014-03-04 13:53:55 -08:00
|
|
|
// True when we've done ProcessInput for all processed streams.
|
2013-01-13 14:46:57 -08:00
|
|
|
bool doneAllProducing = false;
|
2014-03-24 03:06:06 -07:00
|
|
|
// This is the number of frames that are written to the AudioStreams during
// this cycle.
|
|
|
|
TrackTicks ticksPlayed = 0;
|
2012-04-29 20:11:26 -07:00
|
|
|
// Figure out what each stream wants to do
|
2012-08-22 08:56:38 -07:00
|
|
|
for (uint32_t i = 0; i < mStreams.Length(); ++i) {
|
2012-04-29 20:11:26 -07:00
|
|
|
MediaStream* stream = mStreams[i];
|
2013-12-09 16:49:03 -08:00
|
|
|
if (!doneAllProducing) {
|
2013-01-13 14:46:57 -08:00
|
|
|
ProcessedMediaStream* ps = stream->AsProcessedStream();
|
|
|
|
if (ps) {
|
|
|
|
AudioNodeStream* n = stream->AsAudioNodeStream();
|
|
|
|
if (n) {
|
2013-05-24 10:09:29 -07:00
|
|
|
#ifdef DEBUG
|
|
|
|
// Verify that the sampling rate for all of the following streams is the same
|
|
|
|
for (uint32_t j = i + 1; j < mStreams.Length(); ++j) {
|
|
|
|
AudioNodeStream* nextStream = mStreams[j]->AsAudioNodeStream();
|
|
|
|
if (nextStream) {
|
|
|
|
MOZ_ASSERT(n->SampleRate() == nextStream->SampleRate(),
|
|
|
|
"All AudioNodeStreams in the graph must have the same sampling rate");
|
|
|
|
}
|
|
|
|
}
|
|
|
|
#endif
|
2013-01-13 14:46:57 -08:00
|
|
|
// Since an AudioNodeStream is present, go ahead and
|
|
|
|
// produce audio block by block for all the rest of the streams.
|
2013-05-24 10:09:29 -07:00
|
|
|
ProduceDataForStreamsBlockByBlock(i, n->SampleRate(), prevComputedTime, mStateComputedTime);
|
|
|
|
ticksProcessed += TimeToTicksRoundDown(n->SampleRate(), mStateComputedTime - prevComputedTime);
|
2013-01-13 14:46:57 -08:00
|
|
|
doneAllProducing = true;
|
|
|
|
} else {
|
2014-03-04 13:53:55 -08:00
|
|
|
ps->ProcessInput(prevComputedTime, mStateComputedTime,
|
|
|
|
ProcessedMediaStream::ALLOW_FINISH);
|
2014-02-28 06:11:37 -08:00
|
|
|
NS_WARN_IF_FALSE(stream->mBuffer.GetEnd() >=
|
|
|
|
GraphTimeToStreamTime(stream, mStateComputedTime),
|
|
|
|
"Stream did not produce enough data");
|
2013-01-13 14:46:57 -08:00
|
|
|
}
|
|
|
|
}
|
2012-07-31 05:17:21 -07:00
|
|
|
}
|
2012-09-19 17:47:51 -07:00
|
|
|
NotifyHasCurrentData(stream);
|
2013-05-08 04:44:07 -07:00
|
|
|
if (mRealtime) {
|
|
|
|
// Only play back audio and video in real-time mode
|
|
|
|
CreateOrDestroyAudioStreams(prevComputedTime, stream);
|
2014-03-24 03:06:06 -07:00
|
|
|
TrackTicks ticksPlayedForThisStream = PlayAudio(stream, prevComputedTime, mStateComputedTime);
|
|
|
|
if (!ticksPlayed) {
|
|
|
|
ticksPlayed = ticksPlayedForThisStream;
|
|
|
|
} else {
|
|
|
|
MOZ_ASSERT(!ticksPlayedForThisStream || ticksPlayedForThisStream == ticksPlayed,
|
|
|
|
"Each stream should have the same number of frame.");
|
|
|
|
}
|
2013-05-08 04:44:07 -07:00
|
|
|
PlayVideo(stream);
|
|
|
|
}
|
2012-04-29 20:11:40 -07:00
|
|
|
SourceMediaStream* is = stream->AsSourceStream();
|
|
|
|
if (is) {
|
|
|
|
UpdateBufferSufficiencyState(is);
|
|
|
|
}
|
2012-04-29 20:11:26 -07:00
|
|
|
GraphTime end;
|
|
|
|
if (!stream->mBlocked.GetAt(mCurrentTime, &end) || end < GRAPH_TIME_MAX) {
|
|
|
|
allBlockedForever = false;
|
|
|
|
}
|
|
|
|
}
|
2014-03-24 03:06:06 -07:00
|
|
|
|
|
|
|
if (mMixer) {
|
|
|
|
mMixer->FinishMixing();
|
|
|
|
}
|
|
|
|
|
2014-01-15 03:13:07 -08:00
|
|
|
if (ensureNextIteration || !allBlockedForever) {
|
2012-04-29 20:11:26 -07:00
|
|
|
EnsureNextIteration();
|
|
|
|
}
|
|
|
|
|
2012-07-31 05:17:21 -07:00
|
|
|
// Send updates to the main thread and wait for the next control loop
|
|
|
|
// iteration.
|
2012-04-29 20:11:26 -07:00
|
|
|
{
|
2012-08-09 04:29:47 -07:00
|
|
|
MonitorAutoLock lock(mMonitor);
|
2013-12-08 21:08:02 -08:00
|
|
|
bool finalUpdate = mForceShutDown ||
|
|
|
|
(mCurrentTime >= mEndTime && AllFinishedStreamsNotified()) ||
|
|
|
|
(IsEmpty() && mMessageQueue.IsEmpty());
|
2013-06-17 06:06:34 -07:00
|
|
|
PrepareUpdatesToMainThreadState(finalUpdate);
|
|
|
|
if (finalUpdate) {
|
2012-04-29 20:11:26 -07:00
|
|
|
// Enter shutdown mode. The stable-state handler will detect this
|
|
|
|
// and complete shutdown. Destroy any streams immediately.
|
2013-11-20 19:02:42 -08:00
|
|
|
STREAM_LOG(PR_LOG_DEBUG, ("MediaStreamGraph %p waiting for main thread cleanup", this));
|
2014-02-10 16:04:58 -08:00
|
|
|
// We'll shut down this graph object if it does not get restarted.
|
2012-04-29 20:11:26 -07:00
|
|
|
mLifecycleState = LIFECYCLE_WAITING_FOR_MAIN_THREAD_CLEANUP;
|
2012-08-09 04:29:47 -07:00
|
|
|
// No need to Destroy streams here. The main-thread owner of each
|
2014-02-10 16:04:58 -08:00
|
|
|
// stream is responsible for calling Destroy on them.
|
2012-04-29 20:11:26 -07:00
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2013-05-08 04:44:07 -07:00
|
|
|
// No need to wait in non-realtime mode, just churn through the input as soon
|
|
|
|
// as possible.
|
|
|
|
if (mRealtime) {
|
|
|
|
PRIntervalTime timeout = PR_INTERVAL_NO_TIMEOUT;
|
|
|
|
TimeStamp now = TimeStamp::Now();
|
2014-01-15 03:13:07 -08:00
|
|
|
bool pausedOutputs = false;
|
2013-05-08 04:44:07 -07:00
|
|
|
if (mNeedAnotherIteration) {
|
|
|
|
int64_t timeoutMS = MEDIA_GRAPH_TARGET_PERIOD_MS -
|
|
|
|
int64_t((now - mCurrentTimeStamp).ToMilliseconds());
|
|
|
|
// Make sure timeoutMS doesn't overflow 32 bits by waking up at
|
|
|
|
// least once a minute, if we need to wake up at all
|
|
|
|
timeoutMS = std::max<int64_t>(0, std::min<int64_t>(timeoutMS, 60*1000));
|
|
|
|
timeout = PR_MillisecondsToInterval(uint32_t(timeoutMS));
|
2013-11-20 19:02:42 -08:00
|
|
|
STREAM_LOG(PR_LOG_DEBUG+1, ("Waiting for next iteration; at %f, timeout=%f",
|
|
|
|
(now - mInitialTimeStamp).ToSeconds(), timeoutMS/1000.0));
|
2013-05-08 04:44:07 -07:00
|
|
|
mWaitState = WAITSTATE_WAITING_FOR_NEXT_ITERATION;
|
|
|
|
} else {
|
|
|
|
mWaitState = WAITSTATE_WAITING_INDEFINITELY;
|
2014-01-15 03:13:07 -08:00
|
|
|
PauseAllAudioOutputs();
|
|
|
|
pausedOutputs = true;
|
2013-05-08 04:44:07 -07:00
|
|
|
}
|
|
|
|
if (timeout > 0) {
|
|
|
|
mMonitor.Wait(timeout);
|
2013-11-20 19:02:42 -08:00
|
|
|
STREAM_LOG(PR_LOG_DEBUG+1, ("Resuming after timeout; at %f, elapsed=%f",
|
|
|
|
(TimeStamp::Now() - mInitialTimeStamp).ToSeconds(),
|
|
|
|
(TimeStamp::Now() - now).ToSeconds()));
|
2013-05-08 04:44:07 -07:00
|
|
|
}
|
2014-01-15 03:13:07 -08:00
|
|
|
if (pausedOutputs) {
|
|
|
|
ResumeAllAudioOutputs();
|
|
|
|
}
|
2012-04-29 20:11:26 -07:00
|
|
|
}
|
|
|
|
mWaitState = WAITSTATE_RUNNING;
|
|
|
|
mNeedAnotherIteration = false;
|
|
|
|
messageQueue.SwapElements(mMessageQueue);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
void
|
|
|
|
MediaStreamGraphImpl::ApplyStreamUpdate(StreamUpdate* aUpdate)
|
|
|
|
{
|
|
|
|
mMonitor.AssertCurrentThreadOwns();
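// Copy the snapshot taken on the graph thread into the fields read by the
// main thread, then notify the DOM wrapper and any main-thread listeners.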
|
|
|
|
|
|
|
|
MediaStream* stream = aUpdate->mStream;
|
|
|
|
if (!stream)
|
|
|
|
return;
|
|
|
|
stream->mMainThreadCurrentTime = aUpdate->mNextMainThreadCurrentTime;
|
|
|
|
stream->mMainThreadFinished = aUpdate->mNextMainThreadFinished;
|
2012-08-19 21:20:44 -07:00
|
|
|
|
2013-07-24 19:07:34 -07:00
|
|
|
if (stream->mWrapper) {
|
|
|
|
stream->mWrapper->NotifyStreamStateChanged();
|
|
|
|
}
|
2012-08-22 08:56:38 -07:00
|
|
|
for (int32_t i = stream->mMainThreadListeners.Length() - 1; i >= 0; --i) {
|
2012-08-19 21:20:44 -07:00
|
|
|
stream->mMainThreadListeners[i]->NotifyMainThreadStateChanged();
|
|
|
|
}
|
2012-04-29 20:11:26 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
void
|
|
|
|
MediaStreamGraphImpl::ShutdownThreads()
|
|
|
|
{
|
|
|
|
NS_ASSERTION(NS_IsMainThread(), "Must be called on main thread");
|
|
|
|
// mGraph's thread is not running so it's OK to do whatever here
|
2013-11-20 19:02:42 -08:00
|
|
|
STREAM_LOG(PR_LOG_DEBUG, ("Stopping threads for MediaStreamGraph %p", this));
|
2012-04-29 20:11:26 -07:00
|
|
|
|
|
|
|
if (mThread) {
|
|
|
|
mThread->Shutdown();
|
2012-07-30 07:20:58 -07:00
|
|
|
mThread = nullptr;
|
2012-04-29 20:11:26 -07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
void
|
|
|
|
MediaStreamGraphImpl::ForceShutDown()
|
|
|
|
{
|
|
|
|
NS_ASSERTION(NS_IsMainThread(), "Must be called on main thread");
|
2013-11-20 19:02:42 -08:00
|
|
|
STREAM_LOG(PR_LOG_DEBUG, ("MediaStreamGraph %p ForceShutdown", this));
|
2012-04-29 20:11:26 -07:00
|
|
|
{
|
|
|
|
MonitorAutoLock lock(mMonitor);
|
|
|
|
mForceShutDown = true;
|
|
|
|
EnsureImmediateWakeUpLocked(lock);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
namespace {
|
|
|
|
|
2013-07-19 07:40:57 -07:00
|
|
|
class MediaStreamGraphInitThreadRunnable : public nsRunnable {
|
|
|
|
public:
|
|
|
|
explicit MediaStreamGraphInitThreadRunnable(MediaStreamGraphImpl* aGraph)
|
|
|
|
: mGraph(aGraph)
|
|
|
|
{
|
|
|
|
}
|
|
|
|
NS_IMETHOD Run()
|
|
|
|
{
|
|
|
|
char aLocal;
|
|
|
|
profiler_register_thread("MediaStreamGraph", &aLocal);
|
|
|
|
mGraph->RunThread();
|
|
|
|
return NS_OK;
|
|
|
|
}
|
|
|
|
private:
|
|
|
|
MediaStreamGraphImpl* mGraph;
|
|
|
|
};
|
|
|
|
|
2012-04-29 20:11:26 -07:00
|
|
|
class MediaStreamGraphThreadRunnable : public nsRunnable {
|
|
|
|
public:
|
2013-02-01 12:13:32 -08:00
|
|
|
explicit MediaStreamGraphThreadRunnable(MediaStreamGraphImpl* aGraph)
|
|
|
|
: mGraph(aGraph)
|
|
|
|
{
|
|
|
|
}
|
2012-04-29 20:11:26 -07:00
|
|
|
NS_IMETHOD Run()
|
|
|
|
{
|
2013-02-01 12:13:32 -08:00
|
|
|
mGraph->RunThread();
|
2012-04-29 20:11:26 -07:00
|
|
|
return NS_OK;
|
|
|
|
}
|
2013-02-01 12:13:32 -08:00
|
|
|
private:
|
|
|
|
MediaStreamGraphImpl* mGraph;
|
2012-04-29 20:11:26 -07:00
|
|
|
};
|
|
|
|
|
|
|
|
class MediaStreamGraphShutDownRunnable : public nsRunnable {
|
|
|
|
public:
|
|
|
|
MediaStreamGraphShutDownRunnable(MediaStreamGraphImpl* aGraph) : mGraph(aGraph) {}
|
|
|
|
NS_IMETHOD Run()
|
|
|
|
{
|
|
|
|
NS_ASSERTION(mGraph->mDetectedNotRunning,
|
|
|
|
"We should know the graph thread control loop isn't running!");
|
2013-05-02 22:02:55 -07:00
|
|
|
|
2013-05-07 22:16:35 -07:00
|
|
|
mGraph->ShutdownThreads();
|
|
|
|
|
2012-04-29 20:11:26 -07:00
|
|
|
// mGraph's thread is not running so it's OK to do whatever here
|
|
|
|
if (mGraph->IsEmpty()) {
|
2014-02-10 16:04:58 -08:00
|
|
|
// mGraph is no longer needed, so delete it.
|
2014-04-13 11:08:10 -07:00
|
|
|
mGraph->Destroy();
|
2012-04-29 20:11:26 -07:00
|
|
|
} else {
|
2014-02-10 16:04:58 -08:00
|
|
|
// The graph is not empty. We must be in a forced shutdown, or a
|
|
|
|
// non-realtime graph that has finished processing. Some later
|
|
|
|
// AppendMessage will detect that the manager has been emptied, and
|
|
|
|
// delete it.
|
|
|
|
NS_ASSERTION(mGraph->mForceShutDown || !mGraph->mRealtime,
|
|
|
|
"Not in forced shutdown?");
|
2013-05-02 22:02:55 -07:00
|
|
|
for (uint32_t i = 0; i < mGraph->mStreams.Length(); ++i) {
|
|
|
|
DOMMediaStream* s = mGraph->mStreams[i]->GetWrapper();
|
|
|
|
if (s) {
|
|
|
|
s->NotifyMediaStreamGraphShutdown();
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2012-04-29 20:11:26 -07:00
|
|
|
mGraph->mLifecycleState =
|
|
|
|
MediaStreamGraphImpl::LIFECYCLE_WAITING_FOR_STREAM_DESTRUCTION;
|
|
|
|
}
|
|
|
|
return NS_OK;
|
|
|
|
}
|
|
|
|
private:
|
|
|
|
MediaStreamGraphImpl* mGraph;
|
|
|
|
};
|
|
|
|
|
|
|
|
class MediaStreamGraphStableStateRunnable : public nsRunnable {
|
|
|
|
public:
|
2013-02-01 12:20:32 -08:00
|
|
|
explicit MediaStreamGraphStableStateRunnable(MediaStreamGraphImpl* aGraph)
|
|
|
|
: mGraph(aGraph)
|
|
|
|
{
|
|
|
|
}
|
2012-04-29 20:11:26 -07:00
|
|
|
NS_IMETHOD Run()
|
|
|
|
{
|
2013-02-01 12:20:32 -08:00
|
|
|
if (mGraph) {
|
|
|
|
mGraph->RunInStableState();
|
2012-04-29 20:11:26 -07:00
|
|
|
}
|
|
|
|
return NS_OK;
|
|
|
|
}
|
2013-02-01 12:20:32 -08:00
|
|
|
private:
|
|
|
|
MediaStreamGraphImpl* mGraph;
|
2012-04-29 20:11:26 -07:00
|
|
|
};
|
|
|
|
|
|
|
|
/*
|
|
|
|
* Control messages forwarded from main thread to graph manager thread
|
|
|
|
*/
|
|
|
|
class CreateMessage : public ControlMessage {
|
|
|
|
public:
|
|
|
|
CreateMessage(MediaStream* aStream) : ControlMessage(aStream) {}
|
2013-05-29 08:38:39 -07:00
|
|
|
virtual void Run() MOZ_OVERRIDE
|
2012-04-29 20:11:26 -07:00
|
|
|
{
|
|
|
|
mStream->GraphImpl()->AddStream(mStream);
|
|
|
|
mStream->Init();
|
|
|
|
}
|
2013-05-29 08:38:39 -07:00
|
|
|
virtual void RunDuringShutdown() MOZ_OVERRIDE
|
|
|
|
{
|
|
|
|
// Make sure to run this message during shutdown too, so that we balance
// the number of streams registered with the graph as they're destroyed
// during shutdown.
|
|
|
|
Run();
|
|
|
|
}
|
2012-04-29 20:11:26 -07:00
|
|
|
};
|
|
|
|
|
2012-06-18 19:30:09 -07:00
|
|
|
class MediaStreamGraphShutdownObserver MOZ_FINAL : public nsIObserver
|
2012-04-29 20:11:26 -07:00
|
|
|
{
|
|
|
|
public:
|
|
|
|
NS_DECL_ISUPPORTS
|
|
|
|
NS_DECL_NSIOBSERVER
|
|
|
|
};
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
void
|
|
|
|
MediaStreamGraphImpl::RunInStableState()
|
|
|
|
{
|
|
|
|
NS_ASSERTION(NS_IsMainThread(), "Must be called on main thread");
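// Applies pending stream updates, starts or revives the graph thread when
// there are messages to process, and drives the shutdown state machine.
// Runnables and shutdown control messages are collected under mMonitor and
// run after the lock is released.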
|
|
|
|
|
|
|
|
nsTArray<nsCOMPtr<nsIRunnable> > runnables;
|
2012-07-31 05:17:22 -07:00
|
|
|
// When we're doing a forced shutdown, pending control messages may be
|
|
|
|
// run on the main thread via RunDuringShutdown. Those messages must
|
|
|
|
// run without the graph monitor being held. So, we collect them here.
|
|
|
|
nsTArray<nsAutoPtr<ControlMessage> > controlMessagesToRunDuringShutdown;
|
2012-04-29 20:11:26 -07:00
|
|
|
|
|
|
|
{
|
|
|
|
MonitorAutoLock lock(mMonitor);
|
|
|
|
mPostedRunInStableStateEvent = false;
|
|
|
|
|
|
|
|
runnables.SwapElements(mUpdateRunnables);
|
2012-08-22 08:56:38 -07:00
|
|
|
for (uint32_t i = 0; i < mStreamUpdates.Length(); ++i) {
|
2012-04-29 20:11:26 -07:00
|
|
|
StreamUpdate* update = &mStreamUpdates[i];
|
|
|
|
if (update->mStream) {
|
|
|
|
ApplyStreamUpdate(update);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
mStreamUpdates.Clear();
|
|
|
|
|
2014-01-06 12:09:29 -08:00
|
|
|
// Don't start the thread for a non-realtime graph until it has been
|
|
|
|
// explicitly started by StartNonRealtimeProcessing.
|
|
|
|
if (mLifecycleState == LIFECYCLE_THREAD_NOT_STARTED &&
|
|
|
|
(mRealtime || mNonRealtimeProcessing)) {
|
2012-04-29 20:11:26 -07:00
|
|
|
mLifecycleState = LIFECYCLE_RUNNING;
|
|
|
|
// Start the thread now. We couldn't start it earlier because
|
|
|
|
// the graph might exit immediately on finding it has no streams. The
|
|
|
|
// first message for a new graph must create a stream.
|
2013-07-19 07:40:57 -07:00
|
|
|
nsCOMPtr<nsIRunnable> event = new MediaStreamGraphInitThreadRunnable(this);
|
2013-03-07 00:53:45 -08:00
|
|
|
NS_NewNamedThread("MediaStreamGrph", getter_AddRefs(mThread), event);
|
2012-04-29 20:11:26 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
if (mCurrentTaskMessageQueue.IsEmpty()) {
|
|
|
|
if (mLifecycleState == LIFECYCLE_WAITING_FOR_MAIN_THREAD_CLEANUP && IsEmpty()) {
|
|
|
|
// Complete shutdown. First, ensure that this graph is no longer used.
|
|
|
|
// A new graph will be created if one is needed.
|
2013-11-20 19:02:42 -08:00
|
|
|
STREAM_LOG(PR_LOG_DEBUG, ("Disconnecting MediaStreamGraph %p", this));
|
2013-02-04 09:27:54 -08:00
|
|
|
if (this == gGraph) {
|
|
|
|
// null out gGraph if that's the graph being shut down
|
|
|
|
gGraph = nullptr;
|
|
|
|
}
|
2012-04-29 20:11:26 -07:00
|
|
|
// Asynchronously clean up old graph. We don't want to do this
|
|
|
|
// synchronously because it spins the event loop waiting for threads
|
|
|
|
// to shut down, and we don't want to do that in a stable state handler.
|
|
|
|
mLifecycleState = LIFECYCLE_WAITING_FOR_THREAD_SHUTDOWN;
|
|
|
|
nsCOMPtr<nsIRunnable> event = new MediaStreamGraphShutDownRunnable(this);
|
|
|
|
NS_DispatchToMainThread(event);
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
if (mLifecycleState <= LIFECYCLE_WAITING_FOR_MAIN_THREAD_CLEANUP) {
|
|
|
|
MessageBlock* block = mMessageQueue.AppendElement();
|
|
|
|
block->mMessages.SwapElements(mCurrentTaskMessageQueue);
|
2013-10-24 16:12:00 -07:00
|
|
|
block->mGraphUpdateIndex = mNextGraphUpdateIndex;
|
|
|
|
++mNextGraphUpdateIndex;
|
2012-04-29 20:11:26 -07:00
|
|
|
EnsureNextIterationLocked(lock);
|
|
|
|
}
|
|
|
|
|
2014-01-06 12:09:29 -08:00
|
|
|
// If the MediaStreamGraph has more messages going to it, try to revive
|
|
|
|
// it to process those messages. Don't do this if we're in a forced
|
|
|
|
// shutdown or it's a non-realtime graph that has already terminated
|
|
|
|
// processing.
|
|
|
|
if (mLifecycleState == LIFECYCLE_WAITING_FOR_MAIN_THREAD_CLEANUP &&
|
|
|
|
mRealtime && !mForceShutDown) {
|
2012-04-29 20:11:26 -07:00
|
|
|
mLifecycleState = LIFECYCLE_RUNNING;
|
|
|
|
// Revive the MediaStreamGraph since we have more messages going to it.
|
|
|
|
// Note that we need to put messages into its queue before reviving it,
|
|
|
|
// or it might exit immediately.
|
2013-02-01 12:13:32 -08:00
|
|
|
nsCOMPtr<nsIRunnable> event = new MediaStreamGraphThreadRunnable(this);
|
2012-04-29 20:11:26 -07:00
|
|
|
mThread->Dispatch(event, 0);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2014-02-10 16:04:58 -08:00
|
|
|
if ((mForceShutDown || !mRealtime) &&
|
2014-01-29 21:50:17 -08:00
|
|
|
mLifecycleState == LIFECYCLE_WAITING_FOR_MAIN_THREAD_CLEANUP) {
|
2014-01-06 12:09:29 -08:00
|
|
|
// Defer calls to RunDuringShutdown() to happen while mMonitor is not held.
|
|
|
|
for (uint32_t i = 0; i < mMessageQueue.Length(); ++i) {
|
|
|
|
MessageBlock& mb = mMessageQueue[i];
|
|
|
|
controlMessagesToRunDuringShutdown.MoveElementsFrom(mb.mMessages);
|
|
|
|
}
|
|
|
|
mMessageQueue.Clear();
|
2014-01-07 19:58:14 -08:00
|
|
|
MOZ_ASSERT(mCurrentTaskMessageQueue.IsEmpty());
|
2014-01-06 12:09:29 -08:00
|
|
|
// Stop MediaStreamGraph threads. Do not clear gGraph since
|
|
|
|
// we have outstanding DOM objects that may need it.
|
|
|
|
mLifecycleState = LIFECYCLE_WAITING_FOR_THREAD_SHUTDOWN;
|
|
|
|
nsCOMPtr<nsIRunnable> event = new MediaStreamGraphShutDownRunnable(this);
|
|
|
|
NS_DispatchToMainThread(event);
|
|
|
|
}
|
|
|
|
|
2012-04-29 20:11:26 -07:00
|
|
|
mDetectedNotRunning = mLifecycleState > LIFECYCLE_RUNNING;
|
|
|
|
}
|
|
|
|
|
|
|
|
// Make sure we get a new current time in the next event loop task
|
|
|
|
mPostedRunInStableState = false;
|
|
|
|
|
2012-08-22 08:56:38 -07:00
|
|
|
for (uint32_t i = 0; i < runnables.Length(); ++i) {
|
2012-04-29 20:11:26 -07:00
|
|
|
runnables[i]->Run();
|
|
|
|
}
|
2012-08-22 08:56:38 -07:00
|
|
|
for (uint32_t i = 0; i < controlMessagesToRunDuringShutdown.Length(); ++i) {
|
2012-07-31 05:17:22 -07:00
|
|
|
controlMessagesToRunDuringShutdown[i]->RunDuringShutdown();
|
|
|
|
}
|
2012-04-29 20:11:26 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
static NS_DEFINE_CID(kAppShellCID, NS_APPSHELL_CID);
|
|
|
|
|
|
|
|
void
|
|
|
|
MediaStreamGraphImpl::EnsureRunInStableState()
|
|
|
|
{
|
|
|
|
NS_ASSERTION(NS_IsMainThread(), "main thread only");
|
|
|
|
|
|
|
|
if (mPostedRunInStableState)
|
|
|
|
return;
|
|
|
|
mPostedRunInStableState = true;
|
2013-02-01 12:20:32 -08:00
|
|
|
nsCOMPtr<nsIRunnable> event = new MediaStreamGraphStableStateRunnable(this);
|
2012-04-29 20:11:26 -07:00
|
|
|
nsCOMPtr<nsIAppShell> appShell = do_GetService(kAppShellCID);
|
|
|
|
if (appShell) {
|
|
|
|
appShell->RunInStableState(event);
|
|
|
|
} else {
|
|
|
|
NS_ERROR("Appshell already destroyed?");
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
void
|
|
|
|
MediaStreamGraphImpl::EnsureStableStateEventPosted()
|
|
|
|
{
|
|
|
|
mMonitor.AssertCurrentThreadOwns();
|
|
|
|
|
|
|
|
if (mPostedRunInStableStateEvent)
|
|
|
|
return;
|
|
|
|
mPostedRunInStableStateEvent = true;
|
2013-02-01 12:20:32 -08:00
|
|
|
nsCOMPtr<nsIRunnable> event = new MediaStreamGraphStableStateRunnable(this);
|
2012-04-29 20:11:26 -07:00
|
|
|
NS_DispatchToMainThread(event);
|
|
|
|
}
|
|
|
|
|
|
|
|
void
|
|
|
|
MediaStreamGraphImpl::AppendMessage(ControlMessage* aMessage)
|
|
|
|
{
|
|
|
|
NS_ASSERTION(NS_IsMainThread(), "main thread only");
|
2012-08-09 04:30:09 -07:00
|
|
|
NS_ASSERTION(!aMessage->GetStream() ||
|
|
|
|
!aMessage->GetStream()->IsDestroyed(),
|
|
|
|
"Stream already destroyed");
|
2012-04-29 20:11:26 -07:00
|
|
|
|
|
|
|
if (mDetectedNotRunning &&
|
|
|
|
mLifecycleState > LIFECYCLE_WAITING_FOR_MAIN_THREAD_CLEANUP) {
|
|
|
|
// The graph control loop is not running and main thread cleanup has
// happened. From now on we can't append messages to mCurrentTaskMessageQueue,
// because they would never be processed again, so just run RunDuringShutdown
// on this message.
|
2014-02-10 16:04:58 -08:00
|
|
|
// This should only happen during forced shutdown, or after a non-realtime
|
|
|
|
// graph has finished processing.
|
2012-07-31 05:17:21 -07:00
|
|
|
aMessage->RunDuringShutdown();
|
2012-04-29 20:11:26 -07:00
|
|
|
delete aMessage;
|
2013-05-30 17:53:51 -07:00
|
|
|
if (IsEmpty() &&
|
|
|
|
mLifecycleState >= LIFECYCLE_WAITING_FOR_STREAM_DESTRUCTION) {
|
2013-02-04 09:29:14 -08:00
|
|
|
if (gGraph == this) {
|
|
|
|
gGraph = nullptr;
|
|
|
|
}
|
2014-04-13 11:08:10 -07:00
|
|
|
Destroy();
|
2012-04-29 20:11:26 -07:00
|
|
|
}
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
mCurrentTaskMessageQueue.AppendElement(aMessage);
|
2014-01-06 12:09:29 -08:00
|
|
|
EnsureRunInStableState();
|
2012-04-29 20:11:26 -07:00
|
|
|
}
|
|
|
|
|
2013-08-06 15:14:35 -07:00
|
|
|
MediaStream::MediaStream(DOMMediaStream* aWrapper)
|
|
|
|
: mBufferStartTime(0)
|
|
|
|
, mExplicitBlockerCount(0)
|
|
|
|
, mBlocked(false)
|
|
|
|
, mGraphUpdateIndices(0)
|
|
|
|
, mFinished(false)
|
|
|
|
, mNotifiedFinished(false)
|
|
|
|
, mNotifiedBlocked(false)
|
|
|
|
, mHasCurrentData(false)
|
|
|
|
, mNotifiedHasCurrentData(false)
|
|
|
|
, mWrapper(aWrapper)
|
|
|
|
, mMainThreadCurrentTime(0)
|
|
|
|
, mMainThreadFinished(false)
|
|
|
|
, mMainThreadDestroyed(false)
|
|
|
|
, mGraph(nullptr)
|
|
|
|
{
|
|
|
|
MOZ_COUNT_CTOR(MediaStream);
|
|
|
|
// aWrapper should not already be connected to a MediaStream! It needs
|
|
|
|
// to be hooked up to this stream, and since this stream is only just
|
|
|
|
// being created now, aWrapper must not be connected to anything.
|
|
|
|
NS_ASSERTION(!aWrapper || !aWrapper->GetStream(),
|
|
|
|
"Wrapper already has another media stream hooked up to it!");
|
|
|
|
}
|
|
|
|
|
2014-04-13 11:08:10 -07:00
|
|
|
size_t
|
|
|
|
MediaStream::SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const
|
|
|
|
{
|
|
|
|
size_t amount = 0;
|
|
|
|
|
|
|
|
// Not owned:
|
|
|
|
// - mGraph - Not reported here
|
|
|
|
// - mConsumers - elements
|
|
|
|
// Future:
|
|
|
|
// - mWrapper
|
|
|
|
// - mVideoOutputs - elements
|
|
|
|
// - mLastPlayedVideoFrame
|
|
|
|
// - mListeners - elements
|
|
|
|
// - mAudioOutputStreams - elements
|
|
|
|
|
|
|
|
amount += mBuffer.SizeOfExcludingThis(aMallocSizeOf);
|
|
|
|
amount += mAudioOutputs.SizeOfExcludingThis(aMallocSizeOf);
|
|
|
|
amount += mVideoOutputs.SizeOfExcludingThis(aMallocSizeOf);
|
|
|
|
amount += mExplicitBlockerCount.SizeOfExcludingThis(aMallocSizeOf);
|
|
|
|
amount += mListeners.SizeOfExcludingThis(aMallocSizeOf);
|
|
|
|
amount += mMainThreadListeners.SizeOfExcludingThis(aMallocSizeOf);
|
|
|
|
amount += mDisabledTrackIDs.SizeOfExcludingThis(aMallocSizeOf);
|
|
|
|
amount += mBlocked.SizeOfExcludingThis(aMallocSizeOf);
|
|
|
|
amount += mGraphUpdateIndices.SizeOfExcludingThis(aMallocSizeOf);
|
|
|
|
amount += mConsumers.SizeOfExcludingThis(aMallocSizeOf);
|
|
|
|
amount += mAudioOutputStreams.SizeOfExcludingThis(aMallocSizeOf);
|
|
|
|
for (size_t i = 0; i < mAudioOutputStreams.Length(); i++) {
|
|
|
|
amount += mAudioOutputStreams[i].SizeOfExcludingThis(aMallocSizeOf);
|
|
|
|
}
|
|
|
|
|
|
|
|
return amount;
|
|
|
|
}
|
|
|
|
|
|
|
|
size_t
|
|
|
|
MediaStream::SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const
|
|
|
|
{
|
|
|
|
return aMallocSizeOf(this) + SizeOfExcludingThis(aMallocSizeOf);
|
|
|
|
}
|
|
|
|
|
2012-04-29 20:11:26 -07:00
|
|
|
void
|
|
|
|
MediaStream::Init()
|
|
|
|
{
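// A new stream starts out explicitly blocked for the interval the graph has
// already computed, so it only becomes unblocked once blocking decisions
// that take it into account are made.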
|
|
|
|
MediaStreamGraphImpl* graph = GraphImpl();
|
|
|
|
mBlocked.SetAtAndAfter(graph->mCurrentTime, true);
|
|
|
|
mExplicitBlockerCount.SetAtAndAfter(graph->mCurrentTime, true);
|
2012-07-31 05:17:21 -07:00
|
|
|
mExplicitBlockerCount.SetAtAndAfter(graph->mStateComputedTime, false);
|
2012-04-29 20:11:26 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
MediaStreamGraphImpl*
|
|
|
|
MediaStream::GraphImpl()
|
|
|
|
{
|
2013-02-01 11:43:36 -08:00
|
|
|
return mGraph;
|
2012-04-29 20:11:26 -07:00
|
|
|
}
|
|
|
|
|
2012-07-31 05:17:21 -07:00
|
|
|
MediaStreamGraph*
|
|
|
|
MediaStream::Graph()
|
|
|
|
{
|
2013-02-01 11:43:36 -08:00
|
|
|
return mGraph;
|
|
|
|
}
|
|
|
|
|
|
|
|
void
|
|
|
|
MediaStream::SetGraphImpl(MediaStreamGraphImpl* aGraph)
|
|
|
|
{
|
|
|
|
MOZ_ASSERT(!mGraph, "Should only be called once");
|
|
|
|
mGraph = aGraph;
|
2012-07-31 05:17:21 -07:00
|
|
|
}
|
|
|
|
|
2013-06-10 12:01:19 -07:00
|
|
|
void
|
|
|
|
MediaStream::SetGraphImpl(MediaStreamGraph* aGraph)
|
|
|
|
{
|
|
|
|
MediaStreamGraphImpl* graph = static_cast<MediaStreamGraphImpl*>(aGraph);
|
|
|
|
SetGraphImpl(graph);
|
|
|
|
}
|
|
|
|
|
2012-07-31 05:17:21 -07:00
|
|
|
StreamTime
|
|
|
|
MediaStream::GraphTimeToStreamTime(GraphTime aTime)
|
|
|
|
{
|
|
|
|
return GraphImpl()->GraphTimeToStreamTime(this, aTime);
|
|
|
|
}
|
|
|
|
|
2013-02-04 02:04:26 -08:00
|
|
|
StreamTime
|
|
|
|
MediaStream::GraphTimeToStreamTimeOptimistic(GraphTime aTime)
|
|
|
|
{
|
|
|
|
return GraphImpl()->GraphTimeToStreamTimeOptimistic(this, aTime);
|
|
|
|
}
|
|
|
|
|
|
|
|
GraphTime
|
|
|
|
MediaStream::StreamTimeToGraphTime(StreamTime aTime)
|
|
|
|
{
|
|
|
|
return GraphImpl()->StreamTimeToGraphTime(this, aTime, 0);
|
|
|
|
}
|
|
|
|
|
2012-07-31 05:17:21 -07:00
|
|
|
void
|
|
|
|
MediaStream::FinishOnGraphThread()
|
|
|
|
{
|
|
|
|
GraphImpl()->FinishStream(this);
|
|
|
|
}
|
|
|
|
|
2013-07-04 18:49:53 -07:00
|
|
|
int64_t
|
|
|
|
MediaStream::GetProcessingGraphUpdateIndex()
|
|
|
|
{
|
|
|
|
return GraphImpl()->GetProcessingGraphUpdateIndex();
|
|
|
|
}
|
|
|
|
|
2013-05-21 12:17:47 -07:00
|
|
|
StreamBuffer::Track*
|
|
|
|
MediaStream::EnsureTrack(TrackID aTrackId, TrackRate aSampleRate)
|
|
|
|
{
|
|
|
|
StreamBuffer::Track* track = mBuffer.FindTrack(aTrackId);
|
|
|
|
if (!track) {
|
|
|
|
nsAutoPtr<MediaSegment> segment(new AudioSegment());
|
|
|
|
for (uint32_t j = 0; j < mListeners.Length(); ++j) {
|
|
|
|
MediaStreamListener* l = mListeners[j];
|
2014-03-24 03:06:05 -07:00
|
|
|
l->NotifyQueuedTrackChanges(Graph(), aTrackId, IdealAudioRate(), 0,
|
2013-05-21 12:17:47 -07:00
|
|
|
MediaStreamListener::TRACK_EVENT_CREATED,
|
|
|
|
*segment);
|
|
|
|
}
|
|
|
|
track = &mBuffer.AddTrack(aTrackId, aSampleRate, 0, segment.forget());
|
|
|
|
}
|
|
|
|
return track;
|
|
|
|
}
|
|
|
|
|
2013-01-06 18:31:30 -08:00
|
|
|
void
|
|
|
|
MediaStream::RemoveAllListenersImpl()
|
|
|
|
{
|
|
|
|
for (int32_t i = mListeners.Length() - 1; i >= 0; --i) {
|
|
|
|
nsRefPtr<MediaStreamListener> listener = mListeners[i].forget();
|
|
|
|
listener->NotifyRemoved(GraphImpl());
|
|
|
|
}
|
|
|
|
mListeners.Clear();
|
|
|
|
}
|
|
|
|
|
2012-04-29 20:11:26 -07:00
|
|
|
void
|
|
|
|
MediaStream::DestroyImpl()
|
|
|
|
{
|
2013-01-06 18:31:30 -08:00
|
|
|
RemoveAllListenersImpl();
|
|
|
|
|
2012-08-22 08:56:38 -07:00
|
|
|
for (int32_t i = mConsumers.Length() - 1; i >= 0; --i) {
|
2012-07-31 05:17:21 -07:00
|
|
|
mConsumers[i]->Disconnect();
|
|
|
|
}
|
2012-08-22 08:56:38 -07:00
|
|
|
for (uint32_t i = 0; i < mAudioOutputStreams.Length(); ++i) {
|
2012-07-31 05:17:22 -07:00
|
|
|
mAudioOutputStreams[i].mStream->Shutdown();
|
2012-04-29 20:11:26 -07:00
|
|
|
}
|
2012-07-31 05:17:22 -07:00
|
|
|
mAudioOutputStreams.Clear();
|
2012-04-29 20:11:26 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
void
|
|
|
|
MediaStream::Destroy()
|
|
|
|
{
|
2012-10-25 18:39:05 -07:00
|
|
|
// Keep this stream alive until we leave this method
|
|
|
|
nsRefPtr<MediaStream> kungFuDeathGrip = this;
|
|
|
|
|
2012-04-29 20:11:26 -07:00
|
|
|
class Message : public ControlMessage {
|
|
|
|
public:
|
|
|
|
Message(MediaStream* aStream) : ControlMessage(aStream) {}
|
2012-07-31 05:17:21 -07:00
|
|
|
virtual void Run()
|
2012-04-29 20:11:26 -07:00
|
|
|
{
|
|
|
|
mStream->DestroyImpl();
|
|
|
|
mStream->GraphImpl()->RemoveStream(mStream);
|
|
|
|
}
|
2012-07-31 05:17:21 -07:00
|
|
|
virtual void RunDuringShutdown()
|
|
|
|
{ Run(); }
|
2012-04-29 20:11:26 -07:00
|
|
|
};
|
2012-07-30 07:20:58 -07:00
|
|
|
mWrapper = nullptr;
|
2012-10-25 15:07:59 -07:00
|
|
|
GraphImpl()->AppendMessage(new Message(this));
|
2012-10-25 18:39:05 -07:00
|
|
|
// Message::RunDuringShutdown may have removed this stream from the graph,
|
|
|
|
// but our kungFuDeathGrip above will have kept this stream alive if
|
|
|
|
// necessary.
|
2012-10-25 16:08:38 -07:00
|
|
|
mMainThreadDestroyed = true;
|
2012-04-29 20:11:26 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
void
|
|
|
|
MediaStream::AddAudioOutput(void* aKey)
|
|
|
|
{
|
|
|
|
class Message : public ControlMessage {
|
|
|
|
public:
|
|
|
|
Message(MediaStream* aStream, void* aKey) : ControlMessage(aStream), mKey(aKey) {}
|
2012-07-31 05:17:21 -07:00
|
|
|
virtual void Run()
|
2012-04-29 20:11:26 -07:00
|
|
|
{
|
|
|
|
mStream->AddAudioOutputImpl(mKey);
|
|
|
|
}
|
|
|
|
void* mKey;
|
|
|
|
};
|
|
|
|
GraphImpl()->AppendMessage(new Message(this, aKey));
|
|
|
|
}
|
|
|
|
|
|
|
|
void
|
|
|
|
MediaStream::SetAudioOutputVolumeImpl(void* aKey, float aVolume)
|
|
|
|
{
|
2012-08-22 08:56:38 -07:00
|
|
|
for (uint32_t i = 0; i < mAudioOutputs.Length(); ++i) {
|
2012-04-29 20:11:26 -07:00
|
|
|
if (mAudioOutputs[i].mKey == aKey) {
|
|
|
|
mAudioOutputs[i].mVolume = aVolume;
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
NS_ERROR("Audio output key not found");
|
|
|
|
}
|
|
|
|
|
|
|
|
void
|
|
|
|
MediaStream::SetAudioOutputVolume(void* aKey, float aVolume)
|
|
|
|
{
|
|
|
|
class Message : public ControlMessage {
|
|
|
|
public:
|
|
|
|
Message(MediaStream* aStream, void* aKey, float aVolume) :
|
|
|
|
ControlMessage(aStream), mKey(aKey), mVolume(aVolume) {}
|
2012-07-31 05:17:21 -07:00
|
|
|
virtual void Run()
|
2012-04-29 20:11:26 -07:00
|
|
|
{
|
|
|
|
mStream->SetAudioOutputVolumeImpl(mKey, mVolume);
|
|
|
|
}
|
|
|
|
void* mKey;
|
|
|
|
float mVolume;
|
|
|
|
};
|
|
|
|
GraphImpl()->AppendMessage(new Message(this, aKey, aVolume));
|
|
|
|
}
|
|
|
|
|
|
|
|
void
|
|
|
|
MediaStream::RemoveAudioOutputImpl(void* aKey)
|
|
|
|
{
|
2012-08-22 08:56:38 -07:00
|
|
|
for (uint32_t i = 0; i < mAudioOutputs.Length(); ++i) {
|
2012-04-29 20:11:26 -07:00
|
|
|
if (mAudioOutputs[i].mKey == aKey) {
|
|
|
|
mAudioOutputs.RemoveElementAt(i);
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
NS_ERROR("Audio output key not found");
|
|
|
|
}
|
|
|
|
|
|
|
|
void
|
|
|
|
MediaStream::RemoveAudioOutput(void* aKey)
|
|
|
|
{
|
|
|
|
class Message : public ControlMessage {
|
|
|
|
public:
|
|
|
|
Message(MediaStream* aStream, void* aKey) :
|
|
|
|
ControlMessage(aStream), mKey(aKey) {}
|
2012-07-31 05:17:21 -07:00
|
|
|
virtual void Run()
|
2012-04-29 20:11:26 -07:00
|
|
|
{
|
|
|
|
mStream->RemoveAudioOutputImpl(mKey);
|
|
|
|
}
|
|
|
|
void* mKey;
|
|
|
|
};
|
|
|
|
GraphImpl()->AppendMessage(new Message(this, aKey));
|
|
|
|
}
|
|
|
|
|
|
|
|
void
|
|
|
|
MediaStream::AddVideoOutput(VideoFrameContainer* aContainer)
|
|
|
|
{
|
|
|
|
class Message : public ControlMessage {
|
|
|
|
public:
|
|
|
|
Message(MediaStream* aStream, VideoFrameContainer* aContainer) :
|
|
|
|
ControlMessage(aStream), mContainer(aContainer) {}
|
2012-07-31 05:17:21 -07:00
|
|
|
virtual void Run()
|
2012-04-29 20:11:26 -07:00
|
|
|
{
|
|
|
|
mStream->AddVideoOutputImpl(mContainer.forget());
|
|
|
|
}
|
|
|
|
nsRefPtr<VideoFrameContainer> mContainer;
|
|
|
|
};
|
|
|
|
GraphImpl()->AppendMessage(new Message(this, aContainer));
|
|
|
|
}
|
|
|
|
|
|
|
|
void
|
|
|
|
MediaStream::RemoveVideoOutput(VideoFrameContainer* aContainer)
|
|
|
|
{
|
|
|
|
class Message : public ControlMessage {
|
|
|
|
public:
|
|
|
|
Message(MediaStream* aStream, VideoFrameContainer* aContainer) :
|
|
|
|
ControlMessage(aStream), mContainer(aContainer) {}
|
2012-07-31 05:17:21 -07:00
|
|
|
virtual void Run()
|
2012-04-29 20:11:26 -07:00
|
|
|
{
|
|
|
|
mStream->RemoveVideoOutputImpl(mContainer);
|
|
|
|
}
|
|
|
|
nsRefPtr<VideoFrameContainer> mContainer;
|
|
|
|
};
|
|
|
|
GraphImpl()->AppendMessage(new Message(this, aContainer));
|
|
|
|
}
|
|
|
|
|
|
|
|
void
|
2012-08-22 08:56:38 -07:00
|
|
|
MediaStream::ChangeExplicitBlockerCount(int32_t aDelta)
|
2012-04-29 20:11:26 -07:00
|
|
|
{
|
|
|
|
class Message : public ControlMessage {
|
|
|
|
public:
|
2012-08-22 08:56:38 -07:00
|
|
|
Message(MediaStream* aStream, int32_t aDelta) :
|
2012-04-29 20:11:26 -07:00
|
|
|
ControlMessage(aStream), mDelta(aDelta) {}
|
2012-07-31 05:17:21 -07:00
|
|
|
virtual void Run()
|
2012-04-29 20:11:26 -07:00
|
|
|
{
|
|
|
|
mStream->ChangeExplicitBlockerCountImpl(
|
2012-07-31 05:17:21 -07:00
|
|
|
mStream->GraphImpl()->mStateComputedTime, mDelta);
|
2012-04-29 20:11:26 -07:00
|
|
|
}
|
2012-08-22 08:56:38 -07:00
|
|
|
int32_t mDelta;
|
2012-04-29 20:11:26 -07:00
|
|
|
};
|
2014-01-13 08:38:30 -08:00
|
|
|
|
|
|
|
// This can happen if this method has been called asynchronously, and the
|
|
|
|
// stream has been destroyed since then.
|
|
|
|
if (mMainThreadDestroyed) {
|
|
|
|
return;
|
|
|
|
}
|
2012-04-29 20:11:26 -07:00
|
|
|
GraphImpl()->AppendMessage(new Message(this, aDelta));
|
|
|
|
}
|
|
|
|
|
2012-05-24 03:37:14 -07:00
|
|
|
void
|
|
|
|
MediaStream::AddListenerImpl(already_AddRefed<MediaStreamListener> aListener)
|
|
|
|
{
|
|
|
|
MediaStreamListener* listener = *mListeners.AppendElement() = aListener;
|
|
|
|
listener->NotifyBlockingChanged(GraphImpl(),
|
2013-01-02 05:49:18 -08:00
|
|
|
mNotifiedBlocked ? MediaStreamListener::BLOCKED : MediaStreamListener::UNBLOCKED);
|
2012-05-24 03:37:14 -07:00
|
|
|
if (mNotifiedFinished) {
|
|
|
|
listener->NotifyFinished(GraphImpl());
|
|
|
|
}
|
2013-03-20 04:19:39 -07:00
|
|
|
if (mNotifiedHasCurrentData) {
|
|
|
|
listener->NotifyHasCurrentData(GraphImpl());
|
|
|
|
}
|
2012-05-24 03:37:14 -07:00
|
|
|
}
|
|
|
|
|
2012-04-29 20:11:26 -07:00
|
|
|
void
|
|
|
|
MediaStream::AddListener(MediaStreamListener* aListener)
|
|
|
|
{
|
|
|
|
class Message : public ControlMessage {
|
|
|
|
public:
|
|
|
|
Message(MediaStream* aStream, MediaStreamListener* aListener) :
|
|
|
|
ControlMessage(aStream), mListener(aListener) {}
|
2012-07-31 05:17:21 -07:00
|
|
|
virtual void Run()
|
2012-04-29 20:11:26 -07:00
|
|
|
{
|
|
|
|
mStream->AddListenerImpl(mListener.forget());
|
|
|
|
}
|
|
|
|
nsRefPtr<MediaStreamListener> mListener;
|
|
|
|
};
|
|
|
|
GraphImpl()->AppendMessage(new Message(this, aListener));
|
|
|
|
}
|
|
|
|
|
2013-01-06 18:31:30 -08:00
|
|
|
void
|
|
|
|
MediaStream::RemoveListenerImpl(MediaStreamListener* aListener)
|
|
|
|
{
|
|
|
|
// We wouldn't need this extra reference if we could notify the listener
// before removing it from mListeners.
|
|
|
|
nsRefPtr<MediaStreamListener> listener(aListener);
|
|
|
|
mListeners.RemoveElement(aListener);
|
|
|
|
listener->NotifyRemoved(GraphImpl());
|
|
|
|
}
|
|
|
|
|
2012-04-29 20:11:26 -07:00
|
|
|
void
|
|
|
|
MediaStream::RemoveListener(MediaStreamListener* aListener)
|
|
|
|
{
|
|
|
|
class Message : public ControlMessage {
|
|
|
|
public:
|
|
|
|
Message(MediaStream* aStream, MediaStreamListener* aListener) :
|
|
|
|
ControlMessage(aStream), mListener(aListener) {}
|
2012-07-31 05:17:21 -07:00
|
|
|
virtual void Run()
|
2012-04-29 20:11:26 -07:00
|
|
|
{
|
|
|
|
mStream->RemoveListenerImpl(mListener);
|
|
|
|
}
|
|
|
|
nsRefPtr<MediaStreamListener> mListener;
|
|
|
|
};
|
2013-02-28 11:53:38 -08:00
|
|
|
// If the stream is destroyed the Listeners have or will be
|
|
|
|
// removed.
|
|
|
|
if (!IsDestroyed()) {
|
|
|
|
GraphImpl()->AppendMessage(new Message(this, aListener));
|
|
|
|
}
|
2012-04-29 20:11:26 -07:00
|
|
|
}
|
2013-10-24 16:07:29 -07:00
|
|
|
|
|
|
|
void
|
|
|
|
MediaStream::RunAfterPendingUpdates(nsRefPtr<nsIRunnable> aRunnable)
|
|
|
|
{
|
|
|
|
MOZ_ASSERT(NS_IsMainThread());
|
|
|
|
MediaStreamGraphImpl* graph = GraphImpl();
|
|
|
|
|
|
|
|
// Special case when a non-realtime graph has not started, to ensure the
|
|
|
|
// runnable will run in finite time.
|
|
|
|
if (!(graph->mRealtime || graph->mNonRealtimeProcessing)) {
|
|
|
|
aRunnable->Run();
|
|
|
|
}
|
|
|
|
|
|
|
|
class Message : public ControlMessage {
|
|
|
|
public:
|
|
|
|
explicit Message(MediaStream* aStream,
|
|
|
|
already_AddRefed<nsIRunnable> aRunnable)
|
|
|
|
: ControlMessage(aStream)
|
|
|
|
, mRunnable(aRunnable) {}
|
|
|
|
virtual void Run() MOZ_OVERRIDE
|
|
|
|
{
|
|
|
|
mStream->Graph()->
|
|
|
|
DispatchToMainThreadAfterStreamStateUpdate(mRunnable.forget());
|
|
|
|
}
|
|
|
|
virtual void RunDuringShutdown() MOZ_OVERRIDE
|
|
|
|
{
|
|
|
|
mRunnable->Run();
|
|
|
|
}
|
|
|
|
private:
|
|
|
|
nsRefPtr<nsIRunnable> mRunnable;
|
|
|
|
};
|
|
|
|
|
|
|
|
graph->AppendMessage(new Message(this, aRunnable.forget()));
|
|
|
|
}
|
2012-04-29 20:11:26 -07:00
|
|
|
|
2013-05-29 21:44:43 -07:00
|
|
|
void
|
|
|
|
MediaStream::SetTrackEnabledImpl(TrackID aTrackID, bool aEnabled)
|
|
|
|
{
|
|
|
|
if (aEnabled) {
|
|
|
|
mDisabledTrackIDs.RemoveElement(aTrackID);
|
|
|
|
} else {
|
|
|
|
if (!mDisabledTrackIDs.Contains(aTrackID)) {
|
|
|
|
mDisabledTrackIDs.AppendElement(aTrackID);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
void
|
|
|
|
MediaStream::SetTrackEnabled(TrackID aTrackID, bool aEnabled)
|
|
|
|
{
|
|
|
|
class Message : public ControlMessage {
|
|
|
|
public:
|
|
|
|
Message(MediaStream* aStream, TrackID aTrackID, bool aEnabled) :
|
|
|
|
ControlMessage(aStream), mTrackID(aTrackID), mEnabled(aEnabled) {}
|
|
|
|
virtual void Run()
|
|
|
|
{
|
|
|
|
mStream->SetTrackEnabledImpl(mTrackID, mEnabled);
|
|
|
|
}
|
|
|
|
TrackID mTrackID;
|
|
|
|
bool mEnabled;
|
|
|
|
};
|
|
|
|
GraphImpl()->AppendMessage(new Message(this, aTrackID, aEnabled));
|
|
|
|
}
|
|
|
|
|
|
|
|
void
|
2013-08-25 23:07:17 -07:00
|
|
|
MediaStream::ApplyTrackDisabling(TrackID aTrackID, MediaSegment* aSegment, MediaSegment* aRawSegment)
|
2013-05-29 21:44:43 -07:00
|
|
|
{
|
2013-08-25 23:07:17 -07:00
|
|
|
// mMutex must be owned here if this is a SourceMediaStream
|
2013-05-29 21:44:43 -07:00
|
|
|
if (!mDisabledTrackIDs.Contains(aTrackID)) {
|
|
|
|
return;
|
|
|
|
}
|
2013-08-25 23:07:17 -07:00
|
|
|
aSegment->ReplaceWithDisabled();
|
|
|
|
if (aRawSegment) {
|
|
|
|
aRawSegment->ReplaceWithDisabled();
|
2013-05-29 21:44:43 -07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2012-04-29 20:11:40 -07:00
|
|
|
void
|
2012-05-22 23:01:15 -07:00
|
|
|
SourceMediaStream::DestroyImpl()
|
2012-04-29 20:11:40 -07:00
|
|
|
{
|
|
|
|
{
|
|
|
|
MutexAutoLock lock(mMutex);
|
2012-05-22 23:01:15 -07:00
|
|
|
mDestroyed = true;
|
|
|
|
}
|
|
|
|
MediaStream::DestroyImpl();
|
|
|
|
}
|
|
|
|
|
2012-07-20 12:36:03 -07:00
|
|
|
void
|
|
|
|
SourceMediaStream::SetPullEnabled(bool aEnabled)
|
|
|
|
{
|
|
|
|
MutexAutoLock lock(mMutex);
|
|
|
|
mPullEnabled = aEnabled;
|
|
|
|
if (mPullEnabled && !mDestroyed) {
|
|
|
|
GraphImpl()->EnsureNextIteration();
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2012-05-22 23:01:15 -07:00
|
|
|
void
|
|
|
|
SourceMediaStream::AddTrack(TrackID aID, TrackRate aRate, TrackTicks aStart,
|
|
|
|
MediaSegment* aSegment)
|
|
|
|
{
|
|
|
|
MutexAutoLock lock(mMutex);
|
|
|
|
TrackData* data = mUpdateTracks.AppendElement();
|
|
|
|
data->mID = aID;
|
2014-03-24 03:06:05 -07:00
|
|
|
data->mInputRate = aRate;
|
|
|
|
// We resample all audio input tracks to the sample rate of the audio mixer.
|
|
|
|
data->mOutputRate = aSegment->GetType() == MediaSegment::AUDIO ?
|
|
|
|
IdealAudioRate() : aRate;
|
2012-05-22 23:01:15 -07:00
|
|
|
data->mStart = aStart;
|
|
|
|
data->mCommands = TRACK_CREATE;
|
|
|
|
data->mData = aSegment;
|
|
|
|
data->mHaveEnough = false;
|
|
|
|
if (!mDestroyed) {
|
|
|
|
GraphImpl()->EnsureNextIteration();
|
|
|
|
}
|
2012-04-29 20:11:40 -07:00
|
|
|
}
|
|
|
|
|
2014-03-24 03:06:05 -07:00
|
|
|
void
|
|
|
|
SourceMediaStream::ResampleAudioToGraphSampleRate(TrackData* aTrackData, MediaSegment* aSegment)
|
|
|
|
{
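// Only audio tracks whose input rate differs from the graph rate need work.
// A Speex resampler is created lazily for the track on first use and the
// appended chunks are resampled in place.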
|
|
|
|
if (aSegment->GetType() != MediaSegment::AUDIO ||
|
|
|
|
aTrackData->mInputRate == IdealAudioRate()) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
AudioSegment* segment = static_cast<AudioSegment*>(aSegment);
|
|
|
|
if (!aTrackData->mResampler) {
|
|
|
|
int channels = segment->ChannelCount();
|
|
|
|
SpeexResamplerState* state = speex_resampler_init(channels,
|
|
|
|
aTrackData->mInputRate,
|
|
|
|
IdealAudioRate(),
|
|
|
|
SPEEX_RESAMPLER_QUALITY_DEFAULT,
|
|
|
|
nullptr);
|
|
|
|
if (state) {
|
|
|
|
aTrackData->mResampler.own(state);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
segment->ResampleChunks(aTrackData->mResampler);
|
|
|
|
}
|
|
|
|
|
2013-02-25 01:25:07 -08:00
|
|
|
bool
|
2013-08-24 06:53:11 -07:00
|
|
|
SourceMediaStream::AppendToTrack(TrackID aID, MediaSegment* aSegment, MediaSegment *aRawSegment)
|
2012-04-29 20:11:40 -07:00
|
|
|
{
|
2012-05-22 23:01:15 -07:00
|
|
|
MutexAutoLock lock(mMutex);
|
2012-10-24 16:21:32 -07:00
|
|
|
// ::EndAllTrackAndFinish() can end these before the sources notice
|
2013-02-25 01:25:07 -08:00
|
|
|
bool appended = false;
|
2012-10-24 16:21:32 -07:00
|
|
|
if (!mFinished) {
|
|
|
|
TrackData *track = FindDataForTrack(aID);
|
|
|
|
if (track) {
|
2013-08-24 06:53:01 -07:00
|
|
|
// Data goes into mData, and on the next iteration of the MSG moves
|
|
|
|
// into the track's segment after NotifyQueuedTrackChanges(). This adds
|
|
|
|
// 0-10ms of delay before data gets to direct listeners.
|
|
|
|
// Indirect listeners (via subsequent TrackUnion nodes) are synced to
|
|
|
|
// playout time, and so can be delayed by buffering.
|
|
|
|
|
2013-08-25 23:07:17 -07:00
|
|
|
// Apply track disabling before notifying any consumers directly
|
|
|
|
// or inserting into the graph
|
|
|
|
ApplyTrackDisabling(aID, aSegment, aRawSegment);
|
|
|
|
|
2014-03-24 03:06:05 -07:00
|
|
|
ResampleAudioToGraphSampleRate(track, aSegment);
|
|
|
|
|
2013-08-24 06:53:11 -07:00
|
|
|
// Must notify first, since AppendFrom() will empty out aSegment
|
|
|
|
NotifyDirectConsumers(track, aRawSegment ? aRawSegment : aSegment);
|
|
|
|
track->mData->AppendFrom(aSegment); // note: aSegment is now dead
|
2013-02-25 01:25:07 -08:00
|
|
|
appended = true;
|
2012-10-24 16:21:32 -07:00
|
|
|
} else {
|
2013-02-25 01:25:07 -08:00
|
|
|
aSegment->Clear();
|
2013-02-27 04:49:26 -08:00
|
|
|
}
|
2012-05-22 23:01:15 -07:00
|
|
|
}
|
|
|
|
if (!mDestroyed) {
|
|
|
|
GraphImpl()->EnsureNextIteration();
|
2012-04-29 20:11:40 -07:00
|
|
|
}
|
2013-02-25 01:25:07 -08:00
|
|
|
return appended;
|
2012-04-29 20:11:40 -07:00
|
|
|
}
|
|
|
|
|
2013-08-24 06:53:11 -07:00
|
|
|
void
|
|
|
|
SourceMediaStream::NotifyDirectConsumers(TrackData *aTrack,
|
|
|
|
MediaSegment *aSegment)
|
|
|
|
{
|
|
|
|
// Call with mMutex locked
|
|
|
|
MOZ_ASSERT(aTrack);
|
|
|
|
|
|
|
|
for (uint32_t j = 0; j < mDirectListeners.Length(); ++j) {
|
|
|
|
MediaStreamDirectListener* l = mDirectListeners[j];
|
|
|
|
TrackTicks offset = 0; // FIX! need a separate TrackTicks.... or the end of the internal buffer
|
2014-03-24 03:06:05 -07:00
|
|
|
l->NotifyRealtimeData(static_cast<MediaStreamGraph*>(GraphImpl()), aTrack->mID, aTrack->mOutputRate,
|
2013-08-24 06:53:11 -07:00
|
|
|
offset, aTrack->mCommands, *aSegment);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
void
|
|
|
|
SourceMediaStream::AddDirectListener(MediaStreamDirectListener* aListener)
|
|
|
|
{
|
|
|
|
MutexAutoLock lock(mMutex);
|
|
|
|
mDirectListeners.AppendElement(aListener);
|
|
|
|
}
|
|
|
|
|
|
|
|
void
|
|
|
|
SourceMediaStream::RemoveDirectListener(MediaStreamDirectListener* aListener)
|
|
|
|
{
|
|
|
|
MutexAutoLock lock(mMutex);
|
|
|
|
mDirectListeners.RemoveElement(aListener);
|
|
|
|
}
|
|
|
|
|
2012-04-29 20:11:40 -07:00
|
|
|
bool
|
|
|
|
SourceMediaStream::HaveEnoughBuffered(TrackID aID)
|
|
|
|
{
|
|
|
|
MutexAutoLock lock(mMutex);
|
2012-05-08 22:53:49 -07:00
|
|
|
TrackData *track = FindDataForTrack(aID);
|
|
|
|
if (track) {
|
|
|
|
return track->mHaveEnough;
|
|
|
|
}
|
2013-02-25 01:25:07 -08:00
|
|
|
return false;
|
2012-04-29 20:11:40 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
void
|
|
|
|
SourceMediaStream::DispatchWhenNotEnoughBuffered(TrackID aID,
|
2014-02-17 14:53:53 -08:00
|
|
|
nsIEventTarget* aSignalThread, nsIRunnable* aSignalRunnable)
|
2012-04-29 20:11:40 -07:00
|
|
|
{
|
|
|
|
MutexAutoLock lock(mMutex);
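// If the track currently has enough buffered data, park the runnable so it
// fires once the buffer level drops; otherwise (or if the track is unknown)
// signal the caller immediately.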
|
|
|
|
TrackData* data = FindDataForTrack(aID);
|
2012-05-08 22:53:49 -07:00
|
|
|
if (!data) {
|
2013-02-25 01:25:07 -08:00
|
|
|
aSignalThread->Dispatch(aSignalRunnable, 0);
|
2012-05-08 22:53:49 -07:00
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2012-04-29 20:11:40 -07:00
|
|
|
if (data->mHaveEnough) {
|
2014-02-14 10:38:58 -08:00
|
|
|
if (data->mDispatchWhenNotEnough.IsEmpty()) {
|
|
|
|
data->mDispatchWhenNotEnough.AppendElement()->Init(aSignalThread, aSignalRunnable);
|
|
|
|
}
|
2012-04-29 20:11:40 -07:00
|
|
|
} else {
|
|
|
|
aSignalThread->Dispatch(aSignalRunnable, 0);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
void
|
|
|
|
SourceMediaStream::EndTrack(TrackID aID)
|
|
|
|
{
|
2012-05-22 23:01:15 -07:00
|
|
|
MutexAutoLock lock(mMutex);
|
2012-10-24 16:21:32 -07:00
|
|
|
// ::EndAllTrackAndFinish() can end these before the sources call this
|
|
|
|
if (!mFinished) {
|
|
|
|
TrackData *track = FindDataForTrack(aID);
|
|
|
|
if (track) {
|
|
|
|
track->mCommands |= TRACK_END;
|
|
|
|
}
|
2012-05-22 23:01:15 -07:00
|
|
|
}
|
|
|
|
if (!mDestroyed) {
|
|
|
|
GraphImpl()->EnsureNextIteration();
|
2012-04-29 20:11:40 -07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
void
|
|
|
|
SourceMediaStream::AdvanceKnownTracksTime(StreamTime aKnownTime)
|
|
|
|
{
|
2012-05-22 23:01:15 -07:00
|
|
|
MutexAutoLock lock(mMutex);
|
|
|
|
mUpdateKnownTracksTime = aKnownTime;
|
|
|
|
if (!mDestroyed) {
|
|
|
|
GraphImpl()->EnsureNextIteration();
|
2012-04-29 20:11:40 -07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
void
|
2012-10-24 16:21:32 -07:00
|
|
|
SourceMediaStream::FinishWithLockHeld()
|
2012-04-29 20:11:40 -07:00
|
|
|
{
|
2013-04-08 05:03:33 -07:00
|
|
|
mMutex.AssertCurrentThreadOwns();
|
2012-05-22 23:01:15 -07:00
|
|
|
mUpdateFinished = true;
|
|
|
|
if (!mDestroyed) {
|
|
|
|
GraphImpl()->EnsureNextIteration();
|
2012-04-29 20:11:40 -07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2012-10-24 16:21:32 -07:00
|
|
|
void
|
|
|
|
SourceMediaStream::EndAllTrackAndFinish()
|
|
|
|
{
|
2013-04-08 05:03:33 -07:00
|
|
|
MutexAutoLock lock(mMutex);
|
|
|
|
for (uint32_t i = 0; i < mUpdateTracks.Length(); ++i) {
|
|
|
|
SourceMediaStream::TrackData* data = &mUpdateTracks[i];
|
|
|
|
data->mCommands |= TRACK_END;
|
2012-10-24 16:21:32 -07:00
|
|
|
}
|
|
|
|
FinishWithLockHeld();
|
|
|
|
// we will call NotifyFinished() to let GetUserMedia know
|
|
|
|
}
|
|
|
|
|
2013-08-24 06:53:01 -07:00
|
|
|
TrackTicks
|
|
|
|
SourceMediaStream::GetBufferedTicks(TrackID aID)
|
|
|
|
{
|
|
|
|
StreamBuffer::Track* track = mBuffer.FindTrack(aID);
|
|
|
|
if (track) {
|
|
|
|
MediaSegment* segment = track->GetSegment();
|
|
|
|
if (segment) {
|
|
|
|
return segment->GetDuration() -
|
|
|
|
track->TimeToTicksRoundDown(
|
|
|
|
GraphTimeToStreamTime(GraphImpl()->mStateComputedTime));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return 0;
|
|
|
|
}
|
|
|
|
|
2014-03-24 03:06:06 -07:00
|
|
|
void
|
|
|
|
SourceMediaStream::RegisterForAudioMixing()
|
|
|
|
{
|
|
|
|
MutexAutoLock lock(mMutex);
|
|
|
|
mNeedsMixing = true;
|
|
|
|
}
|
|
|
|
|
|
|
|
bool
|
|
|
|
SourceMediaStream::NeedsMixing()
|
|
|
|
{
|
|
|
|
MutexAutoLock lock(mMutex);
|
|
|
|
return mNeedsMixing;
|
|
|
|
}
|
|
|
|
|
2012-07-31 05:17:21 -07:00
|
|
|
void
|
|
|
|
MediaInputPort::Init()
|
|
|
|
{
|
2013-11-20 19:02:42 -08:00
|
|
|
STREAM_LOG(PR_LOG_DEBUG, ("Adding MediaInputPort %p (from %p to %p) to the graph",
|
|
|
|
this, mSource, mDest));
|
2012-07-31 05:17:21 -07:00
|
|
|
mSource->AddConsumer(this);
|
|
|
|
mDest->AddInput(this);
|
2012-08-23 05:46:20 -07:00
|
|
|
// mPortCount decremented via MediaInputPort::Destroy's message
|
2012-07-31 05:17:21 -07:00
|
|
|
++mDest->GraphImpl()->mPortCount;
|
|
|
|
}
|
|
|
|
|
|
|
|
void
|
|
|
|
MediaInputPort::Disconnect()
|
|
|
|
{
|
|
|
|
NS_ASSERTION(!mSource == !mDest,
|
|
|
|
"mSource must either both be null or both non-null");
|
|
|
|
if (!mSource)
|
|
|
|
return;
|
|
|
|
|
|
|
|
mSource->RemoveConsumer(this);
|
|
|
|
mSource = nullptr;
|
|
|
|
mDest->RemoveInput(this);
|
|
|
|
mDest = nullptr;
|
2014-01-29 05:34:35 -08:00
|
|
|
|
|
|
|
GraphImpl()->SetStreamOrderDirty();
|
2012-07-31 05:17:21 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
MediaInputPort::InputInterval
|
|
|
|
MediaInputPort::GetNextInputInterval(GraphTime aTime)
|
|
|
|
{
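// Returns the next interval starting at or after aTime during which the
// destination stream is not blocked, together with whether the source is
// blocked at the start of that interval.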
|
|
|
|
InputInterval result = { GRAPH_TIME_MAX, GRAPH_TIME_MAX, false };
|
|
|
|
GraphTime t = aTime;
|
|
|
|
GraphTime end;
|
|
|
|
for (;;) {
|
|
|
|
if (!mDest->mBlocked.GetAt(t, &end))
|
|
|
|
break;
|
|
|
|
if (end == GRAPH_TIME_MAX)
|
|
|
|
return result;
|
|
|
|
t = end;
|
|
|
|
}
|
|
|
|
result.mStart = t;
|
|
|
|
GraphTime sourceEnd;
|
|
|
|
result.mInputIsBlocked = mSource->mBlocked.GetAt(t, &sourceEnd);
|
2013-01-15 04:22:03 -08:00
|
|
|
result.mEnd = std::min(end, sourceEnd);
|
2012-07-31 05:17:21 -07:00
|
|
|
return result;
|
|
|
|
}
|
|
|
|
|
|
|
|
void
|
|
|
|
MediaInputPort::Destroy()
|
|
|
|
{
|
|
|
|
class Message : public ControlMessage {
|
|
|
|
public:
|
|
|
|
Message(MediaInputPort* aPort)
|
2012-08-23 05:46:20 -07:00
|
|
|
: ControlMessage(nullptr), mPort(aPort) {}
|
2012-07-31 05:17:21 -07:00
|
|
|
virtual void Run()
|
|
|
|
{
|
|
|
|
mPort->Disconnect();
|
2012-08-23 05:46:20 -07:00
|
|
|
--mPort->GraphImpl()->mPortCount;
|
2012-07-31 05:17:21 -07:00
|
|
|
NS_RELEASE(mPort);
|
|
|
|
}
|
|
|
|
virtual void RunDuringShutdown()
|
|
|
|
{
|
|
|
|
Run();
|
|
|
|
}
|
|
|
|
MediaInputPort* mPort;
|
|
|
|
};
|
2012-08-23 05:46:20 -07:00
|
|
|
GraphImpl()->AppendMessage(new Message(this));
|
|
|
|
}
|
|
|
|
|
|
|
|
MediaStreamGraphImpl*
|
|
|
|
MediaInputPort::GraphImpl()
|
|
|
|
{
|
2013-02-01 11:49:58 -08:00
|
|
|
return mGraph;
|
2012-08-23 05:46:20 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
MediaStreamGraph*
|
|
|
|
MediaInputPort::Graph()
|
|
|
|
{
|
2013-02-01 11:49:58 -08:00
|
|
|
return mGraph;
|
|
|
|
}
|
|
|
|
|
|
|
|
void
|
|
|
|
MediaInputPort::SetGraphImpl(MediaStreamGraphImpl* aGraph)
|
|
|
|
{
|
|
|
|
MOZ_ASSERT(!mGraph, "Should only be called once");
|
|
|
|
mGraph = aGraph;
|
2012-07-31 05:17:21 -07:00
|
|
|
}
|
|
|
|
|
2012-11-22 14:25:05 -08:00
|
|
|
already_AddRefed<MediaInputPort>
|
2013-05-05 08:47:36 -07:00
|
|
|
ProcessedMediaStream::AllocateInputPort(MediaStream* aStream, uint32_t aFlags,
|
|
|
|
uint16_t aInputNumber, uint16_t aOutputNumber)
|
2012-07-31 05:17:21 -07:00
|
|
|
{
|
2012-11-22 14:25:05 -08:00
|
|
|
// This method creates two references to the MediaInputPort: one for
|
|
|
|
// the main thread, and one for the MediaStreamGraph.
|
2012-07-31 05:17:21 -07:00
|
|
|
class Message : public ControlMessage {
|
|
|
|
public:
|
|
|
|
explicit Message(MediaInputPort* aPort)
|
|
|
|
: ControlMessage(aPort->GetDestination()),
|
|
|
|
mPort(aPort) {}
|
|
|
|
virtual void Run()
|
|
|
|
{
|
|
|
|
mPort->Init();
|
2012-11-22 14:25:05 -08:00
|
|
|
// The graph holds its reference implicitly
|
2014-01-29 05:34:35 -08:00
|
|
|
mPort->GraphImpl()->SetStreamOrderDirty();
|
2014-03-15 12:00:16 -07:00
|
|
|
unused << mPort.forget();
|
2012-07-31 05:17:21 -07:00
|
|
|
}
|
2013-05-25 07:01:08 -07:00
|
|
|
virtual void RunDuringShutdown()
|
|
|
|
{
|
|
|
|
Run();
|
|
|
|
}
|
2012-11-22 14:25:05 -08:00
|
|
|
nsRefPtr<MediaInputPort> mPort;
|
2012-07-31 05:17:21 -07:00
|
|
|
};
|
2013-05-05 08:47:36 -07:00
|
|
|
nsRefPtr<MediaInputPort> port = new MediaInputPort(aStream, this, aFlags,
|
|
|
|
aInputNumber, aOutputNumber);
|
2013-02-01 11:49:58 -08:00
|
|
|
port->SetGraphImpl(GraphImpl());
|
2012-07-31 05:17:21 -07:00
|
|
|
GraphImpl()->AppendMessage(new Message(port));
|
2012-11-22 14:25:05 -08:00
|
|
|
return port.forget();
|
2012-07-31 05:17:21 -07:00
|
|
|
}
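// Illustrative use only -- a minimal sketch; the identifiers below
// (destStream, srcStream) are hypothetical and do not appear in this file:
//
//   nsRefPtr<MediaInputPort> port =
//       destStream->AllocateInputPort(srcStream, 0, 0, 0);
//   // ... later, when the connection is no longer needed:
//   port->Destroy();  // queues the graph-thread disconnect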
|
|
|
|
|
|
|
|
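// Asks the graph thread to mark this stream as finished via FinishStream().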
void
|
|
|
|
ProcessedMediaStream::Finish()
|
|
|
|
{
|
|
|
|
class Message : public ControlMessage {
|
|
|
|
public:
|
|
|
|
explicit Message(ProcessedMediaStream* aStream)
|
|
|
|
: ControlMessage(aStream) {}
|
|
|
|
virtual void Run()
|
|
|
|
{
|
|
|
|
mStream->GraphImpl()->FinishStream(mStream);
|
|
|
|
}
|
|
|
|
};
|
|
|
|
GraphImpl()->AppendMessage(new Message(this));
|
|
|
|
}
|
|
|
|
|
|
|
|
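// Forwards the autofinish flag to the graph thread, where SetAutofinishImpl()
// applies it to the stream.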
void
|
|
|
|
ProcessedMediaStream::SetAutofinish(bool aAutofinish)
|
|
|
|
{
|
|
|
|
class Message : public ControlMessage {
|
|
|
|
public:
|
|
|
|
Message(ProcessedMediaStream* aStream, bool aAutofinish)
|
|
|
|
: ControlMessage(aStream), mAutofinish(aAutofinish) {}
|
|
|
|
virtual void Run()
|
|
|
|
{
|
2013-02-04 02:04:26 -08:00
|
|
|
static_cast<ProcessedMediaStream*>(mStream)->SetAutofinishImpl(mAutofinish);
|
2012-07-31 05:17:21 -07:00
|
|
|
}
|
|
|
|
bool mAutofinish;
|
|
|
|
};
|
|
|
|
GraphImpl()->AppendMessage(new Message(this, aAutofinish));
|
|
|
|
}
|
|
|
|
|
|
|
|
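// Disconnects any remaining input ports (iterating backwards, since
// Disconnect() removes the port from mInputs) before running the base-class
// teardown.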
void
|
|
|
|
ProcessedMediaStream::DestroyImpl()
|
|
|
|
{
|
2012-08-22 08:56:38 -07:00
|
|
|
for (int32_t i = mInputs.Length() - 1; i >= 0; --i) {
|
2012-07-31 05:17:21 -07:00
|
|
|
mInputs[i]->Disconnect();
|
|
|
|
}
|
|
|
|
MediaStream::DestroyImpl();
|
2014-01-29 05:34:35 -08:00
|
|
|
GraphImpl()->SetStreamOrderDirty();
|
2012-07-31 05:17:21 -07:00
|
|
|
}
|
2012-04-29 20:11:26 -07:00
|
|
|
|
2012-05-09 20:30:34 -07:00
|
|
|
/**
|
|
|
|
* We make the initial mCurrentTime nonzero so that zero times can have
|
|
|
|
* special meaning if necessary.
|
|
|
|
*/
|
2012-08-22 08:56:38 -07:00
|
|
|
static const int32_t INITIAL_CURRENT_TIME = 1;
|
2012-05-09 20:30:34 -07:00
|
|
|
|
2013-05-08 04:44:07 -07:00
|
|
|
MediaStreamGraphImpl::MediaStreamGraphImpl(bool aRealtime)
|
2012-07-31 05:17:21 -07:00
|
|
|
: mCurrentTime(INITIAL_CURRENT_TIME)
|
|
|
|
, mStateComputedTime(INITIAL_CURRENT_TIME)
|
2012-04-29 20:11:26 -07:00
|
|
|
, mProcessingGraphUpdateIndex(0)
|
2012-07-31 05:17:21 -07:00
|
|
|
, mPortCount(0)
|
2012-04-29 20:11:26 -07:00
|
|
|
, mMonitor("MediaStreamGraphImpl")
|
|
|
|
, mLifecycleState(LIFECYCLE_THREAD_NOT_STARTED)
|
|
|
|
, mWaitState(WAITSTATE_RUNNING)
|
2013-12-08 21:08:02 -08:00
|
|
|
, mEndTime(GRAPH_TIME_MAX)
|
2012-04-29 20:11:26 -07:00
|
|
|
, mNeedAnotherIteration(false)
|
|
|
|
, mForceShutDown(false)
|
|
|
|
, mPostedRunInStableStateEvent(false)
|
|
|
|
, mDetectedNotRunning(false)
|
|
|
|
, mPostedRunInStableState(false)
|
2013-05-08 04:44:07 -07:00
|
|
|
, mRealtime(aRealtime)
|
2013-05-16 16:30:41 -07:00
|
|
|
, mNonRealtimeProcessing(false)
|
2013-09-13 09:12:07 -07:00
|
|
|
, mStreamOrderDirty(false)
|
2013-09-24 19:10:24 -07:00
|
|
|
, mLatencyLog(AsyncLatencyLogger::Get())
|
2014-03-24 03:06:06 -07:00
|
|
|
, mMixer(nullptr)
|
2014-04-13 11:08:10 -07:00
|
|
|
, mMemoryReportMonitor("MSGIMemory")
|
|
|
|
, mSelfRef(MOZ_THIS_IN_INITIALIZER_LIST())
|
|
|
|
, mAudioStreamSizes()
|
|
|
|
, mNeedsMemoryReport(false)
|
2012-04-29 20:11:26 -07:00
|
|
|
{
|
|
|
|
#ifdef PR_LOGGING
|
|
|
|
if (!gMediaStreamGraphLog) {
|
|
|
|
gMediaStreamGraphLog = PR_NewLogModule("MediaStreamGraph");
|
|
|
|
}
|
|
|
|
#endif
|
|
|
|
|
2013-07-19 07:40:57 -07:00
|
|
|
mCurrentTimeStamp = mInitialTimeStamp = mLastMainThreadUpdate = TimeStamp::Now();
|
2014-04-13 11:08:10 -07:00
|
|
|
|
|
|
|
RegisterWeakMemoryReporter(this);
|
|
|
|
}
|
|
|
|
|
|
|
|
void
|
|
|
|
MediaStreamGraphImpl::Destroy()
|
|
|
|
{
|
|
|
|
// First unregister from memory reporting.
|
|
|
|
UnregisterWeakMemoryReporter(this);
|
|
|
|
|
|
|
|
// Clear the self reference which will destroy this instance.
|
|
|
|
mSelfRef = nullptr;
|
2012-04-29 20:11:26 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
NS_IMPL_ISUPPORTS1(MediaStreamGraphShutdownObserver, nsIObserver)
|
|
|
|
|
|
|
|
static bool gShutdownObserverRegistered = false;
|
|
|
|
|
|
|
|
NS_IMETHODIMP
|
|
|
|
MediaStreamGraphShutdownObserver::Observe(nsISupports *aSubject,
|
|
|
|
const char *aTopic,
|
2014-01-04 07:02:17 -08:00
|
|
|
const char16_t *aData)
|
2012-04-29 20:11:26 -07:00
|
|
|
{
|
|
|
|
if (strcmp(aTopic, NS_XPCOM_SHUTDOWN_OBSERVER_ID) == 0) {
|
|
|
|
if (gGraph) {
|
|
|
|
gGraph->ForceShutDown();
|
|
|
|
}
|
|
|
|
nsContentUtils::UnregisterShutdownObserver(this);
|
|
|
|
gShutdownObserverRegistered = false;
|
|
|
|
}
|
|
|
|
return NS_OK;
|
|
|
|
}
|
|
|
|
|
|
|
|
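// Returns the singleton realtime graph, lazily creating it (and registering
// the XPCOM shutdown observer) on first use. Main thread only.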
MediaStreamGraph*
|
|
|
|
MediaStreamGraph::GetInstance()
|
|
|
|
{
|
|
|
|
NS_ASSERTION(NS_IsMainThread(), "Main thread only");
|
|
|
|
|
|
|
|
if (!gGraph) {
|
|
|
|
if (!gShutdownObserverRegistered) {
|
|
|
|
gShutdownObserverRegistered = true;
|
|
|
|
nsContentUtils::RegisterShutdownObserver(new MediaStreamGraphShutdownObserver());
|
|
|
|
}
|
|
|
|
|
2013-05-08 04:44:07 -07:00
|
|
|
gGraph = new MediaStreamGraphImpl(true);
|
2014-04-13 11:08:10 -07:00
|
|
|
|
2013-11-20 19:02:42 -08:00
|
|
|
STREAM_LOG(PR_LOG_DEBUG, ("Starting up MediaStreamGraph %p", gGraph));
|
2014-03-24 03:06:05 -07:00
|
|
|
|
|
|
|
AudioStream::InitPreferredSampleRate();
|
2012-04-29 20:11:26 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
return gGraph;
|
|
|
|
}
|
|
|
|
|
2013-05-08 04:44:07 -07:00
|
|
|
MediaStreamGraph*
|
|
|
|
MediaStreamGraph::CreateNonRealtimeInstance()
|
|
|
|
{
|
|
|
|
NS_ASSERTION(NS_IsMainThread(), "Main thread only");
|
|
|
|
|
|
|
|
MediaStreamGraphImpl* graph = new MediaStreamGraphImpl(false);
|
2014-04-13 11:08:10 -07:00
|
|
|
|
2013-05-08 04:44:07 -07:00
|
|
|
return graph;
|
|
|
|
}
|
|
|
|
|
|
|
|
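// Tears down an offline graph. If it never started processing, kick off a
// trivial processing run first so the normal shutdown path can complete.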
void
|
|
|
|
MediaStreamGraph::DestroyNonRealtimeInstance(MediaStreamGraph* aGraph)
|
|
|
|
{
|
|
|
|
NS_ASSERTION(NS_IsMainThread(), "Main thread only");
|
2013-09-09 22:05:22 -07:00
|
|
|
MOZ_ASSERT(aGraph->IsNonRealtime(), "Should not destroy the global graph here");
|
2013-05-08 04:44:07 -07:00
|
|
|
|
|
|
|
MediaStreamGraphImpl* graph = static_cast<MediaStreamGraphImpl*>(aGraph);
|
2013-09-09 22:05:22 -07:00
|
|
|
if (graph->mForceShutDown)
|
|
|
|
return; // already done
|
|
|
|
|
2013-05-25 06:59:59 -07:00
|
|
|
if (!graph->mNonRealtimeProcessing) {
|
|
|
|
// Start the graph, but don't produce anything
|
2013-12-08 21:08:02 -08:00
|
|
|
graph->StartNonRealtimeProcessing(1, 0);
|
2013-05-25 06:59:59 -07:00
|
|
|
}
|
2013-05-08 04:44:07 -07:00
|
|
|
graph->ForceShutDown();
|
|
|
|
}
|
|
|
|
|
2014-04-13 11:08:10 -07:00
|
|
|
NS_IMPL_ISUPPORTS1(MediaStreamGraphImpl, nsIMemoryReporter)
|
|
|
|
|
|
|
|
struct ArrayClearer
|
|
|
|
{
|
|
|
|
explicit ArrayClearer(nsTArray<AudioNodeSizes>& aArray) : mArray(aArray) {}
|
|
|
|
~ArrayClearer() { mArray.Clear(); }
|
|
|
|
nsTArray<AudioNodeSizes>& mArray;
|
|
|
|
};
|
|
|
|
|
|
|
|
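// Memory reporting is requested on the main thread, but the per-node sizes
// are gathered on the graph thread: set mNeedsMemoryReport, wake the graph
// thread, then block on mMemoryReportMonitor until mAudioStreamSizes has been
// filled in.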
NS_IMETHODIMP
|
|
|
|
MediaStreamGraphImpl::CollectReports(nsIHandleReportCallback* aHandleReport,
|
|
|
|
nsISupports* aData)
|
|
|
|
{
|
|
|
|
// Clears out the report array after we're done with it.
|
|
|
|
ArrayClearer reportCleanup(mAudioStreamSizes);
|
|
|
|
|
|
|
|
{
|
|
|
|
MonitorAutoLock memoryReportLock(mMemoryReportMonitor);
|
|
|
|
mNeedsMemoryReport = true;
|
|
|
|
|
|
|
|
{
|
|
|
|
// Wake up the MSG thread.
|
|
|
|
MonitorAutoLock monitorLock(mMonitor);
|
|
|
|
EnsureImmediateWakeUpLocked(monitorLock);
|
|
|
|
}
|
|
|
|
|
|
|
|
// Wait for the report to complete.
|
|
|
|
nsresult rv;
|
|
|
|
while ((rv = memoryReportLock.Wait()) != NS_OK) {
|
|
|
|
if (PR_GetError() != PR_PENDING_INTERRUPT_ERROR) {
|
|
|
|
return rv;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
#define REPORT(_path, _amount, _desc) \
|
|
|
|
do { \
|
|
|
|
nsresult rv; \
|
|
|
|
rv = aHandleReport->Callback(EmptyCString(), _path, \
|
|
|
|
KIND_HEAP, UNITS_BYTES, _amount, \
|
|
|
|
NS_LITERAL_CSTRING(_desc), aData); \
|
|
|
|
NS_ENSURE_SUCCESS(rv, rv); \
|
|
|
|
} while (0)
|
|
|
|
|
|
|
|
for (size_t i = 0; i < mAudioStreamSizes.Length(); i++) {
|
|
|
|
const AudioNodeSizes& usage = mAudioStreamSizes[i];
|
|
|
|
const char* const nodeType = usage.mNodeType.get();
|
|
|
|
|
|
|
|
nsPrintfCString domNodePath("explicit/webaudio/audio-node/%s/dom-nodes",
|
|
|
|
nodeType);
|
|
|
|
REPORT(domNodePath, usage.mDomNode,
|
|
|
|
"Memory used by AudioNode DOM objects (Web Audio).");
|
|
|
|
|
|
|
|
nsPrintfCString enginePath("explicit/webaudio/audio-node/%s/engine-objects",
|
|
|
|
nodeType);
|
|
|
|
REPORT(enginePath, usage.mEngine,
|
|
|
|
"Memory used by AudioNode engine objects (Web Audio).");
|
|
|
|
|
|
|
|
nsPrintfCString streamPath("explicit/webaudio/audio-node/%s/stream-objects",
|
|
|
|
nodeType);
|
|
|
|
REPORT(streamPath, usage.mStream,
|
|
|
|
"Memory used by AudioNode stream objects (Web Audio).");
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
#undef REPORT
|
|
|
|
|
|
|
|
return NS_OK;
|
|
|
|
}
|
|
|
|
|
2012-04-29 20:11:40 -07:00
|
|
|
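// Each of the Create*Stream helpers below follows the same recipe: construct
// the stream, point it at this graph, and queue a CreateMessage so the graph
// thread actually adds it.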
SourceMediaStream*
|
2013-02-15 00:01:58 -08:00
|
|
|
MediaStreamGraph::CreateSourceStream(DOMMediaStream* aWrapper)
|
2012-04-29 20:11:40 -07:00
|
|
|
{
|
|
|
|
SourceMediaStream* stream = new SourceMediaStream(aWrapper);
|
|
|
|
NS_ADDREF(stream);
|
2013-02-01 11:43:36 -08:00
|
|
|
MediaStreamGraphImpl* graph = static_cast<MediaStreamGraphImpl*>(this);
|
|
|
|
stream->SetGraphImpl(graph);
|
|
|
|
graph->AppendMessage(new CreateMessage(stream));
|
2012-04-29 20:11:40 -07:00
|
|
|
return stream;
|
|
|
|
}
|
|
|
|
|
2012-07-31 05:17:21 -07:00
|
|
|
ProcessedMediaStream*
|
2013-02-15 00:01:58 -08:00
|
|
|
MediaStreamGraph::CreateTrackUnionStream(DOMMediaStream* aWrapper)
|
2012-07-31 05:17:21 -07:00
|
|
|
{
|
|
|
|
TrackUnionStream* stream = new TrackUnionStream(aWrapper);
|
|
|
|
NS_ADDREF(stream);
|
2013-02-01 11:43:36 -08:00
|
|
|
MediaStreamGraphImpl* graph = static_cast<MediaStreamGraphImpl*>(this);
|
|
|
|
stream->SetGraphImpl(graph);
|
|
|
|
graph->AppendMessage(new CreateMessage(stream));
|
2012-07-31 05:17:21 -07:00
|
|
|
return stream;
|
|
|
|
}
|
|
|
|
|
2013-07-24 04:29:39 -07:00
|
|
|
AudioNodeExternalInputStream*
|
|
|
|
MediaStreamGraph::CreateAudioNodeExternalInputStream(AudioNodeEngine* aEngine, TrackRate aSampleRate)
|
|
|
|
{
|
|
|
|
MOZ_ASSERT(NS_IsMainThread());
|
|
|
|
if (!aSampleRate) {
|
|
|
|
aSampleRate = aEngine->NodeMainThread()->Context()->SampleRate();
|
|
|
|
}
|
|
|
|
AudioNodeExternalInputStream* stream = new AudioNodeExternalInputStream(aEngine, aSampleRate);
|
|
|
|
NS_ADDREF(stream);
|
|
|
|
MediaStreamGraphImpl* graph = static_cast<MediaStreamGraphImpl*>(this);
|
|
|
|
stream->SetGraphImpl(graph);
|
|
|
|
graph->AppendMessage(new CreateMessage(stream));
|
|
|
|
return stream;
|
|
|
|
}
|
|
|
|
|
2013-01-13 14:46:57 -08:00
|
|
|
AudioNodeStream*
|
2013-03-17 17:37:47 -07:00
|
|
|
MediaStreamGraph::CreateAudioNodeStream(AudioNodeEngine* aEngine,
|
2013-05-24 10:09:29 -07:00
|
|
|
AudioNodeStreamKind aKind,
|
|
|
|
TrackRate aSampleRate)
|
2013-01-13 14:46:57 -08:00
|
|
|
{
|
2013-04-27 16:25:23 -07:00
|
|
|
MOZ_ASSERT(NS_IsMainThread());
|
2013-05-24 10:09:29 -07:00
|
|
|
if (!aSampleRate) {
|
|
|
|
aSampleRate = aEngine->NodeMainThread()->Context()->SampleRate();
|
|
|
|
}
|
|
|
|
AudioNodeStream* stream = new AudioNodeStream(aEngine, aKind, aSampleRate);
|
2013-01-13 14:46:57 -08:00
|
|
|
NS_ADDREF(stream);
|
2013-02-01 11:43:36 -08:00
|
|
|
MediaStreamGraphImpl* graph = static_cast<MediaStreamGraphImpl*>(this);
|
|
|
|
stream->SetGraphImpl(graph);
|
2013-05-01 18:02:31 -07:00
|
|
|
if (aEngine->HasNode()) {
|
|
|
|
stream->SetChannelMixingParametersImpl(aEngine->NodeMainThread()->ChannelCount(),
|
|
|
|
aEngine->NodeMainThread()->ChannelCountModeValue(),
|
|
|
|
aEngine->NodeMainThread()->ChannelInterpretationValue());
|
|
|
|
}
|
2013-02-01 11:43:36 -08:00
|
|
|
graph->AppendMessage(new CreateMessage(stream));
|
2013-01-13 14:46:57 -08:00
|
|
|
return stream;
|
|
|
|
}
|
|
|
|
|
2013-09-09 22:05:22 -07:00
|
|
|
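// gGraph is the only realtime graph, so any other instance is offline.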
bool
|
|
|
|
MediaStreamGraph::IsNonRealtime() const
|
|
|
|
{
|
|
|
|
return this != gGraph;
|
|
|
|
}
|
|
|
|
|
2013-05-16 16:30:41 -07:00
|
|
|
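// Begins offline processing: converts aTicksToProcess at aRate into an end
// time for the graph and wakes it up in stable state. Subsequent calls are
// no-ops once processing has started.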
void
|
2013-12-08 21:08:02 -08:00
|
|
|
MediaStreamGraph::StartNonRealtimeProcessing(TrackRate aRate, uint32_t aTicksToProcess)
|
2013-05-16 16:30:41 -07:00
|
|
|
{
|
|
|
|
NS_ASSERTION(NS_IsMainThread(), "main thread only");
|
|
|
|
|
|
|
|
MediaStreamGraphImpl* graph = static_cast<MediaStreamGraphImpl*>(this);
|
|
|
|
NS_ASSERTION(!graph->mRealtime, "non-realtime only");
|
|
|
|
|
|
|
|
if (graph->mNonRealtimeProcessing)
|
|
|
|
return;
|
2013-12-08 21:08:02 -08:00
|
|
|
graph->mEndTime = graph->mCurrentTime + TicksToTimeRoundUp(aRate, aTicksToProcess);
|
2013-05-16 16:30:41 -07:00
|
|
|
graph->mNonRealtimeProcessing = true;
|
|
|
|
graph->EnsureRunInStableState();
|
|
|
|
}
|
|
|
|
|
2013-09-13 09:12:07 -07:00
|
|
|
void
|
|
|
|
ProcessedMediaStream::AddInput(MediaInputPort* aPort)
|
|
|
|
{
|
|
|
|
mInputs.AppendElement(aPort);
|
|
|
|
GraphImpl()->SetStreamOrderDirty();
|
|
|
|
}
|
|
|
|
|
2012-04-29 20:11:26 -07:00
|
|
|
}
|