Bug 952893 - Backout changesets 780cba0f9f59, 887fe3a09c3d and fb04a4252ea3 for bad commit message, DONTBUILD.

Paul Adenot 2014-01-29 17:36:56 +01:00
parent cef0bf3210
commit 8ae42d79f9
13 changed files with 14 additions and 171 deletions
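A brief orientation, reconstructed from the hunks below: the changesets being backed out routed AudioNodeStream time parameters through the AudioContext so that AudioContext.currentTime could include an ExtraCurrentTime() correction (blocking the destination stream while it was the context's only node, and pausing/resuming the graph's audio outputs while the graph thread waited indefinitely). This backout removes that machinery and restores the MediaStream-based interface. The signature change, taken verbatim from the hunks that follow:

  // removed by the backout:
  void SetStreamTimeParameter(uint32_t aIndex, AudioContext* aContext, double aStreamTime);
  // restored by the backout:
  void SetStreamTimeParameter(uint32_t aIndex, MediaStream* aRelativeToStream, double aStreamTime);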

View File

@@ -10,7 +10,6 @@
 #include "ThreeDPoint.h"
 #include "AudioChannelFormat.h"
 #include "AudioParamTimeline.h"
-#include "AudioContext.h"
 
 using namespace mozilla::dom;
@@ -31,7 +30,7 @@ AudioNodeStream::~AudioNodeStream()
 }
 
 void
-AudioNodeStream::SetStreamTimeParameter(uint32_t aIndex, AudioContext* aContext,
+AudioNodeStream::SetStreamTimeParameter(uint32_t aIndex, MediaStream* aRelativeToStream,
                                         double aStreamTime)
 {
   class Message : public ControlMessage {
@@ -51,9 +50,7 @@ AudioNodeStream::SetStreamTimeParameter(uint32_t aIndex, AudioContext* aContext,
   };
 
   MOZ_ASSERT(this);
-  GraphImpl()->AppendMessage(new Message(this, aIndex,
-                                         aContext->DestinationStream(),
-                                         aStreamTime - aContext->ExtraCurrentTime()));
+  GraphImpl()->AppendMessage(new Message(this, aIndex, aRelativeToStream, aStreamTime));
 }
 
 void

View File

@@ -16,7 +16,6 @@ namespace dom {
 struct ThreeDPoint;
 class AudioParamTimeline;
 class DelayNodeEngine;
-class AudioContext;
 }
 
 class ThreadSharedFloatArrayBufferList;
@@ -34,8 +33,6 @@ class AudioNodeEngine;
  */
 class AudioNodeStream : public ProcessedMediaStream {
 public:
-  typedef mozilla::dom::AudioContext AudioContext;
-
   enum { AUDIO_TRACK = 1 };
 
   typedef nsAutoTArray<AudioChunk, 1> OutputChunks;
@@ -69,7 +66,7 @@ public:
   * Sets a parameter that's a time relative to some stream's played time.
   * This time is converted to a time relative to this stream when it's set.
   */
-  void SetStreamTimeParameter(uint32_t aIndex, AudioContext* aContext,
+  void SetStreamTimeParameter(uint32_t aIndex, MediaStream* aRelativeToStream,
                               double aStreamTime);
  void SetDoubleParameter(uint32_t aIndex, double aValue);
  void SetInt32Parameter(uint32_t aIndex, int32_t aValue);

View File

@@ -1116,28 +1116,6 @@ MediaStreamGraphImpl::AllFinishedStreamsNotified()
   return true;
 }
 
-void
-MediaStreamGraphImpl::PauseAllAudioOutputs()
-{
-  for (uint32_t i = 0; i < mStreams.Length(); ++i) {
-    MediaStream* s = mStreams[i];
-    for (uint32_t j = 0; j < s->mAudioOutputStreams.Length(); ++j) {
-      s->mAudioOutputStreams[j].mStream->Pause();
-    }
-  }
-}
-
-void
-MediaStreamGraphImpl::ResumeAllAudioOutputs()
-{
-  for (uint32_t i = 0; i < mStreams.Length(); ++i) {
-    MediaStream* s = mStreams[i];
-    for (uint32_t j = 0; j < s->mAudioOutputStreams.Length(); ++j) {
-      s->mAudioOutputStreams[j].mStream->Resume();
-    }
-  }
-}
-
 void
 MediaStreamGraphImpl::RunThread()
 {
@@ -1202,6 +1180,7 @@ MediaStreamGraphImpl::RunThread()
     RecomputeBlocking(endBlockingDecisions);
 
     // Play stream contents.
+    uint32_t audioStreamsActive = 0;
     bool allBlockedForever = true;
     // True when we've done ProduceOutput for all processed streams.
     bool doneAllProducing = false;
@@ -1242,6 +1221,7 @@ MediaStreamGraphImpl::RunThread()
         // Only playback audio and video in real-time mode
         CreateOrDestroyAudioStreams(prevComputedTime, stream);
         PlayAudio(stream, prevComputedTime, mStateComputedTime);
+        audioStreamsActive += stream->mAudioOutputStreams.Length();
        PlayVideo(stream);
       }
       SourceMediaStream* is = stream->AsSourceStream();
@@ -1253,7 +1233,7 @@ MediaStreamGraphImpl::RunThread()
         allBlockedForever = false;
       }
     }
-    if (ensureNextIteration || !allBlockedForever) {
+    if (ensureNextIteration || !allBlockedForever || audioStreamsActive > 0) {
      EnsureNextIteration();
    }
 
@@ -1281,7 +1261,6 @@ MediaStreamGraphImpl::RunThread()
    if (mRealtime) {
      PRIntervalTime timeout = PR_INTERVAL_NO_TIMEOUT;
      TimeStamp now = TimeStamp::Now();
-      bool pausedOutputs = false;
      if (mNeedAnotherIteration) {
        int64_t timeoutMS = MEDIA_GRAPH_TARGET_PERIOD_MS -
          int64_t((now - mCurrentTimeStamp).ToMilliseconds());
@@ -1294,8 +1273,6 @@ MediaStreamGraphImpl::RunThread()
        mWaitState = WAITSTATE_WAITING_FOR_NEXT_ITERATION;
      } else {
        mWaitState = WAITSTATE_WAITING_INDEFINITELY;
-        PauseAllAudioOutputs();
-        pausedOutputs = true;
      }
      if (timeout > 0) {
        mMonitor.Wait(timeout);
@@ -1303,9 +1280,6 @@ MediaStreamGraphImpl::RunThread()
               (TimeStamp::Now() - mInitialTimeStamp).ToSeconds(),
               (TimeStamp::Now() - now).ToSeconds()));
      }
-      if (pausedOutputs) {
-        ResumeAllAudioOutputs();
-      }
    }
    mWaitState = WAITSTATE_RUNNING;
    mNeedAnotherIteration = false;

View File

@@ -370,14 +370,6 @@ public:
   {
     mStreamOrderDirty = true;
   }
 
-  /**
-   * Pause all AudioStreams being written to by MediaStreams
-   */
-  void PauseAllAudioOutputs();
-  /**
-   * Resume all AudioStreams being written to by MediaStreams
-   */
-  void ResumeAllAudioOutputs();
 
  // Data members

View File

@@ -44,9 +44,6 @@ protected:
  * The implementation records a mCurrent (the value at the current time)
  * and an array of "change times" (greater than the current time) and the
  * new value for each change time. This is a simple but dumb implementation.
- * We maintain the invariant that each change entry in the array must have
- * a different value to the value in the previous change entry (or, for
- * the first change entry, mCurrent).
  */
 template <typename Time, typename T, uint32_t ReservedChanges>
 class TimeVarying : public TimeVaryingBase {
@@ -82,9 +79,6 @@ public:
      }
      mChanges.RemoveElementAt(i);
    }
-    if (mCurrent == aValue) {
-      return;
-    }
    mChanges.InsertElementAt(0, Entry(aTime, aValue));
  }
 
  /**

View File

@@ -532,7 +532,7 @@ AudioBufferSourceNode::Start(double aWhen, double aOffset,
   // Don't set parameter unnecessarily
   if (aWhen > 0.0) {
-    ns->SetStreamTimeParameter(START, Context(), aWhen);
+    ns->SetStreamTimeParameter(START, Context()->DestinationStream(), aWhen);
   }
 
   MarkActive();
@@ -616,7 +616,8 @@ AudioBufferSourceNode::Stop(double aWhen, ErrorResult& aRv)
    return;
  }
 
-  ns->SetStreamTimeParameter(STOP, Context(), std::max(0.0, aWhen));
+  ns->SetStreamTimeParameter(STOP, Context()->DestinationStream(),
+                             std::max(0.0, aWhen));
 }
 
 void
void void

View File

@@ -84,7 +84,6 @@ AudioContext::AudioContext(nsPIDOMWindow* aWindow,
   : nsDOMEventTargetHelper(aWindow)
   , mSampleRate(GetSampleRateForAudioContext(aIsOffline, aSampleRate))
   , mNumberOfChannels(aNumberOfChannels)
-  , mNodeCount(0)
   , mIsOffline(aIsOffline)
   , mIsStarted(!aIsOffline)
   , mIsShutDown(false)
@@ -96,10 +95,6 @@ AudioContext::AudioContext(nsPIDOMWindow* aWindow,
   mDestination = new AudioDestinationNode(this, aIsOffline, aNumberOfChannels,
                                           aLength, aSampleRate);
   mDestination->Stream()->AddAudioOutput(&gWebAudioOutputKey);
-  // We skip calling SetIsOnlyNodeForContext during mDestination's constructor,
-  // because we can only call SetIsOnlyNodeForContext after mDestination has
-  // been set up.
-  mDestination->SetIsOnlyNodeForContext(true);
 }
 
 AudioContext::~AudioContext()
@@ -548,8 +543,7 @@ AudioContext::DestinationStream() const
 double
 AudioContext::CurrentTime() const
 {
-  return MediaTimeToSeconds(Destination()->Stream()->GetCurrentTime()) +
-      Destination()->ExtraCurrentTime();
+  return MediaTimeToSeconds(Destination()->Stream()->GetCurrentTime());
 }
 
 void
@@ -595,18 +589,6 @@ AudioContext::Resume()
  }
 }
 
-void
-AudioContext::UpdateNodeCount(int32_t aDelta)
-{
-  bool firstNode = mNodeCount == 0;
-  mNodeCount += aDelta;
-  MOZ_ASSERT(mNodeCount >= 0);
-  // mDestinationNode may be null when we're destroying nodes unlinked by CC
-  if (!firstNode && mDestination) {
-    mDestination->SetIsOnlyNodeForContext(mNodeCount == 1);
-  }
-}
-
 JSContext*
 AudioContext::GetJSContext() const
 {
@@ -697,11 +679,5 @@ AudioContext::CollectReports(nsIHandleReportCallback* aHandleReport,
          amount, "Memory used by AudioContext objects (Web Audio).");
 }
 
-double
-AudioContext::ExtraCurrentTime() const
-{
-  return mDestination->ExtraCurrentTime();
-}
-
 }
 }

View File

@@ -246,12 +246,6 @@ public:
   AudioChannel MozAudioChannelType() const;
   void SetMozAudioChannelType(AudioChannel aValue, ErrorResult& aRv);
 
-  void UpdateNodeCount(int32_t aDelta);
-
-  // Returns the difference between CurrentTime() and the current time of the
-  // AudioDestinationNode's MediaStream.
-  double ExtraCurrentTime() const;
-
 private:
   void RemoveFromDecodeQueue(WebAudioDecodeJob* aDecodeJob);
   void ShutdownDecoder();
@@ -278,8 +272,6 @@ private:
   nsTHashtable<nsPtrHashKey<PannerNode> > mPannerNodes;
   // Number of channels passed in the OfflineAudioContext ctor.
   uint32_t mNumberOfChannels;
-  // Number of nodes that currently exist for this AudioContext
-  int32_t mNodeCount;
   bool mIsOffline;
   bool mIsStarted;
   bool mIsShutDown;

View File

@@ -17,8 +17,6 @@
 #include "nsIPermissionManager.h"
 #include "nsIScriptObjectPrincipal.h"
 #include "nsServiceManagerUtils.h"
-#include "nsIAppShell.h"
-#include "nsWidgetsCID.h"
 
 namespace mozilla {
 namespace dom {
@@ -219,8 +217,6 @@ AudioDestinationNode::AudioDestinationNode(AudioContext* aContext,
   , mAudioChannel(AudioChannel::Normal)
   , mIsOffline(aIsOffline)
   , mHasFinished(false)
-  , mExtraCurrentTime(0)
-  , mExtraCurrentTimeUpdatedSinceLastStableState(false)
 {
   MediaStreamGraph* graph = aIsOffline ?
     MediaStreamGraph::CreateNonRealtimeInstance() :
@@ -490,62 +486,6 @@ AudioDestinationNode::CreateAudioChannelAgent()
   mAudioChannelAgent->StartPlaying(&state);
   SetCanPlay(state == AudioChannelState::AUDIO_CHANNEL_STATE_NORMAL);
 }
 
-void
-AudioDestinationNode::NotifyStableState()
-{
-  mExtraCurrentTimeUpdatedSinceLastStableState = false;
-}
-
-static NS_DEFINE_CID(kAppShellCID, NS_APPSHELL_CID);
-
-void
-AudioDestinationNode::ScheduleStableStateNotification()
-{
-  nsCOMPtr<nsIAppShell> appShell = do_GetService(kAppShellCID);
-  if (appShell) {
-    nsCOMPtr<nsIRunnable> event =
-      NS_NewRunnableMethod(this, &AudioDestinationNode::NotifyStableState);
-    appShell->RunInStableState(event);
-  }
-}
-
-double
-AudioDestinationNode::ExtraCurrentTime()
-{
-  if (!mStartedBlockingDueToBeingOnlyNode.IsNull() &&
-      !mExtraCurrentTimeUpdatedSinceLastStableState) {
-    mExtraCurrentTimeUpdatedSinceLastStableState = true;
-    mExtraCurrentTimeSinceLastStartedBlocking =
-      (TimeStamp::Now() - mStartedBlockingDueToBeingOnlyNode).ToSeconds();
-    ScheduleStableStateNotification();
-  }
-  return mExtraCurrentTime + mExtraCurrentTimeSinceLastStartedBlocking;
-}
-
-void
-AudioDestinationNode::SetIsOnlyNodeForContext(bool aIsOnlyNode)
-{
-  if (!mStartedBlockingDueToBeingOnlyNode.IsNull() == aIsOnlyNode) {
-    return;
-  }
-
-  if (aIsOnlyNode) {
-    mStream->ChangeExplicitBlockerCount(1);
-    mStartedBlockingDueToBeingOnlyNode = TimeStamp::Now();
-    mExtraCurrentTimeSinceLastStartedBlocking = 0;
-    // Don't do an update of mExtraCurrentTimeSinceLastStartedBlocking until the next stable state.
-    mExtraCurrentTimeUpdatedSinceLastStableState = true;
-    ScheduleStableStateNotification();
-  } else {
-    // Force update of mExtraCurrentTimeSinceLastStartedBlocking if necessary
-    ExtraCurrentTime();
-    mExtraCurrentTime += mExtraCurrentTimeSinceLastStartedBlocking;
-    mStream->ChangeExplicitBlockerCount(-1);
-    mStartedBlockingDueToBeingOnlyNode = TimeStamp();
-  }
-}
-
 }
 }

View File

@@ -70,22 +70,12 @@ public:
   virtual void NotifyMainThreadStateChanged() MOZ_OVERRIDE;
   void FireOfflineCompletionEvent();
 
-  // An amount that should be added to the MediaStream's current time to
-  // get the AudioContext.currentTime.
-  double ExtraCurrentTime();
-
-  // When aIsOnlyNode is true, this is the only node for the AudioContext.
-  void SetIsOnlyNodeForContext(bool aIsOnlyNode);
-
 private:
   bool CheckAudioChannelPermissions(AudioChannel aValue);
   void CreateAudioChannelAgent();
   void SetCanPlay(bool aCanPlay);
 
-  void NotifyStableState();
-  void ScheduleStableStateNotification();
-
   SelfReference<AudioDestinationNode> mOfflineRenderingRef;
   uint32_t mFramesToProduce;
@@ -95,10 +85,6 @@ private:
  AudioChannel mAudioChannel;
  bool mIsOffline;
  bool mHasFinished;
-  TimeStamp mStartedBlockingDueToBeingOnlyNode;
-  double mExtraCurrentTime;
-  double mExtraCurrentTimeSinceLastStartedBlocking;
-  bool mExtraCurrentTimeUpdatedSinceLastStableState;
 };
 
 }

View File

@@ -19,7 +19,6 @@ NS_IMPL_CYCLE_COLLECTION_CLASS(AudioNode)
 NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN_INHERITED(AudioNode, nsDOMEventTargetHelper)
   tmp->DisconnectFromGraph();
-  tmp->mContext->UpdateNodeCount(-1);
   NS_IMPL_CYCLE_COLLECTION_UNLINK(mContext)
   NS_IMPL_CYCLE_COLLECTION_UNLINK(mOutputNodes)
   NS_IMPL_CYCLE_COLLECTION_UNLINK(mOutputParams)
@@ -59,9 +58,6 @@ AudioNode::AudioNode(AudioContext* aContext,
   , mChannelInterpretation(aChannelInterpretation)
 {
   MOZ_ASSERT(aContext);
-  nsDOMEventTargetHelper::BindToOwner(aContext->GetParentObject());
-  SetIsDOMBinding();
-  aContext->UpdateNodeCount(1);
 }
 
 AudioNode::~AudioNode()
@@ -69,9 +65,6 @@ AudioNode::~AudioNode()
  MOZ_ASSERT(mInputNodes.IsEmpty());
  MOZ_ASSERT(mOutputNodes.IsEmpty());
  MOZ_ASSERT(mOutputParams.IsEmpty());
-  if (mContext) {
-    mContext->UpdateNodeCount(-1);
-  }
 }
 
 template <class InputNode>

View File

@@ -578,7 +578,8 @@ OscillatorNode::Start(double aWhen, ErrorResult& aRv)
   // TODO: Perhaps we need to do more here.
   ns->SetStreamTimeParameter(OscillatorNodeEngine::START,
-                             Context(), aWhen);
+                             Context()->DestinationStream(),
+                             aWhen);
 
   MarkActive();
 }
@@ -604,7 +605,8 @@ OscillatorNode::Stop(double aWhen, ErrorResult& aRv)
  // TODO: Perhaps we need to do more here.
  ns->SetStreamTimeParameter(OscillatorNodeEngine::STOP,
-                             Context(), std::max(0.0, aWhen));
+                             Context()->DestinationStream(),
+                             std::max(0.0, aWhen));
 }
 
 void

View File

@@ -9,7 +9,6 @@ PARALLEL_DIRS += ['blink', 'test']
 TEST_TOOL_DIRS += ['compiledtest']
 
 EXPORTS += [
-    'AudioContext.h',
     'AudioParamTimeline.h',
     'MediaBufferDecoder.h',
     'ThreeDPoint.h',