Bug 952893. Part 1: Block the AudioDestinationNode when it's the only node in the AudioContext. r=padenot,karlt

Robert O'Callahan 2014-01-16 00:08:20 +13:00
parent d253ed932f
commit b026956759
12 changed files with 170 additions and 21 deletions

View File

@@ -10,6 +10,7 @@
#include "ThreeDPoint.h"
#include "AudioChannelFormat.h"
#include "AudioParamTimeline.h"
#include "AudioContext.h"
using namespace mozilla::dom;
@@ -30,7 +31,7 @@ AudioNodeStream::~AudioNodeStream()
}
void
AudioNodeStream::SetStreamTimeParameter(uint32_t aIndex, MediaStream* aRelativeToStream,
AudioNodeStream::SetStreamTimeParameter(uint32_t aIndex, AudioContext* aContext,
double aStreamTime)
{
class Message : public ControlMessage {
@@ -50,7 +51,9 @@ AudioNodeStream::SetStreamTimeParameter(uint32_t aIndex, MediaStream* aRelativeT
};
MOZ_ASSERT(this);
GraphImpl()->AppendMessage(new Message(this, aIndex, aRelativeToStream, aStreamTime));
GraphImpl()->AppendMessage(new Message(this, aIndex,
aContext->DestinationStream(),
aContext->DOMTimeToStreamTime(aStreamTime)));
}
void
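The setter keeps the MediaStreamGraph control-message shape: the main thread converts the DOM time immediately, using the AudioContext it is now handed, and queues a message that applies the already-converted value on the graph thread. Below is a condensed, standalone sketch of that shape (assumed, simplified names; a lambda stands in for the ControlMessage subclass, and the re-basing against the destination stream's clock that the real Message also performs is omitted):

#include <cstdint>
#include <functional>
#include <map>
#include <queue>

// Stand-in for the graph and its message queue (roughly, AppendMessage on the
// graph implementation plus the graph thread draining the queue).
struct GraphSketch {
  std::queue<std::function<void()>> messages;
  void AppendMessage(std::function<void()> message) { messages.push(std::move(message)); }
  void RunOneIteration() {               // what the graph thread would do
    while (!messages.empty()) {
      messages.front()();
      messages.pop();
    }
  }
};

struct StreamSketch {
  GraphSketch* graph = nullptr;
  std::map<uint32_t, double> params;     // parameter index -> stream-time value

  // Main-thread entry point: convert first, then hand the already-converted
  // value across to the graph thread inside a queued message.
  void SetStreamTimeParameter(uint32_t index, double extraCurrentTime, double domTime) {
    double streamTime = domTime - extraCurrentTime;   // DOMTimeToStreamTime
    graph->AppendMessage([this, index, streamTime] {
      params[index] = streamTime;                     // runs on the graph thread
    });
  }
};

Doing the conversion at call time matters because the extra-time bookkeeping is main-thread state; by the time the message runs on the graph thread, only a plain stream time is left.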

View File

@@ -16,6 +16,7 @@ namespace dom {
struct ThreeDPoint;
class AudioParamTimeline;
class DelayNodeEngine;
class AudioContext;
}
class ThreadSharedFloatArrayBufferList;
@@ -33,6 +34,8 @@ class AudioNodeEngine;
*/
class AudioNodeStream : public ProcessedMediaStream {
public:
typedef mozilla::dom::AudioContext AudioContext;
enum { AUDIO_TRACK = 1 };
typedef nsAutoTArray<AudioChunk, 1> OutputChunks;
@@ -66,7 +69,7 @@ public:
* Sets a parameter that's a time relative to some stream's played time.
* This time is converted to a time relative to this stream when it's set.
*/
void SetStreamTimeParameter(uint32_t aIndex, MediaStream* aRelativeToStream,
void SetStreamTimeParameter(uint32_t aIndex, AudioContext* aContext,
double aStreamTime);
void SetDoubleParameter(uint32_t aIndex, double aValue);
void SetInt32Parameter(uint32_t aIndex, int32_t aValue);

View File

@@ -532,7 +532,7 @@ AudioBufferSourceNode::Start(double aWhen, double aOffset,
// Don't set parameter unnecessarily
if (aWhen > 0.0) {
ns->SetStreamTimeParameter(START, Context()->DestinationStream(), aWhen);
ns->SetStreamTimeParameter(START, Context(), aWhen);
}
MarkActive();
@@ -616,8 +616,7 @@ AudioBufferSourceNode::Stop(double aWhen, ErrorResult& aRv)
return;
}
ns->SetStreamTimeParameter(STOP, Context()->DestinationStream(),
std::max(0.0, aWhen));
ns->SetStreamTimeParameter(STOP, Context(), std::max(0.0, aWhen));
}
void

View File

@@ -84,6 +84,7 @@ AudioContext::AudioContext(nsPIDOMWindow* aWindow,
: nsDOMEventTargetHelper(aWindow)
, mSampleRate(GetSampleRateForAudioContext(aIsOffline, aSampleRate))
, mNumberOfChannels(aNumberOfChannels)
, mNodeCount(0)
, mIsOffline(aIsOffline)
, mIsStarted(!aIsOffline)
, mIsShutDown(false)
@@ -95,6 +96,10 @@ AudioContext::AudioContext(nsPIDOMWindow* aWindow,
mDestination = new AudioDestinationNode(this, aIsOffline, aNumberOfChannels,
aLength, aSampleRate);
mDestination->Stream()->AddAudioOutput(&gWebAudioOutputKey);
// We skip calling SetIsOnlyNodeForContext during mDestination's constructor,
// because we can only call SetIsOnlyNodeForContext after mDestination has
// been set up.
mDestination->SetIsOnlyNodeForContext(true);
}
AudioContext::~AudioContext()
@@ -543,7 +548,8 @@ AudioContext::DestinationStream() const
double
AudioContext::CurrentTime() const
{
return MediaTimeToSeconds(Destination()->Stream()->GetCurrentTime());
return MediaTimeToSeconds(Destination()->Stream()->GetCurrentTime()) +
ExtraCurrentTime();
}
void
@@ -589,6 +595,18 @@ AudioContext::Resume()
}
}
void
AudioContext::UpdateNodeCount(int32_t aDelta)
{
bool firstNode = mNodeCount == 0;
mNodeCount += aDelta;
MOZ_ASSERT(mNodeCount >= 0);
// mDestinationNode may be null when we're destroying nodes unlinked by CC
if (!firstNode && mDestination) {
mDestination->SetIsOnlyNodeForContext(mNodeCount == 1);
}
}
JSContext*
AudioContext::GetJSContext() const
{
@@ -679,5 +697,11 @@ AudioContext::CollectReports(nsIHandleReportCallback* aHandleReport,
amount, "Memory used by AudioContext objects (Web Audio).");
}
double
AudioContext::ExtraCurrentTime() const
{
return mDestination->ExtraCurrentTime();
}
}
}
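Taken together, UpdateNodeCount, CurrentTime and DOMTimeToStreamTime (declared in the header below) implement a small piece of bookkeeping: the destination's stream is blocked while the destination is the only node, and the wall-clock time spent blocked is added back so that currentTime keeps advancing for script. A minimal standalone model of that bookkeeping, with assumed names and with the blocked-time tracking reduced to a plain member:

#include <cassert>

// Simplified model, not the Gecko classes. mStreamTime stands in for the
// destination MediaStream's clock, and mExtraCurrentTime for the wall-clock
// seconds spent blocked; the real tracking of that value lives in
// AudioDestinationNode (see its hunk below).
struct ContextTimeModel {
  int mNodeCount = 0;
  bool mDestinationBlocked = false;
  double mStreamTime = 0.0;
  double mExtraCurrentTime = 0.0;

  // Mirrors AudioContext::UpdateNodeCount. The destination itself is counted,
  // so "only node" means the count is exactly 1.
  void UpdateNodeCount(int delta) {
    bool firstNode = mNodeCount == 0;
    mNodeCount += delta;
    assert(mNodeCount >= 0);
    if (!firstNode) {  // skipped while the destination itself is being constructed
      mDestinationBlocked = (mNodeCount == 1);
    }
  }

  // AudioContext.currentTime keeps advancing while the stream is blocked,
  // because the blocked span is added back in.
  double CurrentTime() const { return mStreamTime + mExtraCurrentTime; }

  // DOM-timeline parameters are shifted the other way before they reach the
  // MediaStreamGraph, whose clock never saw the blocked span.
  double DOMTimeToStreamTime(double t) const { return t - mExtraCurrentTime; }
};

The firstNode guard matches the constructor comment above: the destination's own UpdateNodeCount(1) runs while mDestination is still null, so the context calls SetIsOnlyNodeForContext(true) explicitly once the member has been assigned.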

View File

@@ -246,7 +246,23 @@ public:
AudioChannel MozAudioChannelType() const;
void SetMozAudioChannelType(AudioChannel aValue, ErrorResult& aRv);
void UpdateNodeCount(int32_t aDelta);
double DOMTimeToStreamTime(double aTime) const
{
return aTime - ExtraCurrentTime();
}
private:
/**
* Returns the amount of extra time added to the current time of the
* AudioDestinationNode's MediaStream to get this AudioContext's currentTime.
* Must be subtracted from all DOM API parameter times that are on the same
* timeline as AudioContext's currentTime to get times we can pass to the
* MediaStreamGraph.
*/
double ExtraCurrentTime() const;
void RemoveFromDecodeQueue(WebAudioDecodeJob* aDecodeJob);
void ShutdownDecoder();
@@ -272,6 +288,8 @@ private:
nsTHashtable<nsPtrHashKey<PannerNode> > mPannerNodes;
// Number of channels passed in the OfflineAudioContext ctor.
uint32_t mNumberOfChannels;
// Number of nodes that currently exist for this AudioContext
int32_t mNodeCount;
bool mIsOffline;
bool mIsStarted;
bool mIsShutDown;
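A concrete set of numbers (hypothetical, purely for illustration) makes the two conversions above easier to follow:

// Suppose the context sat blocked for 1.5 s with only the destination node
// alive, and the destination stream's own clock reads 2.0 s, since it paused
// while blocked. All values are made up for the example.
double extraCurrentTime = 1.5;                            // ExtraCurrentTime()
double streamTime       = 2.0;                            // destination stream clock
double currentTime      = streamTime + extraCurrentTime;  // 3.5 s, what script sees
double domWhen          = 4.0;                            // script schedules an event at t = 4.0
double streamWhen       = domWhen - extraCurrentTime;     // 2.5 s, DOMTimeToStreamTime(4.0)

The event lands 0.5 s after the script-visible currentTime on both timelines, which is exactly the property the conversion is meant to preserve.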

View File

@@ -17,6 +17,8 @@
#include "nsIPermissionManager.h"
#include "nsIScriptObjectPrincipal.h"
#include "nsServiceManagerUtils.h"
#include "nsIAppShell.h"
#include "nsWidgetsCID.h"
namespace mozilla {
namespace dom {
@@ -217,6 +219,9 @@ AudioDestinationNode::AudioDestinationNode(AudioContext* aContext,
, mAudioChannel(AudioChannel::Normal)
, mIsOffline(aIsOffline)
, mHasFinished(false)
, mExtraCurrentTime(0)
, mExtraCurrentTimeSinceLastStartedBlocking(0)
, mExtraCurrentTimeUpdatedSinceLastStableState(false)
{
MediaStreamGraph* graph = aIsOffline ?
MediaStreamGraph::CreateNonRealtimeInstance() :
@@ -486,6 +491,76 @@ AudioDestinationNode::CreateAudioChannelAgent()
mAudioChannelAgent->StartPlaying(&state);
SetCanPlay(state == AudioChannelState::AUDIO_CHANNEL_STATE_NORMAL);
}
void
AudioDestinationNode::NotifyStableState()
{
mExtraCurrentTimeUpdatedSinceLastStableState = false;
}
static NS_DEFINE_CID(kAppShellCID, NS_APPSHELL_CID);
void
AudioDestinationNode::ScheduleStableStateNotification()
{
nsCOMPtr<nsIAppShell> appShell = do_GetService(kAppShellCID);
if (appShell) {
nsCOMPtr<nsIRunnable> event =
NS_NewRunnableMethod(this, &AudioDestinationNode::NotifyStableState);
appShell->RunInStableState(event);
}
}
double
AudioDestinationNode::ExtraCurrentTime()
{
if (!mStartedBlockingDueToBeingOnlyNode.IsNull() &&
!mExtraCurrentTimeUpdatedSinceLastStableState) {
mExtraCurrentTimeUpdatedSinceLastStableState = true;
mExtraCurrentTimeSinceLastStartedBlocking =
(TimeStamp::Now() - mStartedBlockingDueToBeingOnlyNode).ToSeconds();
ScheduleStableStateNotification();
}
return mExtraCurrentTime + mExtraCurrentTimeSinceLastStartedBlocking;
}
void
AudioDestinationNode::SetIsOnlyNodeForContext(bool aIsOnlyNode)
{
if (!mStartedBlockingDueToBeingOnlyNode.IsNull() == aIsOnlyNode) {
// Nothing changed.
return;
}
if (!mStream) {
// DestroyMediaStream has been called, presumably during CC Unlink().
return;
}
if (mIsOffline) {
// Don't block the destination stream for offline AudioContexts, since
// we expect the zero data produced when there are no other nodes to
// show up in its result buffer. Also, we would get confused by adding
// ExtraCurrentTime before StartRendering has even been called.
return;
}
if (aIsOnlyNode) {
mStream->ChangeExplicitBlockerCount(1);
mStartedBlockingDueToBeingOnlyNode = TimeStamp::Now();
mExtraCurrentTimeSinceLastStartedBlocking = 0;
// Don't do an update of mExtraCurrentTimeSinceLastStartedBlocking until the next stable state.
mExtraCurrentTimeUpdatedSinceLastStableState = true;
ScheduleStableStateNotification();
} else {
// Force update of mExtraCurrentTimeSinceLastStartedBlocking if necessary
ExtraCurrentTime();
mExtraCurrentTime += mExtraCurrentTimeSinceLastStartedBlocking;
mStream->ChangeExplicitBlockerCount(-1);
mStartedBlockingDueToBeingOnlyNode = TimeStamp();
}
}
}
}
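The TimeStamp and stable-state machinery above exists so that ExtraCurrentTime() samples the wall clock lazily and then freezes the sampled delta until the next stable state (roughly, the next turn of the event loop); repeated reads of currentTime within one script callback therefore agree with each other. A condensed sketch of that caching pattern, with an assumed runInStableState callback standing in for nsIAppShell::RunInStableState and with the blocker manipulation left out:

#include <chrono>
#include <functional>

class BlockedTimeSampler {
  using Clock = std::chrono::steady_clock;

public:
  // runInStableState: queues a callback the embedder runs at the next stable state.
  explicit BlockedTimeSampler(std::function<void(std::function<void()>)> runInStableState)
    : mRunInStableState(std::move(runInStableState)) {}

  void StartBlocking() {
    mStartedBlocking = Clock::now();
    mSinceStartedBlocking = 0.0;
    mBlocked = true;
    mUpdatedSinceStableState = true;   // defer the first sample to the next stable state
    ScheduleClear();
  }

  void StopBlocking() {
    ExtraCurrentTime();                // force a final sample if one is still pending
    mExtra += mSinceStartedBlocking;   // fold the finished span into the running total
    mSinceStartedBlocking = 0.0;
    mBlocked = false;
  }

  // Sample the wall clock at most once per stable state; between samples the
  // frozen value keeps currentTime consistent within a single callback.
  double ExtraCurrentTime() {
    if (mBlocked && !mUpdatedSinceStableState) {
      mUpdatedSinceStableState = true;
      mSinceStartedBlocking =
        std::chrono::duration<double>(Clock::now() - mStartedBlocking).count();
      ScheduleClear();
    }
    return mExtra + mSinceStartedBlocking;
  }

private:
  void ScheduleClear() {
    mRunInStableState([this] { mUpdatedSinceStableState = false; });
  }

  std::function<void(std::function<void()>)> mRunInStableState;
  Clock::time_point mStartedBlocking;
  double mExtra = 0.0;
  double mSinceStartedBlocking = 0.0;
  bool mBlocked = false;
  bool mUpdatedSinceStableState = false;
};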

View File

@@ -70,12 +70,22 @@ public:
virtual void NotifyMainThreadStateChanged() MOZ_OVERRIDE;
void FireOfflineCompletionEvent();
// An amount that should be added to the MediaStream's current time to
// get the AudioContext.currentTime.
double ExtraCurrentTime();
// When aIsOnlyNode is true, this is the only node for the AudioContext.
void SetIsOnlyNodeForContext(bool aIsOnlyNode);
private:
bool CheckAudioChannelPermissions(AudioChannel aValue);
void CreateAudioChannelAgent();
void SetCanPlay(bool aCanPlay);
void NotifyStableState();
void ScheduleStableStateNotification();
SelfReference<AudioDestinationNode> mOfflineRenderingRef;
uint32_t mFramesToProduce;
@@ -85,6 +95,11 @@ private:
AudioChannel mAudioChannel;
bool mIsOffline;
bool mHasFinished;
TimeStamp mStartedBlockingDueToBeingOnlyNode;
double mExtraCurrentTime;
double mExtraCurrentTimeSinceLastStartedBlocking;
bool mExtraCurrentTimeUpdatedSinceLastStableState;
};
}

View File

@@ -19,6 +19,7 @@ NS_IMPL_CYCLE_COLLECTION_CLASS(AudioNode)
NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN_INHERITED(AudioNode, nsDOMEventTargetHelper)
tmp->DisconnectFromGraph();
tmp->mContext->UpdateNodeCount(-1);
NS_IMPL_CYCLE_COLLECTION_UNLINK(mContext)
NS_IMPL_CYCLE_COLLECTION_UNLINK(mOutputNodes)
NS_IMPL_CYCLE_COLLECTION_UNLINK(mOutputParams)
@@ -58,6 +59,9 @@ AudioNode::AudioNode(AudioContext* aContext,
, mChannelInterpretation(aChannelInterpretation)
{
MOZ_ASSERT(aContext);
nsDOMEventTargetHelper::BindToOwner(aContext->GetParentObject());
SetIsDOMBinding();
aContext->UpdateNodeCount(1);
}
AudioNode::~AudioNode()
@@ -65,6 +69,9 @@ AudioNode::~AudioNode()
MOZ_ASSERT(mInputNodes.IsEmpty());
MOZ_ASSERT(mOutputNodes.IsEmpty());
MOZ_ASSERT(mOutputParams.IsEmpty());
if (mContext) {
mContext->UpdateNodeCount(-1);
}
}
template <class InputNode>
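The count must drop exactly once per node even though there are two teardown paths: cycle-collector Unlink, which also clears mContext, and the plain destructor. A small standalone sketch of that pattern, with std::shared_ptr standing in for the cycle-collected reference:

#include <memory>

struct ContextCounter {
  int nodeCount = 0;                    // stands in for AudioContext's mNodeCount
};

class NodeSketch {
public:
  explicit NodeSketch(std::shared_ptr<ContextCounter> context)
    : mContext(std::move(context)) {
    ++mContext->nodeCount;              // mirrors aContext->UpdateNodeCount(1) in the ctor
  }

  // Mirrors the cycle-collection Unlink hook: drop the count, then release
  // the context reference so the destructor knows not to drop it again.
  void Unlink() {
    if (mContext) {
      --mContext->nodeCount;
      mContext.reset();
    }
  }

  ~NodeSketch() {
    if (mContext) {                     // skipped when Unlink already ran
      --mContext->nodeCount;
    }
  }

private:
  std::shared_ptr<ContextCounter> mContext;
};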

View File

@@ -42,6 +42,11 @@ public:
return mNode->Context();
}
double DOMTimeToStreamTime(double aTime) const
{
return mNode->Context()->DOMTimeToStreamTime(aTime);
}
virtual JSObject* WrapObject(JSContext* aCx,
JS::Handle<JSObject*> aScope) MOZ_OVERRIDE;
@@ -54,7 +59,7 @@ public:
return;
}
AudioParamTimeline::SetValueCurveAtTime(aValues.Data(), aValues.Length(),
aStartTime, aDuration, aRv);
DOMTimeToStreamTime(aStartTime), aDuration, aRv);
mCallback(mNode);
}
@@ -76,7 +81,7 @@ public:
aRv.Throw(NS_ERROR_DOM_NOT_SUPPORTED_ERR);
return;
}
AudioParamTimeline::SetValueAtTime(aValue, aStartTime, aRv);
AudioParamTimeline::SetValueAtTime(aValue, DOMTimeToStreamTime(aStartTime), aRv);
mCallback(mNode);
}
void LinearRampToValueAtTime(float aValue, double aEndTime, ErrorResult& aRv)
@@ -85,7 +90,7 @@ public:
aRv.Throw(NS_ERROR_DOM_NOT_SUPPORTED_ERR);
return;
}
AudioParamTimeline::LinearRampToValueAtTime(aValue, aEndTime, aRv);
AudioParamTimeline::LinearRampToValueAtTime(aValue, DOMTimeToStreamTime(aEndTime), aRv);
mCallback(mNode);
}
void ExponentialRampToValueAtTime(float aValue, double aEndTime, ErrorResult& aRv)
@ -94,7 +99,7 @@ public:
aRv.Throw(NS_ERROR_DOM_NOT_SUPPORTED_ERR);
return;
}
AudioParamTimeline::ExponentialRampToValueAtTime(aValue, aEndTime, aRv);
AudioParamTimeline::ExponentialRampToValueAtTime(aValue, DOMTimeToStreamTime(aEndTime), aRv);
mCallback(mNode);
}
void SetTargetAtTime(float aTarget, double aStartTime, double aTimeConstant, ErrorResult& aRv)
@@ -104,7 +109,7 @@ public:
aRv.Throw(NS_ERROR_DOM_NOT_SUPPORTED_ERR);
return;
}
AudioParamTimeline::SetTargetAtTime(aTarget, aStartTime, aTimeConstant, aRv);
AudioParamTimeline::SetTargetAtTime(aTarget, DOMTimeToStreamTime(aStartTime), aTimeConstant, aRv);
mCallback(mNode);
}
void SetTargetValueAtTime(float aTarget, double aStartTime, double aTimeConstant, ErrorResult& aRv)
@ -117,7 +122,7 @@ public:
aRv.Throw(NS_ERROR_DOM_NOT_SUPPORTED_ERR);
return;
}
AudioParamTimeline::CancelScheduledValues(aStartTime);
AudioParamTimeline::CancelScheduledValues(DOMTimeToStreamTime(aStartTime));
mCallback(mNode);
}

View File

@@ -578,8 +578,7 @@ OscillatorNode::Start(double aWhen, ErrorResult& aRv)
// TODO: Perhaps we need to do more here.
ns->SetStreamTimeParameter(OscillatorNodeEngine::START,
Context()->DestinationStream(),
aWhen);
Context(), aWhen);
MarkActive();
}
@@ -605,8 +604,7 @@ OscillatorNode::Stop(double aWhen, ErrorResult& aRv)
// TODO: Perhaps we need to do more here.
ns->SetStreamTimeParameter(OscillatorNodeEngine::STOP,
Context()->DestinationStream(),
std::max(0.0, aWhen));
Context(), std::max(0.0, aWhen));
}
void

View File

@@ -9,6 +9,7 @@ PARALLEL_DIRS += ['blink', 'test']
TEST_TOOL_DIRS += ['compiledtest']
EXPORTS += [
'AudioContext.h',
'AudioParamTimeline.h',
'MediaBufferDecoder.h',
'ThreeDPoint.h',

View File

@@ -53,10 +53,11 @@ function compareChannels(buf1, buf2,
}
};
is(difference, 0, "Found " + difference + " different samples, maxDifference: " +
maxDifference + ", first bad index: " + firstBadIndex +
" with source offset " + sourceOffset + " and destination offset " +
destOffset);
is(difference, 0, "maxDifference: " + maxDifference +
", first bad index: " + firstBadIndex +
" with test-data offset " + sourceOffset + " and expected-data offset " +
destOffset + "; corresponding values " + buf1[firstBadIndex + sourceOffset] +
" and " + buf2[firstBadIndex + destOffset] + " --- differences");
}
function compareBuffers(got, expected) {