Bug 836599 - Part 10: Use the non-realtime MediaStreamGraph API and a custom destination node engine for OfflineAudioContext; r=roc

We offload most of the logic for OfflineAudioContext to the destination node,
since that is where the sample recording needs to happen; doing so keeps the
code simpler.
Ehsan Akhgari 2013-05-16 19:30:42 -04:00
parent a96e98f013
commit e27939f54d
4 changed files with 87 additions and 11 deletions
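
The engine added in this part only passes its input through; the sample recording that the commit message mentions is not implemented here. As a rough, standalone sketch of that recording pattern -- hypothetical names only, not Gecko APIs -- an offline sink can copy fixed-size blocks into a preallocated buffer and report completion once the requested number of frames has been produced:

    #include <algorithm>
    #include <cstdint>
    #include <cstdio>
    #include <vector>

    // Hypothetical, self-contained sketch of offline rendering: a destination
    // sink records fixed-size blocks into a buffer and reports completion once
    // the requested number of frames has been produced. Names here (OfflineSink,
    // kBlockSize, AppendBlock) are illustrative and not Gecko classes.
    static const uint32_t kBlockSize = 128;  // frames processed per block

    class OfflineSink {
    public:
      OfflineSink(uint32_t aChannels, uint32_t aLength)
        : mChannels(aChannels), mLength(aLength), mFramesSoFar(0),
          mBuffer(aChannels, std::vector<float>(aLength, 0.0f)) {}

      // Called once per block by the non-realtime processing loop.
      // Copies the input into the recording buffer and returns true once
      // mLength frames have been recorded.
      bool AppendBlock(const std::vector<std::vector<float> >& aInput) {
        uint32_t frames = std::min(kBlockSize, mLength - mFramesSoFar);
        for (uint32_t c = 0; c < mChannels; ++c) {
          for (uint32_t i = 0; i < frames; ++i) {
            mBuffer[c][mFramesSoFar + i] = aInput[c][i];
          }
        }
        mFramesSoFar += frames;
        return mFramesSoFar >= mLength;
      }

    private:
      uint32_t mChannels;
      uint32_t mLength;
      uint32_t mFramesSoFar;
      std::vector<std::vector<float> > mBuffer;
    };

    int main() {
      // "StartRendering": drive the graph as fast as possible (no audio
      // hardware clock) until the sink has produced the requested frames.
      OfflineSink sink(2, 44100);
      std::vector<std::vector<float> > silence(2, std::vector<float>(kBlockSize, 0.0f));
      while (!sink.AppendBlock(silence)) {
        // a real graph would pull each upstream node here
      }
      std::printf("offline rendering complete\n");
      return 0;
    }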

AudioContext.cpp

@@ -46,8 +46,14 @@ NS_INTERFACE_MAP_END_INHERITING(nsDOMEventTargetHelper)
 static uint8_t gWebAudioOutputKey;
-AudioContext::AudioContext(nsPIDOMWindow* aWindow, bool aIsOffline)
-  : mDestination(new AudioDestinationNode(this, MediaStreamGraph::GetInstance()))
+AudioContext::AudioContext(nsPIDOMWindow* aWindow,
+                           bool aIsOffline,
+                           uint32_t aNumberOfChannels,
+                           uint32_t aLength,
+                           float aSampleRate)
+  : mDestination(new AudioDestinationNode(this, aIsOffline,
+                                          aNumberOfChannels,
+                                          aLength, aSampleRate))
   , mIsOffline(aIsOffline)
 {
   // Actually play audio
@@ -107,7 +113,11 @@ AudioContext::Constructor(const GlobalObject& aGlobal,
     return nullptr;
   }
-  nsRefPtr<AudioContext> object = new AudioContext(window, true);
+  nsRefPtr<AudioContext> object = new AudioContext(window,
+                                                   true,
+                                                   aNumberOfChannels,
+                                                   aLength,
+                                                   aSampleRate);
   window->AddAudioContext(object);
   return object.forget();
 }
@@ -465,5 +475,13 @@ AudioContext::GetJSContext() const
   return scriptContext->GetNativeContext();
 }
+void
+AudioContext::StartRendering()
+{
+  MOZ_ASSERT(mIsOffline, "This should only be called on OfflineAudioContext");
+  mDestination->StartRendering();
+}
 }
 }

AudioContext.h

@@ -58,8 +58,12 @@ class WaveShaperNode;
 class AudioContext MOZ_FINAL : public nsDOMEventTargetHelper,
                                public EnableWebAudioCheck
 {
-  explicit AudioContext(nsPIDOMWindow* aParentWindow, bool aIsOffline);
-  virtual ~AudioContext();
+  AudioContext(nsPIDOMWindow* aParentWindow,
+               bool aIsOffline,
+               uint32_t aNumberOfChannels = 0,
+               uint32_t aLength = 0,
+               float aSampleRate = 0.0f);
+  ~AudioContext();
 public:
   NS_DECL_ISUPPORTS_INHERITED
@@ -177,7 +181,7 @@ public:
                        const Optional<OwningNonNull<DecodeErrorCallback> >& aFailureCallback);
   // OfflineAudioContext methods
-  void StartRendering() {}
+  void StartRendering();
   IMPL_EVENT_HANDLER(complete)
   uint32_t GetRate() const { return IdealAudioRate(); }

AudioDestinationNode.cpp

@@ -13,16 +13,54 @@
 namespace mozilla {
 namespace dom {
+class OfflineDestinationNodeEngine : public AudioNodeEngine
+{
+public:
+  OfflineDestinationNodeEngine(AudioDestinationNode* aNode,
+                               uint32_t aNumberOfChannels,
+                               uint32_t aLength,
+                               float aSampleRate)
+    : AudioNodeEngine(aNode)
+    , mNumberOfChannels(aNumberOfChannels)
+    , mLength(aLength)
+    , mSampleRate(aSampleRate)
+  {
+  }
+  virtual void ProduceAudioBlock(AudioNodeStream* aStream,
+                                 const AudioChunk& aInput,
+                                 AudioChunk* aOutput,
+                                 bool* aFinished) MOZ_OVERRIDE
+  {
+    *aOutput = aInput;
+  }
+  uint32_t mNumberOfChannels;
+  uint32_t mLength;
+  float mSampleRate;
+};
 NS_IMPL_ISUPPORTS_INHERITED0(AudioDestinationNode, AudioNode)
-AudioDestinationNode::AudioDestinationNode(AudioContext* aContext, MediaStreamGraph* aGraph)
+AudioDestinationNode::AudioDestinationNode(AudioContext* aContext,
+                                           bool aIsOffline,
+                                           uint32_t aNumberOfChannels,
+                                           uint32_t aLength,
+                                           float aSampleRate)
   : AudioNode(aContext,
-              2,
+              aIsOffline ? aNumberOfChannels : 2,
               ChannelCountMode::Explicit,
               ChannelInterpretation::Speakers)
+  , mFramesToProduce(aLength)
 {
-  mStream = aGraph->CreateAudioNodeStream(new AudioNodeEngine(this),
-                                          MediaStreamGraph::EXTERNAL_STREAM);
+  MediaStreamGraph* graph = aIsOffline ?
+                            MediaStreamGraph::CreateNonRealtimeInstance() :
+                            MediaStreamGraph::GetInstance();
+  AudioNodeEngine* engine = aIsOffline ?
+    new OfflineDestinationNodeEngine(this, aNumberOfChannels,
+                                     aLength, aSampleRate) :
+    new AudioNodeEngine(this);
+  mStream = graph->CreateAudioNodeStream(engine, MediaStreamGraph::EXTERNAL_STREAM);
 }
 JSObject*
@@ -31,5 +69,11 @@ AudioDestinationNode::WrapObject(JSContext* aCx, JS::Handle<JSObject*> aScope)
   return AudioDestinationNodeBinding::Wrap(aCx, aScope, this);
 }
+void
+AudioDestinationNode::StartRendering()
+{
+  mStream->Graph()->StartNonRealtimeProcessing(mFramesToProduce);
+}
 }
 }

AudioDestinationNode.h

@@ -17,7 +17,13 @@ class AudioContext;
 class AudioDestinationNode : public AudioNode
 {
 public:
-  AudioDestinationNode(AudioContext* aContext, MediaStreamGraph* aGraph);
+  // This node type knows what MediaStreamGraph to use based on
+  // whether it's in offline mode.
+  AudioDestinationNode(AudioContext* aContext,
+                       bool aIsOffline,
+                       uint32_t aNumberOfChannels = 0,
+                       uint32_t aLength = 0,
+                       float aSampleRate = 0.0f);
   NS_DECL_ISUPPORTS_INHERITED
@@ -29,6 +35,10 @@ public:
     return 0;
   }
+  void StartRendering();
+private:
+  uint32_t mFramesToProduce;
 };
 }