gecko/content/media/webaudio/AudioContext.h
Karl Tomlinson 608b070c45 b=910171 add a general means to keep active nodes alive from the AudioContext r=ehsan
AudioNodes that hold playing references or tail-time references need those
references cleared when the AudioContext has completed or is shut down by the
window.

Storing the references on the AudioContext instead of on the AudioNodes will
allow the AudioContext to report playing references to the cycle collector
until offline rendering starts (bug 914033).  This is not necessary for
tail-time references, but it is tidier to use the same code for both playing
and tail-time references.
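
As a rough sketch of the intended flow (illustrative only: the node class and
the notification hook names below are invented for this example, and the
Context() accessor on the node is an assumption):

  // A node that can keep producing sound (e.g. during its tail) registers
  // itself, so the AudioContext holds a strong "playing"/"tail-time"
  // reference to it even after script drops its last reference.
  void SomeTailNode::NotifyStartedProducingSound()
  {
    Context()->RegisterActiveNode(this);
  }

  // Once the node will stay silent it unregisters, allowing it to be
  // collected.  Never do this from the destructor (see the header comment).
  void SomeTailNode::NotifyFinishedProducingSound()
  {
    Context()->UnregisterActiveNode(this);
  }

When the context completes or the window shuts it down, the AudioContext
drops all of these stored references at once.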

2013-09-17 11:53:40 +12:00

/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#ifndef AudioContext_h_
#define AudioContext_h_

#include "EnableWebAudioCheck.h"
#include "MediaBufferDecoder.h"
#include "mozilla/Attributes.h"
#include "mozilla/dom/TypedArray.h"
#include "nsAutoPtr.h"
#include "nsCOMPtr.h"
#include "nsCycleCollectionParticipant.h"
#include "nsDOMEventTargetHelper.h"
#include "nsHashKeys.h"
#include "nsTHashtable.h"
#include "js/TypeDecls.h"

// X11 has a #define for CurrentTime. Unbelievable :-(.
// See content/media/DOMMediaStream.h for more fun!
#ifdef CurrentTime
#undef CurrentTime
#endif

class nsPIDOMWindow;

namespace mozilla {

class DOMMediaStream;
class ErrorResult;
class MediaStream;
class MediaStreamGraph;

namespace dom {

class AnalyserNode;
class AudioBuffer;
class AudioBufferSourceNode;
class AudioDestinationNode;
class AudioListener;
class AudioNode;
class BiquadFilterNode;
class ChannelMergerNode;
class ChannelSplitterNode;
class ConvolverNode;
class DelayNode;
class DynamicsCompressorNode;
class GainNode;
class HTMLMediaElement;
class MediaElementAudioSourceNode;
class GlobalObject;
class MediaStreamAudioDestinationNode;
class MediaStreamAudioSourceNode;
class OscillatorNode;
class PannerNode;
class ScriptProcessorNode;
class WaveShaperNode;
class PeriodicWave;

class AudioContext MOZ_FINAL : public nsDOMEventTargetHelper,
                               public EnableWebAudioCheck
{
  AudioContext(nsPIDOMWindow* aParentWindow,
               bool aIsOffline,
               uint32_t aNumberOfChannels = 0,
               uint32_t aLength = 0,
               float aSampleRate = 0.0f);
  ~AudioContext();

public:
  NS_DECL_ISUPPORTS_INHERITED
  NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(AudioContext,
                                           nsDOMEventTargetHelper)

  nsPIDOMWindow* GetParentObject() const
  {
    return GetOwner();
  }

  void Shutdown(); // idempotent
  void Suspend();
  void Resume();

  virtual JSObject* WrapObject(JSContext* aCx,
                               JS::Handle<JSObject*> aScope) MOZ_OVERRIDE;

  using nsDOMEventTargetHelper::DispatchTrustedEvent;

  // Constructor for regular AudioContext
  static already_AddRefed<AudioContext>
  Constructor(const GlobalObject& aGlobal, ErrorResult& aRv);

  // Constructor for offline AudioContext
  static already_AddRefed<AudioContext>
  Constructor(const GlobalObject& aGlobal,
              uint32_t aNumberOfChannels,
              uint32_t aLength,
              float aSampleRate,
              ErrorResult& aRv);

  // AudioContext methods

  AudioDestinationNode* Destination() const
  {
    return mDestination;
  }

  float SampleRate() const
  {
    return mSampleRate;
  }

  double CurrentTime() const;

  AudioListener* Listener();

  already_AddRefed<AudioBufferSourceNode> CreateBufferSource();

  already_AddRefed<AudioBuffer>
  CreateBuffer(JSContext* aJSContext, uint32_t aNumberOfChannels,
               uint32_t aLength, float aSampleRate,
               ErrorResult& aRv);

  already_AddRefed<AudioBuffer>
  CreateBuffer(JSContext* aJSContext, const ArrayBuffer& aBuffer,
               bool aMixToMono, ErrorResult& aRv);

  already_AddRefed<MediaStreamAudioDestinationNode>
  CreateMediaStreamDestination(ErrorResult& aRv);

  already_AddRefed<ScriptProcessorNode>
  CreateScriptProcessor(uint32_t aBufferSize,
                        uint32_t aNumberOfInputChannels,
                        uint32_t aNumberOfOutputChannels,
                        ErrorResult& aRv);

  already_AddRefed<ScriptProcessorNode>
  CreateJavaScriptNode(uint32_t aBufferSize,
                       uint32_t aNumberOfInputChannels,
                       uint32_t aNumberOfOutputChannels,
                       ErrorResult& aRv)
  {
    return CreateScriptProcessor(aBufferSize, aNumberOfInputChannels,
                                 aNumberOfOutputChannels, aRv);
  }

  already_AddRefed<AnalyserNode>
  CreateAnalyser();

  already_AddRefed<GainNode>
  CreateGain();

  already_AddRefed<WaveShaperNode>
  CreateWaveShaper();

  already_AddRefed<GainNode>
  CreateGainNode()
  {
    return CreateGain();
  }

  already_AddRefed<MediaElementAudioSourceNode>
  CreateMediaElementSource(HTMLMediaElement& aMediaElement, ErrorResult& aRv);

  already_AddRefed<MediaStreamAudioSourceNode>
  CreateMediaStreamSource(DOMMediaStream& aMediaStream, ErrorResult& aRv);

  already_AddRefed<DelayNode>
  CreateDelay(double aMaxDelayTime, ErrorResult& aRv);

  already_AddRefed<DelayNode>
  CreateDelayNode(double aMaxDelayTime, ErrorResult& aRv)
  {
    return CreateDelay(aMaxDelayTime, aRv);
  }

  already_AddRefed<PannerNode>
  CreatePanner();

  already_AddRefed<ConvolverNode>
  CreateConvolver();

  already_AddRefed<ChannelSplitterNode>
  CreateChannelSplitter(uint32_t aNumberOfOutputs, ErrorResult& aRv);

  already_AddRefed<ChannelMergerNode>
  CreateChannelMerger(uint32_t aNumberOfInputs, ErrorResult& aRv);

  already_AddRefed<DynamicsCompressorNode>
  CreateDynamicsCompressor();

  already_AddRefed<BiquadFilterNode>
  CreateBiquadFilter();

  already_AddRefed<OscillatorNode>
  CreateOscillator();

  already_AddRefed<PeriodicWave>
  CreatePeriodicWave(const Float32Array& aRealData, const Float32Array& aImagData,
                     ErrorResult& aRv);

  void DecodeAudioData(const ArrayBuffer& aBuffer,
                       DecodeSuccessCallback& aSuccessCallback,
                       const Optional<OwningNonNull<DecodeErrorCallback> >& aFailureCallback);

  // OfflineAudioContext methods
  void StartRendering(ErrorResult& aRv);
  IMPL_EVENT_HANDLER(complete)

  bool IsOffline() const { return mIsOffline; }

  MediaStreamGraph* Graph() const;
  MediaStream* DestinationStream() const;

  // Nodes register here if they will produce sound even if they have silent
  // or no input connections. The AudioContext will keep registered nodes
  // alive until the context is collected. This takes care of "playing"
  // references and "tail-time" references.
  void RegisterActiveNode(AudioNode* aNode);
  // Nodes unregister when they have finished producing sound for the
  // foreseeable future.
  // Do NOT call UnregisterActiveNode from an AudioNode destructor.
  // If the destructor is called, then the Node has already been unregistered.
  // The destructor may be called during hashtable enumeration, during which
  // unregistering would not be safe.
  void UnregisterActiveNode(AudioNode* aNode);

  void UnregisterAudioBufferSourceNode(AudioBufferSourceNode* aNode);
  void UnregisterPannerNode(PannerNode* aNode);
  void UnregisterOscillatorNode(OscillatorNode* aNode);
  void UnregisterScriptProcessorNode(ScriptProcessorNode* aNode);
  void UpdatePannerSource();

  uint32_t MaxChannelCount() const;

  void Mute() const;
  void Unmute() const;

  JSContext* GetJSContext() const;

private:
  void RemoveFromDecodeQueue(WebAudioDecodeJob* aDecodeJob);
  void ShutdownDecoder();

  friend struct ::mozilla::WebAudioDecodeJob;

private:
  // Note that it's important for mSampleRate to be initialized before
  // mDestination, as mDestination's constructor needs to access it!
  const float mSampleRate;
  nsRefPtr<AudioDestinationNode> mDestination;
  nsRefPtr<AudioListener> mListener;
  MediaBufferDecoder mDecoder;
  nsTArray<nsRefPtr<WebAudioDecodeJob> > mDecodeJobs;
  // See RegisterActiveNode. These will keep the AudioContext alive while it
  // is rendering and the window remains alive.
  nsTHashtable<nsRefPtrHashKey<AudioNode> > mActiveNodes;
  // Hashsets containing all the PannerNodes, AudioBufferSourceNodes, and
  // OscillatorNodes; used to compute the doppler shift and to stop the
  // source nodes.  These are all weak pointers.
  nsTHashtable<nsPtrHashKey<PannerNode> > mPannerNodes;
  nsTHashtable<nsPtrHashKey<AudioBufferSourceNode> > mAudioBufferSourceNodes;
  nsTHashtable<nsPtrHashKey<OscillatorNode> > mOscillatorNodes;
  // Hashset containing all ScriptProcessorNodes in order to stop them.
  // These are all weak pointers.
  nsTHashtable<nsPtrHashKey<ScriptProcessorNode> > mScriptProcessorNodes;
  // Number of channels passed in the OfflineAudioContext ctor.
  uint32_t mNumberOfChannels;
  bool mIsOffline;
  bool mIsStarted;
};
} // namespace dom
} // namespace mozilla

#endif