2012-08-31 13:59:37 -07:00
|
|
|
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
|
|
|
|
/* vim:set ts=2 sw=2 sts=2 et cindent: */
|
|
|
|
/* This Source Code Form is subject to the terms of the Mozilla Public
|
|
|
|
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
|
|
|
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
|
|
|
|
|
|
|
#include "AudioContext.h"
|
2013-09-10 00:03:37 -07:00
|
|
|
|
2013-03-17 00:55:15 -07:00
|
|
|
#include "nsPIDOMWindow.h"
|
2012-08-31 13:59:37 -07:00
|
|
|
#include "mozilla/ErrorResult.h"
|
2013-07-24 20:01:49 -07:00
|
|
|
#include "mozilla/dom/AnalyserNode.h"
|
2013-05-16 16:30:41 -07:00
|
|
|
#include "mozilla/dom/AudioContextBinding.h"
|
2013-07-24 20:01:49 -07:00
|
|
|
#include "mozilla/dom/HTMLMediaElement.h"
|
2013-05-16 16:30:41 -07:00
|
|
|
#include "mozilla/dom/OfflineAudioContextBinding.h"
|
2013-09-10 00:03:37 -07:00
|
|
|
#include "mozilla/dom/OwningNonNull.h"
|
2013-02-04 15:07:25 -08:00
|
|
|
#include "MediaStreamGraph.h"
|
2012-09-18 16:07:33 -07:00
|
|
|
#include "AudioDestinationNode.h"
|
|
|
|
#include "AudioBufferSourceNode.h"
|
2012-09-21 15:42:14 -07:00
|
|
|
#include "AudioBuffer.h"
|
2012-10-31 12:09:32 -07:00
|
|
|
#include "GainNode.h"
|
2013-07-24 20:01:49 -07:00
|
|
|
#include "MediaElementAudioSourceNode.h"
|
2013-07-24 04:29:39 -07:00
|
|
|
#include "MediaStreamAudioSourceNode.h"
|
2012-10-31 17:26:03 -07:00
|
|
|
#include "DelayNode.h"
|
2012-11-05 18:14:13 -08:00
|
|
|
#include "PannerNode.h"
|
2012-11-05 16:26:03 -08:00
|
|
|
#include "AudioListener.h"
|
2012-11-06 17:01:11 -08:00
|
|
|
#include "DynamicsCompressorNode.h"
|
2012-11-07 17:59:14 -08:00
|
|
|
#include "BiquadFilterNode.h"
|
2013-04-13 18:37:04 -07:00
|
|
|
#include "ScriptProcessorNode.h"
|
2013-05-05 08:49:37 -07:00
|
|
|
#include "ChannelMergerNode.h"
|
2013-05-05 08:49:13 -07:00
|
|
|
#include "ChannelSplitterNode.h"
|
2013-05-21 12:17:47 -07:00
|
|
|
#include "MediaStreamAudioDestinationNode.h"
|
2013-05-13 21:12:30 -07:00
|
|
|
#include "WaveShaperNode.h"
|
2013-06-19 15:24:26 -07:00
|
|
|
#include "PeriodicWave.h"
|
2013-06-10 13:07:55 -07:00
|
|
|
#include "ConvolverNode.h"
|
2013-08-19 11:53:00 -07:00
|
|
|
#include "OscillatorNode.h"
|
2013-02-01 14:13:23 -08:00
|
|
|
#include "nsNetUtil.h"
|
2012-08-31 13:59:37 -07:00
|
|
|
|
|
|
|
namespace mozilla {
|
2012-09-07 15:13:26 -07:00
|
|
|
namespace dom {
|
2012-08-31 13:59:37 -07:00
|
|
|
|
2013-09-23 18:47:30 -07:00
|
|
|
// Cycle-collection participation for AudioContext.  mDestination and
// mListener are always traversed/unlinked; mActiveNodes only when the
// context has not been started (the traverse below asserts that such a
// context is offline).
NS_IMPL_CYCLE_COLLECTION_CLASS(AudioContext)

NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN(AudioContext)
NS_IMPL_CYCLE_COLLECTION_UNLINK(mDestination)
NS_IMPL_CYCLE_COLLECTION_UNLINK(mListener)
// Active nodes are only released here for a not-yet-started context.
if (!tmp->mIsStarted) {
NS_IMPL_CYCLE_COLLECTION_UNLINK(mActiveNodes)
}
NS_IMPL_CYCLE_COLLECTION_UNLINK_END_INHERITED(nsDOMEventTargetHelper)

NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN_INHERITED(AudioContext, nsDOMEventTargetHelper)
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mDestination)
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mListener)
// Mirror the unlink condition above: only traverse mActiveNodes when the
// context is not started, which should only ever be an offline context.
if (!tmp->mIsStarted) {
MOZ_ASSERT(tmp->mIsOffline,
"Online AudioContexts should always be started");
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mActiveNodes)
}
NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END
|
2012-11-27 15:08:22 -08:00
|
|
|
|
2013-04-24 21:28:39 -07:00
|
|
|
// Standard refcounting / QueryInterface boilerplate, inherited from
// nsDOMEventTargetHelper.
NS_IMPL_ADDREF_INHERITED(AudioContext, nsDOMEventTargetHelper)
NS_IMPL_RELEASE_INHERITED(AudioContext, nsDOMEventTargetHelper)
NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION_INHERITED(AudioContext)
NS_INTERFACE_MAP_END_INHERITING(nsDOMEventTargetHelper)
|
2012-08-31 13:59:37 -07:00
|
|
|
|
// Key object whose address identifies Web Audio's output on the destination
// stream (see AddAudioOutput in the AudioContext constructor).  Only the
// address matters; the value is never used.
static uint8_t gWebAudioOutputKey;
|
|
|
|
|
2013-05-16 16:30:42 -07:00
|
|
|
// Shared constructor for realtime AudioContexts (aIsOffline == false) and
// OfflineAudioContexts.  For a realtime context the sample rate is forced to
// IdealAudioRate(); aLength/aSampleRate are forwarded to the destination node
// and are primarily meaningful for offline rendering.
AudioContext::AudioContext(nsPIDOMWindow* aWindow,
                           bool aIsOffline,
                           uint32_t aNumberOfChannels,
                           uint32_t aLength,
                           float aSampleRate)
  : mSampleRate(aIsOffline ? aSampleRate : IdealAudioRate())
  , mNumberOfChannels(aNumberOfChannels)
  , mIsOffline(aIsOffline)
  // Offline contexts only start when StartRendering() is called.
  , mIsStarted(!aIsOffline)
  , mIsShutDown(false)
{
  nsDOMEventTargetHelper::BindToOwner(aWindow);
  // Register with the window so it can shut us down (see ~AudioContext).
  aWindow->AddAudioContext(this);
  SetIsDOMBinding();

  // Note: AudioDestinationNode needs an AudioContext that must already be
  // bound to the window.
  mDestination = new AudioDestinationNode(this, aIsOffline, aNumberOfChannels,
                                          aLength, aSampleRate);
  mDestination->Stream()->AddAudioOutput(&gWebAudioOutputKey);
}
|
|
|
|
|
|
|
|
// Deregister from the owning window, which may already be gone at
// destruction time.
AudioContext::~AudioContext()
{
  if (nsPIDOMWindow* owner = GetOwner()) {
    owner->RemoveAudioContext(this);
  }
}
|
|
|
|
|
|
|
|
// Reflect this object into JS: offline contexts use the
// OfflineAudioContext binding, realtime ones the AudioContext binding.
JSObject*
AudioContext::WrapObject(JSContext* aCx, JS::Handle<JSObject*> aScope)
{
  if (!mIsOffline) {
    return AudioContextBinding::Wrap(aCx, aScope, this);
  }
  return OfflineAudioContextBinding::Wrap(aCx, aScope, this);
}
|
|
|
|
|
|
|
|
// WebIDL constructor for a realtime AudioContext.  Fails if the global is
// not a window.
/* static */ already_AddRefed<AudioContext>
AudioContext::Constructor(const GlobalObject& aGlobal,
                          ErrorResult& aRv)
{
  nsCOMPtr<nsPIDOMWindow> ownerWindow =
    do_QueryInterface(aGlobal.GetAsSupports());
  if (!ownerWindow) {
    aRv.Throw(NS_ERROR_FAILURE);
    return nullptr;
  }

  nsRefPtr<AudioContext> context = new AudioContext(ownerWindow, false);
  return context.forget();
}
|
|
|
|
|
|
|
|
// WebIDL constructor for OfflineAudioContext.  Validates the channel count,
// render length and sample rate before creating the context.
/* static */ already_AddRefed<AudioContext>
AudioContext::Constructor(const GlobalObject& aGlobal,
                          uint32_t aNumberOfChannels,
                          uint32_t aLength,
                          float aSampleRate,
                          ErrorResult& aRv)
{
  nsCOMPtr<nsPIDOMWindow> ownerWindow =
    do_QueryInterface(aGlobal.GetAsSupports());
  if (!ownerWindow) {
    aRv.Throw(NS_ERROR_FAILURE);
    return nullptr;
  }

  // The DOM binding protects us against infinity and NaN
  const bool badParams = aNumberOfChannels == 0 ||
                         aNumberOfChannels > WebAudioUtils::MaxChannelCount ||
                         aLength == 0 ||
                         aSampleRate <= 1.0f ||
                         aSampleRate >= TRACK_RATE_MAX;
  if (badParams) {
    aRv.Throw(NS_ERROR_DOM_NOT_SUPPORTED_ERR);
    return nullptr;
  }

  nsRefPtr<AudioContext> context = new AudioContext(ownerWindow,
                                                    true,
                                                    aNumberOfChannels,
                                                    aLength,
                                                    aSampleRate);
  return context.forget();
}
|
|
|
|
|
2012-09-18 16:07:33 -07:00
|
|
|
// Factory for createBufferSource(): a new source node tied to this context.
already_AddRefed<AudioBufferSourceNode>
AudioContext::CreateBufferSource()
{
  nsRefPtr<AudioBufferSourceNode> sourceNode =
    new AudioBufferSourceNode(this);
  return sourceNode.forget();
}
|
|
|
|
|
2012-09-21 15:42:14 -07:00
|
|
|
// createBuffer(numberOfChannels, length, sampleRate): allocates an empty
// AudioBuffer.  Rejects empty buffers, sample rates outside [8000, 96000] Hz
// (NotSupportedError), and lengths that overflow int32 (out of memory).
already_AddRefed<AudioBuffer>
AudioContext::CreateBuffer(JSContext* aJSContext, uint32_t aNumberOfChannels,
                           uint32_t aLength, float aSampleRate,
                           ErrorResult& aRv)
{
  if (!aLength || aSampleRate < 8000 || aSampleRate > 96000) {
    aRv.Throw(NS_ERROR_DOM_NOT_SUPPORTED_ERR);
    return nullptr;
  }

  if (aLength > INT32_MAX) {
    aRv.Throw(NS_ERROR_OUT_OF_MEMORY);
    return nullptr;
  }

  nsRefPtr<AudioBuffer> audioBuffer =
    new AudioBuffer(this, int32_t(aLength), aSampleRate);
  if (!audioBuffer->InitializeBuffers(aNumberOfChannels, aJSContext)) {
    aRv.Throw(NS_ERROR_OUT_OF_MEMORY);
    return nullptr;
  }

  return audioBuffer.forget();
}
|
|
|
|
|
2013-05-03 13:42:28 -07:00
|
|
|
// Legacy synchronous createBuffer(ArrayBuffer, mixToMono) overload.  Only
// available behind the legacy pref; decodes on the calling thread and
// returns nullptr (without throwing) when decoding fails.
already_AddRefed<AudioBuffer>
AudioContext::CreateBuffer(JSContext* aJSContext, const ArrayBuffer& aBuffer,
                           bool aMixToMono, ErrorResult& aRv)
{
  // Do not accept this method unless the legacy pref has been set.
  if (!Preferences::GetBool("media.webaudio.legacy.AudioContext")) {
    // Hide the overload by reporting the call as having too few arguments.
    aRv.ThrowNotEnoughArgsError();
    return nullptr;
  }

  // Sniff the content of the media.
  // Failed type sniffing will be handled by SyncDecodeMedia.
  nsAutoCString contentType;
  NS_SniffContent(NS_DATA_SNIFFER_CATEGORY, nullptr,
                  aBuffer.Data(), aBuffer.Length(),
                  contentType);

  nsRefPtr<WebAudioDecodeJob> job =
    new WebAudioDecodeJob(contentType, this, aBuffer);

  // On success the decoded result lands in job->mOutput.
  if (mDecoder.SyncDecodeMedia(contentType.get(),
                               aBuffer.Data(), aBuffer.Length(), *job) &&
      job->mOutput) {
    nsRefPtr<AudioBuffer> buffer = job->mOutput.forget();
    if (aMixToMono) {
      buffer->MixToMono(aJSContext);
    }
    return buffer.forget();
  }

  return nullptr;
}
|
|
|
|
|
2013-04-13 18:37:04 -07:00
|
|
|
namespace {

// Returns true if aBufferSize is an acceptable ScriptProcessorNode buffer
// size: zero (meaning "let the implementation choose") or a power of two
// between 256 and 16384 inclusive.
bool IsValidBufferSize(uint32_t aBufferSize) {
  if (aBufferSize == 0) {
    // let the implementation choose the buffer size
    return true;
  }
  const bool isPowerOfTwo = (aBufferSize & (aBufferSize - 1)) == 0;
  return isPowerOfTwo && aBufferSize >= 256 && aBufferSize <= 16384;
}

}
|
|
|
|
|
2013-05-21 12:17:47 -07:00
|
|
|
// createMediaStreamDestination(): not supported on offline contexts.
already_AddRefed<MediaStreamAudioDestinationNode>
AudioContext::CreateMediaStreamDestination(ErrorResult& aRv)
{
  if (mIsOffline) {
    aRv.Throw(NS_ERROR_DOM_NOT_SUPPORTED_ERR);
    return nullptr;
  }

  nsRefPtr<MediaStreamAudioDestinationNode> destinationNode =
    new MediaStreamAudioDestinationNode(this);
  return destinationNode.forget();
}
|
|
|
|
|
2013-04-13 18:37:04 -07:00
|
|
|
// createScriptProcessor(): validates the buffer size and channel counts
// (at least one of input/output channels must be non-zero, neither may
// exceed MaxChannelCount) before creating the node.
already_AddRefed<ScriptProcessorNode>
AudioContext::CreateScriptProcessor(uint32_t aBufferSize,
                                    uint32_t aNumberOfInputChannels,
                                    uint32_t aNumberOfOutputChannels,
                                    ErrorResult& aRv)
{
  if (!IsValidBufferSize(aBufferSize) ||
      aNumberOfInputChannels > WebAudioUtils::MaxChannelCount ||
      aNumberOfOutputChannels > WebAudioUtils::MaxChannelCount ||
      (aNumberOfInputChannels == 0 && aNumberOfOutputChannels == 0)) {
    aRv.Throw(NS_ERROR_DOM_INDEX_SIZE_ERR);
    return nullptr;
  }

  nsRefPtr<ScriptProcessorNode> processorNode =
    new ScriptProcessorNode(this, aBufferSize, aNumberOfInputChannels,
                            aNumberOfOutputChannels);
  return processorNode.forget();
}
|
|
|
|
|
2013-03-31 20:41:14 -07:00
|
|
|
// Factory for createAnalyser().
already_AddRefed<AnalyserNode>
AudioContext::CreateAnalyser()
{
  nsRefPtr<AnalyserNode> node = new AnalyserNode(this);
  return node.forget();
}
|
|
|
|
|
2013-07-24 20:01:49 -07:00
|
|
|
// createMediaElementSource(): captures the element's audio into a
// MediaStream and wraps it in a source node.  Not supported offline.
already_AddRefed<MediaElementAudioSourceNode>
AudioContext::CreateMediaElementSource(HTMLMediaElement& aMediaElement,
                                       ErrorResult& aRv)
{
  if (mIsOffline) {
    aRv.Throw(NS_ERROR_DOM_NOT_SUPPORTED_ERR);
    return nullptr;
  }
  nsRefPtr<DOMMediaStream> capturedStream =
    aMediaElement.MozCaptureStream(aRv);
  if (aRv.Failed()) {
    return nullptr;
  }
  nsRefPtr<MediaElementAudioSourceNode> sourceNode =
    new MediaElementAudioSourceNode(this, capturedStream);
  return sourceNode.forget();
}
|
|
|
|
|
2013-07-24 04:29:39 -07:00
|
|
|
// createMediaStreamSource(): wraps an existing MediaStream as a source
// node.  Not supported offline.
already_AddRefed<MediaStreamAudioSourceNode>
AudioContext::CreateMediaStreamSource(DOMMediaStream& aMediaStream,
                                      ErrorResult& aRv)
{
  if (mIsOffline) {
    aRv.Throw(NS_ERROR_DOM_NOT_SUPPORTED_ERR);
    return nullptr;
  }
  nsRefPtr<MediaStreamAudioSourceNode> sourceNode =
    new MediaStreamAudioSourceNode(this, &aMediaStream);
  return sourceNode.forget();
}
|
|
|
|
|
2012-10-31 12:09:32 -07:00
|
|
|
// Factory for createGain().
already_AddRefed<GainNode>
AudioContext::CreateGain()
{
  nsRefPtr<GainNode> node = new GainNode(this);
  return node.forget();
}
|
|
|
|
|
2013-05-13 21:12:30 -07:00
|
|
|
// Factory for createWaveShaper().
already_AddRefed<WaveShaperNode>
AudioContext::CreateWaveShaper()
{
  nsRefPtr<WaveShaperNode> node = new WaveShaperNode(this);
  return node.forget();
}
|
|
|
|
|
2012-10-31 17:26:03 -07:00
|
|
|
// createDelay(maxDelayTime): the maximum delay must lie strictly inside
// (0, 180) seconds.  The negated compound test also rejects NaN.
already_AddRefed<DelayNode>
AudioContext::CreateDelay(double aMaxDelayTime, ErrorResult& aRv)
{
  if (!(aMaxDelayTime > 0. && aMaxDelayTime < 180.)) {
    aRv.Throw(NS_ERROR_DOM_NOT_SUPPORTED_ERR);
    return nullptr;
  }
  nsRefPtr<DelayNode> node = new DelayNode(this, aMaxDelayTime);
  return node.forget();
}
|
|
|
|
|
2012-11-05 18:14:13 -08:00
|
|
|
// Factory for createPanner().  Panner nodes are additionally tracked in
// mPannerNodes so UpdatePannerSource() can reach them.
already_AddRefed<PannerNode>
AudioContext::CreatePanner()
{
  nsRefPtr<PannerNode> node = new PannerNode(this);
  mPannerNodes.PutEntry(node);
  return node.forget();
}
|
|
|
|
|
2013-06-10 13:07:55 -07:00
|
|
|
// Factory for createConvolver().
already_AddRefed<ConvolverNode>
AudioContext::CreateConvolver()
{
  nsRefPtr<ConvolverNode> node = new ConvolverNode(this);
  return node.forget();
}
|
|
|
|
|
2013-05-05 08:49:13 -07:00
|
|
|
// createChannelSplitter(): the output count must be in
// [1, WebAudioUtils::MaxChannelCount].
already_AddRefed<ChannelSplitterNode>
AudioContext::CreateChannelSplitter(uint32_t aNumberOfOutputs, ErrorResult& aRv)
{
  const bool validCount =
    aNumberOfOutputs != 0 &&
    aNumberOfOutputs <= WebAudioUtils::MaxChannelCount;
  if (!validCount) {
    aRv.Throw(NS_ERROR_DOM_INDEX_SIZE_ERR);
    return nullptr;
  }

  nsRefPtr<ChannelSplitterNode> node =
    new ChannelSplitterNode(this, aNumberOfOutputs);
  return node.forget();
}
|
|
|
|
|
2013-05-05 08:49:37 -07:00
|
|
|
// createChannelMerger(): the input count must be in
// [1, WebAudioUtils::MaxChannelCount].
already_AddRefed<ChannelMergerNode>
AudioContext::CreateChannelMerger(uint32_t aNumberOfInputs, ErrorResult& aRv)
{
  const bool validCount =
    aNumberOfInputs != 0 &&
    aNumberOfInputs <= WebAudioUtils::MaxChannelCount;
  if (!validCount) {
    aRv.Throw(NS_ERROR_DOM_INDEX_SIZE_ERR);
    return nullptr;
  }

  nsRefPtr<ChannelMergerNode> node =
    new ChannelMergerNode(this, aNumberOfInputs);
  return node.forget();
}
|
|
|
|
|
2012-11-06 17:01:11 -08:00
|
|
|
// Factory for createDynamicsCompressor().
already_AddRefed<DynamicsCompressorNode>
AudioContext::CreateDynamicsCompressor()
{
  nsRefPtr<DynamicsCompressorNode> node = new DynamicsCompressorNode(this);
  return node.forget();
}
|
|
|
|
|
2012-11-07 17:59:14 -08:00
|
|
|
// Factory for createBiquadFilter().
already_AddRefed<BiquadFilterNode>
AudioContext::CreateBiquadFilter()
{
  nsRefPtr<BiquadFilterNode> node = new BiquadFilterNode(this);
  return node.forget();
}
|
|
|
|
|
2013-08-19 11:53:00 -07:00
|
|
|
// Factory for createOscillator().
already_AddRefed<OscillatorNode>
AudioContext::CreateOscillator()
{
  nsRefPtr<OscillatorNode> node = new OscillatorNode(this);
  return node.forget();
}
|
|
|
|
|
2013-06-19 15:24:26 -07:00
|
|
|
// createPeriodicWave(real, imag): the real and imaginary arrays must have
// the same non-zero length, capped at 4096 elements.
already_AddRefed<PeriodicWave>
AudioContext::CreatePeriodicWave(const Float32Array& aRealData,
                                 const Float32Array& aImagData,
                                 ErrorResult& aRv)
{
  uint32_t length = aImagData.Length();
  if (aRealData.Length() != length ||
      length == 0 ||
      length > 4096) {
    aRv.Throw(NS_ERROR_DOM_NOT_SUPPORTED_ERR);
    return nullptr;
  }

  nsRefPtr<PeriodicWave> wave =
    new PeriodicWave(this, aRealData.Data(), aImagData.Data(), length, aRv);
  if (aRv.Failed()) {
    return nullptr;
  }
  return wave.forget();
}
|
|
|
|
|
2012-11-05 16:26:03 -08:00
|
|
|
// Returns the context's AudioListener, creating it lazily on first access.
AudioListener*
AudioContext::Listener()
{
  if (!mListener) {
    mListener = new AudioListener(this);
  }
  return mListener;
}
|
|
|
|
|
2013-02-01 14:13:23 -08:00
|
|
|
// Implements decodeAudioData(): sniffs the MIME type of the buffer contents
// and kicks off an asynchronous decode.  The decode job holds the callbacks
// and is kept alive by mDecodeJobs until removed via RemoveFromDecodeQueue.
void
AudioContext::DecodeAudioData(const ArrayBuffer& aBuffer,
                              DecodeSuccessCallback& aSuccessCallback,
                              const Optional<OwningNonNull<DecodeErrorCallback> >& aFailureCallback)
{
  // Sniff the content of the media.
  // Failed type sniffing will be handled by AsyncDecodeMedia.
  nsAutoCString contentType;
  NS_SniffContent(NS_DATA_SNIFFER_CATEGORY, nullptr,
                  aBuffer.Data(), aBuffer.Length(),
                  contentType);

  // The failure callback is optional in the IDL.
  nsCOMPtr<DecodeErrorCallback> failureCallback;
  if (aFailureCallback.WasPassed()) {
    failureCallback = &aFailureCallback.Value();
  }
  nsRefPtr<WebAudioDecodeJob> job(
    new WebAudioDecodeJob(contentType, this, aBuffer,
                          &aSuccessCallback, failureCallback));
  mDecoder.AsyncDecodeMedia(contentType.get(),
                            aBuffer.Data(), aBuffer.Length(), *job);
  // Transfer the ownership to mDecodeJobs
  mDecodeJobs.AppendElement(job.forget());
}
|
|
|
|
|
|
|
|
// Drops aDecodeJob from mDecodeJobs, releasing the reference taken in
// DecodeAudioData().
void
AudioContext::RemoveFromDecodeQueue(WebAudioDecodeJob* aDecodeJob)
{
  mDecodeJobs.RemoveElement(aDecodeJob);
}
|
|
|
|
|
2013-09-16 16:53:40 -07:00
|
|
|
void
|
|
|
|
AudioContext::RegisterActiveNode(AudioNode* aNode)
|
|
|
|
{
|
2013-09-17 18:10:30 -07:00
|
|
|
if (!mIsShutDown) {
|
|
|
|
mActiveNodes.PutEntry(aNode);
|
|
|
|
}
|
2013-09-16 16:53:40 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
// Stop tracking aNode as active, dropping our reference to it.
void
AudioContext::UnregisterActiveNode(AudioNode* aNode)
{
  mActiveNodes.RemoveEntry(aNode);
}
|
|
|
|
|
2013-04-11 05:47:57 -07:00
|
|
|
// Called when an AudioBufferSourceNode goes away.  Note that aNode itself is
// unused here; the panners' connected-source sets are simply recomputed —
// presumably because the departing node may have been one of those sources
// (TODO confirm against PannerNode::FindConnectedSources).
void
AudioContext::UnregisterAudioBufferSourceNode(AudioBufferSourceNode* aNode)
{
  UpdatePannerSource();
}
|
|
|
|
|
|
|
|
void
|
|
|
|
AudioContext::UnregisterPannerNode(PannerNode* aNode)
|
|
|
|
{
|
2013-04-24 20:32:41 -07:00
|
|
|
mPannerNodes.RemoveEntry(aNode);
|
2013-07-16 05:45:41 -07:00
|
|
|
if (mListener) {
|
|
|
|
mListener->UnregisterPannerNode(aNode);
|
|
|
|
}
|
2013-04-11 05:47:57 -07:00
|
|
|
}
|
|
|
|
|
2013-04-24 20:32:41 -07:00
|
|
|
// Hashtable enumerator: asks one PannerNode to recompute the set of sources
// connected to it.  aData is unused.
static PLDHashOperator
FindConnectedSourcesOn(nsPtrHashKey<PannerNode>* aEntry, void* aData)
{
  aEntry->GetKey()->FindConnectedSources();
  return PL_DHASH_NEXT;
}

// Refreshes the connected-source information of every registered PannerNode.
void
AudioContext::UpdatePannerSource()
{
  mPannerNodes.EnumerateEntries(FindConnectedSourcesOn, nullptr);
}
|
|
|
|
|
2013-06-10 10:32:28 -07:00
|
|
|
// Maximum output channel count: offline contexts are fixed to the count
// they were created with; realtime contexts ask the audio backend.
uint32_t
AudioContext::MaxChannelCount() const
{
  if (mIsOffline) {
    return mNumberOfChannels;
  }
  return AudioStream::MaxNumberOfChannels();
}
|
|
|
|
|
2013-02-04 15:07:25 -08:00
|
|
|
// The MediaStreamGraph this context's streams run in, reached through the
// destination node's stream.
MediaStreamGraph*
AudioContext::Graph() const
{
  return Destination()->Stream()->Graph();
}
|
2013-02-04 15:07:25 -08:00
|
|
|
|
|
|
|
// The destination node's stream, or nullptr when there is no destination.
MediaStream*
AudioContext::DestinationStream() const
{
  AudioDestinationNode* destination = Destination();
  return destination ? destination->Stream() : nullptr;
}
|
2012-08-31 13:59:37 -07:00
|
|
|
|
2013-03-14 18:01:02 -07:00
|
|
|
// Implements the currentTime attribute: the destination stream's current
// media time, converted to seconds.
double
AudioContext::CurrentTime() const
{
  return MediaTimeToSeconds(Destination()->Stream()->GetCurrentTime());
}
|
|
|
|
|
2013-04-22 08:45:34 -07:00
|
|
|
// Tears the context down.  Ordering matters: mark shut down first (so
// RegisterActiveNode becomes a no-op), block output, stop the decoder, then
// release active nodes.
void
AudioContext::Shutdown()
{
  mIsShutDown = true;

  Suspend();

  mDecoder.Shutdown();

  // Release references to active nodes.
  // Active AudioNodes don't unregister in destructors, at which point the
  // Node is already unregistered.
  mActiveNodes.Clear();

  // For offline contexts, we can destroy the MediaStreamGraph at this point.
  if (mIsOffline && mDestination) {
    mDestination->OfflineShutdown();
  }
}
|
|
|
|
|
2013-03-21 19:59:33 -07:00
|
|
|
void
|
|
|
|
AudioContext::Suspend()
|
|
|
|
{
|
2013-04-14 18:52:55 -07:00
|
|
|
MediaStream* ds = DestinationStream();
|
|
|
|
if (ds) {
|
|
|
|
ds->ChangeExplicitBlockerCount(1);
|
|
|
|
}
|
2013-03-21 19:59:33 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
void
|
|
|
|
AudioContext::Resume()
|
|
|
|
{
|
2013-04-14 18:52:55 -07:00
|
|
|
MediaStream* ds = DestinationStream();
|
|
|
|
if (ds) {
|
|
|
|
ds->ChangeExplicitBlockerCount(-1);
|
|
|
|
}
|
2013-03-21 19:59:33 -07:00
|
|
|
}
|
|
|
|
|
2013-04-14 11:18:43 -07:00
|
|
|
// Returns the JSContext of our parent script global, or nullptr if either
// the global or its script context is gone.  Main thread only.
JSContext*
AudioContext::GetJSContext() const
{
  MOZ_ASSERT(NS_IsMainThread());

  nsCOMPtr<nsIScriptGlobalObject> global =
    do_QueryInterface(GetParentObject());
  if (!global) {
    return nullptr;
  }
  nsIScriptContext* scriptContext = global->GetContext();
  return scriptContext ? scriptContext->GetNativeContext() : nullptr;
}
|
|
|
|
|
2013-05-16 16:30:42 -07:00
|
|
|
// OfflineAudioContext.startRendering().  May only be called once per
// context; later calls throw InvalidStateError.
void
AudioContext::StartRendering(ErrorResult& aRv)
{
  MOZ_ASSERT(mIsOffline, "This should only be called on OfflineAudioContext");
  if (mIsStarted) {
    // Rendering was already kicked off.
    aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
    return;
  }

  mIsStarted = true;
  mDestination->StartRendering();
}
|
|
|
|
|
2013-07-03 17:44:32 -07:00
|
|
|
// Silences the realtime output by muting the destination node.  Never
// called on offline contexts.
void
AudioContext::Mute() const
{
  MOZ_ASSERT(!mIsOffline);
  mDestination->Mute();
}
|
|
|
|
|
|
|
|
// Restores the realtime output after Mute().  Never called on offline
// contexts.
void
AudioContext::Unmute() const
{
  MOZ_ASSERT(!mIsOffline);
  mDestination->Unmute();
}
|
|
|
|
|
2013-10-11 04:55:47 -07:00
|
|
|
// Forwards the mozAudioChannelType getter to the destination node.
AudioChannel
AudioContext::MozAudioChannelType() const
{
  return mDestination->MozAudioChannelType();
}
|
|
|
|
|
|
|
|
// Forwards the mozAudioChannelType setter to the destination node, which
// performs the validation and may throw via aRv.
void
AudioContext::SetMozAudioChannelType(AudioChannel aValue, ErrorResult& aRv)
{
  mDestination->SetMozAudioChannelType(aValue, aRv);
}
|
|
|
|
|
2013-02-04 15:07:25 -08:00
|
|
|
}
|
|
|
|
}
|