/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "AudioDestinationNode.h"
#include "mozilla/dom/AudioDestinationNodeBinding.h"
#include "mozilla/Preferences.h"
#include "AudioChannelAgent.h"
#include "AudioChannelService.h"
#include "AudioNodeEngine.h"
#include "AudioNodeStream.h"
#include "MediaStreamGraph.h"
#include "OfflineAudioCompletionEvent.h"
#include "nsIInterfaceRequestorUtils.h"
#include "nsIDocShell.h"
#include "nsIPermissionManager.h"
#include "nsIScriptObjectPrincipal.h"
#include "nsServiceManagerUtils.h"
#include "nsIAppShell.h"
#include "nsWidgetsCID.h"

namespace mozilla {
namespace dom {

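// The address of this object is used as the key identifying the Web Audio
// output added to the destination stream (see AddAudioOutput and
// SetAudioOutputVolume below); only its address matters, never its value.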
static uint8_t gWebAudioOutputKey;

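// Engine for an OfflineAudioContext's destination node: it records everything
// it receives into mInputChannels and, once mLength frames have been captured,
// hands the buffered audio back to the main thread as an AudioBuffer via an
// OfflineAudioCompletionEvent.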
class OfflineDestinationNodeEngine : public AudioNodeEngine
{
public:
  typedef AutoFallibleTArray<nsAutoArrayPtr<float>, 2> InputChannels;

  OfflineDestinationNodeEngine(AudioDestinationNode* aNode,
                               uint32_t aNumberOfChannels,
                               uint32_t aLength,
                               float aSampleRate)
    : AudioNodeEngine(aNode)
    , mWriteIndex(0)
    , mLength(aLength)
    , mSampleRate(aSampleRate)
  {
    // These allocations might fail if content provides a huge number of
    // channels or size, but it's OK since we'll deal with the failure
    // gracefully.
    if (mInputChannels.SetLength(aNumberOfChannels)) {
      static const fallible_t fallible = fallible_t();
      for (uint32_t i = 0; i < aNumberOfChannels; ++i) {
        mInputChannels[i] = new(fallible) float[aLength];
        if (!mInputChannels[i]) {
          mInputChannels.Clear();
          break;
        }
      }
    }
  }

  virtual void ProcessBlock(AudioNodeStream* aStream,
                            const AudioChunk& aInput,
                            AudioChunk* aOutput,
                            bool* aFinished) MOZ_OVERRIDE
  {
    // Do this just for the sake of political correctness; this output
    // will not go anywhere.
    *aOutput = aInput;

    // Handle the case of allocation failure in the input buffer
    if (mInputChannels.IsEmpty()) {
      return;
    }

    if (mWriteIndex >= mLength) {
      NS_ASSERTION(mWriteIndex == mLength, "Overshot length");
      // Don't record any more.
      return;
    }

    // Record our input buffer
    MOZ_ASSERT(mWriteIndex < mLength, "How did this happen?");
    const uint32_t duration = std::min(WEBAUDIO_BLOCK_SIZE, mLength - mWriteIndex);
    const uint32_t commonChannelCount = std::min(mInputChannels.Length(),
                                                 aInput.mChannelData.Length());
    // First, copy as many channels in the input as we have
    for (uint32_t i = 0; i < commonChannelCount; ++i) {
      if (aInput.IsNull()) {
        PodZero(mInputChannels[i] + mWriteIndex, duration);
      } else {
        const float* inputBuffer = static_cast<const float*>(aInput.mChannelData[i]);
        if (duration == WEBAUDIO_BLOCK_SIZE) {
          // Use the optimized version of the copy with scale operation
          AudioBlockCopyChannelWithScale(inputBuffer, aInput.mVolume,
                                         mInputChannels[i] + mWriteIndex);
        } else {
          if (aInput.mVolume == 1.0f) {
            PodCopy(mInputChannels[i] + mWriteIndex, inputBuffer, duration);
          } else {
            for (uint32_t j = 0; j < duration; ++j) {
              mInputChannels[i][mWriteIndex + j] = aInput.mVolume * inputBuffer[j];
            }
          }
        }
      }
    }
    // Then, silence all of the remaining channels
    for (uint32_t i = commonChannelCount; i < mInputChannels.Length(); ++i) {
      PodZero(mInputChannels[i] + mWriteIndex, duration);
    }
    mWriteIndex += duration;

    if (mWriteIndex >= mLength) {
      NS_ASSERTION(mWriteIndex == mLength, "Overshot length");
      // Go to finished state. When the graph's current time eventually reaches
      // the end of the stream, then the main thread will be notified and we'll
      // shut down the AudioContext.
      *aFinished = true;
    }
  }

  void FireOfflineCompletionEvent(AudioDestinationNode* aNode)
  {
    AudioContext* context = aNode->Context();
    context->Shutdown();
    // Shutdown drops self reference, but the context is still referenced by aNode,
    // which is strongly referenced by the runnable that called
    // AudioDestinationNode::FireOfflineCompletionEvent.

    AutoPushJSContext cx(context->GetJSContext());
    if (!cx) {
      return;
    }
    JSAutoRequest ar(cx);

    // Create the input buffer
    nsRefPtr<AudioBuffer> renderedBuffer = new AudioBuffer(context,
                                                           mLength,
                                                           mSampleRate);
    if (!renderedBuffer->InitializeBuffers(mInputChannels.Length(), cx)) {
      return;
    }
    for (uint32_t i = 0; i < mInputChannels.Length(); ++i) {
      renderedBuffer->SetRawChannelContents(cx, i, mInputChannels[i]);
    }

    nsRefPtr<OfflineAudioCompletionEvent> event =
      new OfflineAudioCompletionEvent(context, nullptr, nullptr);
    event->InitEvent(renderedBuffer);
    context->DispatchTrustedEvent(event);
  }

private:
  // The input to the destination node is recorded in the mInputChannels buffer.
  // When this buffer fills up with mLength frames, the buffered input is sent
  // to the main thread in order to dispatch OfflineAudioCompletionEvent.
  InputChannels mInputChannels;
  // An index representing the next offset in mInputChannels to be written to.
  uint32_t mWriteIndex;
  // How many frames the OfflineAudioContext intends to produce.
  uint32_t mLength;
  float mSampleRate;
};

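// Engine for a realtime AudioContext's destination node: it simply forwards
// its input to the stream's audio output, scaled by the volume set through
// SetDoubleParameter (see AudioDestinationNode::Mute/Unmute).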
class DestinationNodeEngine : public AudioNodeEngine
{
public:
  explicit DestinationNodeEngine(AudioDestinationNode* aNode)
    : AudioNodeEngine(aNode)
    , mVolume(1.0f)
  {
  }

  virtual void ProcessBlock(AudioNodeStream* aStream,
                            const AudioChunk& aInput,
                            AudioChunk* aOutput,
                            bool* aFinished) MOZ_OVERRIDE
  {
    *aOutput = aInput;
    aOutput->mVolume *= mVolume;
  }

  virtual void SetDoubleParameter(uint32_t aIndex, double aParam) MOZ_OVERRIDE
  {
    if (aIndex == VOLUME) {
      mVolume = aParam;
    }
  }

  enum Parameters {
    VOLUME,
  };

private:
  float mVolume;
};

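// Web Audio's integration with the AudioChannelService is gated on this pref;
// when it is false, no audio channel agent is created for realtime contexts.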
static bool UseAudioChannelService()
{
  return Preferences::GetBool("media.useAudioChannelService");
}

NS_IMPL_CYCLE_COLLECTION_INHERITED_1(AudioDestinationNode, AudioNode,
                                     mAudioChannelAgent)

NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION_INHERITED(AudioDestinationNode)
  NS_INTERFACE_MAP_ENTRY(nsIDOMEventListener)
  NS_INTERFACE_MAP_ENTRY(nsIAudioChannelAgentCallback)
  NS_INTERFACE_MAP_ENTRY(nsISupportsWeakReference)
NS_INTERFACE_MAP_END_INHERITING(AudioNode)

NS_IMPL_ADDREF_INHERITED(AudioDestinationNode, AudioNode)
NS_IMPL_RELEASE_INHERITED(AudioDestinationNode, AudioNode)

AudioDestinationNode::AudioDestinationNode(AudioContext* aContext,
                                           bool aIsOffline,
                                           uint32_t aNumberOfChannels,
                                           uint32_t aLength,
                                           float aSampleRate)
  : AudioNode(aContext,
              aIsOffline ? aNumberOfChannels : 2,
              ChannelCountMode::Explicit,
              ChannelInterpretation::Speakers)
  , mFramesToProduce(aLength)
  , mAudioChannel(AudioChannel::Normal)
  , mIsOffline(aIsOffline)
  , mHasFinished(false)
  , mExtraCurrentTime(0)
  , mExtraCurrentTimeSinceLastStartedBlocking(0)
  , mExtraCurrentTimeUpdatedSinceLastStableState(false)
{
  MediaStreamGraph* graph = aIsOffline ?
                            MediaStreamGraph::CreateNonRealtimeInstance() :
                            MediaStreamGraph::GetInstance();
  AudioNodeEngine* engine = aIsOffline ?
                            new OfflineDestinationNodeEngine(this, aNumberOfChannels,
                                                             aLength, aSampleRate) :
                            static_cast<AudioNodeEngine*>(new DestinationNodeEngine(this));

  mStream = graph->CreateAudioNodeStream(engine, MediaStreamGraph::EXTERNAL_STREAM);
  mStream->AddMainThreadListener(this);
  mStream->AddAudioOutput(&gWebAudioOutputKey);

  AudioChannel channel = AudioChannelService::GetDefaultAudioChannel();
  if (channel != AudioChannel::Normal) {
    ErrorResult rv;
    SetMozAudioChannelType(channel, rv);
  }

  if (!aIsOffline && UseAudioChannelService()) {
    nsCOMPtr<nsIDOMEventTarget> target = do_QueryInterface(GetOwner());
    if (target) {
      target->AddSystemEventListener(NS_LITERAL_STRING("visibilitychange"), this,
                                     /* useCapture = */ true,
                                     /* wantsUntrusted = */ false);
    }

    CreateAudioChannelAgent();
  }
}

void
AudioDestinationNode::DestroyMediaStream()
{
  if (mAudioChannelAgent && !Context()->IsOffline()) {
    mAudioChannelAgent->StopPlaying();
    mAudioChannelAgent = nullptr;

    nsCOMPtr<nsIDOMEventTarget> target = do_QueryInterface(GetOwner());
    NS_ENSURE_TRUE_VOID(target);

    target->RemoveSystemEventListener(NS_LITERAL_STRING("visibilitychange"), this,
                                      /* useCapture = */ true);
  }

  if (!mStream)
    return;

  mStream->RemoveMainThreadListener(this);
  MediaStreamGraph* graph = mStream->Graph();
  if (graph->IsNonRealtime()) {
    MediaStreamGraph::DestroyNonRealtimeInstance(graph);
  }
  AudioNode::DestroyMediaStream();
}

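// Called on the main thread when the stream's state changes; once the offline
// stream has finished, dispatch a runnable that fires the completion event.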
void
AudioDestinationNode::NotifyMainThreadStateChanged()
{
  if (mStream->IsFinished() && !mHasFinished) {
    mHasFinished = true;
    if (mIsOffline) {
      nsCOMPtr<nsIRunnable> runnable =
        NS_NewRunnableMethod(this, &AudioDestinationNode::FireOfflineCompletionEvent);
      NS_DispatchToCurrentThread(runnable);
    }
  }
}

void
AudioDestinationNode::FireOfflineCompletionEvent()
{
  AudioNodeStream* stream = static_cast<AudioNodeStream*>(Stream());
  OfflineDestinationNodeEngine* engine =
    static_cast<OfflineDestinationNodeEngine*>(stream->Engine());
  engine->FireOfflineCompletionEvent(this);
}

uint32_t
AudioDestinationNode::MaxChannelCount() const
{
  return Context()->MaxChannelCount();
}

void
AudioDestinationNode::SetChannelCount(uint32_t aChannelCount, ErrorResult& aRv)
{
  if (aChannelCount > MaxChannelCount()) {
    aRv.Throw(NS_ERROR_DOM_INDEX_SIZE_ERR);
    return;
  }

  AudioNode::SetChannelCount(aChannelCount, aRv);
}

void
AudioDestinationNode::Mute()
{
  MOZ_ASSERT(Context() && !Context()->IsOffline());
  SendDoubleParameterToStream(DestinationNodeEngine::VOLUME, 0.0f);
}

void
AudioDestinationNode::Unmute()
{
  MOZ_ASSERT(Context() && !Context()->IsOffline());
  SendDoubleParameterToStream(DestinationNodeEngine::VOLUME, 1.0f);
}

void
AudioDestinationNode::OfflineShutdown()
{
  MOZ_ASSERT(Context() && Context()->IsOffline(),
             "Should only be called on a valid OfflineAudioContext");

  MediaStreamGraph::DestroyNonRealtimeInstance(mStream->Graph());
  mOfflineRenderingRef.Drop(this);
}

JSObject*
AudioDestinationNode::WrapObject(JSContext* aCx, JS::Handle<JSObject*> aScope)
{
  return AudioDestinationNodeBinding::Wrap(aCx, aScope, this);
}

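// Kick off offline rendering: keep this node (and thus the context) alive via
// mOfflineRenderingRef until OfflineShutdown, and ask the non-realtime graph
// to produce mFramesToProduce frames.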
void
AudioDestinationNode::StartRendering()
{
  mOfflineRenderingRef.Take(this);
  mStream->Graph()->StartNonRealtimeProcessing(TrackRate(Context()->SampleRate()), mFramesToProduce);
}

void
AudioDestinationNode::SetCanPlay(bool aCanPlay)
{
  mStream->SetTrackEnabled(AudioNodeStream::AUDIO_TRACK, aCanPlay);
}

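// nsIDOMEventListener: we only listen for "visibilitychange" (registered in
// the constructor) and mirror the docshell's active state into the audio
// channel agent.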
NS_IMETHODIMP
AudioDestinationNode::HandleEvent(nsIDOMEvent* aEvent)
{
  nsAutoString type;
  aEvent->GetType(type);

  if (!type.EqualsLiteral("visibilitychange")) {
    return NS_ERROR_FAILURE;
  }

  nsCOMPtr<nsIDocShell> docshell = do_GetInterface(GetOwner());
  NS_ENSURE_TRUE(docshell, NS_ERROR_FAILURE);

  bool isActive = false;
  docshell->GetIsActive(&isActive);

  mAudioChannelAgent->SetVisibilityState(isActive);
  return NS_OK;
}

NS_IMETHODIMP
AudioDestinationNode::CanPlayChanged(int32_t aCanPlay)
{
  SetCanPlay(aCanPlay == AudioChannelState::AUDIO_CHANNEL_STATE_NORMAL);
  return NS_OK;
}

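// Called when the window's volume changes: apply the volume reported by the
// audio channel agent to our stream's audio output.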
NS_IMETHODIMP
AudioDestinationNode::WindowVolumeChanged()
{
  MOZ_ASSERT(mAudioChannelAgent);

  if (!mStream) {
    return NS_OK;
  }

  float volume;
  nsresult rv = mAudioChannelAgent->GetWindowVolume(&volume);
  NS_ENSURE_SUCCESS(rv, rv);

  mStream->SetAudioOutputVolume(&gWebAudioOutputKey, volume);
  return NS_OK;
}

AudioChannel
AudioDestinationNode::MozAudioChannelType() const
{
  return mAudioChannel;
}

void
AudioDestinationNode::SetMozAudioChannelType(AudioChannel aValue, ErrorResult& aRv)
{
  if (Context()->IsOffline()) {
    aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
    return;
  }

  if (aValue != mAudioChannel &&
      CheckAudioChannelPermissions(aValue)) {
    mAudioChannel = aValue;

    if (mAudioChannelAgent) {
      CreateAudioChannelAgent();
    }
  }
}

bool
AudioDestinationNode::CheckAudioChannelPermissions(AudioChannel aValue)
{
  if (!Preferences::GetBool("media.useAudioChannelService")) {
    return true;
  }

  // Only normal channel doesn't need permission.
  if (aValue == AudioChannel::Normal) {
    return true;
  }

  // Maybe this audio channel is equal to the default one.
  if (aValue == AudioChannelService::GetDefaultAudioChannel()) {
    return true;
  }

  nsCOMPtr<nsIPermissionManager> permissionManager =
    do_GetService(NS_PERMISSIONMANAGER_CONTRACTID);
  if (!permissionManager) {
    return false;
  }

  nsCOMPtr<nsIScriptObjectPrincipal> sop = do_QueryInterface(GetOwner());
  NS_ASSERTION(sop, "Window didn't QI to nsIScriptObjectPrincipal!");
  nsCOMPtr<nsIPrincipal> principal = sop->GetPrincipal();

  uint32_t perm = nsIPermissionManager::UNKNOWN_ACTION;

  nsCString channel;
  channel.AssignASCII(AudioChannelValues::strings[uint32_t(aValue)].value,
                      AudioChannelValues::strings[uint32_t(aValue)].length);
  permissionManager->TestExactPermissionFromPrincipal(principal,
    nsCString(NS_LITERAL_CSTRING("audio-channel-") + channel).get(),
    &perm);

  return perm == nsIPermissionManager::ALLOW_ACTION;
}

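// (Re)register with the audio channel service: map the DOM AudioChannel value
// onto the internal AudioChannelType, create a new agent with a weak callback,
// sync the docshell's visibility state, and start playing so we learn whether
// output is currently allowed.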
void
AudioDestinationNode::CreateAudioChannelAgent()
{
  if (mAudioChannelAgent) {
    mAudioChannelAgent->StopPlaying();
  }

  AudioChannelType type = AUDIO_CHANNEL_NORMAL;
  switch (mAudioChannel) {
    case AudioChannel::Normal:
      type = AUDIO_CHANNEL_NORMAL;
      break;

    case AudioChannel::Content:
      type = AUDIO_CHANNEL_CONTENT;
      break;

    case AudioChannel::Notification:
      type = AUDIO_CHANNEL_NOTIFICATION;
      break;

    case AudioChannel::Alarm:
      type = AUDIO_CHANNEL_ALARM;
      break;

    case AudioChannel::Telephony:
      type = AUDIO_CHANNEL_TELEPHONY;
      break;

    case AudioChannel::Ringer:
      type = AUDIO_CHANNEL_RINGER;
      break;

    case AudioChannel::Publicnotification:
      type = AUDIO_CHANNEL_PUBLICNOTIFICATION;
      break;
  }

  mAudioChannelAgent = new AudioChannelAgent();
  mAudioChannelAgent->InitWithWeakCallback(GetOwner(), type, this);

  nsCOMPtr<nsIDocShell> docshell = do_GetInterface(GetOwner());
  if (docshell) {
    bool isActive = false;
    docshell->GetIsActive(&isActive);
    mAudioChannelAgent->SetVisibilityState(isActive);
  }

  int32_t state = 0;
  mAudioChannelAgent->StartPlaying(&state);
  SetCanPlay(state == AudioChannelState::AUDIO_CHANNEL_STATE_NORMAL);
}

void
AudioDestinationNode::NotifyStableState()
{
  mExtraCurrentTimeUpdatedSinceLastStableState = false;
}

static NS_DEFINE_CID(kAppShellCID, NS_APPSHELL_CID);

void
AudioDestinationNode::ScheduleStableStateNotification()
{
  nsCOMPtr<nsIAppShell> appShell = do_GetService(kAppShellCID);
  if (appShell) {
    nsCOMPtr<nsIRunnable> event =
      NS_NewRunnableMethod(this, &AudioDestinationNode::NotifyStableState);
    appShell->RunInStableState(event);
  }
}

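// While this is the only node in its realtime context, the destination stream
// is blocked (see SetIsOnlyNodeForContext below) and does not advance;
// ExtraCurrentTime() reports how much wall-clock time has passed while blocked
// so that the context's currentTime can keep progressing. The value is
// recomputed at most once per stable state.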
double
AudioDestinationNode::ExtraCurrentTime()
{
  if (!mStartedBlockingDueToBeingOnlyNode.IsNull() &&
      !mExtraCurrentTimeUpdatedSinceLastStableState) {
    mExtraCurrentTimeUpdatedSinceLastStableState = true;
    mExtraCurrentTimeSinceLastStartedBlocking =
      (TimeStamp::Now() - mStartedBlockingDueToBeingOnlyNode).ToSeconds();
    ScheduleStableStateNotification();
  }
  return mExtraCurrentTime + mExtraCurrentTimeSinceLastStartedBlocking;
}

void
AudioDestinationNode::SetIsOnlyNodeForContext(bool aIsOnlyNode)
{
  if (!mStartedBlockingDueToBeingOnlyNode.IsNull() == aIsOnlyNode) {
    // Nothing changed.
    return;
  }

  if (!mStream) {
    // DestroyMediaStream has been called, presumably during CC Unlink().
    return;
  }

  if (mIsOffline) {
    // Don't block the destination stream for offline AudioContexts, since
    // we expect the zero data produced when there are no other nodes to
    // show up in its result buffer. Also, we would get confused by adding
    // ExtraCurrentTime before StartRendering has even been called.
    return;
  }

  if (aIsOnlyNode) {
    mStream->ChangeExplicitBlockerCount(1);
    mStartedBlockingDueToBeingOnlyNode = TimeStamp::Now();
    mExtraCurrentTimeSinceLastStartedBlocking = 0;
    // Don't do an update of mExtraCurrentTimeSinceLastStartedBlocking until the
    // next stable state.
    mExtraCurrentTimeUpdatedSinceLastStableState = true;
    ScheduleStableStateNotification();
  } else {
    // Force update of mExtraCurrentTimeSinceLastStartedBlocking if necessary
    ExtraCurrentTime();
    mExtraCurrentTime += mExtraCurrentTimeSinceLastStartedBlocking;
    mStream->ChangeExplicitBlockerCount(-1);
    mStartedBlockingDueToBeingOnlyNode = TimeStamp();
  }
}

} // namespace dom
} // namespace mozilla