Bug 865257 - Implement MediaStreamAudioDestinationNode. r=ehsan,roc

Josh Matthews 2013-05-21 15:17:47 -04:00
parent 2d265fae5c
commit 9ea183ce8d
19 changed files with 318 additions and 22 deletions

View File

@@ -19,6 +19,7 @@ namespace mozilla {
* for regular audio contexts, and the rate requested by the web content
* for offline audio contexts.
* Each chunk in the track is a single block of WEBAUDIO_BLOCK_SIZE samples.
* Note: This must be a different value than MEDIA_STREAM_DEST_TRACK_ID
*/
static const int AUDIO_NODE_STREAM_TRACK_ID = 1;
@@ -235,23 +236,6 @@ AudioNodeStream::SetChannelMixingParametersImpl(uint32_t aNumberOfChannels,
mChannelInterpretation = aChannelInterpretation;
}
StreamBuffer::Track*
AudioNodeStream::EnsureTrack()
{
StreamBuffer::Track* track = mBuffer.FindTrack(AUDIO_NODE_STREAM_TRACK_ID);
if (!track) {
nsAutoPtr<MediaSegment> segment(new AudioSegment());
for (uint32_t j = 0; j < mListeners.Length(); ++j) {
MediaStreamListener* l = mListeners[j];
l->NotifyQueuedTrackChanges(Graph(), AUDIO_NODE_STREAM_TRACK_ID, mSampleRate, 0,
MediaStreamListener::TRACK_EVENT_CREATED,
*segment);
}
track = &mBuffer.AddTrack(AUDIO_NODE_STREAM_TRACK_ID, mSampleRate, 0, segment.forget());
}
return track;
}
bool
AudioNodeStream::AllInputsFinished() const
{
@@ -399,7 +383,7 @@ AudioNodeStream::ProduceOutput(GraphTime aFrom, GraphTime aTo)
FinishOutput();
}
StreamBuffer::Track* track = EnsureTrack();
StreamBuffer::Track* track = EnsureTrack(AUDIO_NODE_STREAM_TRACK_ID, mSampleRate);
AudioSegment* segment = track->Get<AudioSegment>();
@@ -460,7 +444,7 @@ AudioNodeStream::ProduceOutput(GraphTime aFrom, GraphTime aTo)
TrackTicks
AudioNodeStream::GetCurrentPosition()
{
return EnsureTrack()->Get<AudioSegment>()->GetDuration();
return EnsureTrack(AUDIO_NODE_STREAM_TRACK_ID, mSampleRate)->Get<AudioSegment>()->GetDuration();
}
void
@@ -470,7 +454,7 @@ AudioNodeStream::FinishOutput()
return;
}
StreamBuffer::Track* track = EnsureTrack();
StreamBuffer::Track* track = EnsureTrack(AUDIO_NODE_STREAM_TRACK_ID, mSampleRate);
track->SetEnded();
FinishOnGraphThread();

View File

@@ -116,7 +116,6 @@ public:
protected:
void FinishOutput();
StreamBuffer::Track* EnsureTrack();
void ObtainInputBlock(AudioChunk& aTmpChunk, uint32_t aPortIndex);
// The engine that will generate output for this node.

View File

@@ -8,6 +8,7 @@
#include "nsContentUtils.h"
#include "mozilla/dom/MediaStreamBinding.h"
#include "mozilla/dom/LocalMediaStreamBinding.h"
#include "mozilla/dom/AudioNode.h"
#include "MediaStreamGraph.h"
#include "AudioStreamTrack.h"
#include "VideoStreamTrack.h"
@@ -39,6 +40,15 @@ NS_IMPL_CYCLE_COLLECTION_TRACE_WRAPPERCACHE(DOMMediaStream)
NS_IMPL_ISUPPORTS_INHERITED1(DOMLocalMediaStream, DOMMediaStream,
nsIDOMLocalMediaStream)
NS_IMPL_CYCLE_COLLECTION_INHERITED_1(DOMAudioNodeMediaStream, DOMMediaStream,
mStreamNode)
NS_IMPL_ADDREF_INHERITED(DOMAudioNodeMediaStream, DOMMediaStream)
NS_IMPL_RELEASE_INHERITED(DOMAudioNodeMediaStream, DOMMediaStream)
NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION_INHERITED(DOMAudioNodeMediaStream)
NS_INTERFACE_MAP_END_INHERITING(DOMMediaStream)
class DOMMediaStream::StreamListener : public MediaStreamListener {
public:
StreamListener(DOMMediaStream* aStream)
@@ -346,3 +356,18 @@ DOMLocalMediaStream::CreateTrackUnionStream(nsIDOMWindow* aWindow,
stream->InitTrackUnionStream(aWindow, aHintContents);
return stream.forget();
}
DOMAudioNodeMediaStream::DOMAudioNodeMediaStream(AudioNode* aNode)
: mStreamNode(aNode)
{
}
already_AddRefed<DOMAudioNodeMediaStream>
DOMAudioNodeMediaStream::CreateTrackUnionStream(nsIDOMWindow* aWindow,
AudioNode* aNode,
TrackTypeHints aHintContents)
{
nsRefPtr<DOMAudioNodeMediaStream> stream = new DOMAudioNodeMediaStream(aNode);
stream->InitTrackUnionStream(aWindow, aHintContents);
return stream.forget();
}

View File

@@ -33,6 +33,7 @@ namespace mozilla {
class MediaStream;
namespace dom {
class AudioNode;
class MediaStreamTrack;
class AudioStreamTrack;
class VideoStreamTrack;
@@ -206,6 +207,29 @@ public:
CreateTrackUnionStream(nsIDOMWindow* aWindow, TrackTypeHints aHintContents = 0);
};
class DOMAudioNodeMediaStream : public DOMMediaStream
{
typedef dom::AudioNode AudioNode;
public:
DOMAudioNodeMediaStream(AudioNode* aNode);
NS_DECL_ISUPPORTS_INHERITED
NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(DOMAudioNodeMediaStream, DOMMediaStream)
/**
* Create a DOMAudioNodeMediaStream whose underlying stream is a TrackUnionStream.
*/
static already_AddRefed<DOMAudioNodeMediaStream>
CreateTrackUnionStream(nsIDOMWindow* aWindow,
AudioNode* aNode,
TrackTypeHints aHintContents = 0);
private:
// If this object wraps a stream owned by an AudioNode, we need to ensure that
// the node isn't cycle-collected too early.
nsRefPtr<AudioNode> mStreamNode;
};
}
#endif /* NSDOMMEDIASTREAM_H_ */

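For orientation, here is how the new factory is consumed later in this same commit, condensed (not standalone-compilable) from the MediaStreamAudioDestinationNode constructor. Passing the node into CreateTrackUnionStream populates mStreamNode, so the wrapper stream keeps its AudioNode alive until both can be collected together.

// Condensed from MediaStreamAudioDestinationNode.cpp below.
nsRefPtr<DOMMediaStream> domStream =
  DOMAudioNodeMediaStream::CreateTrackUnionStream(GetOwner(), this,
                                                  DOMMediaStream::HINT_CONTENTS_AUDIO);
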
View File

@@ -1525,6 +1525,23 @@ MediaStream::FinishOnGraphThread()
GraphImpl()->FinishStream(this);
}
StreamBuffer::Track*
MediaStream::EnsureTrack(TrackID aTrackId, TrackRate aSampleRate)
{
StreamBuffer::Track* track = mBuffer.FindTrack(aTrackId);
if (!track) {
nsAutoPtr<MediaSegment> segment(new AudioSegment());
for (uint32_t j = 0; j < mListeners.Length(); ++j) {
MediaStreamListener* l = mListeners[j];
l->NotifyQueuedTrackChanges(Graph(), aTrackId, aSampleRate, 0,
MediaStreamListener::TRACK_EVENT_CREATED,
*segment);
}
track = &mBuffer.AddTrack(aTrackId, aSampleRate, 0, segment.forget());
}
return track;
}
void
MediaStream::RemoveAllListenersImpl()
{

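EnsureTrack was previously private to AudioNodeStream; this hunk promotes it to MediaStream and parameterizes it by track ID and sample rate so that more than one caller can lazily create its own track. A condensed sketch of the two call sites in this commit (not standalone code) shows why the two track IDs must differ:

// AudioNodeStream keeps using its fixed ID for the node's output track:
StreamBuffer::Track* nodeTrack =
  EnsureTrack(AUDIO_NODE_STREAM_TRACK_ID, mSampleRate);

// MediaStreamDestinationEngine (added below) writes into a separate track
// on the TrackUnionStream, identified by MEDIA_STREAM_DEST_TRACK_ID:
StreamBuffer::Track* destTrack =
  mOutputStream->EnsureTrack(MEDIA_STREAM_DEST_TRACK_ID, aStream->SampleRate());
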
View File

@@ -277,9 +277,11 @@ public:
, mMainThreadDestroyed(false)
, mGraph(nullptr)
{
MOZ_COUNT_CTOR(MediaStream);
}
virtual ~MediaStream()
{
MOZ_COUNT_DTOR(MediaStream);
NS_ASSERTION(mMainThreadDestroyed, "Should have been destroyed already");
NS_ASSERTION(mMainThreadListeners.IsEmpty(),
"All main thread listeners should have been removed");
@@ -431,6 +433,8 @@ public:
bool HasCurrentData() { return mHasCurrentData; }
StreamBuffer::Track* EnsureTrack(TrackID aTrack, TrackRate aSampleRate);
void ApplyTrackDisabling(TrackID aTrackID, MediaSegment* aSegment);
DOMMediaStream* GetWrapper()

View File

@@ -26,6 +26,7 @@ class TrackUnionStream : public ProcessedMediaStream {
public:
TrackUnionStream(DOMMediaStream* aWrapper) :
ProcessedMediaStream(aWrapper),
mFilterCallback(nullptr),
mMaxTrackID(0) {}
virtual void RemoveInput(MediaInputPort* aPort)
@@ -75,7 +76,7 @@ public:
break;
}
}
if (!found) {
if (!found && (!mFilterCallback || mFilterCallback(tracks.get()))) {
bool trackFinished = false;
uint32_t mapIndex = AddTrack(mInputs[i], tracks.get(), aFrom);
CopyTrackData(tracks.get(), mapIndex, aFrom, aTo, &trackFinished);
@@ -107,7 +108,16 @@ public:
}
}
// Consumers may specify a filtering callback to apply to every input track.
// Returns true to allow the track to act as an input; false to reject it entirely.
typedef bool (*TrackIDFilterCallback)(StreamBuffer::Track*);
void SetTrackIDFilter(TrackIDFilterCallback aCallback) {
mFilterCallback = aCallback;
}
protected:
TrackIDFilterCallback mFilterCallback;
// Only non-ended tracks are allowed to persist in this map.
struct TrackMapEntry {
MediaInputPort* mInputPort;

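The new SetTrackIDFilter hook lets a consumer veto input tracks before they are copied into the union stream. For reference, this is how the hook is exercised later in this commit (condensed from MediaStreamAudioDestinationNode.cpp): a plain function that admits only the destination node's own output track.

// Condensed from MediaStreamAudioDestinationNode.cpp below.
static bool FilterAudioNodeStreamTrack(StreamBuffer::Track* aTrack)
{
  return aTrack->GetID() == MEDIA_STREAM_DEST_TRACK_ID;
}

// Installed on the TrackUnionStream that backs the DOM wrapper:
tus->SetTrackIDFilter(FilterAudioNodeStreamTrack);
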
View File

@@ -71,6 +71,7 @@ EXPORTS += [
'SharedBuffer.h',
'StreamBuffer.h',
'TimeVarying.h',
'TrackUnionStream.h',
'VideoFrameContainer.h',
'VideoSegment.h',
'VideoUtils.h',

View File

@@ -24,6 +24,7 @@
#include "ScriptProcessorNode.h"
#include "ChannelMergerNode.h"
#include "ChannelSplitterNode.h"
#include "MediaStreamAudioDestinationNode.h"
#include "WaveShaperNode.h"
#include "WaveTable.h"
#include "ConvolverNode.h"
@@ -204,6 +205,14 @@ bool IsValidBufferSize(uint32_t aBufferSize) {
}
already_AddRefed<MediaStreamAudioDestinationNode>
AudioContext::CreateMediaStreamDestination()
{
nsRefPtr<MediaStreamAudioDestinationNode> node =
new MediaStreamAudioDestinationNode(this);
return node.forget();
}
already_AddRefed<ScriptProcessorNode>
AudioContext::CreateScriptProcessor(uint32_t aBufferSize,
uint32_t aNumberOfInputChannels,

View File

@@ -51,6 +51,7 @@ class DelayNode;
class DynamicsCompressorNode;
class GainNode;
class GlobalObject;
class MediaStreamAudioDestinationNode;
class OfflineRenderSuccessCallback;
class PannerNode;
class ScriptProcessorNode;
@@ -125,6 +126,9 @@ public:
CreateBuffer(JSContext* aJSContext, ArrayBuffer& aBuffer,
bool aMixToMono, ErrorResult& aRv);
already_AddRefed<MediaStreamAudioDestinationNode>
CreateMediaStreamDestination();
already_AddRefed<ScriptProcessorNode>
CreateScriptProcessor(uint32_t aBufferSize,
uint32_t aNumberOfInputChannels,

View File

@@ -0,0 +1,95 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "MediaStreamAudioDestinationNode.h"
#include "mozilla/dom/AudioStreamTrack.h"
#include "mozilla/dom/MediaStreamAudioDestinationNodeBinding.h"
#include "AudioNodeEngine.h"
#include "AudioNodeStream.h"
#include "DOMMediaStream.h"
#include "TrackUnionStream.h"
namespace mozilla {
namespace dom {
NS_IMPL_CYCLE_COLLECTION_INHERITED_1(MediaStreamAudioDestinationNode, AudioNode, mDOMStream)
NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION_INHERITED(MediaStreamAudioDestinationNode)
NS_INTERFACE_MAP_END_INHERITING(AudioNode)
NS_IMPL_ADDREF_INHERITED(MediaStreamAudioDestinationNode, AudioNode)
NS_IMPL_RELEASE_INHERITED(MediaStreamAudioDestinationNode, AudioNode)
// This must be a different value than AUDIO_NODE_STREAM_TRACK_ID
static const int MEDIA_STREAM_DEST_TRACK_ID = 2;
class MediaStreamDestinationEngine : public AudioNodeEngine {
public:
MediaStreamDestinationEngine(AudioNode* aNode, ProcessedMediaStream* aOutputStream)
: AudioNodeEngine(aNode)
, mOutputStream(aOutputStream)
{
MOZ_ASSERT(mOutputStream);
}
virtual void ProduceAudioBlock(AudioNodeStream* aStream,
const AudioChunk& aInput,
AudioChunk* aOutput,
bool* aFinished) MOZ_OVERRIDE
{
*aOutput = aInput;
StreamBuffer::Track* track = mOutputStream->EnsureTrack(MEDIA_STREAM_DEST_TRACK_ID,
aStream->SampleRate());
AudioSegment* segment = track->Get<AudioSegment>();
segment->AppendAndConsumeChunk(aOutput);
}
private:
ProcessedMediaStream* mOutputStream;
};
// This callback is used to ensure that only the audio data for this track is audible
static bool FilterAudioNodeStreamTrack(StreamBuffer::Track* aTrack)
{
return aTrack->GetID() == MEDIA_STREAM_DEST_TRACK_ID;
}
MediaStreamAudioDestinationNode::MediaStreamAudioDestinationNode(AudioContext* aContext)
: AudioNode(aContext,
2,
ChannelCountMode::Explicit,
ChannelInterpretation::Speakers)
, mDOMStream(DOMAudioNodeMediaStream::CreateTrackUnionStream(GetOwner(),
this,
DOMMediaStream::HINT_CONTENTS_AUDIO))
{
TrackUnionStream* tus = static_cast<TrackUnionStream*>(mDOMStream->GetStream());
MOZ_ASSERT(tus == mDOMStream->GetStream()->AsProcessedStream());
tus->SetTrackIDFilter(FilterAudioNodeStreamTrack);
MediaStreamDestinationEngine* engine = new MediaStreamDestinationEngine(this, tus);
mStream = aContext->Graph()->CreateAudioNodeStream(engine, MediaStreamGraph::INTERNAL_STREAM);
mPort = tus->AllocateInputPort(mStream, 0);
}
void
MediaStreamAudioDestinationNode::DestroyMediaStream()
{
AudioNode::DestroyMediaStream();
if (mPort) {
mPort->Destroy();
mPort = nullptr;
}
}
JSObject*
MediaStreamAudioDestinationNode::WrapObject(JSContext* aCx, JS::Handle<JSObject*> aScope)
{
return MediaStreamAudioDestinationNodeBinding::Wrap(aCx, aScope, this);
}
}
}

View File

@@ -0,0 +1,48 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef MediaStreamAudioDestinationNode_h_
#define MediaStreamAudioDestinationNode_h_
#include "AudioNode.h"
namespace mozilla {
class DOMMediaStream;
namespace dom {
class MediaStreamAudioDestinationNode : public AudioNode
{
public:
explicit MediaStreamAudioDestinationNode(AudioContext* aContext);
NS_DECL_ISUPPORTS_INHERITED
NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(MediaStreamAudioDestinationNode, AudioNode)
virtual JSObject* WrapObject(JSContext* aCx,
JS::Handle<JSObject*> aScope) MOZ_OVERRIDE;
virtual uint16_t NumberOfOutputs() const MOZ_FINAL MOZ_OVERRIDE
{
return 0;
}
virtual void DestroyMediaStream() MOZ_OVERRIDE;
DOMMediaStream* DOMStream() const
{
return mDOMStream;
}
private:
nsRefPtr<DOMMediaStream> mDOMStream;
nsRefPtr<MediaInputPort> mPort;
};
}
}
#endif

View File

@@ -39,6 +39,7 @@ EXPORTS.mozilla.dom += [
'DynamicsCompressorNode.h',
'EnableWebAudioCheck.h',
'GainNode.h',
'MediaStreamAudioDestinationNode.h',
'OfflineAudioCompletionEvent.h',
'PannerNode.h',
'ScriptProcessorNode.h',
@@ -65,6 +66,7 @@ CPP_SOURCES += [
'EnableWebAudioCheck.cpp',
'GainNode.cpp',
'MediaBufferDecoder.cpp',
'MediaStreamAudioDestinationNode.cpp',
'OfflineAudioCompletionEvent.cpp',
'PannerNode.cpp',
'ScriptProcessorNode.cpp',

View File

@@ -62,6 +62,7 @@ MOCHITEST_FILES := \
test_gainNodeInLoop.html \
test_maxChannelCount.html \
test_mediaDecoding.html \
test_mediaStreamAudioDestinationNode.html \
test_mixingRules.html \
test_nodeToParamConnection.html \
test_OfflineAudioContext.html \

View File

@@ -0,0 +1,46 @@
<!DOCTYPE HTML>
<html>
<head>
<title>Test MediaStreamAudioDestinationNode</title>
<script type="text/javascript" src="/tests/SimpleTest/SimpleTest.js"></script>
<script type="text/javascript" src="webaudio.js"></script>
<link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
</head>
<body>
<pre id="test">
<audio id="audioelem"></audio>
<script class="testbody" type="text/javascript">
SimpleTest.waitForExplicitFinish();
addLoadEvent(function() {
SpecialPowers.setBoolPref("media.webaudio.enabled", true);
var context = new AudioContext();
var buffer = context.createBuffer(1, 2048, context.sampleRate);
for (var i = 0; i < 2048; ++i) {
buffer.getChannelData(0)[i] = Math.sin(440 * 2 * Math.PI * i / context.sampleRate);
}
var source = context.createBufferSource();
source.buffer = buffer;
var dest = context.createMediaStreamDestination();
source.connect(dest);
var elem = document.getElementById('audioelem');
elem.mozSrcObject = dest.stream;
elem.onloadedmetadata = function() {
ok(true, "got metadata event");
setTimeout(function() {
is(elem.played.length, 1, "should have a played interval");
is(elem.played.start(0), 0, "should have played immediately");
isnot(elem.played.end(0), 0, "should have played for a non-zero interval");
SpecialPowers.clearUserPref("media.webaudio.enabled");
SimpleTest.finish();
}, 2000);
};
source.start(0);
elem.play();
});
</script>
</pre>
</body>
</html>

View File

@@ -624,6 +624,11 @@ DOMInterfaces = {
'skipGen': True
}],
'MediaStreamAudioDestinationNode': {
'resultNotAddRefed': [ 'stream' ],
'binaryNames': { 'stream': 'DOMStream' }
},
'MediaStreamList': {
'headerFile': 'MediaStreamList.h',
'resultNotAddRefed': [ '__indexedGetter' ],

View File

@@ -35,6 +35,9 @@ interface AudioContext : EventTarget {
[Creator]
AudioBufferSourceNode createBufferSource();
[Creator]
MediaStreamAudioDestinationNode createMediaStreamDestination();
[Creator, Throws]
ScriptProcessorNode createScriptProcessor(optional unsigned long bufferSize = 0,
optional unsigned long numberOfInputChannels = 2,

View File

@@ -0,0 +1,18 @@
/* -*- Mode: IDL; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/.
*
* The origin of this IDL file is
* https://dvcs.w3.org/hg/audio/raw-file/tip/webaudio/specification.html
*
* Copyright © 2012 W3C® (MIT, ERCIM, Keio), All Rights Reserved. W3C
* liability, trademark and document use rules apply.
*/
[PrefControlled]
interface MediaStreamAudioDestinationNode : AudioNode {
readonly attribute MediaStream stream;
};

View File

@@ -173,6 +173,7 @@ webidl_files = \
Location.webidl \
MediaError.webidl \
MediaStream.webidl \
MediaStreamAudioDestinationNode.webidl \
MediaStreamEvent.webidl \
MediaStreamTrack.webidl \
MessageEvent.webidl \