Bug 816780 - Merge all incoming m-lines into one MediaStream. r=jesup,abr

This commit is contained in:
EKR 2013-01-24 08:34:18 -08:00
parent e8edc81a2c
commit d005034d5d
16 changed files with 383 additions and 296 deletions

View File

@ -29,8 +29,8 @@ https://bugzilla.mozilla.org/show_bug.cgi?id=796892
var pcRemote;
var test_data = {
pcLocal: { audio: [], video: []},
pcRemote: { audio: [], video: []}
pcLocal: [],
pcRemote: []
};
runTest(function () {
@ -42,21 +42,17 @@ https://bugzilla.mozilla.org/show_bug.cgi?id=796892
pcRemote = new mozRTCPeerConnection();
pcLocal.onaddstream = function (aObj) {
test_data.pcLocal[aObj.type].push(aObj.stream);
test_data.pcLocal.push(aObj.stream);
if (aObj.type === "audio") {
audioPCRemote.mozSrcObject = aObj.stream;
audioPCRemote.play();
}
};
pcRemote.onaddstream = function (aObj) {
test_data.pcRemote[aObj.type].push(aObj.stream);
test_data.pcRemote.push(aObj.stream);
if (aObj.type === "audio") {
audioPCLocal.mozSrcObject = aObj.stream;
audioPCLocal.play();
}
};
navigator.mozGetUserMedia({audio: true, fake: true}, function onSuccess(aLocalInputStream) {
@ -74,18 +70,12 @@ https://bugzilla.mozilla.org/show_bug.cgi?id=796892
is(pcRemote.localStreams.length, 1,
"A single local stream has been attached to the remote peer");
is(test_data.pcLocal.audio.length, 1,
"A remote audio stream has been attached to the local peer");
is(test_data.pcLocal.video.length, 0,
"A temporary remote video stream has been attached to the local peer");
is(test_data.pcRemote.audio.length, 1,
"A remote audio stream has been attached to the remote peer");
is(test_data.pcRemote.video.length, 0,
"A temporary remote video stream has been attached to the remote peer");
// TODO: check that the streams are of the expected types.
// Bug 834837.
ok(PeerConnection.findStream(pcLocal.remoteStreams, test_data.pcLocal.audio[0]) !== -1,
ok(PeerConnection.findStream(pcLocal.remoteStreams, test_data.pcLocal[0]) !== -1,
"Remote audio stream for local peer is accessible");
ok(PeerConnection.findStream(pcRemote.remoteStreams, test_data.pcRemote.audio[0]) !== -1,
ok(PeerConnection.findStream(pcRemote.remoteStreams, test_data.pcRemote[0]) !== -1,
"Remote audio stream for remote peer is accessible");
info("For now simply disconnect. We will add checks for media in a follow-up bug");

View File

@ -15,8 +15,6 @@ https://bugzilla.mozilla.org/show_bug.cgi?id=796890
<a target="_blank" href="https://bugzilla.mozilla.org/show_bug.cgi?id=796890">Basic audio-video peer connection</a>
<p id="display"></p>
<div id="content" style="display: none">
<audio id="audioPCLocal" controls></audio>
<audio id="audioPCRemote" controls></audio>
<audio id="audioLocal" controls></audio>
<video id="videoPCLocal" width="160" height="120" controls></video>
@ -37,8 +35,8 @@ https://bugzilla.mozilla.org/show_bug.cgi?id=796890
var pcRemote;
var test_data = {
pcLocal: { audio: [], video: []},
pcRemote: { audio: [], video: []}
pcLocal: [],
pcRemote: []
};
runTest(function () {
@ -54,37 +52,17 @@ https://bugzilla.mozilla.org/show_bug.cgi?id=796890
pcRemote = new mozRTCPeerConnection();
pcLocal.onaddstream = function (aObj) {
test_data.pcLocal[aObj.type].push(aObj.stream);
test_data.pcLocal.push(aObj.stream);
switch (aObj.type) {
case "audio":
audioPCRemote.mozSrcObject = aObj.stream;
audioPCRemote.play();
break;
case "video":
videoPCRemote.mozSrcObject = aObj.stream;
videoPCRemote.play();
break;
default:
ok(false, "Not supported type of MediaStream for local peer: " + aObj.type);
}
};
pcRemote.onaddstream = function (aObj) {
test_data.pcRemote[aObj.type].push(aObj.stream);
test_data.pcRemote.push(aObj.stream);
switch (aObj.type) {
case "audio":
audioPCLocal.mozSrcObject = aObj.stream;
audioPCLocal.play();
break;
case "video":
videoPCLocal.mozSrcObject = aObj.stream;
videoPCLocal.play();
break;
default:
ok(false, "Not supported type of MediaStream for remote peer: " + aObj.type);
}
};
navigator.mozGetUserMedia({audio: true, fake: true},
@ -115,23 +93,17 @@ https://bugzilla.mozilla.org/show_bug.cgi?id=796890
is(pcRemote.localStreams.length, 2,
"Two local local streams have been attached to the remote peer");
is(test_data.pcLocal.video.length, 1,
"A remote video stream has been attached to the local peer");
is(test_data.pcLocal.audio.length, 1,
"A remote audio stream has been attached to the local peer");
is(test_data.pcRemote.video.length, 1,
"A remote video stream has been attached to the remote peer");
is(test_data.pcRemote.audio.length, 1,
"A remote audio stream has been attached to the remote peer");
is(test_data.pcLocal.length, 1,
"A remote stream has been attached to the local peer");
is(test_data.pcRemote.length, 1,
"A remote stream has been attached to the remote peer");
ok(PeerConnection.findStream(pcLocal.remoteStreams, test_data.pcLocal.audio[0]) !== -1,
"Remote audio stream for local peer is accessible");
ok(PeerConnection.findStream(pcLocal.remoteStreams, test_data.pcLocal.video[0]) !== -1,
"Remote video stream for local peer is accessible");
ok(PeerConnection.findStream(pcRemote.remoteStreams, test_data.pcRemote.audio[0]) !== -1,
"Remote audio stream for remote peer is accessible");
ok(PeerConnection.findStream(pcRemote.remoteStreams, test_data.pcRemote.video[0]) !== -1,
"Remote video stream for remote peer is accessible");
// TODO: check that the streams are of the expected types.
// Bug 834837.
ok(PeerConnection.findStream(pcLocal.remoteStreams, test_data.pcLocal[0]) !== -1,
"Remote stream for local peer is accessible");
ok(PeerConnection.findStream(pcRemote.remoteStreams, test_data.pcRemote[0]) !== -1,
"Remote stream for remote peer is accessible");
info("For now simply disconnect. We will add checks for media in a follow-up bug");
disconnect();

View File

@ -15,8 +15,6 @@ https://bugzilla.mozilla.org/show_bug.cgi?id=796890
<a target="_blank" href="https://bugzilla.mozilla.org/show_bug.cgi?id=796890">Basic audio-video peer connection</a>
<p id="display"></p>
<div id="content" style="display: none">
<audio id="audioPCLocal" controls></audio>
<audio id="audioPCRemote" controls></audio>
<audio id="audioLocal" controls></audio>
<video id="videoPCLocal" width="160" height="120" controls></video>
@ -26,9 +24,6 @@ https://bugzilla.mozilla.org/show_bug.cgi?id=796890
<pre id="test">
<script type="application/javascript">
var audioLocal;
var audioPCLocal;
var audioPCRemote;
var videoLocal;
var videoPCLocal;
var videoPCRemote;
@ -37,15 +32,12 @@ https://bugzilla.mozilla.org/show_bug.cgi?id=796890
var pcRemote;
var test_data = {
pcLocal: { audio: [], video: []},
pcRemote: { audio: [], video: []}
pcLocal: [],
pcRemote: []
};
runTest(function () {
audioLocal = document.getElementById("audioLocal");
audioPCLocal = document.getElementById("audioPCLocal");
audioPCRemote = document.getElementById("audioPCRemote");
videoLocal = document.getElementById("videoLocal");
videoPCLocal = document.getElementById("videoPCLocal");
videoPCRemote = document.getElementById("videoPCRemote");
@ -54,37 +46,17 @@ https://bugzilla.mozilla.org/show_bug.cgi?id=796890
pcRemote = new mozRTCPeerConnection();
pcLocal.onaddstream = function (aObj) {
test_data.pcLocal[aObj.type].push(aObj.stream);
test_data.pcLocal.push(aObj.stream);
switch (aObj.type) {
case "audio":
audioPCRemote.mozSrcObject = aObj.stream;
audioPCRemote.play();
break;
case "video":
videoPCRemote.mozSrcObject = aObj.stream;
videoPCRemote.play();
break;
default:
ok(false, "Not supported type of MediaStream for local peer: " + aObj.type);
}
};
pcRemote.onaddstream = function (aObj) {
test_data.pcRemote[aObj.type].push(aObj.stream);
test_data.pcRemote.push(aObj.stream);
switch (aObj.type) {
case "audio":
audioPCLocal.mozSrcObject = aObj.stream;
audioPCLocal.play();
break;
case "video":
videoPCLocal.mozSrcObject = aObj.stream;
videoPCLocal.play();
break;
default:
ok(false, "Not supported type of MediaStream for remote peer: " + aObj.type);
}
};
navigator.mozGetUserMedia({audio: true, video: true, fake: true},
@ -104,24 +76,18 @@ https://bugzilla.mozilla.org/show_bug.cgi?id=796890
is(pcRemote.localStreams.length, 1,
"A single local stream has been attached to the remote peer");
// Bug 816780 - onaddstream fires twice on a peerconnection when you add a stream with both audio and video
is(test_data.pcLocal.video.length, 1,
"A remote video stream has been attached to the local peer");
is(test_data.pcLocal.audio.length, 1,
"A remote audio stream has been attached to the local peer");
is(test_data.pcRemote.video.length, 1,
"A remote video stream has been attached to the remote peer");
is(test_data.pcRemote.audio.length, 1,
"A remote audio stream has been attached to the remote peer");
is(test_data.pcLocal.length, 1,
"A remote stream has been attached to the local peer");
is(test_data.pcRemote.length, 1,
"A remote stream has been attached to the remote peer");
ok(PeerConnection.findStream(pcLocal.remoteStreams, test_data.pcLocal.audio[0]) !== -1,
"Remote audio stream for local peer is accessible");
ok(PeerConnection.findStream(pcLocal.remoteStreams, test_data.pcLocal.video[0]) !== -1,
"Remote video stream for local peer is accessible");
ok(PeerConnection.findStream(pcRemote.remoteStreams, test_data.pcRemote.audio[0]) !== -1,
"Remote audio stream for remote peer is accessible");
ok(PeerConnection.findStream(pcRemote.remoteStreams, test_data.pcRemote.video[0]) !== -1,
"Remote video stream for remote peer is accessible");
// TODO: check that the streams are of the expected types.
// Bug 834837.
ok(PeerConnection.findStream(pcLocal.remoteStreams, test_data.pcLocal[0]) !== -1,
"Remote stream for local peer is accessible");
ok(PeerConnection.findStream(pcRemote.remoteStreams, test_data.pcRemote[0]) !== -1,
"Remote stream for remote peer is accessible");
info("For now simply disconnect. We will add checks for media in a follow-up bug");
disconnect();

View File

@ -29,8 +29,8 @@ https://bugzilla.mozilla.org/show_bug.cgi?id=796888
var pcRemote;
var test_data = {
pcLocal: { audio: [], video: []},
pcRemote: { audio: [], video: []}
pcLocal: [],
pcRemote: []
};
runTest(function () {
@ -42,21 +42,17 @@ https://bugzilla.mozilla.org/show_bug.cgi?id=796888
pcRemote = new mozRTCPeerConnection();
pcLocal.onaddstream = function (aObj) {
test_data.pcLocal[aObj.type].push(aObj.stream);
test_data.pcLocal.push(aObj.stream);
if (aObj.type === "video") {
videoPCRemote.mozSrcObject = aObj.stream;
videoPCRemote.play();
}
};
pcRemote.onaddstream = function (aObj) {
test_data.pcRemote[aObj.type].push(aObj.stream);
test_data.pcRemote.push(aObj.stream);
if (aObj.type === "video") {
videoPCLocal.mozSrcObject = aObj.stream;
videoPCLocal.play();
}
};
navigator.mozGetUserMedia({video: true, fake: true}, function onSuccess(aLocalInputStream) {
@ -74,18 +70,12 @@ https://bugzilla.mozilla.org/show_bug.cgi?id=796888
is(pcRemote.localStreams.length, 1,
"A single local stream has been attached to the remote peer");
is(test_data.pcLocal.video.length, 1,
"A remote video stream has been attached to the local peer");
is(test_data.pcLocal.audio.length, 0,
"A temporary remote audio stream has been attached to the local peer");
is(test_data.pcRemote.video.length, 1,
"A remote video stream has been attached to the remote peer");
is(test_data.pcRemote.audio.length, 0,
"A temporary remote audio stream has been attached to the remote peer");
// TODO: check that the streams are of the expected types.
// Bug 834837.
ok(PeerConnection.findStream(pcLocal.remoteStreams, test_data.pcLocal.video[0]) !== -1,
ok(PeerConnection.findStream(pcLocal.remoteStreams, test_data.pcLocal[0]) !== -1,
"Remote video stream for local peer is accessible");
ok(PeerConnection.findStream(pcRemote.remoteStreams, test_data.pcRemote.video[0]) !== -1,
ok(PeerConnection.findStream(pcRemote.remoteStreams, test_data.pcRemote[0]) !== -1,
"Remote video stream for remote peer is accessible");
info("For now simply disconnect. We will add checks for media in a follow-up bug");

View File

@ -29,8 +29,8 @@ https://bugzilla.mozilla.org/show_bug.cgi?id=827843
var pcRemote;
var test_data = {
pcLocal: { audio: [], video: []},
pcRemote: { audio: [], video: []}
pcLocal: [],
pcRemote: []
};
runTest(function () {
@ -42,21 +42,17 @@ https://bugzilla.mozilla.org/show_bug.cgi?id=827843
pcRemote = new mozRTCPeerConnection();
pcLocal.onaddstream = function (aObj) {
test_data.pcLocal[aObj.type].push(aObj.stream);
test_data.pcLocal.push(aObj.stream);
if (aObj.type === "audio") {
audioPCRemote.mozSrcObject = aObj.stream;
audioPCRemote.play();
}
};
pcRemote.onaddstream = function (aObj) {
test_data.pcRemote[aObj.type].push(aObj.stream);
test_data.pcRemote.push(aObj.stream);
if (aObj.type === "audio") {
audioPCLocal.mozSrcObject = aObj.stream;
audioPCLocal.play();
}
};
navigator.mozGetUserMedia({audio: true, fake: true}, function onSuccess(aLocalInputStream) {

View File

@ -907,7 +907,6 @@ static short vcmCreateRemoteStream_m(
cc_mcapid_t mcap_id,
const char *peerconnection,
int *pc_stream_id) {
uint32_t hints = 0;
nsresult res;
*pc_stream_id = -1;
@ -916,15 +915,8 @@ static short vcmCreateRemoteStream_m(
sipcc::PeerConnectionWrapper pc(peerconnection);
ENSURE_PC(pc, VCM_ERROR);
if (CC_IS_AUDIO(mcap_id)) {
hints |= nsDOMMediaStream::HINT_CONTENTS_AUDIO;
}
if (CC_IS_VIDEO(mcap_id)) {
hints |= nsDOMMediaStream::HINT_CONTENTS_VIDEO;
}
nsRefPtr<sipcc::RemoteSourceStreamInfo> info;
res = pc.impl()->CreateRemoteSourceStreamInfo(hints, &info);
res = pc.impl()->CreateRemoteSourceStreamInfo(&info);
if (NS_FAILED(res)) {
return VCM_ERROR;
}
@ -934,22 +926,8 @@ static short vcmCreateRemoteStream_m(
return VCM_ERROR;
}
if (CC_IS_AUDIO(mcap_id)) {
mozilla::AudioSegment *segment = new mozilla::AudioSegment();
segment->Init(1); // 1 Channel
// TODO(ekr@rtfm.com): Clean up Track IDs
info->GetMediaStream()->GetStream()->AsSourceStream()->AddTrack(1, 16000, 0, segment);
// We aren't going to add any more tracks
info->GetMediaStream()->GetStream()->AsSourceStream()->
AdvanceKnownTracksTime(mozilla::STREAM_TIME_MAX);
}
if (CC_IS_VIDEO(mcap_id)) {
// AddTrack takes ownership of segment
}
CSFLogDebug( logTag, "%s: created remote stream with index %d hints=%d",
__FUNCTION__, *pc_stream_id, hints);
CSFLogDebug( logTag, "%s: created remote stream with index %d",
__FUNCTION__, *pc_stream_id);
return 0;
}
@ -1356,6 +1334,7 @@ static int vcmRxStartICE_m(cc_mcapid_t mcap_id,
pc.impl()->GetMainThread().get(),
pc.impl()->GetSTSThread(),
stream->GetMediaStream()->GetStream(),
pc_track_id,
conduit, rtp_flow, rtcp_flow);
nsresult res = pipeline->Init();
@ -1399,6 +1378,7 @@ static int vcmRxStartICE_m(cc_mcapid_t mcap_id,
pc.impl()->GetMainThread().get(),
pc.impl()->GetSTSThread(),
stream->GetMediaStream()->GetStream(),
pc_track_id,
conduit, rtp_flow, rtcp_flow);
nsresult res = pipeline->Init();
@ -1985,6 +1965,7 @@ static int vcmTxStartICE_m(cc_mcapid_t mcap_id,
pc.impl()->GetMainThread().get(),
pc.impl()->GetSTSThread(),
stream->GetMediaStream()->GetStream(),
pc_track_id,
conduit, rtp_flow, rtcp_flow);
nsresult res = pipeline->Init();
@ -2025,6 +2006,7 @@ static int vcmTxStartICE_m(cc_mcapid_t mcap_id,
pc.impl()->GetMainThread().get(),
pc.impl()->GetSTSThread(),
stream->GetMediaStream()->GetStream(),
pc_track_id,
conduit, rtp_flow, rtcp_flow);
nsresult res = pipeline->Init();

View File

@ -17,6 +17,7 @@
#include "Layers.h"
#include "ImageTypes.h"
#include "ImageContainer.h"
#include "VideoUtils.h"
#endif
#include "logging.h"
@ -480,7 +481,8 @@ nsresult MediaPipelineTransmit::Init() {
description_ = pc_ + "| ";
description_ += conduit_->type() == MediaSessionConduit::AUDIO ?
"Transmit audio" : "Transmit video";
"Transmit audio[" : "Transmit video[";
description_ += track_id_ + "]";
// TODO(ekr@rtfm.com): Check for errors
MOZ_MTLOG(PR_LOG_DEBUG, "Attaching pipeline to stream "
@ -813,13 +815,28 @@ nsresult MediaPipelineReceiveAudio::Init() {
ASSERT_ON_THREAD(main_thread_);
MOZ_MTLOG(PR_LOG_DEBUG, __FUNCTION__);
description_ = pc_ + "| Receive audio";
description_ = pc_ + "| Receive audio[";
description_ += track_id_ + "]";
stream_->AddListener(listener_);
return MediaPipelineReceive::Init();
}
MediaPipelineReceiveAudio::PipelineListener::PipelineListener(
SourceMediaStream * source, TrackID track_id,
const RefPtr<MediaSessionConduit>& conduit)
: source_(source),
track_id_(track_id),
conduit_(conduit),
played_(0) {
mozilla::AudioSegment *segment = new mozilla::AudioSegment();
segment->Init(1); // 1 Channel
source_->AddTrack(track_id_, 16000, 0, segment);
source_->AdvanceKnownTracksTime(STREAM_TIME_MAX);
}
void MediaPipelineReceiveAudio::PipelineListener::
NotifyPull(MediaStreamGraph* graph, StreamTime desired_time) {
MOZ_ASSERT(source_);
@ -853,8 +870,7 @@ NotifyPull(MediaStreamGraph* graph, StreamTime desired_time) {
channels.AppendElement(samples_data);
segment.AppendFrames(samples.forget(), channels, samples_length);
source_->AppendToTrack(1, // TODO(ekr@rtfm.com): Track ID
&segment);
source_->AppendToTrack(track_id_, &segment);
played_ += 10;
}
@ -864,7 +880,10 @@ nsresult MediaPipelineReceiveVideo::Init() {
ASSERT_ON_THREAD(main_thread_);
MOZ_MTLOG(PR_LOG_DEBUG, __FUNCTION__);
description_ = pc_ + "| Receive video";
description_ = pc_ + "| Receive video[";
description_ += track_id_ + "]";
stream_->AddListener(listener_);
static_cast<VideoSessionConduit *>(conduit_.get())->
AttachRenderer(renderer_);
@ -872,28 +891,32 @@ nsresult MediaPipelineReceiveVideo::Init() {
return MediaPipelineReceive::Init();
}
MediaPipelineReceiveVideo::PipelineRenderer::PipelineRenderer(
MediaPipelineReceiveVideo *pipeline) :
pipeline_(pipeline),
width_(640), height_(480) {
MediaPipelineReceiveVideo::PipelineListener::PipelineListener(
SourceMediaStream* source, TrackID track_id)
: source_(source),
track_id_(track_id),
played_(0),
width_(640),
height_(480),
#ifdef MOZILLA_INTERNAL_API
image_container_(),
image_(),
#endif
monitor_("Video PipelineListener") {
#ifdef MOZILLA_INTERNAL_API
image_container_ = layers::LayerManager::CreateImageContainer();
SourceMediaStream *source =
pipeline_->stream_->AsSourceStream();
source->AddTrack(1 /* Track ID */, 30, 0, new VideoSegment());
source->AdvanceKnownTracksTime(STREAM_TIME_MAX);
source_->AddTrack(track_id_, USECS_PER_S, 0, new VideoSegment());
source_->AdvanceKnownTracksTime(STREAM_TIME_MAX);
#endif
}
void MediaPipelineReceiveVideo::PipelineRenderer::RenderVideoFrame(
void MediaPipelineReceiveVideo::PipelineListener::RenderVideoFrame(
const unsigned char* buffer,
unsigned int buffer_size,
uint32_t time_stamp,
int64_t render_time) {
#ifdef MOZILLA_INTERNAL_API
SourceMediaStream *source =
pipeline_->stream_->AsSourceStream();
ReentrantMonitorAutoEnter enter(monitor_);
// Create a video frame and append it to the track.
ImageFormat format = PLANAR_YCBCR;
@ -919,12 +942,30 @@ void MediaPipelineReceiveVideo::PipelineRenderer::RenderVideoFrame(
videoImage->SetData(data);
VideoSegment segment;
char buf[32];
PR_snprintf(buf, 32, "%p", source);
image_ = image.forget();
#endif
}
segment.AppendFrame(image.forget(), 1, gfxIntSize(width_, height_));
source->AppendToTrack(1, &(segment));
void MediaPipelineReceiveVideo::PipelineListener::
NotifyPull(MediaStreamGraph* graph, StreamTime desired_time) {
ReentrantMonitorAutoEnter enter(monitor_);
#ifdef MOZILLA_INTERNAL_API
nsRefPtr<layers::Image> image = image_;
TrackTicks target = TimeToTicksRoundUp(USECS_PER_S, desired_time);
TrackTicks delta = target - played_;
// Don't append if we've already provided a frame that supposedly
// goes past the current aDesiredTime. Doing so means a negative
// delta and thus messes up handling of the graph
if (delta > 0) {
VideoSegment segment;
segment.AppendFrame(image ? image.forget() : nullptr, delta,
gfxIntSize(width_, height_));
source_->AppendToTrack(track_id_, &(segment));
played_ = target;
}
#endif
}

View File

@ -16,6 +16,7 @@
#endif
#include "MediaConduitInterface.h"
#include "AudioSegment.h"
#include "mozilla/ReentrantMonitor.h"
#include "SrtpFlow.h"
#include "databuffer.h"
#include "runnable_utils.h"
@ -66,11 +67,13 @@ class MediaPipeline : public sigslot::has_slots<> {
nsCOMPtr<nsIEventTarget> main_thread,
nsCOMPtr<nsIEventTarget> sts_thread,
MediaStream *stream,
TrackID track_id,
RefPtr<MediaSessionConduit> conduit,
RefPtr<TransportFlow> rtp_transport,
RefPtr<TransportFlow> rtcp_transport)
: direction_(direction),
stream_(stream),
track_id_(track_id),
conduit_(conduit),
rtp_transport_(rtp_transport),
rtp_state_(MP_CONNECTING),
@ -169,6 +172,8 @@ class MediaPipeline : public sigslot::has_slots<> {
RefPtr<MediaStream> stream_; // A pointer to the stream we are servicing.
// Written on the main thread.
// Used on STS and MediaStreamGraph threads.
TrackID track_id_; // The track on the stream.
// Written and used as the stream_;
RefPtr<MediaSessionConduit> conduit_; // Our conduit. Written on the main
// thread. Read on STS thread.
@ -225,11 +230,12 @@ class MediaPipelineTransmit : public MediaPipeline {
nsCOMPtr<nsIEventTarget> main_thread,
nsCOMPtr<nsIEventTarget> sts_thread,
MediaStream *stream,
TrackID track_id,
RefPtr<MediaSessionConduit> conduit,
RefPtr<TransportFlow> rtp_transport,
RefPtr<TransportFlow> rtcp_transport) :
MediaPipeline(pc, TRANSMIT, main_thread, sts_thread,
stream, conduit, rtp_transport,
stream, track_id, conduit, rtp_transport,
rtcp_transport),
listener_(new PipelineListener(conduit)) {}
@ -303,11 +309,12 @@ class MediaPipelineReceive : public MediaPipeline {
nsCOMPtr<nsIEventTarget> main_thread,
nsCOMPtr<nsIEventTarget> sts_thread,
MediaStream *stream,
TrackID track_id,
RefPtr<MediaSessionConduit> conduit,
RefPtr<TransportFlow> rtp_transport,
RefPtr<TransportFlow> rtcp_transport) :
MediaPipeline(pc, RECEIVE, main_thread, sts_thread,
stream, conduit, rtp_transport,
stream, track_id, conduit, rtp_transport,
rtcp_transport),
segments_added_(0) {
}
@ -329,14 +336,15 @@ class MediaPipelineReceiveAudio : public MediaPipelineReceive {
nsCOMPtr<nsIEventTarget> main_thread,
nsCOMPtr<nsIEventTarget> sts_thread,
MediaStream *stream,
TrackID track_id,
RefPtr<AudioSessionConduit> conduit,
RefPtr<TransportFlow> rtp_transport,
RefPtr<TransportFlow> rtcp_transport) :
MediaPipelineReceive(pc, main_thread, sts_thread,
stream, conduit, rtp_transport,
stream, track_id, conduit, rtp_transport,
rtcp_transport),
listener_(new PipelineListener(stream->AsSourceStream(),
conduit)) {
track_id, conduit)) {
}
virtual void DetachMediaStream() {
@ -354,11 +362,8 @@ class MediaPipelineReceiveAudio : public MediaPipelineReceive {
// Separate class to allow ref counting
class PipelineListener : public MediaStreamListener {
public:
PipelineListener(SourceMediaStream * source,
const RefPtr<MediaSessionConduit>& conduit)
: source_(source),
conduit_(conduit),
played_(0) {}
PipelineListener(SourceMediaStream * source, TrackID track_id,
const RefPtr<MediaSessionConduit>& conduit);
// Implement MediaStreamListener
virtual void NotifyQueuedTrackChanges(MediaStreamGraph* graph, TrackID tid,
@ -370,6 +375,7 @@ class MediaPipelineReceiveAudio : public MediaPipelineReceive {
private:
SourceMediaStream *source_;
TrackID track_id_;
RefPtr<MediaSessionConduit> conduit_;
uint64_t played_; // Amount of media played in milliseconds.
};
@ -386,20 +392,29 @@ class MediaPipelineReceiveVideo : public MediaPipelineReceive {
nsCOMPtr<nsIEventTarget> main_thread,
nsCOMPtr<nsIEventTarget> sts_thread,
MediaStream *stream,
TrackID track_id,
RefPtr<VideoSessionConduit> conduit,
RefPtr<TransportFlow> rtp_transport,
RefPtr<TransportFlow> rtcp_transport) :
MediaPipelineReceive(pc, main_thread, sts_thread,
stream, conduit, rtp_transport,
stream, track_id, conduit, rtp_transport,
rtcp_transport),
renderer_(new PipelineRenderer(this)) {
renderer_(new PipelineRenderer(this)),
listener_(new PipelineListener(stream->AsSourceStream(), track_id)) {
}
// Called on the main thread.
virtual void DetachMediaStream() {
ASSERT_ON_THREAD(main_thread_);
conduit_ = nullptr; // Force synchronous destruction so we
// stop generating video.
stream_->RemoveListener(listener_);
// Remove our reference so that when the MediaStreamGraph
// releases the listener, it will be destroyed.
listener_ = nullptr;
stream_ = nullptr;
}
@ -408,34 +423,79 @@ class MediaPipelineReceiveVideo : public MediaPipelineReceive {
private:
class PipelineRenderer : public VideoRenderer {
public:
PipelineRenderer(MediaPipelineReceiveVideo *);
PipelineRenderer(MediaPipelineReceiveVideo *pipeline) :
pipeline_(pipeline) {}
void Detach() { pipeline_ = NULL; }
// Implement VideoRenderer
virtual void FrameSizeChange(unsigned int width,
unsigned int height,
unsigned int number_of_streams) {
pipeline_->listener_->FrameSizeChange(width, height, number_of_streams);
}
virtual void RenderVideoFrame(const unsigned char* buffer,
unsigned int buffer_size,
uint32_t time_stamp,
int64_t render_time) {
pipeline_->listener_->RenderVideoFrame(buffer, buffer_size, time_stamp,
render_time);
}
private:
MediaPipelineReceiveVideo *pipeline_; // Raw pointer to avoid cycles
};
// Separate class to allow ref counting
class PipelineListener : public MediaStreamListener {
public:
PipelineListener(SourceMediaStream * source, TrackID track_id);
// Implement MediaStreamListener
virtual void NotifyQueuedTrackChanges(MediaStreamGraph* graph, TrackID tid,
TrackRate rate,
TrackTicks offset,
uint32_t events,
const MediaSegment& queued_media) {}
virtual void NotifyPull(MediaStreamGraph* aGraph, StreamTime aDesiredTime);
// Accessors for external writes from the renderer
void FrameSizeChange(unsigned int width,
unsigned int height,
unsigned int number_of_streams) {
ReentrantMonitorAutoEnter enter(monitor_);
width_ = width;
height_ = height;
}
virtual void RenderVideoFrame(const unsigned char* buffer,
void RenderVideoFrame(const unsigned char* buffer,
unsigned int buffer_size,
uint32_t time_stamp,
int64_t render_time);
private:
MediaPipelineReceiveVideo *pipeline_; // Raw pointer to avoid cycles
#ifdef MOZILLA_INTERNAL_API
nsRefPtr<layers::ImageContainer> image_container_;
#endif
SourceMediaStream *source_;
TrackID track_id_;
TrackTicks played_; // Amount of media played.
int width_;
int height_;
#ifdef MOZILLA_INTERNAL_API
nsRefPtr<layers::ImageContainer> image_container_;
nsRefPtr<layers::Image> image_;
#endif
mozilla::ReentrantMonitor monitor_; // Monitor for processing WebRTC frames.
// Protects image_ against:
// - Writing from the GIPS thread
// - Reading from the MSG thread
};
friend class PipelineRenderer;
RefPtr<PipelineRenderer> renderer_;
RefPtr<PipelineListener> listener_;
};

View File

@ -177,7 +177,6 @@ public:
case REMOTESTREAMADD:
{
nsDOMMediaStream* stream = nullptr;
uint32_t hint;
if (!mRemoteStream) {
CSFLogErrorS(logTag, __FUNCTION__ << " GetRemoteStream returned NULL");
@ -188,15 +187,14 @@ public:
if (!stream) {
CSFLogErrorS(logTag, __FUNCTION__ << " GetMediaStream returned NULL");
} else {
hint = stream->GetHintContents();
if (hint == nsDOMMediaStream::HINT_CONTENTS_AUDIO) {
mObserver->OnAddStream(stream, "audio");
} else if (hint == nsDOMMediaStream::HINT_CONTENTS_VIDEO) {
// We provide a type field because it is in the IDL
// and we want code that looks at it not to crash.
// We use "video" so that if an app looks for
// that string it has some chance of working.
// TODO(ekr@rtfm.com): Bug 834847
// The correct way for content JS to know stream type
// is via get{Audio,Video}Tracks. See Bug 834835.
mObserver->OnAddStream(stream, "video");
} else {
CSFLogErrorS(logTag, __FUNCTION__ << "Audio & Video not supported");
MOZ_ASSERT(PR_FALSE);
}
}
break;
}
@ -278,14 +276,19 @@ PeerConnectionImpl::MakeMediaStream(uint32_t aHint, nsIDOMMediaStream** aRetval)
}
nsresult
PeerConnectionImpl::CreateRemoteSourceStreamInfo(uint32_t aHint, nsRefPtr<RemoteSourceStreamInfo>* aInfo)
PeerConnectionImpl::CreateRemoteSourceStreamInfo(nsRefPtr<RemoteSourceStreamInfo>*
aInfo)
{
MOZ_ASSERT(aInfo);
PC_AUTO_ENTER_API_CALL_NO_CHECK();
nsIDOMMediaStream* stream;
nsresult res = MakeMediaStream(aHint, &stream);
// We need to pass a dummy hint here because FakeMediaStream currently
// needs to actually propagate a hint for local streams.
// TODO(ekr@rtfm.com): Clean up when we have explicit track lists.
// See bug 834835.
nsresult res = MakeMediaStream(0, &stream);
if (NS_FAILED(res)) {
return res;
}

View File

@ -149,8 +149,7 @@ public:
return mRole;
}
nsresult CreateRemoteSourceStreamInfo(uint32_t aHint,
nsRefPtr<RemoteSourceStreamInfo>* aInfo);
nsresult CreateRemoteSourceStreamInfo(nsRefPtr<RemoteSourceStreamInfo>* aInfo);
// Implementation of the only observer we need
virtual void onCallEvent(

View File

@ -83,6 +83,14 @@ static fsmdef_media_t *
gsmsdp_add_media_line(fsmdef_dcb_t *dcb_p, const cc_media_cap_t *media_cap,
uint8_t cap_index, uint16_t level,
cpr_ip_type addr_type, boolean offer);
static boolean
gsmsdp_add_remote_stream(uint16_t idx, int pc_stream_id,
fsmdef_dcb_t *dcb_p);
static boolean
gsmsdp_add_remote_track(uint16_t idx, uint16_t track,
fsmdef_dcb_t *dcb_p, fsmdef_media_t *media);
extern cc_media_cap_table_t g_media_table;
@ -305,16 +313,15 @@ static const cc_media_remote_stream_table_t *gsmsdp_get_media_stream_table (fsmd
static const char *fname = "gsmsdp_get_media_stream_table";
if ( dcb_p->remote_media_stream_tbl == NULL ) {
dcb_p->remote_media_stream_tbl = (cc_media_remote_stream_table_t*) cpr_malloc(sizeof(cc_media_remote_stream_table_t));
memset(dcb_p->remote_media_stream_tbl, 0, sizeof(cc_media_remote_stream_table_t));
if ( dcb_p->remote_media_stream_tbl == NULL ) {
GSM_ERR_MSG(GSM_L_C_F_PREFIX"media track table malloc failed.\n",
dcb_p->line, dcb_p->call_id, fname);
return NULL;
}
}
memset(dcb_p->remote_media_stream_tbl, 0, sizeof(cc_media_remote_stream_table_t));
return (dcb_p->remote_media_stream_tbl);
}
@ -4272,6 +4279,7 @@ gsmsdp_negotiate_media_lines (fsm_fcb_t *fcb_p, cc_sdp_t *sdp_p, boolean initial
int rtcpmux = 0;
tinybool rtcp_mux = FALSE;
sdp_result_e sdp_res;
boolean created_media_stream = FALSE;
config_get_value(CFGID_SDPMODE, &sdpmode, sizeof(sdpmode));
@ -4571,23 +4579,54 @@ gsmsdp_negotiate_media_lines (fsm_fcb_t *fcb_p, cc_sdp_t *sdp_p, boolean initial
config_get_value(CFGID_RTCPMUX, &rtcpmux, sizeof(rtcpmux));
if (rtcpmux) {
gsmsdp_set_rtcp_mux_attribute (SDP_ATTR_RTCP_MUX, media->level, sdp_p->src_sdp, TRUE);
gsmsdp_set_rtcp_mux_attribute (SDP_ATTR_RTCP_MUX, media->level,
sdp_p->src_sdp, TRUE);
}
if (notify_stream_added) {
/*
* Add track to remote streams in dcb
*/
int pc_stream_id = 0;
if (SDP_MEDIA_APPLICATION != media_type) {
lsm_add_remote_stream (dcb_p->line, dcb_p->call_id, media, &pc_stream_id);
gsmsdp_add_remote_stream(i-1, pc_stream_id, dcb_p, media);
int pc_stream_id = -1;
/* This is a hack to keep all the media in a single
stream.
TODO(ekr@rtfm.com): revisit when we have media
assigned to streams in the SDP */
if (!created_media_stream){
lsm_add_remote_stream (dcb_p->line,
dcb_p->call_id,
media,
&pc_stream_id);
MOZ_ASSERT(pc_stream_id == 0);
/* Use index 0 because we only have one stream */
result = gsmsdp_add_remote_stream(0,
pc_stream_id,
dcb_p);
MOZ_ASSERT(result); /* TODO(ekr@rtfm.com)
add real error checking,
but this "can't fail" */
created_media_stream = TRUE;
}
/* Now add the track to the single media stream.
use index 0 because we only have one stream */
result = gsmsdp_add_remote_track(0, i, dcb_p, media);
MOZ_ASSERT(result); /* TODO(ekr@rtfm.com) add real
error checking, but this
"can't fail" */
} else {
/*
* Inform VCM that a Data Channel has been negotiated
*/
lsm_data_channel_negotiated(dcb_p->line, dcb_p->call_id, media, &pc_stream_id);
int pc_stream_id; /* Set but unused. Provided to
fulfill the API contract
TODO(adam@nostrum.com):
use or remove */
lsm_data_channel_negotiated(dcb_p->line, dcb_p->call_id,
media, &pc_stream_id);
}
}
}
@ -4675,13 +4714,21 @@ gsmsdp_negotiate_media_lines (fsm_fcb_t *fcb_p, cc_sdp_t *sdp_p, boolean initial
*/
if (notify_stream_added) {
for (j=0; j < CC_MAX_STREAMS; j++ ) {
/* If this stream has been created it should have > 0 tracks. */
if (dcb_p->remote_media_stream_tbl->streams[j].num_tracks) {
ui_on_remote_stream_added(evOnRemoteStreamAdd, dcb_p->line, dcb_p->call_id,
dcb_p->caller_id.call_instance_id, dcb_p->remote_media_stream_tbl->streams[j]);
if (dcb_p->remote_media_stream_tbl->streams[j].
num_tracks &&
(!dcb_p->remote_media_stream_tbl->streams[j].
num_tracks_notified)) {
/* Note that we only notify when the number of tracks
changes from 0 -> !0 (i.e. on creation).
TODO(adam@nostrum.com): Figure out how to notify
when streams gain tracks */
ui_on_remote_stream_added(evOnRemoteStreamAdd,
dcb_p->line, dcb_p->call_id,
dcb_p->caller_id.call_instance_id,
dcb_p->remote_media_stream_tbl->streams[j]);
/* Setting num_tracks == 0 indicates stream not set */
dcb_p->remote_media_stream_tbl->streams[j].num_tracks = 0;
dcb_p->remote_media_stream_tbl->streams[j].num_tracks_notified =
dcb_p->remote_media_stream_tbl->streams[j].num_tracks;
}
}
}
@ -6599,33 +6646,74 @@ gsmsdp_sdp_differs_from_previous_sdp (boolean rcv_only, fsmdef_media_t *media)
*
* Description:
*
* For each remote media stream add a track to the dcb for the
* Add a media stream with no tracks to the dcb for the
* current session.
*
* Parameters:
*
* idx - Stream index
* pc_stream_id - stream id from vcm layer, will be set as stream id
*
* dcb_p - Pointer to the DCB whose SDP is to be manipulated.
* media - Pointer to the fsmdef_media_t for the current media entry.
*
* returns TRUE for success and FALSE for failure
*/
void gsmsdp_add_remote_stream(uint16_t idx, int pc_stream_id, fsmdef_dcb_t *dcb_p, fsmdef_media_t *media) {
/*
* This function is in its infancy, but when complete will create a list
* of streams, each with its list of tracks and associated data.
* Currently this just creates 1 track per 1 stream.
*/
static boolean gsmsdp_add_remote_stream(uint16_t idx, int pc_stream_id, fsmdef_dcb_t *dcb_p) {
PR_ASSERT(idx < CC_MAX_STREAMS);
if (idx >= CC_MAX_STREAMS)
return FALSE;
PR_ASSERT(!dcb_p->remote_media_stream_tbl->streams[idx].created);
if (dcb_p->remote_media_stream_tbl->streams[idx].created)
return FALSE;
if (idx < CC_MAX_STREAMS) {
dcb_p->remote_media_stream_tbl->streams[idx].num_tracks = 1;
dcb_p->remote_media_stream_tbl->streams[idx].media_stream_id = pc_stream_id;
dcb_p->remote_media_stream_tbl->streams[idx].track[0].media_stream_track_id = idx+1;
dcb_p->remote_media_stream_tbl->streams[idx].track[0].video = (media->type == 0 ? FALSE : TRUE);
}
dcb_p->remote_media_stream_tbl->streams[idx].created = TRUE;
return TRUE;
}
/*
 * gsmsdp_add_remote_track
 *
 * Description:
 *
 * Add a track to an already-created media stream.
 *
 * Parameters:
 *
 * idx - Stream index
 * track - the track id
 * dcb_p - Pointer to the DCB whose SDP is to be manipulated.
 * media - the media object to add.
 *
 * returns TRUE for success and FALSE for failure
 */
static boolean gsmsdp_add_remote_track(uint16_t idx, uint16_t track,
                                       fsmdef_dcb_t *dcb_p,
                                       fsmdef_media_t *media) {
  cc_media_remote_track_table_t *stream;

  PR_ASSERT(idx < CC_MAX_STREAMS);
  if (idx >= CC_MAX_STREAMS)
    return FALSE;

  stream = &dcb_p->remote_media_stream_tbl->streams[idx];

  /* The stream must have been created by gsmsdp_add_remote_stream() first. */
  PR_ASSERT(stream->created);
  if (!stream->created)
    return FALSE;

  /* Room for one more track? track[] has CC_MAX_TRACKS slots, so any
     num_tracks <= CC_MAX_TRACKS - 1 is a valid write index. The debug
     assert is kept consistent with the release-mode guard (previously it
     asserted num_tracks < CC_MAX_TRACKS - 1, which fired spuriously when
     filling the last legal slot). */
  PR_ASSERT(stream->num_tracks < CC_MAX_TRACKS);
  if (stream->num_tracks >= CC_MAX_TRACKS)
    return FALSE;

  stream->track[stream->num_tracks].media_stream_track_id = track;
  stream->track[stream->num_tracks].video =
      (media->type == SDP_MEDIA_VIDEO) ? TRUE : FALSE;
  ++stream->num_tracks;

  return TRUE;
}
cc_causes_t

View File

@ -129,7 +129,6 @@ extern boolean gsmsdp_update_local_sdp_media_capability(fsmdef_dcb_t *dcb_p,
boolean refresh, boolean hold);
boolean is_gsmsdp_media_ip_updated_to_latest( fsmdef_dcb_t * dcb );
void gsmsdp_add_remote_stream(uint16_t idx, int pc_stream_id, fsmdef_dcb_t * dcb, fsmdef_media_t *media);
cc_causes_t gsmsdp_install_peer_ice_attributes(fsm_fcb_t *fcb_p);
cc_causes_t gsmsdp_configure_dtls_data_attributes(fsm_fcb_t *fcb_p);
cc_causes_t gsmsdp_find_level_from_mid(fsmdef_dcb_t * dcb, const char * mid, uint16_t *level);

View File

@ -979,24 +979,22 @@ lsm_rx_start (lsm_lcb_t *lcb, const char *fname, fsmdef_media_t *media)
/* TODO(ekr@rtfm.com): Needs changing for when we
have > 2 streams. (adam@nostrum.com): For now,
we know that the stream IDs are assigned in the
same order as things appear in the media objects.
The "level" in the media objects are indexed
starting from one, while pc_stream_id is
zero-indexed. This means that the stream ID
will (for now) be equal to media->level-1. */
we use all the same stream so pc_stream_id == 0
and the tracks are assigned in order and are
equal to the level in the media objects */
if ( media->cap_index == CC_VIDEO_1 ) {
attrs.video.opaque = media->video;
pc_stream_id = media->level - 1;
pc_stream_id = 0;
pc_track_id = media->level;
} else {
attrs.audio.packetization_period = media->packetization_period;
attrs.audio.max_packetization_period = media->max_packetization_period;
attrs.audio.avt_payload_type = media->avt_payload_type;
attrs.audio.mixing_mode = mix_mode;
attrs.audio.mixing_party = mix_party;
pc_stream_id = media->level - 1;
pc_stream_id = 0;
pc_track_id = media->level;
}
pc_track_id = 0;
dcb->cur_video_avail &= ~CC_ATTRIB_CAST;
config_get_value(CFGID_SDPMODE, &sdpmode, sizeof(sdpmode));

View File

@ -790,7 +790,9 @@ typedef struct cc_media_track_t_ {
} cc_media_track_t;
/*
 * Describes one remote media stream and the tracks it carries.
 * There is one entry per stream in remote_media_stream_tbl->streams[].
 */
typedef struct cc_media_remote_track_table_t_ {
boolean created; /* TRUE once gsmsdp_add_remote_stream() has allocated this slot */
uint32_t num_tracks; /* number of valid entries in track[] */
uint32_t num_tracks_notified; /* num_tracks at the last evOnRemoteStreamAdd
                                 notification; used so the UI is only notified
                                 on the 0 -> non-zero transition */
uint32_t media_stream_id; /* stream id assigned by the vcm layer (pc_stream_id) */
cc_media_track_t track[CC_MAX_TRACKS]; /* per-track id and audio/video flag */
} cc_media_remote_track_table_t;

View File

@ -147,7 +147,7 @@ class TestAgentSend : public TestAgent {
test_pc,
NULL,
test_utils->sts_target(),
audio_->GetStream(), audio_conduit_, audio_flow_, NULL);
audio_->GetStream(), 1, audio_conduit_, audio_flow_, NULL);
audio_pipeline_->Init();
@ -185,7 +185,7 @@ class TestAgentReceive : public TestAgent {
test_pc,
NULL,
test_utils->sts_target(),
audio_->GetStream(),
audio_->GetStream(), 1,
static_cast<mozilla::AudioSessionConduit *>(audio_conduit_.get()),
audio_flow_, NULL);

View File

@ -1344,7 +1344,7 @@ TEST_F(SignalingTest, CreateOfferAddCandidate)
// XXX adam@nostrum.com -- This test seems questionable; we need to think
// through what actually needs to be tested here.
TEST_F(SignalingTest, OfferAnswerReNegotiateOfferAnswerDontReceiveVideoNoVideoStream)
TEST_F(SignalingTest, DISABLED_OfferAnswerReNegotiateOfferAnswerDontReceiveVideoNoVideoStream)
{
sipcc::MediaConstraints aconstraints;
aconstraints.setBooleanConstraint("OfferToReceiveAudio", true, false);
@ -1691,7 +1691,8 @@ TEST_F(SignalingTest, FullChromeHandshake)
ASSERT_NE(answer.find("111 opus/"), std::string::npos);
}
TEST_F(SignalingTest, OfferAllDynamicTypes)
// Disabled pending resolution of bug 818640.
TEST_F(SignalingTest, DISABLED_OfferAllDynamicTypes)
{
sipcc::MediaConstraints constraints;
std::string offer;