Bug 1017888 - Part 2: Testing for renegotiation. r=mt, r=drno

--HG--
extra : rebase_source : 7434ef35ea6294966220f20431941e0790c4221c
Byron Campen [:bwc] 2015-02-10 10:17:03 -08:00
parent eee4f8cd6d
commit 3a680bf820
24 changed files with 2567 additions and 449 deletions


@ -17,154 +17,159 @@ function getBlobContent(blob) {
});
}
var commandsCreateDataChannel = [
function PC_REMOTE_EXPECT_DATA_CHANNEL(test) {
test.pcRemote.expectDataChannel();
},
function PC_LOCAL_CREATE_DATA_CHANNEL(test) {
var channel = test.pcLocal.createDataChannel({});
is(channel.binaryType, "blob", channel + " is of binary type 'blob'");
is(channel.readyState, "connecting", channel + " is in state: 'connecting'");
is(test.pcLocal.signalingState, STABLE,
"Create datachannel does not change signaling state");
}
];
var commandsWaitForDataChannel = [
function PC_LOCAL_VERIFY_DATA_CHANNEL_STATE(test) {
return test.pcLocal.dataChannels[0].opened;
},
function PC_REMOTE_VERIFY_DATA_CHANNEL_STATE(test) {
return test.pcRemote.nextDataChannel.then(channel => channel.opened);
},
];
var commandsCheckDataChannel = [
function SEND_MESSAGE(test) {
var message = "Lorem ipsum dolor sit amet";
return test.send(message).then(result => {
is(result.data, message, "Message correctly transmitted from pcLocal to pcRemote.");
});
},
function SEND_BLOB(test) {
var contents = ["At vero eos et accusam et justo duo dolores et ea rebum."];
var blob = new Blob(contents, { "type" : "text/plain" });
return test.send(blob).then(result => {
ok(result.data instanceof Blob, "Received data is of instance Blob");
is(result.data.size, blob.size, "Received data has the correct size.");
return getBlobContent(result.data);
}).then(recv_contents =>
is(recv_contents, contents, "Received data has the correct content."));
},
function CREATE_SECOND_DATA_CHANNEL(test) {
return test.createDataChannel({ }).then(result => {
var sourceChannel = result.local;
var targetChannel = result.remote;
is(sourceChannel.readyState, "open", sourceChannel + " is in state: 'open'");
is(targetChannel.readyState, "open", targetChannel + " is in state: 'open'");
is(targetChannel.binaryType, "blob", targetChannel + " is of binary type 'blob'");
});
},
function SEND_MESSAGE_THROUGH_LAST_OPENED_CHANNEL(test) {
var channels = test.pcRemote.dataChannels;
var message = "I am the Omega";
return test.send(message).then(result => {
is(channels.indexOf(result.channel), channels.length - 1, "Last channel used");
is(result.data, message, "Received message has the correct content.");
});
},
function SEND_MESSAGE_THROUGH_FIRST_CHANNEL(test) {
var message = "Message through 1st channel";
var options = {
sourceChannel: test.pcLocal.dataChannels[0],
targetChannel: test.pcRemote.dataChannels[0]
};
return test.send(message, options).then(result => {
is(test.pcRemote.dataChannels.indexOf(result.channel), 0, "1st channel used");
is(result.data, message, "Received message has the correct content.");
});
},
function SEND_MESSAGE_BACK_THROUGH_FIRST_CHANNEL(test) {
var message = "Return a message also through 1st channel";
var options = {
sourceChannel: test.pcRemote.dataChannels[0],
targetChannel: test.pcLocal.dataChannels[0]
};
return test.send(message, options).then(result => {
is(test.pcLocal.dataChannels.indexOf(result.channel), 0, "1st channel used");
is(result.data, message, "Return message has the correct content.");
});
},
function CREATE_NEGOTIATED_DATA_CHANNEL(test) {
var options = {
negotiated:true,
id: 5,
protocol: "foo/bar",
ordered: false,
maxRetransmits: 500
};
return test.createDataChannel(options).then(result => {
var sourceChannel2 = result.local;
var targetChannel2 = result.remote;
is(sourceChannel2.readyState, "open", sourceChannel2 + " is in state: 'open'");
is(targetChannel2.readyState, "open", targetChannel2 + " is in state: 'open'");
is(targetChannel2.binaryType, "blob", targetChannel2 + " is of binary type 'blob'");
is(sourceChannel2.id, options.id, sourceChannel2 + " id is:" + sourceChannel2.id);
var reliable = !options.ordered ? false : (options.maxRetransmits || options.maxRetransmitTime);
is(sourceChannel2.protocol, options.protocol, sourceChannel2 + " protocol is:" + sourceChannel2.protocol);
is(sourceChannel2.reliable, reliable, sourceChannel2 + " reliable is:" + sourceChannel2.reliable);
/*
These aren't exposed by IDL yet
is(sourceChannel2.ordered, options.ordered, sourceChannel2 + " ordered is:" + sourceChannel2.ordered);
is(sourceChannel2.maxRetransmits, options.maxRetransmits, sourceChannel2 + " maxRetransmits is:" +
sourceChannel2.maxRetransmits);
is(sourceChannel2.maxRetransmitTime, options.maxRetransmitTime, sourceChannel2 + " maxRetransmitTime is:" +
sourceChannel2.maxRetransmitTime);
*/
is(targetChannel2.id, options.id, targetChannel2 + " id is:" + targetChannel2.id);
is(targetChannel2.protocol, options.protocol, targetChannel2 + " protocol is:" + targetChannel2.protocol);
is(targetChannel2.reliable, reliable, targetChannel2 + " reliable is:" + targetChannel2.reliable);
/*
These aren't exposed by IDL yet
is(targetChannel2.ordered, options.ordered, targetChannel2 + " ordered is:" + targetChannel2.ordered);
is(targetChannel2.maxRetransmits, options.maxRetransmits, targetChannel2 + " maxRetransmits is:" +
targetChannel2.maxRetransmits);
is(targetChannel2.maxRetransmitTime, options.maxRetransmitTime, targetChannel2 + " maxRetransmitTime is:" +
targetChannel2.maxRetransmitTime);
*/
});
},
function SEND_MESSAGE_THROUGH_LAST_OPENED_CHANNEL2(test) {
var channels = test.pcRemote.dataChannels;
var message = "I am the walrus; Goo goo g'joob";
return test.send(message).then(result => {
is(channels.indexOf(result.channel), channels.length - 1, "Last channel used");
is(result.data, message, "Received message has the correct content.");
});
}
];
function addInitialDataChannel(chain) {
chain.insertBefore('PC_LOCAL_CREATE_OFFER', [
function PC_REMOTE_EXPECT_DATA_CHANNEL(test) {
test.pcRemote.expectDataChannel();
},
function PC_LOCAL_CREATE_DATA_CHANNEL(test) {
var channel = test.pcLocal.createDataChannel({});
is(channel.binaryType, "blob", channel + " is of binary type 'blob'");
is(channel.readyState, "connecting", channel + " is in state: 'connecting'");
is(test.pcLocal.signalingState, STABLE,
"Create datachannel does not change signaling state");
}
]);
chain.insertBefore('PC_LOCAL_CHECK_MEDIA_TRACKS', [
function PC_LOCAL_VERIFY_DATA_CHANNEL_STATE(test) {
return test.pcLocal.dataChannels[0].opened;
},
function PC_REMOTE_VERIFY_DATA_CHANNEL_STATE(test) {
return test.pcRemote.nextDataChannel.then(channel => channel.opened);
}
]);
chain.insertBefore('PC_LOCAL_CREATE_OFFER', commandsCreateDataChannel);
chain.insertBefore('PC_LOCAL_CHECK_MEDIA_TRACKS', commandsWaitForDataChannel);
chain.removeAfter('PC_REMOTE_CHECK_ICE_CONNECTIONS');
chain.append([
function SEND_MESSAGE(test) {
var message = "Lorem ipsum dolor sit amet";
return test.send(message).then(result => {
is(result.data, message, "Message correctly transmitted from pcLocal to pcRemote.");
});
},
function SEND_BLOB(test) {
var contents = ["At vero eos et accusam et justo duo dolores et ea rebum."];
var blob = new Blob(contents, { "type" : "text/plain" });
return test.send(blob).then(result => {
ok(result.data instanceof Blob, "Received data is of instance Blob");
is(result.data.size, blob.size, "Received data has the correct size.");
return getBlobContent(result.data);
}).then(recv_contents =>
is(recv_contents, contents, "Received data has the correct content."));
},
function CREATE_SECOND_DATA_CHANNEL(test) {
return test.createDataChannel({ }).then(result => {
var sourceChannel = result.local;
var targetChannel = result.remote;
is(sourceChannel.readyState, "open", sourceChannel + " is in state: 'open'");
is(targetChannel.readyState, "open", targetChannel + " is in state: 'open'");
is(targetChannel.binaryType, "blob", targetChannel + " is of binary type 'blob'");
});
},
function SEND_MESSAGE_THROUGH_LAST_OPENED_CHANNEL(test) {
var channels = test.pcRemote.dataChannels;
var message = "I am the Omega";
return test.send(message).then(result => {
is(channels.indexOf(result.channel), channels.length - 1, "Last channel used");
is(result.data, message, "Received message has the correct content.");
});
},
function SEND_MESSAGE_THROUGH_FIRST_CHANNEL(test) {
var message = "Message through 1st channel";
var options = {
sourceChannel: test.pcLocal.dataChannels[0],
targetChannel: test.pcRemote.dataChannels[0]
};
return test.send(message, options).then(result => {
is(test.pcRemote.dataChannels.indexOf(result.channel), 0, "1st channel used");
is(result.data, message, "Received message has the correct content.");
});
},
function SEND_MESSAGE_BACK_THROUGH_FIRST_CHANNEL(test) {
var message = "Return a message also through 1st channel";
var options = {
sourceChannel: test.pcRemote.dataChannels[0],
targetChannel: test.pcLocal.dataChannels[0]
};
return test.send(message, options).then(result => {
is(test.pcLocal.dataChannels.indexOf(result.channel), 0, "1st channel used");
is(result.data, message, "Return message has the correct content.");
});
},
function CREATE_NEGOTIATED_DATA_CHANNEL(test) {
var options = {
negotiated:true,
id: 5,
protocol: "foo/bar",
ordered: false,
maxRetransmits: 500
};
return test.createDataChannel(options).then(result => {
var sourceChannel2 = result.local;
var targetChannel2 = result.remote;
is(sourceChannel2.readyState, "open", sourceChannel2 + " is in state: 'open'");
is(targetChannel2.readyState, "open", targetChannel2 + " is in state: 'open'");
is(targetChannel2.binaryType, "blob", targetChannel2 + " is of binary type 'blob'");
is(sourceChannel2.id, options.id, sourceChannel2 + " id is:" + sourceChannel2.id);
var reliable = !options.ordered ? false : (options.maxRetransmits || options.maxRetransmitTime);
is(sourceChannel2.protocol, options.protocol, sourceChannel2 + " protocol is:" + sourceChannel2.protocol);
is(sourceChannel2.reliable, reliable, sourceChannel2 + " reliable is:" + sourceChannel2.reliable);
/*
These aren't exposed by IDL yet
is(sourceChannel2.ordered, options.ordered, sourceChannel2 + " ordered is:" + sourceChannel2.ordered);
is(sourceChannel2.maxRetransmits, options.maxRetransmits, sourceChannel2 + " maxRetransmits is:" +
sourceChannel2.maxRetransmits);
is(sourceChannel2.maxRetransmitTime, options.maxRetransmitTime, sourceChannel2 + " maxRetransmitTime is:" +
sourceChannel2.maxRetransmitTime);
*/
is(targetChannel2.id, options.id, targetChannel2 + " id is:" + targetChannel2.id);
is(targetChannel2.protocol, options.protocol, targetChannel2 + " protocol is:" + targetChannel2.protocol);
is(targetChannel2.reliable, reliable, targetChannel2 + " reliable is:" + targetChannel2.reliable);
/*
These aren't exposed by IDL yet
is(targetChannel2.ordered, options.ordered, targetChannel2 + " ordered is:" + targetChannel2.ordered);
is(targetChannel2.maxRetransmits, options.maxRetransmits, targetChannel2 + " maxRetransmits is:" +
targetChannel2.maxRetransmits);
is(targetChannel2.maxRetransmitTime, options.maxRetransmitTime, targetChannel2 + " maxRetransmitTime is:" +
targetChannel2.maxRetransmitTime);
*/
});
},
function SEND_MESSAGE_THROUGH_LAST_OPENED_CHANNEL2(test) {
var channels = test.pcRemote.dataChannels;
var message = "I am the walrus; Goo goo g'joob";
return test.send(message).then(result => {
is(channels.indexOf(result.channel), channels.length - 1, "Last channel used");
is(result.data, message, "Received message has the correct content.");
});
}
]);
chain.append(commandsCheckDataChannel);
}
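
For orientation, a minimal sketch of how these shared command arrays are meant to be composed into a renegotiation test; it mirrors test_peerConnection_addDataChannel.html later in this commit and is illustrative rather than part of the patch:

runNetworkTest(function (options) {
  var test = new PeerConnectionTest(options);
  // Renegotiate by opening a data channel, then verify messaging still works.
  addRenegotiation(test.chain,
                   commandsCreateDataChannel,
                   commandsCheckDataChannel);
  // Wait for the channel to open before the second round of track checks:
  // all=false inserts only once, start=1 skips the first occurrence of
  // PC_LOCAL_CHECK_MEDIA_TRACKS and targets the renegotiated one.
  test.chain.insertBefore('PC_LOCAL_CHECK_MEDIA_TRACKS',
                          commandsWaitForDataChannel,
                          false,
                          1);
  test.setMediaConstraints([{audio: true}], [{audio: true}]);
  test.run();
});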


@ -363,12 +363,19 @@ CommandChain.prototype = {
/**
* Returns the index of the specified command in the chain.
* @param {start} Optional param specifying the index at which the search will
* start. If not specified, the search starts at index 0.
*/
indexOf: function(functionOrName) {
indexOf: function(functionOrName, start) {
start = start || 0;
if (typeof functionOrName === 'string') {
return this.commands.findIndex(f => f.name === functionOrName);
var index = this.commands.slice(start).findIndex(f => f.name === functionOrName);
if (index !== -1) {
index += start;
}
return index;
}
return this.commands.indexOf(functionOrName);
return this.commands.indexOf(functionOrName, start);
},
/**
@ -379,20 +386,35 @@ CommandChain.prototype = {
},
/**
* Inserts the new commands before the specified command.
* Inserts the new commands after every occurrence of the specified command.
*/
insertBefore: function(functionOrName, commands) {
this._insertHelper(functionOrName, commands, 0);
insertAfterEach: function(functionOrName, commands) {
this._insertHelper(functionOrName, commands, 1, true);
},
_insertHelper: function(functionOrName, commands, delta) {
var index = this.indexOf(functionOrName);
/**
* Inserts the new commands before the specified command.
*/
insertBefore: function(functionOrName, commands, all, start) {
this._insertHelper(functionOrName, commands, 0, all, start);
},
if (index >= 0) {
this.commands = [].concat(
this.commands.slice(0, index + delta),
commands,
this.commands.slice(index + delta));
_insertHelper: function(functionOrName, commands, delta, all, start) {
var index = this.indexOf(functionOrName);
start = start || 0;
for (; index !== -1; index = this.indexOf(functionOrName, index)) {
if (!start) {
this.commands = [].concat(
this.commands.slice(0, index + delta),
commands,
this.commands.slice(index + delta));
if (!all) {
break;
}
} else {
start -= 1;
}
index += (commands.length + 1);
}
},
@ -460,7 +482,7 @@ CommandChain.prototype = {
*/
filterOut: function (id_match) {
this.commands = this.commands.filter(c => !id_match.test(c.name));
}
},
};
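
A short usage sketch of the extended chain API, assuming a CommandChain instance named chain that, after renegotiation is added, contains the offer/answer commands twice (illustrative, not part of the patch):

// Find the second occurrence of a command by restarting the search just past
// the first hit.
var first = chain.indexOf('PC_LOCAL_CHECK_MEDIA_TRACKS');
var second = chain.indexOf('PC_LOCAL_CHECK_MEDIA_TRACKS', first + 1);
// Run a command after every occurrence of PC_LOCAL_CREATE_OFFER, e.g. to strip
// BUNDLE from both the initial and the renegotiated offer.
chain.insertAfterEach('PC_LOCAL_CREATE_OFFER', PC_LOCAL_REMOVE_BUNDLE_FROM_OFFER);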


@ -151,6 +151,31 @@ skip-if = (toolkit == 'gonk' || (e10s && debug)) # b2g (Bug 1059867) or fd exhau
[test_peerConnection_twoVideoStreams.html]
skip-if = (toolkit == 'gonk' || (e10s && debug)) # b2g (Bug 1059867) or fd exhaustion on e10s debug intermittent (Bug 1126078)
[test_peerConnection_addSecondAudioStream.html]
skip-if = toolkit == 'gonk' # b2g (Bug 1059867)
[test_peerConnection_answererAddSecondAudioStream.html]
skip-if = toolkit == 'gonk' # b2g (Bug 1059867)
[test_peerConnection_removeAudioTrack.html]
skip-if = toolkit == 'gonk' # b2g (Bug 1059867)
[test_peerConnection_removeThenAddAudioTrack.html]
skip-if = toolkit == 'gonk' # b2g (Bug 1059867)
[test_peerConnection_addSecondVideoStream.html]
skip-if = toolkit == 'gonk' # b2g (Bug 1059867)
[test_peerConnection_removeVideoTrack.html]
skip-if = toolkit == 'gonk' # b2g (Bug 1059867)
[test_peerConnection_removeThenAddVideoTrack.html]
skip-if = toolkit == 'gonk' # b2g (Bug 1059867)
[test_peerConnection_addSecondAudioStreamNoBundle.html]
skip-if = toolkit == 'gonk' # b2g (Bug 1059867)
[test_peerConnection_removeThenAddAudioTrackNoBundle.html]
skip-if = toolkit == 'gonk' # b2g (Bug 1059867)
[test_peerConnection_addSecondVideoStreamNoBundle.html]
skip-if = toolkit == 'gonk' # b2g (Bug 1059867)
[test_peerConnection_removeThenAddVideoTrackNoBundle.html]
skip-if = toolkit == 'gonk' # b2g (Bug 1059867)
[test_peerConnection_addDataChannel.html]
skip-if = toolkit == 'gonk' # b2g (Bug 1059867)
[test_peerConnection_addDataChannelNoBundle.html]
skip-if = toolkit == 'gonk' # b2g (Bug 1059867)
# Bug 950317: Hack for making a cleanup hook after finishing all WebRTC cases
[test_zmedia_cleanup.html]


@ -121,6 +121,12 @@ function removeVP8(sdp) {
return updated_sdp;
}
var makeDefaultCommands = () => {
return [].concat(commandsPeerConnectionInitial,
commandsGetUserMedia,
commandsPeerConnectionOfferAnswer);
};
/**
* This class handles tests for peer connections.
*
@ -142,7 +148,7 @@ function removeVP8(sdp) {
function PeerConnectionTest(options) {
// If no options are specified make it an empty object
options = options || { };
options.commands = options.commands || commandsPeerConnection;
options.commands = options.commands || makeDefaultCommands();
options.is_local = "is_local" in options ? options.is_local : true;
options.is_remote = "is_remote" in options ? options.is_remote : true;
@ -740,8 +746,6 @@ function PeerConnectionWrapper(label, configuration, h264) {
this.dataChannels = [ ];
this.addStreamCounter = {audio: 0, video: 0 };
this._local_ice_candidates = [];
this._remote_ice_candidates = [];
this.holdIceCandidates = new Promise(r => this.releaseIceCandidates = r);
@ -749,6 +753,16 @@ function PeerConnectionWrapper(label, configuration, h264) {
this.remoteRequiresTrickleIce = false;
this.localMediaElements = [];
this.expectedLocalTrackTypesById = {};
this.expectedRemoteTrackTypesById = {};
this.observedRemoteTrackTypesById = {};
this.disableRtpCountChecking = false;
this.negotiationNeededFired = false;
this.iceCheckingRestartExpected = false;
this.h264 = typeof h264 !== "undefined" ? true : false;
info("Creating " + this);
@ -769,27 +783,6 @@ function PeerConnectionWrapper(label, configuration, h264) {
});
};
/**
* Callback for native peer connection 'onaddstream' events.
*
* @param {Object} event
* Event data which includes the stream to be added
*/
this._pc.onaddstream = event => {
info(this + ": 'onaddstream' event fired for " + JSON.stringify(event.stream));
var type = '';
if (event.stream.getAudioTracks().length > 0) {
type = 'audio';
this.addStreamCounter.audio += this.countTracksInStreams('audio', [event.stream]);
}
if (event.stream.getVideoTracks().length > 0) {
type += 'video';
this.addStreamCounter.video += this.countTracksInStreams('video', [event.stream]);
}
this.attachMedia(event.stream, type, 'remote');
};
createOneShotEventWrapper(this, this._pc, 'datachannel');
this._pc.addEventListener('datachannel', e => {
var wrapper = new DataChannelWrapper(e.channel, this);
@ -797,6 +790,7 @@ function PeerConnectionWrapper(label, configuration, h264) {
});
createOneShotEventWrapper(this, this._pc, 'signalingstatechange');
createOneShotEventWrapper(this, this._pc, 'negotiationneeded');
}
PeerConnectionWrapper.prototype = {
@ -888,6 +882,12 @@ PeerConnectionWrapper.prototype = {
is(sender.track, track, "addTrack returns sender");
});
}
stream.getTracks().forEach(track => {
ok(track.id, "track has id");
ok(track.kind, "track has kind");
this.expectedLocalTrackTypesById[track.id] = track.kind;
});
}
var element = createMediaElement(type, this.label + '_' + side + this.streams.length);
@ -902,6 +902,12 @@ PeerConnectionWrapper.prototype = {
}
},
removeSender : function(index) {
var sender = this._pc.getSenders()[index];
delete this.expectedLocalTrackTypesById[sender.track.id];
this._pc.removeTrack(sender);
},
/**
* Requests all the media streams as specified in the constrains property.
*
@ -1068,6 +1074,59 @@ PeerConnectionWrapper.prototype = {
});
},
/**
* Checks whether a given track is expected, has not been observed yet, and
* is of the correct type. Then, moves the track from
* |expectedTrackTypesById| to |observedTrackTypesById|.
*/
checkTrackIsExpected : function(track,
expectedTrackTypesById,
observedTrackTypesById) {
ok(expectedTrackTypesById[track.id], "track id " + track.id + " was expected");
ok(!observedTrackTypesById[track.id], "track id " + track.id + " was not yet observed");
var observedKind = track.kind;
var expectedKind = expectedTrackTypesById[track.id];
is(observedKind, expectedKind,
"track id " + track.id + " was of kind " +
observedKind + ", which matches " + expectedKind);
observedTrackTypesById[track.id] = expectedTrackTypesById[track.id];
delete expectedTrackTypesById[track.id];
},
setupAddStreamEventHandler: function() {
var resolveAllAddStreamEventsDone;
// checkMediaTracks waits on this promise later on in the test.
this.allAddStreamEventsDonePromise =
new Promise(resolve => resolveAllAddStreamEventsDone = resolve);
this._pc.addEventListener('addstream', event => {
info(this + ": 'onaddstream' event fired for " + JSON.stringify(event.stream));
// TODO(bug 1130185): We need to handle addtrack events once we start
// testing addTrack on pre-existing streams.
event.stream.getTracks().forEach(track => {
this.checkTrackIsExpected(track,
this.expectedRemoteTrackTypesById,
this.observedRemoteTrackTypesById);
});
if (Object.keys(this.expectedRemoteTrackTypesById).length === 0) {
resolveAllAddStreamEventsDone();
}
var type = '';
if (event.stream.getAudioTracks().length > 0) {
type = 'audio';
}
if (event.stream.getVideoTracks().length > 0) {
type += 'video';
}
this.attachMedia(event.stream, type, 'remote');
});
},
/**
* Either adds a given ICE candidate right away or stores it to be added
* later, depending on the state of the PeerConnection.
@ -1148,7 +1207,14 @@ PeerConnectionWrapper.prototype = {
var newstate = this._pc.iceConnectionState;
var oldstate = this.iceConnectionLog[this.iceConnectionLog.length - 1]
if (Object.keys(iceStateTransitions).indexOf(oldstate) != -1) {
ok(iceStateTransitions[oldstate].indexOf(newstate) != -1, this + ": legal ICE state transition from " + oldstate + " to " + newstate);
if (this.iceCheckingRestartExpected) {
is(newstate, "checking",
"iceconnectionstate event \'" + newstate +
"\' matches expected state \'checking\'");
this.iceCheckingRestartExpected = false;
} else {
ok(iceStateTransitions[oldstate].indexOf(newstate) != -1, this + ": legal ICE state transition from " + oldstate + " to " + newstate);
}
} else {
ok(false, this + ": old ICE state " + oldstate + " missing in ICE transition array");
}
@ -1284,23 +1350,25 @@ PeerConnectionWrapper.prototype = {
}
},
/*
* Counts the amount of tracks of the given type in a set of streams.
*
* @param type audio|video
* @param streams
* An array of streams (as returned by getLocalStreams()) to be
* examined.
*/
countTracksInStreams: function(type, streams) {
if (!Array.isArray(streams)) {
return 0;
}
var f = (type === 'video') ? "getVideoTracks" : "getAudioTracks";
checkLocalMediaTracks : function() {
var observedLocalTrackTypesById = {};
// We do not want to empty out this.expectedLocalTrackTypesById, so make a
// copy.
var expectedLocalTrackTypesById =
JSON.parse(JSON.stringify((this.expectedLocalTrackTypesById)));
info(this + " Checking local tracks " +
JSON.stringify(expectedLocalTrackTypesById));
this._pc.getLocalStreams().forEach(stream => {
stream.getTracks().forEach(track => {
this.checkTrackIsExpected(track,
expectedLocalTrackTypesById,
observedLocalTrackTypesById);
});
});
return streams.reduce((count, st) => {
return count + st[f]().length;
}, 0);
Object.keys(expectedLocalTrackTypesById).forEach(id => {
ok(false, this + " local id " + id + " was observed");
});
},
/**
@ -1309,42 +1377,18 @@ PeerConnectionWrapper.prototype = {
* @param {object} constraints
* The media constraints of the remote peer connection object
*/
checkMediaTracks : function(remoteConstraints) {
var waitForExpectedTracks = type => {
var outstandingCount = this.countTracksInConstraint(type, remoteConstraints);
outstandingCount -= this.addStreamCounter[type];
if (outstandingCount <= 0) {
return Promise.resolve();
}
checkMediaTracks : function() {
this.checkLocalMediaTracks();
return new Promise(resolve => {
this._pc.addEventListener('addstream', e => {
outstandingCount -= this.countTracksInStreams(type, [e.stream]);
if (outstandingCount <= 0) {
resolve();
}
});
});
};
info(this + " Checking remote tracks " +
JSON.stringify(this.expectedRemoteTrackTypesById));
var checkTrackCounts = (side, streams, constraints) => {
['audio', 'video'].forEach(type => {
var actual = this.countTracksInStreams(type, streams);
var expected = this.countTracksInConstraint(type, constraints);
is(actual, expected, this + ' has ' + actual + ' ' +
side + ' ' + type + ' tracks');
});
};
// No tracks are expected
if (Object.keys(this.expectedRemoteTrackTypesById).length === 0) {
return;
}
info(this + " checkMediaTracks() got called before onAddStream fired");
var checkPromise = Promise.all([
waitForExpectedTracks('audio'),
waitForExpectedTracks('video')
]).then(() => {
checkTrackCounts('local', this._pc.getLocalStreams(), this.constraints);
checkTrackCounts('remote', this._pc.getRemoteStreams(), remoteConstraints);
});
return timerGuard(checkPromise, 60000, "onaddstream never fired");
return timerGuard(this.allAddStreamEventsDonePromise, 60000, "onaddstream never fired");
},
checkMsids: function() {
@ -1515,10 +1559,12 @@ PeerConnectionWrapper.prototype = {
if(res.type == "outboundrtp") {
ok(rem.type == "inboundrtp", "Rtcp is inbound");
ok(rem.packetsReceived !== undefined, "Rtcp packetsReceived");
ok(rem.packetsReceived <= res.packetsSent, "No more than sent");
ok(rem.packetsLost !== undefined, "Rtcp packetsLost");
ok(rem.bytesReceived >= rem.packetsReceived, "Rtcp bytesReceived");
ok(rem.bytesReceived <= res.bytesSent, "No more than sent bytes");
if (!this.disableRtpCountChecking) {
ok(rem.packetsReceived <= res.packetsSent, "No more than sent packets");
ok(rem.bytesReceived <= res.bytesSent, "No more than sent bytes");
}
ok(rem.jitter !== undefined, "Rtcp jitter");
ok(rem.mozRtt !== undefined, "Rtcp rtt");
ok(rem.mozRtt >= 0, "Rtcp rtt " + rem.mozRtt + " >= 0");
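
The per-track bookkeeping added above (checkTrackIsExpected, setupAddStreamEventHandler, checkMediaTracks) can be summarized with a minimal sketch; the track ids are made up, and in the real tests they come from getUserMedia:

var wrapper = test.pcLocal;  // a PeerConnectionWrapper
wrapper.expectedRemoteTrackTypesById = { "t1": "audio", "t2": "video" };
// When an 'addstream' event delivers a track, checkTrackIsExpected() asserts
// that its id was expected and its kind matches, then moves the entry from the
// expected map to the observed map.
wrapper.checkTrackIsExpected({ id: "t1", kind: "audio" },
                             wrapper.expectedRemoteTrackTypesById,
                             wrapper.observedRemoteTrackTypesById);
// expectedRemoteTrackTypesById is now { "t2": "video" }; once it is empty,
// allAddStreamEventsDonePromise resolves and checkMediaTracks() can return.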


@ -63,17 +63,19 @@ function dumpSdp(test) {
}
function waitForIceConnected(test, pc) {
if (pc.isIceConnected()) {
info(pc + ": ICE connection state log: " + pc.iceConnectionLog);
ok(true, pc + ": ICE is in connected state");
return Promise.resolve();
}
if (!pc.iceCheckingRestartExpected) {
if (pc.isIceConnected()) {
info(pc + ": ICE connection state log: " + pc.iceConnectionLog);
ok(true, pc + ": ICE is in connected state");
return Promise.resolve();
}
if (!pc.isIceConnectionPending()) {
dumpSdp(test);
var details = pc + ": ICE is already in bad state: " + pc.iceConnectionState;
ok(false, details);
return Promise.reject(new Error(details));
if (!pc.isIceConnectionPending()) {
dumpSdp(test);
var details = pc + ": ICE is already in bad state: " + pc.iceConnectionState;
ok(false, details);
return Promise.reject(new Error(details));
}
}
return pc.waitForIceConnected()
@ -135,7 +137,9 @@ function checkTrackStats(pc, audio, outbound) {
var checkAllTrackStats = pc =>
Promise.all([0, 1, 2, 3].map(i => checkTrackStats(pc, i & 1, i & 2)));
var commandsPeerConnection = [
// Commands run once at the beginning of each test, even when performing a
// renegotiation test.
var commandsPeerConnectionInitial = [
function PC_SETUP_SIGNALING_CLIENT(test) {
if (test.steeplechase) {
setTimeout(() => {
@ -169,12 +173,12 @@ var commandsPeerConnection = [
test.pcRemote.logSignalingState();
},
function PC_LOCAL_GUM(test) {
return test.pcLocal.getAllUserMedia(test.pcLocal.constraints);
function PC_LOCAL_SETUP_ADDSTREAM_HANDLER(test) {
test.pcLocal.setupAddStreamEventHandler();
},
function PC_REMOTE_GUM(test) {
return test.pcRemote.getAllUserMedia(test.pcRemote.constraints);
function PC_REMOTE_SETUP_ADDSTREAM_HANDLER(test) {
test.pcRemote.setupAddStreamEventHandler();
},
function PC_LOCAL_CHECK_INITIAL_SIGNALINGSTATE(test) {
@ -197,6 +201,34 @@ var commandsPeerConnection = [
"Initial remote ICE connection state is 'new'");
},
];
var commandsGetUserMedia = [
function PC_LOCAL_GUM(test) {
return test.pcLocal.getAllUserMedia(test.pcLocal.constraints);
},
function PC_REMOTE_GUM(test) {
return test.pcRemote.getAllUserMedia(test.pcRemote.constraints);
},
];
var commandsBeforeRenegotiation = [
function PC_LOCAL_SETUP_NEGOTIATION_CALLBACK(test) {
test.pcLocal.onnegotiationneeded = event => {
test.pcLocal.negotiationNeededFired = true;
};
},
];
var commandsAfterRenegotiation = [
function PC_LOCAL_CHECK_NEGOTIATION_CALLBACK(test) {
ok(test.pcLocal.negotiationNeededFired, "Expected negotiationneeded event");
test.pcLocal.negotiationNeededFired = false;
},
];
var commandsPeerConnectionOfferAnswer = [
function PC_LOCAL_SETUP_ICE_HANDLER(test) {
test.pcLocal.setupIceCandidateHandler(test);
if (test.steeplechase) {
@ -215,6 +247,56 @@ var commandsPeerConnection = [
}
},
function PC_LOCAL_STEEPLECHASE_SIGNAL_EXPECTED_LOCAL_TRACKS(test) {
if (test.steeplechase) {
send_message({"type": "local_expected_tracks",
"expected_tracks": test.pcLocal.expectedLocalTrackTypesById});
}
},
function PC_REMOTE_STEEPLECHASE_SIGNAL_EXPECTED_LOCAL_TRACKS(test) {
if (test.steeplechase) {
send_message({"type": "remote_expected_tracks",
"expected_tracks": test.pcRemote.expectedLocalTrackTypesById});
}
},
function PC_LOCAL_GET_EXPECTED_REMOTE_TRACKS(test) {
if (test.steeplechase) {
return test.getSignalingMessage("remote_expected_tracks").then(
message => {
test.pcLocal.expectedRemoteTrackTypesById = message.expected_tracks;
});
} else {
// Deep copy, as similar to steeplechase as possible
test.pcLocal.expectedRemoteTrackTypesById =
JSON.parse(JSON.stringify((test.pcRemote.expectedLocalTrackTypesById)));
}
// Remove what we've already observed
Object.keys(test.pcLocal.observedRemoteTrackTypesById).forEach(id => {
delete test.pcLocal.expectedRemoteTrackTypesById[id];
});
},
function PC_REMOTE_GET_EXPECTED_REMOTE_TRACKS(test) {
if (test.steeplechase) {
return test.getSignalingMessage("local_expected_tracks").then(
message => {
test.pcRemote.expectedRemoteTrackTypesById = message.expected_tracks;
});
} else {
// Deep copy, as similar to steeplechase as possible
test.pcRemote.expectedRemoteTrackTypesById =
JSON.parse(JSON.stringify((test.pcLocal.expectedLocalTrackTypesById)));
}
// Remove what we've already observed
Object.keys(test.pcRemote.observedRemoteTrackTypesById).forEach(id => {
delete test.pcRemote.expectedRemoteTrackTypesById[id];
});
},
function PC_LOCAL_CREATE_OFFER(test) {
return test.createOffer(test.pcLocal).then(offer => {
is(test.pcLocal.signalingState, STABLE,
@ -390,11 +472,11 @@ var commandsPeerConnection = [
},
function PC_LOCAL_CHECK_MEDIA_TRACKS(test) {
return test.pcLocal.checkMediaTracks(test._answer_constraints);
return test.pcLocal.checkMediaTracks();
},
function PC_REMOTE_CHECK_MEDIA_TRACKS(test) {
return test.pcRemote.checkMediaTracks(test._offer_constraints);
return test.pcRemote.checkMediaTracks();
},
function PC_LOCAL_CHECK_MEDIA_FLOW_PRESENT(test) {
@ -468,3 +550,32 @@ var commandsPeerConnection = [
return checkAllTrackStats(test.pcRemote);
}
];
function PC_LOCAL_REMOVE_BUNDLE_FROM_OFFER(test) {
test.originalOffer.sdp = test.originalOffer.sdp.replace(
/a=group:BUNDLE .*\r\n/g,
""
);
info("Updated no bundle offer: " + JSON.stringify(test.originalOffer));
};
var addRenegotiation = (chain, commands, checks) => {
chain.append(commandsBeforeRenegotiation);
chain.append(commands);
chain.append(commandsAfterRenegotiation);
chain.append(commandsPeerConnectionOfferAnswer);
if (checks) {
chain.append(checks);
}
};
var addRenegotiationAnswerer = (chain, commands, checks) => {
chain.append(function SWAP_PC_LOCAL_PC_REMOTE(test) {
var temp = test.pcLocal;
test.pcLocal = test.pcRemote;
test.pcRemote = temp;
});
addRenegotiation(chain, commands, checks);
};
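
To make the intended composition explicit, a sketch of what addRenegotiation() does to a default chain; the added command here is hypothetical:

// test.chain starts out as makeDefaultCommands(), i.e.
// commandsPeerConnectionInitial + commandsGetUserMedia +
// commandsPeerConnectionOfferAnswer.
addRenegotiation(test.chain,
  [
    function PC_LOCAL_ADD_SECOND_STREAM(test) {
      return test.pcLocal.getAllUserMedia([{audio: true}]);
    },
  ]);
// The chain now continues past the first offer/answer round with
// commandsBeforeRenegotiation, the commands passed in,
// commandsAfterRenegotiation, and a second copy of
// commandsPeerConnectionOfferAnswer, which re-runs the track, media-flow, and
// stats checks against the renegotiated session. addRenegotiationAnswerer()
// additionally swaps pcLocal and pcRemote first, so the original answerer
// drives the new offer.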


@ -0,0 +1,34 @@
<!DOCTYPE HTML>
<html>
<head>
<script type="application/javascript" src="pc.js"></script>
</head>
<body>
<pre id="test">
<script type="application/javascript">
createHTML({
bug: "1017888",
title: "Renegotiation: add DataChannel"
});
var test;
runNetworkTest(function (options) {
test = new PeerConnectionTest(options);
addRenegotiation(test.chain,
commandsCreateDataChannel,
commandsCheckDataChannel);
// Insert before the second PC_LOCAL_CHECK_MEDIA_TRACKS
test.chain.insertBefore('PC_LOCAL_CHECK_MEDIA_TRACKS',
commandsWaitForDataChannel,
false,
1);
test.setMediaConstraints([{audio: true}], [{audio: true}]);
test.run();
});
</script>
</pre>
</body>
</html>


@ -0,0 +1,46 @@
<!DOCTYPE HTML>
<html>
<head>
<script type="application/javascript" src="pc.js"></script>
</head>
<body>
<pre id="test">
<script type="application/javascript">
createHTML({
bug: "1017888",
title: "Renegotiation: add DataChannel"
});
var test;
runNetworkTest(function (options) {
test = new PeerConnectionTest(options);
addRenegotiation(test.chain,
commandsCreateDataChannel.concat(
[
function PC_LOCAL_EXPECT_ICE_CHECKING(test) {
test.pcLocal.iceCheckingRestartExpected = true;
},
function PC_REMOTE_EXPECT_ICE_CHECKING(test) {
test.pcRemote.iceCheckingRestartExpected = true;
},
]
),
commandsCheckDataChannel);
test.chain.insertAfterEach('PC_LOCAL_CREATE_OFFER',
PC_LOCAL_REMOVE_BUNDLE_FROM_OFFER);
// Insert before the second PC_LOCAL_CHECK_MEDIA_TRACKS
test.chain.insertBefore('PC_LOCAL_CHECK_MEDIA_TRACKS',
commandsWaitForDataChannel,
false,
1);
test.setMediaConstraints([{audio: true}], [{audio: true}]);
test.run();
});
</script>
</pre>
</body>
</html>


@ -7,49 +7,24 @@
<pre id="test">
<script type="application/javascript">
createHTML({
bug: "1091242",
bug: "1017888",
title: "Renegotiation: add second audio stream"
});
var test;
runNetworkTest(function (options) {
test = new PeerConnectionTest(options);
test.chain.append([
function PC_LOCAL_SETUP_NEGOTIATION_CALLBACK(test) {
test.pcLocal.onNegotiationneededFired = false;
test.pcLocal._pc.onnegotiationneeded = anEvent => {
info("pcLocal.onnegotiationneeded fired");
test.pcLocal.onNegotiationneededFired = true;
};
},
function PC_LOCAL_ADD_SECOND_STREAM(test) {
return test.pcLocal.getAllUserMedia([{audio: true}]);
},
function PC_LOCAL_CREATE_NEW_OFFER(test) {
ok(test.pcLocal.onNegotiationneededFired, "onnegotiationneeded");
return test.createOffer(test.pcLocal).then(offer => {
test._new_offer = offer;
});
},
function PC_LOCAL_SET_NEW_LOCAL_DESCRIPTION(test) {
return test.setLocalDescription(test.pcLocal, test._new_offer, HAVE_LOCAL_OFFER);
},
function PC_REMOTE_SET_NEW_REMOTE_DESCRIPTION(test) {
return test.setRemoteDescription(test.pcRemote, test._new_offer, HAVE_REMOTE_OFFER);
},
function PC_REMOTE_CREATE_NEW_ANSWER(test) {
return test.createAnswer(test.pcRemote).then(answer => {
test._new_answer = answer;
});
},
function PC_REMOTE_SET_NEW_LOCAL_DESCRIPTION(test) {
return test.setLocalDescription(test.pcRemote, test._new_answer, STABLE);
},
function PC_LOCAL_SET_NEW_REMOTE_DESCRIPTION(test) {
return test.setRemoteDescription(test.pcLocal, test._new_answer, STABLE);
}
// TODO(bug 1093835): figure out how to verify if media flows through the new stream
]);
addRenegotiation(test.chain,
[
function PC_LOCAL_ADD_SECOND_STREAM(test) {
test.setMediaConstraints([{audio: true}, {audio: true}],
[{audio: true}]);
return test.pcLocal.getAllUserMedia([{audio: true}]);
},
]
);
// TODO(bug 1093835): figure out how to verify if media flows through the new stream
test.setMediaConstraints([{audio: true}], [{audio: true}]);
test.run();
});


@ -0,0 +1,43 @@
<!DOCTYPE HTML>
<html>
<head>
<script type="application/javascript" src="pc.js"></script>
</head>
<body>
<pre id="test">
<script type="application/javascript">
createHTML({
bug: "1017888",
title: "Renegotiation: add second audio stream, no bundle"
});
var test;
runNetworkTest(function (options) {
test = new PeerConnectionTest(options);
addRenegotiation(test.chain,
[
function PC_LOCAL_ADD_SECOND_STREAM(test) {
test.setMediaConstraints([{audio: true}, {audio: true}],
[{audio: true}]);
// Since this is a NoBundle variant, adding a track will cause us to
// go back to checking.
test.pcLocal.iceCheckingRestartExpected = true;
return test.pcLocal.getAllUserMedia([{audio: true}]);
},
function PC_REMOTE_EXPECT_ICE_CHECKING(test) {
test.pcRemote.iceCheckingRestartExpected = true;
},
]
);
test.chain.insertAfterEach('PC_LOCAL_CREATE_OFFER',
PC_LOCAL_REMOVE_BUNDLE_FROM_OFFER);
// TODO(bug 1093835): figure out how to verify if media flows through the new stream
test.setMediaConstraints([{audio: true}], [{audio: true}]);
test.run();
});
</script>
</pre>
</body>
</html>


@ -0,0 +1,34 @@
<!DOCTYPE HTML>
<html>
<head>
<script type="application/javascript" src="pc.js"></script>
</head>
<body>
<pre id="test">
<script type="application/javascript">
createHTML({
bug: "1017888",
title: "Renegotiation: add second video stream"
});
var test;
runNetworkTest(function (options) {
test = new PeerConnectionTest(options);
addRenegotiation(test.chain,
[
function PC_LOCAL_ADD_SECOND_STREAM(test) {
test.setMediaConstraints([{video: true}, {video: true}],
[{video: true}]);
return test.pcLocal.getAllUserMedia([{video: true}]);
},
]
);
// TODO(bug 1093835): figure out how to verify if media flows through the new stream
test.setMediaConstraints([{video: true}], [{video: true}]);
test.run();
});
</script>
</pre>
</body>
</html>


@ -0,0 +1,43 @@
<!DOCTYPE HTML>
<html>
<head>
<script type="application/javascript" src="pc.js"></script>
</head>
<body>
<pre id="test">
<script type="application/javascript">
createHTML({
bug: "1017888",
title: "Renegotiation: add second video stream, no bundle"
});
var test;
runNetworkTest(function (options) {
test = new PeerConnectionTest(options);
addRenegotiation(test.chain,
[
function PC_LOCAL_ADD_SECOND_STREAM(test) {
test.setMediaConstraints([{video: true}, {video: true}],
[{video: true}]);
// Since this is a NoBundle variant, adding a track will cause us to
// go back to checking.
test.pcLocal.iceCheckingRestartExpected = true;
return test.pcLocal.getAllUserMedia([{video: true}]);
},
function PC_REMOTE_EXPECT_ICE_CHECKING(test) {
test.pcRemote.iceCheckingRestartExpected = true;
},
]
);
test.chain.insertAfterEach('PC_LOCAL_CREATE_OFFER',
PC_LOCAL_REMOVE_BUNDLE_FROM_OFFER);
// TODO(bug 1093835): figure out how to verify if media flows through the new stream
test.setMediaConstraints([{video: true}], [{video: true}]);
test.run();
});
</script>
</pre>
</body>
</html>


@ -0,0 +1,34 @@
<!DOCTYPE HTML>
<html>
<head>
<script type="application/javascript" src="pc.js"></script>
</head>
<body>
<pre id="test">
<script type="application/javascript">
createHTML({
bug: "1017888",
title: "Renegotiation: answerer adds second audio stream"
});
var test;
runNetworkTest(function (options) {
test = new PeerConnectionTest(options);
addRenegotiationAnswerer(test.chain,
[
function PC_LOCAL_ADD_SECOND_STREAM(test) {
test.setMediaConstraints([{audio: true}, {audio: true}],
[{audio: true}]);
return test.pcLocal.getAllUserMedia([{audio: true}]);
},
]
);
test.setMediaConstraints([{audio: true}], [{audio: true}]);
test.run();
});
</script>
</pre>
</body>
</html>


@ -15,15 +15,7 @@
var test = new PeerConnectionTest(options);
test.chain.insertAfter(
'PC_LOCAL_CREATE_OFFER',
[
function PC_LOCAL_REMOVE_BUNDLE_FROM_OFFER(test) {
test.originalOffer.sdp = test.originalOffer.sdp.replace(
/a=group:BUNDLE .*\r\n/g,
""
);
info("Updated no bundle offer: " + JSON.stringify(test.originalOffer));
}
]);
[PC_LOCAL_REMOVE_BUNDLE_FROM_OFFER]);
test.setMediaConstraints([{audio: true}, {video: true}],
[{audio: true}, {video: true}]);
test.run();


@ -31,7 +31,10 @@ runNetworkTest(function() {
.then(() => {
var stream = v1.mozCaptureStreamUntilEnded();
is(stream.getTracks().length, 2, "Captured stream has 2 tracks");
stream.getTracks().forEach(tr => test.pcLocal._pc.addTrack(tr, stream));
stream.getTracks().forEach(tr => {
test.pcLocal._pc.addTrack(tr, stream);
test.pcLocal.expectedLocalTrackTypesById[tr.id] = tr.kind;
});
test.pcLocal.constraints = [{ video: true, audio:true }]; // fool tests
});
}


@ -0,0 +1,35 @@
<!DOCTYPE HTML>
<html>
<head>
<script type="application/javascript" src="pc.js"></script>
</head>
<body>
<pre id="test">
<script type="application/javascript">
createHTML({
bug: "1017888",
title: "Renegotiation: remove audio track"
});
var test;
runNetworkTest(function (options) {
test = new PeerConnectionTest(options);
addRenegotiation(test.chain,
[
function PC_LOCAL_REMOVE_AUDIO_TRACK(test) {
test.setOfferOptions({ offerToReceiveAudio: true });
test.setMediaConstraints([], [{audio: true}]);
return test.pcLocal.removeSender(0);
},
]
);
// TODO(bug 1093835): figure out how to verify that media stopped flowing from pcLocal
test.setMediaConstraints([{audio: true}], [{audio: true}]);
test.run();
});
</script>
</pre>
</body>
</html>


@ -0,0 +1,39 @@
<!DOCTYPE HTML>
<html>
<head>
<script type="application/javascript" src="pc.js"></script>
</head>
<body>
<pre id="test">
<script type="application/javascript">
createHTML({
bug: "1017888",
title: "Renegotiation: remove then add audio track"
});
var test;
runNetworkTest(function (options) {
test = new PeerConnectionTest(options);
addRenegotiation(test.chain,
[
function PC_LOCAL_REMOVE_AUDIO_TRACK(test) {
return test.pcLocal.removeSender(0);
},
function PC_LOCAL_ADD_AUDIO_TRACK(test) {
// The new track's pipeline will start with a packet count of
// 0, but the remote side will keep its old pipeline and packet
// count.
test.pcLocal.disableRtpCountChecking = true;
return test.pcLocal.getAllUserMedia([{audio: true}]);
},
]
);
// TODO(bug 1093835): figure out how to verify if media flows through the new stream
test.setMediaConstraints([{audio: true}], [{audio: true}]);
test.run();
});
</script>
</pre>
</body>
</html>


@ -0,0 +1,42 @@
<!DOCTYPE HTML>
<html>
<head>
<script type="application/javascript" src="pc.js"></script>
</head>
<body>
<pre id="test">
<script type="application/javascript">
createHTML({
bug: "1017888",
title: "Renegotiation: remove then add audio track"
});
var test;
runNetworkTest(function (options) {
test = new PeerConnectionTest(options);
addRenegotiation(test.chain,
[
function PC_LOCAL_REMOVE_AUDIO_TRACK(test) {
// The new track's pipeline will start with a packet count of
// 0, but the remote side will keep its old pipeline and packet
// count.
test.pcLocal.disableRtpCountChecking = true;
return test.pcLocal.removeSender(0);
},
function PC_LOCAL_ADD_AUDIO_TRACK(test) {
return test.pcLocal.getAllUserMedia([{audio: true}]);
},
]
);
test.chain.insertAfterEach('PC_LOCAL_CREATE_OFFER',
PC_LOCAL_REMOVE_BUNDLE_FROM_OFFER);
// TODO(bug 1093835): figure out how to verify if media flows through the new stream
test.setMediaConstraints([{audio: true}], [{audio: true}]);
test.run();
});
</script>
</pre>
</body>
</html>


@ -0,0 +1,39 @@
<!DOCTYPE HTML>
<html>
<head>
<script type="application/javascript" src="pc.js"></script>
</head>
<body>
<pre id="test">
<script type="application/javascript">
createHTML({
bug: "1017888",
title: "Renegotiation: remove then add video track"
});
var test;
runNetworkTest(function (options) {
test = new PeerConnectionTest(options);
addRenegotiation(test.chain,
[
function PC_LOCAL_REMOVE_VIDEO_TRACK(test) {
// The new track's pipeline will start with a packet count of
// 0, but the remote side will keep its old pipeline and packet
// count.
test.pcLocal.disableRtpCountChecking = true;
return test.pcLocal.removeSender(0);
},
function PC_LOCAL_ADD_VIDEO_TRACK(test) {
return test.pcLocal.getAllUserMedia([{video: true}]);
},
]
);
// TODO(bug 1093835): figure out how to verify if media flows through the new stream
test.setMediaConstraints([{video: true}], [{video: true}]);
test.run();
});
</script>
</pre>
</body>
</html>


@ -0,0 +1,42 @@
<!DOCTYPE HTML>
<html>
<head>
<script type="application/javascript" src="pc.js"></script>
</head>
<body>
<pre id="test">
<script type="application/javascript">
createHTML({
bug: "1017888",
title: "Renegotiation: remove then add video track, no bundle"
});
var test;
runNetworkTest(function (options) {
test = new PeerConnectionTest(options);
addRenegotiation(test.chain,
[
function PC_LOCAL_REMOVE_VIDEO_TRACK(test) {
// The new track's pipeline will start with a packet count of
// 0, but the remote side will keep its old pipeline and packet
// count.
test.pcLocal.disableRtpCountChecking = true;
return test.pcLocal.removeSender(0);
},
function PC_LOCAL_ADD_VIDEO_TRACK(test) {
return test.pcLocal.getAllUserMedia([{video: true}]);
},
]
);
test.chain.insertAfterEach('PC_LOCAL_CREATE_OFFER',
PC_LOCAL_REMOVE_BUNDLE_FROM_OFFER);
// TODO(bug 1093835): figure out how to verify if media flows through the new stream
test.setMediaConstraints([{video: true}], [{video: true}]);
test.run();
});
</script>
</pre>
</body>
</html>


@ -0,0 +1,35 @@
<!DOCTYPE HTML>
<html>
<head>
<script type="application/javascript" src="pc.js"></script>
</head>
<body>
<pre id="test">
<script type="application/javascript">
createHTML({
bug: "1017888",
title: "Renegotiation: remove video track"
});
var test;
runNetworkTest(function (options) {
test = new PeerConnectionTest(options);
addRenegotiation(test.chain,
[
function PC_LOCAL_REMOVE_VIDEO_TRACK(test) {
test.setOfferOptions({ offerToReceiveVideo: true });
test.setMediaConstraints([], [{video: true}]);
return test.pcLocal.removeSender(0);
},
]
);
// TODO(bug 1093835): figure out how to verify that media stopped flowing from pcLocal
test.setMediaConstraints([{video: true}], [{video: true}]);
test.run();
});
</script>
</pre>
</body>
</html>


@ -884,7 +884,6 @@ class IceTestPeer : public sigslot::has_slots<> {
streams_[i]->ParseAttributes(attributes);
}
// Allow us to parse candidates directly on the current thread.
void ParseCandidate(size_t i, const std::string& candidate)
{
test_utils->sts_target()->Dispatch(


@ -223,17 +223,20 @@ class Fake_MediaStreamTrack
public:
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(Fake_MediaStreamTrack)
explicit Fake_MediaStreamTrack(bool aIsVideo) : mIsVideo (aIsVideo)
Fake_MediaStreamTrack(bool aIsVideo, Fake_DOMMediaStream* aStream) :
mIsVideo (aIsVideo),
mStream (aStream)
{
static size_t counter = 0;
std::ostringstream os;
os << counter++;
mID = os.str();
}
mozilla::TrackID GetTrackID() { return mIsVideo ? 1 : 0; }
std::string GetId() const { return mID; }
void AssignId(const std::string& id) { mID = id; }
Fake_DOMMediaStream *GetStream() { return nullptr; }
Fake_DOMMediaStream *GetStream() { return mStream; }
const Fake_MediaStreamTrack* AsVideoStreamTrack() const
{
return mIsVideo? this : nullptr;
@ -246,6 +249,7 @@ private:
~Fake_MediaStreamTrack() {}
const bool mIsVideo;
Fake_DOMMediaStream* mStream;
std::string mID;
};
@ -259,9 +263,9 @@ protected:
public:
explicit Fake_DOMMediaStream(Fake_MediaStream *stream = nullptr)
: mMediaStream(stream? stream : new Fake_MediaStream())
, mVideoTrack(new Fake_MediaStreamTrack(true))
, mAudioTrack(new Fake_MediaStreamTrack(false)) {}
: mMediaStream(stream ? stream : new Fake_MediaStream())
, mVideoTrack(new Fake_MediaStreamTrack(true, this))
, mAudioTrack(new Fake_MediaStreamTrack(false, this)) {}
NS_DECL_THREADSAFE_ISUPPORTS

File diff suppressed because it is too large


@ -229,11 +229,10 @@ enum offerAnswerFlags
enum mediaPipelineFlags
{
PIPELINE_LOCAL = (1<<0),
PIPELINE_RTCP_MUX = (1<<1),
PIPELINE_SEND = (1<<2),
PIPELINE_VIDEO = (1<<3),
PIPELINE_RTCP_NACK = (1<<4)
PIPELINE_RTCP_MUX = (1<<0),
PIPELINE_SEND = (1<<1),
PIPELINE_VIDEO = (1<<2),
PIPELINE_RTCP_NACK = (1<<3)
};
@ -648,7 +647,11 @@ class ParsedSDP {
std::string sdp;
for (auto it = sdp_lines_.begin(); it != sdp_lines_.end(); ++it) {
sdp += it->first + ' ' + it->second;
sdp += it->first;
if (it->second != "\r\n") {
sdp += " ";
}
sdp += it->second;
}
return sdp;
@ -1082,18 +1085,34 @@ class SignalingAgent {
for (uint32_t i = 0; i < tracks.Length(); i++) {
ASSERT_EQ(pc->AddTrack(tracks[i], domMediaStream), NS_OK);
}
domMediaStream_ = domMediaStream;
domMediaStreams_.push_back(domMediaStream);
}
// Removes a stream from the PeerConnection. If the stream
// parameter is absent, removes the stream that was most
// recently added to the PeerConnection.
void RemoveLastStreamAdded() {
void RemoveTrack(size_t streamIndex, bool videoTrack = false)
{
ASSERT_LT(streamIndex, domMediaStreams_.size());
nsTArray<nsRefPtr<MediaStreamTrack>> tracks;
domMediaStream_->GetTracks(tracks);
domMediaStreams_[streamIndex]->GetTracks(tracks);
for (size_t i = 0; i < tracks.Length(); ++i) {
if (!!tracks[i]->AsVideoStreamTrack() == videoTrack) {
ASSERT_EQ(pc->RemoveTrack(tracks[i]), NS_OK);
}
}
}
void RemoveStream(size_t index) {
nsTArray<nsRefPtr<MediaStreamTrack>> tracks;
domMediaStreams_[index]->GetTracks(tracks);
for (uint32_t i = 0; i < tracks.Length(); i++) {
ASSERT_EQ(pc->RemoveTrack(tracks[i]), NS_OK);
}
domMediaStreams_.erase(domMediaStreams_.begin() + index);
}
// Removes the stream that was most recently added to the PeerConnection.
void RemoveLastStreamAdded() {
ASSERT_FALSE(domMediaStreams_.empty());
RemoveStream(domMediaStreams_.size() - 1);
}
void CreateOffer(OfferOptions& options,
@ -1150,6 +1169,16 @@ class SignalingAgent {
DONT_CHECK_DATA,
PCImplSignalingState endState =
PCImplSignalingState::SignalingHaveRemoteOffer) {
// Create a media stream as if it came from GUM
Fake_AudioStreamSource *audio_stream =
new Fake_AudioStreamSource();
nsresult ret;
mozilla::SyncRunnable::DispatchToThread(
test_utils->sts_target(),
WrapRunnableRet(audio_stream, &Fake_MediaStream::Start, &ret));
ASSERT_TRUE(NS_SUCCEEDED(ret));
uint32_t aHintContents = 0;
if (offerAnswerFlags & ANSWER_AUDIO) {
@ -1158,7 +1187,7 @@ class SignalingAgent {
if (offerAnswerFlags & ANSWER_VIDEO) {
aHintContents |= DOMMediaStream::HINT_CONTENTS_VIDEO;
}
AddStream(aHintContents);
AddStream(aHintContents, audio_stream);
// Decide if streams are disabled for offer or answer
// then perform SDP checking based on which stream disabled
@ -1184,21 +1213,11 @@ class SignalingAgent {
}
}
// At present, we use the hints field in a stream to find and
// remove it. This only works if the specified hints flags are
// unique among all streams in the PeerConnection. This is not
// generally true, and will need significant revision once
// multiple streams are supported.
void CreateOfferRemoveStream(OfferOptions& options,
uint32_t hints, uint32_t sdpCheck) {
void CreateOfferRemoveTrack(OfferOptions& options,
bool videoTrack,
uint32_t sdpCheck) {
domMediaStream_->SetHintContents(hints);
// This currently "removes" a stream that has the same audio/video
// hints as were passed in.
// When complete RemoveStream will remove and entire stream and its tracks
// not just disable a track as this is currently doing
RemoveLastStreamAdded();
RemoveTrack(0, videoTrack);
// Now call CreateOffer as JS would
pObserver->state = TestObserver::stateNoResponse;
@ -1294,26 +1313,33 @@ class SignalingAgent {
ASSERT_EQ(signaling_state(), endState);
}
int GetPacketsReceived(int stream) {
int GetPacketsReceived(size_t stream) {
std::vector<DOMMediaStream *> streams = pObserver->GetStreams();
if ((int) streams.size() <= stream) {
if (streams.size() <= stream) {
EXPECT_TRUE(false);
return 0;
}
return streams[stream]->GetStream()->AsSourceStream()->GetSegmentsAdded();
}
int GetPacketsSent(int stream) {
int GetPacketsSent(size_t stream) {
if (stream >= domMediaStreams_.size()) {
EXPECT_TRUE(false);
return 0;
}
return static_cast<Fake_MediaStreamBase *>(
domMediaStream_->GetStream())->GetSegmentsAdded();
domMediaStreams_[stream]->GetStream())->GetSegmentsAdded();
}
//Stops generating new audio data for transmission.
//Should be called before Cleanup of the peer connection.
void CloseSendStreams() {
static_cast<Fake_MediaStream*>(
domMediaStream_->GetStream())->StopStream();
for (auto i = domMediaStreams_.begin(); i != domMediaStreams_.end(); ++i) {
static_cast<Fake_MediaStream*>((*i)->GetStream())->StopStream();
}
}
//Stops pulling audio data off the receivers.
@ -1367,7 +1393,6 @@ class SignalingAgent {
VideoSessionConduit::FrameRequestNone) {
std::cout << name << ": Checking media pipeline settings for "
<< ((flags & PIPELINE_LOCAL) ? "local " : "remote ")
<< ((flags & PIPELINE_SEND) ? "sending " : "receiving ")
<< ((flags & PIPELINE_VIDEO) ? "video" : "audio")
<< " pipeline (stream " << stream
@ -1377,7 +1402,7 @@ class SignalingAgent {
<< std::endl;
mozilla::RefPtr<mozilla::MediaPipeline> pipeline =
GetMediaPipeline((flags & PIPELINE_LOCAL), stream, video);
GetMediaPipeline((flags & PIPELINE_SEND), stream, video);
ASSERT_TRUE(pipeline);
ASSERT_EQ(pipeline->IsDoingRtcpMux(), !!(flags & PIPELINE_RTCP_MUX));
// We cannot yet test send/recv with video.
@ -1420,7 +1445,7 @@ public:
nsRefPtr<TestObserver> pObserver;
std::string offer_;
std::string answer_;
nsRefPtr<DOMMediaStream> domMediaStream_;
std::vector<nsRefPtr<DOMMediaStream>> domMediaStreams_;
IceConfiguration cfg_;
const std::string name;
bool mBundleEnabled;
@ -1845,14 +1870,14 @@ public:
WaitForCompleted();
}
void CreateOfferRemoveStream(OfferOptions& options,
uint32_t hints, uint32_t sdpCheck) {
void CreateOfferRemoveTrack(OfferOptions& options,
bool videoTrack, uint32_t sdpCheck) {
EnsureInit();
OfferOptions aoptions;
aoptions.setInt32Option("OfferToReceiveAudio", 1);
aoptions.setInt32Option("OfferToReceiveVideo", 1);
a1_->CreateOffer(aoptions, OFFER_AV, SHOULD_SENDRECV_AV );
a1_->CreateOfferRemoveStream(options, hints, sdpCheck);
a1_->CreateOfferRemoveTrack(options, videoTrack, sdpCheck);
}
void CreateOfferAudioOnly(OfferOptions& options,
@ -1875,6 +1900,37 @@ public:
a1_->AddIceCandidate(candidate, mid, level, false);
}
std::string SwapMsids(const std::string& sdp, bool swapVideo) const
{
SipccSdpParser parser;
UniquePtr<Sdp> parsed = parser.Parse(sdp);
SdpMediaSection* previousMsection = nullptr;
bool swapped = false;
for (size_t i = 0; i < parsed->GetMediaSectionCount(); ++i) {
SdpMediaSection* currentMsection = &parsed->GetMediaSection(i);
bool isVideo = currentMsection->GetMediaType() == SdpMediaSection::kVideo;
if (swapVideo == isVideo) {
if (previousMsection) {
UniquePtr<SdpMsidAttributeList> prevMsid(
new SdpMsidAttributeList(
previousMsection->GetAttributeList().GetMsid()));
UniquePtr<SdpMsidAttributeList> currMsid(
new SdpMsidAttributeList(
currentMsection->GetAttributeList().GetMsid()));
previousMsection->GetAttributeList().SetAttribute(currMsid.release());
currentMsection->GetAttributeList().SetAttribute(prevMsid.release());
swapped = true;
}
previousMsection = currentMsection;
}
}
EXPECT_TRUE(swapped);
return parsed->ToString();
}
void CheckRtcpFbSdp(const std::string &sdp,
const std::set<std::string>& expected) {
@ -1948,10 +2004,7 @@ public:
WaitForCompleted();
a1_->CloseSendStreams();
a1_->CloseReceiveStreams();
a2_->CloseSendStreams();
a2_->CloseReceiveStreams();
CloseStreams();
// Check caller video settings for remote pipeline
a1_->CheckMediaPipeline(0, true, (fRtcpMux ? PIPELINE_RTCP_MUX : 0) |
@ -1962,7 +2015,6 @@ public:
a2_->CheckMediaPipeline(0, true,
(fRtcpMux ? PIPELINE_RTCP_MUX : 0) |
PIPELINE_VIDEO |
PIPELINE_SEND |
PIPELINE_RTCP_NACK,
VideoSessionConduit::FrameRequestPli);
}
@ -1988,10 +2040,7 @@ public:
WaitForCompleted();
a1_->CloseSendStreams();
a1_->CloseReceiveStreams();
a2_->CloseSendStreams();
a2_->CloseReceiveStreams();
CloseStreams();
// Check callee video settings for remote pipeline
a2_->CheckMediaPipeline(0, true, (fRtcpMux ? PIPELINE_RTCP_MUX : 0) |
@ -2002,7 +2051,6 @@ public:
a1_->CheckMediaPipeline(0, true,
(fRtcpMux ? PIPELINE_RTCP_MUX : 0) |
PIPELINE_VIDEO |
PIPELINE_SEND |
PIPELINE_RTCP_NACK,
VideoSessionConduit::FrameRequestPli);
}
@ -2060,6 +2108,14 @@ public:
}
}
void CloseStreams()
{
a1_->CloseSendStreams();
a2_->CloseSendStreams();
a1_->CloseReceiveStreams();
a2_->CloseReceiveStreams();
}
protected:
bool init_;
ScopedDeletePtr<SignalingAgent> a1_; // Canonically "caller"
@ -2188,34 +2244,30 @@ TEST_P(SignalingTest, CreateOfferDontReceiveVideo)
SHOULD_SENDRECV_AUDIO | SHOULD_SEND_VIDEO);
}
// XXX Disabled pending resolution of Bug 840728
TEST_P(SignalingTest, DISABLED_CreateOfferRemoveAudioStream)
TEST_P(SignalingTest, CreateOfferRemoveAudioTrack)
{
OfferOptions options;
options.setInt32Option("OfferToReceiveAudio", 1);
options.setInt32Option("OfferToReceiveVideo", 1);
CreateOfferRemoveStream(options, DOMMediaStream::HINT_CONTENTS_AUDIO,
SHOULD_RECV_AUDIO | SHOULD_SENDRECV_VIDEO);
CreateOfferRemoveTrack(options, false,
SHOULD_RECV_AUDIO | SHOULD_SENDRECV_VIDEO);
}
// XXX Disabled pending resolution of Bug 840728
TEST_P(SignalingTest, DISABLED_CreateOfferDontReceiveAudioRemoveAudioStream)
TEST_P(SignalingTest, CreateOfferDontReceiveAudioRemoveAudioTrack)
{
OfferOptions options;
options.setInt32Option("OfferToReceiveAudio", 0);
options.setInt32Option("OfferToReceiveVideo", 1);
CreateOfferRemoveStream(options, DOMMediaStream::HINT_CONTENTS_AUDIO,
SHOULD_SENDRECV_VIDEO);
CreateOfferRemoveTrack(options, false, SHOULD_SENDRECV_VIDEO | SHOULD_OMIT_AUDIO);
}
// XXX Disabled pending resolution of Bug 840728
TEST_P(SignalingTest, DISABLED_CreateOfferDontReceiveVideoRemoveVideoStream)
TEST_P(SignalingTest, CreateOfferDontReceiveVideoRemoveVideoTrack)
{
OfferOptions options;
options.setInt32Option("OfferToReceiveAudio", 1);
options.setInt32Option("OfferToReceiveVideo", 0);
CreateOfferRemoveStream(options, DOMMediaStream::HINT_CONTENTS_VIDEO,
SHOULD_SENDRECV_AUDIO);
CreateOfferRemoveTrack(options, true,
SHOULD_SENDRECV_AUDIO);
}
TEST_P(SignalingTest, OfferAnswerNothingDisabled)
@ -2359,8 +2411,7 @@ TEST_P(SignalingTest, OfferAnswerVideoInactive)
ASSERT_TRUE_WAIT(a1_->GetPacketsSent(0) >= 40 &&
a2_->GetPacketsReceived(0) >= 40, kDefaultTimeout * 2);
a1_->CloseSendStreams();
a2_->CloseReceiveStreams();
CloseStreams();
// Check that we wrote a bunch of data
ASSERT_GE(a1_->GetPacketsSent(0), 40);
//ASSERT_GE(a2_->GetPacketsSent(0), 40);
@ -2564,20 +2615,350 @@ TEST_P(SignalingTest, FullCall)
ASSERT_TRUE_WAIT(a1_->GetPacketsSent(0) >= 40 &&
a2_->GetPacketsReceived(0) >= 40, kDefaultTimeout * 2);
a1_->CloseSendStreams();
a2_->CloseReceiveStreams();
ASSERT_TRUE_WAIT(a2_->GetPacketsSent(0) >= 40 &&
a1_->GetPacketsReceived(0) >= 40, kDefaultTimeout * 2);
CloseStreams();
// Check that we wrote a bunch of data
ASSERT_GE(a1_->GetPacketsSent(0), 40);
//ASSERT_GE(a2_->GetPacketsSent(0), 40);
//ASSERT_GE(a1_->GetPacketsReceived(0), 40);
ASSERT_GE(a2_->GetPacketsSent(0), 40);
ASSERT_GE(a1_->GetPacketsReceived(0), 40);
ASSERT_GE(a2_->GetPacketsReceived(0), 40);
// Check the low-level media pipeline
// for RTP and RTCP flows
// The first Local pipeline gets stored at 0
a1_->CheckMediaPipeline(0, false, fRtcpMux ?
PIPELINE_LOCAL | PIPELINE_RTCP_MUX | PIPELINE_SEND :
PIPELINE_LOCAL | PIPELINE_SEND);
PIPELINE_RTCP_MUX | PIPELINE_SEND :
PIPELINE_SEND);
// The first Remote pipeline gets stored at 0
a2_->CheckMediaPipeline(0, false, (fRtcpMux ? PIPELINE_RTCP_MUX : 0));
}
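// Renegotiation tests: each establishes a normal A/V call, waits for media to
// flow, renegotiates (adding, removing, or replacing tracks, or swapping msids),
// and then verifies that media continues to flow on the expected pipelines.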
TEST_P(SignalingTest, RenegotiationOffererAddsTracks)
{
OfferOptions options;
OfferAnswer(options, OFFER_AV | ANSWER_AV,
SHOULD_SENDRECV_AV, SHOULD_SENDRECV_AV);
// Wait for some data to get received
ASSERT_TRUE_WAIT(a2_->GetPacketsReceived(0) >= 40, kDefaultTimeout * 2);
// Not really packets, but audio segments; these are counted later
ASSERT_TRUE_WAIT(a1_->GetPacketsSent(0) >= 40, kDefaultTimeout * 2);
// OFFER_AV causes a new stream + tracks to be added
OfferAnswer(options, OFFER_AV,
SHOULD_SENDRECV_AV, SHOULD_SENDRECV_AV);
// Wait for some more data to get received
ASSERT_TRUE_WAIT(a2_->GetPacketsReceived(1) >= 40, kDefaultTimeout * 2);
// Not really packets, but audio segments; these are counted later
ASSERT_TRUE_WAIT(a1_->GetPacketsSent(1) >= 40, kDefaultTimeout * 2);
CloseStreams();
// Check the low-level media pipeline
// for RTP and RTCP flows
for (size_t i = 0; i < 2; ++i) {
a2_->CheckMediaPipeline(i,
false,
(fRtcpMux ? PIPELINE_RTCP_MUX : 0));
a1_->CheckMediaPipeline(i,
false,
(fRtcpMux ? PIPELINE_RTCP_MUX : 0) |
PIPELINE_SEND);
}
a1_->CheckMediaPipeline(0,
false,
(fRtcpMux ? PIPELINE_RTCP_MUX : 0));
a2_->CheckMediaPipeline(0,
false,
(fRtcpMux ? PIPELINE_RTCP_MUX : 0) |
PIPELINE_SEND);
}
TEST_P(SignalingTest, RenegotiationOffererRemovesTrack)
{
OfferOptions options;
OfferAnswer(options, OFFER_AV | ANSWER_AV,
SHOULD_SENDRECV_AV, SHOULD_SENDRECV_AV);
// Wait for some data to get received
ASSERT_TRUE_WAIT(a1_->GetPacketsReceived(0) >= 40, kDefaultTimeout * 2);
// Not really packets, but audio segments; these are counted later
ASSERT_TRUE_WAIT(a2_->GetPacketsSent(0) >= 40, kDefaultTimeout * 2);
int a2PacketsSent = a2_->GetPacketsSent(0);
int a1PacketsReceived = a1_->GetPacketsReceived(0);
a1_->RemoveTrack(0, false);
OfferAnswer(options, OFFER_NONE,
SHOULD_RECV_AUDIO | SHOULD_SENDRECV_VIDEO,
SHOULD_SEND_AUDIO | SHOULD_SENDRECV_VIDEO);
ASSERT_TRUE_WAIT(a1_->GetPacketsReceived(0) >= a1PacketsReceived + 40,
kDefaultTimeout * 2);
ASSERT_TRUE_WAIT(a2_->GetPacketsSent(0) >= a2PacketsSent + 40,
kDefaultTimeout * 2);
CloseStreams();
}
TEST_P(SignalingTest, RenegotiationOffererReplacesTrack)
{
OfferOptions options;
OfferAnswer(options, OFFER_AV | ANSWER_AV,
SHOULD_SENDRECV_AV, SHOULD_SENDRECV_AV);
// Wait for some data to get received
ASSERT_TRUE_WAIT(a2_->GetPacketsReceived(0) >= 40, kDefaultTimeout * 2);
// Not really packets, but audio segments; these are counted later
ASSERT_TRUE_WAIT(a1_->GetPacketsSent(0) >= 40, kDefaultTimeout * 2);
a1_->RemoveTrack(0, false);
// OFFER_AUDIO causes a new audio track to be added on both sides
OfferAnswer(options, OFFER_AUDIO,
SHOULD_SENDRECV_AV, SHOULD_SENDRECV_AV);
// Wait for some more data to get received
ASSERT_TRUE_WAIT(a2_->GetPacketsReceived(1) >= 40, kDefaultTimeout * 2);
// Not really packets, but audio segments; these are counted later
ASSERT_TRUE_WAIT(a1_->GetPacketsSent(1) >= 40, kDefaultTimeout * 2);
CloseStreams();
// Check the low-level media pipeline
// for RTP and RTCP flows
a1_->CheckMediaPipeline(1, false, fRtcpMux ?
PIPELINE_RTCP_MUX | PIPELINE_SEND :
PIPELINE_SEND);
a2_->CheckMediaPipeline(1, false, (fRtcpMux ? PIPELINE_RTCP_MUX : 0));
}
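// Renegotiates with the offerer's msid attributes swapped between m-sections,
// then checks that media still flows and that both of a1_'s send pipelines are intact.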
TEST_P(SignalingTest, RenegotiationOffererSwapsMsids)
{
OfferOptions options;
EnsureInit();
// Create a media stream as if it came from getUserMedia (GUM)
Fake_AudioStreamSource *audio_stream =
new Fake_AudioStreamSource();
nsresult ret;
mozilla::SyncRunnable::DispatchToThread(
test_utils->sts_target(),
WrapRunnableRet(audio_stream, &Fake_MediaStream::Start, &ret));
ASSERT_TRUE(NS_SUCCEEDED(ret));
a1_->AddStream(DOMMediaStream::HINT_CONTENTS_AUDIO |
DOMMediaStream::HINT_CONTENTS_VIDEO, audio_stream);
OfferAnswer(options, OFFER_AV | ANSWER_AV,
SHOULD_SENDRECV_AV, SHOULD_SENDRECV_AV);
// Wait for some data to get received
ASSERT_TRUE_WAIT(a2_->GetPacketsReceived(0) >= 40, kDefaultTimeout * 2);
// Not really packets, but audio segments; these are counted later
ASSERT_TRUE_WAIT(a1_->GetPacketsSent(0) >= 40, kDefaultTimeout * 2);
a1_->CreateOffer(options, OFFER_NONE, SHOULD_SENDRECV_AV);
a1_->SetLocal(TestObserver::OFFER, a1_->offer());
std::string audioSwapped = SwapMsids(a1_->offer(), false);
std::string audioAndVideoSwapped = SwapMsids(audioSwapped, true);
std::cout << "Msids swapped: " << std::endl << audioAndVideoSwapped << std::endl;
a2_->SetRemote(TestObserver::OFFER, audioAndVideoSwapped);
Answer(options, OFFER_NONE, SHOULD_SENDRECV_AV, BOTH_TRICKLE);
WaitForCompleted();
// Wait for some more data to get received
ASSERT_TRUE_WAIT(a2_->GetPacketsReceived(1) >= 40, kDefaultTimeout * 2);
// Not really packets, but audio segments; these are counted later
ASSERT_TRUE_WAIT(a1_->GetPacketsSent(1) >= 40, kDefaultTimeout * 2);
CloseStreams();
for (size_t i = 0; i < 2; ++i) {
// Check the low-level media pipeline
// for RTP and RTCP flows
a1_->CheckMediaPipeline(i, false, fRtcpMux ?
PIPELINE_RTCP_MUX | PIPELINE_SEND :
PIPELINE_SEND);
a2_->CheckMediaPipeline(i, false, (fRtcpMux ? PIPELINE_RTCP_MUX : 0));
}
}
TEST_P(SignalingTest, RenegotiationAnswererAddsTracks)
{
OfferOptions options;
OfferAnswer(options, OFFER_AV | ANSWER_AV,
SHOULD_SENDRECV_AV, SHOULD_SENDRECV_AV);
// Wait for some data to get received
ASSERT_TRUE_WAIT(a1_->GetPacketsReceived(0) >= 40, kDefaultTimeout * 2);
// Not really packets, but audio segments; these are counted later
ASSERT_TRUE_WAIT(a2_->GetPacketsSent(0) >= 40, kDefaultTimeout * 2);
options.setInt32Option("OfferToReceiveAudio", 2);
options.setInt32Option("OfferToReceiveVideo", 2);
// ANSWER_AV causes a new stream + tracks to be added
OfferAnswer(options, ANSWER_AV,
SHOULD_SENDRECV_AV, SHOULD_SENDRECV_AV);
ASSERT_TRUE_WAIT(a1_->GetPacketsReceived(1) >= 40, kDefaultTimeout * 2);
ASSERT_TRUE_WAIT(a2_->GetPacketsSent(1) >= 40, kDefaultTimeout * 2);
CloseStreams();
// Check the low-level media pipeline
// for RTP and RTCP flows
for (size_t i = 0; i < 2; ++i) {
a1_->CheckMediaPipeline(i,
false,
(fRtcpMux ? PIPELINE_RTCP_MUX : 0));
a2_->CheckMediaPipeline(i,
false,
(fRtcpMux ? PIPELINE_RTCP_MUX : 0) |
PIPELINE_SEND);
}
a2_->CheckMediaPipeline(0,
false,
(fRtcpMux ? PIPELINE_RTCP_MUX : 0));
a1_->CheckMediaPipeline(0,
false,
(fRtcpMux ? PIPELINE_RTCP_MUX : 0) |
PIPELINE_SEND);
}
TEST_P(SignalingTest, RenegotiationAnswererRemovesTrack)
{
OfferOptions options;
OfferAnswer(options, OFFER_AV | ANSWER_AV,
SHOULD_SENDRECV_AV, SHOULD_SENDRECV_AV);
// Wait for some data to get received
ASSERT_TRUE_WAIT(a2_->GetPacketsReceived(0) >= 40, kDefaultTimeout * 2);
// Not really packets, but audio segments; these are counted later
ASSERT_TRUE_WAIT(a1_->GetPacketsSent(0) >= 40, kDefaultTimeout * 2);
int a1PacketsSent = a1_->GetPacketsSent(0);
int a2PacketsReceived = a2_->GetPacketsReceived(0);
a2_->RemoveTrack(0, false);
OfferAnswer(options, OFFER_NONE,
SHOULD_SENDRECV_AUDIO | SHOULD_SENDRECV_VIDEO,
SHOULD_RECV_AUDIO | SHOULD_SENDRECV_VIDEO);
ASSERT_TRUE_WAIT(a2_->GetPacketsReceived(0) >= a2PacketsReceived + 40,
kDefaultTimeout * 2);
ASSERT_TRUE_WAIT(a1_->GetPacketsSent(0) >= a1PacketsSent + 40,
kDefaultTimeout * 2);
CloseStreams();
}
TEST_P(SignalingTest, RenegotiationAnswererReplacesTrack)
{
OfferOptions options;
OfferAnswer(options, OFFER_AV | ANSWER_AV,
SHOULD_SENDRECV_AV, SHOULD_SENDRECV_AV);
// Wait for some data to get written
ASSERT_TRUE_WAIT(a1_->GetPacketsSent(0) >= 40 &&
a2_->GetPacketsReceived(0) >= 40, kDefaultTimeout * 2);
ASSERT_TRUE_WAIT(a2_->GetPacketsSent(0) >= 40 &&
a1_->GetPacketsReceived(0) >= 40, kDefaultTimeout * 2);
int a1PacketsSent = a1_->GetPacketsSent(0);
int a2PacketsReceived = a2_->GetPacketsReceived(0);
a2_->RemoveTrack(0, false);
// ANSWER_AUDIO causes a new audio track to be added
OfferAnswer(options, ANSWER_AUDIO,
SHOULD_SENDRECV_AV, SHOULD_SENDRECV_AV);
// Wait for some more data to get written
ASSERT_TRUE_WAIT(a1_->GetPacketsSent(0) >= a1PacketsSent + 40 &&
a2_->GetPacketsReceived(0) >= a2PacketsReceived + 40,
kDefaultTimeout * 2);
// The other direction starts over from zero, since its track was replaced
ASSERT_TRUE_WAIT(a2_->GetPacketsSent(0) >= 40 &&
a1_->GetPacketsReceived(0) >= 40,
kDefaultTimeout * 2);
CloseStreams();
// Check the low-level media pipeline
// for RTP and RTCP flows
a1_->CheckMediaPipeline(0, false, fRtcpMux ?
PIPELINE_RTCP_MUX | PIPELINE_SEND :
PIPELINE_SEND);
a2_->CheckMediaPipeline(0, false, (fRtcpMux ? PIPELINE_RTCP_MUX : 0));
}
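// Toggles BUNDLE on or off mid-call (depending on the test parameter) and
// verifies that media keeps flowing on the original pipelines afterwards.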
TEST_P(SignalingTest, BundleRenegotiation)
{
if (GetParam() == "bundle") {
// We don't support ICE restart, which is a prereq for renegotiating bundle
// off.
return;
}
OfferOptions options;
OfferAnswer(options, OFFER_AV | ANSWER_AV,
SHOULD_SENDRECV_AV, SHOULD_SENDRECV_AV);
// Wait for some data to get written
ASSERT_TRUE_WAIT(a1_->GetPacketsSent(0) >= 40 &&
a2_->GetPacketsReceived(0) >= 40, kDefaultTimeout * 2);
ASSERT_TRUE_WAIT(a2_->GetPacketsSent(0) >= 40 &&
a1_->GetPacketsReceived(0) >= 40, kDefaultTimeout * 2);
int a1PacketsSent = a1_->GetPacketsSent(0);
int a2PacketsSent = a2_->GetPacketsSent(0);
int a1PacketsReceived = a1_->GetPacketsReceived(0);
int a2PacketsReceived = a2_->GetPacketsReceived(0);
// If bundle was used before, turn it off; if not, turn it on
if (a1_->mBundleEnabled && a2_->mBundleEnabled) {
a1_->SetBundleEnabled(false);
} else {
a1_->SetBundleEnabled(true);
a2_->SetBundleEnabled(true);
}
OfferAnswer(options, OFFER_NONE,
SHOULD_SENDRECV_AV, SHOULD_SENDRECV_AV);
// Wait for some more data to get written
ASSERT_TRUE_WAIT(a1_->GetPacketsSent(0) >= a1PacketsSent + 40 &&
a2_->GetPacketsReceived(0) >= a2PacketsReceived + 40,
kDefaultTimeout * 2);
ASSERT_TRUE_WAIT(a2_->GetPacketsSent(0) >= a2PacketsSent + 40 &&
a1_->GetPacketsReceived(0) >= a1PacketsReceived + 40,
kDefaultTimeout * 2);
// Check the low-level media pipeline
// for RTP and RTCP flows
// The first Local pipeline gets stored at 0
a1_->CheckMediaPipeline(0, false, fRtcpMux ?
PIPELINE_RTCP_MUX | PIPELINE_SEND :
PIPELINE_SEND);
// The first Remote pipeline gets stored at 0
a2_->CheckMediaPipeline(0, false, (fRtcpMux ? PIPELINE_RTCP_MUX : 0));
@ -2593,8 +2974,7 @@ TEST_P(SignalingTest, FullCallAudioOnly)
ASSERT_TRUE_WAIT(a1_->GetPacketsSent(0) >= 40 &&
a2_->GetPacketsReceived(0) >= 40, kDefaultTimeout * 2);
a1_->CloseSendStreams();
a2_->CloseReceiveStreams();
CloseStreams();
// Check that we wrote a bunch of data
ASSERT_GE(a1_->GetPacketsSent(0), 40);
//ASSERT_GE(a2_->GetPacketsSent(0), 40);
@ -2617,8 +2997,7 @@ TEST_P(SignalingTest, DISABLED_FullCallAnswererRejectsVideo)
ASSERT_TRUE_WAIT(a1_->GetPacketsSent(0) >= 40 &&
a2_->GetPacketsReceived(0) >= 40, kDefaultTimeout * 2);
a1_->CloseSendStreams();
a2_->CloseReceiveStreams();
CloseStreams();
// Check that we wrote a bunch of data
ASSERT_GE(a1_->GetPacketsSent(0), 40);
//ASSERT_GE(a2_->GetPacketsSent(0), 40);
@ -2638,8 +3017,7 @@ TEST_P(SignalingTest, FullCallVideoOnly)
// ASSERT_TRUE_WAIT(a1_->GetPacketsSent(0) >= 40 &&
// a2_->GetPacketsReceived(0) >= 40, kDefaultTimeout * 2);
a1_->CloseSendStreams();
a2_->CloseReceiveStreams();
CloseStreams();
// FIXME -- Ideally we would check that packets were sent
// and received; however, the test driver setup does not
@ -2670,8 +3048,7 @@ TEST_P(SignalingTest, OfferAndAnswerWithExtraCodec)
WaitForCompleted();
a1_->CloseSendStreams();
a2_->CloseReceiveStreams();
CloseStreams();
}
TEST_P(SignalingTest, FullCallTrickle)
@ -2688,8 +3065,7 @@ TEST_P(SignalingTest, FullCallTrickle)
ASSERT_TRUE_WAIT(a1_->GetPacketsSent(0) >= 40 &&
a2_->GetPacketsReceived(0) >= 40, kDefaultTimeout * 2);
a1_->CloseSendStreams();
a2_->CloseReceiveStreams();
CloseStreams();
ASSERT_GE(a1_->GetPacketsSent(0), 40);
ASSERT_GE(a2_->GetPacketsReceived(0), 40);
}
@ -2708,8 +3084,7 @@ TEST_P(SignalingTest, DISABLED_FullCallTrickleChrome)
ASSERT_TRUE_WAIT(a1_->GetPacketsSent(0) >= 40 &&
a2_->GetPacketsReceived(0) >= 40, kDefaultTimeout * 2);
a1_->CloseSendStreams();
a2_->CloseReceiveStreams();
CloseStreams();
ASSERT_GE(a1_->GetPacketsSent(0), 40);
ASSERT_GE(a2_->GetPacketsReceived(0), 40);
}
@ -2734,8 +3109,7 @@ TEST_P(SignalingTest, FullCallTrickleBeforeSetLocal)
ASSERT_TRUE_WAIT(a1_->GetPacketsSent(0) >= 40 &&
a2_->GetPacketsReceived(0) >= 40, kDefaultTimeout * 2);
a1_->CloseSendStreams();
a2_->CloseReceiveStreams();
CloseStreams();
ASSERT_GE(a1_->GetPacketsSent(0), 40);
ASSERT_GE(a2_->GetPacketsReceived(0), 40);
}
@ -3404,8 +3778,7 @@ TEST_P(SignalingTest, AudioOnlyCalleeNoRtcpMux)
ASSERT_TRUE_WAIT(a1_->GetPacketsSent(0) >= 40 &&
a2_->GetPacketsReceived(0) >= 40, kDefaultTimeout * 2);
a1_->CloseSendStreams();
a2_->CloseReceiveStreams();
CloseStreams();
ASSERT_GE(a1_->GetPacketsSent(0), 40);
ASSERT_GE(a2_->GetPacketsReceived(0), 40);
@ -3413,7 +3786,7 @@ TEST_P(SignalingTest, AudioOnlyCalleeNoRtcpMux)
// Check the low-level media pipeline
// for RTP and RTCP flows
// The first Local pipeline gets stored at 0
a1_->CheckMediaPipeline(0, false, PIPELINE_LOCAL | PIPELINE_SEND);
a1_->CheckMediaPipeline(0, false, PIPELINE_SEND);
a2_->CheckMediaPipeline(0, false, 0);
}
@ -3445,8 +3818,7 @@ TEST_P(SignalingTest, AudioOnlyG722Only)
ASSERT_TRUE_WAIT(a1_->GetPacketsSent(0) >= 40 &&
a2_->GetPacketsReceived(0) >= 40, kDefaultTimeout * 2);
a1_->CloseSendStreams();
a2_->CloseReceiveStreams();
CloseStreams();
ASSERT_GE(a1_->GetPacketsSent(0), 40);
ASSERT_GE(a2_->GetPacketsReceived(0), 40);
@ -3472,8 +3844,7 @@ TEST_P(SignalingTest, AudioOnlyG722MostPreferred)
ASSERT_NE(a2_->getLocalDescription().find("RTP/SAVPF 9"), std::string::npos);
ASSERT_NE(a2_->getLocalDescription().find("a=rtpmap:9 G722/8000"), std::string::npos);
a1_->CloseSendStreams();
a2_->CloseReceiveStreams();
CloseStreams();
}
TEST_P(SignalingTest, AudioOnlyG722Rejected)
@ -3502,8 +3873,7 @@ TEST_P(SignalingTest, AudioOnlyG722Rejected)
ASSERT_EQ(a2_->getLocalDescription().find("a=rtpmap:109 opus/48000/2"), std::string::npos);
ASSERT_EQ(a2_->getLocalDescription().find("a=rtpmap:9 G722/8000"), std::string::npos);
a1_->CloseSendStreams();
a2_->CloseReceiveStreams();
CloseStreams();
}
TEST_P(SignalingTest, FullCallAudioNoMuxVideoMux)
@ -3542,8 +3912,7 @@ TEST_P(SignalingTest, FullCallAudioNoMuxVideoMux)
ASSERT_TRUE_WAIT(a1_->GetPacketsSent(0) >= 40 &&
a2_->GetPacketsReceived(0) >= 40, kDefaultTimeout * 2);
a1_->CloseSendStreams();
a2_->CloseReceiveStreams();
CloseStreams();
ASSERT_GE(a1_->GetPacketsSent(0), 40);
ASSERT_GE(a2_->GetPacketsReceived(0), 40);
@ -3551,11 +3920,11 @@ TEST_P(SignalingTest, FullCallAudioNoMuxVideoMux)
// Check the low-level media pipeline
// for RTP and RTCP flows
// The first Local pipeline gets stored at 0
a1_->CheckMediaPipeline(0, false, PIPELINE_LOCAL | PIPELINE_SEND);
a1_->CheckMediaPipeline(0, false, PIPELINE_SEND);
// Now check video mux.
a1_->CheckMediaPipeline(0, true,
PIPELINE_LOCAL | (fRtcpMux ? PIPELINE_RTCP_MUX : 0) | PIPELINE_SEND |
(fRtcpMux ? PIPELINE_RTCP_MUX : 0) | PIPELINE_SEND |
PIPELINE_VIDEO);
// The first Remote pipeline gets stored at 0
@ -3745,8 +4114,7 @@ TEST_P(SignalingTest, AudioCallForceDtlsRoles)
ASSERT_TRUE_WAIT(a1_->GetPacketsSent(0) >= 40 &&
a2_->GetPacketsReceived(0) >= 40, kDefaultTimeout * 2);
a1_->CloseSendStreams();
a2_->CloseReceiveStreams();
CloseStreams();
ASSERT_GE(a1_->GetPacketsSent(0), 40);
ASSERT_GE(a2_->GetPacketsReceived(0), 40);
@ -3794,8 +4162,7 @@ TEST_P(SignalingTest, AudioCallReverseDtlsRoles)
ASSERT_TRUE_WAIT(a1_->GetPacketsSent(0) >= 40 &&
a2_->GetPacketsReceived(0) >= 40, kDefaultTimeout * 2);
a1_->CloseSendStreams();
a2_->CloseReceiveStreams();
CloseStreams();
ASSERT_GE(a1_->GetPacketsSent(0), 40);
ASSERT_GE(a2_->GetPacketsReceived(0), 40);
@ -3842,8 +4209,7 @@ TEST_P(SignalingTest, AudioCallMismatchDtlsRoles)
// Not using ASSERT_TRUE_WAIT here because we expect failure
PR_Sleep(kDefaultTimeout * 2); // Wait for some data to get written
a1_->CloseSendStreams();
a2_->CloseReceiveStreams();
CloseStreams();
ASSERT_GE(a1_->GetPacketsSent(0), 40);
// In this case we should receive nothing.
@ -3892,8 +4258,7 @@ TEST_P(SignalingTest, AudioCallGarbageSetup)
ASSERT_TRUE_WAIT(a1_->GetPacketsSent(0) >= 40 &&
a2_->GetPacketsReceived(0) >= 40, kDefaultTimeout * 2);
a1_->CloseSendStreams();
a2_->CloseReceiveStreams();
CloseStreams();
ASSERT_GE(a1_->GetPacketsSent(0), 40);
ASSERT_GE(a2_->GetPacketsReceived(0), 40);
@ -3940,8 +4305,7 @@ TEST_P(SignalingTest, AudioCallOfferNoSetupOrConnection)
ASSERT_TRUE_WAIT(a1_->GetPacketsSent(0) >= 40 &&
a2_->GetPacketsReceived(0) >= 40, kDefaultTimeout * 2);
a1_->CloseSendStreams();
a2_->CloseReceiveStreams();
CloseStreams();
ASSERT_GE(a1_->GetPacketsSent(0), 40);
ASSERT_GE(a2_->GetPacketsReceived(0), 40);
@ -3988,8 +4352,7 @@ TEST_P(SignalingTest, AudioCallAnswerNoSetupOrConnection)
ASSERT_TRUE_WAIT(a1_->GetPacketsSent(0) >= 40 &&
a2_->GetPacketsReceived(0) >= 40, kDefaultTimeout * 2);
a1_->CloseSendStreams();
a2_->CloseReceiveStreams();
CloseStreams();
ASSERT_GE(a1_->GetPacketsSent(0), 40);
ASSERT_GE(a2_->GetPacketsReceived(0), 40);
@ -4006,8 +4369,7 @@ TEST_P(SignalingTest, FullCallRealTrickle)
ASSERT_TRUE_WAIT(a1_->GetPacketsSent(0) >= 40 &&
a2_->GetPacketsReceived(0) >= 40, kDefaultTimeout * 2);
a1_->CloseSendStreams();
a2_->CloseReceiveStreams();
CloseStreams();
ASSERT_GE(a1_->GetPacketsSent(0), 40);
ASSERT_GE(a2_->GetPacketsReceived(0), 40);
}
@ -4026,8 +4388,7 @@ TEST_P(SignalingTest, FullCallRealTrickleTestServer)
ASSERT_TRUE_WAIT(a1_->GetPacketsSent(0) >= 40 &&
a2_->GetPacketsReceived(0) >= 40, kDefaultTimeout * 2);
a1_->CloseSendStreams();
a2_->CloseReceiveStreams();
CloseStreams();
ASSERT_GE(a1_->GetPacketsSent(0), 40);
ASSERT_GE(a2_->GetPacketsReceived(0), 40);
}
@ -4533,8 +4894,7 @@ TEST_P(SignalingTest, AnswerWithoutVP8)
WaitForCompleted();
a1_->CloseSendStreams();
a2_->CloseReceiveStreams();
CloseStreams();
}
// Test using a non-preferred dynamic video payload type on answer negotiation
@ -4602,8 +4962,7 @@ TEST_P(SignalingTest, UseNonPrefferedPayloadTypeOnAnswer)
ASSERT_TRUE_WAIT(a1_->GetPacketsSent(0) >= 40 &&
a2_->GetPacketsReceived(0) >= 40, kDefaultTimeout * 2);
a1_->CloseSendStreams();
a2_->CloseReceiveStreams();
CloseStreams();
}
TEST_P(SignalingTest, VideoNegotiationFails)
@ -4646,8 +5005,7 @@ TEST_P(SignalingTest, VideoNegotiationFails)
ASSERT_TRUE_WAIT(a1_->GetPacketsSent(0) >= 10 &&
a2_->GetPacketsReceived(0) >= 10, kDefaultTimeout * 2);
a1_->CloseSendStreams();
a2_->CloseReceiveStreams();
CloseStreams();
}
TEST_P(SignalingTest, AudioNegotiationFails)
@ -4681,8 +5039,7 @@ TEST_P(SignalingTest, AudioNegotiationFails)
WaitForCompleted();
a1_->CloseSendStreams();
a2_->CloseReceiveStreams();
CloseStreams();
}
TEST_P(SignalingTest, BundleStreamCorrelationBySsrc)
@ -4739,8 +5096,7 @@ TEST_P(SignalingTest, BundleStreamCorrelationBySsrc)
ASSERT_TRUE_WAIT(a1_->GetPacketsSent(0) >= 10 &&
a2_->GetPacketsReceived(0) >= 10, kDefaultTimeout * 2);
a1_->CloseSendStreams();
a2_->CloseReceiveStreams();
CloseStreams();
}
TEST_P(SignalingTest, BundleStreamCorrelationByUniquePt)
@ -4794,8 +5150,7 @@ TEST_P(SignalingTest, BundleStreamCorrelationByUniquePt)
ASSERT_TRUE_WAIT(a1_->GetPacketsSent(0) >= 10 &&
a2_->GetPacketsReceived(0) >= 10, kDefaultTimeout * 2);
a1_->CloseSendStreams();
a2_->CloseReceiveStreams();
CloseStreams();
}
INSTANTIATE_TEST_CASE_P(Variants, SignalingTest,