Backed out changeset 4cc3fae66ffb (bug 1250990) for frequent failure of modified test test_peerConnection_scaleResolution.html. r=frequentorange

Sebastian Hengst 2016-02-28 09:59:16 +01:00
parent d4b0049e46
commit 06d19d0d14
2 changed files with 149 additions and 155 deletions


@@ -12,37 +12,33 @@
visible: true
});
var pc1 = new RTCPeerConnection();
var pc2 = new RTCPeerConnection();
var add = (pc, can, failed) => can && pc.addIceCandidate(can).catch(failed);
pc1.onicecandidate = e => add(pc2, e.candidate, generateErrorCallback());
pc2.onicecandidate = e => add(pc1, e.candidate, generateErrorCallback());
pc1.onnegotiationneeded = e =>
pc1.createOffer().then(d => pc1.setLocalDescription(d))
.then(() => pc2.setRemoteDescription(pc1.localDescription))
.then(() => pc2.createAnswer()).then(d => pc2.setLocalDescription(d))
.then(() => pc1.setRemoteDescription(pc2.localDescription))
.catch(generateErrorCallback());
var mustRejectWith = (msg, reason, f) =>
f().then(() => ok(false, msg),
e => is(e.name, reason, msg));
var v1, v2;
var removeVP8 = d => (d.sdp = d.sdp.replace("a=rtpmap:120 VP8/90000\r\n", ""), d);
runNetworkTest(function() {
v1 = createMediaElement('video', 'v1');
v2 = createMediaElement('video', 'v2');
function testScale(codec) {
var pc1 = new RTCPeerConnection();
var pc2 = new RTCPeerConnection();
is(v2.currentTime, 0, "v2.currentTime is zero at outset");
var add = (pc, can, failed) => can && pc.addIceCandidate(can).catch(failed);
pc1.onicecandidate = e => add(pc2, e.candidate, generateErrorCallback());
pc2.onicecandidate = e => add(pc1, e.candidate, generateErrorCallback());
info("testing scaling with " + codec);
pc1.onnegotiationneeded = e =>
pc1.createOffer()
.then(d => pc1.setLocalDescription(codec == "VP8" ? d : removeVP8(d)))
.then(() => pc2.setRemoteDescription(pc1.localDescription))
.then(() => pc2.createAnswer()).then(d => pc2.setLocalDescription(d))
.then(() => pc1.setRemoteDescription(pc2.localDescription))
.catch(generateErrorCallback());
return navigator.mediaDevices.getUserMedia({ video: true })
navigator.mediaDevices.getUserMedia({ video: true })
.then(stream => {
var v1 = createMediaElement('video', 'v1');
var v2 = createMediaElement('video', 'v2');
is(v2.currentTime, 0, "v2.currentTime is zero at outset");
v1.srcObject = stream;
var sender = pc1.addTrack(stream.getVideoTracks()[0], stream);
@@ -50,29 +46,23 @@
() => sender.setParameters({ encodings:
[{ scaleResolutionDownBy: 0.5 } ] }))
.then(() => sender.setParameters({ encodings: [{ maxBitrate: 60000,
scaleResolutionDownBy: 2 }] }))
.then(() => new Promise(resolve => pc2.ontrack = e => resolve(e)))
.then(e => v2.srcObject = e.streams[0])
.then(() => new Promise(resolve => v2.onloadedmetadata = resolve))
.then(() => waitUntil(() => v2.currentTime > 0 && v2.srcObject.currentTime > 0))
.then(() => ok(v2.currentTime > 0, "v2.currentTime is moving (" + v2.currentTime + ")"))
.then(() => wait(1000)) // TODO: Bug 1248154
.then(() => {
ok(v1.videoWidth > 0, "source width is positive");
ok(v1.videoHeight > 0, "source height is positive");
is(v2.videoWidth, v1.videoWidth / 2, "sink is half the width of source");
is(v2.videoHeight, v1.videoHeight / 2, "sink is half the height of source");
})
.then(() => {
stream.getTracks().forEach(track => track.stop());
v1.srcObject = v2.srcObject = null;
})
scaleResolutionDownBy: 2 }] }))
})
.catch(generateErrorCallback());
}
runNetworkTest(() => testScale("VP8").then(() => testScale("H264"))
.then(networkTestFinished));
.then(() => new Promise(resolve => pc2.ontrack = e => resolve(e)))
.then(e => v2.srcObject = e.streams[0])
.then(() => new Promise(resolve => v2.onloadedmetadata = resolve))
.then(() => waitUntil(() => v2.currentTime > 0 && v2.srcObject.currentTime > 0))
.then(() => ok(v2.currentTime > 0, "v2.currentTime is moving (" + v2.currentTime + ")"))
.then(() => wait(1000)) // TODO: Bug 1248154
.then(() => {
ok(v1.videoWidth > 0, "source width is positive");
ok(v1.videoHeight > 0, "source height is positive");
is(v2.videoWidth, v1.videoWidth / 2, "sink is half the width of source");
is(v2.videoHeight, v1.videoHeight / 2, "sink is half the height of source");
})
.catch(generateErrorCallback())
.then(networkTestFinished);
});
</script>
</pre>
</body>


@@ -1277,76 +1277,80 @@ WebrtcVideoConduit::ReconfigureSendCodec(unsigned short width,
CSFLogDebug(logTag,
"%s: Requesting resolution change to %ux%u (from %ux%u)",
__FUNCTION__, width, height, vie_codec.width, vie_codec.height);
// Likely spurious unless there was some error, but rarely checked
if (vie_codec.width != width || vie_codec.height != height ||
vie_codec.maxFramerate != mSendingFramerate)
{
vie_codec.width = width;
vie_codec.height = height;
vie_codec.maxFramerate = mSendingFramerate;
SelectBitrates(vie_codec.width, vie_codec.height, 0,
mLastFramerateTenths,
vie_codec.minBitrate,
vie_codec.startBitrate,
vie_codec.maxBitrate);
vie_codec.width = width;
vie_codec.height = height;
vie_codec.maxFramerate = mSendingFramerate;
SelectBitrates(vie_codec.width, vie_codec.height, 0,
mLastFramerateTenths,
vie_codec.minBitrate,
vie_codec.startBitrate,
vie_codec.maxBitrate);
for (size_t i = vie_codec.numberOfSimulcastStreams; i > 0; --i) {
webrtc::SimulcastStream& stream(vie_codec.simulcastStream[i - 1]);
stream.width = width;
stream.height = height;
MOZ_ASSERT(stream.jsScaleDownBy >= 1.0);
uint32_t new_width = uint32_t(width / stream.jsScaleDownBy);
uint32_t new_height = uint32_t(height / stream.jsScaleDownBy);
// TODO: If two layers are similar, only alloc bits to one (Bug 1249859)
if (new_width != width || new_height != height) {
if (vie_codec.numberOfSimulcastStreams == 1) {
// Use less strict scaling in unicast. That way 320x240 / 3 = 106x79.
ConstrainPreservingAspectRatio(new_width, new_height,
&stream.width, &stream.height);
} else {
// webrtc.org supposedly won't tolerate simulcast unless every stream
// is exactly the same aspect ratio. 320x240 / 3 = 80x60.
ConstrainPreservingAspectRatioExact(new_width*new_height,
&stream.width, &stream.height);
}
}
// Give each layer default appropriate bandwidth limits based on the
// resolution/framerate of that layer
SelectBitrates(stream.width, stream.height, stream.jsMaxBitrate,
mLastFramerateTenths,
stream.minBitrate,
stream.targetBitrate,
stream.maxBitrate);
for (size_t i = vie_codec.numberOfSimulcastStreams; i > 0; --i) {
webrtc::SimulcastStream& stream(vie_codec.simulcastStream[i - 1]);
stream.width = width;
stream.height = height;
MOZ_ASSERT(stream.jsScaleDownBy >= 1.0);
uint32_t new_width = uint32_t(width / stream.jsScaleDownBy);
uint32_t new_height = uint32_t(height / stream.jsScaleDownBy);
// TODO: If two layers are similar, only alloc bits to one (Bug 1249859)
if (new_width != width || new_height != height) {
if (vie_codec.numberOfSimulcastStreams == 1) {
// Use less strict scaling in unicast. That way 320x240 / 3 = 106x79.
ConstrainPreservingAspectRatio(new_width, new_height,
&stream.width, &stream.height);
} else {
// webrtc.org supposedly won't tolerate simulcast unless every stream
// is exactly the same aspect ratio. 320x240 / 3 = 80x60.
ConstrainPreservingAspectRatioExact(new_width*new_height,
&stream.width, &stream.height);
vie_codec.minBitrate = std::min(stream.minBitrate, vie_codec.minBitrate);
vie_codec.startBitrate += stream.targetBitrate;
vie_codec.maxBitrate = std::max(stream.maxBitrate, vie_codec.maxBitrate);
// webrtc.org expects the last, highest fidelity, simulcast stream to
// always have the same resolution as vie_codec
if (i == vie_codec.numberOfSimulcastStreams) {
vie_codec.width = stream.width;
vie_codec.height = stream.height;
}
}
// Give each layer default appropriate bandwidth limits based on the
// resolution/framerate of that layer
SelectBitrates(stream.width, stream.height, stream.jsMaxBitrate,
mLastFramerateTenths,
stream.minBitrate,
stream.targetBitrate,
stream.maxBitrate);
vie_codec.minBitrate = std::min(stream.minBitrate, vie_codec.minBitrate);
vie_codec.startBitrate += stream.targetBitrate;
vie_codec.maxBitrate = std::max(stream.maxBitrate, vie_codec.maxBitrate);
// webrtc.org expects the last, highest fidelity, simulcast stream to
// always have the same resolution as vie_codec
if (i == vie_codec.numberOfSimulcastStreams) {
vie_codec.width = stream.width;
vie_codec.height = stream.height;
if (vie_codec.numberOfSimulcastStreams != 0) {
vie_codec.startBitrate /= vie_codec.numberOfSimulcastStreams;
}
if ((err = mPtrViECodec->SetSendCodec(mChannel, vie_codec)) != 0)
{
CSFLogError(logTag, "%s: SetSendCodec(%ux%u) failed, err %d",
__FUNCTION__, width, height, err);
return NS_ERROR_FAILURE;
}
if (mMinBitrateEstimate != 0) {
mPtrViENetwork->SetBitrateConfig(mChannel,
mMinBitrateEstimate,
std::max(vie_codec.startBitrate,
mMinBitrateEstimate),
std::max(vie_codec.maxBitrate,
mMinBitrateEstimate));
}
}
if (vie_codec.numberOfSimulcastStreams != 0) {
vie_codec.startBitrate /= vie_codec.numberOfSimulcastStreams;
}
if ((err = mPtrViECodec->SetSendCodec(mChannel, vie_codec)) != 0)
{
CSFLogError(logTag, "%s: SetSendCodec(%ux%u) failed, err %d",
__FUNCTION__, width, height, err);
return NS_ERROR_FAILURE;
}
if (mMinBitrateEstimate != 0) {
mPtrViENetwork->SetBitrateConfig(mChannel,
mMinBitrateEstimate,
std::max(vie_codec.startBitrate,
mMinBitrateEstimate),
std::max(vie_codec.maxBitrate,
mMinBitrateEstimate));
}
CSFLogDebug(logTag, "%s: Encoder resolution changed to %ux%u @ %ufps, bitrate %u:%u",
__FUNCTION__, width, height, mSendingFramerate,
vie_codec.minBitrate, vie_codec.maxBitrate);
CSFLogDebug(logTag, "%s: Encoder resolution changed to %ux%u @ %ufps, bitrate %u:%u",
__FUNCTION__, width, height, mSendingFramerate,
vie_codec.minBitrate, vie_codec.maxBitrate);
} // else no change; mSendingWidth likely was 0
if (frame) {
// XXX I really don't like doing this from MainThread...
mPtrExtCapture->IncomingFrame(*frame);
@@ -1860,57 +1864,57 @@ WebrtcVideoConduit::CodecConfigToWebRTCCodec(const VideoCodecConfig* codecInfo,
cinst.codecSpecific.H264.spsLen = 0;
cinst.codecSpecific.H264.ppsData = nullptr;
cinst.codecSpecific.H264.ppsLen = 0;
}
// Init mSimulcastEncodings always since they hold info from setParameters.
// TODO(bug 1210175): H264 doesn't support simulcast yet.
for (size_t i = 0; i < codecInfo->mSimulcastEncodings.size(); ++i) {
const VideoCodecConfig::SimulcastEncoding& encoding =
codecInfo->mSimulcastEncodings[i];
// Make sure the constraints on the whole stream are reflected.
webrtc::SimulcastStream stream;
memset(&stream, 0, sizeof(stream));
stream.width = cinst.width;
stream.height = cinst.height;
stream.numberOfTemporalLayers = 1;
stream.maxBitrate = cinst.maxBitrate;
stream.targetBitrate = cinst.targetBitrate;
stream.minBitrate = cinst.minBitrate;
stream.qpMax = cinst.qpMax;
strncpy(stream.rid, encoding.rid.c_str(), sizeof(stream.rid)-1);
stream.rid[sizeof(stream.rid) - 1] = 0;
} else {
// TODO(bug 1210175): H264 doesn't support simulcast yet.
for (size_t i = 0; i < codecInfo->mSimulcastEncodings.size(); ++i) {
const VideoCodecConfig::SimulcastEncoding& encoding =
codecInfo->mSimulcastEncodings[i];
// Make sure the constraints on the whole stream are reflected.
webrtc::SimulcastStream stream;
memset(&stream, 0, sizeof(stream));
stream.width = cinst.width;
stream.height = cinst.height;
stream.numberOfTemporalLayers = 1;
stream.maxBitrate = cinst.maxBitrate;
stream.targetBitrate = cinst.targetBitrate;
stream.minBitrate = cinst.minBitrate;
stream.qpMax = cinst.qpMax;
strncpy(stream.rid, encoding.rid.c_str(), sizeof(stream.rid)-1);
stream.rid[sizeof(stream.rid) - 1] = 0;
// Apply encoding-specific constraints.
stream.width = MinIgnoreZero(
stream.width,
(unsigned short)encoding.constraints.maxWidth);
stream.height = MinIgnoreZero(
stream.height,
(unsigned short)encoding.constraints.maxHeight);
// Apply encoding-specific constraints.
stream.width = MinIgnoreZero(
stream.width,
(unsigned short)encoding.constraints.maxWidth);
stream.height = MinIgnoreZero(
stream.height,
(unsigned short)encoding.constraints.maxHeight);
// webrtc.org uses kbps, we use bps
stream.jsMaxBitrate = encoding.constraints.maxBr/1000;
stream.jsScaleDownBy = encoding.constraints.scaleDownBy;
// webrtc.org uses kbps, we use bps
stream.jsMaxBitrate = encoding.constraints.maxBr/1000;
stream.jsScaleDownBy = encoding.constraints.scaleDownBy;
MOZ_ASSERT(stream.jsScaleDownBy >= 1.0);
uint32_t width = stream.width? stream.width : 640;
uint32_t height = stream.height? stream.height : 480;
uint32_t new_width = uint32_t(width / stream.jsScaleDownBy);
uint32_t new_height = uint32_t(height / stream.jsScaleDownBy);
MOZ_ASSERT(stream.jsScaleDownBy >= 1.0);
uint32_t width = stream.width? stream.width : 640;
uint32_t height = stream.height? stream.height : 480;
uint32_t new_width = uint32_t(width / stream.jsScaleDownBy);
uint32_t new_height = uint32_t(height / stream.jsScaleDownBy);
if (new_width != width || new_height != height) {
// Estimate. Overridden on first frame.
SelectBitrates(new_width, new_height, stream.jsMaxBitrate,
mLastFramerateTenths,
stream.minBitrate,
stream.targetBitrate,
stream.maxBitrate);
if (new_width != width || new_height != height) {
// Estimate. Overridden on first frame.
SelectBitrates(new_width, new_height, stream.jsMaxBitrate,
mLastFramerateTenths,
stream.minBitrate,
stream.targetBitrate,
stream.maxBitrate);
}
// webrtc.org expects simulcast streams to be ordered by increasing
// fidelity, our jsep code does the opposite.
cinst.simulcastStream[codecInfo->mSimulcastEncodings.size()-i-1] = stream;
}
// webrtc.org expects simulcast streams to be ordered by increasing
// fidelity, our jsep code does the opposite.
cinst.simulcastStream[codecInfo->mSimulcastEncodings.size()-i-1] = stream;
}
cinst.numberOfSimulcastStreams = codecInfo->mSimulcastEncodings.size();
cinst.numberOfSimulcastStreams = codecInfo->mSimulcastEncodings.size();
}
}
//Copy the codec passed into Conduit's database