--- a/dom/canvas/test/captureStream_common.js
+++ b/dom/canvas/test/captureStream_common.js
@@ -102,17 +102,17 @@ CaptureStreamTestHelper.prototype = {
return px.some((ch, i) => Math.abs(ch - refColor.data[i]) > threshold);
},
/*
* Behaves like isPixelNot but ignores the alpha channel.
*/
isOpaquePixelNot: function(px, refColor, threshold) {
px[3] = refColor.data[3];
- return h.isPixelNot(px, refColor, threshold);
+ return this.isPixelNot(px, refColor, threshold);
},
/*
* Returns a promise that resolves when the provided function |test|
* returns true.
*/
waitForPixel: function (video, offsetX, offsetY, test, timeout, width, height) {
return new Promise(resolve => {
--- a/dom/media/tests/mochitest/head.js
+++ b/dom/media/tests/mochitest/head.js
@@ -903,65 +903,104 @@ AudioStreamHelper.prototype = {
checkAudioNotFlowing: function(stream) {
var analyser = new AudioStreamAnalyser(this._context, stream);
var freq = analyser.binIndexForFrequency(TEST_AUDIO_FREQ);
return this.checkAudio(stream, analyser, array => array[freq] < 50);
}
}
-function VideoStreamHelper() {
- this._helper = new CaptureStreamTestHelper2D(50,50);
- this._canvas = this._helper.createAndAppendElement('canvas', 'source_canvas');
- // Make sure this is initted
- this._helper.drawColor(this._canvas, this._helper.green);
- this._stream = this._canvas.captureStream(10);
-}
+class VideoFrameEmitter {
+ constructor(color1, color2) {
+ this._helper = new CaptureStreamTestHelper2D(50,50);
+ this._canvas = this._helper.createAndAppendElement('canvas', 'source_canvas');
+ this._color1 = color1 ? color1 : this._helper.green;
+ this._color2 = color2 ? color2 : this._helper.red;
+ // Make sure this is initted
+ this._helper.drawColor(this._canvas, this._color1);
+ this._stream = this._canvas.captureStream();
+ this._started = false;
+ }
-VideoStreamHelper.prototype = {
- stream: function() {
+ stream() {
return this._stream;
- },
+ }
+
+ start() {
+ if (this._started) {
+ return;
+ }
- startCapturingFrames: function() {
- var i = 0;
- var helper = this;
- return setInterval(function() {
+ let i = 0;
+ this._started = true;
+ this._intervalId = setInterval(() => {
try {
- helper._helper.drawColor(helper._canvas,
- i ? helper._helper.green : helper._helper.red);
+        this._helper.drawColor(this._canvas, i ? this._color1 : this._color2);
i = 1 - i;
- helper._stream.requestFrame();
} catch (e) {
// ignore; stream might have shut down, and we don't bother clearing
// the setInterval.
}
}, 500);
- },
+ }
+
+ stop() {
+ if (this._started) {
+ clearInterval(this._intervalId);
+ this._started = false;
+ }
+ }
+}
- waitForFrames: function(canvas, timeout_value) {
- var intervalId = this.startCapturingFrames();
- timeout_value = timeout_value || 8000;
+class VideoStreamHelper {
+ constructor() {
+ this._helper = new CaptureStreamTestHelper2D(50,50);
+ }
+
+ checkHasFrame(video, offsetX, offsetY, threshold) {
+ const h = this._helper;
+ return h.waitForPixel(video, offsetX, offsetY, px => {
+ let result = h.isOpaquePixelNot(px, h.black, threshold);
+ info("Checking that we have a frame, got [" +
+ Array.slice(px) + "]. Ref=[" +
+ Array.slice(h.black.data) + "]. Threshold=" + threshold +
+ ". Pass=" + result);
+ return result;
+ });
+ }
- return addFinallyToPromise(timeout(
- Promise.all([
- this._helper.waitForPixelColor(canvas, this._helper.green, 128,
- canvas.id + " should become green"),
- this._helper.waitForPixelColor(canvas, this._helper.red, 128,
- canvas.id + " should become red")
- ]),
- timeout_value,
- "Timed out waiting for frames")).finally(() => clearInterval(intervalId));
- },
+ async checkVideoPlaying(video, offsetX, offsetY, threshold) {
+ const h = this._helper;
+ await this.checkHasFrame(video, offsetX, offsetY, threshold);
+ let startPixel = { data: h.getPixel(video, offsetX, offsetY)
+ , name: "startcolor"
+ };
+ return h.waitForPixel(video, offsetX, offsetY, px => {
+      let result = h.isPixelNot(px, startPixel, threshold);
+ info("Checking playing, [" +
+ Array.slice(px) + "] vs [" + Array.slice(startPixel.data) +
+ "]. Threshold=" + threshold + " Pass=" + result);
+ return result;
+ });
+ }
- verifyNoFrames: function(canvas) {
- return this.waitForFrames(canvas).then(
- () => ok(false, "Color should not change"),
- () => ok(true, "Color should not change")
- );
+ async checkVideoPaused(video, offsetX, offsetY, threshold, timeout) {
+ const h = this._helper;
+ await this.checkHasFrame(video, offsetX, offsetY, threshold);
+ let startPixel = { data: h.getPixel(video, offsetX, offsetY)
+ , name: "startcolor"
+ };
+ const changed = await h.waitForPixel(video, offsetX, offsetY, px => {
+ let result = h.isOpaquePixelNot(px, startPixel, threshold);
+ info("Checking paused, [" +
+ Array.slice(px) + "] vs [" + Array.slice(startPixel.data) +
+ "]. Threshold=" + threshold + " Pass=" + result);
+ return result;
+ }, timeout);
+ ok(!changed, "Frame shouldn't change within " + timeout / 1000 + " seconds.");
}
}
function IsMacOSX10_6orOlder() {
if (navigator.platform.indexOf("Mac") !== 0) {
return false;
}
--- a/dom/media/tests/mochitest/test_getUserMedia_mediaElementCapture_audio.html
+++ b/dom/media/tests/mochitest/test_getUserMedia_mediaElementCapture_audio.html
@@ -1,13 +1,12 @@
<!DOCTYPE HTML>
<html>
<head>
<script type="application/javascript" src="mediaStreamPlayback.js"></script>
- <script type="application/javascript" src="head.js"></script>
</head>
<body>
<pre id="test">
<script>
createHTML({
bug: "1259788",
title: "Test CaptureStream audio content on HTMLMediaElement playing a gUM MediaStream",
--- a/dom/media/tests/mochitest/test_getUserMedia_mediaElementCapture_tracks.html
+++ b/dom/media/tests/mochitest/test_getUserMedia_mediaElementCapture_tracks.html
@@ -1,13 +1,12 @@
<!DOCTYPE HTML>
<html>
<head>
<script type="application/javascript" src="mediaStreamPlayback.js"></script>
- <script type="application/javascript" src="head.js"></script>
</head>
<body>
<pre id="test">
<script>
createHTML({
bug: "1259788",
title: "Test CaptureStream track output on HTMLMediaElement playing a gUM MediaStream",
--- a/dom/media/tests/mochitest/test_getUserMedia_mediaElementCapture_video.html
+++ b/dom/media/tests/mochitest/test_getUserMedia_mediaElementCapture_video.html
@@ -1,14 +1,13 @@
<!DOCTYPE HTML>
<html>
<head>
<script type="application/javascript" src="/tests/dom/canvas/test/captureStream_common.js"></script>
<script type="application/javascript" src="mediaStreamPlayback.js"></script>
- <script type="application/javascript" src="head.js"></script>
</head>
<body>
<pre id="test">
<script>
createHTML({
bug: "1259788",
title: "Test CaptureStream video content on HTMLMediaElement playing a gUM MediaStream",
@@ -19,53 +18,21 @@ var gUMVideoElement;
var captureStreamElement;
// We check a pixel somewhere away from the top left corner since
// MediaEngineDefault puts semi-transparent time indicators there.
const offsetX = 20;
const offsetY = 20;
const threshold = 16;
const pausedTimeout = 1000;
-const h = new CaptureStreamTestHelper2D(50, 50);
-
-var checkHasFrame = video => h.waitForPixel(video, offsetX, offsetY, px => {
- let result = h.isOpaquePixelNot(px, h.black, threshold);
- info("Checking that we have a frame, got [" +
- Array.slice(px) + "]. Pass=" + result);
- return result;
-});
-
-var checkVideoPlaying = video => checkHasFrame(video)
- .then(() => {
- let startPixel = { data: h.getPixel(video, offsetX, offsetY)
- , name: "startcolor"
- };
- return h.waitForPixel(video, offsetX, offsetY, px => {
- let result = h.isPixelNot(px, startPixel, threshold)
- info("Checking playing, [" + Array.slice(px) + "] vs [" +
- Array.slice(startPixel.data) + "]. Pass=" + result);
- return result;
- });
- });
-
-var checkVideoPaused = video => checkHasFrame(video)
- .then(() => {
- let startPixel = { data: h.getPixel(video, offsetX, offsetY)
- , name: "startcolor"
- };
- return h.waitForPixel(video, offsetX, offsetY, px => {
- let result = h.isOpaquePixelNot(px, startPixel, threshold);
- info("Checking paused, [" + Array.slice(px) + "] vs [" +
- Array.slice(startPixel.data) + "]. Pass=" + result);
- return result;
- }, pausedTimeout);
- }).then(result => ok(!result, "Frame shouldn't change within " + pausedTimeout / 1000 + " seconds."));
+let h;
runTest(() => getUserMedia({video: true, fake: true})
.then(stream => {
+ h = new VideoStreamHelper();
gUMVideoElement =
createMediaElement("video", "gUMVideo");
gUMVideoElement.srcObject = stream;
gUMVideoElement.play();
info("Capturing");
captureStreamElement =
createMediaElement("video", "captureStream");
@@ -75,53 +42,55 @@ runTest(() => getUserMedia({video: true,
// Adding a dummy audio track to the stream will keep a consuming media
// element from ending.
// We could also solve it by repeatedly play()ing or autoplay, but then we
// wouldn't be sure the media element stopped rendering video because it
// went to the ended state or because there were no frames for the track.
let osc = createOscillatorStream(new AudioContext(), 1000);
captureStreamElement.srcObject.addTrack(osc.getTracks()[0]);
- return checkVideoPlaying(captureStreamElement);
+    return h.checkVideoPlaying(captureStreamElement, offsetX, offsetY, threshold);
})
.then(() => {
info("Video flowing. Pausing.");
gUMVideoElement.pause();
- return checkVideoPaused(captureStreamElement);
+    return h.checkVideoPaused(captureStreamElement, offsetX, offsetY, threshold, pausedTimeout);
})
.then(() => {
info("Video stopped flowing. Playing.");
gUMVideoElement.play();
- return checkVideoPlaying(captureStreamElement);
+    return h.checkVideoPlaying(captureStreamElement, offsetX, offsetY, threshold);
})
.then(() => {
info("Video flowing. Removing source.");
var stream = gUMVideoElement.srcObject;
gUMVideoElement.srcObject = null;
- return checkVideoPaused(captureStreamElement).then(() => stream);
+    return h.checkVideoPaused(captureStreamElement, offsetX, offsetY, threshold, pausedTimeout)
+      .then(() => stream);
})
.then(stream => {
info("Video stopped flowing. Setting source.");
gUMVideoElement.srcObject = stream;
- return checkVideoPlaying(captureStreamElement);
+    return h.checkVideoPlaying(captureStreamElement, offsetX, offsetY, threshold);
})
.then(() => {
info("Video flowing. Changing source by track manipulation. Remove first.");
var track = gUMVideoElement.srcObject.getTracks()[0];
gUMVideoElement.srcObject.removeTrack(track);
- return checkVideoPaused(captureStreamElement).then(() => track);
+    return h.checkVideoPaused(captureStreamElement, offsetX, offsetY, threshold, pausedTimeout)
+      .then(() => track);
})
.then(track => {
info("Video paused. Changing source by track manipulation. Add first.");
gUMVideoElement.srcObject.addTrack(track);
gUMVideoElement.play();
- return checkVideoPlaying(captureStreamElement);
+    return h.checkVideoPlaying(captureStreamElement, offsetX, offsetY, threshold);
})
.then(() => {
gUMVideoElement.srcObject.getTracks().forEach(t => t.stop());
ok(true, "Test passed.");
})
.catch(e => ok(false, "Test failed: " + e + (e.stack ? "\n" + e.stack : ""))));
</script>
--- a/dom/media/tests/mochitest/test_getUserMedia_scarySources.html
+++ b/dom/media/tests/mochitest/test_getUserMedia_scarySources.html
@@ -1,12 +1,11 @@
<!DOCTYPE HTML>
<html>
<head>
- <script type="application/javascript" src="head.js"></script>
<script type="application/javascript" src="mediaStreamPlayback.js"></script>
</head>
<body>
<pre id="test">
<script type="application/javascript">
createHTML({title: "Detect screensharing sources that are firefox", bug: "1311048"});
--- a/dom/media/tests/mochitest/test_peerConnection_close.html
+++ b/dom/media/tests/mochitest/test_peerConnection_close.html
@@ -1,12 +1,11 @@
<!DOCTYPE HTML>
<html>
<head>
- <script type="application/javascript" src="head.js"></script>
<script type="application/javascript" src="pc.js"></script>
</head>
<body>
<pre id="test">
<script type="application/javascript">
createHTML({
bug: "991877",
title: "Basic RTCPeerConnection.close() tests"
--- a/dom/media/tests/mochitest/test_peerConnection_simulcastAnswer.html
+++ b/dom/media/tests/mochitest/test_peerConnection_simulcastAnswer.html
@@ -8,58 +8,58 @@
<pre id="test">
<script type="application/javascript">
createHTML({
bug: "1231507",
title: "Basic video-only peer connection with Simulcast answer",
visible: true
});
- var test;
- var pushPrefs = (...p) => SpecialPowers.pushPrefEnv({set: p});
+ const pushPrefs = (...p) => SpecialPowers.pushPrefEnv({set: p});
function addRIDExtension(pc, extensionId) {
- var receivers = pc._pc.getReceivers();
+ const receivers = pc._pc.getReceivers();
is(receivers.length, 1, "We have exactly one RTP receiver");
- var receiver = receivers[0];
+ const receiver = receivers[0];
SpecialPowers.wrap(pc._pc).mozAddRIDExtension(receiver, extensionId);
}
function selectRecvRID(pc, rid) {
- var receivers = pc._pc.getReceivers();
+ const receivers = pc._pc.getReceivers();
is(receivers.length, 1, "We have exactly one RTP receiver");
- var receiver = receivers[0];
+ const receiver = receivers[0];
SpecialPowers.wrap(pc._pc).mozAddRIDFilter(receiver, rid);
}
runNetworkTest(() =>
pushPrefs(['media.peerconnection.simulcast', true],
// 180Kbps was determined empirically, set well-higher than
// the 80Kbps+overhead needed for the two simulcast streams.
// 100Kbps was apparently too low.
['media.peerconnection.video.min_bitrate_estimate', 180*1000]).then(() => {
- var helper;
+ let emitter, helper;
-    test = new PeerConnectionTest({bundle: false});
+    const test = new PeerConnectionTest({bundle: false});
test.setMediaConstraints([{video: true}], [{video: true}]);
test.chain.replace("PC_REMOTE_GUM", [
function PC_REMOTE_CANVAS_CAPTURESTREAM(test) {
+ emitter = new VideoFrameEmitter();
helper = new VideoStreamHelper();
- test.pcRemote.attachLocalStream(helper.stream());
+ test.pcRemote.attachLocalStream(emitter.stream());
}
]);
test.chain.insertAfter('PC_REMOTE_GET_OFFER', [
function PC_REMOTE_SET_RIDS(test) {
- var senders = test.pcRemote._pc.getSenders();
+ const senders = test.pcRemote._pc.getSenders();
is(senders.length, 1, "We have exactly one RTP sender");
- var sender = senders[0];
+ const sender = senders[0];
ok(sender.track, "Sender has a track");
return sender.setParameters({
encodings: [{ rid: "foo", maxBitrate: 40000 },
{ rid: "bar", maxBitrate: 40000, scaleResolutionDownBy: 2 }]
});
},
function PC_LOCAL_ADD_RIDS_TO_OFFER(test) {
@@ -83,65 +83,71 @@
sdputils.removeSimulcastProperties(test._remote_answer.sdp);
}
]);
// do this after set remote description so the MediaPipeline
// has been created.
test.chain.insertAfter('PC_LOCAL_SET_REMOTE_DESCRIPTION',[
function PC_LOCAL_SET_RTP_FIRST_RID(test) {
- var extmap_id = test._local_offer.sdp.match(
+ const extmap_id = test._local_offer.sdp.match(
"a=extmap:([0-9+])/recvonly urn:ietf:params:rtp-hdrext:sdes:rtp-stream-id");
ok(extmap_id, "Local offer has extmap id for simulcast: " + extmap_id[1]);
// Cause pcLocal to filter out everything but RID "bar", only
// allowing one of the simulcast streams through.
addRIDExtension(test.pcLocal, extmap_id[1]);
selectRecvRID(test.pcLocal, "bar");
}
]);
test.chain.append([
- function PC_LOCAL_WAIT_FOR_FRAMES() {
- var vremote = test.pcLocal.remoteMediaElements[0];
+ async function PC_LOCAL_WAIT_FOR_FRAMES() {
+ const vremote = test.pcLocal.remoteMediaElements[0];
ok(vremote, "Should have remote video element for pcLocal");
- return helper.waitForFrames(vremote);
+ emitter.start();
+ await helper.checkVideoPlaying(vremote, 10, 10, 16);
+ emitter.stop();
},
function PC_LOCAL_CHECK_SIZE_1() {
- var vlocal = test.pcRemote.localMediaElements[0];
- var vremote = test.pcLocal.remoteMediaElements[0];
+ const vlocal = test.pcRemote.localMediaElements[0];
+ const vremote = test.pcLocal.remoteMediaElements[0];
ok(vlocal, "Should have local video element for pcRemote");
ok(vremote, "Should have remote video element for pcLocal");
ok(vlocal.videoWidth > 0, "source width is positive");
ok(vlocal.videoHeight > 0, "source height is positive");
is(vremote.videoWidth, vlocal.videoWidth / 2, "sink is 1/2 width of source");
is(vremote.videoHeight, vlocal.videoHeight / 2, "sink is 1/2 height of source");
},
function PC_LOCAL_SET_RTP_SECOND_RID(test) {
// Now, cause pcLocal to filter out everything but RID "foo", only
// allowing the other simulcast stream through.
selectRecvRID(test.pcLocal, "foo");
},
function PC_LOCAL_WAIT_FOR_SECOND_MEDIA_FLOW(test) {
return test.pcLocal.waitForMediaFlow();
},
- function PC_LOCAL_WAIT_FOR_FRAMES_2() {
- var vremote = test.pcLocal.remoteMediaElements[0];
+ async function PC_LOCAL_WAIT_FOR_FRAMES_2() {
+ const vremote = test.pcLocal.remoteMediaElements[0];
ok(vremote, "Should have remote video element for pcLocal");
- return helper.waitForFrames(vremote);
+ emitter.start();
+ await helper.checkVideoPlaying(vremote, 10, 10, 16);
+ emitter.stop();
},
// For some reason, even though we're getting a 25x25 stream, sometimes
// the resolution isn't updated on the video element on the first frame.
- function PC_LOCAL_WAIT_FOR_FRAMES_3() {
- var vremote = test.pcLocal.remoteMediaElements[0];
+ async function PC_LOCAL_WAIT_FOR_FRAMES_3() {
+ const vremote = test.pcLocal.remoteMediaElements[0];
ok(vremote, "Should have remote video element for pcLocal");
- return helper.waitForFrames(vremote);
+ emitter.start();
+ await helper.checkVideoPlaying(vremote, 10, 10, 16);
+ emitter.stop();
},
function PC_LOCAL_CHECK_SIZE_2() {
- var vlocal = test.pcRemote.localMediaElements[0];
- var vremote = test.pcLocal.remoteMediaElements[0];
+ const vlocal = test.pcRemote.localMediaElements[0];
+ const vremote = test.pcLocal.remoteMediaElements[0];
ok(vlocal, "Should have local video element for pcRemote");
ok(vremote, "Should have remote video element for pcLocal");
ok(vlocal.videoWidth > 0, "source width is positive");
ok(vlocal.videoHeight > 0, "source height is positive");
is(vremote.videoWidth, vlocal.videoWidth, "sink is same width as source");
is(vremote.videoHeight, vlocal.videoHeight, "sink is same height as source");
},
]);
--- a/dom/media/tests/mochitest/test_peerConnection_simulcastOffer.html
+++ b/dom/media/tests/mochitest/test_peerConnection_simulcastOffer.html
@@ -8,58 +8,58 @@
<pre id="test">
<script type="application/javascript">
createHTML({
bug: "1231507",
title: "Basic video-only peer connection with Simulcast offer",
visible: true
});
- var test;
- var pushPrefs = (...p) => SpecialPowers.pushPrefEnv({set: p});
+ const pushPrefs = (...p) => SpecialPowers.pushPrefEnv({set: p});
function addRIDExtension(pc, extensionId) {
- var receivers = pc._pc.getReceivers();
+ const receivers = pc._pc.getReceivers();
is(receivers.length, 1, "We have exactly one RTP receiver");
- var receiver = receivers[0];
+ const receiver = receivers[0];
SpecialPowers.wrap(pc._pc).mozAddRIDExtension(receiver, extensionId);
}
function selectRecvRID(pc, rid) {
- var receivers = pc._pc.getReceivers();
+ const receivers = pc._pc.getReceivers();
is(receivers.length, 1, "We have exactly one RTP receiver");
- var receiver = receivers[0];
+ const receiver = receivers[0];
SpecialPowers.wrap(pc._pc).mozAddRIDFilter(receiver, rid);
}
runNetworkTest(() =>
pushPrefs(['media.peerconnection.simulcast', true],
// 180Kbps was determined empirically, set well-higher than
// the 80Kbps+overhead needed for the two simulcast streams.
// 100Kbps was apparently too low.
['media.peerconnection.video.min_bitrate_estimate', 180*1000]).then(() => {
- var helper;
+ let emitter, helper;
- test = new PeerConnectionTest({bundle: false});
+ const test = new PeerConnectionTest({bundle: false});
test.setMediaConstraints([{video: true}], []);
test.chain.replace("PC_LOCAL_GUM", [
function PC_LOCAL_CANVAS_CAPTURESTREAM(test) {
+ emitter = new VideoFrameEmitter();
helper = new VideoStreamHelper();
- test.pcLocal.attachLocalStream(helper.stream());
+ test.pcLocal.attachLocalStream(emitter.stream());
}
]);
test.chain.insertBefore('PC_LOCAL_CREATE_OFFER', [
function PC_LOCAL_SET_RIDS(test) {
- var senders = test.pcLocal._pc.getSenders();
+ const senders = test.pcLocal._pc.getSenders();
is(senders.length, 1, "We have exactly one RTP sender");
- var sender = senders[0];
+ const sender = senders[0];
ok(sender.track, "Sender has a track");
return sender.setParameters({
encodings: [{ rid: "foo", maxBitrate: 40000 },
{ rid: "bar", maxBitrate: 40000, scaleResolutionDownBy: 2 }]
});
}
]);
@@ -75,65 +75,71 @@
ok(test._remote_answer.sdp.match(/urn:ietf:params:rtp-hdrext:sdes:rtp-stream-id/), "Modified answer has RID");
}
]);
// do this after set local description so the MediaPipeline
// has been created.
test.chain.insertAfter('PC_REMOTE_SET_LOCAL_DESCRIPTION',[
function PC_REMOTE_SET_RTP_FIRST_RID(test) {
- var extmap_id = test.originalOffer.sdp.match(
+ const extmap_id = test.originalOffer.sdp.match(
"a=extmap:([0-9+])/sendonly urn:ietf:params:rtp-hdrext:sdes:rtp-stream-id");
ok(extmap_id, "Original offer has extmap id for simulcast: " + extmap_id[1]);
// Cause pcRemote to filter out everything but RID "foo", only
// allowing one of the simulcast streams through.
addRIDExtension(test.pcRemote, extmap_id[1]);
selectRecvRID(test.pcRemote, "foo");
}
]);
test.chain.append([
- function PC_REMOTE_WAIT_FOR_FRAMES() {
- var vremote = test.pcRemote.remoteMediaElements[0];
+ async function PC_REMOTE_WAIT_FOR_FRAMES() {
+ const vremote = test.pcRemote.remoteMediaElements[0];
ok(vremote, "Should have remote video element for pcRemote");
- return helper.waitForFrames(vremote);
+ emitter.start();
+ await helper.checkVideoPlaying(vremote, 10, 10, 16);
+ emitter.stop();
},
function PC_REMOTE_CHECK_SIZE_1() {
- var vlocal = test.pcLocal.localMediaElements[0];
- var vremote = test.pcRemote.remoteMediaElements[0];
+ const vlocal = test.pcLocal.localMediaElements[0];
+ const vremote = test.pcRemote.remoteMediaElements[0];
ok(vlocal, "Should have local video element for pcLocal");
ok(vremote, "Should have remote video element for pcRemote");
ok(vlocal.videoWidth > 0, "source width is positive");
ok(vlocal.videoHeight > 0, "source height is positive");
is(vremote.videoWidth, vlocal.videoWidth, "sink is same width as source");
is(vremote.videoHeight, vlocal.videoHeight, "sink is same height as source");
},
function PC_REMOTE_SET_RTP_SECOND_RID(test) {
// Now, cause pcRemote to filter out everything but RID "bar", only
// allowing the other simulcast stream through.
selectRecvRID(test.pcRemote, "bar");
},
function PC_REMOTE_WAIT_FOR_SECOND_MEDIA_FLOW(test) {
return test.pcRemote.waitForMediaFlow();
},
- function PC_REMOTE_WAIT_FOR_FRAMES_2() {
- var vremote = test.pcRemote.remoteMediaElements[0];
+ async function PC_REMOTE_WAIT_FOR_FRAMES_2() {
+ const vremote = test.pcRemote.remoteMediaElements[0];
ok(vremote, "Should have remote video element for pcRemote");
- return helper.waitForFrames(vremote);
+ emitter.start();
+ await helper.checkVideoPlaying(vremote, 10, 10, 16);
+ emitter.stop();
},
// For some reason, even though we're getting a 25x25 stream, sometimes
// the resolution isn't updated on the video element on the first frame.
- function PC_REMOTE_WAIT_FOR_FRAMES_3() {
- var vremote = test.pcRemote.remoteMediaElements[0];
+ async function PC_REMOTE_WAIT_FOR_FRAMES_3() {
+ const vremote = test.pcRemote.remoteMediaElements[0];
ok(vremote, "Should have remote video element for pcRemote");
- return helper.waitForFrames(vremote);
+ emitter.start();
+ await helper.checkVideoPlaying(vremote, 10, 10, 16);
+ emitter.stop();
},
function PC_REMOTE_CHECK_SIZE_2() {
- var vlocal = test.pcLocal.localMediaElements[0];
- var vremote = test.pcRemote.remoteMediaElements[0];
+ const vlocal = test.pcLocal.localMediaElements[0];
+ const vremote = test.pcRemote.remoteMediaElements[0];
ok(vlocal, "Should have local video element for pcLocal");
ok(vremote, "Should have remote video element for pcRemote");
ok(vlocal.videoWidth > 0, "source width is positive");
ok(vlocal.videoHeight > 0, "source height is positive");
is(vremote.videoWidth, vlocal.videoWidth / 2, "sink is 1/2 width of source");
is(vremote.videoHeight, vlocal.videoHeight / 2, "sink is 1/2 height of source");
},
]);