Bug 1208316 - Test media flow per track instead of per stream. r?drno
The existing media elements may have ended and new tracks been created in the
stream they used as source, since we now end playback when a stream goes inactive.
We test the tracks individually in media elements instead. They'll either be
live or ended forever.
MozReview-Commit-ID: DxBk2XjSVCH
--- a/dom/media/tests/mochitest/head.js
+++ b/dom/media/tests/mochitest/head.js
@@ -228,57 +228,68 @@ function realCreateHTML(meta) {
document.body.insertBefore(display, test);
var content = document.createElement('div');
content.setAttribute('id', 'content');
content.style.display = meta.visible ? 'block' : "none";
document.body.appendChild(content);
}
-function getMediaElement(label, direction, streamId) {
- var id = label + '_' + direction + '_' + streamId;
- return document.getElementById(id);
-}
-
/**
- * Create the HTML element if it doesn't exist yet and attach
- * it to the content node.
+ * Creates an element of the given type, assigns the given id, sets the controls
+ * and autoplay attributes and adds it to the content node.
*
- * @param {string} label
- * Prefix to use for the element
- * @param {direction} "local" or "remote"
- * @param {stream} A MediaStream id.
- * @param {audioOnly} Use <audio> element instead of <video>
- * @return {HTMLMediaElement} The created HTML media element
+ * @param {string} type
+ * Defining if we should create an "audio" or "video" element
+ * @param {string} id
+ * A string to use as the element id.
*/
-function createMediaElement(label, direction, streamId, audioOnly) {
- var id = label + '_' + direction + '_' + streamId;
- var element = document.getElementById(id);
-
- // Sanity check that we haven't created the element already
- if (element) {
- return element;
- }
-
- if (!audioOnly) {
- // Even if this is just audio now, we might add video later.
- element = document.createElement('video');
- } else {
- element = document.createElement('audio');
- }
+function createMediaElement(type, id) {
+ const element = document.createElement(type);
element.setAttribute('id', id);
element.setAttribute('height', 100);
element.setAttribute('width', 150);
element.setAttribute('controls', 'controls');
element.setAttribute('autoplay', 'autoplay');
document.getElementById('content').appendChild(element);
return element;
}
+/**
+ * Returns an existing element for the given track with the given idPrefix,
+ * as it was added by createMediaElementForTrack().
+ *
+ * @param {MediaStreamTrack} track
+ * Track used as the element's source.
+ * @param {string} idPrefix
+ * A string to use as the element id. The track id will also be appended.
+ */
+function getMediaElementForTrack(track, idPrefix) {
+ return document.getElementById(idPrefix + '_' + track.id);
+}
+
+/**
+ * Create a media element with a track as source and attach it to the content
+ * node.
+ *
+ * @param {MediaStreamTrack} track
+ * Track for use as source.
+ * @param {string} idPrefix
+ * A string to use as the element id. The track id will also be appended.
+ * @return {HTMLMediaElement} The created HTML media element
+ */
+function createMediaElementForTrack(track, idPrefix) {
+ const id = idPrefix + '_' + track.id;
+ const element = createMediaElement(track.kind, id);
+ element.srcObject = new MediaStream([track]);
+
+ return element;
+}
+
/**
* Wrapper function for mediaDevices.getUserMedia used by some tests. Whether
* to use fake devices or not is now determined in pref further below instead.
*
* @param {Dictionary} constraints
* The constraints for this mozGetUserMedia callback
*/
--- a/dom/media/tests/mochitest/pc.js
+++ b/dom/media/tests/mochitest/pc.js
@@ -857,33 +857,33 @@ PeerConnectionWrapper.prototype = {
get iceConnectionState() {
return this._pc.iceConnectionState;
},
setIdentityProvider: function(provider, protocol, identity) {
this._pc.setIdentityProvider(provider, protocol, identity);
},
- ensureMediaElement : function(track, stream, direction) {
- var element = getMediaElement(this.label, direction, stream.id);
+ ensureMediaElement : function(track, direction) {
+ const idPrefix = [this.label, direction].join('_');
+ var element = getMediaElementForTrack(track, idPrefix);
if (!element) {
- element = createMediaElement(this.label, direction, stream.id,
- this.audioElementsOnly);
+ element = createMediaElementForTrack(track, idPrefix);
if (direction == "local") {
this.localMediaElements.push(element);
} else if (direction == "remote") {
this.remoteMediaElements.push(element);
}
}
// We do this regardless, because sometimes we end up with a new stream with
// an old id (ie; the rollback tests cause the same stream to be added
// twice)
- element.srcObject = stream;
+ element.srcObject = new MediaStream([track]);
element.play();
},
/**
* Attaches a local track to this RTCPeerConnection using
* RTCPeerConnection.addTrack().
*
* Also creates a media element playing a MediaStream containing all
@@ -907,24 +907,24 @@ PeerConnectionWrapper.prototype = {
this.expectedLocalTrackInfoById[track.id] = {
type: track.kind,
streamId: stream.id,
};
// This will create one media element per track, which might not be how
// we set up things with the RTCPeerConnection. It's the only way
// we can ensure all sent tracks are flowing however.
- this.ensureMediaElement(track, new MediaStream([track]), "local");
+ this.ensureMediaElement(track, "local");
return this.observedNegotiationNeeded;
},
/**
* Callback when we get local media. Also an appropriate HTML media element
- * will be created, which may be obtained later with |getMediaElement|.
+ * will be created and added to the content node.
*
* @param {MediaStream} stream
* Media stream to handle
*/
attachLocalStream : function(stream) {
info("Got local media stream: (" + stream.id + ")");
this.expectNegotiationNeeded();
@@ -945,17 +945,17 @@ PeerConnectionWrapper.prototype = {
stream.getTracks().forEach(track => {
ok(track.id, "track has id");
ok(track.kind, "track has kind");
this.expectedLocalTrackInfoById[track.id] = {
type: track.kind,
streamId: stream.id
};
- this.ensureMediaElement(track, stream, "local");
+ this.ensureMediaElement(track, "local");
});
},
removeSender : function(index) {
var sender = this._pc.getSenders()[index];
delete this.expectedLocalTrackInfoById[sender.track.id];
this.expectNegotiationNeeded();
this._pc.removeTrack(sender);
@@ -1176,17 +1176,17 @@ PeerConnectionWrapper.prototype = {
this._pc.addEventListener('track', event => {
info(this + ": 'ontrack' event fired for " + JSON.stringify(event.track));
this.checkTrackIsExpected(event.track,
this.expectedRemoteTrackInfoById,
this.observedRemoteTrackInfoById);
ok(this.isTrackOnPC(event.track), "Found track " + event.track.id);
- this.ensureMediaElement(event.track, event.streams[0], 'remote');
+ this.ensureMediaElement(event.track, 'remote');
});
},
/**
* Either adds a given ICE candidate right away or stores it to be added
* later, depending on the state of the PeerConnection.
*
* @param {object} candidate
@@ -1363,55 +1363,52 @@ PeerConnectionWrapper.prototype = {
id => {
if (!this.observedRemoteTrackInfoById[id].negotiated) {
delete this.observedRemoteTrackInfoById[id];
}
});
},
/**
- * Check that media flow is present on the given media element by waiting for
- * it to reach ready state HAVE_ENOUGH_DATA and progress time further than
- * the start of the check.
+ * Check that media flow is present for the given media element by checking
+ * that it reaches ready state HAVE_ENOUGH_DATA and progresses time further
+ * than the start of the check.
*
* This ensures, that the stream being played is producing
- * data and that at least one video frame has been displayed.
+ * data and, in case it contains a video track, that at least one video frame
+ * has been displayed.
*
- * @param {object} element
- * A media element to wait for data flow on.
+   * @param {HTMLMediaElement} element
+   *        The media element to check
* @returns {Promise}
- * A promise that resolves when media is flowing.
+ * A promise that resolves when media data is flowing.
*/
waitForMediaElementFlow : function(element) {
- return new Promise(resolve => {
- info("Checking data flow to element: " + element.id);
- if (element.ended && element.readyState >= element.HAVE_CURRENT_DATA) {
- resolve();
- return;
- }
- var haveEnoughData = false;
- var oncanplay = () => {
- info("Element " + element.id + " saw 'canplay', " +
- "meaning HAVE_ENOUGH_DATA was just reached.");
- haveEnoughData = true;
- element.removeEventListener("canplay", oncanplay);
- };
- var ontimeupdate = () => {
- info("Element " + element.id + " saw 'timeupdate'" +
- ", currentTime=" + element.currentTime +
- "s, readyState=" + element.readyState);
- if (haveEnoughData || element.readyState == element.HAVE_ENOUGH_DATA) {
- element.removeEventListener("timeupdate", ontimeupdate);
- ok(true, "Media flowing for element: " + element.id);
- resolve();
- }
- };
- element.addEventListener("canplay", oncanplay);
- element.addEventListener("timeupdate", ontimeupdate);
- });
+ info("Checking data flow for element: " + element.id);
+ is(element.ended, !element.srcObject.active,
+ "Element ended should be the inverse of the MediaStream's active state");
+ if (element.ended) {
+    ok(element.readyState >= element.HAVE_CURRENT_DATA,
+       "Element " + element.id + " is ended and should have had data");
+ return Promise.resolve();
+ }
+
+ const haveEnoughData = (element.readyState == element.HAVE_ENOUGH_DATA ?
+ Promise.resolve() :
+ haveEvent(element, "canplay", wait(60000,
+ new Error("Timeout for element " + element.id))))
+ .then(_ => info("Element " + element.id + " has enough data."));
+
+ const startTime = element.currentTime;
+ const timeProgressed = timeout(
+ listenUntil(element, "timeupdate", _ => element.currentTime > startTime),
+ 60000, "Element " + element.id + " should progress currentTime")
+ .then();
+
+ return Promise.all([haveEnoughData, timeProgressed]);
},
/**
* Wait for RTP packet flow for the given MediaStreamTrack.
*
* @param {object} track
* A MediaStreamTrack to wait for data flow on.
* @returns {Promise}
--- a/dom/media/tests/mochitest/test_getUserMedia_mediaElementCapture_audio.html
+++ b/dom/media/tests/mochitest/test_getUserMedia_mediaElementCapture_audio.html
@@ -14,17 +14,17 @@ createHTML({
visible: true
});
var audioContext;
var gUMAudioElement;
var analyser;
runTest(() => getUserMedia({audio: true})
.then(stream => {
- gUMAudioElement = createMediaElement("gUMAudio", "local", "gUMAudio", true);
+ gUMAudioElement = createMediaElement("audio", "gUMAudio");
gUMAudioElement.srcObject = stream;
audioContext = new AudioContext();
info("Capturing");
analyser = new AudioStreamAnalyser(audioContext,
gUMAudioElement.mozCaptureStream());
analyser.enableDebugCanvas();
--- a/dom/media/tests/mochitest/test_getUserMedia_mediaElementCapture_tracks.html
+++ b/dom/media/tests/mochitest/test_getUserMedia_mediaElementCapture_tracks.html
@@ -21,34 +21,34 @@ var videoCaptureStream;
var untilEndedElement;
var streamUntilEnded;
var tracks = [];
runTest(() => getUserMedia({audio: true, video: true})
.then(stream => {
// We need to test with multiple tracks. We add an extra of each kind.
stream.getTracks().forEach(t => stream.addTrack(t.clone()));
- audioElement = createMediaElement("gUMAudio", "local", "gUMAudio", true);
+ audioElement = createMediaElement("audio", "gUMAudio");
audioElement.srcObject = stream;
return haveEvent(audioElement, "loadedmetadata", wait(50000, new Error("Timeout")));
})
.then(() => {
info("Capturing audio element (loadedmetadata -> captureStream)");
audioCaptureStream = audioElement.mozCaptureStream();
is(audioCaptureStream.getAudioTracks().length, 2,
"audio element should capture two audio tracks");
is(audioCaptureStream.getVideoTracks().length, 0,
"audio element should not capture any video tracks");
return haveNoEvent(audioCaptureStream, "addtrack");
})
.then(() => {
- videoElement = createMediaElement("gUMVideo", "local", "gUMVideo", false);
+ videoElement = createMediaElement("video", "gUMVideo");
info("Capturing video element (captureStream -> loadedmetadata)");
videoCaptureStream = videoElement.mozCaptureStream();
videoElement.srcObject = audioElement.srcObject.clone();
is(videoCaptureStream.getTracks().length, 0,
"video element should have no tracks before metadata known");
@@ -138,17 +138,17 @@ runTest(() => getUserMedia({audio: true,
.filter(t => t.readyState == "ended").length, 1,
"Captured video stream should have one ended video tracks");
is(videoCaptureStream.getVideoTracks()
.filter(t => t.readyState == "live").length, 1,
"Captured video stream should have one live video track");
info("Testing CaptureStreamUntilEnded");
untilEndedElement =
- createMediaElement("gUMVideoUntilEnded", "local", "gUMVideoUntilEnded", false);
+ createMediaElement("video", "gUMVideoUntilEnded");
untilEndedElement.srcObject = audioElement.srcObject;
return haveEvent(untilEndedElement, "loadedmetadata",
wait(50000, new Error("Timeout")));
})
.then(() => {
streamUntilEnded = untilEndedElement.mozCaptureStreamUntilEnded();
--- a/dom/media/tests/mochitest/test_getUserMedia_mediaElementCapture_video.html
+++ b/dom/media/tests/mochitest/test_getUserMedia_mediaElementCapture_video.html
@@ -57,23 +57,23 @@ var checkVideoPaused = video => checkHas
Array.slice(startPixel.data) + "]. Pass=" + result);
return result;
}, pausedTimeout);
}).then(result => ok(!result, "Frame shouldn't change within " + pausedTimeout / 1000 + " seconds."));
runTest(() => getUserMedia({video: true, fake: true})
.then(stream => {
gUMVideoElement =
- createMediaElement("gUMVideo", "local", "gUMVideo", false);
+ createMediaElement("video", "gUMVideo");
gUMVideoElement.srcObject = stream;
gUMVideoElement.play();
info("Capturing");
captureStreamElement =
- createMediaElement("captureStream", "local", "captureStream", false);
+ createMediaElement("video", "captureStream");
captureStreamElement.srcObject = gUMVideoElement.mozCaptureStream();
captureStreamElement.play();
return checkVideoPlaying(captureStreamElement);
})
.then(() => {
info("Video flowing. Pausing.");
gUMVideoElement.pause();
--- a/dom/media/tests/mochitest/test_peerConnection_trackDisabling.html
+++ b/dom/media/tests/mochitest/test_peerConnection_trackDisabling.html
@@ -15,29 +15,31 @@ createHTML({
runNetworkTest(() => {
var test = new PeerConnectionTest();
// Always use fake tracks since we depend on video to be somewhat green and
// audio to have a large 1000Hz component (or 440Hz if using fake devices).
test.setMediaConstraints([{audio: true, video: true, fake: true}], []);
test.chain.append([
function CHECK_ASSUMPTIONS() {
- is(test.pcLocal.localMediaElements.length, 1,
+ is(test.pcLocal.localMediaElements.length, 2,
-         "pcLocal should have one media element");
+         "pcLocal should have two media elements");
- is(test.pcRemote.remoteMediaElements.length, 1,
+ is(test.pcRemote.remoteMediaElements.length, 2,
-         "pcRemote should have one media element");
+         "pcRemote should have two media elements");
is(test.pcLocal._pc.getLocalStreams().length, 1,
"pcLocal should have one stream");
is(test.pcRemote._pc.getRemoteStreams().length, 1,
"pcRemote should have one stream");
},
function CHECK_VIDEO() {
var h = new CaptureStreamTestHelper2D();
- var localVideo = test.pcLocal.localMediaElements[0];
- var remoteVideo = test.pcRemote.remoteMediaElements[0];
+ var localVideo = test.pcLocal.localMediaElements
+ .find(e => e instanceof HTMLVideoElement);
+ var remoteVideo = test.pcRemote.remoteMediaElements
+ .find(e => e instanceof HTMLVideoElement);
// We check a pixel somewhere away from the top left corner since
// MediaEngineDefault puts semi-transparent time indicators there.
const offsetX = 50;
const offsetY = 50;
const threshold = 128;
// We're regarding black as disabled here, and we're setting the alpha
// channel of the pixel to 255 to disregard alpha when testing.
--- a/dom/media/tests/mochitest/test_peerConnection_trackDisabling_clones.html
+++ b/dom/media/tests/mochitest/test_peerConnection_trackDisabling_clones.html
@@ -21,38 +21,40 @@ runNetworkTest(() => {
// Always use fake tracks since we depend on audio to have a large 1000Hz
// component.
test.setMediaConstraints([{audio: true, video: true, fake: true}], []);
test.chain.replace("PC_LOCAL_GUM", [
function PC_LOCAL_GUM_CLONE() {
return getUserMedia(test.pcLocal.constraints[0]).then(stream => {
originalStream = stream;
localVideoOriginal =
- createMediaElement("audiovideo", "local-original");
+ createMediaElement("video", "local-original");
localVideoOriginal.srcObject = stream;
test.pcLocal.attachLocalStream(originalStream.clone());
});
}
]);
test.chain.append([
function CHECK_ASSUMPTIONS() {
- is(test.pcLocal.localMediaElements.length, 1,
+ is(test.pcLocal.localMediaElements.length, 2,
-         "pcLocal should have one media element");
+         "pcLocal should have two media elements");
- is(test.pcRemote.remoteMediaElements.length, 1,
+ is(test.pcRemote.remoteMediaElements.length, 2,
-         "pcRemote should have one media element");
+         "pcRemote should have two media elements");
is(test.pcLocal._pc.getLocalStreams().length, 1,
"pcLocal should have one stream");
is(test.pcRemote._pc.getRemoteStreams().length, 1,
"pcRemote should have one stream");
},
function CHECK_VIDEO() {
info("Checking video");
var h = new CaptureStreamTestHelper2D();
- var localVideoClone = test.pcLocal.localMediaElements[0];
- var remoteVideoClone = test.pcRemote.remoteMediaElements[0];
+ var localVideoClone = test.pcLocal.localMediaElements
+ .find(e => e instanceof HTMLVideoElement);
+ var remoteVideoClone = test.pcRemote.remoteMediaElements
+ .find(e => e instanceof HTMLVideoElement);
// We check a pixel somewhere away from the top left corner since
// MediaEngineDefault puts semi-transparent time indicators there.
const offsetX = 50;
const offsetY = 50;
const threshold = 128;
const remoteDisabledColor = h.black;