Bug 1296531 - Let waitForPixel and friends take a cancelPromise. r?jib draft
authorAndreas Pehrson <pehrsons@gmail.com>
Mon, 29 May 2017 16:27:45 +0200
changeset 670325 8a23d15e1d274f6c580ae1c9c0669bb274f9fecc
parent 670324 2231e20902d32c4fe6837671bc6dbaf4f1e4dffb
child 670326 3c64790204ed2959cde65f683b13f34af7991db2
push id81598
push userbmo:apehrson@mozilla.com
push dateTue, 26 Sep 2017 09:13:19 +0000
reviewersjib
bugs1296531
milestone58.0a1
Bug 1296531 - Let waitForPixel and friends take a cancelPromise. r?jib MozReview-Commit-ID: 42hT181wkvq
dom/canvas/test/captureStream_common.js
dom/canvas/test/test_capture.html
dom/canvas/test/webgl-mochitest/test_capture.html
dom/ipc/tests/test_temporaryfile_stream.html
dom/media/test/test_mediarecorder_record_addtracked_stream.html
dom/media/test/test_mediarecorder_record_canvas_captureStream.html
dom/media/test/test_mediarecorder_record_downsize_resolution.html
dom/media/test/test_mediarecorder_record_upsize_resolution.html
dom/media/test/test_temporary_file_blob_video_plays.html
dom/media/tests/mochitest/head.js
dom/media/tests/mochitest/test_getUserMedia_bug1223696.html
dom/media/tests/mochitest/test_getUserMedia_mediaElementCapture_video.html
dom/media/tests/mochitest/test_peerConnection_addSecondVideoStream.html
dom/media/tests/mochitest/test_peerConnection_addSecondVideoStreamNoBundle.html
dom/media/tests/mochitest/test_peerConnection_captureStream_canvas_2d.html
dom/media/tests/mochitest/test_peerConnection_captureStream_canvas_2d_noSSRC.html
dom/media/tests/mochitest/test_peerConnection_captureStream_canvas_webgl.html
dom/media/tests/mochitest/test_peerConnection_multiple_captureStream_canvas_2d.html
dom/media/tests/mochitest/test_peerConnection_removeThenAddVideoTrack.html
dom/media/tests/mochitest/test_peerConnection_removeThenAddVideoTrackNoBundle.html
dom/media/tests/mochitest/test_peerConnection_renderAfterRenegotiation.html
dom/media/tests/mochitest/test_peerConnection_replaceVideoThenRenegotiate.html
dom/media/tests/mochitest/test_peerConnection_simulcastAnswer.html
dom/media/tests/mochitest/test_peerConnection_simulcastOffer.html
dom/media/tests/mochitest/test_peerConnection_trackDisabling.html
dom/media/tests/mochitest/test_peerConnection_trackDisabling_clones.html
dom/media/tests/mochitest/test_peerConnection_verifyVideoAfterRenegotiation.html
dom/media/tests/mochitest/test_peerConnection_videoRenegotiationInactiveAnswer.html
--- a/dom/canvas/test/captureStream_common.js
+++ b/dom/canvas/test/captureStream_common.js
@@ -57,126 +57,139 @@ CaptureStreamTestHelper.prototype = {
   },
 
   /*
    * Returns the pixel at (|offsetX|, |offsetY|) (from top left corner) of
    * |video| as an array of the pixel's color channels: [R,G,B,A].  Allows
    * optional scaling of the drawImage() call (so that a 1x1 black image
    * won't just draw 1 pixel in the corner)
    */
-  getPixel: function (video, offsetX, offsetY, width, height) {
-    offsetX = offsetX || 0; // Set to 0 if not passed in.
-    offsetY = offsetY || 0; // Set to 0 if not passed in.
-    width = width || 0; // Set to 0 if not passed in.
-    height = height || 0; // Set to 0 if not passed in.
-
+  getPixel: function (video, offsetX = 0, offsetY = 0, width = 0, height = 0) {
     // Avoids old values in case of a transparent image.
     CaptureStreamTestHelper2D.prototype.clear.call(this, this.cout);
 
     var ctxout = this.cout.getContext('2d');
     if (width != 0 || height != 0) {
       ctxout.drawImage(video, 0, 0, width, height);
     } else {
       ctxout.drawImage(video, 0, 0);
     }
     return ctxout.getImageData(offsetX, offsetY, 1, 1).data;
   },
 
   /*
    * Returns true if px lies within the per-channel |threshold| of the
    * referenced color for all channels. px is on the form of an array of color
    * channels, [R,G,B,A]. Each channel is in the range [0, 255].
+   *
+   * Threshold defaults to 0 which is an exact match.
    */
-  isPixel: function (px, refColor, threshold) {
-    threshold = threshold || 0; // Default to 0 (exact match) if not passed in.
+  isPixel: function (px, refColor, threshold = 0) {
     return px.every((ch, i) => Math.abs(ch - refColor.data[i]) <= threshold);
   },
 
   /*
    * Returns true if px lies further away than |threshold| of the
    * referenced color for any channel. px is on the form of an array of color
    * channels, [R,G,B,A]. Each channel is in the range [0, 255].
+   *
+   * Threshold defaults to 127 which should be far enough for most cases.
    */
-  isPixelNot: function (px, refColor, threshold) {
-    if (threshold === undefined) {
-      // Default to 127 (should be sufficiently far away) if not passed in.
-      threshold = 127;
-    }
+  isPixelNot: function (px, refColor, threshold = 127) {
     return px.some((ch, i) => Math.abs(ch - refColor.data[i]) > threshold);
   },
 
   /*
    * Behaves like isPixelNot but ignores the alpha channel.
    */
   isOpaquePixelNot: function(px, refColor, threshold) {
     px[3] = refColor.data[3];
     return this.isPixelNot(px, refColor, threshold);
   },
 
   /*
    * Returns a promise that resolves when the provided function |test|
-   * returns true.
+   * returns true, or rejects when the optional `cancel` promise resolves.
    */
-  waitForPixel: function (video, offsetX, offsetY, test, timeout, width, height) {
-    return new Promise(resolve => {
-      const startTime = video.currentTime;
-      var ontimeupdate = () => {
-        var pixelMatch = false;
-        try {
-            pixelMatch = test(this.getPixel(video, offsetX, offsetY, width, height));
-        } catch (e) {
-          info("Waiting for pixel but no video available: " + e + "\n" + e.stack);
-        }
-        if (!pixelMatch &&
-            (!timeout || video.currentTime < startTime + (timeout / 1000.0))) {
-          // No match yet and,
-          // No timeout (waiting indefinitely) or |timeout| has not passed yet.
+  waitForPixel: async function (video, test, {
+                                  offsetX = 0, offsetY = 0,
+                                  width = 0, height = 0,
+                                  cancel = new Promise(() => {}),
+                                } = {}) {
+    let aborted = false;
+    cancel.then(e => aborted = true);
+
+    while (true) {
+      await Promise.race([
+        new Promise(resolve => video.addEventListener("timeupdate", resolve, { once: true })),
+        cancel,
+      ]);
+      if (aborted) {
+        throw await cancel;
+      }
+      try {
+        if (test(this.getPixel(video, offsetX, offsetY, width, height))) {
           return;
         }
-        video.removeEventListener("timeupdate", ontimeupdate);
-        resolve(pixelMatch);
-      };
-      video.addEventListener("timeupdate", ontimeupdate);
-    });
+      } catch (e) {
+        info("Waiting for pixel but no video available: " + e + "\n" + e.stack);
+      }
+    }
   },
 
   /*
    * Returns a promise that resolves when the top left pixel of |video| matches
    * on all channels. Use |threshold| for fuzzy matching the color on each
-   * channel, in the range [0,255].
+   * channel, in the range [0,255]. 0 means exact match, 255 accepts anything.
    */
-  waitForPixelColor: function (video, refColor, threshold, infoString) {
+  pixelMustBecome: async function (video, refColor, {
+                                     threshold = 0, infoString = "n/a",
+                                     cancel = new Promise(() => {}),
+                                   } = {}) {
     info("Waiting for video " + video.id + " to match [" +
          refColor.data.join(',') + "] - " + refColor.name +
          " (" + infoString + ")");
     var paintedFrames = video.mozPaintedFrames-1;
-    return this.waitForPixel(video, 0, 0,
-                             px => { if (paintedFrames != video.mozPaintedFrames) {
-				       info("Frame: " + video.mozPaintedFrames +
-					    " IsPixel ref=" + refColor.data +
-					    " threshold=" + threshold +
-					    " value=" + px);
-				       paintedFrames = video.mozPaintedFrames;
-				     }
-				     return this.isPixel(px, refColor, threshold); })
-      .then(() => ok(true, video.id + " " + infoString));
+    await this.waitForPixel(video, px => {
+        if (paintedFrames != video.mozPaintedFrames) {
+         info("Frame: " + video.mozPaintedFrames +
+             " IsPixel ref=" + refColor.data +
+             " threshold=" + threshold +
+             " value=" + px);
+         paintedFrames = video.mozPaintedFrames;
+        }
+        return this.isPixel(px, refColor, threshold);
+      }, {
+        offsetX: 0, offsetY: 0,
+        width: 0, height: 0,
+        cancel,
+      });
+    ok(true, video.id + " " + infoString);
   },
 
   /*
-   * Returns a promise that resolves after |timeout| ms of playback or when the
+   * Returns a promise that resolves after |time| ms of playback or when the
    * top left pixel of |video| becomes |refColor|. The test is failed if the
-   * timeout is not reached.
+   * time is not reached.
    */
-  waitForPixelColorTimeout: function (video, refColor, threshold, timeout, infoString) {
-    info("Waiting for " + video.id + " to time out after " + timeout +
+  pixelMustNotBecome: async function (video, refColor, {
+                                        threshold = 0, time = 5000,
+                                        infoString = "n/a",
+                                      } = {}) {
+    info("Waiting for " + video.id + " to time out after " + time +
          "ms against [" + refColor.data.join(',') + "] - " + refColor.name);
-    return this.waitForPixel(video, 0, 0,
-                             px => this.isPixel(px, refColor, threshold),
-                             timeout)
-      .then(result => ok(!result, video.id + " " + infoString));
+    let timeout = new Promise(resolve => setTimeout(resolve, time));
+    let analysis = async () => {
+      await this.waitForPixel(video, px => this.isPixel(px, refColor, threshold), {
+          offsetX: 0, offsetY: 0, width: 0, height: 0,
+        });
+      throw new Error("Got color " + refColor.name + ". " + infoString);
+    };
+    await Promise.race([timeout, analysis()]);
+    ok(true, video.id + " " + infoString);
   },
 
   /* Create an element of type |type| with id |id| and append it to the body. */
   createAndAppendElement: function (type, id) {
     var e = document.createElement(type);
     e.id = id;
     e.width = this.elemWidth;
     e.height = this.elemHeight;
--- a/dom/canvas/test/test_capture.html
+++ b/dom/canvas/test/test_capture.html
@@ -26,79 +26,93 @@ function checkDrawColorInitialRed() {
   ok(h.isPixel(h.getPixel(vauto), h.blackTransparent, 0),
      "vauto should not be drawn to before stable state");
   ok(h.isPixel(h.getPixel(vrate), h.blackTransparent, 0),
      "vrate should not be drawn to before stable state");
   ok(h.isPixel(h.getPixel(vmanual), h.blackTransparent, 0),
      "vmanual should not be drawn to before stable state");
 
   return Promise.resolve()
-    .then(() => h.waitForPixelColor(vauto, h.red, 0,
-                                    "should become red automatically"))
-    .then(() => h.waitForPixelColor(vrate, h.red, 0,
-                                    "should become red automatically"))
-    .then(() => h.waitForPixelColor(vmanual, h.red, 0,
-                                    "should become red when we get" +
-                                    " to stable state (first frame)"));
+    .then(() => h.pixelMustBecome(vauto, h.red, {
+      infoString: "should become red automatically",
+    }))
+    .then(() => h.pixelMustBecome(vrate, h.red, {
+      infoString: "should become red automatically",
+    }))
+    .then(() => h.pixelMustBecome(vmanual, h.red, {
+      infoString: "should become red when we get to stable state (first frame)",
+    }));
 }
 
 function checkDrawColorGreen() {
   info("Checking that drawing green propagates properly to video elements.");
 
   var drawing = h.startDrawing(() => h.drawColor(c, h.green));
 
   return Promise.resolve()
-    .then(() => h.waitForPixelColor(vauto, h.green, 0,
-                                    "should become green automatically"))
-    .then(() => h.waitForPixelColor(vrate, h.green, 0,
-                                    "should become green automatically"))
-    .then(() => h.waitForPixelColor(vmanual, h.red, 0,
-                                    "should still be red"))
+    .then(() => h.pixelMustBecome(vauto, h.green, {
+      infoString: "should become green automatically",
+    }))
+    .then(() => h.pixelMustBecome(vrate, h.green, {
+      infoString: "should become green automatically",
+    }))
+    .then(() => h.pixelMustBecome(vmanual, h.red, {
+      infoString: "should still be red",
+    }))
     .then(() => h.requestFrame(vmanual))
-    .then(() => h.waitForPixelColor(vmanual, h.green, 0,
-                                    "should become green after requstFrame()"))
+    .then(() => h.pixelMustBecome(vmanual, h.green, {
+      infoString: "should become green after requestFrame()",
+    }))
     .catch(err => ok(false, "checkDrawColorGreen failed: ", err))
     .then(() => drawing.stop());
 }
 
 function checkRequestFrameOrderGuarantee() {
   info("Checking that requestFrame() immediately after a drawColor() " +
        "call results in the expected frame seen in the stream.");
 
   return Promise.resolve()
-    .then(() => h.waitForPixelColor(vmanual, h.green, 0,
-                                    "should still be green"))
+    .then(() => h.pixelMustBecome(vmanual, h.green, {
+      infoString: "should still be green",
+    }))
     .then(() => h.drawColor(c, h.red))   // 1. Draw canvas red
     .then(() => h.requestFrame(vmanual)) // 2. Immediately request a frame
-    .then(() => h.waitForPixelColor(vmanual, h.red, 0,
-                                    "should become red after call order test"))
+    .then(() => h.pixelMustBecome(vmanual, h.red, {
+      infoString: "should become red after call order test",
+    }));
 }
 
 function checkDrawImageNotCleanRed() {
   info("Checking that drawImage with not origin-clean image renders streams useless.");
   var ctx = c.getContext('2d');
   var notCleanRed = new Image();
   var drawing;
 
   return new Promise((resolve, reject) => {
     notCleanRed.onload = resolve;
     notCleanRed.onerror = () => reject(new Error("Failed to load tainted image."));
     notCleanRed.src = "http://example.com/tests/dom/canvas/test/image_red_crossorigin_credentials.png";
     document.body.appendChild(notCleanRed);
   })
     .then(() => drawing = h.startDrawing(() => ctx.drawImage(notCleanRed, 0, 0, c.width, c.height)))
     .then(() => h.testNotClean(c))
-    .then(() => h.waitForPixelColorTimeout(vauto, h.red, 0, 1000,
-                                           "should not become red"))
-    .then(() => h.isPixelNot(h.getPixel(vrate), h.red, 250,
-                             "should not have become red"))
-    .then(() => h.waitForPixelColor(vmanual, h.green, 0, "should still be green"))
+    .then(() => h.pixelMustNotBecome(vauto, h.red, {
+      time: 1000,
+      infoString: "should not become red",
+    }))
+    .then(() => ok(h.isPixelNot(h.getPixel(vrate), h.red, 250),
+                   "should not have become red"))
+    .then(() => h.pixelMustBecome(vmanual, h.green, {
+      infoString: "should still be green",
+    }))
     .then(() => h.requestFrame(vmanual))
-    .then(() => h.waitForPixelColorTimeout(vmanual, h.red, 0, 1000,
-                                           "should not become red"))
+    .then(() => h.pixelMustNotBecome(vmanual, h.red, {
+      time: 1000,
+      infoString: "should not become red",
+    }))
     .catch(err => ok(false, "checkDrawImageNotCleanRed failed: ", err))
     .then(() => drawing.stop());
 }
 
 function checkEndedOnStop() {
   let promises = [vauto, vmanual, vrate].map(elem => {
     elem.srcObject.getTracks()[0].stop();
     return new Promise(resolve =>
@@ -112,16 +126,17 @@ function checkEndedOnStop() {
 }
 
 function finish() {
   ok(true, 'Test complete.');
   SimpleTest.finish();
 }
 
 function beginTest() {
+  SimpleTest.requestFlakyTimeout("Ensuring nothing happens until timing out with good margin");
   h = new CaptureStreamTestHelper2D();
 
   c = h.createAndAppendElement('canvas', 'c');
   vauto = h.createAndAppendElement('video', 'vauto');
   vmanual = h.createAndAppendElement('video', 'vmanual');
   vrate = h.createAndAppendElement('video', 'vrate');
 
   Promise.resolve()
--- a/dom/canvas/test/webgl-mochitest/test_capture.html
+++ b/dom/canvas/test/webgl-mochitest/test_capture.html
@@ -49,75 +49,87 @@ function checkClearColorInitialRed() {
   info("Checking that clearing to red works for first frame.");
 
   h.clearColor(c, h.red);
 
   vauto.srcObject = c.captureStream();
   vmanual.srcObject = c.captureStream(0);
   vrate.srcObject = c.captureStream(10);
 
-  ok(h.isPixel(h.getPixel(vauto), h.blackTransparent, 0,
-     "vauto should not be drawn to before stable state"));
-  ok(h.isPixel(h.getPixel(vrate), h.blackTransparent, 0,
-     "vrate should not be drawn to before stable state"));
-  ok(h.isPixel(h.getPixel(vmanual), h.blackTransparent, 0,
-     "vmanual should not be drawn to before stable state"));
+  ok(h.isPixel(h.getPixel(vauto), h.blackTransparent),
+    "vauto should not be drawn to before stable state");
+  ok(h.isPixel(h.getPixel(vrate), h.blackTransparent),
+    "vrate should not be drawn to before stable state");
+  ok(h.isPixel(h.getPixel(vmanual), h.blackTransparent),
+    "vmanual should not be drawn to before stable state");
 
   return Promise.resolve()
-    .then(() => h.waitForPixelColor(vauto, h.red, 0,
-                                    "should become red automatically"))
-    .then(() => h.waitForPixelColor(vrate, h.red, 0,
-                                    "should become red automatically"))
-    .then(() => h.waitForPixelColor(vmanual, h.red, 0,
-                                    "should become red when we get to stable " +
-                                    "state (first frame)"))
+    .then(() => h.pixelMustBecome(vauto, h.red, {
+      infoString: "should become red automatically",
+    }))
+    .then(() => h.pixelMustBecome(vrate, h.red, {
+      infoString: "should become red automatically",
+    }))
+    .then(() => h.pixelMustBecome(vmanual, h.red, {
+      infoString: "should become red when we get to stable "
+        + "state (first frame)",
+    }))
 }
 
 function checkDrawColorGreen() {
   info("Checking that drawing green results in green video frames.");
   var drawing = h.startDrawing(h.drawColor.bind(h, c, h.green));
   checkGLError('after DrawColor');
   return Promise.resolve()
-    .then(() => h.waitForPixelColor(vauto, h.green, 0,
-                                    "should become green automatically"))
-    .then(() => h.waitForPixelColor(vrate, h.green, 0,
-                                    "should become green automatically"))
-    .then(() => h.waitForPixelColor(vmanual, h.red, 0,
-                                    "should still be red"))
+    .then(() => h.pixelMustBecome(vauto, h.green, {
+      infoString: "should become green automatically",
+    }))
+    .then(() => h.pixelMustBecome(vrate, h.green, {
+      infoString: "should become green automatically",
+    }))
+    .then(() => h.pixelMustBecome(vmanual, h.red, {
+      infoString: "should still be red",
+    }))
     .then(() => h.requestFrame(vmanual))
-    .then(() => h.waitForPixelColor(vmanual, h.green, 0,
-                                    "should become green after requstFrame()"))
+    .then(() => h.pixelMustBecome(vmanual, h.green, {
+      infoString: "should become green after requestFrame()",
+    }))
     .then(() => drawing.stop());
 }
 
 function checkClearColorRed() {
   info("Checking that clearing to red works.");
   var drawing = h.startDrawing(h.clearColor.bind(h, c, h.red));
   return Promise.resolve()
-    .then(() => h.waitForPixelColor(vauto, h.red, 0,
-                                    "should become red automatically"))
-    .then(() => h.waitForPixelColor(vrate, h.red, 0,
-                                    "should become red automatically"))
-    .then(() => h.waitForPixelColor(vmanual, h.green, 0,
-                                    "should still be green"))
+    .then(() => h.pixelMustBecome(vauto, h.red, {
+      infoString: "should become red automatically",
+    }))
+    .then(() => h.pixelMustBecome(vrate, h.red, {
+      infoString: "should become red automatically",
+    }))
+    .then(() => h.pixelMustBecome(vmanual, h.green, {
+      infoString: "should still be green",
+    }))
     .then(() => h.requestFrame(vmanual))
-    .then(() => h.waitForPixelColor(vmanual, h.red, 0,
-                                    "should become red after requestFrame()"))
+    .then(() => h.pixelMustBecome(vmanual, h.red, {
+      infoString: "should become red after requestFrame()",
+    }))
     .then(() => drawing.stop());
 }
 
 function checkRequestFrameOrderGuarantee() {
   info("Checking that requestFrame() immediately after a draw " +
        "call results in the expected frame seen in the stream.");
   return Promise.resolve()
-    .then(() => h.waitForPixelColor(vmanual, h.red, 0, "should still be red"))
+    .then(() => h.pixelMustBecome(vmanual, h.red, { infoString: "should still be red" }))
     .then(() => h.drawColor(c, h.green)) // 1. Draw canvas green
     .then(() => h.requestFrame(vmanual)) // 2. Immediately request a frame
-    .then(() => h.waitForPixelColor(vmanual, h.green, 0,
-                                    "should become green after call order test"))
+    .then(() => h.pixelMustBecome(vmanual, h.green, {
+      infoString: "should become green after call order test",
+    }))
 }
 
 function checkEndedOnStop() {
   let promises = [vauto, vmanual, vrate].map(elem => {
     elem.srcObject.getTracks()[0].stop();
     return new Promise(resolve =>
       elem.addEventListener("ended", function endedListener(event) {
         ok(true, "Element " + elem.id + " ended.");
--- a/dom/ipc/tests/test_temporaryfile_stream.html
+++ b/dom/ipc/tests/test_temporaryfile_stream.html
@@ -52,18 +52,20 @@ function startTest() {
       video.id = "recorded-video";
       video.src = URL.createObjectURL(xhr.response);
       video.play();
       video.onerror = err => {
         ok(false, "Should be able to play the recording. Got error. code=" + video.error.code);
         SimpleTest.finish();
       };
       document.getElementById("content").appendChild(video);
-      helper.waitForPixelColor(video, helper.red, 128, "Should become red")
-        .then(SimpleTest.finish);
+      helper.pixelMustBecome(video, helper.red, {
+        threshold: 128,
+        infoString: "Should become red"
+      }).then(SimpleTest.finish);
     };
     xhr.onerror = () => {
       ok(false, "XHR error");
       SimpleTest.finish();
     }
     xhr.responseType = "blob";
     xhr.send(blob);
   };
--- a/dom/media/test/test_mediarecorder_record_addtracked_stream.html
+++ b/dom/media/test/test_mediarecorder_record_addtracked_stream.html
@@ -121,19 +121,22 @@ runTestWhenReady(async () => {
     const upperAmp = array[analyser.binIndexForFrequency(upperFreq)];
     info("Analysing audio. "
          + lowerFreq + ": " + lowerAmp + ", "
          + freq + ": " + freqAmp + ", "
          + upperFreq + ": " + upperAmp);
     return lowerAmp < 50 && freqAmp > 200 && upperAmp < 50;
   }, endedNoError.then(() => new Error("Audio check failed")));
 
-  const videoReady = helper.waitForPixelColor(
-      video, helper.red, 128, "Should become red",
-      endedNoError.then(() => new Error("Video check failed")));
+  const videoReady = helper.pixelMustBecome(
+      video, helper.red, {
+        threshold: 128,
+        infoString: "Should become red",
+        cancel: endedNoError.then(() => new Error("Video check failed")),
+      });
 
   video.play();
 
   try {
     await endedNoError;
   } finally {
     analyser.disconnect();
     let url = video.src;
--- a/dom/media/test/test_mediarecorder_record_canvas_captureStream.html
+++ b/dom/media/test/test_mediarecorder_record_canvas_captureStream.html
@@ -51,18 +51,20 @@ function startTest() {
     video.id = "recorded-video";
     video.src = URL.createObjectURL(blob);
     video.play();
     video.onerror = err => {
       ok(false, "Should be able to play the recording. Got error. code=" + video.error.code);
       SimpleTest.finish();
     };
     document.getElementById("content").appendChild(video);
-    helper.waitForPixelColor(video, helper.red, 128, "Should become red")
-      .then(SimpleTest.finish);
+    helper.pixelMustBecome(video, helper.red, {
+      threshold: 128,
+      infoString: "Should become red"
+    }).then(SimpleTest.finish);
   };
 
   mediaRecorder.start();
   is(mediaRecorder.state, "recording", "Media recorder should be recording");
 }
 
 SimpleTest.waitForExplicitFinish();
 startTest();
--- a/dom/media/test/test_mediarecorder_record_downsize_resolution.html
+++ b/dom/media/test/test_mediarecorder_record_downsize_resolution.html
@@ -85,22 +85,24 @@ function startTest() {
 
     video.onended = () => {
       is(numResizeRaised, 2, "Expected 2 resize event");
     };
     document.getElementById("content").appendChild(video);
     video.play();
 
     // Check last color
-    helper.waitForPixelColor(video, helper.red, 128, "Should become red")
-      .then(() => {
-        video.onresize = {};
-        video.onended = {};
-        SimpleTest.finish();
-      });
+    helper.pixelMustBecome(video, helper.red, {
+      threshold: 128,
+      infoString: "Should become red",
+    }).then(() => {
+      video.onresize = {};
+      video.onended = {};
+      SimpleTest.finish();
+    });
   };
 
   // Start here by stream recorder.
   mediaRecorder.start();
   is(mediaRecorder.state, "recording", "Media recorder started");
   requestAnimationFrame(draw);
 
   // Change resolution every 100ms
--- a/dom/media/test/test_mediarecorder_record_upsize_resolution.html
+++ b/dom/media/test/test_mediarecorder_record_upsize_resolution.html
@@ -85,22 +85,24 @@ function startTest() {
 
     video.onended = () => {
       is(numResizeRaised, 2, "Expected 2 resize event");
     };
     document.getElementById("content").appendChild(video);
     video.play();
 
     // Check last color
-    helper.waitForPixelColor(video, helper.red, 128, "Should become red")
-      .then(() => {
-        video.onresize = {};
-        video.onended = {};
-        SimpleTest.finish();
-      });
+    helper.pixelMustBecome(video, helper.red, {
+      threshold: 128,
+      infoString: "Should become red",
+    }).then(() => {
+      video.onresize = {};
+      video.onended = {};
+      SimpleTest.finish();
+    });
   };
 
   // Start here by stream recorder.
   mediaRecorder.start();
   is(mediaRecorder.state, "recording", "Media recorder started");
   requestAnimationFrame(draw);
 
   // Change resolution every 100 ms
--- a/dom/media/test/test_temporary_file_blob_video_plays.html
+++ b/dom/media/test/test_temporary_file_blob_video_plays.html
@@ -51,18 +51,20 @@ function startTest() {
     video.id = "recorded-video";
     video.src = URL.createObjectURL(blob);
     video.play();
     video.onerror = err => {
       ok(false, "Should be able to play the recording. Got error. code=" + video.error.code);
       SimpleTest.finish();
     };
     document.getElementById("content").appendChild(video);
-    helper.waitForPixelColor(video, helper.red, 128, "Should become red")
-      .then(SimpleTest.finish);
+    helper.pixelMustBecome(video, helper.red, {
+      threshold: 128,
+      infoString: "Should become red",
+    }).then(SimpleTest.finish);
   };
 
   mediaRecorder.start();
   is(mediaRecorder.state, "recording", "Media recorder should be recording");
 }
 
 SimpleTest.waitForExplicitFinish();
 SpecialPowers.pushPrefEnv({set:[["media.recorder.max_memory", 1]]}, startTest);
--- a/dom/media/tests/mochitest/head.js
+++ b/dom/media/tests/mochitest/head.js
@@ -133,17 +133,17 @@ AudioStreamAnalyser.prototype = {
     cancel.then(() => aborted = true);
 
     // We need to give the Analyser some time to start gathering data.
     await wait(200);
 
     do {
       await new Promise(resolve => requestAnimationFrame(resolve));
       if (aborted) {
-        throw error;
+        throw await cancel;
       }
     }
     while (!analysisFunction(this.getByteFrequencyData()));
   },
 
   /**
    * Return the FFT bin index for a given frequency.
    *
@@ -629,56 +629,56 @@ function createOneShotEventWrapper(wrapp
     wrapper[onx](e);
     wrapper[onx] = unexpected;
   };
 }
 
 /**
  * Returns a promise that resolves when `target` has raised an event with the
  * given name the given number of times. Cancel the returned promise by passing
- * in a `cancelPromise` and resolve it.
+ * in a `cancel` promise and resolving it.
  *
  * @param {object} target
  *        The target on which the event should occur.
  * @param {string} name
  *        The name of the event that should occur.
  * @param {integer} count
  *        Optional number of times the event should be raised before resolving.
- * @param {promise} cancelPromise
+ * @param {promise} cancel
  *        Optional promise that on resolving rejects the returned promise,
  *        so we can avoid logging results after a test has finished.
  * @returns {promise} A promise that resolves to the last of the seen events.
  */
-function haveEvents(target, name, count, cancelPromise) {
+function haveEvents(target, name, count, cancel) {
   var listener;
   var counter = count || 1;
   return Promise.race([
-    (cancelPromise || new Promise(() => {})).then(e => Promise.reject(e)),
+    (cancel || new Promise(() => {})).then(e => Promise.reject(e)),
     new Promise(resolve =>
         target.addEventListener(name, listener = e => (--counter < 1 && resolve(e))))
   ])
   .then(e => (target.removeEventListener(name, listener), e));
 };
 
 /**
  * Returns a promise that resolves when `target` has raised an event with the
- * given name. Cancel the returned promise by passing in a `cancelPromise` and
- * resolve it.
+ * given name. Cancel the returned promise by passing in a `cancel` promise and
+ * resolving it.
  *
  * @param {object} target
  *        The target on which the event should occur.
  * @param {string} name
  *        The name of the event that should occur.
- * @param {promise} cancelPromise
+ * @param {promise} cancel
  *        Optional promise that on resolving rejects the returned promise,
  *        so we can avoid logging results after a test has finished.
  * @returns {promise} A promise that resolves to the seen event.
  */
-function haveEvent(target, name, cancelPromise) {
-  return haveEvents(target, name, 1, cancelPromise);
+function haveEvent(target, name, cancel) {
+  return haveEvents(target, name, 1, cancel);
 };
 
 /**
  * Returns a promise that resolves if the target has not seen the given event
  * after one crank (or until the given timeoutPromise resolves) of the event
  * loop.
  *
  * @param {object} target
@@ -701,23 +701,23 @@ function haveNoEvent(target, name, timeo
  * of events but no such event in a following crank of the event loop.
  *
  * @param {object} target
  *        The target on which the events should occur.
  * @param {string} name
  *        The name of the event that should occur.
  * @param {integer} count
  *        Optional number of times the event should be raised before resolving.
- * @param {promise} cancelPromise
+ * @param {promise} cancel
  *        Optional promise that on resolving rejects the returned promise,
  *        so we can avoid logging results after a test has finished.
  * @returns {promise} A promise that resolves to the last of the seen events.
  */
-function haveEventsButNoMore(target, name, count, cancelPromise) {
-  return haveEvents(target, name, count, cancelPromise)
+function haveEventsButNoMore(target, name, count, cancel) {
+  return haveEvents(target, name, count, cancel)
     .then(e => haveNoEvent(target, name).then(() => e));
 };
 
 /**
  * This class executes a series of functions in a continuous sequence.
  * Promise-bearing functions are executed after the previous promise completes.
  *
  * @constructor
@@ -970,57 +970,67 @@ class VideoFrameEmitter {
   }
 }
 
 class VideoStreamHelper {
   constructor() {
     this._helper = new CaptureStreamTestHelper2D(50,50);
   }
 
-  checkHasFrame(video, offsetX, offsetY, threshold) {
+  async checkHasFrame(video, { offsetX, offsetY, threshold } = {}) {
     const h = this._helper;
-    return h.waitForPixel(video, offsetX, offsetY, px => {
+    await h.waitForPixel(video, px => {
       let result = h.isOpaquePixelNot(px, h.black, threshold);
       info("Checking that we have a frame, got [" +
            Array.slice(px) + "]. Ref=[" +
            Array.slice(h.black.data) + "]. Threshold=" + threshold +
            ". Pass=" + result);
       return result;
-    });
+    }, { offsetX, offsetY });
   }
 
-  async checkVideoPlaying(video, offsetX, offsetY, threshold) {
+  async checkVideoPlaying(video, { offsetX = 10, offsetY = 10,
+                                   threshold = 16,
+                                 } = {}) {
     const h = this._helper;
-    await this.checkHasFrame(video, offsetX, offsetY, threshold);
-    let startPixel = { data: h.getPixel(video, offsetX, offsetY)
-                     , name: "startcolor"
-                     };
-    return h.waitForPixel(video, offsetX, offsetY, px => {
-      let result = h.isPixelNot(px, startPixel, threshold)
+    await this.checkHasFrame(video, { offsetX, offsetY, threshold });
+    let startPixel = {
+      data: h.getPixel(video, offsetX, offsetY),
+      name: "startcolor",
+    };
+    await h.waitForPixel(video, px => {
+      let result = h.isPixelNot(px, startPixel, threshold);
       info("Checking playing, [" +
            Array.slice(px) + "] vs [" + Array.slice(startPixel.data) +
            "]. Threshold=" + threshold + " Pass=" + result);
       return result;
-    });
+    }, { offsetX, offsetY });
   }
 
-  async checkVideoPaused(video, offsetX, offsetY, threshold, timeout) {
+  async checkVideoPaused(video, { offsetX = 10, offsetY = 10,
+                                  threshold = 16, time = 5000,
+                                } = {}) {
     const h = this._helper;
-    await this.checkHasFrame(video, offsetX, offsetY, threshold);
-    let startPixel = { data: h.getPixel(video, offsetX, offsetY)
-                     , name: "startcolor"
-                     };
-    const changed = await h.waitForPixel(video, offsetX, offsetY, px => {
-      let result = h.isOpaquePixelNot(px, startPixel, threshold);
-      info("Checking paused, [" +
-           Array.slice(px) + "] vs [" + Array.slice(startPixel.data) +
-           "]. Threshold=" + threshold + " Pass=" + result);
-      return result;
-    }, timeout);
-    ok(!changed, "Frame shouldn't change within " + timeout / 1000 + " seconds.");
+    await this.checkHasFrame(video, { offsetX, offsetY, threshold });
+    let startPixel = {
+      data: h.getPixel(video, offsetX, offsetY),
+      name: "startcolor",
+    };
+    try {
+      await h.waitForPixel(video, px => {
+          let result = h.isOpaquePixelNot(px, startPixel, threshold);
+          info("Checking paused, [" +
+               Array.slice(px) + "] vs [" + Array.slice(startPixel.data) +
+               "]. Threshold=" + threshold + " Pass=" + result);
+          return result;
+        }, { offsetX, offsetY, cancel: wait(time, "timeout") });
+      ok(false, "Frame changed within " + time / 1000 + " seconds");
+    } catch (e) {
+      is(e, "timeout", "Frame shouldn't change for " + time / 1000 + " seconds");
+    }
   }
 }
 
 
 function IsMacOSX10_6orOlder() {
   if (navigator.platform.indexOf("Mac") !== 0) {
     return false;
   }
--- a/dom/media/tests/mochitest/test_getUserMedia_bug1223696.html
+++ b/dom/media/tests/mochitest/test_getUserMedia_bug1223696.html
@@ -35,18 +35,20 @@
 
         stream.addTrack(canvasStream.getVideoTracks()[0]);
 
         checkMediaStreamContains(stream, [stream.getAudioTracks()[0],
                                           canvasStream.getVideoTracks()[0]]);
       };
 
       return listenUntil(video, "loadeddata", () => true)
-        .then(() => h.waitForPixelColor(video, h.grey, 5,
-            "The canvas track should be rendered by the media element"))
+        .then(() => h.pixelMustBecome(video, h.grey, {
+          threshold: 5,
+          infoString: "The canvas track should be rendered by the media element",
+        }))
         .then(() => {
           [removedTrack, ...stream.getAudioTracks()].forEach(t => t.stop());
         });
     }));
 </script>
 </pre>
 </body>
 </html>
--- a/dom/media/tests/mochitest/test_getUserMedia_mediaElementCapture_video.html
+++ b/dom/media/tests/mochitest/test_getUserMedia_mediaElementCapture_video.html
@@ -12,21 +12,16 @@ createHTML({
   bug: "1259788",
   title: "Test CaptureStream video content on HTMLMediaElement playing a gUM MediaStream",
   visible: true
 });
 
 var gUMVideoElement;
 var captureStreamElement;
 
-// We check a pixel somewhere away from the top left corner since
-// MediaEngineDefault puts semi-transparent time indicators there.
-const offsetX = 20;
-const offsetY = 20;
-const threshold = 16;
 const pausedTimeout = 1000;
 let h;
 
 runTest(() => getUserMedia({video: true, fake: true})
   .then(stream => {
     h = new VideoStreamHelper();
     gUMVideoElement =
       createMediaElement("video", "gUMVideo");
@@ -42,55 +37,55 @@ runTest(() => getUserMedia({video: true,
     // Adding a dummy audio track to the stream will keep a consuming media
     // element from ending.
     // We could also solve it by repeatedly play()ing or autoplay, but then we
     // wouldn't be sure the media element stopped rendering video because it
     // went to the ended state or because there were no frames for the track.
     let osc = createOscillatorStream(new AudioContext(), 1000);
     captureStreamElement.srcObject.addTrack(osc.getTracks()[0]);
 
-    return h.checkVideoPlaying(captureStreamElement, 10, 10, 16);
+    return h.checkVideoPlaying(captureStreamElement);
   })
   .then(() => {
     info("Video flowing. Pausing.");
     gUMVideoElement.pause();
 
-    return h.checkVideoPaused(captureStreamElement, 10, 10, 16, pausedTimeout);
+    return h.checkVideoPaused(captureStreamElement, { time: pausedTimeout });
   })
   .then(() => {
     info("Video stopped flowing. Playing.");
     gUMVideoElement.play();
 
-    return h.checkVideoPlaying(captureStreamElement, 10, 10, 16);
+    return h.checkVideoPlaying(captureStreamElement);
   })
   .then(() => {
     info("Video flowing. Removing source.");
     var stream = gUMVideoElement.srcObject;
     gUMVideoElement.srcObject = null;
 
-    return h.checkVideoPaused(captureStreamElement, 10, 10, 16, pausedTimeout)
+    return h.checkVideoPaused(captureStreamElement, { time: pausedTimeout })
         .then(() => stream);
   })
   .then(stream => {
     info("Video stopped flowing. Setting source.");
     gUMVideoElement.srcObject = stream;
-    return h.checkVideoPlaying(captureStreamElement, 10, 10, 16);
+    return h.checkVideoPlaying(captureStreamElement);
   })
   .then(() => {
     info("Video flowing. Changing source by track manipulation. Remove first.");
     var track = gUMVideoElement.srcObject.getTracks()[0];
     gUMVideoElement.srcObject.removeTrack(track);
-    return h.checkVideoPaused(captureStreamElement, 10, 10, 16, pausedTimeout)
+    return h.checkVideoPaused(captureStreamElement, { time: pausedTimeout })
         .then(() => track);
   })
   .then(track => {
     info("Video paused. Changing source by track manipulation. Add first.");
     gUMVideoElement.srcObject.addTrack(track);
     gUMVideoElement.play();
-    return h.checkVideoPlaying(captureStreamElement, 10, 10, 16);
+    return h.checkVideoPlaying(captureStreamElement);
   })
   .then(() => {
     gUMVideoElement.srcObject.getTracks().forEach(t => t.stop());
     ok(true, "Test passed.");
   })
   .catch(e => ok(false, "Test failed: " + e + (e.stack ? "\n" + e.stack : ""))));
 
 </script>
--- a/dom/media/tests/mochitest/test_peerConnection_addSecondVideoStream.html
+++ b/dom/media/tests/mochitest/test_peerConnection_addSecondVideoStream.html
@@ -25,17 +25,17 @@
         },
       ],
       [
         function PC_REMOTE_CHECK_VIDEO_FLOW(test) {
           const h = new VideoStreamHelper();
           is(test.pcRemote.remoteMediaElements.length, 2,
              "Should have two remote media elements after renegotiation");
           return Promise.all(test.pcRemote.remoteMediaElements.map(video =>
-            h.checkVideoPlaying(video, 10, 10, 16)));
+            h.checkVideoPlaying(video)));
         },
       ]
     );
 
     test.setMediaConstraints([{video: true, fake: true}], [{video: true}]);
     test.run();
   });
 </script>
--- a/dom/media/tests/mochitest/test_peerConnection_addSecondVideoStreamNoBundle.html
+++ b/dom/media/tests/mochitest/test_peerConnection_addSecondVideoStreamNoBundle.html
@@ -32,17 +32,17 @@
         },
       ],
       [
         function PC_REMOTE_CHECK_VIDEO_FLOW(test) {
           const h = new VideoStreamHelper();
           is(test.pcRemote.remoteMediaElements.length, 2,
              "Should have two remote media elements after renegotiation");
           return Promise.all(test.pcRemote.remoteMediaElements.map(video =>
-            h.checkVideoPlaying(video, 10, 10, 16)));
+            h.checkVideoPlaying(video)));
         },
       ]
     );
 
     test.setMediaConstraints([{video: true, fake: true}], [{video: true}]);
     test.run();
   });
 </script>
--- a/dom/media/tests/mochitest/test_peerConnection_captureStream_canvas_2d.html
+++ b/dom/media/tests/mochitest/test_peerConnection_captureStream_canvas_2d.html
@@ -43,29 +43,33 @@ runNetworkTest(() => {
         }
       }, 500);
     }
   ]);
   test.chain.append([
     function PC_REMOTE_WAIT_FOR_REMOTE_GREEN() {
       mediaElement = test.pcRemote.remoteMediaElements[0];
       ok(!!mediaElement, "Should have remote video element for pcRemote");
-      return h.waitForPixelColor(mediaElement, h.green, 128,
-                                 "pcRemote's remote should become green");
+      return h.pixelMustBecome(mediaElement, h.green, {
+        threshold: 128,
+        infoString: "pcRemote's remote should become green",
+      });
     },
     function PC_LOCAL_DRAW_LOCAL_RED() {
       // After requesting a frame it will be captured at the time of next render.
       // Next render will happen at next stable state, at the earliest,
       // i.e., this order of `requestFrame(); draw();` should work.
       stream.requestFrame();
       h.drawColor(canvas, h.red);
     },
     function PC_REMOTE_WAIT_FOR_REMOTE_RED() {
-      return h.waitForPixelColor(mediaElement, h.red, 128,
-                                 "pcRemote's remote should become red");
+      return h.pixelMustBecome(mediaElement, h.red, {
+        threshold: 128,
+        infoString: "pcRemote's remote should become red",
+      });
     }
   ]);
   test.run();
 });
 </script>
 </pre>
 </body>
 </html>
--- a/dom/media/tests/mochitest/test_peerConnection_captureStream_canvas_2d_noSSRC.html
+++ b/dom/media/tests/mochitest/test_peerConnection_captureStream_canvas_2d_noSSRC.html
@@ -45,29 +45,33 @@ runNetworkTest((options) => {
         }
       }, 500);
     }
   ]);
   test.chain.append([
     function PC_REMOTE_WAIT_FOR_REMOTE_GREEN() {
       mediaElement = test.pcRemote.remoteMediaElements[0];
       ok(!!mediaElement, "Should have remote video element for pcRemote");
-      return h.waitForPixelColor(mediaElement, h.green, 128,
-                                 "pcRemote's remote should become green");
+      return h.pixelMustBecome(mediaElement, h.green, {
+        threshold: 128,
+        infoString: "pcRemote's remote should become green",
+      });
     },
     function PC_LOCAL_DRAW_LOCAL_RED() {
       // After requesting a frame it will be captured at the time of next render.
       // Next render will happen at next stable state, at the earliest,
       // i.e., this order of `requestFrame(); draw();` should work.
       stream.requestFrame();
       h.drawColor(canvas, h.red);
     },
     function PC_REMOTE_WAIT_FOR_REMOTE_RED() {
-      return h.waitForPixelColor(mediaElement, h.red, 128,
-                                 "pcRemote's remote should become red");
+      return h.pixelMustBecome(mediaElement, h.red, {
+        threshold: 128,
+        infoString: "pcRemote's remote should become red",
+      });
     }
   ]);
   test.run();
 });
 </script>
 </pre>
 </body>
 </html>
--- a/dom/media/tests/mochitest/test_peerConnection_captureStream_canvas_webgl.html
+++ b/dom/media/tests/mochitest/test_peerConnection_captureStream_canvas_webgl.html
@@ -94,31 +94,35 @@ runNetworkTest(() => {
     }
   ]);
   test.chain.append([
     function FIND_REMOTE_VIDEO() {
       vremote = test.pcRemote.remoteMediaElements[0];
       ok(!!vremote, "Should have remote video element for pcRemote");
     },
     function WAIT_FOR_REMOTE_GREEN() {
-      return h.waitForPixelColor(vremote, h.green, 128,
-                                 "pcRemote's remote should become green");
+      return h.pixelMustBecome(vremote, h.green, {
+        threshold: 128,
+        infoString: "pcRemote's remote should become green",
+      });
     },
     function REQUEST_FRAME(test) {
       // After requesting a frame it will be captured at the time of next render.
       // Next render will happen at next stable state, at the earliest,
       // i.e., this order of `requestFrame(); draw();` should work.
       test.pcLocal.canvasStream.requestFrame();
     },
     function DRAW_LOCAL_RED() {
       h.drawColor(canvas, h.red);
     },
     function WAIT_FOR_REMOTE_RED() {
-      return h.waitForPixelColor(vremote, h.red, 128,
-                                 "pcRemote's remote should become red");
+      return h.pixelMustBecome(vremote, h.red, {
+        threshold: 128,
+        infoString: "pcRemote's remote should become red",
+      });
     }
   ]);
   test.run();
 });
 </script>
 </pre>
 </body>
 </html>
--- a/dom/media/tests/mochitest/test_peerConnection_multiple_captureStream_canvas_2d.html
+++ b/dom/media/tests/mochitest/test_peerConnection_multiple_captureStream_canvas_2d.html
@@ -23,16 +23,18 @@ runNetworkTest(() => {
   var vremote1;
   var stream1;
   var canvas1 = h.createAndAppendElement('canvas', 'source_canvas1');
 
   var vremote2;
   var stream2;
   var canvas2 = h.createAndAppendElement('canvas', 'source_canvas2');
 
+  const threshold = 128;
+
   test.setMediaConstraints([{video: true}, {video: true}], []);
   test.chain.replace("PC_LOCAL_GUM", [
     function PC_LOCAL_CANVAS_CAPTURESTREAM(test) {
       h.drawColor(canvas1, h.green);
       h.drawColor(canvas2, h.blue);
       stream1 = canvas1.captureStream(0); // fps = 0 to capture single frame
       test.pcLocal.attachLocalStream(stream1);
       stream2 = canvas2.captureStream(0); // fps = 0 to capture single frame
@@ -60,35 +62,47 @@ runNetworkTest(() => {
       vremote1 = test.pcRemote.remoteMediaElements[0];
       vremote2 = test.pcRemote.remoteMediaElements[1];
 
       // since we don't know which remote video is created first, we don't know
       // which should be blue or red, but this will make sure that one is
       // green and one is blue
       return Promise.race([
                Promise.all([
-                 h.waitForPixelColor(vremote1, h.red, 128,
-                                     "pcRemote's remote1 should become red"),
-                 h.waitForPixelColor(vremote2, h.blue, 128,
-                                     "pcRemote's remote2 should become blue")
+                 h.pixelMustBecome(vremote1, h.red, {
+                   threshold,
+                   infoString: "pcRemote's remote1 should become red",
+                 }),
+                 h.pixelMustBecome(vremote2, h.blue, {
+                   threshold,
+                   infoString: "pcRemote's remote2 should become blue",
+                 }),
                ]),
                Promise.all([
-                 h.waitForPixelColor(vremote2, h.red, 128,
-                                     "pcRemote's remote2 should become red"),
-                 h.waitForPixelColor(vremote1, h.blue, 128,
-                                     "pcRemote's remote1 should become blue")
+                 h.pixelMustBecome(vremote2, h.red, {
+                   threshold,
+                   infoString: "pcRemote's remote2 should become red",
+                 }),
+                 h.pixelMustBecome(vremote1, h.blue, {
+                   threshold,
+                   infoString: "pcRemote's remote1 should become blue",
+                 }),
                ])
              ]);
     },
     function WAIT_FOR_REMOTE_BOTH_GREEN() {
       return Promise.all([
-               h.waitForPixelColor(vremote1, h.green, 128,
-                                   "pcRemote's remote1 should become green"),
-               h.waitForPixelColor(vremote2, h.green, 128,
-                                 "pcRemote's remote2 should become green")
+               h.pixelMustBecome(vremote1, h.green, {
+                 threshold,
+                 infoString: "pcRemote's remote1 should become green",
+               }),
+               h.pixelMustBecome(vremote2, h.green, {
+                 threshold,
+                 infoString: "pcRemote's remote2 should become green",
+               }),
+             ]);
     },
   ]);
   test.run();
 });
 </script>
 </pre>
 </body>
--- a/dom/media/tests/mochitest/test_peerConnection_removeThenAddVideoTrack.html
+++ b/dom/media/tests/mochitest/test_peerConnection_removeThenAddVideoTrack.html
@@ -43,17 +43,17 @@
           const track = test.pcRemote._pc.getReceivers()[0].track;
           isnot(originalTrack.id, track.id, "Receiver should have changed");
 
           const vOriginal = test.pcRemote.remoteMediaElements.find(
               elem => elem.id.includes(originalTrack.id));
           const vAdded = test.pcRemote.remoteMediaElements.find(
               elem => elem.id.includes(track.id));
           ok(vOriginal.ended, "Original video element should have ended");
-          return helper.checkVideoPlaying(vAdded, 10, 10, 16);
+          return helper.checkVideoPlaying(vAdded);
         },
       ]
     );
 
     test.setMediaConstraints([{video: true}], [{video: true}]);
     test.run();
   });
 </script>
--- a/dom/media/tests/mochitest/test_peerConnection_removeThenAddVideoTrackNoBundle.html
+++ b/dom/media/tests/mochitest/test_peerConnection_removeThenAddVideoTrackNoBundle.html
@@ -43,17 +43,17 @@
           const track = test.pcRemote._pc.getReceivers()[0].track;
           isnot(originalTrack.id, track.id, "Receiver should have changed");
 
           const vOriginal = test.pcRemote.remoteMediaElements.find(
               elem => elem.id.includes(originalTrack.id));
           const vAdded = test.pcRemote.remoteMediaElements.find(
               elem => elem.id.includes(track.id));
           ok(vOriginal.ended, "Original video element should have ended");
-          return helper.checkVideoPlaying(vAdded, 10, 10, 16);
+          return helper.checkVideoPlaying(vAdded);
         },
       ]
     );
 
     test.chain.insertAfterEach('PC_LOCAL_CREATE_OFFER',
                                PC_LOCAL_REMOVE_BUNDLE_FROM_OFFER);
 
     test.setMediaConstraints([{video: true}], [{video: true}]);
--- a/dom/media/tests/mochitest/test_peerConnection_renderAfterRenegotiation.html
+++ b/dom/media/tests/mochitest/test_peerConnection_renderAfterRenegotiation.html
@@ -59,25 +59,28 @@
     .then(() => pc2.setRemoteDescription(pc1.localDescription))
     .then(() => pc2.createAnswer({}))
     .then(answer => pc2.setLocalDescription(answer))
     .then(() => pc1.setRemoteDescription(pc2.localDescription))
     .then(() => delivered)
 
     // now verify that actually something gets rendered into the remote video
     // element.
-    .then(() => h.waitForPixelColor(v2, h.blue, 128,
-                                    "pcRemote's video should become blue"))
+    .then(() => h.pixelMustBecome(v2, h.blue, {
+      threshold: 128,
+      infoString: "pcRemote's video should become blue",
+    }))
     // This will verify that new changes to the canvas propagate through
     // the peerconnection
     .then(() => {
       emitter.colors(h.red, h.green)
-      })
-    .then(() => h.waitForPixelColor(v2, h.red, 128,
-                                    "pcRemote's video should become red"))
-
+    })
+    .then(() => h.pixelMustBecome(v2, h.red, {
+      threshold: 128,
+      infoString: "pcRemote's video should become red",
+    }))
     .catch(reason => ok(false, "unexpected failure: " + reason))
     .then(networkTestFinished);
   });
 </script>
 </pre>
 </body>
 </html>
--- a/dom/media/tests/mochitest/test_peerConnection_replaceVideoThenRenegotiate.html
+++ b/dom/media/tests/mochitest/test_peerConnection_replaceVideoThenRenegotiate.html
@@ -56,34 +56,34 @@
           ok(vremote.ended, "Original track should have ended after renegotiation");
         },
         function PC_REMOTE_CHECK_REPLACED_TRACK_FLOW(test) {
           const vremote = test.pcRemote.remoteMediaElements.find(
               elem => elem.id.includes(test.pcLocal._pc.getSenders()[0].track.id));
           if (!vremote) {
             return Promise.reject(new Error("Couldn't find video element"));
           }
-          return addFinallyToPromise(helper.checkVideoPlaying(vremote, 10, 10, 16))
+          return addFinallyToPromise(helper.checkVideoPlaying(vremote))
             .finally(() => emitter2.stop())
             .then(() => {
               const px = helper._helper.getPixel(vremote, 10, 10);
               const isBlue = helper._helper.isPixel(
                   px, CaptureStreamTestHelper.prototype.blue, 5);
               const isGrey = helper._helper.isPixel(
                   px, CaptureStreamTestHelper.prototype.grey, 5);
               ok(isBlue || isGrey, "replaced track should be blue or grey");
             });
         },
         function PC_REMOTE_CHECK_ADDED_TRACK_FLOW(test) {
           const vremote = test.pcRemote.remoteMediaElements.find(
               elem => elem.id.includes(test.pcLocal._pc.getSenders()[1].track.id));
           if (!vremote) {
             return Promise.reject(new Error("Couldn't find video element"));
           }
-          return helper.checkVideoPlaying(vremote, 10, 10, 16);
+          return helper.checkVideoPlaying(vremote);
         },
       ]
     );
 
     test.run();
    });
   });
 
--- a/dom/media/tests/mochitest/test_peerConnection_simulcastAnswer.html
+++ b/dom/media/tests/mochitest/test_peerConnection_simulcastAnswer.html
@@ -99,17 +99,17 @@
         }
       ]);
 
       test.chain.append([
         async function PC_LOCAL_WAIT_FOR_FRAMES() {
           const vremote = test.pcLocal.remoteMediaElements[0];
           ok(vremote, "Should have remote video element for pcLocal");
           emitter.start();
-          await helper.checkVideoPlaying(vremote, 10, 10, 16);
+          await helper.checkVideoPlaying(vremote);
           emitter.stop();
         },
         function PC_LOCAL_CHECK_SIZE_1() {
           const vlocal = test.pcRemote.localMediaElements[0];
           const vremote = test.pcLocal.remoteMediaElements[0];
           ok(vlocal, "Should have local video element for pcRemote");
           ok(vremote, "Should have remote video element for pcLocal");
           ok(vlocal.videoWidth > 0, "source width is positive");
@@ -124,26 +124,26 @@
         },
         function PC_LOCAL_WAIT_FOR_SECOND_MEDIA_FLOW(test) {
           return test.pcLocal.waitForMediaFlow();
         },
         async function PC_LOCAL_WAIT_FOR_FRAMES_2() {
           const vremote = test.pcLocal.remoteMediaElements[0];
           ok(vremote, "Should have remote video element for pcLocal");
           emitter.start();
-          await helper.checkVideoPlaying(vremote, 10, 10, 16);
+          await helper.checkVideoPlaying(vremote);
           emitter.stop();
         },
         // For some reason, even though we're getting a 25x25 stream, sometimes
         // the resolution isn't updated on the video element on the first frame.
         async function PC_LOCAL_WAIT_FOR_FRAMES_3() {
           const vremote = test.pcLocal.remoteMediaElements[0];
           ok(vremote, "Should have remote video element for pcLocal");
           emitter.start();
-          await helper.checkVideoPlaying(vremote, 10, 10, 16);
+          await helper.checkVideoPlaying(vremote);
           emitter.stop();
         },
         function PC_LOCAL_CHECK_SIZE_2() {
           const vlocal = test.pcRemote.localMediaElements[0];
           const vremote = test.pcLocal.remoteMediaElements[0];
           ok(vlocal, "Should have local video element for pcRemote");
           ok(vremote, "Should have remote video element for pcLocal");
           ok(vlocal.videoWidth > 0, "source width is positive");
--- a/dom/media/tests/mochitest/test_peerConnection_simulcastOffer.html
+++ b/dom/media/tests/mochitest/test_peerConnection_simulcastOffer.html
@@ -91,17 +91,17 @@
         }
       ]);
 
       test.chain.append([
         async function PC_REMOTE_WAIT_FOR_FRAMES() {
           const vremote = test.pcRemote.remoteMediaElements[0];
           ok(vremote, "Should have remote video element for pcRemote");
           emitter.start();
-          await helper.checkVideoPlaying(vremote, 10, 10, 16);
+          await helper.checkVideoPlaying(vremote);
           emitter.stop();
         },
         function PC_REMOTE_CHECK_SIZE_1() {
           const vlocal = test.pcLocal.localMediaElements[0];
           const vremote = test.pcRemote.remoteMediaElements[0];
           ok(vlocal, "Should have local video element for pcLocal");
           ok(vremote, "Should have remote video element for pcRemote");
           ok(vlocal.videoWidth > 0, "source width is positive");
@@ -116,26 +116,26 @@
         },
         function PC_REMOTE_WAIT_FOR_SECOND_MEDIA_FLOW(test) {
           return test.pcRemote.waitForMediaFlow();
         },
         async function PC_REMOTE_WAIT_FOR_FRAMES_2() {
           const vremote = test.pcRemote.remoteMediaElements[0];
           ok(vremote, "Should have remote video element for pcRemote");
           emitter.start();
-          await helper.checkVideoPlaying(vremote, 10, 10, 16);
+          await helper.checkVideoPlaying(vremote);
           emitter.stop();
         },
         // For some reason, even though we're getting a 25x25 stream, sometimes
         // the resolution isn't updated on the video element on the first frame.
         async function PC_REMOTE_WAIT_FOR_FRAMES_3() {
           const vremote = test.pcRemote.remoteMediaElements[0];
           ok(vremote, "Should have remote video element for pcRemote");
           emitter.start();
-          await helper.checkVideoPlaying(vremote, 10, 10, 16);
+          await helper.checkVideoPlaying(vremote);
           emitter.stop();
         },
         function PC_REMOTE_CHECK_SIZE_2() {
           const vlocal = test.pcLocal.localMediaElements[0];
           const vremote = test.pcRemote.remoteMediaElements[0];
           ok(vlocal, "Should have local video element for pcLocal");
           ok(vremote, "Should have remote video element for pcRemote");
           ok(vlocal.videoWidth > 0, "source width is positive");
--- a/dom/media/tests/mochitest/test_peerConnection_trackDisabling.html
+++ b/dom/media/tests/mochitest/test_peerConnection_trackDisabling.html
@@ -38,22 +38,24 @@ runNetworkTest(() => {
       // We check a pixel somewhere away from the top left corner since
       // MediaEngineDefault puts semi-transparent time indicators there.
       const offsetX = 50;
       const offsetY = 50;
       const threshold = 128;
 
       // We're regarding black as disabled here, and we're setting the alpha
       // channel of the pixel to 255 to disregard alpha when testing.
-      var checkVideoEnabled = video =>
-        h.waitForPixel(video, offsetX, offsetY,
-                       px => (px[3] = 255, h.isPixelNot(px, h.black, threshold)));
-      var checkVideoDisabled = video =>
-        h.waitForPixel(video, offsetX, offsetY,
-                       px => (px[3] = 255, h.isPixel(px, h.black, threshold, offsetX*2, offsetY*2)));
+      var checkVideoEnabled = video => h.waitForPixel(video,
+        px => (px[3] = 255, h.isPixelNot(px, h.black, threshold)),
+        { offsetX, offsetY }
+      );
+      var checkVideoDisabled = video => h.waitForPixel(video,
+        px => (px[3] = 255, h.isPixel(px, h.black, threshold)),
+        { offsetX, offsetY }
+      );
       return Promise.resolve()
         .then(() => info("Checking local video enabled"))
         .then(() => checkVideoEnabled(localVideo))
         .then(() => info("Checking remote video enabled"))
         .then(() => checkVideoEnabled(remoteVideo))
 
         .then(() => info("Disabling original"))
         .then(() => test.pcLocal._pc.getLocalStreams()[0].getVideoTracks()[0].enabled = false)
--- a/dom/media/tests/mochitest/test_peerConnection_trackDisabling_clones.html
+++ b/dom/media/tests/mochitest/test_peerConnection_trackDisabling_clones.html
@@ -55,22 +55,24 @@ runNetworkTest(() => {
       // MediaEngineDefault puts semi-transparent time indicators there.
       const offsetX = 50;
       const offsetY = 50;
       const threshold = 128;
       const remoteDisabledColor = h.black;
 
       // We're regarding black as disabled here, and we're setting the alpha
       // channel of the pixel to 255 to disregard alpha when testing.
-      var checkVideoEnabled = video =>
-        h.waitForPixel(video, offsetX, offsetY,
-                       px => (px[3] = 255, h.isPixelNot(px, h.black, threshold)));
-      var checkVideoDisabled = video =>
-        h.waitForPixel(video, offsetX, offsetY,
-                       px => (px[3] = 255, h.isPixel(px, h.black, threshold)));
+      var checkVideoEnabled = video => h.waitForPixel(video,
+        px => (px[3] = 255, h.isPixelNot(px, h.black, threshold)),
+        { offsetX, offsetY }
+      );
+      var checkVideoDisabled = video => h.waitForPixel(video,
+        px => (px[3] = 255, h.isPixel(px, h.black, threshold)),
+        { offsetX, offsetY }
+      );
 
       return Promise.resolve()
         .then(() => info("Checking local original enabled"))
         .then(() => checkVideoEnabled(localVideoOriginal))
         .then(() => info("Checking local clone enabled"))
         .then(() => checkVideoEnabled(localVideoClone))
         .then(() => info("Checking remote clone enabled"))
         .then(() => checkVideoEnabled(remoteVideoClone))
--- a/dom/media/tests/mochitest/test_peerConnection_verifyVideoAfterRenegotiation.html
+++ b/dom/media/tests/mochitest/test_peerConnection_verifyVideoAfterRenegotiation.html
@@ -53,22 +53,26 @@ runNetworkTest(() => {
   ]);
 
   test.chain.append([
     function FIND_REMOTE_VIDEO() {
       vremote1 = test.pcRemote.remoteMediaElements[0];
       ok(!!vremote1, "Should have remote video element for pcRemote");
     },
     function WAIT_FOR_REMOTE_GREEN() {
-      return h1.waitForPixelColor(vremote1, h1.green, 128,
-                                 "pcRemote's remote should become green");
+      return h1.pixelMustBecome(vremote1, h1.green, {
+        threshold: 128,
+        infoString: "pcRemote's remote should become green",
+      });
     },
     function WAIT_FOR_REMOTE_RED() {
-      return h1.waitForPixelColor(vremote1, h1.red, 128,
-                                 "pcRemote's remote should become red");
+      return h1.pixelMustBecome(vremote1, h1.red, {
+        threshold: 128,
+        infoString: "pcRemote's remote should become red",
+      });
     }
   ]);
 
   addRenegotiation(test.chain,
     [
       function PC_LOCAL_ADD_SECOND_STREAM(test) {
         canvas2 = h2.createAndAppendElement('canvas', 'source_canvas2');
         h2.drawColor(canvas2, h2.blue);
@@ -82,26 +86,30 @@ runNetworkTest(() => {
   );
 
   test.chain.append([
     function FIND_REMOTE2_VIDEO() {
       vremote2 = test.pcRemote.remoteMediaElements[1];
       ok(!!vremote2, "Should have remote2 video element for pcRemote");
     },
     function WAIT_FOR_REMOTE2_BLUE() {
-      return h2.waitForPixelColor(vremote2, h2.blue, 128,
-                                 "pcRemote's remote2 should become blue");
+      return h2.pixelMustBecome(vremote2, h2.blue, {
+        threshold: 128,
+        infoString: "pcRemote's remote2 should become blue",
+      });
     },
     function DRAW_NEW_LOCAL_GREEN(test) {
       stream1.requestFrame();
       h1.drawColor(canvas1, h1.green);
     },
     function WAIT_FOR_REMOTE1_GREEN() {
-      return h1.waitForPixelColor(vremote1, h1.green, 128,
-                                 "pcRemote's remote1 should become green");
+      return h1.pixelMustBecome(vremote1, h1.green, {
+        threshold: 128,
+        infoString: "pcRemote's remote1 should become green",
+      });
     }
   ]);
 
   test.run();
 });
 
 </script>
 </pre>
--- a/dom/media/tests/mochitest/test_peerConnection_videoRenegotiationInactiveAnswer.html
+++ b/dom/media/tests/mochitest/test_peerConnection_videoRenegotiationInactiveAnswer.html
@@ -26,17 +26,17 @@
         emitter.start();
       }
     ]);
 
     test.chain.append([
       function PC_REMOTE_WAIT_FOR_FRAMES() {
         var vremote = test.pcRemote.remoteMediaElements[0];
         ok(vremote, "Should have remote video element for pcRemote");
-        return addFinallyToPromise(helper.checkVideoPlaying(vremote, 10, 10, 16))
+        return addFinallyToPromise(helper.checkVideoPlaying(vremote))
             .finally(() => emitter.stop());
       }
     ]);
 
     addRenegotiation(test.chain, []);
 
     test.chain.insertAfter("PC_LOCAL_GET_ANSWER", [
         function PC_LOCAL_REWRITE_REMOTE_SDP_INACTIVE(test) {
@@ -45,32 +45,32 @@
         }
     ], false, 1);
 
     test.chain.append([
       function PC_REMOTE_ENSURE_NO_FRAMES() {
         var vremote = test.pcRemote.remoteMediaElements[0];
         ok(vremote, "Should have remote video element for pcRemote");
         emitter.start();
-        return addFinallyToPromise(helper.checkVideoPaused(vremote, 10, 10, 16, 5000))
+        return addFinallyToPromise(helper.checkVideoPaused(vremote))
             .finally(() => emitter.stop());
       },
     ]);
 
     test.chain.remove("PC_REMOTE_CHECK_STATS", 1);
     test.chain.remove("PC_LOCAL_CHECK_STATS", 1);
 
     addRenegotiation(test.chain, []);
 
     test.chain.append([
       function PC_REMOTE_WAIT_FOR_FRAMES_2() {
         var vremote = test.pcRemote.remoteMediaElements[0];
         ok(vremote, "Should have remote video element for pcRemote");
         emitter.start();
-        return addFinallyToPromise(helper.checkVideoPlaying(vremote, 10, 10, 16))
+        return addFinallyToPromise(helper.checkVideoPlaying(vremote))
             .finally(() => emitter.stop());
       }
     ]);
 
     test.setMediaConstraints([{video: true}], []);
     test.run();
   });
 </script>