Bug 1413098 - part2 : add test. r=padenot draft
author: Alastor Wu <alwu@mozilla.com>
Wed, 13 Dec 2017 15:50:20 -0600
changeset 803529 60b0bc916b96d0929c048a11a98623bb90b63107
parent 803528 bc1d2ad0594ad1f54c05ade06495918aaee14911
child 803530 394c73e9385d51dd891a230b648eec1992850044
push id: 112134
push user: paul@paul.cx
push date: Mon, 04 Jun 2018 13:38:42 +0000
reviewers: padenot
bugs: 1413098
milestone: 62.0a1
Bug 1413098 - part2 : add test. r=padenot MozReview-Commit-ID: 145k2FDfMm4
dom/media/webaudio/test/mochitest.ini
dom/media/webaudio/test/test_notAllowedToStartAudioContextGC.html
toolkit/content/tests/browser/browser_autoplay_policy_user_gestures.js
--- a/dom/media/webaudio/test/mochitest.ini
+++ b/dom/media/webaudio/test/mochitest.ini
@@ -181,16 +181,17 @@ tags=capturestream
 [test_mediaStreamAudioSourceNodeNoGC.html]
 [test_mediaStreamAudioSourceNodePassThrough.html]
 [test_mediaStreamAudioSourceNodeResampling.html]
 tags=capturestream
 [test_mixingRules.html]
 skip-if = toolkit == 'android' # bug 1091965
 [test_nodeToParamConnection.html]
 [test_nodeCreationDocumentGone.html]
+[test_notAllowedToStartAudioContextGC.html]
 [test_OfflineAudioContext.html]
 [test_offlineDestinationChannelCountLess.html]
 [test_offlineDestinationChannelCountMore.html]
 [test_oscillatorNode.html]
 [test_oscillatorNode2.html]
 [test_oscillatorNodeNegativeFrequency.html]
 [test_oscillatorNodePassThrough.html]
 [test_oscillatorNodeStart.html]
new file mode 100644
--- /dev/null
+++ b/dom/media/webaudio/test/test_notAllowedToStartAudioContextGC.html
@@ -0,0 +1,57 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+  <title>Test GC for not-allow-to-start audio context</title>
+  <script type="text/javascript" src="/tests/SimpleTest/SimpleTest.js"></script>
+  <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+
+SimpleTest.requestFlakyTimeout(`Checking that something does not happen`);
+
+SimpleTest.waitForExplicitFinish();
+
+var destId;
+
+function observer(subject, topic, data) {
+  let id = parseInt(data);
+  ok(id !== destId, "dropping another node, not the context's destination");
+}
+
+SpecialPowers.addAsyncObserver(observer, "webaudio-node-demise", false);
+SimpleTest.registerCleanupFunction(function() {
+  SpecialPowers.removeAsyncObserver(observer, "webaudio-node-demise");
+});
+
+SpecialPowers.pushPrefEnv({"set": [["media.autoplay.enabled", false],
+                                   ["media.autoplay.enabled.user-gestures-needed", true]]},
+                          startTest);
+
+function startTest() {
+  info("- create audio context -");
+  let ac = new AudioContext();
+
+  info("- get node Id -");
+  destId = SpecialPowers.getPrivilegedProps(ac.destination, "id");
+
+  info("- trigger GCs -");
+  SpecialPowers.forceGC();
+  SpecialPowers.forceCC();
+  SpecialPowers.forceGC();
+
+  info("- after three GCs -");
+
+  // We're doing this async so that we can receive observerservice messages.
+  setTimeout(function() {
+    ok(true, `AudioContext that has been prevented
+              from starting has correctly survived GC`);
+    SimpleTest.finish();
+  }, 1);
+}
+
+</script>
+</pre>
+</body>
+</html>
--- a/toolkit/content/tests/browser/browser_autoplay_policy_user_gestures.js
+++ b/toolkit/content/tests/browser/browser_autoplay_policy_user_gestures.js
@@ -1,8 +1,10 @@
+/* eslint-disable mozilla/no-arbitrary-setTimeout */
+
 const VIDEO_PAGE = "https://example.com/browser/toolkit/content/tests/browser/file_video.html";
 
 var UserGestures = {
   MOUSE_CLICK: "mouse-click",
   MOUSE_MOVE: "mouse-move",
   KEYBOARD_PRESS: "keyboard-press"
 };
 
@@ -91,26 +93,120 @@ async function test_play_with_user_gestu
       await video.play();
       ok(gesture.isActivationGesture, "user gesture can activate the page");
       ok(!video.paused, "video starts playing.");
     } catch (e) {
       ok(!gesture.isActivationGesture, "user gesture can not activate the page");
       ok(video.paused, "video can not start playing.");
     }
   }
+
   await ContentTask.spawn(tab.linkedBrowser, gesture, play_video);
 
   info("- remove tab -");
   BrowserTestUtils.removeTab(tab);
 }
 
+async function test_webaudio_with_user_gesture(gesture) {
+  function createAudioContext() {
+    content.ac = new content.AudioContext();
+    let ac = content.ac;
+    ac.resumePromises = [];
+    ac.stateChangePromise = new Promise(resolve => {
+      ac.addEventListener("statechange", function() {
+        resolve();
+      }, {once: true});
+    });
+  }
+
+  function checking_audio_context_running_state() {
+    let ac = content.ac;
+    return new Promise(resolve => {
+      setTimeout(() => {
+        ok(ac.state === "suspended", "audio context is still suspended");
+        resolve();
+      }, 4000);
+    });
+  }
+
+  function resume_without_supported_user_gestures() {
+    let ac = content.ac;
+    let promise = ac.resume();
+    ac.resumePromises.push(promise);
+    return new Promise((resolve, reject) => {
+      setTimeout(() => {
+        if (ac.state === "suspended") {
+          ok(true, "audio context is still suspended");
+          resolve();
+        } else {
+          reject("audio context should not be allowed to start");
+        }
+      }, 4000);
+    });
+  }
+
+  function resume_with_supported_user_gestures() {
+    let ac = content.ac;
+    ac.resumePromises.push(ac.resume());
+    return Promise.all(ac.resumePromises).then(() => {
+      ok(ac.state === "running", "audio context starts running");
+    });
+  }
+
+  info("- open new tab -");
+  let tab = await BrowserTestUtils.openNewForegroundTab(window.gBrowser,
+                                                        "about:blank");
+  info("- create audio context -");
+  // We want the same audio context could be used between different content
+  // tasks, so it *must* need to be loaded by frame script.
+  let frameScript = createAudioContext;
+  let mm = tab.linkedBrowser.messageManager;
+  mm.loadFrameScript("data:,(" + frameScript.toString() + ")();", false);
+
+  info("- check whether audio context starts running -");
+  try {
+    await ContentTask.spawn(tab.linkedBrowser, null,
+                            checking_audio_context_running_state);
+  } catch (error) {
+    ok(false, error.toString());
+  }
+
+  info("- calling resume() -");
+  try {
+    await ContentTask.spawn(tab.linkedBrowser, null,
+                            resume_without_supported_user_gestures);
+  } catch (error) {
+    ok(false, error.toString());
+  }
+
+  info("- simulate user gesture -");
+  await simulateUserGesture(gesture, tab.linkedBrowser);
+
+  info("- calling resume() again -");
+  try {
+    let resumeFunc = gesture.isActivationGesture ?
+      resume_with_supported_user_gestures :
+      resume_without_supported_user_gestures;
+    await ContentTask.spawn(tab.linkedBrowser, null, resumeFunc);
+  } catch (error) {
+    ok(false, error.toString());
+  }
+
+  info("- remove tab -");
+  await BrowserTestUtils.removeTab(tab);
+}
+
 add_task(async function start_test() {
   info("- setup test preference -");
   await setup_test_preference();
 
   info("- test play when page doesn't be activated -");
   await test_play_without_user_gesture();
 
   info("- test play after page got user gesture -");
   for (let idx = 0; idx < UserGestureTests.length; idx++) {
+    info("- test play after page got user gesture -");
     await test_play_with_user_gesture(UserGestureTests[idx]);
+
+    info("- test web audio with user gesture -");
+    await test_webaudio_with_user_gesture(UserGestureTests[idx]);
   }
 });