Bug 1406027 - Add mochitest for audio gUM and no output. r?jib
MozReview-Commit-ID: KjlQvIsyr64
--- a/dom/media/tests/mochitest/mochitest.ini
+++ b/dom/media/tests/mochitest/mochitest.ini
@@ -39,16 +39,17 @@ skip-if = android_version == '18' # andr
skip-if = android_version == '18' # android(Bug 1189784, timeouts on 4.3 emulator)
[test_dataChannel_noOffer.html]
[test_enumerateDevices.html]
[test_enumerateDevices_iframe.html]
skip-if = true # needed by test_enumerateDevices.html on builders
[test_ondevicechange.html]
skip-if = os == 'android'
[test_getUserMedia_active_autoplay.html]
+[test_getUserMedia_audio_inputOnly.html]
[test_getUserMedia_audioCapture.html]
skip-if = toolkit == 'android' # android(Bug 1189784, timeouts on 4.3 emulator), android(Bug 1264333)
[test_getUserMedia_addTrackRemoveTrack.html]
skip-if = android_version == '18' || os == 'linux' # android(Bug 1189784, timeouts on 4.3 emulator), linux bug 1377450
[test_getUserMedia_addtrack_removetrack_events.html]
skip-if os == 'linux' && debug # Bug 1389983
[test_getUserMedia_basicAudio.html]
[test_getUserMedia_basicVideo.html]
new file mode 100644
--- /dev/null
+++ b/dom/media/tests/mochitest/test_getUserMedia_audio_inputOnly.html
@@ -0,0 +1,65 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+  <script type="application/javascript" src="mediaStreamPlayback.js"></script>
+</head>
+<body>
+<pre id="test">
+<script type="application/javascript">
+  createHTML({
+    title: "getUserMedia Audio Test with no audio output present",
+    bug: "1406027",
+    visible: true,
+  });
+
+  runTest(async () => {
+    let stream = await getUserMedia({audio: true});
+    let recorder = new MediaRecorder(stream);
+    recorder.start();
+
+    // We record 5 seconds of audio for the analysis pass later.
+    await wait(5000);
+
+    // With no timeslice passed to start(), `dataavailable` fires once with
+    // the complete recording when we stop().
+    let dataavailable = new Promise(r => recorder.addEventListener("dataavailable", r, {once: true}));
+    recorder.stop();
+    let {data: blob} = await dataavailable;
+    stream.getTracks().forEach(t => t.stop());
+
+    // Play the recording back and analyse it via mozCaptureStream(): the
+    // test tone must be present at TEST_AUDIO_FREQ and absent at the
+    // neighboring half/1.5x frequencies.
+    let audio = document.createElement("audio");
+    audio.preload = "metadata";
+    audio.src = URL.createObjectURL(blob);
+    await new Promise(r => audio.addEventListener("loadedmetadata", r, {once: true}));
+    let ended = new Promise(r => audio.addEventListener("ended", r, {once: true}));
+
+    let ac = new AudioContext();
+    let analyser = new AudioStreamAnalyser(ac, audio.mozCaptureStream());
+    is(analyser.sourceNodes.length, 1, `${analyser.sourceNodes.length} nodes`);
+    analyser.enableDebugCanvas();
+    audio.play();
+    try {
+      await analyser.waitForAnalysisSuccess(
+        array =>
+          array[analyser.binIndexForFrequency(TEST_AUDIO_FREQ / 2)] < 50 &&
+          array[analyser.binIndexForFrequency(TEST_AUDIO_FREQ)] > 200 &&
+          array[analyser.binIndexForFrequency(TEST_AUDIO_FREQ * 1.5)] < 50,
+        ended.then(() => new Error("Recording ended")));
+      ok(true, "Analysis OK");
+    } catch (e) {
+      ok(false, `Error: ${e.name}, ${e.message}`);
+    }
+
+    analyser.disableDebugCanvas();
+    URL.revokeObjectURL(audio.src);
+    // Detach the source with removeAttribute; assigning null would coerce
+    // to the string "null" and trigger a bogus load of a relative URL.
+    audio.removeAttribute("src");
+  });
+
+</script>
+</pre>
+</body>
+</html>