Bug 1464922 - Don't allow media without audio tracks to autoplay. r?bryce
I don't think we should allow media without audio tracks to autoplay because:
* It means play() called before loaded metadata behaves differently from
play() called after loaded metadata.
* With the current impl we dispatch the "play" event and then the "pause"
event when we decide we should block, which may confuse some sites' controls.
* Delaying running the play() algorithm until we've loaded metadata would add
significant complexity, and may break sites.
* Chrome doesn't have this provision, meaning the complexity required to
support it will not result in much benefit to us.
MozReview-Commit-ID: FSVlDJAOisw
--- a/dom/html/HTMLMediaElement.cpp
+++ b/dom/html/HTMLMediaElement.cpp
@@ -4027,22 +4027,17 @@ HTMLMediaElement::PlayInternal(ErrorResu
// 4.8.12.8
// When the play() method on a media element is invoked, the user agent must
// run the following steps.
// 4.8.12.8 - Step 1:
// If the media element is not allowed to play, return a promise rejected
// with a "NotAllowedError" DOMException and abort these steps.
- // Note: IsAllowedToPlay() needs to know whether there is an audio track
- // in the resource, and for that we need to be at readyState HAVE_METADATA
- // or above. So only reject here if we're at readyState HAVE_METADATA. If
- // we're below that, we'll we delay fulfilling the play promise until we've
- // reached readyState >= HAVE_METADATA below.
- if (mReadyState >= HAVE_METADATA && !IsAllowedToPlay()) {
+ if (!IsAllowedToPlay()) {
// NOTE: for promise-based-play, will return a rejected promise here.
LOG(LogLevel::Debug,
("%p Play() promise rejected because not allowed to play.", this));
promise->MaybeReject(NS_ERROR_DOM_MEDIA_NOT_ALLOWED_ERR);
return promise.forget();
}
// 4.8.12.8 - Step 2:
--- a/dom/media/AutoplayPolicy.cpp
+++ b/dom/media/AutoplayPolicy.cpp
@@ -42,23 +42,16 @@ AutoplayPolicy::IsMediaElementAllowedToP
EventStateManager::IsHandlingUserInput();
}
// Muted content
if (aElement->Volume() == 0.0 || aElement->Muted()) {
return true;
}
- // Media has already loaded metadata and doesn't contain audio track
- if (aElement->IsVideo() &&
- aElement->ReadyState() >= HTMLMediaElementBinding::HAVE_METADATA &&
- !aElement->HasAudio()) {
- return true;
- }
-
// Whitelisted.
if (nsContentUtils::IsExactSitePermAllow(
aElement->NodePrincipal(), "autoplay-media")) {
return true;
}
// Activated by user gesture.
if (aElement->OwnerDoc()->HasBeenUserActivated()) {
--- a/dom/media/test/file_autoplay_policy_play_before_loadedmetadata.html
+++ b/dom/media/test/file_autoplay_policy_play_before_loadedmetadata.html
@@ -22,43 +22,36 @@
window.is = window.opener.is;
window.info = window.opener.info;
async function testPlayBeforeLoadedMetata(testCase, parent_window) {
info("testPlayBeforeLoadedMetata: " + testCase.resource);
let element = document.createElement("video");
element.preload = "auto";
+ element.muted = testCase.muted;
element.src = testCase.resource;
document.body.appendChild(element);
is(element.paused, true, testCase.resource + " - should start out paused.");
let playEventFired = false;
once(element, "play").then(() => { playEventFired = true; });
let playingEventFired = false;
once(element, "playing").then(() => { playingEventFired = true;});
let pauseEventFired = false;
once(element, "pause").then(() => { pauseEventFired = true; });
let played = await element.play().then(() => true, () => false);
- // Wait for one round through the event loop. This gives any tasks
- // inside Gecko enqueued to dispatch events a chance to run.
- // Specifically the "playing" event, if it's erronously fired.
- await new Promise((resolve, reject) => {
- setTimeout(resolve, 0);
- });
-
- let playingEventMsg = testCase.resource + " should " + (!testCase.shouldPlay ? "not " : "") + " fire playing";
- is(playingEventFired, testCase.shouldPlay, playingEventMsg);
let playMsg = testCase.resource + " should " + (!testCase.shouldPlay ? "not " : "") + "play";
is(played, testCase.shouldPlay, playMsg);
- is(playEventFired, true, testCase.resource + " - we should always get a play event");
- is(pauseEventFired, !testCase.shouldPlay, testCase.resource + " - if we shouldn't play, we should get a pause event");
+ is(playEventFired, testCase.shouldPlay, testCase.resource + " - should get play event if we played");
+ is(playingEventFired, testCase.shouldPlay, testCase.resource + "- should get playing event if we played");
+ is(pauseEventFired, false, testCase.resource + " - should not get pause event if we played");
removeNodeAndSource(element);
}
nextWindowMessage().then(
async (event) => {
await testPlayBeforeLoadedMetata(event.data, event.source);
event.source.postMessage("done", "*");
});
--- a/dom/media/test/test_autoplay_policy.html
+++ b/dom/media/test/test_autoplay_policy.html
@@ -29,72 +29,63 @@ window.ok = function(val, msg, token) {
SimpleTest.ok(val, msg + ", token=" + token);
}
/**
* test files and paremeters
*/
var autoplayTests = [
/* video */
- { name:"gizmo.mp4", type:"video/mp4", hasAudio:true },
- { name:"gizmo-noaudio.mp4", type:"video/mp4", hasAudio:false },
- { name:"gizmo.webm", type:'video/webm', hasAudio:true },
- { name:"gizmo-noaudio.webm", type:'video/webm', hasAudio:false },
+ { name: "gizmo.mp4", type: "video/mp4" },
+ { name: "gizmo-noaudio.mp4", type: "video/mp4" },
+ { name: "gizmo.webm", type: "video/webm" },
+ { name: "gizmo-noaudio.webm", type: "video/webm" },
/* audio */
- { name:"small-shot.ogg", type:"audio/ogg", hasAudio:true },
- { name:"small-shot.m4a", type:"audio/mp4", hasAudio:true },
- { name:"small-shot.mp3", type:"audio/mpeg", hasAudio:true },
- { name:"small-shot.flac", type:"audio/flac", hasAudio:true }
+ { name: "small-shot.ogg", type: "audio/ogg" },
+ { name: "small-shot.m4a", type: "audio/mp4" },
+ { name: "small-shot.mp3", type: "audio/mpeg" },
+ { name: "small-shot.flac", type: "audio/flac" }
];
var autoplayParams = [
- { volume: 1.0, muted: false, preload: "none" },
- { volume: 1.0, muted: false, preload: "metadata" },
- { volume: 0.0, muted: false, preload: "none" },
- { volume: 0.0, muted: false, preload: "metadata" },
- { volume: 1.0, muted: true, preload: "none"},
- { volume: 1.0, muted: true, preload: "metadata" },
- { volume: 0.0, muted: true, preload: "none"},
- { volume: 0.0, muted: true, preload: "metadata" }
+ { volume: 1.0, muted: false },
+ { volume: 0.0, muted: false },
+ { volume: 1.0, muted: true },
+ { volume: 0.0, muted: true },
];
function createTestArray()
{
var tests = [];
- for (let testIdx = 0; testIdx < autoplayTests.length; testIdx++) {
- for (let paramIdx = 0; paramIdx < autoplayParams.length; paramIdx++) {
- let test = autoplayTests[testIdx];
- let param = autoplayParams[paramIdx];
- let obj = new Object;
- obj.name = test.name;
- obj.type = test.type;
- obj.hasAudio = test.hasAudio;
- obj.volume = param.volume;
- obj.muted = param.muted;
- obj.preload = param.preload;
- tests.push(obj);
+ for (let test of autoplayTests) {
+ for (let param of autoplayParams) {
+ tests.push({
+ name: test.name,
+ type: test.type,
+ volume: param.volume,
+ muted: param.muted,
+ });
}
}
return tests;
}
/**
* Main test function for different autoplay cases without user interaction.
*
* When the pref "media.autoplay.enabled" is false and the pref
- * "media.autoplay.enabled.user-gestures-needed" is true, we would only allow
+ * "media.autoplay.enabled.user-gestures-needed" is true, we only allow
* audible media to autoplay after the website has been activated by specific
- * user gestures. However, non-audible media won't be restricted.
+ * user gestures. However, inaudible media won't be restricted.
*
- * Audible means the volume is not zero, or muted is not true for the video with
- * audio track. For media without loading metadata, we can't know whether it has
- * audio track or not, so we would also regard it as audible media.
+ * Audible means the volume is non zero and muted is false.
*
- * Non-audible means the volume is zero, the muted is true, or the video without
- * audio track.
+ * Inaudible means the volume is zero, or the muted is true.
+ *
+ * We do not take into account whether a media has an audio track.
*/
async function runTest(test, token) {
manager.started(token);
await testPlay(test, token);
await testAutoplayKeyword(test, token);
manager.finished(token);
@@ -102,34 +93,25 @@ async function runTest(test, token) {
manager.runTests(createTestArray(), runTest);
/**
* Different test scenarios
*/
async function testPlay(test, token) {
info("### start testPlay", token);
- info(`volume=${test.volume}, muted=${test.muted}, ` +
- `preload=${test.preload}, hasAudio=${test.hasAudio}`, token);
+ info(`volume=${test.volume}, muted=${test.muted}`, token);
let element = document.createElement(getMajorMimeType(test.type));
- element.preload = test.preload;
element.volume = test.volume;
element.muted = test.muted;
element.src = test.name;
document.body.appendChild(element);
- let preLoadNone = test.preload == "none";
- if (!preLoadNone) {
- info("### wait for loading metadata", token);
- await once(element, "loadedmetadata");
- }
-
- let isAudible = (preLoadNone || test.hasAudio) &&
- test.volume != 0.0 &&
+ let isAudible = test.volume != 0.0 &&
!test.muted;
let state = isAudible? "audible" : "non-audible";
info(`### calling play() for ${state} media`, token);
let promise = element.play();
if (isAudible) {
await promise.catch(function(error) {
ok(element.paused, `${state} media fail to start via play()`, token);
is(error.name, "NotAllowedError", "rejected play promise", token);
@@ -141,40 +123,31 @@ async function testPlay(test, token) {
ok(!element.paused, `${state} media start via play()`, token);
}
removeNodeAndSource(element);
}
async function testAutoplayKeyword(test, token) {
info("### start testAutoplayKeyword", token);
- info(`volume=${test.volume}, muted=${test.muted}, ` +
- `preload=${test.preload}, hasAudio=${test.hasAudio}`, token);
+ info(`volume=${test.volume}, muted=${test.muted}`, token);
let element = document.createElement(getMajorMimeType(test.type));
element.autoplay = true;
- element.preload = test.preload;
element.volume = test.volume;
element.muted = test.muted;
element.src = test.name;
document.body.appendChild(element);
- let preLoadNone = test.preload == "none";
- let isAudible = (preLoadNone || test.hasAudio) &&
- test.volume != 0.0 &&
+ let isAudible = test.volume != 0.0 &&
!test.muted;
let state = isAudible? "audible" : "non-audible";
info(`### wait to autoplay for ${state} media`, token);
if (isAudible) {
- if (preLoadNone) {
- await once(element, "suspend");
- is(element.readyState, 0 /* HAVE_NOTHING */, "media won't start loading", token);
- } else {
- await once(element, "canplay");
- }
+ await once(element, "canplay");
ok(element.paused, `can not start with 'autoplay' keyword for ${state} media`, token);
} else {
await once(element, "play");
ok(!element.paused, `start with 'autoplay' keyword for ${state} media`, token);
}
removeNodeAndSource(element);
}
--- a/dom/media/test/test_autoplay_policy_permission.html
+++ b/dom/media/test/test_autoplay_policy_permission.html
@@ -17,54 +17,53 @@
gTestPrefs.push(["media.autoplay.enabled", false],
["media.autoplay.enabled.user-gestures-needed", true]);
SpecialPowers.pushPrefEnv({ 'set': gTestPrefs }, () => {
runTest();
});
- async function canPlayIn(testCase) {
+ async function testPlayInOrigin(testCase) {
// Run test in a new window, to ensure its user gesture
// activation state isn't tainted by preceeding tests.
let child = window.open("file_autoplay_policy_permission.html", "", "width=500,height=500");
await once(child, "load");
child.postMessage(testCase, "*");
- let result = await nextWindowMessage();
+ await nextWindowMessage();
child.close();
- return result.played == true;
}
async function runTest() {
// First verify that we can't play in a document unwhitelisted.
is(window.origin, "http://mochi.test:8888", "Origin should be as we assume, otherwise the rest of the test is bogus!");
- await canPlayIn({
+ await testPlayInOrigin({
origin: "http://mochi.test:8888",
shouldPlay: false,
message: "Should not be able to play unwhitelisted."
});
// Add our origin to the whitelist.
await pushAutoplayAllowedPermission();
// Now we should be able to play...
- await canPlayIn({
+ await testPlayInOrigin({
origin: "http://mochi.test:8888",
shouldPlay: true,
message: "Should be able to play since whitelisted."
});
// But sub-origins should not.
- await canPlayIn({
+ await testPlayInOrigin({
origin: "http://test1.mochi.test:8888",
shouldPlay: false,
message: "Sub origin should not count as whitelisted."
});
- await canPlayIn({
+ await testPlayInOrigin({
origin: "http://sub1.test1.mochi.test:8888",
shouldPlay: false,
message: "Sub-sub-origins should not count as whitelisted."
});
SimpleTest.finish();
}
--- a/dom/media/test/test_autoplay_policy_play_before_loadedmetadata.html
+++ b/dom/media/test/test_autoplay_policy_play_before_loadedmetadata.html
@@ -24,21 +24,33 @@
SpecialPowers.pushPrefEnv({ 'set': gTestPrefs }, () => {
runTest();
});
let testCases = [
{
resource: "320x240.ogv", // Only video track.
+ shouldPlay: false,
+ muted: false,
+ },
+ {
+ resource: "320x240.ogv", // Only video track.
shouldPlay: true,
+ muted: true,
},
{
resource: "short.mp4", // Audio and video track.
shouldPlay: false,
+ muted: false,
+ },
+ {
+ resource: "short.mp4", // Audio and video track.
+ shouldPlay: true,
+ muted: true,
},
];
let child_url = "file_autoplay_policy_play_before_loadedmetadata.html";
async function runTest() {
for (testCase of testCases) {
// Run each test in a new window, to ensure its user gesture