Bug 1392216 - Part 3: VRPuppet dispatch submit frame result to VRListener thread; r?kip
MozReview-Commit-ID: K5ivNAkB89I
--- a/dom/vr/test/reftest/change_size.html
+++ b/dom/vr/test/reftest/change_size.html
@@ -84,17 +84,16 @@ If this fails, something is seriously wr
}
}
function onAnimationFrame() {
if (!vrDisplay.isPresenting) {
return;
}
- vrDisplay.requestAnimationFrame(onAnimationFrame);
gl.clearColor(0.0, 1.0, 0.0, 1.0);
gl.clear(gl.COLOR_BUFFER_BIT);
// Presenting render a stereo view.
gl.viewport(0, 0, webglCanvas.width * 0.5, webglCanvas.height);
gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
gl.viewport(webglCanvas.width * 0.5, 0, webglCanvas.width * 0.5, webglCanvas.height);
@@ -116,16 +115,17 @@ If this fails, something is seriously wr
}
};
img.src = submitResult.base64Image;
document.body.appendChild(img);
} else {
img.src = submitResult.base64Image;
}
}
+ vrDisplay.requestAnimationFrame(onAnimationFrame);
}
function runTest() {
webglCanvas = document.getElementById('canvas');
gl = WebGLUtil.getWebGL('canvas');
if (!gl) {
setStatus('WebGL context creation failed.');
return;
--- a/dom/vr/test/reftest/draw_rect.html
+++ b/dom/vr/test/reftest/draw_rect.html
@@ -57,17 +57,16 @@ If this fails, something is seriously wr
});
}
function onAnimationFrame() {
if (!vrDisplay.isPresenting) {
return;
}
- vrDisplay.requestAnimationFrame(onAnimationFrame);
gl.clearColor(0.0, 1.0, 0.0, 1.0);
gl.clear(gl.COLOR_BUFFER_BIT);
// Presenting render a stereo view.
gl.viewport(0, 0, webglCanvas.width * 0.5, webglCanvas.height);
gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
gl.viewport(webglCanvas.width * 0.5, 0, webglCanvas.width * 0.5, webglCanvas.height);
@@ -84,16 +83,17 @@ If this fails, something is seriously wr
setTimeout(testComplete, 0);
};
img.src = submitResult.base64Image;
document.body.appendChild(img);
} else {
img.src = submitResult.base64Image;
}
}
+ vrDisplay.requestAnimationFrame(onAnimationFrame);
}
function runTest() {
webglCanvas = document.getElementById('canvas');
gl = WebGLUtil.getWebGL('canvas');
if (!gl) {
setStatus('WebGL context creation failed.');
return;
--- a/dom/vr/test/reftest/reftest.list
+++ b/dom/vr/test/reftest/reftest.list
@@ -1,11 +1,11 @@
# WebVR Reftests
# Please confirm there is no other VR display connected. Otherwise, VRPuppetDisplay can't be attached.
-default-preferences pref(dom.vr.puppet.enabled,true) pref(dom.vr.test.enabled,true) pref(dom.vr.require-gesture,false) pref(dom.vr.puppet.submitframe,1)
+default-preferences pref(dom.vr.puppet.enabled,true) pref(dom.vr.test.enabled,true) pref(dom.vr.require-gesture,false) pref(dom.vr.puppet.submitframe,1) pref(dom.vr.display.rafMaxDuration,200)
# VR SubmitFrame is only implemented for D3D11.1 and MacOSX now.
# Our Windows 7 test machines don't support D3D11.1, so we run these tests on Windows 8+ only.
# We need to continue to investigate why these reftests can be run well in local,
# but will be suspended until terminating on reftest D3D11 debug build.
skip-if((!winWidget&&release_or_beta)||Android||gtkWidget||(winWidget&&isDebugBuild)||!layersGPUAccelerated||/^Windows\x20NT\x206\.1/.test(http.oscpu)) == draw_rect.html wrapper.html?draw_rect.png
# On MacOSX platform, getting different color interpolation result.
# For lower resolution Mac hardware, we need to adjust it to fuzzy-if(cocoaWidget,1,1200).
--- a/gfx/thebes/gfxPrefs.h
+++ b/gfx/thebes/gfxPrefs.h
@@ -364,16 +364,17 @@ private:
DECL_GFX_PREF(Live, "dom.vr.oculus.present.timeout", VROculusPresentTimeout, int32_t, 10000);
DECL_GFX_PREF(Live, "dom.vr.oculus.quit.timeout", VROculusQuitTimeout, int32_t, 30000);
DECL_GFX_PREF(Once, "dom.vr.openvr.enabled", VROpenVREnabled, bool, false);
DECL_GFX_PREF(Once, "dom.vr.osvr.enabled", VROSVREnabled, bool, false);
DECL_GFX_PREF(Live, "dom.vr.poseprediction.enabled", VRPosePredictionEnabled, bool, true);
DECL_GFX_PREF(Live, "dom.vr.require-gesture", VRRequireGesture, bool, true);
DECL_GFX_PREF(Live, "dom.vr.puppet.enabled", VRPuppetEnabled, bool, false);
DECL_GFX_PREF(Live, "dom.vr.puppet.submitframe", VRPuppetSubmitFrame, uint32_t, 0);
+ DECL_GFX_PREF(Live, "dom.vr.display.rafMaxDuration", VRDisplayRafMaxDuration, uint32_t, 50);
DECL_GFX_PREF(Live, "dom.w3c_pointer_events.enabled", PointerEventsEnabled, bool, false);
DECL_GFX_PREF(Live, "dom.w3c_touch_events.enabled", TouchEventsEnabled, int32_t, 0);
DECL_GFX_PREF(Live, "general.smoothScroll", SmoothScrollEnabled, bool, true);
DECL_GFX_PREF(Live, "general.smoothScroll.currentVelocityWeighting",
SmoothScrollCurrentVelocityWeighting, float, 0.25);
DECL_GFX_PREF(Live, "general.smoothScroll.durationToIntervalRatio",
SmoothScrollDurationToIntervalRatio, int32_t, 200);
--- a/gfx/vr/VRDisplayHost.cpp
+++ b/gfx/vr/VRDisplayHost.cpp
@@ -1,14 +1,15 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "VRDisplayHost.h"
+#include "gfxPrefs.h"
#include "gfxVR.h"
#include "ipc/VRLayerParent.h"
#include "mozilla/layers/TextureHost.h"
#include "mozilla/dom/GamepadBinding.h" // For GamepadMappingType
#include "VRThread.h"
#if defined(XP_WIN)
@@ -192,62 +193,54 @@ VRDisplayHost::StartFrame()
}
void
VRDisplayHost::NotifyVSync()
{
/**
* We will trigger a new frame immediately after a successful frame texture
* submission. If content fails to call VRDisplay.submitFrame after
- * kVRDisplayRAFMaxDuration milliseconds has elapsed since the last
+ * dom.vr.display.rafMaxDuration milliseconds has elapsed since the last
* VRDisplay.requestAnimationFrame, we act as a "watchdog" and kick-off
* a new VRDisplay.requestAnimationFrame to avoid a render loop stall and
* to give content a chance to recover.
*
* If the lower level VR platform API's are rejecting submitted frames,
* such as when the Oculus "Health and Safety Warning" is displayed,
* we will not kick off the next frame immediately after VRDisplay.submitFrame
* as it would result in an unthrottled render loop that would free run at
* potentially extreme frame rates. To ensure that content has a chance to
* resume its presentation when the frames are accepted once again, we rely
* on this "watchdog" to act as a VR refresh driver cycling at a rate defined
- * by kVRDisplayRAFMaxDuration.
- *
- * kVRDisplayRAFMaxDuration is the number of milliseconds since last frame
- * start before triggering a new frame. When content is failing to submit
- * frames on time or the lower level VR platform API's are rejecting frames,
- * kVRDisplayRAFMaxDuration determines the rate at which RAF callbacks
- * will be called.
+ * by dom.vr.display.rafMaxDuration.
*
* This number must be larger than the slowest expected frame time during
* normal VR presentation, but small enough not to break content that
* makes assumptions of reasonably minimal VSync rate.
*
* The slowest expected refresh rate for a VR display currently is an
* Oculus CV1 when ASW (Asynchronous Space Warp) is enabled, at 45hz.
- * A kVRDisplayRAFMaxDuration value of 50 milliseconds results in a 20hz
+ * A dom.vr.display.rafMaxDuration value of 50 milliseconds results in a 20hz
* rate, which avoids inadvertent triggering of the watchdog during
* Oculus ASW even if every second frame is dropped.
*/
- const double kVRDisplayRAFMaxDuration = 50;
-
bool bShouldStartFrame = false;
if (mDisplayInfo.mPresentingGroups == 0) {
// If this display isn't presenting, refresh the sensors and trigger
// VRDisplay.requestAnimationFrame at the normal 2d display refresh rate.
bShouldStartFrame = true;
} else {
// If content fails to call VRDisplay.submitFrame, we must eventually
// time-out and trigger a new frame.
if (mLastFrameStart.IsNull()) {
bShouldStartFrame = true;
} else {
TimeDuration duration = TimeStamp::Now() - mLastFrameStart;
- if (duration.ToMilliseconds() > kVRDisplayRAFMaxDuration) {
+ if (duration.ToMilliseconds() > gfxPrefs::VRDisplayRafMaxDuration()) {
bShouldStartFrame = true;
}
}
}
if (bShouldStartFrame) {
VRManager *vm = VRManager::Get();
MOZ_ASSERT(vm);
@@ -269,17 +262,16 @@ VRDisplayHost::SubmitFrame(VRLayerParent
return;
}
// Ensure that we only accept the first SubmitFrame call per RAF cycle.
if (!mFrameStarted || aFrameId != mDisplayInfo.mFrameId) {
return;
}
mFrameStarted = false;
-
switch (aTexture.type()) {
#if defined(XP_WIN)
case SurfaceDescriptor::TSurfaceDescriptorD3D10: {
if (!CreateD3DObjects()) {
return;
}
const SurfaceDescriptorD3D10& surf = aTexture.get_SurfaceDescriptorD3D10();
--- a/gfx/vr/gfxVRPuppet.cpp
+++ b/gfx/vr/gfxVRPuppet.cpp
@@ -12,16 +12,17 @@
#endif
#include "mozilla/Base64.h"
#include "mozilla/gfx/DataSurfaceHelpers.h"
#include "gfxPrefs.h"
#include "gfxUtils.h"
#include "gfxVRPuppet.h"
#include "VRManager.h"
+#include "VRThread.h"
#include "mozilla/dom/GamepadEventTypes.h"
#include "mozilla/dom/GamepadBinding.h"
// See CompositorD3D11Shaders.h
namespace mozilla {
namespace layers {
struct ShaderBytes { const void* mData; size_t mLength; };
@@ -372,17 +373,21 @@ VRDisplayPuppet::SubmitFrame(ID3D11Textu
nsCString rawString(Substring(srcData, mapInfo.RowPitch * desc.Height));
if (Base64Encode(rawString, result.mBase64Image) != NS_OK) {
MOZ_ASSERT(false, "Failed to encode base64 images.");
}
mContext->Unmap(mappedTexture, 0);
// Dispatch the base64 encoded string to the DOM side. Then, it will be decoded
// and convert to a PNG image there.
- vm->DispatchSubmitFrameResult(mDisplayInfo.mDisplayID, result);
+ MessageLoop* loop = VRListenerThreadHolder::Loop();
+ loop->PostTask(NewRunnableMethod<const uint32_t, VRSubmitFrameResultInfo>(
+ "VRManager::DispatchSubmitFrameResult",
+ vm, &VRManager::DispatchSubmitFrameResult, mDisplayInfo.mDisplayID, result
+ ));
break;
}
case 2:
{
// The VR compositor sumbmit frame to the screen window,
// the current coordinate is at (0, 0, width, height).
Matrix viewMatrix = Matrix::Translation(-1.0, 1.0);
viewMatrix.PreScale(2.0f / float(aSize.width), 2.0f / float(aSize.height));
@@ -513,17 +518,21 @@ VRDisplayPuppet::SubmitFrame(MacIOSurfac
}
dataSurf->Unmap();
if (Base64Encode(rawString, result.mBase64Image) != NS_OK) {
MOZ_ASSERT(false, "Failed to encode base64 images.");
}
// Dispatch the base64 encoded string to the DOM side. Then, it will be decoded
// and convert to a PNG image there.
- vm->DispatchSubmitFrameResult(mDisplayInfo.mDisplayID, result);
+ MessageLoop* loop = VRListenerThreadHolder::Loop();
+ loop->PostTask(NewRunnableMethod<const uint32_t, VRSubmitFrameResultInfo>(
+ "VRManager::DispatchSubmitFrameResult",
+ vm, &VRManager::DispatchSubmitFrameResult, mDisplayInfo.mDisplayID, result
+ ));
}
break;
}
case 2:
{
MOZ_ASSERT(false, "No support for showing VR frames on MacOSX yet.");
break;
}
--- a/gfx/vr/ipc/VRLayerParent.cpp
+++ b/gfx/vr/ipc/VRLayerParent.cpp
@@ -66,17 +66,17 @@ VRLayerParent::RecvSubmitFrame(const lay
if (mVRDisplayID) {
MessageLoop* loop = layers::CompositorThreadHolder::Loop();
VRManager* vm = VRManager::Get();
RefPtr<VRDisplayHost> display = vm->GetDisplay(mVRDisplayID);
if (display) {
// Because VR compositor still shares the same graphics device with Compositor thread.
// We have to post sumbit frame tasks to Compositor thread.
// TODO: Move SubmitFrame to Bug 1392217.
- loop->PostTask(NewRunnableMethod<VRDisplayHost*, const layers::SurfaceDescriptor&, uint64_t,
+ loop->PostTask(NewRunnableMethod<VRDisplayHost*, const layers::SurfaceDescriptor, uint64_t,
const gfx::Rect&, const gfx::Rect&>(
"gfx::VRLayerParent::SubmitFrame",
this,
&VRLayerParent::SubmitFrame, display, aTexture, aFrameId, aLeftEyeRect, aRightEyeRect));
}
}
return IPC_OK();
--- a/modules/libpref/init/all.js
+++ b/modules/libpref/init/all.js
@@ -5336,16 +5336,20 @@ pref("gfx.vr.osvr.utilLibPath", "");
pref("gfx.vr.osvr.commonLibPath", "");
pref("gfx.vr.osvr.clientLibPath", "");
pref("gfx.vr.osvr.clientKitLibPath", "");
// Puppet device, used for simulating VR hardware within tests and dev tools
pref("dom.vr.puppet.enabled", false);
// Allow displaying the result of vr submitframe (0: disable, 1: store the
// result as a base64 image, 2: show it on the screen).
pref("dom.vr.puppet.submitframe", 0);
+// The number of milliseconds since last frame start before triggering a new frame.
+// When content is failing to submit frames on time or the lower level VR platform APIs
+// are rejecting frames, it determines the rate at which RAF callbacks will be called.
+pref("dom.vr.display.rafMaxDuration", 50);
// VR test system.
pref("dom.vr.test.enabled", false);
// If the user puts a finger down on an element and we think the user
// might be executing a pan gesture, how long do we wait before
// tentatively deciding the gesture is actually a tap and activating
// the target element?
pref("ui.touch_activation.delay_ms", 100);