Bug 1368990 - Add reftest for verifying onVRPresentChange; r?kip
author     Daosheng Mu <daoshengmu@gmail.com>
date       Wed, 07 Jun 2017 17:05:04 +0800
changeset  595589 ccc25f706acc443d29b0ba5b18199680036125f4
parent     594149 da66c4a05fda49d457d9411a7092fed87cf9e53a
child      633736 81def981169a651f8d4c683b599aedacf4e9bdee
push id    64373
push user  bmo:dmu@mozilla.com
push date  Fri, 16 Jun 2017 12:52:43 +0000
reviewers  kip
bugs       1368990
milestone  56.0a1
Bug 1368990 - Add reftest for verifying onVRPresentChange; r?kip MozReview-Commit-ID: I8cce1eYQV8
dom/vr/VRDisplay.cpp
dom/vr/test/reftest/change_size.html
dom/vr/test/reftest/change_size.png
dom/vr/test/reftest/reftest.list
gfx/vr/gfxVRPuppet.cpp
--- a/dom/vr/VRDisplay.cpp
+++ b/dom/vr/VRDisplay.cpp
@@ -458,17 +458,17 @@ VRDisplay::GetSubmitFrameResult(VRSubmit
   }
 
   nsAutoCString decodedImg;
   if (Base64Decode(resultInfo.mBase64Image, decodedImg) != NS_OK) {
     MOZ_ASSERT(false, "Failed to do decode base64 images.");
     return false;
   }
 
-  const char* srcData = (decodedImg.get());
+  const char* srcData = decodedImg.get();
   const gfx::IntSize size(resultInfo.mWidth, resultInfo.mHeight);
   RefPtr<DataSourceSurface> dataSurface = gfx::CreateDataSourceSurfaceFromData(
                                             size, resultInfo.mFormat, (uint8_t*)srcData,
                                             StrideForFormatAndWidth(resultInfo.mFormat, resultInfo.mWidth));
   if (!dataSurface || !dataSurface->IsValid()) {
     MOZ_ASSERT(false, "dataSurface is null.");
     return false;
   }
new file mode 100644
--- /dev/null
+++ b/dom/vr/test/reftest/change_size.html
@@ -0,0 +1,168 @@
+<!DOCTYPE html>
+<meta charset='UTF-8'>
+<!-- Change the viewport size in WebGL and submit the frame to the VR device as a
+base64 image. If this fails, something is seriously wrong. -->
+<html class="reftest-wait">
+<head>
+  <script type='text/javascript' src='webgl-util.js'></script>
+  <script type='text/javascript' src="VRSimulationDriver.js"></script>
+  <script id="vs" type="x-shader/x-vertex">
+    attribute vec2 aVertCoord;
+
+    void main(void) {
+      gl_Position = vec4(aVertCoord, 0.0, 1.0);
+    }
+  </script>
+  <script id="fs" type="x-shader/x-fragment">
+    precision mediump float;
+
+    void main(void) {
+      gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0);
+    }
+  </script>
+  <script type='text/javascript'>
+    'use strict';
+
+    var submitResult = null;
+    var vrDisplay = null;
+    var webglCanvas = null;
+    var gl = null;
+    var prog = null;
+    var img = null;
+    // The total resolution is 540 x 300, matching the aspect ratio of
+    // 2160 x 1200 (like the Vive and Oculus); each eye gets half the width.
+    const eyeWidth = 270;
+    const eyeHeight = 300;
+
+    function setStatus(text) {
+      var elem = document.getElementById('status');
+      elem.innerHTML = text;
+    }
+
+    function initVRMock() {
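+      // navigator.requestVRServiceTest() is only exposed when the
+      // dom.vr.test.enabled pref is set (see reftest.list).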
+      VRServiceTest = navigator.requestVRServiceTest();
+      if (!VRServiceTest) {
+        setStatus('Failed to get VRServiceTest.');
+        return;
+      }
+
+      VRSimulationDriver.AttachWebVRDisplay().then(() => {
+        VRSimulationDriver.SetEyeResolution(eyeWidth, eyeHeight);
+        VRSimulationDriver.UpdateVRDisplay();
+      }).then(() => {
+        // Looking for VR displays
+        if (navigator.getVRDisplays) {
+          submitResult = new VRSubmitFrameResult();
+          navigator.getVRDisplays().then(function (displays) {
+            if (displays.length > 0) {
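+              // vrdisplaypresentchange fires whenever presentation starts or
+              // stops; onVRPresentChange verifies the puppet eye resolution.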
+              window.addEventListener('vrdisplaypresentchange', onVRPresentChange, false);
+
+              vrDisplay = displays[0];
+              vrDisplay.requestPresent([{ source: webglCanvas }]);
+              vrDisplay.requestAnimationFrame(onAnimationFrame);
+            }
+          });
+        }
+      });
+    }
+
+    function onVRPresentChange() {
+      if (vrDisplay && vrDisplay.isPresenting) {
+        const leftEye = vrDisplay.getEyeParameters("left");
+        const rightEye = vrDisplay.getEyeParameters("right");
+
+        if (leftEye.renderWidth != rightEye.renderWidth ||
+            leftEye.renderWidth != eyeWidth) {
+          setStatus('renderWidth is not equal to eyeWidth.');
+        }
+
+        if (leftEye.renderHeight != rightEye.renderHeight ||
+            leftEye.renderHeight != eyeHeight) {
+          setStatus('renderHeight is not equal to eyeHeight.');
+        }
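+        // WebVR renders side-by-side stereo, so the canvas must be wide
+        // enough to hold both eyes.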
+        webglCanvas.width = leftEye.renderWidth * 2;
+        webglCanvas.height = leftEye.renderHeight;
+      }
+    }
+
+    function onAnimationFrame() {
+      if (!vrDisplay.isPresenting) {
+        return;
+      }
+
+      vrDisplay.requestAnimationFrame(onAnimationFrame);
+      gl.clearColor(0.0, 1.0, 0.0, 1.0);
+      gl.clear(gl.COLOR_BUFFER_BIT);
+
+      // While presenting, render a stereo view: one viewport per eye.
+      gl.viewport(0, 0, webglCanvas.width * 0.5, webglCanvas.height);
+      gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
+
+      gl.viewport(webglCanvas.width * 0.5, 0, webglCanvas.width * 0.5, webglCanvas.height);
+      gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
+
+      // Indicate to the VRDisplay that we're done rendering.
+      vrDisplay.submitFrame();
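+      // With dom.vr.puppet.submitframe set to 1 (see reftest.list), the
+      // puppet display captures the submitted frame as a base64 image.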
+      if (vrDisplay.getSubmitFrameResult(submitResult)) {
+        if (!img) {
+          img = document.createElement("img");
+          img.onload = function(){
+            // The img width will not be eyeWidth * 2 (540); it will be
+            // 544, because D3D11 CopyResource pads the destination
+            // image size.
+            if (img.height == eyeHeight) {
+              webglCanvas.style.display = 'none';
+              vrDisplay.exitPresent();
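+            // Defer testComplete to the next tick so exitPresent can
+            // settle before the reftest snapshot is taken.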
+              setTimeout(testComplete, 0);
+            }
+          };
+          img.src = submitResult.base64Image;
+          document.body.appendChild(img);
+        } else {
+          img.src = submitResult.base64Image;
+        }
+      }
+    }
+
+    function runTest() {
+      webglCanvas = document.getElementById('canvas');
+      gl = WebGLUtil.getWebGL('canvas');
+      if (!gl) {
+        setStatus('WebGL context creation failed.');
+        return;
+      }
+      gl.disable(gl.DEPTH_TEST);
+      prog = WebGLUtil.createProgramByIds(gl, 'vs', 'fs');
+      if (!prog) {
+        setStatus('Program linking failed.');
+        return;
+      }
+      prog.aVertCoord = gl.getAttribLocation(prog, "aVertCoord");
+
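+      // A quad covering the middle of clip space (from -0.5 to 0.5),
+      // drawn as a triangle strip.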
+      var vertCoordArr = new Float32Array([
+        -0.5, -0.5,
+        0.5, -0.5,
+        -0.5, 0.5,
+        0.5, 0.5,
+      ]);
+      var vertCoordBuff = gl.createBuffer();
+      gl.bindBuffer(gl.ARRAY_BUFFER, vertCoordBuff);
+      gl.bufferData(gl.ARRAY_BUFFER, vertCoordArr, gl.STATIC_DRAW);
+      gl.useProgram(prog);
+      gl.enableVertexAttribArray(prog.aVertCoord);
+      gl.vertexAttribPointer(prog.aVertCoord, 2, gl.FLOAT, false, 0, 0);
+
+      initVRMock();
+    }
+
+    function testComplete() {
+      document.documentElement.removeAttribute("class");
+    }
+  </script>
+</head>
+
+<body onload='runTest();'>
+  <canvas id='canvas' width='128' height='128'></canvas>
+  <div id='status'></div>
+</body>
+
+</html>
\ No newline at end of file
new file mode 100644
index 0000000000000000000000000000000000000000..fe03114b20d6204226a0afbb7ebee20a83d334ad
GIT binary patch
literal 1439
zc%17D@N?(olHy`uVBq!ia0y~yV3J{AVASDY1B!IyWcdIo#^NA%Cx&(BWL^R}Ea{HE
zjtmSN`?>!lvI6;>1s;*b3=DinK$vl=HlH*D1FMOri(^Pd+}j(DT!#!qTrbL-=atJv
z7X$>hmzViflrXEtY)YHY&Y<w6qMnh#sEL{31ScPZg1QAm!<+{U3`L^?#0d1wer~vV
z`pxg_>Y3{b^Kb0mX>K=Y+%_sOvIMq+v-6YILFTNXBVhKKsd0-~+<NEOtH45)!PC{x
JWt~$(697R^be;eJ
--- a/dom/vr/test/reftest/reftest.list
+++ b/dom/vr/test/reftest/reftest.list
@@ -1,7 +1,8 @@
 # WebVR Reftests
 default-preferences pref(dom.vr.puppet.enabled,true) pref(dom.vr.test.enabled,true) pref(dom.vr.require-gesture,false) pref(dom.vr.puppet.submitframe,1)
 
 # VR SubmitFrame is only implemented for D3D11 now.
 # We need to continue investigating why these reftests run well locally
 # but hang until they are terminated on reftest debug builds.
 skip-if(!winWidget||!layersGPUAccelerated||isDebugBuild) == draw_rect.html wrapper.html?draw_rect.png
+skip-if(!winWidget||!layersGPUAccelerated||isDebugBuild) == change_size.html wrapper.html?change_size.png
--- a/gfx/vr/gfxVRPuppet.cpp
+++ b/gfx/vr/gfxVRPuppet.cpp
@@ -308,17 +308,18 @@ VRDisplayPuppet::SubmitFrame(TextureSour
       break;
     case 1:
     {
      // The frames submitted to the VR compositor are decoded
      // into a base64Image and dispatched to the DOM side.
       D3D11_TEXTURE2D_DESC desc;
       ID3D11Texture2D* texture = aSource->GetD3D11Texture();
       texture->GetDesc(&desc);
-      DXGI_FORMAT format = desc.Format;
+      MOZ_ASSERT(desc.Format == DXGI_FORMAT_B8G8R8A8_UNORM,
+                 "Only the B8G8R8A8_UNORM format is supported.");
       // Map the staging resource
       ID3D11Texture2D* mappedTexture = nullptr;
       D3D11_MAPPED_SUBRESOURCE mapInfo;
       HRESULT hr = mContext->Map(texture,
                                  0,  // Subsource
                                  D3D11_MAP_READ,
                                  0,  // MapFlags
                                  &mapInfo);
@@ -366,26 +367,29 @@ VRDisplayPuppet::SubmitFrame(TextureSour
      // Ideally, we would convert the srcData to a PNG image and encode it
      // as a Base64 string here, but the GPU process does not have the privilege
      // to access the image library. So, we have to convert the raw image data
      // to a base64 string and forward it to the content process, which
      // does the image conversion.
       char* srcData = static_cast<char*>(mapInfo.pData);
       VRSubmitFrameResultInfo result;
       result.mFormat = SurfaceFormat::B8G8R8A8;
-      result.mWidth = desc.Width;
+      // If the original texture size is not a power of 2, CopyResource() adds
+      // padding, so the copied size differs from desc.Width. We recover the
+      // actual width as (mapInfo.RowPitch / the format's bytes per pixel).
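+      // e.g., a 540-pixel-wide B8G8R8A8 texture is copied with a RowPitch of
+      // 2176 bytes, so mWidth becomes 2176 / 4 = 544 (see change_size.html).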
+      result.mWidth = mapInfo.RowPitch / 4;
       result.mHeight = desc.Height;
       result.mFrameNum = mDisplayInfo.mFrameId;
       nsCString rawString(Substring((char*)srcData, mapInfo.RowPitch * desc.Height));
 
       if (Base64Encode(rawString, result.mBase64Image) != NS_OK) {
         MOZ_ASSERT(false, "Failed to encode base64 images.");
       }
       mContext->Unmap(mappedTexture, 0);
-      // Dispatch the base64 encoded string to the DOM side, and it will be decoded
+      // Dispatch the base64 encoded string to the DOM side. Then, it will be decoded
      // and converted to a PNG image there.
       vm->DispatchSubmitFrameResult(mDisplayInfo.mDisplayID, result);
       break;
     }
     case 2:
     {
      // The VR compositor submits the frame to the screen window;
      // the current coordinates are (0, 0, width, height).