Bug 1410493 - Update Oculus SDK from 1.15 to 1.19, update IPD during VR presentation
author Kearwood "Kip" Gilbert <kgilbert@mozilla.com>
Mon, 23 Oct 2017 14:23:03 -0700
changeset 688824 edc42b1658f59ba1c080d03ce0055d7a0b7ba494
parent 688758 a89e5587c7a761fa59b82270b861c7f547968145
child 738181 3273cc3745121aff6bdcb05c328f3d37b3b8e7af
push id 86867
push user kgilbert@mozilla.com
push date Mon, 30 Oct 2017 19:46:57 +0000
bugs 1410493
milestone 58.0a1
Bug 1410493 - Update Oculus SDK from 1.15 to 1.19, update IPD during VR presentation

- Oculus SDK was updated from 1.15 to 1.19.
- As the Oculus API now returns quaternion orientations for each eye-to-head
  transform, I needed to send more information to the content process.
- Rather than adding the quaternion to the sensor state, we now calculate the
  view matrices on the VR thread in the GPU process instead of deriving them
  from parameters in the content thread.
- OpenVR's full view matrix is now used, for compatibility with more devices.
- IPD adjustments are now updated every frame for both Oculus and OpenVR.

MozReview-Commit-ID: LOtfs4QIqc8
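
The heart of the change is the new VRHMDSensorState::CalcViewMatrices in
gfx/vr/gfxVR.cpp below. A rough sketch of the composition it performs (the
hmdOrientation, hmdPosition, and headToEye names are illustrative only, not
part of the patch):

    // Sketch, assuming the row-major gfx::Matrix4x4 conventions used below.
    gfx::Matrix4x4 matHead;
    matHead.SetRotationFromQuaternion(hmdOrientation);  // sensor orientation
    matHead.PreTranslate(-hmdPosition);                 // undo head translation
    // Compose with each head-to-eye transform to get per-eye view matrices.
    gfx::Matrix4x4 leftView = matHead * headToEye[VRDisplayInfo::Eye_Left];
    gfx::Matrix4x4 rightView = matHead * headToEye[VRDisplayInfo::Eye_Right];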
dom/vr/VRDisplay.cpp
gfx/2d/Matrix.h
gfx/vr/gfxVR.cpp
gfx/vr/gfxVR.h
gfx/vr/gfxVROSVR.cpp
gfx/vr/gfxVROSVR.h
gfx/vr/gfxVROculus.cpp
gfx/vr/gfxVROculus.h
gfx/vr/gfxVROpenVR.cpp
gfx/vr/gfxVROpenVR.h
gfx/vr/gfxVRPuppet.cpp
gfx/vr/ipc/VRMessageUtils.h
gfx/vr/ovr_capi_dynamic.h
--- a/dom/vr/VRDisplay.cpp
+++ b/dom/vr/VRDisplay.cpp
@@ -898,49 +898,28 @@ VRFrameInfo::Update(const gfx::VRDisplay
      * We use a pseudo random offset rather than 0.0f just to discourage users
      * from making the assumption that the timestamp returned in the WebVR API
      * has a base of 0, which is not necessarily true in all UA's.
      */
     mTimeStampOffset = float(rand()) / RAND_MAX * 10000.0f + 1000.0f - aState.timestamp;
   }
   mVRState.timestamp = aState.timestamp + mTimeStampOffset;
 
-  gfx::Quaternion qt;
-  if (mVRState.flags & gfx::VRDisplayCapabilityFlags::Cap_Orientation) {
-    qt.x = mVRState.orientation[0];
-    qt.y = mVRState.orientation[1];
-    qt.z = mVRState.orientation[2];
-    qt.w = mVRState.orientation[3];
-  }
-  gfx::Point3D pos;
-  if (mVRState.flags & gfx::VRDisplayCapabilityFlags::Cap_Position) {
-    pos.x = -mVRState.position[0];
-    pos.y = -mVRState.position[1];
-    pos.z = -mVRState.position[2];
-  }
-  gfx::Matrix4x4 matHead;
-  matHead.SetRotationFromQuaternion(qt);
-  matHead.PreTranslate(pos);
-
-  mLeftView = matHead;
-  mLeftView.PostTranslate(-aInfo.mEyeTranslation[gfx::VRDisplayInfo::Eye_Left]);
-
-  mRightView = matHead;
-  mRightView.PostTranslate(-aInfo.mEyeTranslation[gfx::VRDisplayInfo::Eye_Right]);
-
   // Avoid division by zero within ConstructProjectionMatrix
   const float kEpsilon = 0.00001f;
   if (fabs(aDepthFar - aDepthNear) < kEpsilon) {
     aDepthFar = aDepthNear + kEpsilon;
   }
 
   const gfx::VRFieldOfView leftFOV = aInfo.mEyeFOV[gfx::VRDisplayInfo::Eye_Left];
   mLeftProjection = leftFOV.ConstructProjectionMatrix(aDepthNear, aDepthFar, true);
   const gfx::VRFieldOfView rightFOV = aInfo.mEyeFOV[gfx::VRDisplayInfo::Eye_Right];
   mRightProjection = rightFOV.ConstructProjectionMatrix(aDepthNear, aDepthFar, true);
+  memcpy(mLeftView.components, aState.leftViewMatrix, sizeof(aState.leftViewMatrix));
+  memcpy(mRightView.components, aState.rightViewMatrix, sizeof(aState.rightViewMatrix));
 }
 
 VRFrameInfo::VRFrameInfo()
  : mTimeStampOffset(0.0f)
 {
 }
 
 bool
--- a/gfx/2d/Matrix.h
+++ b/gfx/2d/Matrix.h
@@ -1431,16 +1431,69 @@ public:
   {
       MOZ_ASSERT(aIndex >= 0 && aIndex <= 3, "Invalid matrix array index");
       *((&_11)+aIndex) = aVector.x;
       *((&_21)+aIndex) = aVector.y;
       *((&_31)+aIndex) = aVector.z;
       *((&_41)+aIndex) = aVector.w;
   }
 
+  bool Decompose(Point3D& translation, Quaternion& rotation, Point3D& scale) const
+  {
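+    // Decomposes this matrix into its translation, rotation and scale
+    // components.  Returns false for matrices that cannot be decomposed
+    // (perspective projections or zero scale factors).
+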
+    // Ensure matrix can be normalized
+    if (gfx::FuzzyEqual(_44, 0.0f)) {
+      return false;
+    }
+    Matrix4x4Typed mat = *this;
+    mat.Normalize();
+    if (HasPerspectiveComponent()) {
+      // We do not support projection matrices
+      return false;
+    }
+
+    // Extract translation
+    translation.x = mat._41;
+    translation.y = mat._42;
+    translation.z = mat._43;
+
+    // Remove translation
+    mat._41 = 0.0f;
+    mat._42 = 0.0f;
+    mat._43 = 0.0f;
+
+    // Extract scale
+    scale.x = sqrtf(mat._11 * mat._11 + mat._21 * mat._21 + mat._31 * mat._31);
+    scale.y = sqrtf(mat._12 * mat._12 + mat._22 * mat._22 + mat._32 * mat._32);
+    scale.z = sqrtf(mat._13 * mat._13 + mat._23 * mat._23 + mat._33 * mat._33);
+
+    // Remove scale
+    if (gfx::FuzzyEqual(scale.x, 0.0f) ||
+        gfx::FuzzyEqual(scale.y, 0.0f) ||
+        gfx::FuzzyEqual(scale.z, 0.0f)) {
+      // We do not support matrices with a zero scale component
+      return false;
+    }
+    Float invXS = 1.0f / scale.x;
+    Float invYS = 1.0f / scale.y;
+    Float invZS = 1.0f / scale.z;
+    mat._11 *= invXS;
+    mat._21 *= invXS;
+    mat._31 *= invXS;
+    mat._12 *= invYS;
+    mat._22 *= invYS;
+    mat._32 *= invYS;
+    mat._13 *= invZS;
+    mat._23 *= invZS;
+    mat._33 *= invZS;
+
+    // Extract rotation
+    rotation.SetFromRotationMatrix(mat);
+    return true;
+  }
+
   // Sets this matrix to a rotation matrix given by aQuat.
   // This quaternion *MUST* be normalized!
   // Implemented in Quaternion.cpp
   void SetRotationFromQuaternion(const Quaternion& q)
   {
     const Float x2 = q.x + q.x, y2 = q.y + q.y, z2 = q.z + q.z;
     const Float xx = q.x * x2, xy = q.x * y2, xz = q.x * z2;
     const Float yy = q.y * y2, yz = q.y * z2, zz = q.z * z2;
--- a/gfx/vr/gfxVR.cpp
+++ b/gfx/vr/gfxVR.cpp
@@ -124,8 +124,26 @@ VRSystemManager::NewHandChangeEvent(uint
                                     const dom::GamepadHand aHand)
 {
   dom::GamepadHandInformation a(aHand);
 
   VRManager* vm = VRManager::Get();
   vm->NotifyGamepadChange<dom::GamepadHandInformation>(aIndex, a);
 }
 
+void
+VRHMDSensorState::CalcViewMatrices(const gfx::Matrix4x4* aHeadToEyeTransforms)
+{
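+  // Compose the head pose from the sensor state with each head-to-eye
+  // transform to produce the per-eye view matrices.  This runs on the VR
+  // thread in the GPU process, so the content process receives ready-made
+  // view matrices rather than raw eye parameters.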
+  gfx::Matrix4x4 matHead;
+  if (flags & VRDisplayCapabilityFlags::Cap_Orientation) {
+    matHead.SetRotationFromQuaternion(gfx::Quaternion(orientation[0], orientation[1],
+                                                      orientation[2], orientation[3]));
+  }
+  matHead.PreTranslate(-position[0], -position[1], -position[2]);
+
+  gfx::Matrix4x4 matView = matHead * aHeadToEyeTransforms[VRDisplayInfo::Eye_Left];
+  matView.Normalize();
+  memcpy(leftViewMatrix, matView.components, sizeof(matView.components));
+  matView = matHead * aHeadToEyeTransforms[VRDisplayInfo::Eye_Right];
+  matView.Normalize();
+  memcpy(rightViewMatrix, matView.components, sizeof(matView.components));
+}
--- a/gfx/vr/gfxVR.h
+++ b/gfx/vr/gfxVR.h
@@ -139,16 +139,18 @@ struct VRHMDSensorState {
   }
   int64_t inputFrameID;
   double timestamp;
   VRDisplayCapabilityFlags flags;
 
   // These members will only change with inputFrameID:
   float orientation[4];
   float position[3];
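+  // Per-eye view matrices, computed on the VR thread by CalcViewMatrices and
+  // sent to the content process with the rest of the sensor state: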
+  float leftViewMatrix[16];
+  float rightViewMatrix[16];
   float angularVelocity[3];
   float angularAcceleration[3];
   float linearVelocity[3];
   float linearAcceleration[3];
 
   void Clear() {
     memset(this, 0, sizeof(VRHMDSensorState));
   }
@@ -156,16 +158,17 @@ struct VRHMDSensorState {
   bool operator==(const VRHMDSensorState& other) const {
     return inputFrameID == other.inputFrameID &&
            timestamp == other.timestamp;
   }
 
   bool operator!=(const VRHMDSensorState& other) const {
     return !(*this == other);
   }
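+
+  // Computes leftViewMatrix and rightViewMatrix from the current orientation
+  // and position, given one head-to-eye transform per eye.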
+  void CalcViewMatrices(const gfx::Matrix4x4* aHeadToEyeTransforms);
 };
 
 // The maximum number of frames of latency that we would expect before we
 // should give up applying pose prediction.
 // If latency is greater than one second, then the experience is not likely
 // to be corrected by pose prediction.  Setting this value too
 // high may result in unnecessary memory allocation.
 // As the current fastest refresh rate is 90hz, 100 is selected as a
--- a/gfx/vr/gfxVROSVR.cpp
+++ b/gfx/vr/gfxVROSVR.cpp
@@ -255,16 +255,25 @@ VRDisplayOSVR::VRDisplayOSVR(OSVR_Client
       osvr_ClientGetViewerEyePose(*m_display, 0, eye, &eyePose);
     while (ret != OSVR_RETURN_SUCCESS) {
       osvr_ClientUpdate(*m_ctx);
       ret = osvr_ClientGetViewerEyePose(*m_display, 0, eye, &eyePose);
     }
     mDisplayInfo.mEyeTranslation[eye].x = eyePose.translation.data[0];
     mDisplayInfo.mEyeTranslation[eye].y = eyePose.translation.data[1];
     mDisplayInfo.mEyeTranslation[eye].z = eyePose.translation.data[2];
+
+    Matrix4x4 pose;
+    pose.SetRotationFromQuaternion(gfx::Quaternion(osvrQuatGetX(&eyePose.rotation),
+                                                   osvrQuatGetY(&eyePose.rotation),
+                                                   osvrQuatGetZ(&eyePose.rotation),
+                                                   osvrQuatGetW(&eyePose.rotation)));
+    pose.PreTranslate(eyePose.translation.data[0],
+                      eyePose.translation.data[1],
+                      eyePose.translation.data[2]);
+    pose.Invert();
+    mHeadToEye[eye] = pose;
   }
 }
 
 void
 VRDisplayOSVR::Destroy()
 {
   // destroy non-owning pointers
   m_ctx = nullptr;
@@ -299,27 +308,32 @@ VRDisplayOSVR::GetSensorState()
   result.inputFrameID = mDisplayInfo.mFrameId;
 
   if (ret == OSVR_RETURN_SUCCESS) {
     result.flags |= VRDisplayCapabilityFlags::Cap_Orientation;
     result.orientation[0] = orientation.data[1];
     result.orientation[1] = orientation.data[2];
     result.orientation[2] = orientation.data[3];
     result.orientation[3] = orientation.data[0];
+  } else {
+    // default to an identity quaternion
+    result.orientation[3] = 1.0f;
   }
 
   OSVR_PositionState position;
   ret = osvr_GetPositionState(*m_iface, &timestamp, &position);
   if (ret == OSVR_RETURN_SUCCESS) {
     result.flags |= VRDisplayCapabilityFlags::Cap_Position;
     result.position[0] = position.data[0];
     result.position[1] = position.data[1];
     result.position[2] = position.data[2];
   }
 
+  result.CalcViewMatrices(mHeadToEye);
+
   return result;
 }
 
 #if defined(XP_WIN)
 
 bool
 VRDisplayOSVR::SubmitFrame(ID3D11Texture2D* aSource,
   const IntSize& aSize,
--- a/gfx/vr/gfxVROSVR.h
+++ b/gfx/vr/gfxVROSVR.h
@@ -58,16 +58,18 @@ protected:
     Destroy();
     MOZ_COUNT_DTOR_INHERITED(VRDisplayOSVR, VRDisplayHost);
   }
   void Destroy();
 
   OSVR_ClientContext* m_ctx;
   OSVR_ClientInterface* m_iface;
   OSVR_DisplayConfig* m_display;
+
+  gfx::Matrix4x4 mHeadToEye[2];
 };
 
 } // namespace impl
 
 class VRSystemManagerOSVR : public VRSystemManager
 {
 public:
   static already_AddRefed<VRSystemManagerOSVR> Create();
--- a/gfx/vr/gfxVROculus.cpp
+++ b/gfx/vr/gfxVROculus.cpp
@@ -69,16 +69,18 @@ static pfn_ovr_GetVersionString ovr_GetV
 static pfn_ovr_TraceMessage ovr_TraceMessage = nullptr;
 static pfn_ovr_IdentifyClient ovr_IdentifyClient = nullptr;
 static pfn_ovr_GetHmdDesc ovr_GetHmdDesc = nullptr;
 static pfn_ovr_GetTrackerCount ovr_GetTrackerCount = nullptr;
 static pfn_ovr_GetTrackerDesc ovr_GetTrackerDesc = nullptr;
 static pfn_ovr_Create ovr_Create = nullptr;
 static pfn_ovr_Destroy ovr_Destroy = nullptr;
 static pfn_ovr_GetSessionStatus ovr_GetSessionStatus = nullptr;
+static pfn_ovr_IsExtensionSupported ovr_IsExtensionSupported = nullptr;
+static pfn_ovr_EnableExtension ovr_EnableExtension = nullptr;
 static pfn_ovr_SetTrackingOriginType ovr_SetTrackingOriginType = nullptr;
 static pfn_ovr_GetTrackingOriginType ovr_GetTrackingOriginType = nullptr;
 static pfn_ovr_RecenterTrackingOrigin ovr_RecenterTrackingOrigin = nullptr;
 static pfn_ovr_SpecifyTrackingOrigin ovr_SpecifyTrackingOrigin = nullptr;
 static pfn_ovr_ClearShouldRecenterFlag ovr_ClearShouldRecenterFlag = nullptr;
 static pfn_ovr_GetTrackingState ovr_GetTrackingState = nullptr;
 static pfn_ovr_GetDevicePoses ovr_GetDevicePoses = nullptr;
 static pfn_ovr_GetTrackerPose ovr_GetTrackerPose = nullptr;
@@ -98,17 +100,20 @@ static pfn_ovr_GetBoundaryVisible ovr_Ge
 static pfn_ovr_RequestBoundaryVisible ovr_RequestBoundaryVisible = nullptr;
 static pfn_ovr_GetTextureSwapChainLength ovr_GetTextureSwapChainLength = nullptr;
 static pfn_ovr_GetTextureSwapChainCurrentIndex ovr_GetTextureSwapChainCurrentIndex = nullptr;
 static pfn_ovr_GetTextureSwapChainDesc ovr_GetTextureSwapChainDesc = nullptr;
 static pfn_ovr_CommitTextureSwapChain ovr_CommitTextureSwapChain = nullptr;
 static pfn_ovr_DestroyTextureSwapChain ovr_DestroyTextureSwapChain = nullptr;
 static pfn_ovr_DestroyMirrorTexture ovr_DestroyMirrorTexture = nullptr;
 static pfn_ovr_GetFovTextureSize ovr_GetFovTextureSize = nullptr;
-static pfn_ovr_GetRenderDesc ovr_GetRenderDesc = nullptr;
+static pfn_ovr_GetRenderDesc2 ovr_GetRenderDesc2 = nullptr;
+static pfn_ovr_WaitToBeginFrame ovr_WaitToBeginFrame = nullptr;
+static pfn_ovr_BeginFrame ovr_BeginFrame = nullptr;
+static pfn_ovr_EndFrame ovr_EndFrame = nullptr;
 static pfn_ovr_SubmitFrame ovr_SubmitFrame = nullptr;
 static pfn_ovr_GetPerfStats ovr_GetPerfStats = nullptr;
 static pfn_ovr_ResetPerfStats ovr_ResetPerfStats = nullptr;
 static pfn_ovr_GetPredictedDisplayTime ovr_GetPredictedDisplayTime = nullptr;
 static pfn_ovr_GetTimeInSeconds ovr_GetTimeInSeconds = nullptr;
 static pfn_ovr_GetBool ovr_GetBool = nullptr;
 static pfn_ovr_SetBool ovr_SetBool = nullptr;
 static pfn_ovr_GetInt ovr_GetInt = nullptr;
@@ -137,17 +142,17 @@ static pfn_ovr_GetMirrorTextureBufferGL 
 #ifdef HAVE_64BIT_BUILD
 #define BUILD_BITS 64
 #else
 #define BUILD_BITS 32
 #endif
 
 #define OVR_PRODUCT_VERSION 1
 #define OVR_MAJOR_VERSION   1
-#define OVR_MINOR_VERSION   15
+#define OVR_MINOR_VERSION   19
 
 enum class OculusLeftControllerButtonType : uint16_t {
   LThumb,
   IndexTrigger,
   HandTrigger,
   Button_X,
   Button_Y,
   LThumbRest,
@@ -629,16 +634,18 @@ VROculusSession::LoadOvrLib()
   REQUIRE_FUNCTION(ovr_TraceMessage);
   REQUIRE_FUNCTION(ovr_IdentifyClient);
   REQUIRE_FUNCTION(ovr_GetHmdDesc);
   REQUIRE_FUNCTION(ovr_GetTrackerCount);
   REQUIRE_FUNCTION(ovr_GetTrackerDesc);
   REQUIRE_FUNCTION(ovr_Create);
   REQUIRE_FUNCTION(ovr_Destroy);
   REQUIRE_FUNCTION(ovr_GetSessionStatus);
+  REQUIRE_FUNCTION(ovr_IsExtensionSupported);
+  REQUIRE_FUNCTION(ovr_EnableExtension);
   REQUIRE_FUNCTION(ovr_SetTrackingOriginType);
   REQUIRE_FUNCTION(ovr_GetTrackingOriginType);
   REQUIRE_FUNCTION(ovr_RecenterTrackingOrigin);
   REQUIRE_FUNCTION(ovr_SpecifyTrackingOrigin);
   REQUIRE_FUNCTION(ovr_ClearShouldRecenterFlag);
   REQUIRE_FUNCTION(ovr_GetTrackingState);
   REQUIRE_FUNCTION(ovr_GetDevicePoses);
   REQUIRE_FUNCTION(ovr_GetTrackerPose);
@@ -658,17 +665,20 @@ VROculusSession::LoadOvrLib()
   REQUIRE_FUNCTION(ovr_RequestBoundaryVisible);
   REQUIRE_FUNCTION(ovr_GetTextureSwapChainLength);
   REQUIRE_FUNCTION(ovr_GetTextureSwapChainCurrentIndex);
   REQUIRE_FUNCTION(ovr_GetTextureSwapChainDesc);
   REQUIRE_FUNCTION(ovr_CommitTextureSwapChain);
   REQUIRE_FUNCTION(ovr_DestroyTextureSwapChain);
   REQUIRE_FUNCTION(ovr_DestroyMirrorTexture);
   REQUIRE_FUNCTION(ovr_GetFovTextureSize);
-  REQUIRE_FUNCTION(ovr_GetRenderDesc);
+  REQUIRE_FUNCTION(ovr_GetRenderDesc2);
+  REQUIRE_FUNCTION(ovr_WaitToBeginFrame);
+  REQUIRE_FUNCTION(ovr_BeginFrame);
+  REQUIRE_FUNCTION(ovr_EndFrame);
   REQUIRE_FUNCTION(ovr_SubmitFrame);
   REQUIRE_FUNCTION(ovr_GetPerfStats);
   REQUIRE_FUNCTION(ovr_ResetPerfStats);
   REQUIRE_FUNCTION(ovr_GetPredictedDisplayTime);
   REQUIRE_FUNCTION(ovr_GetTimeInSeconds);
   REQUIRE_FUNCTION(ovr_GetBool);
   REQUIRE_FUNCTION(ovr_SetBool);
   REQUIRE_FUNCTION(ovr_GetInt);
@@ -773,47 +783,65 @@ VRDisplayOculus::VRDisplayOculus(VROculu
   mFOVPort[VRDisplayInfo::Eye_Right] = mDesc.DefaultEyeFov[ovrEye_Right];
 
   mDisplayInfo.mEyeFOV[VRDisplayInfo::Eye_Left] = FromFovPort(mFOVPort[VRDisplayInfo::Eye_Left]);
   mDisplayInfo.mEyeFOV[VRDisplayInfo::Eye_Right] = FromFovPort(mFOVPort[VRDisplayInfo::Eye_Right]);
 
   float pixelsPerDisplayPixel = 1.0;
   ovrSizei texSize[2];
 
-  // get eye parameters and create the mesh
+  // get eye texture sizes
   for (uint32_t eye = 0; eye < VRDisplayInfo::NumEyes; eye++) {
-
-    ovrEyeRenderDesc renderDesc = ovr_GetRenderDesc(mSession->Get(), (ovrEyeType)eye, mFOVPort[eye]);
-
-    // As of Oculus 0.6.0, the HmdToEyeOffset values are correct and don't need to be negated.
-    mDisplayInfo.mEyeTranslation[eye] = Point3D(renderDesc.HmdToEyeOffset.x, renderDesc.HmdToEyeOffset.y, renderDesc.HmdToEyeOffset.z);
-
     texSize[eye] = ovr_GetFovTextureSize(mSession->Get(), (ovrEyeType)eye, mFOVPort[eye], pixelsPerDisplayPixel);
   }
 
   // take the max of both for eye resolution
   mDisplayInfo.mEyeResolution.width = std::max(texSize[VRDisplayInfo::Eye_Left].w, texSize[VRDisplayInfo::Eye_Right].w);
   mDisplayInfo.mEyeResolution.height = std::max(texSize[VRDisplayInfo::Eye_Left].h, texSize[VRDisplayInfo::Eye_Right].h);
 
+  UpdateEyeParameters();
   UpdateStageParameters();
 }
 
 VRDisplayOculus::~VRDisplayOculus() {
   Destroy();
   MOZ_COUNT_DTOR_INHERITED(VRDisplayOculus, VRDisplayHost);
 }
 
 void
 VRDisplayOculus::Destroy()
 {
   StopPresentation();
   mSession = nullptr;
 }
 
 void
+VRDisplayOculus::UpdateEyeParameters(gfx::Matrix4x4* aHeadToEyeTransforms /* = nullptr */)
+{
+  // Note this must be called every frame, as the IPD adjustment can be changed
+  // by the user during a VR session.
+  for (uint32_t eye = 0; eye < VRDisplayInfo::NumEyes; eye++) {
+    // As of the Oculus 1.17 SDK, we must use the ovr_GetRenderDesc2 function to retrieve
+    // the updated version of ovrEyeRenderDesc.  This is normally handled by the Oculus
+    // static lib shim, but we need to call it explicitly as we load the Oculus runtime
+    // DLL directly.
+    ovrEyeRenderDesc renderDesc = ovr_GetRenderDesc2(mSession->Get(), (ovrEyeType)eye, mFOVPort[eye]);
+    mDisplayInfo.mEyeTranslation[eye].x = renderDesc.HmdToEyePose.Position.x;
+    mDisplayInfo.mEyeTranslation[eye].y = renderDesc.HmdToEyePose.Position.y;
+    mDisplayInfo.mEyeTranslation[eye].z = renderDesc.HmdToEyePose.Position.z;
+    if (aHeadToEyeTransforms) {
+      Matrix4x4 pose;
+      pose.SetRotationFromQuaternion(gfx::Quaternion(renderDesc.HmdToEyePose.Orientation.x,
+                                                     renderDesc.HmdToEyePose.Orientation.y,
+                                                     renderDesc.HmdToEyePose.Orientation.z,
+                                                     renderDesc.HmdToEyePose.Orientation.w));
+      pose.PreTranslate(renderDesc.HmdToEyePose.Position.x,
+                        renderDesc.HmdToEyePose.Position.y,
+                        renderDesc.HmdToEyePose.Position.z);
+      pose.Invert();
+      aHeadToEyeTransforms[eye] = pose;
+    }
+  }
+}
+
+void
 VRDisplayOculus::UpdateStageParameters()
 {
   if (!mSession->IsTrackingReady()) {
     return;
   }
   ovrVector3f playArea;
   ovrResult res = ovr_GetBoundaryDimensions(mSession->Get(), ovrBoundary_PlayArea, &playArea);
   if (res == ovrSuccess) {
@@ -859,27 +887,30 @@ VRDisplayOculus::ZeroSensor()
   UpdateStageParameters();
 }
 
 VRHMDSensorState
 VRDisplayOculus::GetSensorState()
 {
   VRHMDSensorState result;
   if (mSession->IsTrackingReady()) {
+    gfx::Matrix4x4 headToEyeTransforms[2];
+    UpdateEyeParameters(headToEyeTransforms);
     double predictedFrameTime = 0.0f;
     if (gfxPrefs::VRPosePredictionEnabled()) {
       // XXX We might need to call ovr_GetPredictedDisplayTime even if we don't use the result.
       // If we don't call it, the Oculus driver will spew out many warnings...
       predictedFrameTime = ovr_GetPredictedDisplayTime(mSession->Get(), 0);
     }
     result = GetSensorState(predictedFrameTime);
+    result.position[1] -= mEyeHeight;
+    result.CalcViewMatrices(headToEyeTransforms);
   }
   result.inputFrameID = mDisplayInfo.mFrameId;
-  result.position[1] -= mEyeHeight;
-  mDisplayInfo.mLastSensorState[result.inputFrameID % kVRMaxLatencyFrames] = result;
+
   return result;
 }
 
 VRHMDSensorState
 VRDisplayOculus::GetSensorState(double absTime)
 {
   VRHMDSensorState result;
 
@@ -900,16 +931,19 @@ VRDisplayOculus::GetSensorState(double a
     result.angularVelocity[1] = pose.AngularVelocity.y;
     result.angularVelocity[2] = pose.AngularVelocity.z;
 
     result.flags |= VRDisplayCapabilityFlags::Cap_AngularAcceleration;
 
     result.angularAcceleration[0] = pose.AngularAcceleration.x;
     result.angularAcceleration[1] = pose.AngularAcceleration.y;
     result.angularAcceleration[2] = pose.AngularAcceleration.z;
+  } else {
+    // default to an identity quaternion
+    result.orientation[3] = 1.0f;
   }
 
   if (state.StatusFlags & ovrStatus_PositionTracked) {
     result.flags |= VRDisplayCapabilityFlags::Cap_Position;
 
     result.position[0] = pose.ThePose.Position.x;
     result.position[1] = pose.ThePose.Position.y;
     result.position[2] = pose.ThePose.Position.z;
@@ -1166,37 +1200,35 @@ VRDisplayOculus::SubmitFrame(ID3D11Textu
   layer.Viewport[0].Pos.y = aSize.height * aLeftEyeRect.y;
   layer.Viewport[0].Size.w = aSize.width * aLeftEyeRect.Width();
   layer.Viewport[0].Size.h = aSize.height * aLeftEyeRect.Height();
   layer.Viewport[1].Pos.x = aSize.width * aRightEyeRect.x;
   layer.Viewport[1].Pos.y = aSize.height * aRightEyeRect.y;
   layer.Viewport[1].Size.w = aSize.width * aRightEyeRect.Width();
   layer.Viewport[1].Size.h = aSize.height * aRightEyeRect.Height();
 
-  const Point3D& l = mDisplayInfo.mEyeTranslation[0];
-  const Point3D& r = mDisplayInfo.mEyeTranslation[1];
-  const ovrVector3f hmdToEyeViewOffset[2] = { { l.x, l.y, l.z },
-                                              { r.x, r.y, r.z } };
-
   const VRHMDSensorState& sensorState = mDisplayInfo.GetSensorState();
+  gfx::Matrix4x4 matView[2];
+  memcpy(matView[0].components, sensorState.leftViewMatrix, sizeof(sensorState.leftViewMatrix));
+  memcpy(matView[1].components, sensorState.rightViewMatrix, sizeof(sensorState.rightViewMatrix));
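+
+  // Decomposing each eye's view matrix recovers the orientation and position
+  // for the layer's RenderPose, so the submitted pose reflects the per-frame
+  // IPD adjustment.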
 
   for (uint32_t i = 0; i < 2; ++i) {
-    Quaternion o(sensorState.orientation[0],
-      sensorState.orientation[1],
-      sensorState.orientation[2],
-      sensorState.orientation[3]);
-    Point3D vo(hmdToEyeViewOffset[i].x, hmdToEyeViewOffset[i].y, hmdToEyeViewOffset[i].z);
-    Point3D p = o.RotatePoint(vo);
-    layer.RenderPose[i].Orientation.x = o.x;
-    layer.RenderPose[i].Orientation.y = o.y;
-    layer.RenderPose[i].Orientation.z = o.z;
-    layer.RenderPose[i].Orientation.w = o.w;
-    layer.RenderPose[i].Position.x = p.x + sensorState.position[0];
-    layer.RenderPose[i].Position.y = p.y + sensorState.position[1];
-    layer.RenderPose[i].Position.z = p.z + sensorState.position[2];
+    Point3D eyeTranslation;
+    Quaternion eyeRotation;
+    Point3D eyeScale;
+    if (!matView[i].Decompose(eyeTranslation, eyeRotation, eyeScale)) {
+      NS_WARNING("Failed to decompose eye pose matrix for Oculus");
+    }
+    layer.RenderPose[i].Orientation.x = eyeRotation.x;
+    layer.RenderPose[i].Orientation.y = eyeRotation.y;
+    layer.RenderPose[i].Orientation.z = eyeRotation.z;
+    layer.RenderPose[i].Orientation.w = eyeRotation.w;
+    layer.RenderPose[i].Position.x = eyeTranslation.x;
+    layer.RenderPose[i].Position.y = eyeTranslation.y;
+    layer.RenderPose[i].Position.z = eyeTranslation.z;
   }
 
   ovrLayerHeader *layers = &layer.Header;
   orv = ovr_SubmitFrame(mSession->Get(), mDisplayInfo.mFrameId, nullptr, &layers, 1);
  // ovr_SubmitFrame will fail during the Oculus health and safety warning,
  // and will start succeeding once the warning has been dismissed by the user.
 
   if (!OVR_UNQUALIFIED_SUCCESS(orv)) {
--- a/gfx/vr/gfxVROculus.h
+++ b/gfx/vr/gfxVROculus.h
@@ -117,16 +117,17 @@ protected:
   RefPtr<ID3D11Buffer> mVSConstantBuffer;
   RefPtr<ID3D11Buffer> mPSConstantBuffer;
   RefPtr<ID3D11Buffer> mVertexBuffer;
   RefPtr<ID3D11InputLayout> mInputLayout;
 
   float mEyeHeight;
 
   bool UpdateConstantBuffers();
+  void UpdateEyeParameters(gfx::Matrix4x4* aHeadToEyeTransforms = nullptr);
 
   struct Vertex
   {
     float position[2];
   };
 };
 
 class VRControllerOculus : public VRControllerHost
--- a/gfx/vr/gfxVROpenVR.cpp
+++ b/gfx/vr/gfxVROpenVR.cpp
@@ -81,24 +81,18 @@ VRDisplayOpenVR::VRDisplayOpenVR(::vr::I
   mDisplayInfo.mEyeResolution.height = h;
 
   // SteamVR gives the application a single FOV to use; it's not configurable as with Oculus
   for (uint32_t eye = 0; eye < 2; ++eye) {
     // get l/r/t/b clip plane coordinates
     float l, r, t, b;
     mVRSystem->GetProjectionRaw(static_cast<::vr::Hmd_Eye>(eye), &l, &r, &t, &b);
     mDisplayInfo.mEyeFOV[eye].SetFromTanRadians(-t, r, b, -l);
-
-    ::vr::HmdMatrix34_t eyeToHead = mVRSystem->GetEyeToHeadTransform(static_cast<::vr::Hmd_Eye>(eye));
-
-    mDisplayInfo.mEyeTranslation[eye].x = eyeToHead.m[0][3];
-    mDisplayInfo.mEyeTranslation[eye].y = eyeToHead.m[1][3];
-    mDisplayInfo.mEyeTranslation[eye].z = eyeToHead.m[2][3];
   }
-
+  UpdateEyeParameters();
   UpdateStageParameters();
 }
 
 VRDisplayOpenVR::~VRDisplayOpenVR()
 {
   Destroy();
   MOZ_COUNT_DTOR_INHERITED(VRDisplayOpenVR, VRDisplayHost);
 }
@@ -106,16 +100,41 @@ VRDisplayOpenVR::~VRDisplayOpenVR()
 void
 VRDisplayOpenVR::Destroy()
 {
   StopPresentation();
   ::vr::VR_Shutdown();
 }
 
 void
+VRDisplayOpenVR::UpdateEyeParameters(gfx::Matrix4x4* aHeadToEyeTransforms /* = nullptr */)
+{
+  // Note this must be called every frame, as the IPD adjustment can be changed
+  // by the user during a VR session.
+  for (uint32_t eye = 0; eye < VRDisplayInfo::NumEyes; eye++) {
+    ::vr::HmdMatrix34_t eyeToHead = mVRSystem->GetEyeToHeadTransform(static_cast<::vr::Hmd_Eye>(eye));
+
+    mDisplayInfo.mEyeTranslation[eye].x = eyeToHead.m[0][3];
+    mDisplayInfo.mEyeTranslation[eye].y = eyeToHead.m[1][3];
+    mDisplayInfo.mEyeTranslation[eye].z = eyeToHead.m[2][3];
+
+    if (aHeadToEyeTransforms) {
+      Matrix4x4 pose;
+      // NOTE! eyeToHead.m is a 3x4 matrix, not 4x4.  But
+      // because of its arrangement, we can copy the 12 elements in and
+      // then transpose them to the right place.
+      memcpy(&pose._11, &eyeToHead.m, sizeof(eyeToHead.m));
+      pose.Transpose();
+      pose.Invert();
+      aHeadToEyeTransforms[eye] = pose;
+    }
+  }
+}
+
+void
 VRDisplayOpenVR::UpdateStageParameters()
 {
   float sizeX = 0.0f;
   float sizeZ = 0.0f;
   if (mVRChaperone->GetPlayAreaSize(&sizeX, &sizeZ)) {
     ::vr::HmdMatrix34_t t = mVRSystem->GetSeatedZeroPoseToStandingAbsoluteTrackingPose();
     mDisplayInfo.mStageSize.width = sizeX;
     mDisplayInfo.mStageSize.height = sizeZ;
@@ -225,16 +244,18 @@ VRHMDSensorState
 VRDisplayOpenVR::GetSensorState()
 {
   PollEvents();
 
   const uint32_t posesSize = ::vr::k_unTrackedDeviceIndex_Hmd + 1;
   ::vr::TrackedDevicePose_t poses[posesSize];
   // Note: We *must* call WaitGetPoses in order for any rendering to happen at all.
   mVRCompositor->WaitGetPoses(nullptr, 0, poses, posesSize);
+  gfx::Matrix4x4 headToEyeTransforms[2];
+  UpdateEyeParameters(headToEyeTransforms);
 
   VRHMDSensorState result;
 
   ::vr::Compositor_FrameTiming timing;
   timing.m_nSize = sizeof(::vr::Compositor_FrameTiming);
   if (mVRCompositor->GetFrameTiming(&timing)) {
     result.timestamp = timing.m_flSystemTimeInSeconds;
   } else {
@@ -248,17 +269,17 @@ VRDisplayOpenVR::GetSensorState()
   {
     const ::vr::TrackedDevicePose_t& pose = poses[::vr::k_unTrackedDeviceIndex_Hmd];
 
     gfx::Matrix4x4 m;
     // NOTE! mDeviceToAbsoluteTracking is a 3x4 matrix, not 4x4.  But
     // because of its arrangement, we can copy the 12 elements in and
     // then transpose them to the right place.  We do this so we can
     // pull out a Quaternion.
-    memcpy(&m._11, &pose.mDeviceToAbsoluteTracking, sizeof(float) * 12);
+    memcpy(&m._11, &pose.mDeviceToAbsoluteTracking, sizeof(pose.mDeviceToAbsoluteTracking));
     m.Transpose();
 
     gfx::Quaternion rot;
     rot.SetFromRotationMatrix(m);
     rot.Invert();
 
     result.flags |= VRDisplayCapabilityFlags::Cap_Orientation;
     result.orientation[0] = rot.x;
@@ -271,18 +292,22 @@ VRDisplayOpenVR::GetSensorState()
 
     result.flags |= VRDisplayCapabilityFlags::Cap_Position;
     result.position[0] = m._41;
     result.position[1] = m._42;
     result.position[2] = m._43;
     result.linearVelocity[0] = pose.vVelocity.v[0];
     result.linearVelocity[1] = pose.vVelocity.v[1];
     result.linearVelocity[2] = pose.vVelocity.v[2];
+  } else {
+    // default to an identity quaternion
+    result.orientation[3] = 1.0f;
   }
 
+  result.CalcViewMatrices(headToEyeTransforms);
   result.inputFrameID = mDisplayInfo.mFrameId;
   return result;
 }
 
 void
 VRDisplayOpenVR::StartPresentation()
 {
   if (mIsPresenting) {
@@ -810,17 +835,17 @@ VRSystemManagerOpenVR::HandleInput()
       if (pose.bPoseIsValid &&
           pose.eTrackingResult == ::vr::TrackingResult_Running_OK) {
         gfx::Matrix4x4 m;
 
         // NOTE! mDeviceToAbsoluteTracking is a 3x4 matrix, not 4x4.  But
         // because of its arrangement, we can copy the 12 elements in and
         // then transpose them to the right place.  We do this so we can
         // pull out a Quaternion.
-        memcpy(&m.components, &pose.mDeviceToAbsoluteTracking, sizeof(float) * 12);
+        memcpy(&m.components, &pose.mDeviceToAbsoluteTracking, sizeof(pose.mDeviceToAbsoluteTracking));
         m.Transpose();
 
         gfx::Quaternion rot;
         rot.SetFromRotationMatrix(m);
         rot.Invert();
 
         poseState.orientation[0] = rot.x;
         poseState.orientation[1] = rot.y;
--- a/gfx/vr/gfxVROpenVR.h
+++ b/gfx/vr/gfxVROpenVR.h
@@ -63,16 +63,17 @@ protected:
   ::vr::IVRChaperone *mVRChaperone;
   ::vr::IVRCompositor *mVRCompositor;
 
   VRTelemetry mTelemetry;
   bool mIsPresenting;
   bool mIsHmdPresent;
 
   void UpdateStageParameters();
+  void UpdateEyeParameters(gfx::Matrix4x4* aHeadToEyeTransforms = nullptr);
   void PollEvents();
   bool SubmitFrame(void* aTextureHandle,
                    ::vr::ETextureType aTextureType,
                    const IntSize& aSize,
                    const gfx::Rect& aLeftEyeRect,
                    const gfx::Rect& aRightEyeRect);
 };
 
--- a/gfx/vr/gfxVRPuppet.cpp
+++ b/gfx/vr/gfxVRPuppet.cpp
@@ -144,16 +144,23 @@ void
 VRDisplayPuppet::ZeroSensor()
 {
 }
 
 VRHMDSensorState
 VRDisplayPuppet::GetSensorState()
 {
   mSensorState.inputFrameID = mDisplayInfo.mFrameId;
+
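+  // The puppet display has no real optics; build the head-to-eye transforms
+  // from the configured eye translations alone.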
+  Matrix4x4 matHeadToEye[2];
+  for (uint32_t eye = 0; eye < 2; ++eye) {
+    matHeadToEye[eye].PreTranslate(mDisplayInfo.mEyeTranslation[eye]);
+  }
+  mSensorState.CalcViewMatrices(matHeadToEye);
+
   return mSensorState;
 }
 
 void
 VRDisplayPuppet::SetSensorState(const VRHMDSensorState& aSensorState)
 {
   memcpy(&mSensorState, &aSensorState, sizeof(mSensorState));
 }
--- a/gfx/vr/ipc/VRMessageUtils.h
+++ b/gfx/vr/ipc/VRMessageUtils.h
@@ -110,16 +110,22 @@ struct ParamTraits<mozilla::gfx::VRHMDSe
     WriteParam(aMsg, aParam.angularAcceleration[1]);
     WriteParam(aMsg, aParam.angularAcceleration[2]);
     WriteParam(aMsg, aParam.linearVelocity[0]);
     WriteParam(aMsg, aParam.linearVelocity[1]);
     WriteParam(aMsg, aParam.linearVelocity[2]);
     WriteParam(aMsg, aParam.linearAcceleration[0]);
     WriteParam(aMsg, aParam.linearAcceleration[1]);
     WriteParam(aMsg, aParam.linearAcceleration[2]);
+    for (int i = 0; i < 16; i++) {
+      WriteParam(aMsg, aParam.leftViewMatrix[i]);
+    }
+    for (int i = 0; i < 16; i++) {
+      WriteParam(aMsg, aParam.rightViewMatrix[i]);
+    }
   }
 
   static bool Read(const Message* aMsg, PickleIterator* aIter, paramType* aResult)
   {
     if (!ReadParam(aMsg, aIter, &(aResult->timestamp)) ||
         !ReadParam(aMsg, aIter, &(aResult->inputFrameID)) ||
         !ReadParam(aMsg, aIter, &(aResult->flags)) ||
         !ReadParam(aMsg, aIter, &(aResult->orientation[0])) ||
@@ -138,16 +144,26 @@ struct ParamTraits<mozilla::gfx::VRHMDSe
         !ReadParam(aMsg, aIter, &(aResult->linearVelocity[0])) ||
         !ReadParam(aMsg, aIter, &(aResult->linearVelocity[1])) ||
         !ReadParam(aMsg, aIter, &(aResult->linearVelocity[2])) ||
         !ReadParam(aMsg, aIter, &(aResult->linearAcceleration[0])) ||
         !ReadParam(aMsg, aIter, &(aResult->linearAcceleration[1])) ||
         !ReadParam(aMsg, aIter, &(aResult->linearAcceleration[2]))) {
       return false;
     }
+    for (int i = 0; i < 16; i++) {
+      if (!ReadParam(aMsg, aIter, &(aResult->leftViewMatrix[i]))) {
+        return false;
+      }
+    }
+    for (int i = 0; i < 16; i++) {
+      if (!ReadParam(aMsg, aIter, &(aResult->rightViewMatrix[i]))) {
+        return false;
+      }
+    }
     return true;
   }
 };
 
 template <>
 struct ParamTraits<mozilla::gfx::VRFieldOfView>
 {
   typedef mozilla::gfx::VRFieldOfView paramType;
--- a/gfx/vr/ovr_capi_dynamic.h
+++ b/gfx/vr/ovr_capi_dynamic.h
@@ -121,16 +121,22 @@ typedef enum
 {
   ovrTrackingCap_Orientation      = 0x0010,
   ovrTrackingCap_MagYawCorrection = 0x0020,
   ovrTrackingCap_Position         = 0x0040,
   ovrTrackingCap_EnumSize         = 0x7fffffff
 } ovrTrackingCaps;
 
 typedef enum {
+  ovrExtension_TextureLayout_Octilinear = 0,
+  ovrExtension_Count,
+  ovrExtension_EnumSize = 0x7fffffff
+} ovrExtensions;
+
+typedef enum {
   ovrEye_Left  = 0,
   ovrEye_Right = 1,
   ovrEye_Count = 2,
   ovrEye_EnumSize = 0x7fffffff
 } ovrEyeType;
 
 typedef enum {
   ovrTrackingOrigin_EyeLevel = 0,
@@ -205,27 +211,27 @@ typedef struct OVR_ALIGNAS(8) {
   ovrPosef CalibratedOrigin;
 } ovrTrackingState;
 
 typedef struct OVR_ALIGNAS(4) {
   ovrEyeType  Eye;
   ovrFovPort  Fov;
   ovrRecti    DistortedViewport;
   ovrVector2f PixelsPerTanAngleAtCenter;
-  ovrVector3f HmdToEyeOffset;
+  ovrPosef    HmdToEyePose;
 } ovrEyeRenderDesc;
 
 typedef struct OVR_ALIGNAS(4) {
   float Projection22;
   float Projection23;
   float Projection32;
 } ovrTimewarpProjectionDesc;
 
 typedef struct OVR_ALIGNAS(4) {
-  ovrVector3f HmdToEyeOffset[ovrEye_Count];
+  ovrPosef HmdToEyePose[ovrEye_Count];
   float HmdSpaceToWorldScaleInMeters;
 } ovrViewScaleDesc;
 
 typedef enum {
   ovrTexture_2D,
   ovrTexture_2D_External,
   ovrTexture_Cube,
   ovrTexture_Count,
@@ -271,16 +277,17 @@ typedef enum {
   OVR_FORMAT_ENUMSIZE = 0x7fffffff
 } ovrTextureFormat;
 
 typedef enum {
   ovrTextureMisc_None,
   ovrTextureMisc_DX_Typeless = 0x0001,
   ovrTextureMisc_AllowGenerateMips = 0x0002,
   ovrTextureMisc_ProtectedContent = 0x0004,
+  ovrTextureMisc_AutoGenerateMips = 0x0008,
   ovrTextureMisc_EnumSize = 0x7fffffff
 } ovrTextureFlags;
 
 typedef struct {
   ovrTextureType Type;
   ovrTextureFormat Format;
   int ArraySize;
   int Width;
@@ -372,16 +379,18 @@ typedef enum {
   ovrControllerType_Active = 0xffffffff,
   ovrControllerType_EnumSize = 0x7fffffff
 } ovrControllerType;
 
 typedef enum {
   ovrHapticsBufferSubmit_Enqueue
 } ovrHapticsBufferSubmitMode;
 
+#define OVR_HAPTICS_BUFFER_SAMPLES_MAX 256
+
 typedef struct {
   const void* Samples;
   int SamplesCount;
   ovrHapticsBufferSubmitMode SubmitMode;
 } ovrHapticsBuffer;
 
 typedef struct {
   int RemainingQueueSpace;
@@ -477,16 +486,17 @@ typedef struct {
   ovrCameraExtrinsics Extrinsics;
 } ovrExternalCamera;
 
 typedef enum {
   ovrInit_Debug          = 0x00000001,
   ovrInit_RequestVersion = 0x00000004,
   ovrInit_Invisible      = 0x00000010,
   ovrInit_MixedRendering = 0x00000020,
+  ovrInit_FocusAware     = 0x00000040,
   ovrinit_WritableBits   = 0x00ffffff,
   ovrInit_EnumSize       = 0x7fffffff
 } ovrInitFlags;
 
 typedef enum {
   ovrLogLevel_Debug = 0,
   ovrLogLevel_Info  = 1,
   ovrLogLevel_Error = 2,
@@ -524,19 +534,25 @@ typedef void (OVR_PFN* pfn_ovr_Destroy)(
 
 typedef struct {
   ovrBool IsVisible;
   ovrBool HmdPresent;
   ovrBool HmdMounted;
   ovrBool DisplayLost;
   ovrBool ShouldQuit;
   ovrBool ShouldRecenter;
+  ovrBool HasInputFocus;
+  ovrBool OverlayPresent;
 } ovrSessionStatus;
 
 typedef ovrResult (OVR_PFN* pfn_ovr_GetSessionStatus)(ovrSession session, ovrSessionStatus* sessionStatus);
+typedef ovrResult (OVR_PFN* pfn_ovr_IsExtensionSupported)(ovrSession session,
+                                                          ovrExtensions extension,
+                                                          ovrBool* outExtensionSupported);
+typedef ovrResult (OVR_PFN* pfn_ovr_EnableExtension)(ovrSession session, ovrExtensions extension);
 typedef ovrResult (OVR_PFN* pfn_ovr_SetTrackingOriginType)(ovrSession session, ovrTrackingOrigin origin);
 typedef ovrTrackingOrigin (OVR_PFN* pfn_ovr_GetTrackingOriginType)(ovrSession session);
 typedef ovrResult (OVR_PFN* pfn_ovr_RecenterTrackingOrigin)(ovrSession session);
 typedef ovrResult (OVR_PFN* pfn_ovr_SpecifyTrackingOrigin)(ovrSession session, ovrPosef originPose);
 typedef void (OVR_PFN* pfn_ovr_ClearShouldRecenterFlag)(ovrSession session);
 typedef ovrTrackingState (OVR_PFN* pfn_ovr_GetTrackingState)(ovrSession session, double absTime, ovrBool latencyMarker);
 typedef ovrResult  (OVR_PFN* pfn_ovr_GetDevicePoses)(ovrSession session,
                                                      ovrTrackedDeviceType* deviceTypes,
@@ -578,16 +594,18 @@ enum {
   ovrMaxLayerCount = 16
 };
 
 typedef enum {
   ovrLayerType_Disabled       = 0,
   ovrLayerType_EyeFov         = 1,
   ovrLayerType_Quad           = 3,
   ovrLayerType_EyeMatrix      = 5,
+  ovrLayerType_EyeFovMultires = 7,
+  ovrLayerType_Cube           = 10,
   ovrLayerType_EnumSize       = 0x7fffffff
 } ovrLayerType;
 
 typedef enum {
   ovrLayerFlag_HighQuality               = 0x01,
   ovrLayerFlag_TextureOriginAtBottomLeft = 0x02,
   ovrLayerFlag_HeadLocked                = 0x04
 } ovrLayerFlags;
@@ -601,16 +619,49 @@ typedef struct OVR_ALIGNAS(OVR_PTR_SIZE)
   ovrLayerHeader Header;
   ovrTextureSwapChain ColorTexture[ovrEye_Count];
   ovrRecti Viewport[ovrEye_Count];
   ovrFovPort Fov[ovrEye_Count];
   ovrPosef RenderPose[ovrEye_Count];
   double SensorSampleTime;
 } ovrLayerEyeFov;
 
+typedef enum {
+  ovrTextureLayout_Rectilinear = 0,
+  ovrTextureLayout_Octilinear = 1,
+  ovrTextureLayout_EnumSize = 0x7fffffff
+} ovrTextureLayout;
+
+typedef struct OVR_ALIGNAS(OVR_PTR_SIZE) {
+  float WarpLeft;
+  float WarpRight;
+  float WarpUp;
+  float WarpDown;
+  float SizeLeft;
+  float SizeRight;
+  float SizeUp;
+  float SizeDown;
+} ovrTextureLayoutOctilinear;
+
+typedef union OVR_ALIGNAS(OVR_PTR_SIZE) {
+  ovrTextureLayoutOctilinear Octilinear[ovrEye_Count];
+} ovrTextureLayoutDesc_Union;
+
+typedef struct OVR_ALIGNAS(OVR_PTR_SIZE) {
+  ovrLayerHeader Header;
+  ovrTextureSwapChain ColorTexture[ovrEye_Count];
+  ovrRecti Viewport[ovrEye_Count];
+  ovrFovPort Fov[ovrEye_Count];
+  ovrPosef RenderPose[ovrEye_Count];
+  double SensorSampleTime;
+  ovrTextureLayout TextureLayout;
+  ovrTextureLayoutDesc_Union TextureLayoutDesc;
+} ovrLayerEyeFovMultires;
+
 typedef struct OVR_ALIGNAS(OVR_PTR_SIZE) {
   ovrLayerHeader Header;
   ovrTextureSwapChain ColorTexture[ovrEye_Count];
   ovrRecti Viewport[ovrEye_Count];
   ovrPosef RenderPose[ovrEye_Count];
   ovrMatrix4f Matrix[ovrEye_Count];
   double SensorSampleTime;
 } ovrLayerEyeMatrix;
@@ -618,31 +669,46 @@ typedef struct OVR_ALIGNAS(OVR_PTR_SIZE)
 typedef struct OVR_ALIGNAS(OVR_PTR_SIZE) {
   ovrLayerHeader Header;
   ovrTextureSwapChain ColorTexture;
   ovrRecti Viewport;
   ovrPosef QuadPoseCenter;
   ovrVector2f QuadSize;
 } ovrLayerQuad;
 
+typedef struct OVR_ALIGNAS(OVR_PTR_SIZE) {
+  ovrLayerHeader Header;
+  ovrQuatf Orientation;
+  ovrTextureSwapChain CubeMapTexture;
+} ovrLayerCube;
+
 typedef union {
   ovrLayerHeader Header;
   ovrLayerEyeFov EyeFov;
   ovrLayerQuad Quad;
+  ovrLayerEyeFovMultires Multires;
+  ovrLayerCube Cube;
 } ovrLayer_Union;
 
 
 typedef ovrResult (OVR_PFN* pfn_ovr_GetTextureSwapChainLength)(ovrSession session, ovrTextureSwapChain chain, int* out_Length);
 typedef ovrResult (OVR_PFN* pfn_ovr_GetTextureSwapChainCurrentIndex)(ovrSession session, ovrTextureSwapChain chain, int* out_Index);
 typedef ovrResult (OVR_PFN* pfn_ovr_GetTextureSwapChainDesc)(ovrSession session, ovrTextureSwapChain chain, ovrTextureSwapChainDesc* out_Desc);
 typedef ovrResult (OVR_PFN* pfn_ovr_CommitTextureSwapChain)(ovrSession session, ovrTextureSwapChain chain);
 typedef void (OVR_PFN* pfn_ovr_DestroyTextureSwapChain)(ovrSession session, ovrTextureSwapChain chain);
 typedef void (OVR_PFN* pfn_ovr_DestroyMirrorTexture)(ovrSession session, ovrMirrorTexture mirrorTexture);
 typedef ovrSizei(OVR_PFN* pfn_ovr_GetFovTextureSize)(ovrSession session, ovrEyeType eye, ovrFovPort fov, float pixelsPerDisplayPixel);
-typedef ovrEyeRenderDesc(OVR_PFN* pfn_ovr_GetRenderDesc)(ovrSession session, ovrEyeType eyeType, ovrFovPort fov);
+typedef ovrEyeRenderDesc(OVR_PFN* pfn_ovr_GetRenderDesc2)(ovrSession session, ovrEyeType eyeType, ovrFovPort fov);
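+// Explicit frame lifecycle entry points introduced in the Oculus 1.19 SDK.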
+typedef ovrResult (OVR_PFN* pfn_ovr_WaitToBeginFrame)(ovrSession session, long long frameIndex);
+typedef ovrResult (OVR_PFN* pfn_ovr_BeginFrame)(ovrSession session, long long frameIndex);
+typedef ovrResult (OVR_PFN* pfn_ovr_EndFrame)(ovrSession session,
+                                              long long frameIndex,
+                                              const ovrViewScaleDesc* viewScaleDesc,
+                                              ovrLayerHeader const* const* layerPtrList,
+                                              unsigned int layerCount);
 typedef ovrResult(OVR_PFN* pfn_ovr_SubmitFrame)(ovrSession session, long long frameIndex,
 	const ovrViewScaleDesc* viewScaleDesc,
 	ovrLayerHeader const * const * layerPtrList, unsigned int layerCount);
 
 typedef struct OVR_ALIGNAS(4) {
   int HmdVsyncIndex;
   int AppFrameIndex;
   int AppDroppedFrameCount;
@@ -747,16 +813,17 @@ typedef enum {
   ovrError_DeviceUnavailable = -1010,
   ovrError_InvalidHeadsetOrientation = -1011,
   ovrError_ClientSkippedDestroy = -1012,
   ovrError_ClientSkippedShutdown = -1013,
   ovrError_ServiceDeadlockDetected = -1014,
   ovrError_InvalidOperation = -1015,
   ovrError_InsufficientArraySize = -1016,
   ovrError_NoExternalCameraInfo = -1017,
+  ovrError_LostTracking = -1018,
   ovrError_AudioDeviceNotFound = -2001,
   ovrError_AudioComError = -2002,
   ovrError_Initialize = -3000,
   ovrError_LibLoad = -3001,
   ovrError_LibVersion = -3002,
   ovrError_ServiceConnection = -3003,
   ovrError_ServiceVersion = -3004,
   ovrError_IncompatibleOS = -3005,