--- a/dom/base/Navigator.h
+++ b/dom/base/Navigator.h
@@ -96,16 +96,17 @@ class CellBroadcast;
class IccManager;
class Telephony;
class Voicemail;
class TVManager;
class InputPortManager;
class DeviceStorageAreaListener;
class Presentation;
class LegacyMozTCPSocket;
+class VRDisplay;
namespace time {
class TimeManager;
} // namespace time
namespace system {
#ifdef MOZ_AUDIO_CHANNEL_MANAGER
class AudioChannelManager;
--- a/dom/base/nsDocument.cpp
+++ b/dom/base/nsDocument.cpp
@@ -254,18 +254,16 @@
#include "nsISpeculativeConnect.h"
#include "mozilla/MediaManager.h"
#ifdef MOZ_WEBRTC
#include "IPeerConnection.h"
#endif // MOZ_WEBRTC
-#include "VRDisplayProxy.h"
-
using namespace mozilla;
using namespace mozilla::dom;
typedef nsTArray<Link*> LinkArray;
static LazyLogModule gDocumentLeakPRLog("DocumentLeak");
static LazyLogModule gCspPRLog("CSP");
--- a/dom/base/nsGlobalWindow.cpp
+++ b/dom/base/nsGlobalWindow.cpp
@@ -1738,16 +1738,17 @@ nsGlobalWindow::FreeInnerObjects()
}
mAudioContexts.Clear();
#ifdef MOZ_GAMEPAD
DisableGamepadUpdates();
mHasGamepad = false;
mGamepads.Clear();
#endif
+ mVRDisplays.Clear();
}
//*****************************************************************************
// nsGlobalWindow::nsISupports
//*****************************************************************************
// QueryInterface implementation for nsGlobalWindow
NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(nsGlobalWindow)
@@ -13448,17 +13449,17 @@ nsGlobalWindow::SyncGamepadState()
}
#endif // MOZ_GAMEPAD
bool
nsGlobalWindow::UpdateVRDisplays(nsTArray<RefPtr<mozilla::dom::VRDisplay>>& aDevices)
{
FORWARD_TO_INNER(UpdateVRDisplays, (aDevices), false);
- VRDisplay::UpdateVRDisplays(mVRDisplays, ToSupports(this));
+ VRDisplay::UpdateVRDisplays(mVRDisplays, AsInner());
aDevices = mVRDisplays;
return true;
}
// nsGlobalChromeWindow implementation
NS_IMPL_CYCLE_COLLECTION_CLASS(nsGlobalChromeWindow)
--- a/dom/bindings/Bindings.conf
+++ b/dom/bindings/Bindings.conf
@@ -1228,20 +1228,16 @@ DOMInterfaces = {
'TreeWalker': {
'wrapperCache': False,
},
'UndoManager': {
'implicitJSContext' : [ 'undo', 'redo', 'transact' ],
},
-'VRDisplay': {
- 'concrete': False
-},
-
'VTTCue': {
'nativeType': 'mozilla::dom::TextTrackCue'
},
'VTTRegion': {
'nativeType': 'mozilla::dom::TextTrackRegion',
},
--- a/dom/canvas/WebGLContext.cpp
+++ b/dom/canvas/WebGLContext.cpp
@@ -43,16 +43,18 @@
#include "nsIObserverService.h"
#include "nsIVariant.h"
#include "nsIWidget.h"
#include "nsIXPConnect.h"
#include "nsServiceManagerUtils.h"
#include "nsSVGEffects.h"
#include "prenv.h"
#include "ScopedGLHelpers.h"
+#include "VRManagerChild.h"
+#include "mozilla/layers/TextureClientSharedSurface.h"
#ifdef MOZ_WIDGET_GONK
#include "mozilla/layers/ShadowLayers.h"
#endif
// Local
#include "CanvasUtils.h"
#include "WebGL1Context.h"
@@ -118,16 +120,17 @@ WebGLContext::WebGLContext()
, mMaxFetchedVertices(0)
, mMaxFetchedInstances(0)
, mBypassShaderValidation(false)
, mContextLossHandler(this)
, mNeedsFakeNoAlpha(false)
, mNeedsFakeNoDepth(false)
, mNeedsFakeNoStencil(false)
, mNeedsEmulatedLoneDepthStencil(false)
+ , mVRPresentationActive(false)
{
mGeneration = 0;
mInvalidated = false;
mCapturedFrameInvalidated = false;
mShouldPresent = true;
mResetLayer = true;
mOptionsFrozen = false;
mMinCapability = false;
@@ -2344,16 +2347,82 @@ WebGLContext::GetUnpackSize(bool isFunc3
CheckedUint32 totalBytes = strideBytesPerImage * (usedImages - 1);
totalBytes += strideBytesPerRow * (usedRowsPerImage - 1);
totalBytes += usedBytesPerRow;
return totalBytes;
}
+// Flush the current WebGL frame and return its front buffer as a
+// SharedSurfaceTextureClient suitable for submission to a VR display.
+// Returns nullptr when there is no VRManagerChild, no screen buffer,
+// or no front buffer. If the front buffer was not allocated through
+// the VRManagerChild, it is copied into a texture client that was, so
+// the returned surface is always usable by the VR manager.
+already_AddRefed<layers::SharedSurfaceTextureClient>
+WebGLContext::GetVRFrame()
+{
+  VRManagerChild *vrmc = VRManagerChild::Get();
+  if (!vrmc) {
+    return nullptr;
+  }
+
+  // Publish any pending rendering into the front buffer before we
+  // hand it out.
+  PresentScreenBuffer();
+  mDrawCallsSinceLastFlush = 0;
+
+  MarkContextClean();
+  UpdateLastUseIndex();
+
+  gl::GLScreenBuffer* screen = gl->Screen();
+  if (!screen) {
+    return nullptr;
+  }
+
+  RefPtr<SharedSurfaceTextureClient> sharedSurface = screen->Front();
+  if (!sharedSurface) {
+    return nullptr;
+  }
+
+  // sharedSurface is known non-null here (checked above), so only the
+  // allocator needs testing: a surface not allocated by the VR manager
+  // must be copied into one that was.
+  if (sharedSurface->GetAllocator() != vrmc) {
+    RefPtr<SharedSurfaceTextureClient> dest =
+      screen->Factory()->NewTexClient(sharedSurface->GetSize());
+    if (!dest) {
+      return nullptr;
+    }
+    gl::SharedSurface* destSurf = dest->Surf();
+    destSurf->ProducerAcquire();
+    SharedSurface::ProdCopy(sharedSurface->Surf(), destSurf, screen->Factory());
+    destSurf->ProducerRelease();
+
+    return dest.forget();
+  }
+
+  return sharedSurface.forget();
+}
+
+// Switch this WebGL context's screen buffer into VR presentation mode.
+// The GLScreenBuffer is morphed so that subsequent surfaces are
+// allocated through the VRManagerChild. Returns false when no VR
+// manager or screen buffer is available; on success sets
+// mVRPresentationActive (consumed when building the canvas layer).
+bool
+WebGLContext::StartVRPresentation()
+{
+  VRManagerChild *vrmc = VRManagerChild::Get();
+  if (!vrmc) {
+    return false;
+  }
+  gl::GLScreenBuffer* screen = gl->Screen();
+  if (!screen) {
+    return false;
+  }
+  // Keep the existing surface caps; only the allocator/backend change.
+  gl::SurfaceCaps caps = screen->mCaps;
+
+  // Build a factory that allocates surfaces via the VR manager (vrmc)
+  // instead of the context's current allocator.
+  UniquePtr<gl::SurfaceFactory> factory =
+    gl::GLScreenBuffer::CreateFactory(gl,
+                                      caps,
+                                      vrmc,
+                                      vrmc->GetBackendType(),
+                                      TextureFlags::ORIGIN_BOTTOM_LEFT);
+
+  screen->Morph(Move(factory));
+  mVRPresentationActive = true;
+  return true;
+}
+
////////////////////////////////////////////////////////////////////////////////
// XPCOM goop
NS_IMPL_CYCLE_COLLECTING_ADDREF(WebGLContext)
NS_IMPL_CYCLE_COLLECTING_RELEASE(WebGLContext)
NS_IMPL_CYCLE_COLLECTION_WRAPPERCACHE(WebGLContext,
mCanvasElement,
--- a/dom/canvas/WebGLContext.h
+++ b/dom/canvas/WebGLContext.h
@@ -111,16 +111,17 @@ class Element;
class ImageData;
class OwningHTMLCanvasElementOrOffscreenCanvas;
struct WebGLContextAttributes;
template<typename> struct Nullable;
} // namespace dom
namespace gfx {
class SourceSurface;
+class VRLayerChild;
} // namespace gfx
namespace webgl {
struct LinkedProgramInfo;
class ShaderValidator;
class TexUnpackBlob;
struct UniformInfo;
} // namespace webgl
@@ -534,16 +535,19 @@ public:
bool IsProgram(WebGLProgram* prog);
bool IsRenderbuffer(WebGLRenderbuffer* rb);
bool IsShader(WebGLShader* shader);
bool IsVertexArray(WebGLVertexArray* vao);
void LineWidth(GLfloat width);
void LinkProgram(WebGLProgram* prog);
void PixelStorei(GLenum pname, GLint param);
void PolygonOffset(GLfloat factor, GLfloat units);
+
+ already_AddRefed<layers::SharedSurfaceTextureClient> GetVRFrame();
+ bool StartVRPresentation();
protected:
bool ReadPixels_SharedPrecheck(ErrorResult* const out_error);
void ReadPixelsImpl(GLint x, GLint y, GLsizei width, GLsizei height, GLenum format,
GLenum type, void* data, uint32_t dataLen);
bool DoReadPixelsAndConvert(const webgl::FormatInfo* srcFormat, GLint x, GLint y,
GLsizei width, GLsizei height, GLenum format,
GLenum destType, void* dest, uint32_t dataLen,
uint32_t rowStride);
@@ -1469,16 +1473,17 @@ protected:
bool ShouldGenerateWarnings() const;
uint64_t mLastUseIndex;
bool mNeedsFakeNoAlpha;
bool mNeedsFakeNoDepth;
bool mNeedsFakeNoStencil;
bool mNeedsEmulatedLoneDepthStencil;
+ bool mVRPresentationActive;
bool HasTimestampBits() const;
struct ScopedMaskWorkaround {
WebGLContext& mWebGL;
const bool mFakeNoAlpha;
const bool mFakeNoDepth;
const bool mFakeNoStencil;
--- a/dom/html/HTMLCanvasElement.cpp
+++ b/dom/html/HTMLCanvasElement.cpp
@@ -37,16 +37,17 @@
#include "nsIXPConnect.h"
#include "nsJSUtils.h"
#include "nsLayoutUtils.h"
#include "nsMathUtils.h"
#include "nsNetUtil.h"
#include "nsRefreshDriver.h"
#include "nsStreamUtils.h"
#include "ActiveLayerTracker.h"
+#include "VRManagerChild.h"
#include "WebGL1Context.h"
#include "WebGL2Context.h"
using namespace mozilla::layers;
using namespace mozilla::gfx;
NS_IMPL_NS_NEW_HTML_ELEMENT(Canvas)
@@ -347,16 +348,17 @@ HTMLCanvasElementObserver::HandleEvent(n
NS_IMPL_ISUPPORTS(HTMLCanvasElementObserver, nsIObserver)
// ---------------------------------------------------------------------------
HTMLCanvasElement::HTMLCanvasElement(already_AddRefed<mozilla::dom::NodeInfo>& aNodeInfo)
: nsGenericHTMLElement(aNodeInfo),
mResetLayer(true) ,
+ mVRPresentationActive(false),
mWriteOnly(false)
{}
HTMLCanvasElement::~HTMLCanvasElement()
{
if (mContextObserver) {
mContextObserver->Destroy();
mContextObserver = nullptr;
@@ -1060,17 +1062,17 @@ HTMLCanvasElement::GetCanvasLayer(nsDisp
{
// The address of sOffscreenCanvasLayerUserDataDummy is used as the user
// data key for retained LayerManagers managed by FrameLayerBuilder.
// We don't much care about what value in it, so just assign a dummy
// value for it.
static uint8_t sOffscreenCanvasLayerUserDataDummy = 0;
if (mCurrentContext) {
- return mCurrentContext->GetCanvasLayer(aBuilder, aOldLayer, aManager);
+ return mCurrentContext->GetCanvasLayer(aBuilder, aOldLayer, aManager, mVRPresentationActive);
}
if (mOffscreenCanvas) {
if (!mResetLayer &&
aOldLayer && aOldLayer->HasUserData(&sOffscreenCanvasLayerUserDataDummy)) {
RefPtr<Layer> ret = aOldLayer;
return ret.forget();
}
@@ -1378,10 +1380,47 @@ HTMLCanvasElement::InvalidateFromAsyncCa
HTMLCanvasElement *element = aRenderer->mHTMLCanvasElement;
if (!element) {
return;
}
element->InvalidateCanvasContent(nullptr);
}
+// Begin presenting this canvas to a VR display by switching its WebGL
+// context into VR presentation mode. A no-op (mVRPresentationActive
+// stays false) when the canvas has no WebGL context or the context
+// fails to start presenting.
+void
+HTMLCanvasElement::StartVRPresentation()
+{
+  // Guard the static_cast below: GetContextAtIndex(0) is only a
+  // WebGLContext for WebGL 1/2 canvases. Mirrors the check in
+  // HTMLCanvasElement::GetVRFrame().
+  if (GetCurrentContextType() != CanvasContextType::WebGL1 &&
+      GetCurrentContextType() != CanvasContextType::WebGL2) {
+    return;
+  }
+
+  WebGLContext* webgl = static_cast<WebGLContext*>(GetContextAtIndex(0));
+  if (!webgl) {
+    return;
+  }
+
+  if (!webgl->StartVRPresentation()) {
+    return;
+  }
+
+  mVRPresentationActive = true;
+}
+
+// Stop presenting this canvas to a VR display. Only clears the local
+// flag read by GetCanvasLayer(); NOTE(review): the WebGL context's
+// morphed screen buffer is not reverted here — confirm that teardown
+// is handled elsewhere.
+void
+HTMLCanvasElement::StopVRPresentation()
+{
+  mVRPresentationActive = false;
+}
+
+// Return this canvas's current frame as a shared surface texture for
+// submission to a VR display. Returns nullptr unless the canvas holds
+// a WebGL (1 or 2) context; delegates to WebGLContext::GetVRFrame().
+already_AddRefed<layers::SharedSurfaceTextureClient>
+HTMLCanvasElement::GetVRFrame()
+{
+  // The static_cast below is only valid when the context really is a
+  // WebGLContext, i.e. for WebGL1/WebGL2 context types.
+  if (GetCurrentContextType() != CanvasContextType::WebGL1 &&
+      GetCurrentContextType() != CanvasContextType::WebGL2) {
+    return nullptr;
+  }
+
+  WebGLContext* webgl = static_cast<WebGLContext*>(GetContextAtIndex(0));
+  if (!webgl) {
+    return nullptr;
+  }
+
+  return webgl->GetVRFrame();
+}
+
} // namespace dom
} // namespace mozilla
--- a/dom/html/HTMLCanvasElement.h
+++ b/dom/html/HTMLCanvasElement.h
@@ -28,19 +28,21 @@ namespace mozilla {
class WebGLContext;
namespace layers {
class AsyncCanvasRenderer;
class CanvasLayer;
class Image;
class Layer;
class LayerManager;
+class SharedSurfaceTextureClient;
} // namespace layers
namespace gfx {
class SourceSurface;
+class VRLayerChild;
} // namespace gfx
namespace dom {
class CanvasCaptureMediaStream;
class File;
class FileCallback;
class HTMLCanvasPrintState;
class OffscreenCanvas;
@@ -337,16 +339,20 @@ public:
void OnVisibilityChange();
void OnMemoryPressure();
static void SetAttrFromAsyncCanvasRenderer(AsyncCanvasRenderer *aRenderer);
static void InvalidateFromAsyncCanvasRenderer(AsyncCanvasRenderer *aRenderer);
+ void StartVRPresentation();
+ void StopVRPresentation();
+ already_AddRefed<layers::SharedSurfaceTextureClient> GetVRFrame();
+
protected:
virtual ~HTMLCanvasElement();
virtual JSObject* WrapNode(JSContext* aCx, JS::Handle<JSObject*> aGivenProto) override;
virtual nsIntSize GetWidthHeight() override;
virtual already_AddRefed<nsICanvasRenderingContextInternal>
@@ -370,16 +376,17 @@ protected:
RefPtr<HTMLCanvasElement> mOriginalCanvas;
RefPtr<PrintCallback> mPrintCallback;
RefPtr<HTMLCanvasPrintState> mPrintState;
nsTArray<WeakPtr<FrameCaptureListener>> mRequestedFrameListeners;
RefPtr<RequestedFrameRefreshObserver> mRequestedFrameRefreshObserver;
RefPtr<AsyncCanvasRenderer> mAsyncCanvasRenderer;
RefPtr<OffscreenCanvas> mOffscreenCanvas;
RefPtr<HTMLCanvasElementObserver> mContextObserver;
+ bool mVRPresentationActive;
public:
// Record whether this canvas should be write-only or not.
// We set this when script paints an image from a different origin.
// We also transitively set it when script paints a canvas which
// is itself write-only.
bool mWriteOnly;
--- a/dom/ipc/TabChild.cpp
+++ b/dom/ipc/TabChild.cpp
@@ -105,16 +105,17 @@
#include "nsNetUtil.h"
#include "nsIPermissionManager.h"
#include "nsIURILoader.h"
#include "nsIScriptError.h"
#include "mozilla/EventForwards.h"
#include "nsDeviceContext.h"
#include "nsSandboxFlags.h"
#include "FrameLayerBuilder.h"
+#include "VRManagerChild.h"
#ifdef NS_PRINTING
#include "nsIPrintSession.h"
#include "nsIPrintSettings.h"
#include "nsIPrintSettingsService.h"
#include "nsIWebBrowserPrint.h"
#endif
@@ -2736,16 +2737,17 @@ TabChild::InitRenderingState(const Textu
ShadowLayerForwarder* lf =
mPuppetWidget->GetLayerManager(
shadowManager, mTextureFactoryIdentifier.mParentBackend)
->AsShadowForwarder();
MOZ_ASSERT(lf && lf->HasShadowManager(),
"PuppetWidget should have shadow manager");
lf->IdentifyTextureHost(mTextureFactoryIdentifier);
ImageBridgeChild::IdentifyCompositorTextureHost(mTextureFactoryIdentifier);
+ gfx::VRManagerChild::IdentifyTextureHost(mTextureFactoryIdentifier);
mRemoteFrame = remoteFrame;
if (aLayersId != 0) {
if (!sTabChildren) {
sTabChildren = new TabChildMap;
}
MOZ_ASSERT(!sTabChildren->Get(aLayersId));
sTabChildren->Put(aLayersId, this);
--- a/dom/vr/VRDisplay.cpp
+++ b/dom/vr/VRDisplay.cpp
@@ -7,361 +7,623 @@
#include "nsWrapperCache.h"
#include "mozilla/dom/Element.h"
#include "mozilla/dom/VRDisplayBinding.h"
#include "mozilla/dom/ElementBinding.h"
#include "mozilla/dom/VRDisplay.h"
#include "Navigator.h"
#include "gfxVR.h"
-#include "VRDisplayProxy.h"
+#include "VRDisplayClient.h"
#include "VRManagerChild.h"
+#include "VRDisplayPresentation.h"
+#include "nsIObserverService.h"
#include "nsIFrame.h"
+#include "nsISupportsPrimitives.h"
using namespace mozilla::gfx;
namespace mozilla {
namespace dom {
+// DOM-side VRFieldOfView: the four per-edge view angles, in degrees,
+// exposed to script by the WebVR API. aParent is the owning object
+// used for wrapper-cache bookkeeping.
+VRFieldOfView::VRFieldOfView(nsISupports* aParent,
+                             double aUpDegrees, double aRightDegrees,
+                             double aDownDegrees, double aLeftDegrees)
+  : mParent(aParent)
+  , mUpDegrees(aUpDegrees)
+  , mRightDegrees(aRightDegrees)
+  , mDownDegrees(aDownDegrees)
+  , mLeftDegrees(aLeftDegrees)
+{
+}
+
+// Convenience constructor: copy the angles out of the gfx-layer
+// (non-DOM) gfx::VRFieldOfView struct.
+VRFieldOfView::VRFieldOfView(nsISupports* aParent, const gfx::VRFieldOfView& aSrc)
+  : mParent(aParent)
+  , mUpDegrees(aSrc.upDegrees)
+  , mRightDegrees(aSrc.rightDegrees)
+  , mDownDegrees(aSrc.downDegrees)
+  , mLeftDegrees(aSrc.leftDegrees)
+{
+}
+
+// VRDisplayCapabilities accessors: each tests one bit of the
+// gfx::VRDisplayCapabilityFlags bitfield (mFlags) captured when the
+// capabilities object was built from the display info.
+bool
+VRDisplayCapabilities::HasPosition() const
+{
+  return bool(mFlags & gfx::VRDisplayCapabilityFlags::Cap_Position);
+}
+
+bool
+VRDisplayCapabilities::HasOrientation() const
+{
+  return bool(mFlags & gfx::VRDisplayCapabilityFlags::Cap_Orientation);
+}
+
+bool
+VRDisplayCapabilities::HasExternalDisplay() const
+{
+  return bool(mFlags & gfx::VRDisplayCapabilityFlags::Cap_External);
+}
+
+bool
+VRDisplayCapabilities::CanPresent() const
+{
+  return bool(mFlags & gfx::VRDisplayCapabilityFlags::Cap_Present);
+}
+
+// A presentable display supports exactly one layer; a non-presentable
+// one supports none.
+uint32_t
+VRDisplayCapabilities::MaxLayers() const
+{
+  return CanPresent() ? 1 : 0;
+}
+
/*static*/ bool
VRDisplay::RefreshVRDisplays(dom::Navigator* aNavigator)
{
gfx::VRManagerChild* vm = gfx::VRManagerChild::Get();
return vm && vm->RefreshVRDisplaysWithCallback(aNavigator);
}
/*static*/ void
-VRDisplay::UpdateVRDisplays(nsTArray<RefPtr<VRDisplay>>& aDevices, nsISupports* aParent)
+VRDisplay::UpdateVRDisplays(nsTArray<RefPtr<VRDisplay>>& aDisplays, nsPIDOMWindowInner* aWindow)
{
- nsTArray<RefPtr<VRDisplay>> devices;
+ nsTArray<RefPtr<VRDisplay>> displays;
gfx::VRManagerChild* vm = gfx::VRManagerChild::Get();
- nsTArray<RefPtr<gfx::VRDisplayProxy>> proxyDevices;
- if (vm && vm->GetVRDisplays(proxyDevices)) {
- for (size_t i = 0; i < proxyDevices.Length(); i++) {
- RefPtr<gfx::VRDisplayProxy> proxyDevice = proxyDevices[i];
- bool isNewDevice = true;
- for (size_t j = 0; j < aDevices.Length(); j++) {
- if (aDevices[j]->GetHMD()->GetDeviceInfo() == proxyDevice->GetDeviceInfo()) {
- devices.AppendElement(aDevices[j]);
- isNewDevice = false;
+ nsTArray<RefPtr<gfx::VRDisplayClient>> updatedDisplays;
+ if (vm && vm->GetVRDisplays(updatedDisplays)) {
+ for (size_t i = 0; i < updatedDisplays.Length(); i++) {
+ RefPtr<gfx::VRDisplayClient> display = updatedDisplays[i];
+ bool isNewDisplay = true;
+ for (size_t j = 0; j < aDisplays.Length(); j++) {
+ if (aDisplays[j]->GetClient()->GetDisplayInfo() == display->GetDisplayInfo()) {
+ displays.AppendElement(aDisplays[j]);
+ isNewDisplay = false;
}
}
- if (isNewDevice) {
- gfx::VRDisplayCapabilityFlags flags = proxyDevice->GetDeviceInfo().GetCapabilities();
- devices.AppendElement(new HMDInfoVRDisplay(aParent, proxyDevice));
- if (flags & (gfx::VRDisplayCapabilityFlags::Cap_Position |
- gfx::VRDisplayCapabilityFlags::Cap_Orientation))
- {
- devices.AppendElement(new HMDPositionVRDisplay(aParent, proxyDevice));
- }
+ if (isNewDisplay) {
+ displays.AppendElement(new VRDisplay(aWindow, display));
}
}
}
- aDevices = devices;
-}
-
-NS_IMPL_CYCLE_COLLECTION_WRAPPERCACHE(VRFieldOfViewReadOnly, mParent)
-NS_IMPL_CYCLE_COLLECTION_ROOT_NATIVE(VRFieldOfViewReadOnly, AddRef)
-NS_IMPL_CYCLE_COLLECTION_UNROOT_NATIVE(VRFieldOfViewReadOnly, Release)
-
-JSObject*
-VRFieldOfViewReadOnly::WrapObject(JSContext* aCx, JS::Handle<JSObject*> aGivenProto)
-{
- return VRFieldOfViewReadOnlyBinding::Wrap(aCx, this, aGivenProto);
+ aDisplays = displays;
}
-already_AddRefed<VRFieldOfView>
-VRFieldOfView::Constructor(const GlobalObject& aGlobal, const VRFieldOfViewInit& aParams,
- ErrorResult& aRV)
-{
- RefPtr<VRFieldOfView> fov =
- new VRFieldOfView(aGlobal.GetAsSupports(),
- aParams.mUpDegrees, aParams.mRightDegrees,
- aParams.mDownDegrees, aParams.mLeftDegrees);
- return fov.forget();
-}
+NS_IMPL_CYCLE_COLLECTION_WRAPPERCACHE(VRFieldOfView, mParent)
+NS_IMPL_CYCLE_COLLECTION_ROOT_NATIVE(VRFieldOfView, AddRef)
+NS_IMPL_CYCLE_COLLECTION_UNROOT_NATIVE(VRFieldOfView, Release)
-already_AddRefed<VRFieldOfView>
-VRFieldOfView::Constructor(const GlobalObject& aGlobal,
- double aUpDegrees, double aRightDegrees,
- double aDownDegrees, double aLeftDegrees,
- ErrorResult& aRV)
-{
- RefPtr<VRFieldOfView> fov =
- new VRFieldOfView(aGlobal.GetAsSupports(),
- aUpDegrees, aRightDegrees, aDownDegrees,
- aLeftDegrees);
- return fov.forget();
-}
JSObject*
VRFieldOfView::WrapObject(JSContext* aCx,
JS::Handle<JSObject*> aGivenProto)
{
return VRFieldOfViewBinding::Wrap(aCx, this, aGivenProto);
}
-NS_IMPL_CYCLE_COLLECTION_WRAPPERCACHE(VREyeParameters, mParent, mMinFOV, mMaxFOV, mRecFOV, mCurFOV, mEyeTranslation, mRenderRect)
+NS_IMPL_CYCLE_COLLECTION_CLASS(VREyeParameters)
+
+NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN(VREyeParameters)
+ NS_IMPL_CYCLE_COLLECTION_UNLINK(mParent, mFOV)
+ NS_IMPL_CYCLE_COLLECTION_UNLINK_PRESERVED_WRAPPER
+ tmp->mOffset = nullptr;
+NS_IMPL_CYCLE_COLLECTION_UNLINK_END
+
+NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN(VREyeParameters)
+ NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mParent, mFOV)
+ NS_IMPL_CYCLE_COLLECTION_TRAVERSE_SCRIPT_OBJECTS
+NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END
+
+NS_IMPL_CYCLE_COLLECTION_TRACE_BEGIN(VREyeParameters)
+ NS_IMPL_CYCLE_COLLECTION_TRACE_PRESERVED_WRAPPER
+ NS_IMPL_CYCLE_COLLECTION_TRACE_JS_MEMBER_CALLBACK(mOffset)
+NS_IMPL_CYCLE_COLLECTION_TRACE_END
+
NS_IMPL_CYCLE_COLLECTION_ROOT_NATIVE(VREyeParameters, AddRef)
NS_IMPL_CYCLE_COLLECTION_UNROOT_NATIVE(VREyeParameters, Release)
VREyeParameters::VREyeParameters(nsISupports* aParent,
- const gfx::VRFieldOfView& aMinFOV,
- const gfx::VRFieldOfView& aMaxFOV,
- const gfx::VRFieldOfView& aRecFOV,
const gfx::Point3D& aEyeTranslation,
- const gfx::VRFieldOfView& aCurFOV,
- const gfx::IntRect& aRenderRect)
+ const gfx::VRFieldOfView& aFOV,
+ const gfx::IntSize& aRenderSize)
: mParent(aParent)
+ , mEyeTranslation(aEyeTranslation)
+ , mRenderSize(aRenderSize)
{
- mMinFOV = new VRFieldOfView(aParent, aMinFOV);
- mMaxFOV = new VRFieldOfView(aParent, aMaxFOV);
- mRecFOV = new VRFieldOfView(aParent, aRecFOV);
- mCurFOV = new VRFieldOfView(aParent, aCurFOV);
-
- mEyeTranslation = new DOMPoint(aParent, aEyeTranslation.x, aEyeTranslation.y, aEyeTranslation.z, 0.0);
- mRenderRect = new DOMRect(aParent, aRenderRect.x, aRenderRect.y, aRenderRect.width, aRenderRect.height);
-}
-
-VRFieldOfView*
-VREyeParameters::MinimumFieldOfView()
-{
- return mMinFOV;
+ mFOV = new VRFieldOfView(aParent, aFOV);
}
VRFieldOfView*
-VREyeParameters::MaximumFieldOfView()
+VREyeParameters::FieldOfView()
{
- return mMaxFOV;
-}
-
-VRFieldOfView*
-VREyeParameters::RecommendedFieldOfView()
-{
- return mRecFOV;
+ return mFOV;
}
-VRFieldOfView*
-VREyeParameters::CurrentFieldOfView()
+void
+VREyeParameters::GetOffset(JSContext* aCx, JS::MutableHandle<JSObject*> aRetval, ErrorResult& aRv)
{
- return mCurFOV;
-}
-
-DOMPoint*
-VREyeParameters::EyeTranslation()
-{
- return mEyeTranslation;
-}
-
-DOMRect*
-VREyeParameters::RenderRect()
-{
- return mRenderRect;
+ if (!mOffset) {
+ // Lazily create the Float32Array
+ mOffset = dom::Float32Array::Create(aCx, this, 3, mEyeTranslation.components);
+ if (!mOffset) {
+ aRv.NoteJSContextException(aCx);
+ return;
+ }
+ }
+ JS::ExposeObjectToActiveJS(mOffset);
+ aRetval.set(mOffset);
}
JSObject*
VREyeParameters::WrapObject(JSContext* aCx, JS::Handle<JSObject*> aGivenProto)
{
return VREyeParametersBinding::Wrap(aCx, this, aGivenProto);
}
-NS_IMPL_CYCLE_COLLECTION_WRAPPERCACHE(VRPositionState, mParent)
-NS_IMPL_CYCLE_COLLECTION_ROOT_NATIVE(VRPositionState, AddRef)
-NS_IMPL_CYCLE_COLLECTION_UNROOT_NATIVE(VRPositionState, Release)
+JSObject*
+VRStageParameters::WrapObject(JSContext* aCx, JS::Handle<JSObject*> aGivenProto)
+{
+ return VRStageParametersBinding::Wrap(aCx, this, aGivenProto);
+}
+
+NS_IMPL_CYCLE_COLLECTION_CLASS(VRStageParameters)
+
+NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN(VRStageParameters)
+ NS_IMPL_CYCLE_COLLECTION_UNLINK(mParent)
+ NS_IMPL_CYCLE_COLLECTION_UNLINK_PRESERVED_WRAPPER
+ tmp->mSittingToStandingTransformArray = nullptr;
+NS_IMPL_CYCLE_COLLECTION_UNLINK_END
+
+NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN(VRStageParameters)
+ NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mParent)
+ NS_IMPL_CYCLE_COLLECTION_TRAVERSE_SCRIPT_OBJECTS
+NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END
+
+NS_IMPL_CYCLE_COLLECTION_TRACE_BEGIN(VRStageParameters)
+ NS_IMPL_CYCLE_COLLECTION_TRACE_PRESERVED_WRAPPER
+ NS_IMPL_CYCLE_COLLECTION_TRACE_JS_MEMBER_CALLBACK(mSittingToStandingTransformArray)
+NS_IMPL_CYCLE_COLLECTION_TRACE_END
+
+NS_IMPL_CYCLE_COLLECTION_ROOT_NATIVE(VRStageParameters, AddRef)
+NS_IMPL_CYCLE_COLLECTION_UNROOT_NATIVE(VRStageParameters, Release)
-VRPositionState::VRPositionState(nsISupports* aParent, const gfx::VRHMDSensorState& aState)
+void
+VRStageParameters::GetSittingToStandingTransform(JSContext* aCx,
+ JS::MutableHandle<JSObject*> aRetval,
+ ErrorResult& aRv)
+{
+ if (!mSittingToStandingTransformArray) {
+ // Lazily create the Float32Array
+ mSittingToStandingTransformArray = dom::Float32Array::Create(aCx, this, 16,
+ mSittingToStandingTransform.components);
+ if (!mSittingToStandingTransformArray) {
+ aRv.NoteJSContextException(aCx);
+ return;
+ }
+ }
+ JS::ExposeObjectToActiveJS(mSittingToStandingTransformArray);
+ aRetval.set(mSittingToStandingTransformArray);
+}
+
+NS_IMPL_CYCLE_COLLECTION_WRAPPERCACHE(VRDisplayCapabilities, mParent)
+NS_IMPL_CYCLE_COLLECTION_ROOT_NATIVE(VRDisplayCapabilities, AddRef)
+NS_IMPL_CYCLE_COLLECTION_UNROOT_NATIVE(VRDisplayCapabilities, Release)
+
+JSObject*
+VRDisplayCapabilities::WrapObject(JSContext* aCx, JS::Handle<JSObject*> aGivenProto)
+{
+ return VRDisplayCapabilitiesBinding::Wrap(aCx, this, aGivenProto);
+}
+
+NS_IMPL_CYCLE_COLLECTION_CLASS(VRPose)
+
+NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN(VRPose)
+ NS_IMPL_CYCLE_COLLECTION_UNLINK(mParent)
+ NS_IMPL_CYCLE_COLLECTION_UNLINK_PRESERVED_WRAPPER
+ tmp->mPosition = nullptr;
+ tmp->mLinearVelocity = nullptr;
+ tmp->mLinearAcceleration = nullptr;
+ tmp->mOrientation = nullptr;
+ tmp->mAngularVelocity = nullptr;
+ tmp->mAngularAcceleration = nullptr;
+NS_IMPL_CYCLE_COLLECTION_UNLINK_END
+
+NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN(VRPose)
+ NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mParent)
+ NS_IMPL_CYCLE_COLLECTION_TRAVERSE_SCRIPT_OBJECTS
+NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END
+
+NS_IMPL_CYCLE_COLLECTION_TRACE_BEGIN(VRPose)
+ NS_IMPL_CYCLE_COLLECTION_TRACE_PRESERVED_WRAPPER
+ NS_IMPL_CYCLE_COLLECTION_TRACE_JS_MEMBER_CALLBACK(mPosition)
+ NS_IMPL_CYCLE_COLLECTION_TRACE_JS_MEMBER_CALLBACK(mLinearVelocity)
+ NS_IMPL_CYCLE_COLLECTION_TRACE_JS_MEMBER_CALLBACK(mLinearAcceleration)
+ NS_IMPL_CYCLE_COLLECTION_TRACE_JS_MEMBER_CALLBACK(mOrientation)
+ NS_IMPL_CYCLE_COLLECTION_TRACE_JS_MEMBER_CALLBACK(mAngularVelocity)
+ NS_IMPL_CYCLE_COLLECTION_TRACE_JS_MEMBER_CALLBACK(mAngularAcceleration)
+NS_IMPL_CYCLE_COLLECTION_TRACE_END
+
+NS_IMPL_CYCLE_COLLECTION_ROOT_NATIVE(VRPose, AddRef)
+NS_IMPL_CYCLE_COLLECTION_UNROOT_NATIVE(VRPose, Release)
+
+VRPose::VRPose(nsISupports* aParent, const gfx::VRHMDSensorState& aState)
: mParent(aParent)
, mVRState(aState)
+ , mPosition(nullptr)
+ , mLinearVelocity(nullptr)
+ , mLinearAcceleration(nullptr)
+ , mOrientation(nullptr)
+ , mAngularVelocity(nullptr)
+ , mAngularAcceleration(nullptr)
{
- mTimeStamp = aState.timestamp;
-
- if (aState.flags & gfx::VRDisplayCapabilityFlags::Cap_Position) {
- mPosition = new DOMPoint(mParent, aState.position[0], aState.position[1], aState.position[2], 0.0);
- }
-
- if (aState.flags & gfx::VRDisplayCapabilityFlags::Cap_Orientation) {
- mOrientation = new DOMPoint(mParent, aState.orientation[0], aState.orientation[1], aState.orientation[2], aState.orientation[3]);
- }
+ mTimeStamp = aState.timestamp * 1000.0f; // Converting from seconds to ms
+ mFrameId = aState.inputFrameID;
}
-DOMPoint*
-VRPositionState::GetLinearVelocity()
+void
+VRPose::GetPosition(JSContext* aCx,
+ JS::MutableHandle<JSObject*> aRetval,
+ ErrorResult& aRv)
{
- if (!mLinearVelocity) {
- mLinearVelocity = new DOMPoint(mParent, mVRState.linearVelocity[0], mVRState.linearVelocity[1], mVRState.linearVelocity[2], 0.0);
+ if (!mPosition && mVRState.flags & gfx::VRDisplayCapabilityFlags::Cap_Position) {
+ // Lazily create the Float32Array
+ mPosition = dom::Float32Array::Create(aCx, this, 3, mVRState.position);
+ if (!mPosition) {
+ aRv.NoteJSContextException(aCx);
+ return;
+ }
+ }
+ if (mPosition) {
+ JS::ExposeObjectToActiveJS(mPosition);
}
- return mLinearVelocity;
+ aRetval.set(mPosition);
+}
+
+void
+VRPose::GetLinearVelocity(JSContext* aCx,
+ JS::MutableHandle<JSObject*> aRetval,
+ ErrorResult& aRv)
+{
+ if (!mLinearVelocity && mVRState.flags & gfx::VRDisplayCapabilityFlags::Cap_Position) {
+ // Lazily create the Float32Array
+ mLinearVelocity = dom::Float32Array::Create(aCx, this, 3, mVRState.linearVelocity);
+ if (!mLinearVelocity) {
+ aRv.NoteJSContextException(aCx);
+ return;
+ }
+ }
+ if (mLinearVelocity) {
+ JS::ExposeObjectToActiveJS(mLinearVelocity);
+ }
+ aRetval.set(mLinearVelocity);
}
-DOMPoint*
-VRPositionState::GetLinearAcceleration()
+void
+VRPose::GetLinearAcceleration(JSContext* aCx,
+ JS::MutableHandle<JSObject*> aRetval,
+ ErrorResult& aRv)
{
- if (!mLinearAcceleration) {
- mLinearAcceleration = new DOMPoint(mParent, mVRState.linearAcceleration[0], mVRState.linearAcceleration[1], mVRState.linearAcceleration[2], 0.0);
+ if (!mLinearAcceleration && mVRState.flags & gfx::VRDisplayCapabilityFlags::Cap_Position) {
+ // Lazily create the Float32Array
+ mLinearAcceleration = dom::Float32Array::Create(aCx, this, 3, mVRState.linearAcceleration);
+ if (!mLinearAcceleration) {
+ aRv.NoteJSContextException(aCx);
+ return;
+ }
}
- return mLinearAcceleration;
+ if (mLinearAcceleration) {
+ JS::ExposeObjectToActiveJS(mLinearAcceleration);
+ }
+ aRetval.set(mLinearAcceleration);
}
-DOMPoint*
-VRPositionState::GetAngularVelocity()
+void
+VRPose::GetOrientation(JSContext* aCx,
+ JS::MutableHandle<JSObject*> aRetval,
+ ErrorResult& aRv)
{
- if (!mAngularVelocity) {
- mAngularVelocity = new DOMPoint(mParent, mVRState.angularVelocity[0], mVRState.angularVelocity[1], mVRState.angularVelocity[2], 0.0);
+ if (!mOrientation && mVRState.flags & gfx::VRDisplayCapabilityFlags::Cap_Orientation) {
+ // Lazily create the Float32Array
+ mOrientation = dom::Float32Array::Create(aCx, this, 4, mVRState.orientation);
+ if (!mOrientation) {
+ aRv.NoteJSContextException(aCx);
+ return;
+ }
}
- return mAngularVelocity;
+ if (mOrientation) {
+ JS::ExposeObjectToActiveJS(mOrientation);
+ }
+ aRetval.set(mOrientation);
}
-DOMPoint*
-VRPositionState::GetAngularAcceleration()
+void
+VRPose::GetAngularVelocity(JSContext* aCx,
+ JS::MutableHandle<JSObject*> aRetval,
+ ErrorResult& aRv)
{
- if (!mAngularAcceleration) {
- mAngularAcceleration = new DOMPoint(mParent, mVRState.angularAcceleration[0], mVRState.angularAcceleration[1], mVRState.angularAcceleration[2], 0.0);
+ if (!mAngularVelocity && mVRState.flags & gfx::VRDisplayCapabilityFlags::Cap_Orientation) {
+ // Lazily create the Float32Array
+ mAngularVelocity = dom::Float32Array::Create(aCx, this, 3, mVRState.angularVelocity);
+ if (!mAngularVelocity) {
+ aRv.NoteJSContextException(aCx);
+ return;
+ }
+ }
+ if (mAngularVelocity) {
+ JS::ExposeObjectToActiveJS(mAngularVelocity);
}
- return mAngularAcceleration;
+ aRetval.set(mAngularVelocity);
+}
+
+void
+VRPose::GetAngularAcceleration(JSContext* aCx,
+ JS::MutableHandle<JSObject*> aRetval,
+ ErrorResult& aRv)
+{
+ if (!mAngularAcceleration && mVRState.flags & gfx::VRDisplayCapabilityFlags::Cap_Orientation) {
+ // Lazily create the Float32Array
+ mAngularAcceleration = dom::Float32Array::Create(aCx, this, 3, mVRState.angularAcceleration);
+ if (!mAngularAcceleration) {
+ aRv.NoteJSContextException(aCx);
+ return;
+ }
+ }
+ if (mAngularAcceleration) {
+ JS::ExposeObjectToActiveJS(mAngularAcceleration);
+ }
+ aRetval.set(mAngularAcceleration);
}
JSObject*
-VRPositionState::WrapObject(JSContext* aCx, JS::Handle<JSObject*> aGivenProto)
+VRPose::WrapObject(JSContext* aCx, JS::Handle<JSObject*> aGivenProto)
{
- return VRPositionStateBinding::Wrap(aCx, this, aGivenProto);
-}
-
-NS_IMPL_CYCLE_COLLECTING_ADDREF(VRDisplay)
-NS_IMPL_CYCLE_COLLECTING_RELEASE(VRDisplay)
-
-NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(VRDisplay)
- NS_WRAPPERCACHE_INTERFACE_MAP_ENTRY
- NS_INTERFACE_MAP_ENTRY(nsISupports)
-NS_INTERFACE_MAP_END
-
-NS_IMPL_CYCLE_COLLECTION_WRAPPERCACHE(VRDisplay, mParent)
-
-/* virtual */ JSObject*
-HMDVRDisplay::WrapObject(JSContext* aCx, JS::Handle<JSObject*> aGivenProto)
-{
- return HMDVRDisplayBinding::Wrap(aCx, this, aGivenProto);
+ return VRPoseBinding::Wrap(aCx, this, aGivenProto);
}
/* virtual */ JSObject*
-PositionSensorVRDisplay::WrapObject(JSContext* aCx, JS::Handle<JSObject*> aGivenProto)
+VRDisplay::WrapObject(JSContext* aCx, JS::Handle<JSObject*> aGivenProto)
{
- return PositionSensorVRDisplayBinding::Wrap(aCx, this, aGivenProto);
+ return VRDisplayBinding::Wrap(aCx, this, aGivenProto);
}
-HMDInfoVRDisplay::HMDInfoVRDisplay(nsISupports* aParent, gfx::VRDisplayProxy* aHMD)
- : HMDVRDisplay(aParent, aHMD)
+VRDisplay::VRDisplay(nsPIDOMWindowInner* aWindow, gfx::VRDisplayClient* aClient)
+ : DOMEventTargetHelper(aWindow)
+ , mClient(aClient)
+ , mDepthNear(0.01f) // Default value from WebVR Spec
+ , mDepthFar(10000.0f) // Default value from WebVR Spec
{
- MOZ_COUNT_CTOR_INHERITED(HMDInfoVRDisplay, HMDVRDisplay);
- uint64_t hmdid = aHMD->GetDeviceInfo().GetDeviceID() << 8;
- uint64_t devid = hmdid | 0x00; // we generate a devid with low byte 0 for the HMD, 1 for the position sensor
-
- mHWID.Truncate();
- mHWID.AppendPrintf("0x%llx", hmdid);
-
- mDeviceId.Truncate();
- mDeviceId.AppendPrintf("0x%llx", devid);
-
- mDeviceName.Truncate();
- mDeviceName.Append(NS_ConvertASCIItoUTF16(aHMD->GetDeviceInfo().GetDeviceName()));
- mDeviceName.AppendLiteral(" (HMD)");
-
- mValid = true;
-}
-
-HMDInfoVRDisplay::~HMDInfoVRDisplay()
-{
- MOZ_COUNT_DTOR_INHERITED(HMDInfoVRDisplay, HMDVRDisplay);
+ MOZ_COUNT_CTOR(VRDisplay);
+ mDisplayId = aClient->GetDisplayInfo().GetDisplayID();
+ mDisplayName = NS_ConvertASCIItoUTF16(aClient->GetDisplayInfo().GetDisplayName());
+ mCapabilities = new VRDisplayCapabilities(aWindow, aClient->GetDisplayInfo().GetCapabilities());
}
-/* If a field of view that is set to all 0's is passed in,
- * the recommended field of view for that eye is used.
- */
-void
-HMDInfoVRDisplay::SetFieldOfView(const VRFieldOfViewInit& aLeftFOV,
- const VRFieldOfViewInit& aRightFOV,
- double zNear, double zFar)
+VRDisplay::~VRDisplay()
{
- gfx::VRFieldOfView left = gfx::VRFieldOfView(aLeftFOV.mUpDegrees, aLeftFOV.mRightDegrees,
- aLeftFOV.mDownDegrees, aLeftFOV.mLeftDegrees);
- gfx::VRFieldOfView right = gfx::VRFieldOfView(aRightFOV.mUpDegrees, aRightFOV.mRightDegrees,
- aRightFOV.mDownDegrees, aRightFOV.mLeftDegrees);
-
- if (left.IsZero()) {
- left = mHMD->GetDeviceInfo().GetRecommendedEyeFOV(VRDisplayInfo::Eye_Left);
- }
-
- if (right.IsZero()) {
- right = mHMD->GetDeviceInfo().GetRecommendedEyeFOV(VRDisplayInfo::Eye_Right);
- }
-
- mHMD->SetFOV(left, right, zNear, zFar);
+ ExitPresentInternal();
+ MOZ_COUNT_DTOR(VRDisplay);
}
-already_AddRefed<VREyeParameters> HMDInfoVRDisplay::GetEyeParameters(VREye aEye)
+void
+VRDisplay::LastRelease()
{
- gfx::IntSize sz(mHMD->GetDeviceInfo().SuggestedEyeResolution());
+ // We don't want to wait for the CC to free up the presentation
+ // for use in other documents, so we do this in LastRelease().
+ ExitPresentInternal();
+}
+
+already_AddRefed<VREyeParameters>
+VRDisplay::GetEyeParameters(VREye aEye)
+{
gfx::VRDisplayInfo::Eye eye = aEye == VREye::Left ? gfx::VRDisplayInfo::Eye_Left : gfx::VRDisplayInfo::Eye_Right;
RefPtr<VREyeParameters> params =
- new VREyeParameters(mParent,
- gfx::VRFieldOfView(15, 15, 15, 15), // XXX min?
- mHMD->GetDeviceInfo().GetMaximumEyeFOV(eye),
- mHMD->GetDeviceInfo().GetRecommendedEyeFOV(eye),
- mHMD->GetDeviceInfo().GetEyeTranslation(eye),
- mHMD->GetDeviceInfo().GetEyeFOV(eye),
- gfx::IntRect((aEye == VREye::Left) ? 0 : sz.width, 0, sz.width, sz.height));
+ new VREyeParameters(GetParentObject(),
+ mClient->GetDisplayInfo().GetEyeTranslation(eye),
+ mClient->GetDisplayInfo().GetEyeFOV(eye),
+ mClient->GetDisplayInfo().SuggestedEyeResolution());
return params.forget();
}
-HMDPositionVRDisplay::HMDPositionVRDisplay(nsISupports* aParent, gfx::VRDisplayProxy* aHMD)
- : PositionSensorVRDisplay(aParent, aHMD)
+VRDisplayCapabilities*
+VRDisplay::Capabilities()
{
- MOZ_COUNT_CTOR_INHERITED(HMDPositionVRDisplay, PositionSensorVRDisplay);
-
- uint64_t hmdid = aHMD->GetDeviceInfo().GetDeviceID() << 8;
- uint64_t devid = hmdid | 0x01; // we generate a devid with low byte 0 for the HMD, 1 for the position sensor
-
- mHWID.Truncate();
- mHWID.AppendPrintf("0x%llx", hmdid);
-
- mDeviceId.Truncate();
- mDeviceId.AppendPrintf("0x%llx", devid);
-
- mDeviceName.Truncate();
- mDeviceName.Append(NS_ConvertASCIItoUTF16(aHMD->GetDeviceInfo().GetDeviceName()));
- mDeviceName.AppendLiteral(" (Sensor)");
-
- mValid = true;
+ return mCapabilities;
}
-HMDPositionVRDisplay::~HMDPositionVRDisplay()
+VRStageParameters*
+VRDisplay::GetStageParameters()
{
- MOZ_COUNT_DTOR_INHERITED(HMDPositionVRDisplay, PositionSensorVRDisplay);
+ // XXX When we implement room scale experiences for OpenVR, we should return
+ // something here.
+ return nullptr;
}
-already_AddRefed<VRPositionState>
-HMDPositionVRDisplay::GetState()
+already_AddRefed<VRPose>
+VRDisplay::GetPose()
{
- gfx::VRHMDSensorState state = mHMD->GetSensorState();
- RefPtr<VRPositionState> obj = new VRPositionState(mParent, state);
+ gfx::VRHMDSensorState state = mClient->GetSensorState();
+ RefPtr<VRPose> obj = new VRPose(GetParentObject(), state);
return obj.forget();
}
-already_AddRefed<VRPositionState>
-HMDPositionVRDisplay::GetImmediateState()
+already_AddRefed<VRPose>
+VRDisplay::GetImmediatePose()
{
- gfx::VRHMDSensorState state = mHMD->GetImmediateSensorState();
- RefPtr<VRPositionState> obj = new VRPositionState(mParent, state);
+ gfx::VRHMDSensorState state = mClient->GetImmediateSensorState();
+ RefPtr<VRPose> obj = new VRPose(GetParentObject(), state);
return obj.forget();
}
void
-HMDPositionVRDisplay::ResetSensor()
+VRDisplay::ResetPose()
+{
+ mClient->ZeroSensor();
+}
+
+already_AddRefed<Promise>
+VRDisplay::RequestPresent(const nsTArray<VRLayer>& aLayers, ErrorResult& aRv)
+{
+ nsCOMPtr<nsIGlobalObject> global = do_QueryInterface(GetParentObject());
+ if (!global) {
+ aRv.Throw(NS_ERROR_FAILURE);
+ return nullptr;
+ }
+
+ RefPtr<Promise> promise = Promise::Create(global, aRv);
+ NS_ENSURE_TRUE(!aRv.Failed(), nullptr);
+
+ nsCOMPtr<nsIObserverService> obs = services::GetObserverService();
+ NS_ENSURE_TRUE(obs, nullptr);
+
+ if (IsPresenting()) {
+ // Only one presentation allowed per VRDisplay
+ // on a first-come-first-serve basis.
+ promise->MaybeRejectWithUndefined();
+ } else {
+ mPresentation = mClient->BeginPresentation(aLayers);
+
+ nsresult rv = obs->AddObserver(this, "inner-window-destroyed", false);
+ if (NS_WARN_IF(NS_FAILED(rv))) {
+ mPresentation = nullptr;
+ promise->MaybeRejectWithUndefined();
+ } else {
+ promise->MaybeResolve(JS::UndefinedHandleValue);
+ }
+ }
+ return promise.forget();
+}
+
+NS_IMETHODIMP
+VRDisplay::Observe(nsISupports* aSubject, const char* aTopic,
+ const char16_t* aData)
+{
+ MOZ_ASSERT(NS_IsMainThread());
+
+ if (strcmp(aTopic, "inner-window-destroyed") == 0) {
+ nsCOMPtr<nsISupportsPRUint64> wrapper = do_QueryInterface(aSubject);
+ NS_ENSURE_TRUE(wrapper, NS_ERROR_FAILURE);
+
+ uint64_t innerID;
+ nsresult rv = wrapper->GetData(&innerID);
+ NS_ENSURE_SUCCESS(rv, rv);
+
+ if (!GetOwner() || GetOwner()->WindowID() == innerID) {
+ ExitPresentInternal();
+ }
+
+ return NS_OK;
+ }
+
+ // This should not happen.
+ return NS_ERROR_FAILURE;
+}
+
+already_AddRefed<Promise>
+VRDisplay::ExitPresent(ErrorResult& aRv)
{
- mHMD->ZeroSensor();
+ nsCOMPtr<nsIGlobalObject> global = do_QueryInterface(GetParentObject());
+ if (!global) {
+ aRv.Throw(NS_ERROR_FAILURE);
+ return nullptr;
+ }
+ ExitPresentInternal();
+
+ RefPtr<Promise> promise = Promise::Create(global, aRv);
+ NS_ENSURE_TRUE(!aRv.Failed(), nullptr);
+
+ promise->MaybeResolve(JS::UndefinedHandleValue);
+ return promise.forget();
+}
+
+void
+VRDisplay::ExitPresentInternal()
+{
+ mPresentation = nullptr;
+}
+
+void
+VRDisplay::GetLayers(nsTArray<VRLayer>& result)
+{
+ if (mPresentation) {
+ mPresentation->GetDOMLayers(result);
+ } else {
+ result = nsTArray<VRLayer>();
+ }
+}
+
+void
+VRDisplay::SubmitFrame(const Optional<NonNull<VRPose>>& aPose)
+{
+ if (mPresentation) {
+ if (aPose.WasPassed()) {
+ mPresentation->SubmitFrame(aPose.Value().FrameID());
+ } else {
+ mPresentation->SubmitFrame(0);
+ }
+ }
}
+int32_t
+VRDisplay::RequestAnimationFrame(FrameRequestCallback& aCallback,
+ErrorResult& aError)
+{
+ gfx::VRManagerChild* vm = gfx::VRManagerChild::Get();
+
+ int32_t handle;
+ aError = vm->ScheduleFrameRequestCallback(aCallback, &handle);
+ return handle;
+}
+
+void
+VRDisplay::CancelAnimationFrame(int32_t aHandle, ErrorResult& aError)
+{
+ gfx::VRManagerChild* vm = gfx::VRManagerChild::Get();
+ vm->CancelFrameRequestCallback(aHandle);
+}
+
+
+bool
+VRDisplay::IsPresenting() const
+{
+ return mClient->GetIsPresenting();
+}
+
+bool
+VRDisplay::IsConnected() const
+{
+ return mClient->GetIsConnected();
+}
+
+NS_IMPL_CYCLE_COLLECTION_INHERITED(VRDisplay, DOMEventTargetHelper, mCapabilities)
+
+NS_IMPL_ADDREF_INHERITED(VRDisplay, DOMEventTargetHelper)
+NS_IMPL_RELEASE_INHERITED(VRDisplay, DOMEventTargetHelper)
+
+NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION_INHERITED(VRDisplay)
+NS_INTERFACE_MAP_ENTRY(nsIObserver)
+NS_INTERFACE_MAP_ENTRY_AMBIGUOUS(nsISupports, DOMEventTargetHelper)
+NS_INTERFACE_MAP_END_INHERITING(DOMEventTargetHelper)
+
} // namespace dom
} // namespace mozilla
--- a/dom/vr/VRDisplay.h
+++ b/dom/vr/VRDisplay.h
@@ -7,311 +7,296 @@
#ifndef mozilla_dom_VRDisplay_h_
#define mozilla_dom_VRDisplay_h_
#include <stdint.h>
#include "mozilla/ErrorResult.h"
#include "mozilla/dom/TypedArray.h"
#include "mozilla/dom/VRDisplayBinding.h"
+#include "mozilla/DOMEventTargetHelper.h"
#include "mozilla/dom/DOMPoint.h"
#include "mozilla/dom/DOMRect.h"
#include "nsCOMPtr.h"
#include "nsString.h"
#include "nsTArray.h"
#include "nsWrapperCache.h"
#include "gfxVR.h"
-#include "VRDisplayProxy.h"
namespace mozilla {
+namespace gfx {
+class VRDisplayClient;
+class VRDisplayPresentation;
+struct VRFieldOfView;
+enum class VRDisplayCapabilityFlags : uint16_t;
+struct VRHMDSensorState;
+}
namespace dom {
class Navigator;
-class VRFieldOfViewReadOnly : public nsWrapperCache
+class VRFieldOfView final : public nsWrapperCache
{
public:
- VRFieldOfViewReadOnly(nsISupports* aParent,
- double aUpDegrees, double aRightDegrees,
- double aDownDegrees, double aLeftDegrees)
- : mParent(aParent)
- , mUpDegrees(aUpDegrees)
- , mRightDegrees(aRightDegrees)
- , mDownDegrees(aDownDegrees)
- , mLeftDegrees(aLeftDegrees)
- {
- }
+ VRFieldOfView(nsISupports* aParent,
+ double aUpDegrees, double aRightDegrees,
+ double aDownDegrees, double aLeftDegrees);
+ VRFieldOfView(nsISupports* aParent, const gfx::VRFieldOfView& aSrc);
- NS_INLINE_DECL_CYCLE_COLLECTING_NATIVE_REFCOUNTING(VRFieldOfViewReadOnly)
- NS_DECL_CYCLE_COLLECTION_SCRIPT_HOLDER_NATIVE_CLASS(VRFieldOfViewReadOnly)
+ NS_INLINE_DECL_CYCLE_COLLECTING_NATIVE_REFCOUNTING(VRFieldOfView)
+ NS_DECL_CYCLE_COLLECTION_SCRIPT_HOLDER_NATIVE_CLASS(VRFieldOfView)
double UpDegrees() const { return mUpDegrees; }
double RightDegrees() const { return mRightDegrees; }
double DownDegrees() const { return mDownDegrees; }
double LeftDegrees() const { return mLeftDegrees; }
nsISupports* GetParentObject() const { return mParent; }
virtual JSObject* WrapObject(JSContext* aCx, JS::Handle<JSObject*> aGivenProto) override;
protected:
- virtual ~VRFieldOfViewReadOnly() {}
+ virtual ~VRFieldOfView() {}
nsCOMPtr<nsISupports> mParent;
double mUpDegrees;
double mRightDegrees;
double mDownDegrees;
double mLeftDegrees;
};
-class VRFieldOfView final : public VRFieldOfViewReadOnly
+class VRDisplayCapabilities final : public nsWrapperCache
{
public:
- VRFieldOfView(nsISupports* aParent, const gfx::VRFieldOfView& aSrc)
- : VRFieldOfViewReadOnly(aParent,
- aSrc.upDegrees, aSrc.rightDegrees,
- aSrc.downDegrees, aSrc.leftDegrees)
- {}
+ VRDisplayCapabilities(nsISupports* aParent, const gfx::VRDisplayCapabilityFlags& aFlags)
+ : mParent(aParent)
+ , mFlags(aFlags)
+ {
+ }
- explicit VRFieldOfView(nsISupports* aParent,
- double aUpDegrees = 0.0, double aRightDegrees = 0.0,
- double aDownDegrees = 0.0, double aLeftDegrees = 0.0)
- : VRFieldOfViewReadOnly(aParent,
- aUpDegrees, aRightDegrees, aDownDegrees, aLeftDegrees)
- {}
+ NS_INLINE_DECL_CYCLE_COLLECTING_NATIVE_REFCOUNTING(VRDisplayCapabilities)
+ NS_DECL_CYCLE_COLLECTION_SCRIPT_HOLDER_NATIVE_CLASS(VRDisplayCapabilities)
- static already_AddRefed<VRFieldOfView>
- Constructor(const GlobalObject& aGlobal, const VRFieldOfViewInit& aParams,
- ErrorResult& aRv);
-
- static already_AddRefed<VRFieldOfView>
- Constructor(const GlobalObject& aGlobal,
- double aUpDegrees, double aRightDegrees,
- double aDownDegrees, double aLeftDegrees,
- ErrorResult& aRv);
+ nsISupports* GetParentObject() const
+ {
+ return mParent;
+ }
virtual JSObject* WrapObject(JSContext* aCx, JS::Handle<JSObject*> aGivenProto) override;
- void SetUpDegrees(double aVal) { mUpDegrees = aVal; }
- void SetRightDegrees(double aVal) { mRightDegrees = aVal; }
- void SetDownDegrees(double aVal) { mDownDegrees = aVal; }
- void SetLeftDegrees(double aVal) { mLeftDegrees = aVal; }
+ bool HasPosition() const;
+ bool HasOrientation() const;
+ bool HasExternalDisplay() const;
+ bool CanPresent() const;
+ uint32_t MaxLayers() const;
+
+protected:
+ ~VRDisplayCapabilities() {}
+ nsCOMPtr<nsISupports> mParent;
+ gfx::VRDisplayCapabilityFlags mFlags;
};
-class VRPositionState final : public nsWrapperCache
+class VRPose final : public nsWrapperCache
{
- ~VRPositionState() {}
+
public:
- VRPositionState(nsISupports* aParent, const gfx::VRHMDSensorState& aState);
+ VRPose(nsISupports* aParent, const gfx::VRHMDSensorState& aState);
- NS_INLINE_DECL_CYCLE_COLLECTING_NATIVE_REFCOUNTING(VRPositionState)
- NS_DECL_CYCLE_COLLECTION_SCRIPT_HOLDER_NATIVE_CLASS(VRPositionState)
+ NS_INLINE_DECL_CYCLE_COLLECTING_NATIVE_REFCOUNTING(VRPose)
+ NS_DECL_CYCLE_COLLECTION_SCRIPT_HOLDER_NATIVE_CLASS(VRPose)
- double TimeStamp() const { return mTimeStamp; }
+ double Timestamp() const { return mTimeStamp; }
+ uint32_t FrameID() const { return mFrameId; }
- bool HasPosition() const { return mPosition != nullptr; }
- DOMPoint* GetPosition() const { return mPosition; }
-
- bool HasOrientation() const { return mOrientation != nullptr; }
- DOMPoint* GetOrientation() const { return mOrientation; }
-
- // these are created lazily
- DOMPoint* GetLinearVelocity();
- DOMPoint* GetLinearAcceleration();
- DOMPoint* GetAngularVelocity();
- DOMPoint* GetAngularAcceleration();
+ void GetPosition(JSContext* aCx,
+ JS::MutableHandle<JSObject*> aRetval,
+ ErrorResult& aRv);
+ void GetLinearVelocity(JSContext* aCx,
+ JS::MutableHandle<JSObject*> aRetval,
+ ErrorResult& aRv);
+ void GetLinearAcceleration(JSContext* aCx,
+ JS::MutableHandle<JSObject*> aRetval,
+ ErrorResult& aRv);
+ void GetOrientation(JSContext* aCx,
+ JS::MutableHandle<JSObject*> aRetval,
+ ErrorResult& aRv);
+ void GetAngularVelocity(JSContext* aCx,
+ JS::MutableHandle<JSObject*> aRetval,
+ ErrorResult& aRv);
+ void GetAngularAcceleration(JSContext* aCx,
+ JS::MutableHandle<JSObject*> aRetval,
+ ErrorResult& aRv);
nsISupports* GetParentObject() const { return mParent; }
virtual JSObject* WrapObject(JSContext* aCx, JS::Handle<JSObject*> aGivenProto) override;
protected:
+ ~VRPose() {}
nsCOMPtr<nsISupports> mParent;
double mTimeStamp;
+ uint32_t mFrameId;
gfx::VRHMDSensorState mVRState;
- RefPtr<DOMPoint> mPosition;
- RefPtr<DOMPoint> mLinearVelocity;
- RefPtr<DOMPoint> mLinearAcceleration;
+ JS::Heap<JSObject*> mPosition;
+ JS::Heap<JSObject*> mLinearVelocity;
+ JS::Heap<JSObject*> mLinearAcceleration;
+ JS::Heap<JSObject*> mOrientation;
+ JS::Heap<JSObject*> mAngularVelocity;
+ JS::Heap<JSObject*> mAngularAcceleration;
+
+};
- RefPtr<DOMPoint> mOrientation;
- RefPtr<DOMPoint> mAngularVelocity;
- RefPtr<DOMPoint> mAngularAcceleration;
+class VRStageParameters final : public nsWrapperCache
+{
+public:
+ VRStageParameters(nsISupports* aParent,
+ const gfx::Matrix4x4& aSittingToStandingTransform,
+ const gfx::Size& aSize)
+ : mParent(aParent)
+ , mSittingToStandingTransform(aSittingToStandingTransform)
+ , mSittingToStandingTransformArray(nullptr)
+ , mSize(aSize)
+ {
+ }
+
+ NS_INLINE_DECL_CYCLE_COLLECTING_NATIVE_REFCOUNTING(VRStageParameters)
+ NS_DECL_CYCLE_COLLECTION_SCRIPT_HOLDER_NATIVE_CLASS(VRStageParameters)
+
+ void GetSittingToStandingTransform(JSContext* aCx,
+ JS::MutableHandle<JSObject*> aRetval,
+ ErrorResult& aRv);
+ float SizeX() const { return mSize.width; }
+ float SizeZ() const { return mSize.height; }
+
+ nsISupports* GetParentObject() const { return mParent; }
+ virtual JSObject* WrapObject(JSContext* aCx, JS::Handle<JSObject*> aGivenProto) override;
+
+protected:
+ ~VRStageParameters() {}
+
+ nsCOMPtr<nsISupports> mParent;
+
+ gfx::Matrix4x4 mSittingToStandingTransform;
+ JS::Heap<JSObject*> mSittingToStandingTransformArray;
+ gfx::Size mSize;
};
class VREyeParameters final : public nsWrapperCache
{
public:
VREyeParameters(nsISupports* aParent,
- const gfx::VRFieldOfView& aMinFOV,
- const gfx::VRFieldOfView& aMaxFOV,
- const gfx::VRFieldOfView& aRecFOV,
const gfx::Point3D& aEyeTranslation,
- const gfx::VRFieldOfView& aCurFOV,
- const gfx::IntRect& aRenderRect);
+ const gfx::VRFieldOfView& aFOV,
+ const gfx::IntSize& aRenderSize);
NS_INLINE_DECL_CYCLE_COLLECTING_NATIVE_REFCOUNTING(VREyeParameters)
NS_DECL_CYCLE_COLLECTION_SCRIPT_HOLDER_NATIVE_CLASS(VREyeParameters)
- VRFieldOfView* MinimumFieldOfView();
- VRFieldOfView* MaximumFieldOfView();
- VRFieldOfView* RecommendedFieldOfView();
- DOMPoint* EyeTranslation();
+ void GetOffset(JSContext* aCx, JS::MutableHandle<JSObject*> aRetVal,
+ ErrorResult& aRv);
- VRFieldOfView* CurrentFieldOfView();
- DOMRect* RenderRect();
+ VRFieldOfView* FieldOfView();
+
+ uint32_t RenderWidth() const { return mRenderSize.width; }
+ uint32_t RenderHeight() const { return mRenderSize.height; }
nsISupports* GetParentObject() const { return mParent; }
virtual JSObject* WrapObject(JSContext* aCx, JS::Handle<JSObject*> aGivenProto) override;
protected:
~VREyeParameters() {}
nsCOMPtr<nsISupports> mParent;
- RefPtr<VRFieldOfView> mMinFOV;
- RefPtr<VRFieldOfView> mMaxFOV;
- RefPtr<VRFieldOfView> mRecFOV;
- RefPtr<DOMPoint> mEyeTranslation;
- RefPtr<VRFieldOfView> mCurFOV;
- RefPtr<DOMRect> mRenderRect;
+
+ gfx::Point3D mEyeTranslation;
+ gfx::IntSize mRenderSize;
+ JS::Heap<JSObject*> mOffset;
+ RefPtr<VRFieldOfView> mFOV;
};
-class VRDisplay : public nsISupports,
- public nsWrapperCache
+class VRDisplay final : public DOMEventTargetHelper
+ , public nsIObserver
{
public:
-
- NS_DECL_CYCLE_COLLECTING_ISUPPORTS
- NS_DECL_CYCLE_COLLECTION_SCRIPT_HOLDER_CLASS(VRDisplay)
-
- void GetHardwareUnitId(nsAString& aHWID) const { aHWID = mHWID; }
- void GetDeviceId(nsAString& aDeviceId) const { aDeviceId = mDeviceId; }
- void GetDeviceName(nsAString& aDeviceName) const { aDeviceName = mDeviceName; }
-
- bool IsValid() { return mValid; }
-
- nsISupports* GetParentObject() const
- {
- return mParent;
- }
-
- enum VRDisplayType {
- HMD,
- PositionSensor
- };
-
- VRDisplayType GetType() const { return mType; }
-
- static bool RefreshVRDisplays(dom::Navigator* aNavigator);
- static void UpdateVRDisplays(nsTArray<RefPtr<VRDisplay> >& aDevices,
- nsISupports* aParent);
-
- gfx::VRDisplayProxy *GetHMD() {
- return mHMD;
- }
-
-protected:
- VRDisplay(nsISupports* aParent,
- gfx::VRDisplayProxy* aHMD,
- VRDisplayType aType)
- : mParent(aParent)
- , mHMD(aHMD)
- , mType(aType)
- , mValid(false)
- {
- MOZ_COUNT_CTOR(VRDisplay);
- mHWID.AssignLiteral("uknown");
- mDeviceId.AssignLiteral("unknown");
- mDeviceName.AssignLiteral("unknown");
- }
-
- virtual ~VRDisplay()
- {
- MOZ_COUNT_DTOR(VRDisplay);
- }
-
- nsCOMPtr<nsISupports> mParent;
- RefPtr<gfx::VRDisplayProxy> mHMD;
- nsString mHWID;
- nsString mDeviceId;
- nsString mDeviceName;
-
- VRDisplayType mType;
-
- bool mValid;
-};
-
-class HMDVRDisplay : public VRDisplay
-{
-public:
- virtual already_AddRefed<VREyeParameters> GetEyeParameters(VREye aEye) = 0;
-
- virtual void SetFieldOfView(const VRFieldOfViewInit& aLeftFOV,
- const VRFieldOfViewInit& aRightFOV,
- double zNear, double zFar) = 0;
+ NS_DECL_ISUPPORTS_INHERITED
+ NS_DECL_NSIOBSERVER
+ NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(VRDisplay, DOMEventTargetHelper)
virtual JSObject* WrapObject(JSContext* aCx, JS::Handle<JSObject*> aGivenProto) override;
-protected:
- HMDVRDisplay(nsISupports* aParent, gfx::VRDisplayProxy* aHMD)
- : VRDisplay(aParent, aHMD, VRDisplay::HMD)
- {
- MOZ_COUNT_CTOR_INHERITED(HMDVRDisplay, VRDisplay);
+ bool IsPresenting() const;
+ bool IsConnected() const;
+
+ VRDisplayCapabilities* Capabilities();
+ VRStageParameters* GetStageParameters();
+
+ uint32_t DisplayId() const { return mDisplayId; }
+ void GetDisplayName(nsAString& aDisplayName) const { aDisplayName = mDisplayName; }
+
+ static bool RefreshVRDisplays(dom::Navigator* aNavigator);
+ static void UpdateVRDisplays(nsTArray<RefPtr<VRDisplay> >& aDisplays,
+ nsPIDOMWindowInner* aWindow);
+
+ gfx::VRDisplayClient *GetClient() {
+ return mClient;
+ }
+
+ virtual already_AddRefed<VREyeParameters> GetEyeParameters(VREye aEye);
+
+ already_AddRefed<VRPose> GetPose();
+ already_AddRefed<VRPose> GetImmediatePose();
+ void ResetPose();
+
+ double DepthNear() {
+ return mDepthNear;
+ }
+
+ double DepthFar() {
+ return mDepthFar;
}
- virtual ~HMDVRDisplay()
- {
- MOZ_COUNT_DTOR_INHERITED(HMDVRDisplay, VRDisplay);
+ void SetDepthNear(double aDepthNear) {
+ // XXX When we start sending depth buffers to VRLayers we will want
+ // to communicate this with the VRDisplayHost
+ mDepthNear = aDepthNear;
}
-};
-
-class HMDInfoVRDisplay : public HMDVRDisplay
-{
-public:
- HMDInfoVRDisplay(nsISupports* aParent, gfx::VRDisplayProxy* aHMD);
- virtual ~HMDInfoVRDisplay();
- /* If a field of view that is set to all 0's is passed in,
- * the recommended field of view for that eye is used.
- */
- virtual void SetFieldOfView(const VRFieldOfViewInit& aLeftFOV,
- const VRFieldOfViewInit& aRightFOV,
- double zNear, double zFar) override;
- virtual already_AddRefed<VREyeParameters> GetEyeParameters(VREye aEye) override;
-};
+ void SetDepthFar(double aDepthFar) {
+ // XXX When we start sending depth buffers to VRLayers we will want
+ // to communicate this with the VRDisplayHost
+ mDepthFar = aDepthFar;
+ }
-class PositionSensorVRDisplay : public VRDisplay
-{
-public:
- virtual already_AddRefed<VRPositionState> GetState() = 0;
- virtual already_AddRefed<VRPositionState> GetImmediateState() = 0;
- virtual void ResetSensor() = 0;
- virtual JSObject* WrapObject(JSContext* aCx, JS::Handle<JSObject*> aGivenProto) override;
+ already_AddRefed<Promise> RequestPresent(const nsTArray<VRLayer>& aLayers, ErrorResult& aRv);
+ already_AddRefed<Promise> ExitPresent(ErrorResult& aRv);
+ void GetLayers(nsTArray<VRLayer>& result);
+ void SubmitFrame(const Optional<NonNull<VRPose>>& aPose);
+
+ int32_t RequestAnimationFrame(mozilla::dom::FrameRequestCallback& aCallback,
+ mozilla::ErrorResult& aError);
+ void CancelAnimationFrame(int32_t aHandle, mozilla::ErrorResult& aError);
protected:
- explicit PositionSensorVRDisplay(nsISupports* aParent, gfx::VRDisplayProxy* aHMD)
- : VRDisplay(aParent, aHMD, VRDisplay::PositionSensor)
- {
- MOZ_COUNT_CTOR_INHERITED(PositionSensorVRDisplay, VRDisplay);
- }
+ VRDisplay(nsPIDOMWindowInner* aWindow, gfx::VRDisplayClient* aClient);
+ virtual ~VRDisplay();
+ virtual void LastRelease() override;
+
+ void ExitPresentInternal();
+
+ RefPtr<gfx::VRDisplayClient> mClient;
- virtual ~PositionSensorVRDisplay()
- {
- MOZ_COUNT_DTOR_INHERITED(PositionSensorVRDisplay, VRDisplay);
- }
-};
+ uint32_t mDisplayId;
+ nsString mDisplayName;
+
+ RefPtr<VRDisplayCapabilities> mCapabilities;
-class HMDPositionVRDisplay : public PositionSensorVRDisplay
-{
-public:
- HMDPositionVRDisplay(nsISupports* aParent, gfx::VRDisplayProxy* aHMD);
- ~HMDPositionVRDisplay();
+ double mDepthNear;
+ double mDepthFar;
- virtual already_AddRefed<VRPositionState> GetState() override;
- virtual already_AddRefed<VRPositionState> GetImmediateState() override;
- virtual void ResetSensor() override;
+ RefPtr<gfx::VRDisplayPresentation> mPresentation;
};
} // namespace dom
} // namespace mozilla
#endif
--- a/dom/webidl/VRDisplay.webidl
+++ b/dom/webidl/VRDisplay.webidl
@@ -5,126 +5,269 @@
enum VREye {
"left",
"right"
};
[Pref="dom.vr.enabled",
HeaderFile="mozilla/dom/VRDisplay.h"]
-interface VRFieldOfViewReadOnly {
+interface VRFieldOfView {
readonly attribute double upDegrees;
readonly attribute double rightDegrees;
readonly attribute double downDegrees;
readonly attribute double leftDegrees;
};
-[Pref="dom.vr.enabled",
- HeaderFile="mozilla/dom/VRDisplay.h",
- Constructor(optional VRFieldOfViewInit fov),
- Constructor(double upDegrees, double rightDegrees, double downDegrees, double leftDegrees)]
-interface VRFieldOfView : VRFieldOfViewReadOnly {
- inherit attribute double upDegrees;
- inherit attribute double rightDegrees;
- inherit attribute double downDegrees;
- inherit attribute double leftDegrees;
+typedef (HTMLCanvasElement or OffscreenCanvas) VRSource;
+
+dictionary VRLayer {
+ /**
+ * XXX - When WebVR in WebWorkers is implemented, HTMLCanvasElement below
+ * should be replaced with VRSource.
+ */
+ HTMLCanvasElement? source = null;
+
+ /**
+ * The left and right viewports contain 4 values defining the viewport
+ * rectangles within the canvas to present to the eye in UV space.
+ * [0] left offset of the viewport (0.0 - 1.0)
+ * [1] top offset of the viewport (0.0 - 1.0)
+ * [2] width of the viewport (0.0 - 1.0)
+ * [3] height of the viewport (0.0 - 1.0)
+ *
+ * When no values are passed, they will be processed as though the left
+ * and right sides of the viewport were passed:
+ *
+ * leftBounds: [0.0, 0.0, 0.5, 1.0]
+ * rightBounds: [0.5, 0.0, 0.5, 1.0]
+ */
+ sequence<float> leftBounds = [];
+ sequence<float> rightBounds = [];
};
-dictionary VRFieldOfViewInit {
- double upDegrees = 0.0;
- double rightDegrees = 0.0;
- double downDegrees = 0.0;
- double leftDegrees = 0.0;
+/**
+ * Values describing the capabilities of a VRDisplay.
+ * These are expected to be static per-device/per-user.
+ */
+[Pref="dom.vr.enabled",
+ HeaderFile="mozilla/dom/VRDisplay.h"]
+interface VRDisplayCapabilities {
+ /**
+ * hasPosition is true if the VRDisplay is capable of tracking its position.
+ */
+ readonly attribute boolean hasPosition;
+
+ /**
+ * hasOrientation is true if the VRDisplay is capable of tracking its orientation.
+ */
+ readonly attribute boolean hasOrientation;
+
+ /**
+ * Whether the VRDisplay is separate from the device’s
+ * primary display. If presenting VR content will obscure
+ * other content on the device, this should be false. When
+ * false, the application should not attempt to mirror VR content
+ * or update non-VR UI because that content will not be visible.
+ */
+ readonly attribute boolean hasExternalDisplay;
+
+ /**
+ * Whether the VRDisplay is capable of presenting content to an HMD or similar device.
+ * Can be used to indicate “magic window” devices that are capable of 6DoF tracking but for
+ * which requestPresent is not meaningful. If false then calls to requestPresent should
+ * always fail, and getEyeParameters should return null.
+ */
+ readonly attribute boolean canPresent;
+
+ /**
+ * Indicates the maximum length of the array that requestPresent() will accept. MUST be 1 if
+ canPresent is true, 0 otherwise.
+ */
+ readonly attribute unsigned long maxLayers;
+};
+
+/**
+ * Values describing the stage / play area for devices
+ * that support room-scale experiences.
+ */
+[Pref="dom.vr.enabled",
+ HeaderFile="mozilla/dom/VRDisplay.h"]
+interface VRStageParameters {
+ /**
+ * A 16-element array containing the components of a column-major 4x4
+ * affine transform matrix. This matrix transforms the sitting-space position
+ * returned by get{Immediate}Pose() to a standing-space position.
+ */
+ [Throws] readonly attribute Float32Array sittingToStandingTransform;
+
+ /**
+ * Dimensions of the play-area bounds. The bounds are defined
+ * as an axis-aligned rectangle on the floor.
+ * The center of the rectangle is at (0,0,0) in standing-space
+ * coordinates.
+ * These bounds are defined for safety purposes.
+ * Content should not require the user to move beyond these
+ * bounds; however, it is possible for the user to ignore
+ * the bounds resulting in position values outside of
+ * this rectangle.
+ */
+ readonly attribute float sizeX;
+ readonly attribute float sizeZ;
};
[Pref="dom.vr.enabled",
HeaderFile="mozilla/dom/VRDisplay.h"]
-interface VRPositionState {
- readonly attribute double timeStamp;
+interface VRPose {
+ readonly attribute DOMHighResTimeStamp timestamp;
- readonly attribute boolean hasPosition;
- readonly attribute DOMPoint? position;
- readonly attribute DOMPoint? linearVelocity;
- readonly attribute DOMPoint? linearAcceleration;
+ /**
+ * position, linearVelocity, and linearAcceleration are 3-component vectors.
+ * position is relative to a sitting space. Transforming this point with
+ * VRStageParameters.sittingToStandingTransform converts this to standing space.
+ */
+ [Constant, Throws] readonly attribute Float32Array? position;
+ [Constant, Throws] readonly attribute Float32Array? linearVelocity;
+ [Constant, Throws] readonly attribute Float32Array? linearAcceleration;
- readonly attribute boolean hasOrientation;
- // XXX should be DOMQuaternion as soon as we add that
- readonly attribute DOMPoint? orientation;
- readonly attribute DOMPoint? angularVelocity;
- readonly attribute DOMPoint? angularAcceleration;
+ /* orientation is a 4-entry array representing the components of a quaternion. */
+ [Constant, Throws] readonly attribute Float32Array? orientation;
+ /* angularVelocity and angularAcceleration are the components of 3-dimensional vectors. */
+ [Constant, Throws] readonly attribute Float32Array? angularVelocity;
+ [Constant, Throws] readonly attribute Float32Array? angularAcceleration;
};
[Pref="dom.vr.enabled",
HeaderFile="mozilla/dom/VRDisplay.h"]
interface VREyeParameters {
- /* These values are expected to be static per-device/per-user */
- [Constant, Cached] readonly attribute VRFieldOfView minimumFieldOfView;
- [Constant, Cached] readonly attribute VRFieldOfView maximumFieldOfView;
- [Constant, Cached] readonly attribute VRFieldOfView recommendedFieldOfView;
- [Constant, Cached] readonly attribute DOMPoint eyeTranslation;
+ /**
+ * offset is a 3-component vector representing an offset to
+ * translate the eye. This value may vary from frame
+ * to frame if the user adjusts their headset ipd.
+ */
+ [Constant, Throws] readonly attribute Float32Array offset;
- /* These values will vary after a FOV has been set */
- [Constant, Cached] readonly attribute VRFieldOfView currentFieldOfView;
- [Constant, Cached] readonly attribute DOMRect renderRect;
-};
-
-[Pref="dom.vr.enabled"]
-interface VRDisplay {
- /**
- * An identifier for the distinct hardware unit that this
- * VR Device is a part of. All VRDisplay/Sensors that come
- * from the same hardware will have the same hardwareId
- */
- [Constant] readonly attribute DOMString hardwareUnitId;
+ /* These values may vary as the user adjusts their headset ipd. */
+ [Constant] readonly attribute VRFieldOfView fieldOfView;
/**
- * An identifier for this distinct sensor/device on a physical
- * hardware device. This shouldn't change across browser
- * restrats, allowing configuration data to be saved based on it.
+ * renderWidth and renderHeight specify the recommended render target
+ * size of each eye viewport, in pixels. If multiple eyes are rendered
+ * in a single render target, then the render target should be made large
+ * enough to fit both viewports.
*/
- [Constant] readonly attribute DOMString deviceId;
-
- /**
- * a device name, a user-readable name identifying it
- */
- [Constant] readonly attribute DOMString deviceName;
+ [Constant] readonly attribute unsigned long renderWidth;
+ [Constant] readonly attribute unsigned long renderHeight;
};
[Pref="dom.vr.enabled",
HeaderFile="mozilla/dom/VRDisplay.h"]
-interface HMDVRDisplay : VRDisplay {
- // Return the current VREyeParameters for the given eye
+interface VRDisplay : EventTarget {
+ readonly attribute boolean isConnected;
+ readonly attribute boolean isPresenting;
+
+ /**
+ * Dictionary of capabilities describing the VRDisplay.
+ */
+ [Constant] readonly attribute VRDisplayCapabilities capabilities;
+
+ /**
+ * If this VRDisplay supports room-scale experiences, the optional
+ * stage attribute contains details on the room-scale parameters.
+ */
+ readonly attribute VRStageParameters? stageParameters;
+
+ /* Return the current VREyeParameters for the given eye. */
VREyeParameters getEyeParameters(VREye whichEye);
- // Set a field of view. If either of the fields of view is null,
- // or if their values are all zeros, then the recommended field of view
- // for that eye will be used.
- void setFieldOfView(optional VRFieldOfViewInit leftFOV,
- optional VRFieldOfViewInit rightFOV,
- optional double zNear = 0.01,
- optional double zFar = 10000.0);
-};
+ /**
+ * An identifier for this distinct VRDisplay. Used as an
+ * association point in the Gamepad API.
+ */
+ [Constant] readonly attribute unsigned long displayId;
-[Pref="dom.vr.enabled" ,
- HeaderFile="mozilla/dom/VRDisplay.h"]
-interface PositionSensorVRDisplay : VRDisplay {
- /*
- * Return a VRPositionState dictionary containing the state of this position sensor
- * for the current frame if within a requestAnimationFrame callback, or for the
- * previous frame if not.
+ /**
+ * A display name, a user-readable name identifying it.
+ */
+ [Constant] readonly attribute DOMString displayName;
+
+ /**
+ * Return a VRPose containing the future predicted pose of the VRDisplay
+ * when the current frame will be presented. Subsequent calls to getPose()
+ * MUST return a VRPose with the same values until the next call to
+ * submitFrame().
*
- * The VRPositionState will contain the position, orientation, and velocity
- * and acceleration of each of these properties. Use "hasPosition" and "hasOrientation"
- * to check if the associated members are valid; if these are false, those members
- * will be null.
+ * The VRPose will contain the position, orientation, velocity,
+ * and acceleration of each of these properties.
+ */
+ [NewObject] VRPose getPose();
+
+ /**
+ * Return the current instantaneous pose of the VRDisplay, with no
+ * prediction applied. Every call to getImmediatePose() may
+ * return a different value, even within a single frame.
*/
- [NewObject] VRPositionState getState();
+ [NewObject] VRPose getImmediatePose();
+
+ /**
+ * Reset the pose for this display, treating its current position and
+ * orientation as the "origin/zero" values. VRPose.position,
+ * VRPose.orientation, and VRStageParameters.sittingToStandingTransform may be
+ * updated when calling resetPose(). This should be called in only
+ * sitting-space experiences.
+ */
+ void resetPose();
+
+ /**
+ * z-depth defining the near plane of the eye view frustum
+ * enables mapping of values in the render target depth
+ * attachment to scene coordinates. Initially set to 0.01.
+ */
+ attribute double depthNear;
+
+ /**
+ * z-depth defining the far plane of the eye view frustum
+ * enables mapping of values in the render target depth
+ * attachment to scene coordinates. Initially set to 10000.0.
+ */
+ attribute double depthFar;
- /*
- * Return the current instantaneous sensor state.
+ /**
+ * The callback passed to `requestAnimationFrame` will be called
+ * any time a new frame should be rendered. When the VRDisplay is
+ * presenting the callback will be called at the native refresh
+ * rate of the HMD. When not presenting this function acts
+ * identically to how window.requestAnimationFrame acts. Content should
+ * make no assumptions of frame rate or vsync behavior as the HMD runs
+ * asynchronously from other displays and at differing refresh rates.
*/
- [NewObject] VRPositionState getImmediateState();
+ [Throws] long requestAnimationFrame(FrameRequestCallback callback);
+
+ /**
+ * Passing the value returned by `requestAnimationFrame` to
+ * `cancelAnimationFrame` will unregister the callback.
+ */
+ [Throws] void cancelAnimationFrame(long handle);
- /* Reset this sensor, treating its current position and orientation
- * as the "origin/zero" values.
+ /**
+ * Begin presenting to the VRDisplay. Must be called in response to a user gesture.
+ * Repeat calls while already presenting will update the VRLayers being displayed.
+ */
+ [Throws] Promise<void> requestPresent(sequence<VRLayer> layers);
+
+ /**
+ * Stops presenting to the VRDisplay.
*/
- void resetSensor();
+ [Throws] Promise<void> exitPresent();
+
+ /**
+ * Get the layers currently being presented.
+ */
+ sequence<VRLayer> getLayers();
+
+ /**
+ * The VRLayer provided to the VRDisplay will be captured and presented
+ * in the HMD. Calling this function has the same effect on the source
+ * canvas as any other operation that uses its source image, and canvases
+ * created without preserveDrawingBuffer set to true will be cleared.
+ */
+ void submitFrame(optional VRPose pose);
};
--- a/gfx/layers/d3d11/TextureD3D11.cpp
+++ b/gfx/layers/d3d11/TextureD3D11.cpp
@@ -728,20 +728,20 @@ Compositor*
DXGITextureHostD3D11::GetCompositor()
{
return mCompositor;
}
bool
DXGITextureHostD3D11::Lock()
{
- if (!mCompositor) {
- NS_WARNING("no suitable compositor");
- return false;
- }
+ /**
+ * Note: This function may be called when mCompositor is null
+ * such as during WebVR frame submission.
+ **/
if (!GetDevice()) {
NS_WARNING("trying to lock a TextureHost without a D3D device");
return false;
}
if (!mTextureSource) {
if (!mTexture && !OpenSharedHandle()) {
--- a/gfx/layers/d3d11/TextureD3D11.h
+++ b/gfx/layers/d3d11/TextureD3D11.h
@@ -376,17 +376,16 @@ public:
void BindRenderTarget(ID3D11DeviceContext* aContext);
virtual gfx::IntSize GetSize() const override;
void SetSize(const gfx::IntSize& aSize) { mSize = aSize; }
private:
friend class CompositorD3D11;
-
RefPtr<ID3D11RenderTargetView> mRTView;
};
class SyncObjectD3D11 : public SyncObject
{
public:
SyncObjectD3D11(SyncHandle aSyncHandle);
virtual void FinalizeFrame();
--- a/gfx/layers/ipc/PTexture.ipdl
+++ b/gfx/layers/ipc/PTexture.ipdl
@@ -4,29 +4,30 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
include LayersSurfaces;
include protocol PLayerTransaction;
include protocol PCompositorBridge;
include protocol PImageBridge;
+include protocol PVRManager;
include "mozilla/GfxMessageUtils.h";
using struct mozilla::layers::FrameMetrics from "FrameMetrics.h";
using mozilla::layers::TextureFlags from "mozilla/layers/CompositorTypes.h";
namespace mozilla {
namespace layers {
/**
* PTexture is the IPDL glue between a TextureClient and a TextureHost.
*/
sync protocol PTexture {
- manager PImageBridge or PCompositorBridge;
+ manager PImageBridge or PCompositorBridge or PVRManager;
child:
async __delete__();
parent:
/**
* Asynchronously tell the compositor side to remove the texture.
*/
new file mode 100644
--- /dev/null
+++ b/gfx/vr/VRDisplayClient.cpp
@@ -0,0 +1,143 @@
+/* -*- Mode: C++; tab-width: 20; indent-tabs-mode: nil; c-basic-offset: 2 -*-
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include <math.h>
+
+#include "prlink.h"
+#include "prmem.h"
+#include "prenv.h"
+#include "gfxPrefs.h"
+#include "nsString.h"
+#include "mozilla/Preferences.h"
+#include "mozilla/unused.h"
+#include "nsServiceManagerUtils.h"
+#include "nsIScreenManager.h"
+
+#ifdef XP_WIN
+#include "../layers/d3d11/CompositorD3D11.h"
+#endif
+
+#include "VRDisplayClient.h"
+#include "VRDisplayPresentation.h"
+#include "VRManagerChild.h"
+#include "VRLayerChild.h"
+
+using namespace mozilla;
+using namespace mozilla::gfx;
+
+VRDisplayClient::VRDisplayClient(const VRDisplayInfo& aDisplayInfo)
+ : mDisplayInfo(aDisplayInfo)
+ , bLastEventWasPresenting(false)
+ , mPresentationCount(0)
+{
+ MOZ_COUNT_CTOR(VRDisplayClient);
+}
+
+VRDisplayClient::~VRDisplayClient() {
+ MOZ_COUNT_DTOR(VRDisplayClient);
+}
+
+void
+VRDisplayClient::UpdateDisplayInfo(const VRDisplayInfo& aDisplayInfo)
+{
+ mDisplayInfo = aDisplayInfo;
+}
+
+already_AddRefed<VRDisplayPresentation>
+VRDisplayClient::BeginPresentation(const nsTArray<mozilla::dom::VRLayer>& aLayers)
+{
+ ++mPresentationCount;
+ RefPtr<VRDisplayPresentation> presentation = new VRDisplayPresentation(this, aLayers);
+ return presentation.forget();
+}
+
+void
+VRDisplayClient::PresentationDestroyed()
+{
+ --mPresentationCount;
+}
+
+void
+VRDisplayClient::ZeroSensor()
+{
+ VRManagerChild *vm = VRManagerChild::Get();
+ vm->SendResetSensor(mDisplayInfo.mDisplayID);
+}
+
+VRHMDSensorState
+VRDisplayClient::GetSensorState()
+{
+ VRHMDSensorState sensorState;
+ VRManagerChild *vm = VRManagerChild::Get();
+ Unused << vm->SendGetSensorState(mDisplayInfo.mDisplayID, &sensorState);
+ return sensorState;
+}
+
+VRHMDSensorState
+VRDisplayClient::GetImmediateSensorState()
+{
+ VRHMDSensorState sensorState;
+
+ VRManagerChild *vm = VRManagerChild::Get();
+ Unused << vm->SendGetImmediateSensorState(mDisplayInfo.mDisplayID, &sensorState);
+ return sensorState;
+}
+
+const double kVRDisplayRAFMaxDuration = 32; // milliseconds
+
+void
+VRDisplayClient::NotifyVsync()
+{
+ VRManagerChild *vm = VRManagerChild::Get();
+
+ bool isPresenting = GetIsPresenting();
+
+ bool bShouldCallback = !isPresenting;
+ if (mLastVSyncTime.IsNull()) {
+ bShouldCallback = true;
+ } else {
+ TimeDuration duration = TimeStamp::Now() - mLastVSyncTime;
+ if (duration.ToMilliseconds() > kVRDisplayRAFMaxDuration) {
+ bShouldCallback = true;
+ }
+ }
+
+ if (bShouldCallback) {
+ vm->RunFrameRequestCallbacks();
+ mLastVSyncTime = TimeStamp::Now();
+ }
+
+ // Check if we need to trigger onVRDisplayPresentChange event
+ if (bLastEventWasPresenting != isPresenting) {
+ bLastEventWasPresenting = isPresenting;
+ vm->FireDOMVRDisplayPresentChangeEvent();
+ }
+}
+
+void
+VRDisplayClient::NotifyVRVsync()
+{
+ VRManagerChild *vm = VRManagerChild::Get();
+ vm->RunFrameRequestCallbacks();
+ mLastVSyncTime = TimeStamp::Now();
+}
+
+bool
+VRDisplayClient::GetIsConnected() const
+{
+ return mDisplayInfo.GetIsConnected();
+}
+
+bool
+VRDisplayClient::GetIsPresenting() const
+{
+ return mDisplayInfo.GetIsPresenting();
+}
+
+void
+VRDisplayClient::NotifyDisconnected()
+{
+ mDisplayInfo.mIsConnected = false;
+}
new file mode 100644
--- /dev/null
+++ b/gfx/vr/VRDisplayClient.h
@@ -0,0 +1,61 @@
+/* -*- Mode: C++; tab-width: 20; indent-tabs-mode: nil; c-basic-offset: 2 -*-
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef GFX_VR_DISPLAY_CLIENT_H
+#define GFX_VR_DISPLAY_CLIENT_H
+
+#include "nsIScreen.h"
+#include "nsCOMPtr.h"
+#include "mozilla/RefPtr.h"
+#include "mozilla/dom/VRDisplayBinding.h"
+
+#include "gfxVR.h"
+
+namespace mozilla {
+namespace gfx {
+class VRDisplayPresentation;
+class VRManagerChild;
+
+class VRDisplayClient
+{
+public:
+ NS_INLINE_DECL_THREADSAFE_REFCOUNTING(VRDisplayClient)
+
+ explicit VRDisplayClient(const VRDisplayInfo& aDisplayInfo);
+
+ void UpdateDisplayInfo(const VRDisplayInfo& aDisplayInfo);
+
+ const VRDisplayInfo& GetDisplayInfo() const { return mDisplayInfo; }
+ virtual VRHMDSensorState GetSensorState();
+ virtual VRHMDSensorState GetImmediateSensorState();
+
+ virtual void ZeroSensor();
+
+ already_AddRefed<VRDisplayPresentation> BeginPresentation(const nsTArray<dom::VRLayer>& aLayers);
+ void PresentationDestroyed();
+
+ void NotifyVsync();
+ void NotifyVRVsync();
+
+ bool GetIsConnected() const;
+ bool GetIsPresenting() const;
+
+ void NotifyDisconnected();
+
+protected:
+ virtual ~VRDisplayClient();
+
+ VRDisplayInfo mDisplayInfo;
+
+ bool bLastEventWasPresenting;
+
+ TimeStamp mLastVSyncTime;
+ int mPresentationCount;
+};
+
+} // namespace gfx
+} // namespace mozilla
+
+#endif /* GFX_VR_DISPLAY_CLIENT_H */
new file mode 100644
--- /dev/null
+++ b/gfx/vr/VRDisplayHost.cpp
@@ -0,0 +1,130 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-
+* This Source Code Form is subject to the terms of the Mozilla Public
+* License, v. 2.0. If a copy of the MPL was not distributed with this
+* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "VRDisplayHost.h"
+#include "gfxVR.h"
+
+#if defined(XP_WIN)
+
+#include <d3d11.h>
+#include "gfxWindowsPlatform.h"
+#include "../layers/d3d11/CompositorD3D11.h"
+#include "mozilla/layers/TextureD3D11.h"
+
+#endif
+
+using namespace mozilla;
+using namespace mozilla::gfx;
+using namespace mozilla::layers;
+
+VRDisplayHost::VRDisplayHost(VRDisplayType aType)
+ : mInputFrameID(0)
+{
+ MOZ_COUNT_CTOR(VRDisplayHost);
+ mDisplayInfo.mType = aType;
+ mDisplayInfo.mDisplayID = VRDisplayManager::AllocateDisplayID();
+ mDisplayInfo.mIsPresenting = false;
+
+ for (int i = 0; i < kMaxLatencyFrames; i++) {
+ mLastSensorState[i].Clear();
+ }
+}
+
+VRDisplayHost::~VRDisplayHost()
+{
+ MOZ_COUNT_DTOR(VRDisplayHost);
+}
+
+void
+VRDisplayHost::AddLayer(VRLayerParent *aLayer)
+{
+ mLayers.AppendElement(aLayer);
+ if (mLayers.Length() == 1) {
+ StartPresentation();
+ }
+ mDisplayInfo.mIsPresenting = mLayers.Length() > 0;
+}
+
+void
+VRDisplayHost::RemoveLayer(VRLayerParent *aLayer)
+{
+ mLayers.RemoveElement(aLayer);
+ if (mLayers.Length() == 0) {
+ StopPresentation();
+ }
+ mDisplayInfo.mIsPresenting = mLayers.Length() > 0;
+}
+
+#if defined(XP_WIN)
+
+void
+VRDisplayHost::SubmitFrame(VRLayerParent* aLayer, const int32_t& aInputFrameID,
+ PTextureParent* aTexture, const gfx::Rect& aLeftEyeRect,
+ const gfx::Rect& aRightEyeRect)
+{
+ int32_t inputFrameID = aInputFrameID;
+ if (inputFrameID == 0) {
+ inputFrameID = mInputFrameID;
+ }
+ if (inputFrameID < 0) {
+ // Sanity check to prevent invalid memory access on builds with assertions
+ // disabled.
+ inputFrameID = 0;
+ }
+
+ VRHMDSensorState sensorState = mLastSensorState[inputFrameID % kMaxLatencyFrames];
+ // It is possible to get a cache miss on mLastSensorState if latency is
+ // longer than kMaxLatencyFrames. An optimization would be to find a frame
+ // that is closer than the one selected with the modulus.
+ // If we hit this; however, latency is already so high that the site is
+ // un-viewable and a more accurate pose prediction is not likely to
+ // compensate.
+
+ TextureHost* th = TextureHost::AsTextureHost(aTexture);
+ AutoLockTextureHost autoLock(th);
+ if (autoLock.Failed()) {
+ NS_WARNING("Failed to lock the VR layer texture");
+ return;
+ }
+
+ CompositableTextureSourceRef source;
+ if (!th->BindTextureSource(source)) {
+ NS_WARNING("The TextureHost was successfully locked but can't provide a TextureSource");
+ return;
+ }
+ MOZ_ASSERT(source);
+
+ IntSize texSize = source->GetSize();
+
+ TextureSourceD3D11* sourceD3D11 = source->AsSourceD3D11();
+ if (!sourceD3D11) {
+ NS_WARNING("WebVR support currently only implemented for D3D11");
+ return;
+ }
+
+ SubmitFrame(sourceD3D11, texSize, sensorState, aLeftEyeRect, aRightEyeRect);
+}
+
+#else
+
+void
+VRDisplayHost::SubmitFrame(VRLayerParent* aLayer, const int32_t& aInputFrameID,
+ PTextureParent* aTexture, const gfx::Rect& aLeftEyeRect,
+ const gfx::Rect& aRightEyeRect)
+{
+ NS_WARNING("WebVR only supported in Windows.");
+}
+
+#endif
+
+bool
+VRDisplayHost::CheckClearDisplayInfoDirty()
+{
+ if (mDisplayInfo == mLastUpdateDisplayInfo) {
+ return false;
+ }
+ mLastUpdateDisplayInfo = mDisplayInfo;
+ return true;
+}
new file mode 100644
--- /dev/null
+++ b/gfx/vr/VRDisplayHost.h
@@ -0,0 +1,89 @@
+/* -*- Mode: C++; tab-width: 20; indent-tabs-mode: nil; c-basic-offset: 2 -*-
+* This Source Code Form is subject to the terms of the Mozilla Public
+* License, v. 2.0. If a copy of the MPL was not distributed with this
+* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef GFX_VR_DISPLAY_HOST_H
+#define GFX_VR_DISPLAY_HOST_H
+
+#include "gfxVR.h"
+#include "nsTArray.h"
+#include "nsString.h"
+#include "nsCOMPtr.h"
+#include "mozilla/RefPtr.h"
+#include "mozilla/gfx/2D.h"
+#include "mozilla/Atomics.h"
+#include "mozilla/EnumeratedArray.h"
+#include "mozilla/TimeStamp.h"
+#include "mozilla/TypedEnumBits.h"
+
+namespace mozilla {
+namespace layers {
+class PTextureParent;
+#if defined(XP_WIN)
+class TextureSourceD3D11;
+#endif
+} // namespace layers
+namespace gfx {
+class VRLayerParent;
+
+class VRDisplayHost {
+public:
+ NS_INLINE_DECL_THREADSAFE_REFCOUNTING(VRDisplayHost)
+
+ const VRDisplayInfo& GetDisplayInfo() const { return mDisplayInfo; }
+
+ void AddLayer(VRLayerParent* aLayer);
+ void RemoveLayer(VRLayerParent* aLayer);
+
+ virtual VRHMDSensorState GetSensorState() = 0;
+ virtual VRHMDSensorState GetImmediateSensorState() = 0;
+ virtual void ZeroSensor() = 0;
+ virtual void StartPresentation() = 0;
+ virtual void StopPresentation() = 0;
+ virtual void NotifyVSync() { };
+
+ void SubmitFrame(VRLayerParent* aLayer,
+ const int32_t& aInputFrameID,
+ mozilla::layers::PTextureParent* aTexture,
+ const gfx::Rect& aLeftEyeRect,
+ const gfx::Rect& aRightEyeRect);
+
+ bool CheckClearDisplayInfoDirty();
+
+protected:
+ explicit VRDisplayHost(VRDisplayType aType);
+ virtual ~VRDisplayHost();
+
+#if defined(XP_WIN)
+ virtual void SubmitFrame(mozilla::layers::TextureSourceD3D11* aSource,
+ const IntSize& aSize,
+ const VRHMDSensorState& aSensorState,
+ const gfx::Rect& aLeftEyeRect,
+ const gfx::Rect& aRightEyeRect) = 0;
+#endif
+
+ VRDisplayInfo mDisplayInfo;
+
+ nsTArray<RefPtr<VRLayerParent>> mLayers;
+ // Weak references only; mLayers entries are cleared in the VRLayerParent destructor
+
+ // The maximum number of frames of latency that we would expect before we
+ // should give up applying pose prediction.
+ // If latency is greater than one second, then the experience is not likely
+ // to be corrected by pose prediction. Setting this value too
+ // high may result in unnecessary memory allocation.
+ // As the current fastest refresh rate is 90hz, 100 is selected as a
+ // conservative value.
+ static const int kMaxLatencyFrames = 100;
+ VRHMDSensorState mLastSensorState[kMaxLatencyFrames];
+ int32_t mInputFrameID;
+
+private:
+ VRDisplayInfo mLastUpdateDisplayInfo;
+};
+
+} // namespace gfx
+} // namespace mozilla
+
+#endif /* GFX_VR_DISPLAY_HOST_H */
new file mode 100644
--- /dev/null
+++ b/gfx/vr/VRDisplayPresentation.cpp
@@ -0,0 +1,112 @@
+/* -*- Mode: C++; tab-width: 20; indent-tabs-mode: nil; c-basic-offset: 2 -*-
+* This Source Code Form is subject to the terms of the Mozilla Public
+* License, v. 2.0. If a copy of the MPL was not distributed with this
+* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "VRDisplayPresentation.h"
+
+#include "mozilla/unused.h"
+#include "VRDisplayClient.h"
+#include "VRLayerChild.h"
+
+using namespace mozilla;
+using namespace mozilla::gfx;
+
+VRDisplayPresentation::VRDisplayPresentation(VRDisplayClient *aDisplayClient,
+ const nsTArray<mozilla::dom::VRLayer>& aLayers)
+ : mDisplayClient(aDisplayClient)
+ , mDOMLayers(aLayers)
+{
+ CreateLayers();
+}
+
+void
+VRDisplayPresentation::CreateLayers()
+{
+ if (mLayers.Length()) {
+ return;
+ }
+
+ for (dom::VRLayer& layer : mDOMLayers) {
+ dom::HTMLCanvasElement* canvasElement = layer.mSource;
+ if (!canvasElement) {
+ /// XXX In the future we will support WebVR in WebWorkers here
+ continue;
+ }
+
+ Rect leftBounds(0.0, 0.0, 0.5, 1.0);
+ if (layer.mLeftBounds.Length() == 4) {
+ leftBounds.x = layer.mLeftBounds[0];
+ leftBounds.y = layer.mLeftBounds[1];
+ leftBounds.width = layer.mLeftBounds[2];
+ leftBounds.height = layer.mLeftBounds[3];
+ } else if (layer.mLeftBounds.Length() != 0) {
+ /**
+ * We ignore layers with an incorrect number of values.
+ * In the future, VRDisplay.requestPresent may throw in
+ * this case. See https://github.com/w3c/webvr/issues/71
+ */
+ continue;
+ }
+
+ Rect rightBounds(0.5, 0.0, 0.5, 1.0);
+ if (layer.mRightBounds.Length() == 4) {
+ rightBounds.x = layer.mRightBounds[0];
+ rightBounds.y = layer.mRightBounds[1];
+ rightBounds.width = layer.mRightBounds[2];
+ rightBounds.height = layer.mRightBounds[3];
+ } else if (layer.mRightBounds.Length() != 0) {
+ /**
+ * We ignore layers with an incorrect number of values.
+ * In the future, VRDisplay.requestPresent may throw in
+ * this case. See https://github.com/w3c/webvr/issues/71
+ */
+ continue;
+ }
+
+ VRManagerChild *manager = VRManagerChild::Get();
+ if (!manager) {
+ NS_WARNING("VRManagerChild::Get returned null!");
+ continue;
+ }
+
+ RefPtr<VRLayerChild> vrLayer = static_cast<VRLayerChild*>(manager->CreateVRLayer(mDisplayClient->GetDisplayInfo().GetDisplayID(), leftBounds, rightBounds));
+ if (!vrLayer) {
+ NS_WARNING("CreateVRLayer returned null!");
+ continue;
+ }
+
+ vrLayer->Initialize(canvasElement);
+
+ mLayers.AppendElement(vrLayer);
+ }
+}
+
+void
+VRDisplayPresentation::DestroyLayers()
+{
+ for (VRLayerChild* layer : mLayers) {
+ Unused << layer->SendDestroy();
+ }
+ mLayers.Clear();
+}
+
+void
+VRDisplayPresentation::GetDOMLayers(nsTArray<dom::VRLayer>& result)
+{
+ result = mDOMLayers;
+}
+
+VRDisplayPresentation::~VRDisplayPresentation()
+{
+ DestroyLayers();
+ mDisplayClient->PresentationDestroyed();
+}
+
+void VRDisplayPresentation::SubmitFrame(int32_t aInputFrameID)
+{
+ for (VRLayerChild *layer : mLayers) {
+ layer->SubmitFrame(aInputFrameID);
+ break; // Currently only one layer supported, submit only the first
+ }
+}
new file mode 100644
--- /dev/null
+++ b/gfx/vr/VRDisplayPresentation.h
@@ -0,0 +1,41 @@
+/* -*- Mode: C++; tab-width: 20; indent-tabs-mode: nil; c-basic-offset: 2 -*-
+* This Source Code Form is subject to the terms of the Mozilla Public
+* License, v. 2.0. If a copy of the MPL was not distributed with this
+* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef GFX_VR_DISPLAY_PRESENTATION_H
+#define GFX_VR_DISPLAY_PRESENTATION_H
+
+#include "mozilla/RefPtr.h"
+#include "mozilla/dom/VRDisplayBinding.h"
+
+namespace mozilla {
+namespace gfx {
+class VRDisplayClient;
+namespace vr {
+class VRLayerChild;
+} // namespace vr
+
+class VRDisplayPresentation final
+{
+ NS_INLINE_DECL_THREADSAFE_REFCOUNTING(VRDisplayPresentation)
+
+public:
+ VRDisplayPresentation(VRDisplayClient *aDisplayClient, const nsTArray<dom::VRLayer>& aLayers);
+ void SubmitFrame(int32_t aInputFrameID);
+ void GetDOMLayers(nsTArray<dom::VRLayer>& result);
+
+private:
+ ~VRDisplayPresentation();
+ void CreateLayers();
+ void DestroyLayers();
+
+ RefPtr<VRDisplayClient> mDisplayClient;
+ nsTArray<dom::VRLayer> mDOMLayers;
+ nsTArray<RefPtr<VRLayerChild>> mLayers;
+};
+
+} // namespace gfx
+} // namespace mozilla
+
+#endif /* GFX_VR_DISPLAY_PRESENTATION_H */
deleted file mode 100644
--- a/gfx/vr/VRDisplayProxy.cpp
+++ /dev/null
@@ -1,165 +0,0 @@
-/* -*- Mode: C++; tab-width: 20; indent-tabs-mode: nil; c-basic-offset: 2 -*-
- * This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-#include <math.h>
-
-#include "prlink.h"
-#include "prmem.h"
-#include "prenv.h"
-#include "gfxPrefs.h"
-#include "nsString.h"
-#include "mozilla/Preferences.h"
-#include "mozilla/unused.h"
-#include "nsServiceManagerUtils.h"
-#include "nsIScreenManager.h"
-
-
-#ifdef XP_WIN
-#include "../layers/d3d11/CompositorD3D11.h"
-#endif
-
-#include "VRDisplayProxy.h"
-#include "VRManagerChild.h"
-
-using namespace mozilla;
-using namespace mozilla::gfx;
-
-VRDisplayProxy::VRDisplayProxy(const VRDisplayUpdate& aDeviceUpdate)
- : mDeviceInfo(aDeviceUpdate.mDeviceInfo)
- , mSensorState(aDeviceUpdate.mSensorState)
-{
- MOZ_COUNT_CTOR(VRDisplayProxy);
-
- if (mDeviceInfo.mScreenRect.width && mDeviceInfo.mScreenRect.height) {
- if (mDeviceInfo.mIsFakeScreen) {
- mScreen = MakeFakeScreen(mDeviceInfo.mScreenRect);
- } else {
- nsCOMPtr<nsIScreenManager> screenmgr = do_GetService("@mozilla.org/gfx/screenmanager;1");
- if (screenmgr) {
- screenmgr->ScreenForRect(mDeviceInfo.mScreenRect.x, mDeviceInfo.mScreenRect.y,
- mDeviceInfo.mScreenRect.width, mDeviceInfo.mScreenRect.height,
- getter_AddRefs(mScreen));
- }
- }
-#ifdef DEBUG
- printf_stderr("VR DEVICE SCREEN: %d %d %d %d\n",
- mDeviceInfo.mScreenRect.x, mDeviceInfo.mScreenRect.y,
- mDeviceInfo.mScreenRect.width, mDeviceInfo.mScreenRect.height);
-#endif
- }
-}
-
-VRDisplayProxy::~VRDisplayProxy() {
- MOZ_COUNT_DTOR(VRDisplayProxy);
-}
-
-void
-VRDisplayProxy::UpdateDeviceInfo(const VRDisplayUpdate& aDeviceUpdate)
-{
- mDeviceInfo = aDeviceUpdate.mDeviceInfo;
- mSensorState = aDeviceUpdate.mSensorState;
-}
-
-bool
-VRDisplayProxy::SetFOV(const VRFieldOfView& aFOVLeft, const VRFieldOfView& aFOVRight,
- double zNear, double zFar)
-{
- VRManagerChild *vm = VRManagerChild::Get();
- vm->SendSetFOV(mDeviceInfo.mDeviceID, aFOVLeft, aFOVRight, zNear, zFar);
- return true;
-}
-
-void
-VRDisplayProxy::ZeroSensor()
-{
- VRManagerChild *vm = VRManagerChild::Get();
- vm->SendResetSensor(mDeviceInfo.mDeviceID);
-}
-
-VRHMDSensorState
-VRDisplayProxy::GetSensorState()
-{
- VRManagerChild *vm = VRManagerChild::Get();
- Unused << vm->SendKeepSensorTracking(mDeviceInfo.mDeviceID);
- return mSensorState;
-}
-
-VRHMDSensorState
-VRDisplayProxy::GetImmediateSensorState()
-{
- // XXX TODO - Need to perform IPC call to get the current sensor
- // state rather than the predictive state used for the frame rendering.
- return GetSensorState();
-}
-
-void
-VRDisplayProxy::UpdateSensorState(const VRHMDSensorState& aSensorState)
-{
- mSensorState = aSensorState;
-}
-
-// Dummy nsIScreen implementation, for when we just need to specify a size
-class FakeScreen : public nsIScreen
-{
-public:
- explicit FakeScreen(const IntRect& aScreenRect)
- : mScreenRect(aScreenRect)
- { }
-
- NS_DECL_ISUPPORTS
-
- NS_IMETHOD GetRect(int32_t *l, int32_t *t, int32_t *w, int32_t *h) override {
- *l = mScreenRect.x;
- *t = mScreenRect.y;
- *w = mScreenRect.width;
- *h = mScreenRect.height;
- return NS_OK;
- }
- NS_IMETHOD GetAvailRect(int32_t *l, int32_t *t, int32_t *w, int32_t *h) override {
- return GetRect(l, t, w, h);
- }
- NS_IMETHOD GetRectDisplayPix(int32_t *l, int32_t *t, int32_t *w, int32_t *h) override {
- return GetRect(l, t, w, h);
- }
- NS_IMETHOD GetAvailRectDisplayPix(int32_t *l, int32_t *t, int32_t *w, int32_t *h) override {
- return GetAvailRect(l, t, w, h);
- }
-
- NS_IMETHOD GetId(uint32_t* aId) override { *aId = (uint32_t)-1; return NS_OK; }
- NS_IMETHOD GetPixelDepth(int32_t* aPixelDepth) override { *aPixelDepth = 24; return NS_OK; }
- NS_IMETHOD GetColorDepth(int32_t* aColorDepth) override { *aColorDepth = 24; return NS_OK; }
-
- NS_IMETHOD LockMinimumBrightness(uint32_t aBrightness) override { return NS_ERROR_NOT_AVAILABLE; }
- NS_IMETHOD UnlockMinimumBrightness(uint32_t aBrightness) override { return NS_ERROR_NOT_AVAILABLE; }
- NS_IMETHOD GetRotation(uint32_t* aRotation) override {
- *aRotation = nsIScreen::ROTATION_0_DEG;
- return NS_OK;
- }
- NS_IMETHOD SetRotation(uint32_t aRotation) override { return NS_ERROR_NOT_AVAILABLE; }
- NS_IMETHOD GetContentsScaleFactor(double* aContentsScaleFactor) override {
- *aContentsScaleFactor = 1.0;
- return NS_OK;
- }
- NS_IMETHOD GetDefaultCSSScaleFactor(double* aScaleFactor) override {
- *aScaleFactor = 1.0;
- return NS_OK;
- }
-
-protected:
- virtual ~FakeScreen() {}
-
- IntRect mScreenRect;
-};
-
-NS_IMPL_ISUPPORTS(FakeScreen, nsIScreen)
-
-
-/* static */ already_AddRefed<nsIScreen>
-VRDisplayProxy::MakeFakeScreen(const IntRect& aScreenRect)
-{
- nsCOMPtr<nsIScreen> screen = new FakeScreen(aScreenRect);
- return screen.forget();
-}
-
deleted file mode 100644
--- a/gfx/vr/VRDisplayProxy.h
+++ /dev/null
@@ -1,58 +0,0 @@
-/* -*- Mode: C++; tab-width: 20; indent-tabs-mode: nil; c-basic-offset: 2 -*-
- * This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-#ifndef GFX_VR_PROXY_H
-#define GFX_VR_PROXY_H
-
-#include "nsIScreen.h"
-#include "nsCOMPtr.h"
-#include "mozilla/RefPtr.h"
-
-#include "gfxVR.h"
-
-namespace mozilla {
-namespace gfx {
-
-class VRManagerChild;
-
-class VRDisplayProxy
-{
-public:
- NS_INLINE_DECL_THREADSAFE_REFCOUNTING(VRDisplayProxy)
-
- explicit VRDisplayProxy(const VRDisplayUpdate& aDeviceUpdate);
-
- void UpdateDeviceInfo(const VRDisplayUpdate& aDeviceUpdate);
- void UpdateSensorState(const VRHMDSensorState& aSensorState);
-
- const VRDisplayInfo& GetDeviceInfo() const { return mDeviceInfo; }
- virtual VRHMDSensorState GetSensorState();
- virtual VRHMDSensorState GetImmediateSensorState();
-
- bool SetFOV(const VRFieldOfView& aFOVLeft, const VRFieldOfView& aFOVRight,
- double zNear, double zFar);
-
- virtual void ZeroSensor();
-
-
- // The nsIScreen that represents this device
- nsIScreen* GetScreen() { return mScreen; }
-
-protected:
- virtual ~VRDisplayProxy();
-
- VRDisplayInfo mDeviceInfo;
- VRHMDSensorState mSensorState;
-
- nsCOMPtr<nsIScreen> mScreen;
-
- static already_AddRefed<nsIScreen> MakeFakeScreen(const IntRect& aScreenRect);
-
-};
-
-} // namespace gfx
-} // namespace mozilla
-
-#endif /* GFX_VR_PROXY_H */
--- a/gfx/vr/VRManager.cpp
+++ b/gfx/vr/VRManager.cpp
@@ -4,26 +4,33 @@
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "VRManager.h"
#include "VRManagerParent.h"
#include "gfxVR.h"
#include "mozilla/ClearOnShutdown.h"
#include "mozilla/dom/VRDisplay.h"
+#include "mozilla/layers/TextureHost.h"
#include "mozilla/unused.h"
#include "gfxPrefs.h"
#include "gfxVR.h"
#if defined(XP_WIN)
#include "gfxVROculus.h"
#endif
#if defined(XP_WIN) || defined(XP_MACOSX) || defined(XP_LINUX)
#include "gfxVROSVR.h"
#endif
+#include "ipc/VRLayerParent.h"
+
+using namespace mozilla;
+using namespace mozilla::gfx;
+using namespace mozilla::layers;
+using namespace mozilla::gl;
namespace mozilla {
namespace gfx {
static StaticRefPtr<VRManager> sVRManagerSingleton;
/*static*/ void
VRManager::ManagerInit()
@@ -37,32 +44,32 @@ VRManager::ManagerInit()
}
VRManager::VRManager()
: mInitialized(false)
{
MOZ_COUNT_CTOR(VRManager);
MOZ_ASSERT(sVRManagerSingleton == nullptr);
- RefPtr<VRHMDManager> mgr;
+ RefPtr<VRDisplayManager> mgr;
#if defined(XP_WIN)
- mgr = VRHMDManagerOculus::Create();
+ // The Oculus runtime is supported only on Windows
+ mgr = VRDisplayManagerOculus::Create();
if (mgr) {
mManagers.AppendElement(mgr);
}
#endif
#if defined(XP_WIN) || defined(XP_MACOSX) || defined(XP_LINUX)
// OSVR is cross platform compatible
- mgr = VRHMDManagerOSVR::Create();
+ mgr = VRDisplayManagerOSVR::Create();
if (mgr){
mManagers.AppendElement(mgr);
}
-
#endif
}
VRManager::~VRManager()
{
MOZ_ASSERT(NS_IsMainThread());
MOZ_ASSERT(!mInitialized);
MOZ_COUNT_DTOR(VRManager);
@@ -111,96 +118,135 @@ VRManager::RemoveVRManagerParent(VRManag
if (mVRManagerParents.IsEmpty()) {
Destroy();
}
}
void
VRManager::NotifyVsync(const TimeStamp& aVsyncTimestamp)
{
+ const double kVRDisplayRefreshMaxDuration = 5000; // milliseconds
+
+ bool bHaveEventListener = false;
+
+ for (auto iter = mVRManagerParents.Iter(); !iter.Done(); iter.Next()) {
+ VRManagerParent *vmp = iter.Get()->GetKey();
+ Unused << vmp->SendNotifyVSync();
+ bHaveEventListener |= vmp->HaveEventListener();
+ }
+
for (auto iter = mVRDisplays.Iter(); !iter.Done(); iter.Next()) {
- gfx::VRHMDInfo* device = iter.UserData();
- device->NotifyVsync(aVsyncTimestamp);
+ gfx::VRDisplayHost* display = iter.UserData();
+ display->NotifyVSync();
}
- DispatchVRDisplaySensorUpdate();
+
+ if (bHaveEventListener) {
+ // If content has set an EventHandler to be notified of VR display events
+ // we must continually refresh the VR display enumeration to check
+ // for events that we must fire such as Window.onvrdisplayconnected
+ // Note that enumeration itself may activate display hardware, such
+ // as Oculus, so we only do this when we know we are displaying content
+ // that is looking for VR displays.
+ if (mLastRefreshTime.IsNull()) {
+ // This is the first vsync, must refresh VR displays
+ RefreshVRDisplays();
+ } else {
+ // We don't have to do this every frame, so check if we
+ // have refreshed recently.
+ TimeDuration duration = TimeStamp::Now() - mLastRefreshTime;
+ if (duration.ToMilliseconds() > kVRDisplayRefreshMaxDuration) {
+ RefreshVRDisplays();
+ }
+ }
+ }
}
void
-VRManager::RefreshVRDisplays()
+VRManager::NotifyVRVsync(const uint32_t& aDisplayID)
{
- nsTArray<RefPtr<gfx::VRHMDInfo> > devices;
+ for (auto iter = mVRManagerParents.Iter(); !iter.Done(); iter.Next()) {
+ Unused << iter.Get()->GetKey()->SendNotifyVRVSync(aDisplayID);
+ }
+}
+
+void
+VRManager::RefreshVRDisplays(bool aMustDispatch)
+{
+ nsTArray<RefPtr<gfx::VRDisplayHost> > displays;
for (uint32_t i = 0; i < mManagers.Length(); ++i) {
- mManagers[i]->GetHMDs(devices);
+ mManagers[i]->GetHMDs(displays);
}
- bool deviceInfoChanged = false;
+ bool displayInfoChanged = false;
- if (devices.Length() != mVRDisplays.Count()) {
- deviceInfoChanged = true;
+ if (displays.Length() != mVRDisplays.Count()) {
+ // Catch cases where a VR display has been removed
+ displayInfoChanged = true;
}
- for (const auto& device: devices) {
- RefPtr<VRHMDInfo> oldDevice = GetDevice(device->GetDeviceInfo().GetDeviceID());
- if (oldDevice == nullptr) {
- deviceInfoChanged = true;
+ for (const auto& display: displays) {
+ if (!GetDisplay(display->GetDisplayInfo().GetDisplayID())) {
+ // This is a new display
+ displayInfoChanged = true;
break;
}
- if (oldDevice->GetDeviceInfo() != device->GetDeviceInfo()) {
- deviceInfoChanged = true;
+
+ if (display->CheckClearDisplayInfoDirty()) {
+ // This display's info has changed
+ displayInfoChanged = true;
break;
}
}
- if (deviceInfoChanged) {
+ if (displayInfoChanged) {
mVRDisplays.Clear();
- for (const auto& device: devices) {
- mVRDisplays.Put(device->GetDeviceInfo().GetDeviceID(), device);
+ for (const auto& display: displays) {
+ mVRDisplays.Put(display->GetDisplayInfo().GetDisplayID(), display);
}
}
- DispatchVRDisplayInfoUpdate();
+ if (displayInfoChanged || aMustDispatch) {
+ DispatchVRDisplayInfoUpdate();
+ }
+
+ mLastRefreshTime = TimeStamp::Now();
}
void
VRManager::DispatchVRDisplayInfoUpdate()
{
- nsTArray<VRDisplayUpdate> update;
+ nsTArray<VRDisplayInfo> update;
for (auto iter = mVRDisplays.Iter(); !iter.Done(); iter.Next()) {
- gfx::VRHMDInfo* device = iter.UserData();
- update.AppendElement(VRDisplayUpdate(device->GetDeviceInfo(),
- device->GetSensorState()));
+ gfx::VRDisplayHost* display = iter.UserData();
+ update.AppendElement(VRDisplayInfo(display->GetDisplayInfo()));
}
for (auto iter = mVRManagerParents.Iter(); !iter.Done(); iter.Next()) {
- Unused << iter.Get()->GetKey()->SendUpdateDeviceInfo(update);
+ Unused << iter.Get()->GetKey()->SendUpdateDisplayInfo(update);
}
}
-void
-VRManager::DispatchVRDisplaySensorUpdate()
+RefPtr<gfx::VRDisplayHost>
+VRManager::GetDisplay(const uint32_t& aDisplayID)
{
- nsTArray<VRSensorUpdate> update;
-
- for (auto iter = mVRDisplays.Iter(); !iter.Done(); iter.Next()) {
- gfx::VRHMDInfo* device = iter.UserData();
- update.AppendElement(VRSensorUpdate(device->GetDeviceInfo().GetDeviceID(),
- device->GetSensorState()));
- }
- if (update.Length() > 0) {
- for (auto iter = mVRManagerParents.Iter(); !iter.Done(); iter.Next()) {
- Unused << iter.Get()->GetKey()->SendUpdateDeviceSensors(update);
- }
- }
-}
-
-RefPtr<gfx::VRHMDInfo>
-VRManager::GetDevice(const uint32_t& aDeviceID)
-{
- RefPtr<gfx::VRHMDInfo> device;
- if (mVRDisplays.Get(aDeviceID, getter_AddRefs(device))) {
- return device;
+ RefPtr<gfx::VRDisplayHost> display;
+ if (mVRDisplays.Get(aDisplayID, getter_AddRefs(display))) {
+ return display;
}
return nullptr;
}
+void
+VRManager::SubmitFrame(VRLayerParent* aLayer, const int32_t& aInputFrameID,
+ layers::PTextureParent* aTexture, const gfx::Rect& aLeftEyeRect,
+ const gfx::Rect& aRightEyeRect)
+{
+ TextureHost* th = TextureHost::AsTextureHost(aTexture);
+ mLastFrame = th;
+ RefPtr<VRDisplayHost> display = GetDisplay(aLayer->GetDisplayID());
+ if (display) {
+ display->SubmitFrame(aLayer, aInputFrameID, aTexture, aLeftEyeRect, aRightEyeRect);
+ }
+}
+
} // namespace gfx
} // namespace mozilla
--- a/gfx/vr/VRManager.h
+++ b/gfx/vr/VRManager.h
@@ -4,61 +4,72 @@
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef GFX_VR_MANAGER_H
#define GFX_VR_MANAGER_H
#include "nsRefPtrHashtable.h"
#include "nsTArray.h"
#include "nsTHashtable.h"
+#include "nsDataHashtable.h"
#include "mozilla/TimeStamp.h"
#include "gfxVR.h"
namespace mozilla {
+namespace layers {
+class TextureHost;
+}
namespace gfx {
+class VRLayerParent;
class VRManagerParent;
-class VRHMDInfo;
+class VRDisplayHost;
class VRManager
{
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(mozilla::gfx::VRManager)
public:
static void ManagerInit();
static VRManager* Get();
void AddVRManagerParent(VRManagerParent* aVRManagerParent);
void RemoveVRManagerParent(VRManagerParent* aVRManagerParent);
void NotifyVsync(const TimeStamp& aVsyncTimestamp);
- void RefreshVRDisplays();
- RefPtr<gfx::VRHMDInfo> GetDevice(const uint32_t& aDeviceID);
+ void NotifyVRVsync(const uint32_t& aDisplayID);
+ void RefreshVRDisplays(bool aMustDispatch = false);
+ RefPtr<gfx::VRDisplayHost> GetDisplay(const uint32_t& aDisplayID);
+
+ void SubmitFrame(VRLayerParent* aLayer, const int32_t& aInputFrameID,
+ layers::PTextureParent* aTexture, const gfx::Rect& aLeftEyeRect,
+ const gfx::Rect& aRightEyeRect);
protected:
VRManager();
~VRManager();
private:
+ RefPtr<layers::TextureHost> mLastFrame;
void Init();
void Destroy();
void DispatchVRDisplayInfoUpdate();
- void DispatchVRDisplaySensorUpdate();
typedef nsTHashtable<nsRefPtrHashKey<VRManagerParent>> VRManagerParentSet;
VRManagerParentSet mVRManagerParents;
- typedef nsTArray<RefPtr<VRHMDManager>> VRHMDManagerArray;
- VRHMDManagerArray mManagers;
+ typedef nsTArray<RefPtr<VRDisplayManager>> VRDisplayManagerArray;
+ VRDisplayManagerArray mManagers;
- typedef nsRefPtrHashtable<nsUint32HashKey, gfx::VRHMDInfo> VRHMDInfoHashMap;
- VRHMDInfoHashMap mVRDisplays;
+ typedef nsRefPtrHashtable<nsUint32HashKey, gfx::VRDisplayHost> VRDisplayHostHashMap;
+ VRDisplayHostHashMap mVRDisplays;
Atomic<bool> mInitialized;
+ TimeStamp mLastRefreshTime;
};
} // namespace gfx
} // namespace mozilla
#endif // GFX_VR_MANAGER_H
--- a/gfx/vr/gfxVR.cpp
+++ b/gfx/vr/gfxVR.cpp
@@ -1,66 +1,30 @@
/* -*- Mode: C++; tab-width: 20; indent-tabs-mode: nil; c-basic-offset: 2 -*-
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include <math.h>
-#include "prlink.h"
-#include "prmem.h"
-#include "prenv.h"
-#include "nsString.h"
-
-#include "gfxPrefs.h"
#include "gfxVR.h"
-#if defined(XP_WIN)
-#include "gfxVROculus.h"
-#endif
-#if defined(XP_WIN) || defined(XP_MACOSX) || defined(XP_LINUX)
-#include "gfxVROSVR.h"
-#endif
-
-#include "mozilla/unused.h"
-#include "mozilla/layers/Compositor.h"
-#include "mozilla/layers/TextureHost.h"
#ifndef M_PI
# define M_PI 3.14159265358979323846
#endif
using namespace mozilla;
using namespace mozilla::gfx;
-Atomic<uint32_t> VRHMDManager::sDeviceBase(0);
-
-VRHMDInfo::VRHMDInfo(VRHMDType aType)
-{
- MOZ_COUNT_CTOR(VRHMDInfo);
- mDeviceInfo.mType = aType;
- mDeviceInfo.mDeviceID = VRHMDManager::AllocateDeviceID();
-}
-
-VRHMDInfo::~VRHMDInfo()
-{
- MOZ_COUNT_DTOR(VRHMDInfo);
-}
+Atomic<uint32_t> VRDisplayManager::sDisplayBase(0);
/* static */ uint32_t
-VRHMDManager::AllocateDeviceID()
+VRDisplayManager::AllocateDisplayID()
{
- return ++sDeviceBase;
-}
-
-VRHMDRenderingSupport::RenderTargetSet::RenderTargetSet()
-{
-}
-
-VRHMDRenderingSupport::RenderTargetSet::~RenderTargetSet()
-{
+ return ++sDisplayBase;
}
Matrix4x4
VRFieldOfView::ConstructProjectionMatrix(float zNear, float zFar, bool rightHanded)
{
float upTan = tan(upDegrees * M_PI / 180.0);
float downTan = tan(downDegrees * M_PI / 180.0);
float leftTan = tan(leftDegrees * M_PI / 180.0);
--- a/gfx/vr/gfxVR.h
+++ b/gfx/vr/gfxVR.h
@@ -5,35 +5,34 @@
#ifndef GFX_VR_H
#define GFX_VR_H
#include "nsTArray.h"
#include "nsString.h"
#include "nsCOMPtr.h"
#include "mozilla/RefPtr.h"
-
#include "mozilla/gfx/2D.h"
#include "mozilla/Atomics.h"
#include "mozilla/EnumeratedArray.h"
#include "mozilla/TimeStamp.h"
#include "mozilla/TypedEnumBits.h"
namespace mozilla {
namespace layers {
-class Compositor;
-class CompositingRenderTarget;
+class PTextureParent;
}
+namespace gfx {
+class VRLayerParent;
+class VRDisplayHost;
-namespace gfx {
-
-enum class VRHMDType : uint16_t {
+enum class VRDisplayType : uint16_t {
Oculus,
OSVR,
- NumHMDTypes
+ NumVRDisplayTypes
};
enum class VRDisplayCapabilityFlags : uint16_t {
Cap_None = 0,
/**
* Cap_Position is set if the VRDisplay is capable of tracking its position.
*/
Cap_Position = 1 << 1,
@@ -92,245 +91,95 @@ struct VRFieldOfView {
Matrix4x4 ConstructProjectionMatrix(float zNear, float zFar, bool rightHanded);
double upDegrees;
double rightDegrees;
double downDegrees;
double leftDegrees;
};
-struct VRDistortionVertex {
- float values[12];
-};
-
-struct VRDistortionMesh {
- nsTArray<VRDistortionVertex> mVertices;
- nsTArray<uint16_t> mIndices;
-};
-
-// 12 floats per vertex. Position, tex coordinates
-// for each channel, and 4 generic attributes
-struct VRDistortionConstants {
- float eyeToSourceScaleAndOffset[4];
- float destinationScaleAndOffset[4];
-};
-
struct VRDisplayInfo
{
- VRHMDType GetType() const { return mType; }
- uint32_t GetDeviceID() const { return mDeviceID; }
- const nsCString& GetDeviceName() const { return mDeviceName; }
+ VRDisplayType GetType() const { return mType; }
+ uint32_t GetDisplayID() const { return mDisplayID; }
+ const nsCString& GetDisplayName() const { return mDisplayName; }
VRDisplayCapabilityFlags GetCapabilities() const { return mCapabilityFlags; }
- const VRFieldOfView& GetRecommendedEyeFOV(uint32_t whichEye) const { return mRecommendedEyeFOV[whichEye]; }
- const VRFieldOfView& GetMaximumEyeFOV(uint32_t whichEye) const { return mMaximumEyeFOV[whichEye]; }
const IntSize& SuggestedEyeResolution() const { return mEyeResolution; }
const Point3D& GetEyeTranslation(uint32_t whichEye) const { return mEyeTranslation[whichEye]; }
- const Matrix4x4& GetEyeProjectionMatrix(uint32_t whichEye) const { return mEyeProjectionMatrix[whichEye]; }
const VRFieldOfView& GetEyeFOV(uint32_t whichEye) const { return mEyeFOV[whichEye]; }
+ bool GetIsConnected() const { return mIsConnected; }
+ bool GetIsPresenting() const { return mIsPresenting; }
enum Eye {
Eye_Left,
Eye_Right,
NumEyes
};
- uint32_t mDeviceID;
- VRHMDType mType;
- nsCString mDeviceName;
+ uint32_t mDisplayID;
+ VRDisplayType mType;
+ nsCString mDisplayName;
VRDisplayCapabilityFlags mCapabilityFlags;
- VRFieldOfView mMaximumEyeFOV[VRDisplayInfo::NumEyes];
- VRFieldOfView mRecommendedEyeFOV[VRDisplayInfo::NumEyes];
VRFieldOfView mEyeFOV[VRDisplayInfo::NumEyes];
Point3D mEyeTranslation[VRDisplayInfo::NumEyes];
- Matrix4x4 mEyeProjectionMatrix[VRDisplayInfo::NumEyes];
- /* Suggested resolution for rendering a single eye.
- * Assumption is that left/right rendering will be 2x of this size.
- * XXX fix this for vertical displays
- */
IntSize mEyeResolution;
- IntRect mScreenRect;
-
- bool mIsFakeScreen;
-
-
+ bool mIsConnected;
+ bool mIsPresenting;
bool operator==(const VRDisplayInfo& other) const {
return mType == other.mType &&
- mDeviceID == other.mDeviceID &&
- mDeviceName == other.mDeviceName &&
+ mDisplayID == other.mDisplayID &&
+ mDisplayName == other.mDisplayName &&
mCapabilityFlags == other.mCapabilityFlags &&
mEyeResolution == other.mEyeResolution &&
- mScreenRect == other.mScreenRect &&
- mIsFakeScreen == other.mIsFakeScreen &&
- mMaximumEyeFOV[0] == other.mMaximumEyeFOV[0] &&
- mMaximumEyeFOV[1] == other.mMaximumEyeFOV[1] &&
- mRecommendedEyeFOV[0] == other.mRecommendedEyeFOV[0] &&
- mRecommendedEyeFOV[1] == other.mRecommendedEyeFOV[1] &&
+ mIsConnected == other.mIsConnected &&
+ mIsPresenting == other.mIsPresenting &&
mEyeFOV[0] == other.mEyeFOV[0] &&
mEyeFOV[1] == other.mEyeFOV[1] &&
mEyeTranslation[0] == other.mEyeTranslation[0] &&
- mEyeTranslation[1] == other.mEyeTranslation[1] &&
- mEyeProjectionMatrix[0] == other.mEyeProjectionMatrix[0] &&
- mEyeProjectionMatrix[1] == other.mEyeProjectionMatrix[1];
+ mEyeTranslation[1] == other.mEyeTranslation[1];
}
bool operator!=(const VRDisplayInfo& other) const {
return !(*this == other);
}
};
-
-
struct VRHMDSensorState {
double timestamp;
int32_t inputFrameID;
VRDisplayCapabilityFlags flags;
float orientation[4];
float position[3];
float angularVelocity[3];
float angularAcceleration[3];
float linearVelocity[3];
float linearAcceleration[3];
void Clear() {
memset(this, 0, sizeof(VRHMDSensorState));
}
};
-struct VRSensorUpdate {
- VRSensorUpdate() { }; // Required for ipdl binding
- VRSensorUpdate(uint32_t aDeviceID, const VRHMDSensorState& aSensorState)
- : mDeviceID(aDeviceID)
- , mSensorState(aSensorState) { };
-
- uint32_t mDeviceID;
- VRHMDSensorState mSensorState;
-};
-
-struct VRDisplayUpdate {
- VRDisplayUpdate() { }; // Required for ipdl binding
- VRDisplayUpdate(const VRDisplayInfo& aDeviceInfo,
- const VRHMDSensorState& aSensorState)
- : mDeviceInfo(aDeviceInfo)
- , mSensorState(aSensorState) { };
-
- VRDisplayInfo mDeviceInfo;
- VRHMDSensorState mSensorState;
-};
-
-/* A pure data struct that can be used to see if
- * the configuration of one HMDInfo matches another; for rendering purposes,
- * really asking "can the rendering details of this one be used for the other"
- */
-struct VRHMDConfiguration {
- VRHMDConfiguration() : hmdType(VRHMDType::NumHMDTypes) {}
-
- bool operator==(const VRHMDConfiguration& other) const {
- return hmdType == other.hmdType &&
- value == other.value &&
- fov[0] == other.fov[0] &&
- fov[1] == other.fov[1];
- }
+class VRDisplayManager {
+public:
+ static uint32_t AllocateDisplayID();
- bool operator!=(const VRHMDConfiguration& other) const {
- return hmdType != other.hmdType ||
- value != other.value ||
- fov[0] != other.fov[0] ||
- fov[1] != other.fov[1];
- }
-
- bool IsValid() const {
- return hmdType != VRHMDType::NumHMDTypes;
- }
-
- VRHMDType hmdType;
- uint32_t value;
- VRFieldOfView fov[2];
-};
-
-class VRHMDRenderingSupport {
-public:
- struct RenderTargetSet {
- RenderTargetSet();
-
- NS_INLINE_DECL_REFCOUNTING(RenderTargetSet)
-
- RefPtr<layers::Compositor> compositor;
- IntSize size;
- nsTArray<RefPtr<layers::CompositingRenderTarget>> renderTargets;
-
- virtual already_AddRefed<layers::CompositingRenderTarget> GetNextRenderTarget() = 0;
- protected:
- virtual ~RenderTargetSet();
- };
-
- virtual already_AddRefed<RenderTargetSet> CreateRenderTargetSet(layers::Compositor *aCompositor, const IntSize& aSize) = 0;
- virtual void DestroyRenderTargetSet(RenderTargetSet *aRTSet) = 0;
- virtual void SubmitFrame(RenderTargetSet *aRTSet, int32_t aInputFrameID) = 0;
protected:
- VRHMDRenderingSupport() { }
-};
-
-class VRHMDInfo {
+ static Atomic<uint32_t> sDisplayBase;
public:
- NS_INLINE_DECL_THREADSAFE_REFCOUNTING(VRHMDInfo)
-
- const VRHMDConfiguration& GetConfiguration() const { return mConfiguration; }
- const VRDisplayInfo& GetDeviceInfo() const { return mDeviceInfo; }
-
- /* set the FOV for this HMD unit; this triggers a computation of all the remaining bits. Returns false if it fails */
- virtual bool SetFOV(const VRFieldOfView& aFOVLeft, const VRFieldOfView& aFOVRight,
- double zNear, double zFar) = 0;
-
- virtual bool KeepSensorTracking() = 0;
- virtual void NotifyVsync(const TimeStamp& aVsyncTimestamp) = 0;
- virtual VRHMDSensorState GetSensorState() = 0;
- virtual VRHMDSensorState GetImmediateSensorState() = 0;
-
- virtual void ZeroSensor() = 0;
-
- // if rendering is offloaded
- virtual VRHMDRenderingSupport *GetRenderingSupport() { return nullptr; }
-
- // distortion mesh stuff; we should implement renderingsupport for this
- virtual void FillDistortionConstants(uint32_t whichEye,
- const IntSize& textureSize, // the full size of the texture
- const IntRect& eyeViewport, // the viewport within the texture for the current eye
- const Size& destViewport, // the size of the destination viewport
- const Rect& destRect, // the rectangle within the dest viewport that this should be rendered
- VRDistortionConstants& values) = 0;
-
- const VRDistortionMesh& GetDistortionMesh(uint32_t whichEye) const { return mDistortionMesh[whichEye]; }
-protected:
- explicit VRHMDInfo(VRHMDType aType);
- virtual ~VRHMDInfo();
-
- VRHMDConfiguration mConfiguration;
- VRDisplayInfo mDeviceInfo;
- VRDistortionMesh mDistortionMesh[VRDisplayInfo::NumEyes];
-};
-
-class VRHMDManager {
-public:
- static uint32_t AllocateDeviceID();
-
-protected:
- static Atomic<uint32_t> sDeviceBase;
-
-
-public:
- NS_INLINE_DECL_THREADSAFE_REFCOUNTING(VRHMDManager)
+ NS_INLINE_DECL_THREADSAFE_REFCOUNTING(VRDisplayManager)
virtual bool Init() = 0;
virtual void Destroy() = 0;
- virtual void GetHMDs(nsTArray<RefPtr<VRHMDInfo>>& aHMDResult) = 0;
+ virtual void GetHMDs(nsTArray<RefPtr<VRDisplayHost>>& aHMDResult) = 0;
protected:
- VRHMDManager() { }
- virtual ~VRHMDManager() { }
+ VRDisplayManager() { }
+ virtual ~VRDisplayManager() { }
};
} // namespace gfx
} // namespace mozilla
#endif /* GFX_VR_H */
--- a/gfx/vr/gfxVROSVR.cpp
+++ b/gfx/vr/gfxVROSVR.cpp
@@ -195,138 +195,96 @@ SetFromTanRadians(double left, double ri
mozilla::gfx::VRFieldOfView fovInfo;
fovInfo.leftDegrees = atan(left) * 180.0 / M_PI;
fovInfo.rightDegrees = atan(right) * 180.0 / M_PI;
fovInfo.upDegrees = atan(top) * 180.0 / M_PI;
fovInfo.downDegrees = atan(bottom) * 180.0 / M_PI;
return fovInfo;
}
-HMDInfoOSVR::HMDInfoOSVR(OSVR_ClientContext* context,
+VRDisplayOSVR::VRDisplayOSVR(OSVR_ClientContext* context,
OSVR_ClientInterface* iface,
OSVR_DisplayConfig* display)
- : VRHMDInfo(VRHMDType::OSVR)
+ : VRDisplayHost(VRDisplayType::OSVR)
, m_ctx(context)
, m_iface(iface)
, m_display(display)
{
- MOZ_COUNT_CTOR_INHERITED(HMDInfoOSVR, VRHMDInfo);
+ MOZ_COUNT_CTOR_INHERITED(VRDisplayOSVR, VRDisplayHost);
- mDeviceInfo.mDeviceName.AssignLiteral("OSVR HMD");
- mDeviceInfo.mCapabilityFlags = VRDisplayCapabilityFlags::Cap_None;
- mDeviceInfo.mCapabilityFlags =
+ mDisplayInfo.mIsConnected = true;
+ mDisplayInfo.mDisplayName.AssignLiteral("OSVR HMD");
+ mDisplayInfo.mCapabilityFlags = VRDisplayCapabilityFlags::Cap_None;
+ mDisplayInfo.mCapabilityFlags =
VRDisplayCapabilityFlags::Cap_Orientation | VRDisplayCapabilityFlags::Cap_Position;
- mDeviceInfo.mCapabilityFlags |= VRDisplayCapabilityFlags::Cap_External;
- mDeviceInfo.mCapabilityFlags |= VRDisplayCapabilityFlags::Cap_Present;
+ mDisplayInfo.mCapabilityFlags |= VRDisplayCapabilityFlags::Cap_External;
+ mDisplayInfo.mCapabilityFlags |= VRDisplayCapabilityFlags::Cap_Present;
// XXX OSVR display topology allows for more than one viewer
// will assume only one viewer for now (most likely stay that way)
OSVR_EyeCount numEyes;
osvr_ClientGetNumEyesForViewer(*m_display, 0, &numEyes);
for (uint8_t eye = 0; eye < numEyes; eye++) {
double left, right, bottom, top;
// XXX for now there is only one surface per eye
osvr_ClientGetViewerEyeSurfaceProjectionClippingPlanes(
*m_display, 0, eye, 0, &left, &right, &bottom, &top);
- mDeviceInfo.mRecommendedEyeFOV[eye] = mDeviceInfo.mMaximumEyeFOV[eye] =
+ mDisplayInfo.mEyeFOV[eye] =
SetFromTanRadians(-left, right, -bottom, top);
}
// XXX Assuming there is only one display input for now
// however, it's possible to have more than one (dSight with 2 HDMI inputs)
OSVR_DisplayDimension width, height;
osvr_ClientGetDisplayDimensions(*m_display, 0, &width, &height);
- SetFOV(mDeviceInfo.mRecommendedEyeFOV[VRDisplayInfo::Eye_Left],
- mDeviceInfo.mRecommendedEyeFOV[VRDisplayInfo::Eye_Right], 0.01,
- 10000.0);
-}
-void
-HMDInfoOSVR::Destroy()
-{
- // destroy non-owning pointers
- m_ctx = nullptr;
- m_iface = nullptr;
- m_display = nullptr;
-}
-
-bool
-HMDInfoOSVR::SetFOV(const gfx::VRFieldOfView& aFOVLeft,
- const gfx::VRFieldOfView& aFOVRight, double zNear,
- double zFar)
-{
- OSVR_EyeCount numEyes;
- osvr_ClientGetNumEyesForViewer(*m_display, 0, &numEyes);
for (uint8_t eye = 0; eye < numEyes; eye++) {
- mDeviceInfo.mEyeFOV[eye] = eye == 0 ? aFOVLeft : aFOVRight;
-
OSVR_ViewportDimension l, b, w, h;
osvr_ClientGetRelativeViewportForViewerEyeSurface(*m_display, 0, eye, 0, &l,
&b, &w, &h);
- mDeviceInfo.mEyeResolution.width = w;
- mDeviceInfo.mEyeResolution.height = h;
+ mDisplayInfo.mEyeResolution.width = w;
+ mDisplayInfo.mEyeResolution.height = h;
OSVR_Pose3 eyePose;
// Viewer eye pose may not be immediately available, update client context until we get it
OSVR_ReturnCode ret =
osvr_ClientGetViewerEyePose(*m_display, 0, eye, &eyePose);
while (ret != OSVR_RETURN_SUCCESS) {
osvr_ClientUpdate(*m_ctx);
ret = osvr_ClientGetViewerEyePose(*m_display, 0, eye, &eyePose);
}
- mDeviceInfo.mEyeTranslation[eye].x = eyePose.translation.data[0];
- mDeviceInfo.mEyeTranslation[eye].y = eyePose.translation.data[1];
- mDeviceInfo.mEyeTranslation[eye].z = eyePose.translation.data[2];
-
- mDeviceInfo.mEyeProjectionMatrix[eye] =
- mDeviceInfo.mEyeFOV[eye].ConstructProjectionMatrix(zNear, zFar, true);
+ mDisplayInfo.mEyeTranslation[eye].x = eyePose.translation.data[0];
+ mDisplayInfo.mEyeTranslation[eye].y = eyePose.translation.data[1];
+ mDisplayInfo.mEyeTranslation[eye].z = eyePose.translation.data[2];
}
-
- mConfiguration.hmdType = mDeviceInfo.mType;
- mConfiguration.value = 0;
- mConfiguration.fov[VRDisplayInfo::Eye_Left] = aFOVLeft;
- mConfiguration.fov[VRDisplayInfo::Eye_Right] = aFOVRight;
-
- return true;
}
void
-HMDInfoOSVR::FillDistortionConstants(
- uint32_t whichEye, const IntSize& textureSize, const IntRect& eyeViewport,
- const Size& destViewport, const Rect& destRect, VRDistortionConstants& values)
+VRDisplayOSVR::Destroy()
{
-}
-
-bool
-HMDInfoOSVR::KeepSensorTracking()
-{
- // Tracking is enabled if the device supports tracking and in that
- // case is enabled automatically unless you cannot connect to it
- return true;
+ // destroy non-owning pointers
+ m_ctx = nullptr;
+ m_iface = nullptr;
+ m_display = nullptr;
}
void
-HMDInfoOSVR::NotifyVsync(const mozilla::TimeStamp& aVsyncTimestamp)
-{
-}
-
-void
-HMDInfoOSVR::ZeroSensor()
+VRDisplayOSVR::ZeroSensor()
{
// recenter pose aka reset yaw
osvr_ClientSetRoomRotationUsingHead(*m_ctx);
}
VRHMDSensorState
-HMDInfoOSVR::GetSensorState()
+VRDisplayOSVR::GetSensorState()
{
//update client context before anything
//this usually goes into app's mainloop
osvr_ClientUpdate(*m_ctx);
VRHMDSensorState result;
OSVR_TimeValue timestamp;
@@ -355,118 +313,64 @@ HMDInfoOSVR::GetSensorState()
result.position[1] = position.data[1];
result.position[2] = position.data[2];
}
return result;
}
VRHMDSensorState
-HMDInfoOSVR::GetImmediateSensorState()
+VRDisplayOSVR::GetImmediateSensorState()
{
return GetSensorState();
}
-struct RenderTargetSetOSVR : public VRHMDRenderingSupport::RenderTargetSet
-{
- RenderTargetSetOSVR(Compositor* aCompositor, const IntSize& aSize,
- HMDInfoOSVR* aHMD)
- {
-
- size = aSize;
- mCompositorBackend = aCompositor->GetBackendType();
- currentRenderTarget = 0;
- const uint32_t numTargets = 2;
- renderTargets.SetLength(numTargets);
- for (uint32_t i = 0; i < numTargets; ++i) {
- renderTargets[i] = aCompositor->CreateRenderTarget(
- IntRect(0, 0, aSize.width, aSize.height), INIT_MODE_NONE);
- }
- }
-
- bool Valid() const
- {
- for (uint32_t i = 0; i < renderTargets.Length(); ++i) {
- if (!renderTargets[i])
- return false;
- }
- return true;
- }
+#if defined(XP_WIN)
- already_AddRefed<CompositingRenderTarget> GetNextRenderTarget() override
- {
- currentRenderTarget = (currentRenderTarget + 1) % renderTargets.Length();
- renderTargets[currentRenderTarget]->ClearOnBind();
- RefPtr<CompositingRenderTarget> rt = renderTargets[currentRenderTarget];
- return rt.forget();
- }
-
- void Destroy() {}
-
- ~RenderTargetSetOSVR() {}
-
- int currentRenderTarget;
- LayersBackend mCompositorBackend;
-};
+void
+VRDisplayOSVR::SubmitFrame(TextureSourceD3D11* aSource,
+ const IntSize& aSize,
+ const VRHMDSensorState& aSensorState,
+ const gfx::Rect& aLeftEyeRect,
+ const gfx::Rect& aRightEyeRect)
+{
+ // XXX Add code to submit frame
+}
-already_AddRefed<VRHMDRenderingSupport::RenderTargetSet>
-HMDInfoOSVR::CreateRenderTargetSet(layers::Compositor* aCompositor,
- const IntSize& aSize)
-{
-#ifdef XP_WIN
- if (aCompositor->GetBackendType() == layers::LayersBackend::LAYERS_D3D11) {
- layers::CompositorD3D11* comp11 =
- static_cast<layers::CompositorD3D11*>(aCompositor);
-
- RefPtr<RenderTargetSetOSVR> rts =
- new RenderTargetSetOSVR(comp11, aSize, this);
- if (!rts->Valid()) {
- return nullptr;
- }
-
- return rts.forget();
- }
#endif
- if (aCompositor->GetBackendType() == layers::LayersBackend::LAYERS_OPENGL) {
- }
-
- return nullptr;
+void
+VRDisplayOSVR::StartPresentation()
+{
+ // XXX Add code to start VR Presentation
}
void
-HMDInfoOSVR::DestroyRenderTargetSet(RenderTargetSet* aRTSet)
+VRDisplayOSVR::StopPresentation()
{
- RenderTargetSetOSVR* rts = static_cast<RenderTargetSetOSVR*>(aRTSet);
- rts->Destroy();
+ // XXX Add code to end VR Presentation
}
-void
-HMDInfoOSVR::SubmitFrame(RenderTargetSet* aRTSet, int32_t aInputFrameID)
-{
- // XXX, add renderManager code to submit frame
-}
-
-already_AddRefed<VRHMDManagerOSVR>
-VRHMDManagerOSVR::Create()
+already_AddRefed<VRDisplayManagerOSVR>
+VRDisplayManagerOSVR::Create()
{
MOZ_ASSERT(NS_IsMainThread());
if (!gfxPrefs::VREnabled() || !gfxPrefs::VROSVREnabled()) {
return nullptr;
}
if (!LoadOSVRRuntime()) {
return nullptr;
}
- RefPtr<VRHMDManagerOSVR> manager = new VRHMDManagerOSVR();
+ RefPtr<VRDisplayManagerOSVR> manager = new VRDisplayManagerOSVR();
return manager.forget();
}
void
-VRHMDManagerOSVR::CheckOSVRStatus()
+VRDisplayManagerOSVR::CheckOSVRStatus()
{
if (mOSVRInitialized) {
return;
}
// client context must be initialized first
InitializeClientContext();
@@ -480,17 +384,17 @@ VRHMDManagerOSVR::CheckOSVRStatus()
// OSVR is fully initialized now
if (mClientContextInitialized && mDisplayConfigInitialized &&
mInterfaceInitialized) {
mOSVRInitialized = true;
}
}
void
-VRHMDManagerOSVR::InitializeClientContext()
+VRDisplayManagerOSVR::InitializeClientContext()
{
// already initialized
if (mClientContextInitialized) {
return;
}
// first time creating
if (!m_ctx) {
@@ -509,34 +413,34 @@ VRHMDManagerOSVR::InitializeClientContex
osvr_ClientUpdate(m_ctx);
if (OSVR_RETURN_SUCCESS == osvr_ClientCheckStatus(m_ctx)) {
mClientContextInitialized = true;
}
}
}
void
-VRHMDManagerOSVR::InitializeInterface()
+VRDisplayManagerOSVR::InitializeInterface()
{
// already initialized
if (mInterfaceInitialized) {
return;
}
//Client context must be initialized before getting interface
if (mClientContextInitialized) {
// m_iface will remain nullptr if no interface is returned
if (OSVR_RETURN_SUCCESS ==
osvr_ClientGetInterface(m_ctx, "/me/head", &m_iface)) {
mInterfaceInitialized = true;
}
}
}
void
-VRHMDManagerOSVR::InitializeDisplay()
+VRDisplayManagerOSVR::InitializeDisplay()
{
// display is fully configured
if (mDisplayConfigInitialized) {
return;
}
//Client context must be initialized before getting interface
if (mClientContextInitialized) {
@@ -561,17 +465,17 @@ VRHMDManagerOSVR::InitializeDisplay()
if (OSVR_RETURN_SUCCESS == osvr_ClientCheckDisplayStartup(m_display)) {
mDisplayConfigInitialized = true;
}
}
}
}
bool
-VRHMDManagerOSVR::Init()
+VRDisplayManagerOSVR::Init()
{
// OSVR server should be running in the background
// It would load plugins and take care of detecting HMDs
if (!mOSVRInitialized) {
nsIThread* thread = nullptr;
NS_GetCurrentThread(&thread);
mOSVRThread = already_AddRefed<nsIThread>(thread);
@@ -585,17 +489,17 @@ VRHMDManagerOSVR::Init()
// verify all components are initialized
CheckOSVRStatus();
}
return mOSVRInitialized;
}
void
-VRHMDManagerOSVR::Destroy()
+VRDisplayManagerOSVR::Destroy()
{
if (mOSVRInitialized) {
MOZ_ASSERT(NS_GetCurrentThread() == mOSVRThread);
mOSVRThread = nullptr;
mHMDInfo = nullptr;
mOSVRInitialized = false;
}
// client context may not have been initialized
@@ -603,23 +507,23 @@ VRHMDManagerOSVR::Destroy()
osvr_ClientFreeDisplay(m_display);
}
// osvr checks that m_ctx or m_iface are not null
osvr_ClientFreeInterface(m_ctx, m_iface);
osvr_ClientShutdown(m_ctx);
}
void
-VRHMDManagerOSVR::GetHMDs(nsTArray<RefPtr<VRHMDInfo>>& aHMDResult)
+VRDisplayManagerOSVR::GetHMDs(nsTArray<RefPtr<VRDisplayHost>>& aHMDResult)
{
// make sure context, interface and display are initialized
CheckOSVRStatus();
if (!mOSVRInitialized) {
return;
}
- mHMDInfo = new HMDInfoOSVR(&m_ctx, &m_iface, &m_display);
+ mHMDInfo = new VRDisplayOSVR(&m_ctx, &m_iface, &m_display);
if (mHMDInfo) {
aHMDResult.AppendElement(mHMDInfo);
}
}
--- a/gfx/vr/gfxVROSVR.h
+++ b/gfx/vr/gfxVROSVR.h
@@ -8,100 +8,85 @@
#include "nsTArray.h"
#include "mozilla/RefPtr.h"
#include "nsThreadUtils.h"
#include "mozilla/gfx/2D.h"
#include "mozilla/EnumeratedArray.h"
-#include "gfxVR.h"
+#include "VRDisplayHost.h"
#include <osvr/ClientKit/ClientKitC.h>
#include <osvr/ClientKit/DisplayC.h>
namespace mozilla {
namespace gfx {
namespace impl {
-class HMDInfoOSVR : public VRHMDInfo, public VRHMDRenderingSupport
+class VRDisplayOSVR : public VRDisplayHost
{
public:
- explicit HMDInfoOSVR(OSVR_ClientContext* context, OSVR_ClientInterface* iface,
- OSVR_DisplayConfig* display);
-
- bool SetFOV(const VRFieldOfView& aFOVLeft, const VRFieldOfView& aFOVRight,
- double zNear, double zFar) override;
-
VRHMDSensorState GetSensorState() override;
VRHMDSensorState GetImmediateSensorState() override;
- bool KeepSensorTracking() override;
void ZeroSensor() override;
- void NotifyVsync(const TimeStamp& aVsyncTimestamp) override;
-
- void FillDistortionConstants(uint32_t whichEye, const IntSize& textureSize,
- const IntRect& eyeViewport,
- const Size& destViewport, const Rect& destRect,
- VRDistortionConstants& values) override;
-
- VRHMDRenderingSupport* GetRenderingSupport() override { return this; }
-
- void Destroy();
-
- /* VRHMDRenderingSupport */
- already_AddRefed<RenderTargetSet> CreateRenderTargetSet(
- layers::Compositor* aCompositor, const IntSize& aSize) override;
- void DestroyRenderTargetSet(RenderTargetSet* aRTSet) override;
- void SubmitFrame(RenderTargetSet* aRTSet, int32_t aInputFrameID) override;
protected:
- virtual ~HMDInfoOSVR()
+ virtual void StartPresentation() override;
+ virtual void StopPresentation() override;
+
+#if defined(XP_WIN)
+ virtual void SubmitFrame(TextureSourceD3D11* aSource,
+ const IntSize& aSize,
+ const VRHMDSensorState& aSensorState,
+ const gfx::Rect& aLeftEyeRect,
+ const gfx::Rect& aRightEyeRect) override;
+#endif
+
+public:
+ explicit VRDisplayOSVR(OSVR_ClientContext* context,
+ OSVR_ClientInterface* iface,
+ OSVR_DisplayConfig* display);
+
+protected:
+ virtual ~VRDisplayOSVR()
{
Destroy();
- MOZ_COUNT_DTOR_INHERITED(HMDInfoOSVR, VRHMDInfo);
+ MOZ_COUNT_DTOR_INHERITED(VRDisplayOSVR, VRDisplayHost);
}
-
- // must match the size of VRDistortionVertex
- struct DistortionVertex
- {
- float pos[2];
- float texR[2];
- float texG[2];
- float texB[2];
- float genericAttribs[4];
- };
+ void Destroy();
OSVR_ClientContext* m_ctx;
OSVR_ClientInterface* m_iface;
OSVR_DisplayConfig* m_display;
};
} // namespace impl
-class VRHMDManagerOSVR : public VRHMDManager
+class VRDisplayManagerOSVR : public VRDisplayManager
{
public:
- static already_AddRefed<VRHMDManagerOSVR> Create();
+ static already_AddRefed<VRDisplayManagerOSVR> Create();
virtual bool Init() override;
virtual void Destroy() override;
- virtual void GetHMDs(nsTArray<RefPtr<VRHMDInfo>>& aHMDResult) override;
+ virtual void GetHMDs(nsTArray<RefPtr<VRDisplayHost>>& aHMDResult) override;
protected:
- VRHMDManagerOSVR()
+ VRDisplayManagerOSVR()
: mOSVRInitialized(false)
, mClientContextInitialized(false)
, mDisplayConfigInitialized(false)
, mInterfaceInitialized(false)
, m_ctx(nullptr)
, m_iface(nullptr)
, m_display(nullptr)
{
}
- RefPtr<impl::HMDInfoOSVR> mHMDInfo;
+ RefPtr<impl::VRDisplayOSVR> mHMDInfo;
bool mOSVRInitialized;
bool mClientContextInitialized;
bool mDisplayConfigInitialized;
bool mInterfaceInitialized;
RefPtr<nsIThread> mOSVRThread;
OSVR_ClientContext m_ctx;
OSVR_ClientInterface m_iface;
--- a/gfx/vr/gfxVROculus.cpp
+++ b/gfx/vr/gfxVROculus.cpp
@@ -1,37 +1,59 @@
/* -*- Mode: C++; tab-width: 20; indent-tabs-mode: nil; c-basic-offset: 2 -*-
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+#ifndef XP_WIN
+#error "Oculus 1.3 runtime support only available for Windows"
+#endif
+
#include <math.h>
+
#include "prlink.h"
#include "prmem.h"
#include "prenv.h"
#include "gfxPrefs.h"
#include "nsString.h"
+#include "mozilla/DebugOnly.h"
#include "mozilla/Preferences.h"
#include "mozilla/TimeStamp.h"
+#include "mozilla/gfx/DeviceManagerD3D11.h"
+#include "ipc/VRLayerParent.h"
#include "mozilla/gfx/Quaternion.h"
-#ifdef XP_WIN
+#include <d3d11.h>
#include "../layers/d3d11/CompositorD3D11.h"
-#endif
+#include "mozilla/layers/TextureD3D11.h"
#include "gfxVROculus.h"
+/** XXX The DX11 objects and quad blitting could be encapsulated
+ * into a separate object if either Oculus starts supporting
+ * non-Windows platforms or the blit is needed by other HMD\
+ * drivers.
+ * Alternately, we could remove the extra blit for
+ * Oculus as well with some more refactoring.
+ */
+
+// See CompositorD3D11Shaders.h
+struct ShaderBytes { const void* mData; size_t mLength; };
+extern ShaderBytes sRGBShader;
+extern ShaderBytes sLayerQuadVS;
#ifndef M_PI
# define M_PI 3.14159265358979323846
#endif
+using namespace mozilla;
using namespace mozilla::gfx;
using namespace mozilla::gfx::impl;
+using namespace mozilla::layers;
namespace {
#ifdef OVR_CAPI_LIMITED_MOZILLA
static pfn_ovr_Initialize ovr_Initialize = nullptr;
static pfn_ovr_Shutdown ovr_Shutdown = nullptr;
static pfn_ovr_GetLastErrorInfo ovr_GetLastErrorInfo = nullptr;
static pfn_ovr_GetVersionString ovr_GetVersionString = nullptr;
@@ -264,179 +286,145 @@ InitializeOculusCAPI()
static bool InitializeOculusCAPI()
{
return true;
}
#endif
ovrFovPort
-ToFovPort(const gfx::VRFieldOfView& aFOV)
+ToFovPort(const VRFieldOfView& aFOV)
{
ovrFovPort fovPort;
fovPort.LeftTan = tan(aFOV.leftDegrees * M_PI / 180.0);
fovPort.RightTan = tan(aFOV.rightDegrees * M_PI / 180.0);
fovPort.UpTan = tan(aFOV.upDegrees * M_PI / 180.0);
fovPort.DownTan = tan(aFOV.downDegrees * M_PI / 180.0);
return fovPort;
}
-gfx::VRFieldOfView
+VRFieldOfView
FromFovPort(const ovrFovPort& aFOV)
{
- gfx::VRFieldOfView fovInfo;
+ VRFieldOfView fovInfo;
fovInfo.leftDegrees = atan(aFOV.LeftTan) * 180.0 / M_PI;
fovInfo.rightDegrees = atan(aFOV.RightTan) * 180.0 / M_PI;
fovInfo.upDegrees = atan(aFOV.UpTan) * 180.0 / M_PI;
fovInfo.downDegrees = atan(aFOV.DownTan) * 180.0 / M_PI;
return fovInfo;
}
} // namespace
-HMDInfoOculus::HMDInfoOculus(ovrSession aSession)
- : VRHMDInfo(VRHMDType::Oculus)
+VRDisplayOculus::VRDisplayOculus(ovrSession aSession)
+ : VRDisplayHost(VRDisplayType::Oculus)
, mSession(aSession)
- , mInputFrameID(0)
+ , mTextureSet(nullptr)
+ , mQuadVS(nullptr)
+ , mQuadPS(nullptr)
+ , mVSConstantBuffer(nullptr)
+ , mPSConstantBuffer(nullptr)
+ , mVertexBuffer(nullptr)
+ , mInputLayout(nullptr)
+ , mLinearSamplerState(nullptr)
+ , mIsPresenting(false)
{
- MOZ_ASSERT(sizeof(HMDInfoOculus::DistortionVertex) == sizeof(VRDistortionVertex),
- "HMDInfoOculus::DistortionVertex must match the size of VRDistortionVertex");
+ MOZ_COUNT_CTOR_INHERITED(VRDisplayOculus, VRDisplayHost);
- MOZ_COUNT_CTOR_INHERITED(HMDInfoOculus, VRHMDInfo);
-
- mDeviceInfo.mDeviceName.AssignLiteral("Oculus VR HMD");
+ mDisplayInfo.mDisplayName.AssignLiteral("Oculus VR HMD");
+ mDisplayInfo.mIsConnected = true;
mDesc = ovr_GetHmdDesc(aSession);
- mDeviceInfo.mCapabilityFlags = VRDisplayCapabilityFlags::Cap_None;
+ mDisplayInfo.mCapabilityFlags = VRDisplayCapabilityFlags::Cap_None;
if (mDesc.AvailableTrackingCaps & ovrTrackingCap_Orientation) {
- mDeviceInfo.mCapabilityFlags |= VRDisplayCapabilityFlags::Cap_Orientation;
+ mDisplayInfo.mCapabilityFlags |= VRDisplayCapabilityFlags::Cap_Orientation;
}
if (mDesc.AvailableTrackingCaps & ovrTrackingCap_Position) {
- mDeviceInfo.mCapabilityFlags |= VRDisplayCapabilityFlags::Cap_Position;
+ mDisplayInfo.mCapabilityFlags |= VRDisplayCapabilityFlags::Cap_Position;
}
- mDeviceInfo.mCapabilityFlags |= VRDisplayCapabilityFlags::Cap_External;
- mDeviceInfo.mCapabilityFlags |= VRDisplayCapabilityFlags::Cap_Present;
+ mDisplayInfo.mCapabilityFlags |= VRDisplayCapabilityFlags::Cap_External;
+ mDisplayInfo.mCapabilityFlags |= VRDisplayCapabilityFlags::Cap_Present;
- mDeviceInfo.mRecommendedEyeFOV[VRDisplayInfo::Eye_Left] = FromFovPort(mDesc.DefaultEyeFov[ovrEye_Left]);
- mDeviceInfo.mRecommendedEyeFOV[VRDisplayInfo::Eye_Right] = FromFovPort(mDesc.DefaultEyeFov[ovrEye_Right]);
+ mFOVPort[VRDisplayInfo::Eye_Left] = mDesc.DefaultEyeFov[ovrEye_Left];
+ mFOVPort[VRDisplayInfo::Eye_Right] = mDesc.DefaultEyeFov[ovrEye_Right];
- mDeviceInfo.mMaximumEyeFOV[VRDisplayInfo::Eye_Left] = FromFovPort(mDesc.MaxEyeFov[ovrEye_Left]);
- mDeviceInfo.mMaximumEyeFOV[VRDisplayInfo::Eye_Right] = FromFovPort(mDesc.MaxEyeFov[ovrEye_Right]);
+ mDisplayInfo.mEyeFOV[VRDisplayInfo::Eye_Left] = FromFovPort(mFOVPort[VRDisplayInfo::Eye_Left]);
+ mDisplayInfo.mEyeFOV[VRDisplayInfo::Eye_Right] = FromFovPort(mFOVPort[VRDisplayInfo::Eye_Right]);
uint32_t w = mDesc.Resolution.w;
uint32_t h = mDesc.Resolution.h;
- mDeviceInfo.mScreenRect.x = 0;
- mDeviceInfo.mScreenRect.y = 0;
- mDeviceInfo.mScreenRect.width = std::max(w, h);
- mDeviceInfo.mScreenRect.height = std::min(w, h);
- mDeviceInfo.mIsFakeScreen = true;
+
+ float pixelsPerDisplayPixel = 1.0;
+ ovrSizei texSize[2];
+
+ // get eye parameters and create the mesh
+ for (uint32_t eye = 0; eye < VRDisplayInfo::NumEyes; eye++) {
+
+ ovrEyeRenderDesc renderDesc = ovr_GetRenderDesc(mSession, (ovrEyeType)eye, mFOVPort[eye]);
+
+ // As of Oculus 0.6.0, the HmdToEyeOffset values are correct and don't need to be negated.
+ mDisplayInfo.mEyeTranslation[eye] = Point3D(renderDesc.HmdToEyeOffset.x, renderDesc.HmdToEyeOffset.y, renderDesc.HmdToEyeOffset.z);
- SetFOV(mDeviceInfo.mRecommendedEyeFOV[VRDisplayInfo::Eye_Left], mDeviceInfo.mRecommendedEyeFOV[VRDisplayInfo::Eye_Right], 0.01, 10000.0);
+ texSize[eye] = ovr_GetFovTextureSize(mSession, (ovrEyeType)eye, mFOVPort[eye], pixelsPerDisplayPixel);
+ }
- for (int i = 0; i < kMaxLatencyFrames; i++) {
- mLastSensorState[i].Clear();
- }
+ // take the max of both for eye resolution
+ mDisplayInfo.mEyeResolution.width = std::max(texSize[VRDisplayInfo::Eye_Left].w, texSize[VRDisplayInfo::Eye_Right].w);
+ mDisplayInfo.mEyeResolution.height = std::max(texSize[VRDisplayInfo::Eye_Left].h, texSize[VRDisplayInfo::Eye_Right].h);
+}
+
+VRDisplayOculus::~VRDisplayOculus() {
+ StopPresentation();
+ Destroy();
+ MOZ_COUNT_DTOR_INHERITED(VRDisplayOculus, VRDisplayHost);
}
void
-HMDInfoOculus::Destroy()
+VRDisplayOculus::Destroy()
{
if (mSession) {
ovr_Destroy(mSession);
mSession = nullptr;
}
}
-bool
-HMDInfoOculus::SetFOV(const gfx::VRFieldOfView& aFOVLeft, const gfx::VRFieldOfView& aFOVRight,
- double zNear, double zFar)
-{
- float pixelsPerDisplayPixel = 1.0;
- ovrSizei texSize[2];
-
- // get eye parameters and create the mesh
- for (uint32_t eye = 0; eye < VRDisplayInfo::NumEyes; eye++) {
- mDeviceInfo.mEyeFOV[eye] = eye == 0 ? aFOVLeft : aFOVRight;
- mFOVPort[eye] = ToFovPort(mDeviceInfo.mEyeFOV[eye]);
-
- ovrEyeRenderDesc renderDesc = ovr_GetRenderDesc(mSession, (ovrEyeType)eye, mFOVPort[eye]);
-
- // As of Oculus 0.6.0, the HmdToEyeOffset values are correct and don't need to be negated.
- mDeviceInfo.mEyeTranslation[eye] = Point3D(renderDesc.HmdToEyeOffset.x, renderDesc.HmdToEyeOffset.y, renderDesc.HmdToEyeOffset.z);
-
- // note that we are using a right-handed coordinate system here, to match CSS
- mDeviceInfo.mEyeProjectionMatrix[eye] = mDeviceInfo.mEyeFOV[eye].ConstructProjectionMatrix(zNear, zFar, true);
-
- texSize[eye] = ovr_GetFovTextureSize(mSession, (ovrEyeType)eye, mFOVPort[eye], pixelsPerDisplayPixel);
- }
-
- // take the max of both for eye resolution
- mDeviceInfo.mEyeResolution.width = std::max(texSize[VRDisplayInfo::Eye_Left].w, texSize[VRDisplayInfo::Eye_Right].w);
- mDeviceInfo.mEyeResolution.height = std::max(texSize[VRDisplayInfo::Eye_Left].h, texSize[VRDisplayInfo::Eye_Right].h);
-
- mConfiguration.hmdType = mDeviceInfo.mType;
- mConfiguration.value = 0;
- mConfiguration.fov[0] = aFOVLeft;
- mConfiguration.fov[1] = aFOVRight;
-
- return true;
-}
-
void
-HMDInfoOculus::FillDistortionConstants(uint32_t whichEye,
- const IntSize& textureSize,
- const IntRect& eyeViewport,
- const Size& destViewport,
- const Rect& destRect,
- VRDistortionConstants& values)
-{
-}
-
-bool
-HMDInfoOculus::KeepSensorTracking()
-{
- // Oculus PC SDK 0.8 and newer enable tracking by default
- return true;
-}
-
-void
-HMDInfoOculus::NotifyVsync(const mozilla::TimeStamp& aVsyncTimestamp)
-{
- ++mInputFrameID;
-}
-
-void
-HMDInfoOculus::ZeroSensor()
+VRDisplayOculus::ZeroSensor()
{
ovr_RecenterTrackingOrigin(mSession);
}
VRHMDSensorState
-HMDInfoOculus::GetSensorState()
+VRDisplayOculus::GetSensorState()
{
+ mInputFrameID++;
+
VRHMDSensorState result;
- double frameTiming = 0.0f;
+ double frameDelta = 0.0f;
if (gfxPrefs::VRPosePredictionEnabled()) {
- frameTiming = ovr_GetPredictedDisplayTime(mSession, mInputFrameID);
+ // XXX We might need to call ovr_GetPredictedDisplayTime even if we don't use the result.
+ // If we don't call it, the Oculus driver will spew out many warnings...
+ double predictedFrameTime = ovr_GetPredictedDisplayTime(mSession, mInputFrameID);
+ frameDelta = predictedFrameTime - ovr_GetTimeInSeconds();
}
- result = GetSensorState(frameTiming);
+ result = GetSensorState(frameDelta);
result.inputFrameID = mInputFrameID;
- mLastSensorState[mInputFrameID % kMaxLatencyFrames] = result;
+ mLastSensorState[result.inputFrameID % kMaxLatencyFrames] = result;
return result;
}
VRHMDSensorState
-HMDInfoOculus::GetImmediateSensorState()
+VRDisplayOculus::GetImmediateSensorState()
{
return GetSensorState(0.0);
}
VRHMDSensorState
-HMDInfoOculus::GetSensorState(double timeOffset)
+VRDisplayOculus::GetSensorState(double timeOffset)
{
VRHMDSensorState result;
result.Clear();
ovrTrackingState state = ovr_GetTrackingState(mSession, timeOffset, true);
ovrPoseStatef& pose(state.HeadPose);
result.timestamp = pose.TimeInSeconds;
@@ -470,238 +458,188 @@ HMDInfoOculus::GetSensorState(double tim
result.linearVelocity[2] = pose.LinearVelocity.z;
result.linearAcceleration[0] = pose.LinearAcceleration.x;
result.linearAcceleration[1] = pose.LinearAcceleration.y;
result.linearAcceleration[2] = pose.LinearAcceleration.z;
}
result.flags |= VRDisplayCapabilityFlags::Cap_External;
result.flags |= VRDisplayCapabilityFlags::Cap_Present;
-
+
return result;
}
-struct RenderTargetSetOculus : public VRHMDRenderingSupport::RenderTargetSet
+void
+VRDisplayOculus::StartPresentation()
{
- RenderTargetSetOculus(ovrSession aSession,
- const IntSize& aSize,
- HMDInfoOculus *aHMD,
- ovrTextureSwapChain aTS)
- : hmd(aHMD)
- , textureSet(aTS)
- , session(aSession)
- {
- size = aSize;
+ if (mIsPresenting) {
+ return;
}
-
- already_AddRefed<layers::CompositingRenderTarget> GetNextRenderTarget() override {
- int currentRenderTarget = 0;
- DebugOnly<ovrResult> orv = ovr_GetTextureSwapChainCurrentIndex(session, textureSet, &currentRenderTarget);
- MOZ_ASSERT(orv == ovrSuccess, "ovr_GetTextureSwapChainCurrentIndex failed.");
+ mIsPresenting = true;
- renderTargets[currentRenderTarget]->ClearOnBind();
- RefPtr<layers::CompositingRenderTarget> rt = renderTargets[currentRenderTarget];
- return rt.forget();
+ /**
+ * The presentation format is determined by content, which describes the
+ * left and right eye rectangles in the VRLayer. The default, if no
+ * coordinates are passed is to place the left and right eye textures
+ * side-by-side within the buffer.
+ *
+ * XXX - An optimization would be to dynamically resize this buffer
+ * to accomodate sites that are choosing to render in a lower
+ * resolution or are using space outside of the left and right
+ * eye textures for other purposes. (Bug 1291443)
+ */
+ ovrTextureSwapChainDesc desc;
+ memset(&desc, 0, sizeof(desc));
+ desc.Type = ovrTexture_2D;
+ desc.ArraySize = 1;
+ desc.Format = OVR_FORMAT_B8G8R8A8_UNORM_SRGB;
+ desc.Width = mDisplayInfo.mEyeResolution.width * 2;
+ desc.Height = mDisplayInfo.mEyeResolution.height;
+ desc.MipLevels = 1;
+ desc.SampleCount = 1;
+ desc.StaticImage = false;
+ desc.MiscFlags = ovrTextureMisc_DX_Typeless;
+ desc.BindFlags = ovrTextureBind_DX_RenderTarget;
+
+ if (!mDevice) {
+ mDevice = gfx::DeviceManagerD3D11::Get()->GetCompositorDevice();
+ if (!mDevice) {
+ NS_WARNING("Failed to get a D3D11Device for Oculus");
+ return;
+ }
}
- void Destroy() {
- ovr_DestroyTextureSwapChain(session, textureSet);
- hmd = nullptr;
- textureSet = nullptr;
+ mDevice->GetImmediateContext(getter_AddRefs(mContext));
+ if (!mContext) {
+ NS_WARNING("Failed to get immediate context for Oculus");
+ return;
}
-
- ~RenderTargetSetOculus() {
- Destroy();
+
+ if (FAILED(mDevice->CreateVertexShader(sLayerQuadVS.mData, sLayerQuadVS.mLength, nullptr, &mQuadVS))) {
+ NS_WARNING("Failed to create vertex shader for Oculus");
+ return;
+ }
+
+ if (FAILED(mDevice->CreatePixelShader(sRGBShader.mData, sRGBShader.mLength, nullptr, &mQuadPS))) {
+ NS_WARNING("Failed to create pixel shader for Oculus");
+ return;
}
- RefPtr<HMDInfoOculus> hmd;
- ovrTextureSwapChain textureSet;
- ovrSession session;
-};
-
-#ifdef XP_WIN
-class BasicTextureSourceD3D11 : public layers::TextureSourceD3D11
-{
-public:
- BasicTextureSourceD3D11(ID3D11Texture2D *aTexture, const IntSize& aSize) {
- mTexture = aTexture;
- mSize = aSize;
- }
-};
+ CD3D11_BUFFER_DESC cBufferDesc(sizeof(layers::VertexShaderConstants),
+ D3D11_BIND_CONSTANT_BUFFER,
+ D3D11_USAGE_DYNAMIC,
+ D3D11_CPU_ACCESS_WRITE);
-struct RenderTargetSetD3D11 : public RenderTargetSetOculus
-{
- RenderTargetSetD3D11(ovrSession aSession,
- layers::CompositorD3D11 *aCompositor,
- const IntSize& aSize,
- HMDInfoOculus *aHMD,
- ovrTextureSwapChain aTS)
- : RenderTargetSetOculus(aSession, aSize, aHMD, aTS)
- {
- compositor = aCompositor;
-
- int textureCount = 0;
- DebugOnly<ovrResult> orv = ovr_GetTextureSwapChainLength(session, aTS, &textureCount);
- MOZ_ASSERT(orv == ovrSuccess, "ovr_GetTextureSwapChainLength failed.");
+ if (FAILED(mDevice->CreateBuffer(&cBufferDesc, nullptr, getter_AddRefs(mVSConstantBuffer)))) {
+ NS_WARNING("Failed to vertex shader constant buffer for Oculus");
+ return;
+ }
- renderTargets.SetLength(textureCount);
-
- for (int i = 0; i < textureCount; ++i) {
-
- RefPtr<layers::CompositingRenderTargetD3D11> rt;
+ cBufferDesc.ByteWidth = sizeof(layers::PixelShaderConstants);
+ if (FAILED(mDevice->CreateBuffer(&cBufferDesc, nullptr, getter_AddRefs(mPSConstantBuffer)))) {
+ NS_WARNING("Failed to pixel shader constant buffer for Oculus");
+ return;
+ }
- ID3D11Texture2D* texture = nullptr;
- orv = ovr_GetTextureSwapChainBufferDX(session, aTS, i, IID_PPV_ARGS(&texture));
- MOZ_ASSERT(orv == ovrSuccess, "ovr_GetTextureSwapChainBufferDX failed.");
- rt = new layers::CompositingRenderTargetD3D11(texture, IntPoint(0, 0), DXGI_FORMAT_B8G8R8A8_UNORM);
- rt->SetSize(size);
- renderTargets[i] = rt;
- texture->Release();
- }
+ CD3D11_SAMPLER_DESC samplerDesc(D3D11_DEFAULT);
+ if (FAILED(mDevice->CreateSamplerState(&samplerDesc, getter_AddRefs(mLinearSamplerState)))) {
+ NS_WARNING("Failed to create sampler state for Oculus");
+ return;
}
-};
-#endif
-
-already_AddRefed<VRHMDRenderingSupport::RenderTargetSet>
-HMDInfoOculus::CreateRenderTargetSet(layers::Compositor *aCompositor, const IntSize& aSize)
-{
-#ifdef XP_WIN
- if (aCompositor->GetBackendType() == layers::LayersBackend::LAYERS_D3D11)
- {
- layers::CompositorD3D11 *comp11 = static_cast<layers::CompositorD3D11*>(aCompositor);
- ovrTextureSwapChainDesc desc;
- memset(&desc, 0, sizeof(desc));
- desc.Type = ovrTexture_2D;
- desc.ArraySize = 1;
- desc.Format = OVR_FORMAT_B8G8R8A8_UNORM_SRGB;
- desc.Width = aSize.width;
- desc.Height = aSize.height;
- desc.MipLevels = 1;
- desc.SampleCount = 1;
- desc.StaticImage = false;
- desc.MiscFlags = ovrTextureMisc_DX_Typeless;
- desc.BindFlags = ovrTextureBind_DX_RenderTarget;
+ D3D11_INPUT_ELEMENT_DESC layout[] =
+ {
+ { "POSITION", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0 },
+ };
- ovrTextureSwapChain ts = nullptr;
-
- ovrResult orv = ovr_CreateTextureSwapChainDX(mSession, comp11->GetDevice(), &desc, &ts);
- if (orv != ovrSuccess) {
- return nullptr;
- }
-
- RefPtr<RenderTargetSetD3D11> rts = new RenderTargetSetD3D11(mSession, comp11, aSize, this, ts);
- return rts.forget();
- }
-#endif
-
- if (aCompositor->GetBackendType() == layers::LayersBackend::LAYERS_OPENGL) {
+ if (FAILED(mDevice->CreateInputLayout(layout,
+ sizeof(layout) / sizeof(D3D11_INPUT_ELEMENT_DESC),
+ sLayerQuadVS.mData,
+ sLayerQuadVS.mLength,
+ getter_AddRefs(mInputLayout)))) {
+ NS_WARNING("Failed to create input layout for Oculus");
+ return;
}
- return nullptr;
+ ovrResult orv = ovr_CreateTextureSwapChainDX(mSession, mDevice, &desc, &mTextureSet);
+ if (orv != ovrSuccess) {
+ NS_WARNING("ovr_CreateTextureSwapChainDX failed");
+ return;
+ }
+
+ int textureCount = 0;
+ orv = ovr_GetTextureSwapChainLength(mSession, mTextureSet, &textureCount);
+ if (orv != ovrSuccess) {
+ NS_WARNING("ovr_GetTextureSwapChainLength failed");
+ return;
+ }
+
+ Vertex vertices[] = { { { 0.0, 0.0 } },{ { 1.0, 0.0 } },{ { 0.0, 1.0 } },{ { 1.0, 1.0 } } };
+ CD3D11_BUFFER_DESC bufferDesc(sizeof(vertices), D3D11_BIND_VERTEX_BUFFER);
+ D3D11_SUBRESOURCE_DATA data;
+ data.pSysMem = (void*)vertices;
+
+ if (FAILED(mDevice->CreateBuffer(&bufferDesc, &data, getter_AddRefs(mVertexBuffer)))) {
+ NS_WARNING("Failed to create vertex buffer for Oculus");
+ return;
+ }
+
+ mRenderTargets.SetLength(textureCount);
+
+ memset(&mVSConstants, 0, sizeof(mVSConstants));
+ memset(&mPSConstants, 0, sizeof(mPSConstants));
+
+ for (int i = 0; i < textureCount; ++i) {
+ RefPtr<CompositingRenderTargetD3D11> rt;
+ ID3D11Texture2D* texture = nullptr;
+ orv = ovr_GetTextureSwapChainBufferDX(mSession, mTextureSet, i, IID_PPV_ARGS(&texture));
+ MOZ_ASSERT(orv == ovrSuccess, "ovr_GetTextureSwapChainBufferDX failed.");
+ rt = new CompositingRenderTargetD3D11(texture, IntPoint(0, 0), DXGI_FORMAT_B8G8R8A8_UNORM);
+ rt->SetSize(IntSize(mDisplayInfo.mEyeResolution.width * 2, mDisplayInfo.mEyeResolution.height));
+ mRenderTargets[i] = rt;
+ texture->Release();
+ }
}
void
-HMDInfoOculus::DestroyRenderTargetSet(RenderTargetSet *aRTSet)
-{
- RenderTargetSetOculus *rts = static_cast<RenderTargetSetOculus*>(aRTSet);
- rts->Destroy();
-}
-
-void
-HMDInfoOculus::SubmitFrame(RenderTargetSet *aRTSet, int32_t aInputFrameID)
+VRDisplayOculus::StopPresentation()
{
- RenderTargetSetOculus *rts = static_cast<RenderTargetSetOculus*>(aRTSet);
- MOZ_ASSERT(rts->hmd != nullptr);
- MOZ_ASSERT(rts->textureSet != nullptr);
- MOZ_ASSERT(aInputFrameID >= 0);
- if (aInputFrameID < 0) {
- // Sanity check to prevent invalid memory access on builds with assertions
- // disabled.
- aInputFrameID = 0;
+ if (!mIsPresenting) {
+ return;
}
- ovrResult orv = ovr_CommitTextureSwapChain(mSession, rts->textureSet);
- if (orv != ovrSuccess) {
- printf_stderr("ovr_CommitTextureSwapChain failed.\n");
- }
+ mIsPresenting = false;
- VRHMDSensorState sensorState = mLastSensorState[aInputFrameID % kMaxLatencyFrames];
- // It is possible to get a cache miss on mLastSensorState if latency is
- // longer than kMaxLatencyFrames. An optimization would be to find a frame
- // that is closer than the one selected with the modulus.
- // If we hit this; however, latency is already so high that the site is
- // un-viewable and a more accurate pose prediction is not likely to
- // compensate.
- ovrLayerEyeFov layer;
- memset(&layer, 0, sizeof(layer));
- layer.Header.Type = ovrLayerType_EyeFov;
- layer.Header.Flags = 0;
- layer.ColorTexture[0] = rts->textureSet;
- layer.ColorTexture[1] = nullptr;
- layer.Fov[0] = mFOVPort[0];
- layer.Fov[1] = mFOVPort[1];
- layer.Viewport[0].Pos.x = 0;
- layer.Viewport[0].Pos.y = 0;
- layer.Viewport[0].Size.w = rts->size.width / 2;
- layer.Viewport[0].Size.h = rts->size.height;
- layer.Viewport[1].Pos.x = rts->size.width / 2;
- layer.Viewport[1].Pos.y = 0;
- layer.Viewport[1].Size.w = rts->size.width / 2;
- layer.Viewport[1].Size.h = rts->size.height;
+ ovr_SubmitFrame(mSession, 0, nullptr, nullptr, 0);
- const Point3D& l = rts->hmd->mDeviceInfo.mEyeTranslation[0];
- const Point3D& r = rts->hmd->mDeviceInfo.mEyeTranslation[1];
- const ovrVector3f hmdToEyeViewOffset[2] = { { l.x, l.y, l.z },
- { r.x, r.y, r.z } };
-
- for (uint32_t i = 0; i < 2; ++i) {
- gfx::Quaternion o(sensorState.orientation[0],
- sensorState.orientation[1],
- sensorState.orientation[2],
- sensorState.orientation[3]);
- Point3D vo(hmdToEyeViewOffset[i].x, hmdToEyeViewOffset[i].y, hmdToEyeViewOffset[i].z);
- Point3D p = o.RotatePoint(vo);
-
- layer.RenderPose[i].Orientation.x = o.x;
- layer.RenderPose[i].Orientation.y = o.y;
- layer.RenderPose[i].Orientation.z = o.z;
- layer.RenderPose[i].Orientation.w = o.w;
- layer.RenderPose[i].Position.x = p.x + sensorState.position[0];
- layer.RenderPose[i].Position.y = p.y + sensorState.position[1];
- layer.RenderPose[i].Position.z = p.z + sensorState.position[2];
- }
-
- ovrLayerHeader *layers = &layer.Header;
- orv = ovr_SubmitFrame(mSession, aInputFrameID, nullptr, &layers, 1);
- //printf_stderr("Submitted frame %d, result: %d\n", rts->textureSet->CurrentIndex, orv);
- if (orv != ovrSuccess) {
- printf_stderr("ovr_SubmitFrame failed.\n");
+ if (mTextureSet) {
+ ovr_DestroyTextureSwapChain(mSession, mTextureSet);
+ mTextureSet = nullptr;
}
}
-/*static*/ already_AddRefed<VRHMDManagerOculus>
-VRHMDManagerOculus::Create()
+/*static*/ already_AddRefed<VRDisplayManagerOculus>
+VRDisplayManagerOculus::Create()
{
MOZ_ASSERT(NS_IsMainThread());
if (!gfxPrefs::VREnabled() || !gfxPrefs::VROculusEnabled())
{
return nullptr;
}
if (!InitializeOculusCAPI()) {
return nullptr;
}
- RefPtr<VRHMDManagerOculus> manager = new VRHMDManagerOculus();
+ RefPtr<VRDisplayManagerOculus> manager = new VRDisplayManagerOculus();
return manager.forget();
}
bool
-VRHMDManagerOculus::Init()
+VRDisplayManagerOculus::Init()
{
if (!mOculusInitialized) {
nsIThread* thread = nullptr;
NS_GetCurrentThread(&thread);
mOculusThread = already_AddRefed<nsIThread>(thread);
ovrInitParams params;
memset(&params, 0, sizeof(params));
@@ -716,31 +654,31 @@ VRHMDManagerOculus::Init()
mOculusInitialized = true;
}
}
return mOculusInitialized;
}
void
-VRHMDManagerOculus::Destroy()
+VRDisplayManagerOculus::Destroy()
{
- if(mOculusInitialized) {
+ if (mOculusInitialized) {
MOZ_ASSERT(NS_GetCurrentThread() == mOculusThread);
mOculusThread = nullptr;
mHMDInfo = nullptr;
ovr_Shutdown();
mOculusInitialized = false;
}
}
void
-VRHMDManagerOculus::GetHMDs(nsTArray<RefPtr<VRHMDInfo>>& aHMDResult)
+VRDisplayManagerOculus::GetHMDs(nsTArray<RefPtr<VRDisplayHost>>& aHMDResult)
{
if (!mOculusInitialized) {
return;
}
// ovr_Create can be slow when no HMD is present and we wish
// to keep the same oculus session when possible, so we detect
// presence of an HMD with ovr_GetHmdDesc before calling ovr_Create
@@ -749,16 +687,196 @@ VRHMDManagerOculus::GetHMDs(nsTArray<Ref
// No HMD connected.
mHMDInfo = nullptr;
} else if (mHMDInfo == nullptr) {
// HMD Detected
ovrSession session;
ovrGraphicsLuid luid;
ovrResult orv = ovr_Create(&session, &luid);
if (orv == ovrSuccess) {
- mHMDInfo = new HMDInfoOculus(session);
+ mHMDInfo = new VRDisplayOculus(session);
}
}
if (mHMDInfo) {
aHMDResult.AppendElement(mHMDInfo);
}
}
+
+already_AddRefed<CompositingRenderTargetD3D11>
+VRDisplayOculus::GetNextRenderTarget()
+{
+ int currentRenderTarget = 0;
+ DebugOnly<ovrResult> orv = ovr_GetTextureSwapChainCurrentIndex(mSession, mTextureSet, &currentRenderTarget);
+ MOZ_ASSERT(orv == ovrSuccess, "ovr_GetTextureSwapChainCurrentIndex failed.");
+
+ mRenderTargets[currentRenderTarget]->ClearOnBind();
+ RefPtr<CompositingRenderTargetD3D11> rt = mRenderTargets[currentRenderTarget];
+ return rt.forget();
+}
+
+bool
+VRDisplayOculus::UpdateConstantBuffers()
+{
+ HRESULT hr;
+ D3D11_MAPPED_SUBRESOURCE resource;
+ resource.pData = nullptr;
+
+ hr = mContext->Map(mVSConstantBuffer, 0, D3D11_MAP_WRITE_DISCARD, 0, &resource);
+ if (FAILED(hr) || !resource.pData) {
+ return false;
+ }
+ *(VertexShaderConstants*)resource.pData = mVSConstants;
+ mContext->Unmap(mVSConstantBuffer, 0);
+ resource.pData = nullptr;
+
+ hr = mContext->Map(mPSConstantBuffer, 0, D3D11_MAP_WRITE_DISCARD, 0, &resource);
+ if (FAILED(hr) || !resource.pData) {
+ return false;
+ }
+ *(PixelShaderConstants*)resource.pData = mPSConstants;
+ mContext->Unmap(mPSConstantBuffer, 0);
+
+ ID3D11Buffer *buffer = mVSConstantBuffer;
+ mContext->VSSetConstantBuffers(0, 1, &buffer);
+ buffer = mPSConstantBuffer;
+ mContext->PSSetConstantBuffers(0, 1, &buffer);
+ return true;
+}
+
+void
+VRDisplayOculus::SubmitFrame(TextureSourceD3D11* aSource,
+ const IntSize& aSize,
+ const VRHMDSensorState& aSensorState,
+ const gfx::Rect& aLeftEyeRect,
+ const gfx::Rect& aRightEyeRect)
+{
+ if (!mIsPresenting) {
+ return;
+ }
+ MOZ_ASSERT(mDevice);
+ MOZ_ASSERT(mContext);
+
+ RefPtr<CompositingRenderTargetD3D11> surface = GetNextRenderTarget();
+
+ surface->BindRenderTarget(mContext);
+
+ Matrix viewMatrix = Matrix::Translation(-1.0, 1.0);
+ viewMatrix.PreScale(2.0f / float(aSize.width), 2.0f / float(aSize.height));
+ viewMatrix.PreScale(1.0f, -1.0f);
+ Matrix4x4 projection = Matrix4x4::From2D(viewMatrix);
+ projection._33 = 0.0f;
+
+ Matrix transform2d;
+ gfx::Matrix4x4 transform = gfx::Matrix4x4::From2D(transform2d);
+
+ D3D11_VIEWPORT viewport;
+ viewport.MinDepth = 0.0f;
+ viewport.MaxDepth = 1.0f;
+ viewport.Width = aSize.width;
+ viewport.Height = aSize.height;
+ viewport.TopLeftX = 0;
+ viewport.TopLeftY = 0;
+
+ D3D11_RECT scissor;
+ scissor.left = 0;
+ scissor.right = aSize.width;
+ scissor.top = 0;
+ scissor.bottom = aSize.height;
+
+ memcpy(&mVSConstants.layerTransform, &transform._11, sizeof(mVSConstants.layerTransform));
+ memcpy(&mVSConstants.projection, &projection._11, sizeof(mVSConstants.projection));
+ mVSConstants.renderTargetOffset[0] = 0.0f;
+ mVSConstants.renderTargetOffset[1] = 0.0f;
+ mVSConstants.layerQuad = Rect(0.0f, 0.0f, aSize.width, aSize.height);
+ mVSConstants.textureCoords = Rect(0.0f, 1.0f, 1.0f, -1.0f);
+
+ mPSConstants.layerOpacity[0] = 1.0f;
+
+ ID3D11Buffer* vbuffer = mVertexBuffer;
+ UINT vsize = sizeof(Vertex);
+ UINT voffset = 0;
+ mContext->IASetVertexBuffers(0, 1, &vbuffer, &vsize, &voffset);
+ mContext->IASetIndexBuffer(nullptr, DXGI_FORMAT_R16_UINT, 0);
+ mContext->IASetInputLayout(mInputLayout);
+ mContext->RSSetViewports(1, &viewport);
+ mContext->RSSetScissorRects(1, &scissor);
+ mContext->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLESTRIP);
+ mContext->VSSetShader(mQuadVS, nullptr, 0);
+ mContext->PSSetShader(mQuadPS, nullptr, 0);
+ ID3D11ShaderResourceView* srView = aSource->GetShaderResourceView();
+ mContext->PSSetShaderResources(0 /* 0 == TexSlot::RGB */, 1, &srView);
+ // XXX Use Constant from TexSlot in CompositorD3D11.cpp?
+
+ ID3D11SamplerState *sampler = mLinearSamplerState;
+ mContext->PSSetSamplers(0, 1, &sampler);
+
+ if (!UpdateConstantBuffers()) {
+ NS_WARNING("Failed to update constant buffers for Oculus");
+ return;
+ }
+
+ mContext->Draw(4, 0);
+
+ ovrResult orv = ovr_CommitTextureSwapChain(mSession, mTextureSet);
+ if (orv != ovrSuccess) {
+ NS_WARNING("ovr_CommitTextureSwapChain failed.\n");
+ return;
+ }
+
+ ovrLayerEyeFov layer;
+ memset(&layer, 0, sizeof(layer));
+ layer.Header.Type = ovrLayerType_EyeFov;
+ layer.Header.Flags = 0;
+ layer.ColorTexture[0] = mTextureSet;
+ layer.ColorTexture[1] = nullptr;
+ layer.Fov[0] = mFOVPort[0];
+ layer.Fov[1] = mFOVPort[1];
+ layer.Viewport[0].Pos.x = aSize.width * aLeftEyeRect.x;
+ layer.Viewport[0].Pos.y = aSize.height * aLeftEyeRect.y;
+ layer.Viewport[0].Size.w = aSize.width * aLeftEyeRect.width;
+ layer.Viewport[0].Size.h = aSize.height * aLeftEyeRect.height;
+ layer.Viewport[1].Pos.x = aSize.width * aRightEyeRect.x;
+ layer.Viewport[1].Pos.y = aSize.height * aRightEyeRect.y;
+ layer.Viewport[1].Size.w = aSize.width * aRightEyeRect.width;
+ layer.Viewport[1].Size.h = aSize.height * aRightEyeRect.height;
+
+ const Point3D& l = mDisplayInfo.mEyeTranslation[0];
+ const Point3D& r = mDisplayInfo.mEyeTranslation[1];
+ const ovrVector3f hmdToEyeViewOffset[2] = { { l.x, l.y, l.z },
+ { r.x, r.y, r.z } };
+
+ for (uint32_t i = 0; i < 2; ++i) {
+ Quaternion o(aSensorState.orientation[0],
+ aSensorState.orientation[1],
+ aSensorState.orientation[2],
+ aSensorState.orientation[3]);
+ Point3D vo(hmdToEyeViewOffset[i].x, hmdToEyeViewOffset[i].y, hmdToEyeViewOffset[i].z);
+ Point3D p = o.RotatePoint(vo);
+ layer.RenderPose[i].Orientation.x = o.x;
+ layer.RenderPose[i].Orientation.y = o.y;
+ layer.RenderPose[i].Orientation.z = o.z;
+ layer.RenderPose[i].Orientation.w = o.w;
+ layer.RenderPose[i].Position.x = p.x + aSensorState.position[0];
+ layer.RenderPose[i].Position.y = p.y + aSensorState.position[1];
+ layer.RenderPose[i].Position.z = p.z + aSensorState.position[2];
+ }
+
+ ovrLayerHeader *layers = &layer.Header;
+ orv = ovr_SubmitFrame(mSession, aSensorState.inputFrameID, nullptr, &layers, 1);
+
+ if (orv != ovrSuccess) {
+ printf_stderr("ovr_SubmitFrame failed.\n");
+ }
+
+ // Trigger the next VSync immediately
+ VRManager *vm = VRManager::Get();
+ MOZ_ASSERT(vm);
+ vm->NotifyVRVsync(mDisplayInfo.mDisplayID);
+}
+
+void
+VRDisplayOculus::NotifyVSync()
+{
+ ovrSessionStatus sessionStatus;
+ ovrResult ovr = ovr_GetSessionStatus(mSession, &sessionStatus);
+ mDisplayInfo.mIsConnected = (ovr == ovrSuccess && sessionStatus.HmdPresent);
+}
--- a/gfx/vr/gfxVROculus.h
+++ b/gfx/vr/gfxVROculus.h
@@ -8,101 +8,104 @@
#include "nsTArray.h"
#include "mozilla/RefPtr.h"
#include "mozilla/gfx/2D.h"
#include "mozilla/EnumeratedArray.h"
#include "gfxVR.h"
-//#include <OVR_CAPI.h>
-//#include <OVR_CAPI_D3D.h>
+#include "VRDisplayHost.h"
#include "ovr_capi_dynamic.h"
+struct ID3D11Device;
+
namespace mozilla {
+namespace layers {
+class CompositingRenderTargetD3D11;
+struct VertexShaderConstants;
+struct PixelShaderConstants;
+}
namespace gfx {
namespace impl {
-class HMDInfoOculus : public VRHMDInfo, public VRHMDRenderingSupport {
+class VRDisplayOculus : public VRDisplayHost
+{
public:
- explicit HMDInfoOculus(ovrSession aSession);
-
- bool SetFOV(const VRFieldOfView& aFOVLeft, const VRFieldOfView& aFOVRight,
- double zNear, double zFar) override;
-
+ virtual void NotifyVSync() override;
virtual VRHMDSensorState GetSensorState() override;
virtual VRHMDSensorState GetImmediateSensorState() override;
void ZeroSensor() override;
- bool KeepSensorTracking() override;
- void NotifyVsync(const TimeStamp& aVsyncTimestamp) override;
- void FillDistortionConstants(uint32_t whichEye,
- const IntSize& textureSize, const IntRect& eyeViewport,
- const Size& destViewport, const Rect& destRect,
- VRDistortionConstants& values) override;
+protected:
+ virtual void StartPresentation() override;
+ virtual void StopPresentation() override;
+ virtual void SubmitFrame(mozilla::layers::TextureSourceD3D11* aSource,
+ const IntSize& aSize,
+ const VRHMDSensorState& aSensorState,
+ const gfx::Rect& aLeftEyeRect,
+ const gfx::Rect& aRightEyeRect) override;
- VRHMDRenderingSupport* GetRenderingSupport() override { return this; }
-
+public:
+ explicit VRDisplayOculus(ovrSession aSession);
+
+protected:
+ virtual ~VRDisplayOculus();
void Destroy();
- /* VRHMDRenderingSupport */
- already_AddRefed<RenderTargetSet> CreateRenderTargetSet(layers::Compositor *aCompositor, const IntSize& aSize) override;
- void DestroyRenderTargetSet(RenderTargetSet *aRTSet) override;
- void SubmitFrame(RenderTargetSet *aRTSet, int32_t aInputFrameID) override;
-
- ovrSession GetOculusSession() const { return mSession; }
-
-protected:
- virtual ~HMDInfoOculus() {
- Destroy();
- MOZ_COUNT_DTOR_INHERITED(HMDInfoOculus, VRHMDInfo);
- }
+ bool RequireSession();
+ const ovrHmdDesc& GetHmdDesc();
- // must match the size of VRDistortionVertex
- struct DistortionVertex {
- float pos[2];
- float texR[2];
- float texG[2];
- float texB[2];
- float genericAttribs[4];
- };
-
- ovrSession mSession;
- ovrHmdDesc mDesc;
- ovrFovPort mFOVPort[2];
+ already_AddRefed<layers::CompositingRenderTargetD3D11> GetNextRenderTarget();
VRHMDSensorState GetSensorState(double timeOffset);
- // The maximum number of frames of latency that we would expect before we
- // should give up applying pose prediction.
- // If latency is greater than one second, then the experience is not likely
- // to be corrected by pose prediction. Setting this value too
- // high may result in unnecessary memory allocation.
- // As the current fastest refresh rate is 90hz, 100 is selected as a
- // conservative value.
- static const int kMaxLatencyFrames = 100;
- VRHMDSensorState mLastSensorState[kMaxLatencyFrames];
- int32_t mInputFrameID;
+ ovrHmdDesc mDesc;
+ ovrSession mSession;
+ ovrFovPort mFOVPort[2];
+ ovrTextureSwapChain mTextureSet;
+ nsTArray<RefPtr<layers::CompositingRenderTargetD3D11>> mRenderTargets;
+
+ RefPtr<ID3D11Device> mDevice;
+ RefPtr<ID3D11DeviceContext> mContext;
+ ID3D11VertexShader* mQuadVS;
+ ID3D11PixelShader* mQuadPS;
+ RefPtr<ID3D11SamplerState> mLinearSamplerState;
+ layers::VertexShaderConstants mVSConstants;
+ layers::PixelShaderConstants mPSConstants;
+ RefPtr<ID3D11Buffer> mVSConstantBuffer;
+ RefPtr<ID3D11Buffer> mPSConstantBuffer;
+ RefPtr<ID3D11Buffer> mVertexBuffer;
+ RefPtr<ID3D11InputLayout> mInputLayout;
+
+ bool mIsPresenting;
+
+ bool UpdateConstantBuffers();
+
+ struct Vertex
+ {
+ float position[2];
+ };
};
} // namespace impl
-class VRHMDManagerOculus : public VRHMDManager
+class VRDisplayManagerOculus : public VRDisplayManager
{
public:
- static already_AddRefed<VRHMDManagerOculus> Create();
+ static already_AddRefed<VRDisplayManagerOculus> Create();
virtual bool Init() override;
virtual void Destroy() override;
- virtual void GetHMDs(nsTArray<RefPtr<VRHMDInfo> >& aHMDResult) override;
+ virtual void GetHMDs(nsTArray<RefPtr<VRDisplayHost> >& aHMDResult) override;
protected:
- VRHMDManagerOculus()
+ VRDisplayManagerOculus()
: mOculusInitialized(false)
{ }
- RefPtr<impl::HMDInfoOculus> mHMDInfo;
+ RefPtr<impl::VRDisplayOculus> mHMDInfo;
bool mOculusInitialized;
RefPtr<nsIThread> mOculusThread;
};
} // namespace gfx
} // namespace mozilla
#endif /* GFX_VR_OCULUS_H */
new file mode 100644
--- /dev/null
+++ b/gfx/vr/ipc/PVRLayer.ipdl
@@ -0,0 +1,27 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
+ * vim: sw=2 ts=8 et :
+ */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+include protocol PVRManager;
+include protocol PTexture;
+
+namespace mozilla {
+namespace gfx {
+
+async protocol PVRLayer
+{
+ manager PVRManager;
+
+parent:
+ async SubmitFrame(int32_t aInputFrameID, PTexture aTexture);
+ async Destroy();
+
+child:
+ async __delete__();
+};
+
+} // gfx
+} // mozilla
--- a/gfx/vr/ipc/PVRManager.ipdl
+++ b/gfx/vr/ipc/PVRManager.ipdl
@@ -1,63 +1,73 @@
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
* vim: sw=2 ts=8 et :
*/
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+include LayersSurfaces;
+include protocol PLayer;
+include protocol PTexture;
+include protocol PVRLayer;
+include LayersMessages;
+
include "VRMessageUtils.h";
using struct mozilla::gfx::VRFieldOfView from "gfxVR.h";
-using struct mozilla::gfx::VRDisplayUpdate from "gfxVR.h";
+using struct mozilla::gfx::VRDisplayInfo from "gfxVR.h";
using struct mozilla::gfx::VRSensorUpdate from "gfxVR.h";
+using struct mozilla::gfx::VRHMDSensorState from "gfxVR.h";
+using mozilla::layers::LayersBackend from "mozilla/layers/LayersTypes.h";
+using mozilla::layers::TextureFlags from "mozilla/layers/CompositorTypes.h";
+
namespace mozilla {
namespace gfx {
/**
- * The PVRManager protocol is used to enable communication of VR device
+ * The PVRManager protocol is used to enable communication of VR display
* enumeration and sensor state between the compositor thread and
* content threads/processes.
*/
sync protocol PVRManager
{
+ manages PTexture;
+ manages PVRLayer;
+
parent:
- // (Re)Enumerate VR Devices. An updated list of VR devices will be returned
- // asynchronously to children via UpdateDeviceInfo.
- async RefreshDevices();
+ async PTexture(SurfaceDescriptor aSharedData, LayersBackend aBackend,
+ TextureFlags aTextureFlags, uint64_t aSerial);
- // Reset the sensor of the device identified by aDeviceID so that the current
- // sensor state is the "Zero" position.
- async ResetSensor(uint32_t aDeviceID);
+ async PVRLayer(uint32_t aDisplayID, float aLeftEyeX, float aLeftEyeY, float aLeftEyeWidth, float aLeftEyeHeight, float aRightEyeX, float aRightEyeY, float aRightEyeWidth, float aRightEyeHeight);
- // KeepSensorTracking is called continuously by children to indicate their
- // interest in receiving sensor data from the device identified by aDeviceID.
- // This will activate any physical sensor tracking system requiring
- // initialization and guarantee that it will remain active until at least one
- // second has passed since the last KeepSensorTracking call has been made.
- // Sensor data will be sent asynchronously via UpdateDeviceSensors
- async KeepSensorTracking(uint32_t aDeviceID);
+ // (Re)Enumerate VR Displays. An updated list of VR displays will be returned
+ // asynchronously to children via UpdateDisplayInfo.
+ async RefreshDisplays();
- // Set the field of view parameters for an HMD identified by aDeviceID
- async SetFOV(uint32_t aDeviceID, VRFieldOfView aFOVLeft,
- VRFieldOfView aFOVRight, double zNear, double zFar);
+ // Reset the sensor of the display identified by aDisplayID so that the current
+ // sensor state is the "Zero" position.
+ async ResetSensor(uint32_t aDisplayID);
+
+ sync GetSensorState(uint32_t aDisplayID) returns(VRHMDSensorState aState);
+ sync GetImmediateSensorState(uint32_t aDisplayID) returns(VRHMDSensorState aState);
+ async SetHaveEventListener(bool aHaveEventListener);
child:
- // Notify children of updated VR device enumeration and details. This will
- // be sent to all children when the parent receives RefreshDevices, even
+ async ParentAsyncMessages(AsyncParentMessageData[] aMessages);
+
+ // Notify children of updated VR display enumeration and details. This will
+ // be sent to all children when the parent receives RefreshDisplays, even
// if no changes have been detected. This ensures that Promises exposed
// through DOM calls are always resolved.
- async UpdateDeviceInfo(VRDisplayUpdate[] aDeviceUpdates);
+ async UpdateDisplayInfo(VRDisplayInfo[] aDisplayUpdates);
- // Notify children of updated VR device sensor states. This will be
- // sent once per frame for at least one second after the parent receives
- // KeepSensorTracking.
- async UpdateDeviceSensors(VRSensorUpdate[] aDeviceSensorUpdates);
+ async NotifyVSync();
+ async NotifyVRVSync(uint32_t aDisplayID);
async __delete__();
};
} // gfx
} // mozilla
new file mode 100644
--- /dev/null
+++ b/gfx/vr/ipc/VRLayerChild.cpp
@@ -0,0 +1,86 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "VRLayerChild.h"
+#include "GLScreenBuffer.h"
+#include "mozilla/layers/TextureClientSharedSurface.h"
+#include "SharedSurface.h" // for SharedSurface
+#include "SharedSurfaceGL.h" // for SharedSurface
+#include "mozilla/layers/LayersMessages.h" // for TimedTexture
+#include "nsICanvasRenderingContextInternal.h"
+#include "mozilla/dom/HTMLCanvasElement.h"
+
+namespace mozilla {
+namespace gfx {
+
+VRLayerChild::VRLayerChild(uint32_t aVRDisplayID, VRManagerChild* aVRManagerChild)
+ : mVRDisplayID(aVRDisplayID)
+ , mCanvasElement(nullptr)
+ , mShSurfClient(nullptr)
+ , mFront(nullptr)
+{
+ MOZ_COUNT_CTOR(VRLayerChild);
+}
+
+VRLayerChild::~VRLayerChild()
+{
+ if (mCanvasElement) {
+ mCanvasElement->StopVRPresentation();
+ }
+
+ ClearSurfaces();
+
+ MOZ_COUNT_DTOR(VRLayerChild);
+}
+
+void
+VRLayerChild::Initialize(dom::HTMLCanvasElement* aCanvasElement)
+{
+ MOZ_ASSERT(aCanvasElement);
+ mCanvasElement = aCanvasElement;
+ mCanvasElement->StartVRPresentation();
+
+ VRManagerChild *vrmc = VRManagerChild::Get();
+ vrmc->RunFrameRequestCallbacks();
+}
+
+void
+VRLayerChild::SubmitFrame(int32_t aInputFrameID)
+{
+ if (!mCanvasElement) {
+ return;
+ }
+
+ mShSurfClient = mCanvasElement->GetVRFrame();
+ if (!mShSurfClient) {
+ return;
+ }
+
+ gl::SharedSurface* surf = mShSurfClient->Surf();
+ if (surf->mType == gl::SharedSurfaceType::Basic) {
+ gfxCriticalError() << "SharedSurfaceType::Basic not supported for WebVR";
+ return;
+ }
+
+ mFront = mShSurfClient;
+ mShSurfClient = nullptr;
+
+ mFront->SetAddedToCompositableClient();
+ VRManagerChild* vrmc = VRManagerChild::Get();
+ mFront->SyncWithObject(vrmc->GetSyncObject());
+ MOZ_ALWAYS_TRUE(mFront->InitIPDLActor(vrmc, vrmc->GetBackendType()));
+
+ SendSubmitFrame(aInputFrameID, mFront->GetIPDLActor());
+}
+
+void
+VRLayerChild::ClearSurfaces()
+{
+ mFront = nullptr;
+ mShSurfClient = nullptr;
+}
+
+} // namespace gfx
+} // namespace mozilla
new file mode 100644
--- /dev/null
+++ b/gfx/vr/ipc/VRLayerChild.h
@@ -0,0 +1,53 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef GFX_VR_LAYERCHILD_H
+#define GFX_VR_LAYERCHILD_H
+
+#include "VRManagerChild.h"
+
+#include "mozilla/RefPtr.h"
+#include "mozilla/gfx/PVRLayerChild.h"
+#include "GLContext.h"
+#include "gfxVR.h"
+
+class nsICanvasRenderingContextInternal;
+
+namespace mozilla {
+class WebGLContext;
+namespace dom {
+class HTMLCanvasElement;
+}
+namespace layers {
+class SharedSurfaceTextureClient;
+}
+namespace gl {
+class SurfaceFactory;
+}
+namespace gfx {
+
+class VRLayerChild : public PVRLayerChild {
+ NS_INLINE_DECL_REFCOUNTING(VRLayerChild)
+
+public:
+ VRLayerChild(uint32_t aVRDisplayID, VRManagerChild* aVRManagerChild);
+ void Initialize(dom::HTMLCanvasElement* aCanvasElement);
+ void SubmitFrame(int32_t aInputFrameID);
+
+protected:
+ virtual ~VRLayerChild();
+ void ClearSurfaces();
+
+ uint32_t mVRDisplayID;
+
+ RefPtr<dom::HTMLCanvasElement> mCanvasElement;
+ RefPtr<layers::SharedSurfaceTextureClient> mShSurfClient;
+ RefPtr<layers::TextureClient> mFront;
+};
+
+} // namespace gfx
+} // namespace mozilla
+
+#endif
new file mode 100644
--- /dev/null
+++ b/gfx/vr/ipc/VRLayerParent.cpp
@@ -0,0 +1,60 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+
+#include "VRLayerParent.h"
+#include "mozilla/unused.h"
+
+namespace mozilla {
+namespace gfx {
+
+VRLayerParent::VRLayerParent(uint32_t aVRDisplayID, const Rect& aLeftEyeRect, const Rect& aRightEyeRect)
+ : mIPCOpen(true)
+ , mVRDisplayID(aVRDisplayID)
+ , mLeftEyeRect(aLeftEyeRect)
+ , mRightEyeRect(aRightEyeRect)
+{
+ MOZ_COUNT_CTOR(VRLayerParent);
+}
+
+VRLayerParent::~VRLayerParent()
+{
+ MOZ_COUNT_DTOR(VRLayerParent);
+}
+
+bool
+VRLayerParent::RecvDestroy()
+{
+ Destroy();
+ return true;
+}
+
+void
+VRLayerParent::ActorDestroy(ActorDestroyReason aWhy)
+{
+ mIPCOpen = false;
+}
+
+void
+VRLayerParent::Destroy()
+{
+ if (mIPCOpen) {
+ Unused << PVRLayerParent::Send__delete__(this);
+ }
+}
+
+bool
+VRLayerParent::RecvSubmitFrame(const int32_t& aInputFrameID,
+ PTextureParent* texture)
+{
+ VRManager* vm = VRManager::Get();
+ vm->SubmitFrame(this, aInputFrameID, texture, mLeftEyeRect, mRightEyeRect);
+
+ return true;
+}
+
+
+} // namespace gfx
+} // namespace mozilla
new file mode 100644
--- /dev/null
+++ b/gfx/vr/ipc/VRLayerParent.h
@@ -0,0 +1,44 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef GFX_VR_LAYERPARENT_H
+#define GFX_VR_LAYERPARENT_H
+
+#include "VRManager.h"
+
+#include "mozilla/RefPtr.h"
+#include "mozilla/gfx/PVRLayerParent.h"
+#include "gfxVR.h"
+
+namespace mozilla {
+namespace gfx {
+
+class VRLayerParent : public PVRLayerParent {
+ NS_INLINE_DECL_REFCOUNTING(VRLayerParent)
+
+public:
+ VRLayerParent(uint32_t aVRDisplayID, const Rect& aLeftEyeRect, const Rect& aRightEyeRect);
+ virtual bool RecvSubmitFrame(const int32_t& aInputFrameID,
+ PTextureParent* texture) override;
+ virtual bool RecvDestroy() override;
+ uint32_t GetDisplayID() const { return mVRDisplayID; }
+protected:
+ virtual void ActorDestroy(ActorDestroyReason aWhy) override;
+
+ virtual ~VRLayerParent();
+ void Destroy();
+
+ bool mIPCOpen;
+
+ uint32_t mVRDisplayID;
+ gfx::IntSize mSize;
+ gfx::Rect mLeftEyeRect;
+ gfx::Rect mRightEyeRect;
+};
+
+} // namespace gfx
+} // namespace mozilla
+
+#endif
--- a/gfx/vr/ipc/VRManagerChild.cpp
+++ b/gfx/vr/ipc/VRManagerChild.cpp
@@ -2,44 +2,69 @@
* vim: sw=2 ts=8 et :
*/
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "VRManagerChild.h"
#include "VRManagerParent.h"
-#include "VRDisplayProxy.h"
+#include "VRDisplayClient.h"
#include "mozilla/StaticPtr.h"
#include "mozilla/layers/CompositorThread.h" // for CompositorThread
#include "mozilla/dom/Navigator.h"
+#include "mozilla/dom/WindowBinding.h" // for FrameRequestCallback
+#include "mozilla/layers/TextureClient.h"
+
+using layers::TextureClient;
namespace mozilla {
namespace gfx {
static StaticRefPtr<VRManagerChild> sVRManagerChildSingleton;
static StaticRefPtr<VRManagerParent> sVRManagerParentSingleton;
void ReleaseVRManagerParentSingleton() {
sVRManagerParentSingleton = nullptr;
}
VRManagerChild::VRManagerChild()
- : mInputFrameID(-1)
+ : TextureForwarder()
+ , mInputFrameID(-1)
+ , mMessageLoop(MessageLoop::current())
+ , mFrameRequestCallbackCounter(0)
+ , mBackend(layers::LayersBackend::LAYERS_NONE)
{
MOZ_COUNT_CTOR(VRManagerChild);
MOZ_ASSERT(NS_IsMainThread());
+
+ mStartTimeStamp = TimeStamp::Now();
}
VRManagerChild::~VRManagerChild()
{
MOZ_ASSERT(NS_IsMainThread());
MOZ_COUNT_DTOR(VRManagerChild);
}
+/*static*/ void
+VRManagerChild::IdentifyTextureHost(const TextureFactoryIdentifier& aIdentifier)
+{
+ if (sVRManagerChildSingleton) {
+ sVRManagerChildSingleton->mBackend = aIdentifier.mParentBackend;
+ sVRManagerChildSingleton->mSyncObject = SyncObject::CreateSyncObject(aIdentifier.mSyncHandle);
+ }
+}
+
+layers::LayersBackend
+VRManagerChild::GetBackendType() const
+{
+ return mBackend;
+}
+
/*static*/ VRManagerChild*
VRManagerChild::Get()
{
MOZ_ASSERT(sVRManagerChildSingleton);
return sVRManagerChildSingleton;
}
/* static */ bool
@@ -97,101 +122,357 @@ VRManagerChild::ShutDown()
sVRManagerChildSingleton->Destroy();
sVRManagerChildSingleton = nullptr;
}
}
/*static*/ void
VRManagerChild::DeferredDestroy(RefPtr<VRManagerChild> aVRManagerChild)
{
- aVRManagerChild->Close();
+ aVRManagerChild->Close();
}
void
VRManagerChild::Destroy()
{
- // This must not be called from the destructor!
- MOZ_ASSERT(mRefCnt != 0);
+ mTexturesWaitingRecycled.Clear();
// Keep ourselves alive until everything has been shut down
RefPtr<VRManagerChild> selfRef = this;
// The DeferredDestroyVRManager task takes ownership of
// the VRManagerChild and will release it when it runs.
MessageLoop::current()->PostTask(
NewRunnableFunction(DeferredDestroy, selfRef));
}
+layers::PTextureChild*
+VRManagerChild::AllocPTextureChild(const SurfaceDescriptor&,
+ const LayersBackend&,
+ const TextureFlags&,
+ const uint64_t&)
+{
+ return TextureClient::CreateIPDLActor();
+}
+
bool
-VRManagerChild::RecvUpdateDeviceInfo(nsTArray<VRDisplayUpdate>&& aDeviceUpdates)
+VRManagerChild::DeallocPTextureChild(PTextureChild* actor)
{
- // mDevices could be a hashed container for more scalability, but not worth
- // it now as we expect < 10 entries.
- nsTArray<RefPtr<VRDisplayProxy> > devices;
- for (auto& deviceUpdate: aDeviceUpdates) {
- bool isNewDevice = true;
- for (auto& device: mDevices) {
- if (device->GetDeviceInfo().GetDeviceID() == deviceUpdate.mDeviceInfo.GetDeviceID()) {
- device->UpdateDeviceInfo(deviceUpdate);
- devices.AppendElement(device);
- isNewDevice = false;
+ return TextureClient::DestroyIPDLActor(actor);
+}
+
+PVRLayerChild*
+VRManagerChild::AllocPVRLayerChild(const uint32_t& aDisplayID,
+ const float& aLeftEyeX,
+ const float& aLeftEyeY,
+ const float& aLeftEyeWidth,
+ const float& aLeftEyeHeight,
+ const float& aRightEyeX,
+ const float& aRightEyeY,
+ const float& aRightEyeWidth,
+ const float& aRightEyeHeight)
+{
+ RefPtr<VRLayerChild> layer = new VRLayerChild(aDisplayID, this);
+ return layer.forget().take();
+}
+
+bool
+VRManagerChild::DeallocPVRLayerChild(PVRLayerChild* actor)
+{
+ delete actor;
+ return true;
+}
+
+bool
+VRManagerChild::RecvUpdateDisplayInfo(nsTArray<VRDisplayInfo>&& aDisplayUpdates)
+{
+ bool bDisplayConnected = false;
+ bool bDisplayDisconnected = false;
+
+ // Check if any displays have been disconnected
+ for (auto& display : mDisplays) {
+ bool found = false;
+ for (auto& displayUpdate : aDisplayUpdates) {
+ if (display->GetDisplayInfo().GetDisplayID() == displayUpdate.GetDisplayID()) {
+ found = true;
break;
}
}
- if (isNewDevice) {
- devices.AppendElement(new VRDisplayProxy(deviceUpdate));
+ if (!found) {
+ display->NotifyDisconnected();
+ bDisplayDisconnected = true;
}
}
- mDevices = devices;
+ // mDisplays could be a hashed container for more scalability, but not worth
+ // it now as we expect < 10 entries.
+ nsTArray<RefPtr<VRDisplayClient>> displays;
+ for (VRDisplayInfo& displayUpdate: aDisplayUpdates) {
+ bool isNewDisplay = true;
+ for (auto& display: mDisplays) {
+ const VRDisplayInfo& prevInfo = display->GetDisplayInfo();
+ if (prevInfo.GetDisplayID() == displayUpdate.GetDisplayID()) {
+ if (displayUpdate.GetIsConnected() && !prevInfo.GetIsConnected()) {
+ bDisplayConnected = true;
+ }
+ if (!displayUpdate.GetIsConnected() && prevInfo.GetIsConnected()) {
+ bDisplayDisconnected = true;
+ }
+ display->UpdateDisplayInfo(displayUpdate);
+ displays.AppendElement(display);
+ isNewDisplay = false;
+ break;
+ }
+ }
+ if (isNewDisplay) {
+ displays.AppendElement(new VRDisplayClient(displayUpdate));
+ bDisplayConnected = true;
+ }
+ }
+ mDisplays = displays;
for (auto& nav: mNavigatorCallbacks) {
+ // We must call NotifyVRDisplaysUpdated for every
+ // Navigator in mNavigatorCallbacks to ensure that
+ // the promise returned by Navigator.GetVRDevices
+ // can resolve. This must happen even if no changes
+ // to VRDisplays have been detected here.
nav->NotifyVRDisplaysUpdated();
}
mNavigatorCallbacks.Clear();
- return true;
-}
-
-bool
-VRManagerChild::RecvUpdateDeviceSensors(nsTArray<VRSensorUpdate>&& aDeviceSensorUpdates)
-{
- // mDevices could be a hashed container for more scalability, but not worth
- // it now as we expect < 10 entries.
- for (auto& sensorUpdate: aDeviceSensorUpdates) {
- for (auto& device: mDevices) {
- if (device->GetDeviceInfo().GetDeviceID() == sensorUpdate.mDeviceID) {
- device->UpdateSensorState(sensorUpdate.mSensorState);
- mInputFrameID = sensorUpdate.mSensorState.inputFrameID;
- break;
- }
- }
+ if (bDisplayConnected) {
+ FireDOMVRDisplayConnectedEvent();
+ }
+ if (bDisplayDisconnected) {
+ FireDOMVRDisplayDisconnectedEvent();
}
return true;
}
bool
-VRManagerChild::GetVRDisplays(nsTArray<RefPtr<VRDisplayProxy> >& aDevices)
+VRManagerChild::GetVRDisplays(nsTArray<RefPtr<VRDisplayClient>>& aDisplays)
{
- aDevices = mDevices;
+ aDisplays = mDisplays;
return true;
}
bool
VRManagerChild::RefreshVRDisplaysWithCallback(dom::Navigator* aNavigator)
{
- bool success = SendRefreshDevices();
+ bool success = SendRefreshDisplays();
if (success) {
mNavigatorCallbacks.AppendElement(aNavigator);
}
return success;
}
int
VRManagerChild::GetInputFrameID()
{
return mInputFrameID;
}
+bool
+VRManagerChild::RecvParentAsyncMessages(InfallibleTArray<AsyncParentMessageData>&& aMessages)
+{
+ for (InfallibleTArray<AsyncParentMessageData>::index_type i = 0; i < aMessages.Length(); ++i) {
+ const AsyncParentMessageData& message = aMessages[i];
+
+ switch (message.type()) {
+ case AsyncParentMessageData::TOpDeliverFence: {
+ const OpDeliverFence& op = message.get_OpDeliverFence();
+ FenceHandle fence = op.fence();
+ DeliverFence(op.TextureId(), fence);
+ break;
+ }
+ case AsyncParentMessageData::TOpNotifyNotUsed: {
+ const OpNotifyNotUsed& op = message.get_OpNotifyNotUsed();
+ NotifyNotUsed(op.TextureId(), op.fwdTransactionId());
+ break;
+ }
+ default:
+ NS_ERROR("unknown AsyncParentMessageData type");
+ return false;
+ }
+ }
+ return true;
+}
+
+PTextureChild*
+VRManagerChild::CreateTexture(const SurfaceDescriptor& aSharedData,
+ LayersBackend aLayersBackend,
+ TextureFlags aFlags,
+ uint64_t aSerial)
+{
+ return SendPTextureConstructor(aSharedData, aLayersBackend, aFlags, aSerial);
+}
+
+void
+VRManagerChild::DeliverFence(uint64_t aTextureId, FenceHandle& aReleaseFenceHandle)
+{
+ RefPtr<TextureClient> client = mTexturesWaitingRecycled.Get(aTextureId);
+ if (!client) {
+ return;
+ }
+ client->SetReleaseFenceHandle(aReleaseFenceHandle);
+}
+
+void
+VRManagerChild::CancelWaitForRecycle(uint64_t aTextureId)
+{
+ RefPtr<TextureClient> client = mTexturesWaitingRecycled.Get(aTextureId);
+ if (!client) {
+ return;
+ }
+ mTexturesWaitingRecycled.Remove(aTextureId);
+}
+
+void
+VRManagerChild::NotifyNotUsed(uint64_t aTextureId, uint64_t aFwdTransactionId)
+{
+ RefPtr<TextureClient> client = mTexturesWaitingRecycled.Get(aTextureId);
+ if (!client) {
+ return;
+ }
+ mTexturesWaitingRecycled.Remove(aTextureId);
+}
+
+bool
+VRManagerChild::AllocShmem(size_t aSize,
+ ipc::SharedMemory::SharedMemoryType aType,
+ ipc::Shmem* aShmem)
+{
+ return PVRManagerChild::AllocShmem(aSize, aType, aShmem);
+}
+
+bool
+VRManagerChild::AllocUnsafeShmem(size_t aSize,
+ ipc::SharedMemory::SharedMemoryType aType,
+ ipc::Shmem* aShmem)
+{
+ return PVRManagerChild::AllocUnsafeShmem(aSize, aType, aShmem);
+}
+
+void
+VRManagerChild::DeallocShmem(ipc::Shmem& aShmem)
+{
+ PVRManagerChild::DeallocShmem(aShmem);
+}
+
+PVRLayerChild*
+VRManagerChild::CreateVRLayer(uint32_t aDisplayID, const Rect& aLeftEyeRect, const Rect& aRightEyeRect)
+{
+ return SendPVRLayerConstructor(aDisplayID,
+ aLeftEyeRect.x, aLeftEyeRect.y, aLeftEyeRect.width, aLeftEyeRect.height,
+ aRightEyeRect.x, aRightEyeRect.y, aRightEyeRect.width, aRightEyeRect.height);
+}
+
+
+// XXX TODO - VRManagerChild::FrameRequest is the same as nsIDocument::FrameRequest, should we consolidate these?
+struct VRManagerChild::FrameRequest
+{
+ FrameRequest(mozilla::dom::FrameRequestCallback& aCallback,
+ int32_t aHandle) :
+ mCallback(&aCallback),
+ mHandle(aHandle)
+ {}
+
+ // Conversion operator so that we can append these to a
+ // FrameRequestCallbackList
+ operator const RefPtr<mozilla::dom::FrameRequestCallback>& () const {
+ return mCallback;
+ }
+
+ // Comparator operators to allow RemoveElementSorted with an
+ // integer argument on arrays of FrameRequest
+ bool operator==(int32_t aHandle) const {
+ return mHandle == aHandle;
+ }
+ bool operator<(int32_t aHandle) const {
+ return mHandle < aHandle;
+ }
+
+ RefPtr<mozilla::dom::FrameRequestCallback> mCallback;
+ int32_t mHandle;
+};
+
+nsresult
+VRManagerChild::ScheduleFrameRequestCallback(mozilla::dom::FrameRequestCallback& aCallback,
+ int32_t *aHandle)
+{
+ if (mFrameRequestCallbackCounter == INT32_MAX) {
+ // Can't increment without overflowing; bail out
+ return NS_ERROR_NOT_AVAILABLE;
+ }
+ int32_t newHandle = ++mFrameRequestCallbackCounter;
+
+ DebugOnly<FrameRequest*> request =
+ mFrameRequestCallbacks.AppendElement(FrameRequest(aCallback, newHandle));
+ NS_ASSERTION(request, "This is supposed to be infallible!");
+
+ *aHandle = newHandle;
+ return NS_OK;
+}
+
+void
+VRManagerChild::CancelFrameRequestCallback(int32_t aHandle)
+{
+ // mFrameRequestCallbacks is stored sorted by handle
+ mFrameRequestCallbacks.RemoveElementSorted(aHandle);
+}
+
+bool
+VRManagerChild::RecvNotifyVSync()
+{
+ for (auto& display : mDisplays) {
+ display->NotifyVsync();
+ }
+
+ return true;
+}
+
+bool
+VRManagerChild::RecvNotifyVRVSync(const uint32_t& aDisplayID)
+{
+ for (auto& display : mDisplays) {
+ if (display->GetDisplayInfo().GetDisplayID() == aDisplayID) {
+ display->NotifyVRVsync();
+ }
+ }
+
+ return true;
+}
+
+void
+VRManagerChild::RunFrameRequestCallbacks()
+{
+ TimeStamp nowTime = TimeStamp::Now();
+ mozilla::TimeDuration duration = nowTime - mStartTimeStamp;
+ DOMHighResTimeStamp timeStamp = duration.ToMilliseconds();
+
+
+ nsTArray<FrameRequest> callbacks;
+ callbacks.AppendElements(mFrameRequestCallbacks);
+ mFrameRequestCallbacks.Clear();
+ for (auto& callback : callbacks) {
+ callback.mCallback->Call(timeStamp);
+ }
+}
+
+void
+VRManagerChild::FireDOMVRDisplayConnectedEvent()
+{
+}
+
+void
+VRManagerChild::FireDOMVRDisplayDisconnectedEvent()
+{
+}
+
+void
+VRManagerChild::FireDOMVRDisplayPresentChangeEvent()
+{
+}
} // namespace gfx
} // namespace mozilla
--- a/gfx/vr/ipc/VRManagerChild.h
+++ b/gfx/vr/ipc/VRManagerChild.h
@@ -4,60 +4,162 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef MOZILLA_GFX_VR_VRMANAGERCHILD_H
#define MOZILLA_GFX_VR_VRMANAGERCHILD_H
#include "mozilla/gfx/PVRManagerChild.h"
+#include "mozilla/ipc/SharedMemory.h" // for SharedMemory, etc
#include "ThreadSafeRefcountingWithMainThreadDestruction.h"
+#include "mozilla/layers/ISurfaceAllocator.h" // for ISurfaceAllocator
+#include "mozilla/layers/LayersTypes.h" // for LayersBackend
+#include "mozilla/layers/TextureForwarder.h"
namespace mozilla {
namespace dom {
class Navigator;
class VRDisplay;
} // namespace dom
+namespace layers {
+class PCompositableChild;
+class TextureClient;
+}
namespace gfx {
-class VRDisplayProxy;
-
+class VRLayerChild;
+class VRDisplayClient;
class VRManagerChild : public PVRManagerChild
+ , public layers::TextureForwarder
+ , public layers::ShmemAllocator
{
public:
- NS_INLINE_DECL_THREADSAFE_REFCOUNTING_WITH_MAIN_THREAD_DESTRUCTION(VRManagerChild)
+ static VRManagerChild* Get();
int GetInputFrameID();
- bool GetVRDisplays(nsTArray<RefPtr<VRDisplayProxy> >& aDevices);
+ bool GetVRDisplays(nsTArray<RefPtr<VRDisplayClient> >& aDisplays);
bool RefreshVRDisplaysWithCallback(dom::Navigator* aNavigator);
static void InitSameProcess();
static void InitWithGPUProcess(Endpoint<PVRManagerChild>&& aEndpoint);
static bool InitForContent(Endpoint<PVRManagerChild>&& aEndpoint);
static void ShutDown();
static bool IsCreated();
- static VRManagerChild* Get();
+ virtual PTextureChild* CreateTexture(const SurfaceDescriptor& aSharedData,
+ layers::LayersBackend aLayersBackend,
+ TextureFlags aFlags,
+ uint64_t aSerial) override;
+ virtual void CancelWaitForRecycle(uint64_t aTextureId) override;
+
+ PVRLayerChild* CreateVRLayer(uint32_t aDisplayID, const Rect& aLeftEyeRect, const Rect& aRightEyeRect);
+
+ static void IdentifyTextureHost(const layers::TextureFactoryIdentifier& aIdentifier);
+ layers::LayersBackend GetBackendType() const;
+ layers::SyncObject* GetSyncObject() { return mSyncObject; }
+
+ virtual MessageLoop* GetMessageLoop() const override { return mMessageLoop; }
+ virtual base::ProcessId GetParentPid() const override { return OtherPid(); }
+
+ nsresult ScheduleFrameRequestCallback(mozilla::dom::FrameRequestCallback& aCallback,
+ int32_t *aHandle);
+ void CancelFrameRequestCallback(int32_t aHandle);
+ void RunFrameRequestCallbacks();
+
+ void FireDOMVRDisplayConnectedEvent();
+ void FireDOMVRDisplayDisconnectedEvent();
+ void FireDOMVRDisplayPresentChangeEvent();
protected:
explicit VRManagerChild();
~VRManagerChild();
void Destroy();
static void DeferredDestroy(RefPtr<VRManagerChild> aVRManagerChild);
- virtual bool RecvUpdateDeviceInfo(nsTArray<VRDisplayUpdate>&& aDeviceUpdates) override;
- virtual bool RecvUpdateDeviceSensors(nsTArray<VRSensorUpdate>&& aDeviceSensorUpdates) override;
+ virtual PTextureChild* AllocPTextureChild(const SurfaceDescriptor& aSharedData,
+ const layers::LayersBackend& aLayersBackend,
+ const TextureFlags& aFlags,
+ const uint64_t& aSerial) override;
+ virtual bool DeallocPTextureChild(PTextureChild* actor) override;
+
+ virtual PVRLayerChild* AllocPVRLayerChild(const uint32_t& aDisplayID,
+ const float& aLeftEyeX,
+ const float& aLeftEyeY,
+ const float& aLeftEyeWidth,
+ const float& aLeftEyeHeight,
+ const float& aRightEyeX,
+ const float& aRightEyeY,
+ const float& aRightEyeWidth,
+ const float& aRightEyeHeight) override;
+ virtual bool DeallocPVRLayerChild(PVRLayerChild* actor) override;
+
+ virtual bool RecvUpdateDisplayInfo(nsTArray<VRDisplayInfo>&& aDisplayUpdates) override;
+
+ virtual bool RecvParentAsyncMessages(InfallibleTArray<AsyncParentMessageData>&& aMessages) override;
+
+ virtual bool RecvNotifyVSync() override;
+ virtual bool RecvNotifyVRVSync(const uint32_t& aDisplayID) override;
+
+
+ // ShmemAllocator
+
+ virtual ShmemAllocator* AsShmemAllocator() override { return this; }
+
+ virtual bool AllocShmem(size_t aSize,
+ ipc::SharedMemory::SharedMemoryType aType,
+ ipc::Shmem* aShmem) override;
+
+ virtual bool AllocUnsafeShmem(size_t aSize,
+ ipc::SharedMemory::SharedMemoryType aType,
+ ipc::Shmem* aShmem) override;
+
+ virtual void DeallocShmem(ipc::Shmem& aShmem) override;
+
+ virtual bool IsSameProcess() const override
+ {
+ return OtherPid() == base::GetCurrentProcId();
+ }
friend class layers::CompositorBridgeChild;
private:
- nsTArray<RefPtr<VRDisplayProxy> > mDevices;
+ void DeliverFence(uint64_t aTextureId, FenceHandle& aReleaseFenceHandle);
+ /**
+ * Notify that the host side has finished using the texture with the given id.
+ * The transaction id is used to verify that no newer usage is in flight.
+ */
+ void NotifyNotUsed(uint64_t aTextureId, uint64_t aFwdTransactionId);
+
+ nsTArray<RefPtr<VRDisplayClient> > mDisplays;
nsTArray<dom::Navigator*> mNavigatorCallbacks;
int32_t mInputFrameID;
+
+ MessageLoop* mMessageLoop;
+
+ struct FrameRequest;
+
+ nsTArray<FrameRequest> mFrameRequestCallbacks;
+ /**
+ * The current frame request callback handle
+ */
+ int32_t mFrameRequestCallbackCounter;
+ mozilla::TimeStamp mStartTimeStamp;
+
+ /**
+ * Holds TextureClient refs until the host side has finished using them.
+ * This defers invocation of the TextureClient recycle callback.
+ */
+ nsDataHashtable<nsUint64HashKey, RefPtr<layers::TextureClient> > mTexturesWaitingRecycled;
+
+ layers::LayersBackend mBackend;
+ RefPtr<layers::SyncObject> mSyncObject;
+
+ DISALLOW_COPY_AND_ASSIGN(VRManagerChild);
};
} // namespace mozilla
} // namespace gfx
#endif // MOZILLA_GFX_VR_VRMANAGERCHILD_H
--- a/gfx/vr/ipc/VRManagerParent.cpp
+++ b/gfx/vr/ipc/VRManagerParent.cpp
@@ -1,52 +1,155 @@
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
* vim: sw=2 ts=8 et :
*/
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "VRManagerParent.h"
+#include "ipc/VRLayerParent.h"
#include "mozilla/gfx/PVRManagerParent.h"
#include "mozilla/ipc/ProtocolTypes.h"
#include "mozilla/ipc/ProtocolUtils.h" // for IToplevelProtocol
#include "mozilla/TimeStamp.h" // for TimeStamp
#include "mozilla/layers/CompositorThread.h"
#include "mozilla/unused.h"
#include "VRManager.h"
namespace mozilla {
+using namespace layers;
namespace gfx {
VRManagerParent::VRManagerParent(ProcessId aChildProcessId)
+ : HostIPCAllocator()
+ , mHaveEventListener(false)
{
MOZ_COUNT_CTOR(VRManagerParent);
MOZ_ASSERT(NS_IsMainThread());
SetOtherProcessId(aChildProcessId);
}
VRManagerParent::~VRManagerParent()
{
- MOZ_ASSERT(NS_IsMainThread());
-
MOZ_ASSERT(!mVRManagerHolder);
MOZ_COUNT_DTOR(VRManagerParent);
}
-void VRManagerParent::RegisterWithManager()
+PTextureParent*
+VRManagerParent::AllocPTextureParent(const SurfaceDescriptor& aSharedData,
+ const LayersBackend& aLayersBackend,
+ const TextureFlags& aFlags,
+ const uint64_t& aSerial)
+{
+ return layers::TextureHost::CreateIPDLActor(this, aSharedData, aLayersBackend, aFlags, aSerial);
+}
+
+bool
+VRManagerParent::DeallocPTextureParent(PTextureParent* actor)
+{
+ return layers::TextureHost::DestroyIPDLActor(actor);
+}
+
+PVRLayerParent*
+VRManagerParent::AllocPVRLayerParent(const uint32_t& aDisplayID,
+ const float& aLeftEyeX,
+ const float& aLeftEyeY,
+ const float& aLeftEyeWidth,
+ const float& aLeftEyeHeight,
+ const float& aRightEyeX,
+ const float& aRightEyeY,
+ const float& aRightEyeWidth,
+ const float& aRightEyeHeight)
+{
+ RefPtr<VRLayerParent> layer;
+ layer = new VRLayerParent(aDisplayID,
+ Rect(aLeftEyeX, aLeftEyeY, aLeftEyeWidth, aLeftEyeHeight),
+ Rect(aRightEyeX, aRightEyeY, aRightEyeWidth, aRightEyeHeight));
+ VRManager* vm = VRManager::Get();
+ RefPtr<gfx::VRDisplayHost> display = vm->GetDisplay(aDisplayID);
+ if (display) {
+ display->AddLayer(layer);
+ }
+ return layer.forget().take();
+}
+
+bool
+VRManagerParent::DeallocPVRLayerParent(PVRLayerParent* actor)
+{
+ gfx::VRLayerParent* layer = static_cast<gfx::VRLayerParent*>(actor);
+
+ VRManager* vm = VRManager::Get();
+ RefPtr<gfx::VRDisplayHost> display = vm->GetDisplay(layer->GetDisplayID());
+ if (display) {
+ display->RemoveLayer(layer);
+ }
+
+ delete actor;
+ return true;
+}
+
+bool
+VRManagerParent::AllocShmem(size_t aSize,
+ ipc::SharedMemory::SharedMemoryType aType,
+ ipc::Shmem* aShmem)
+{
+ return PVRManagerParent::AllocShmem(aSize, aType, aShmem);
+}
+
+bool
+VRManagerParent::AllocUnsafeShmem(size_t aSize,
+ ipc::SharedMemory::SharedMemoryType aType,
+ ipc::Shmem* aShmem)
+{
+ return PVRManagerParent::AllocUnsafeShmem(aSize, aType, aShmem);
+}
+
+void
+VRManagerParent::DeallocShmem(ipc::Shmem& aShmem)
+{
+ PVRManagerParent::DeallocShmem(aShmem);
+}
+
+bool
+VRManagerParent::IsSameProcess() const
+{
+ return OtherPid() == base::GetCurrentProcId();
+}
+
+void
+VRManagerParent::NotifyNotUsed(PTextureParent* aTexture, uint64_t aTransactionId)
+{
+ MOZ_ASSERT_UNREACHABLE("unexpected to be called");
+}
+
+void
+VRManagerParent::SendAsyncMessage(const InfallibleTArray<AsyncParentMessageData>& aMessage)
+{
+ MOZ_ASSERT_UNREACHABLE("unexpected to be called");
+}
+
+base::ProcessId
+VRManagerParent::GetChildProcessId()
+{
+ return OtherPid();
+}
+
+void
+VRManagerParent::RegisterWithManager()
{
VRManager* vm = VRManager::Get();
vm->AddVRManagerParent(this);
mVRManagerHolder = vm;
}
-void VRManagerParent::UnregisterFromManager()
+void
+VRManagerParent::UnregisterFromManager()
{
VRManager* vm = VRManager::Get();
vm->RemoveVRManagerParent(this);
mVRManagerHolder = nullptr;
}
/* static */ bool
VRManagerParent::CreateForContent(Endpoint<PVRManagerParent>&& aEndpoint)
@@ -125,56 +228,69 @@ VRManagerParent::CloneToplevel(const Inf
void
VRManagerParent::OnChannelConnected(int32_t aPid)
{
mCompositorThreadHolder = layers::CompositorThreadHolder::GetSingleton();
}
bool
-VRManagerParent::RecvRefreshDevices()
+VRManagerParent::RecvRefreshDisplays()
{
+ // This is called to refresh the VR Displays for Navigator.GetVRDevices().
+ // We must pass "true" to VRManager::RefreshVRDisplays()
+ // to ensure that the promise returned by Navigator.GetVRDevices
+ // can resolve even if there are no changes to the VR Displays.
VRManager* vm = VRManager::Get();
- vm->RefreshVRDisplays();
+ vm->RefreshVRDisplays(true);
return true;
}
bool
-VRManagerParent::RecvResetSensor(const uint32_t& aDeviceID)
+VRManagerParent::RecvResetSensor(const uint32_t& aDisplayID)
{
VRManager* vm = VRManager::Get();
- RefPtr<gfx::VRHMDInfo> device = vm->GetDevice(aDeviceID);
- if (device != nullptr) {
- device->ZeroSensor();
+ RefPtr<gfx::VRDisplayHost> display = vm->GetDisplay(aDisplayID);
+ if (display != nullptr) {
+ display->ZeroSensor();
}
return true;
}
bool
-VRManagerParent::RecvKeepSensorTracking(const uint32_t& aDeviceID)
+VRManagerParent::RecvGetSensorState(const uint32_t& aDisplayID, VRHMDSensorState* aState)
{
VRManager* vm = VRManager::Get();
- RefPtr<gfx::VRHMDInfo> device = vm->GetDevice(aDeviceID);
- if (device != nullptr) {
- Unused << device->KeepSensorTracking();
+ RefPtr<gfx::VRDisplayHost> display = vm->GetDisplay(aDisplayID);
+ if (display != nullptr) {
+ *aState = display->GetSensorState();
}
return true;
}
bool
-VRManagerParent::RecvSetFOV(const uint32_t& aDeviceID,
- const VRFieldOfView& aFOVLeft,
- const VRFieldOfView& aFOVRight,
- const double& zNear,
- const double& zFar)
+VRManagerParent::RecvGetImmediateSensorState(const uint32_t& aDisplayID, VRHMDSensorState* aState)
{
VRManager* vm = VRManager::Get();
- RefPtr<gfx::VRHMDInfo> device = vm->GetDevice(aDeviceID);
- if (device != nullptr) {
- device->SetFOV(aFOVLeft, aFOVRight, zNear, zFar);
+ RefPtr<gfx::VRDisplayHost> display = vm->GetDisplay(aDisplayID);
+ if (display != nullptr) {
+ *aState = display->GetImmediateSensorState();
}
return true;
}
+bool
+VRManagerParent::HaveEventListener()
+{
+ return mHaveEventListener;
+}
+
+bool
+VRManagerParent::RecvSetHaveEventListener(const bool& aHaveEventListener)
+{
+ mHaveEventListener = aHaveEventListener;
+ return true;
+}
+
} // namespace gfx
} // namespace mozilla
--- a/gfx/vr/ipc/VRManagerParent.h
+++ b/gfx/vr/ipc/VRManagerParent.h
@@ -3,57 +3,97 @@
*/
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef MOZILLA_GFX_VR_VRMANAGERPARENT_H
#define MOZILLA_GFX_VR_VRMANAGERPARENT_H
+#include "mozilla/layers/CompositableTransactionParent.h"
#include "mozilla/layers/CompositorThread.h" // for CompositorThreadHolder
#include "mozilla/gfx/PVRManagerParent.h" // for PVRManagerParent
+#include "mozilla/gfx/PVRLayerParent.h" // for PVRLayerParent
#include "mozilla/ipc/ProtocolUtils.h" // for IToplevelProtocol
#include "mozilla/TimeStamp.h" // for TimeStamp
#include "gfxVR.h" // for VRFieldOfView
namespace mozilla {
+using namespace layers;
namespace gfx {
class VRManager;
class VRManagerParent final : public PVRManagerParent
+ , public HostIPCAllocator
+ , public ShmemAllocator
{
- NS_INLINE_DECL_THREADSAFE_REFCOUNTING_WITH_MAIN_THREAD_DESTRUCTION(VRManagerParent)
public:
explicit VRManagerParent(ProcessId aChildProcessId);
static VRManagerParent* CreateSameProcess();
static bool CreateForGPUProcess(Endpoint<PVRManagerParent>&& aEndpoint);
static bool CreateForContent(Endpoint<PVRManagerParent>&& aEndpoint);
// Overriden from IToplevelProtocol
ipc::IToplevelProtocol*
CloneToplevel(const InfallibleTArray<ipc::ProtocolFdMapping>& aFds,
base::ProcessHandle aPeerProcess,
mozilla::ipc::ProtocolCloneContext* aCtx) override;
+ virtual base::ProcessId GetChildProcessId() override;
+
+ // ShmemAllocator
+
+ virtual ShmemAllocator* AsShmemAllocator() override { return this; }
+
+ virtual bool AllocShmem(size_t aSize,
+ ipc::SharedMemory::SharedMemoryType aType,
+ ipc::Shmem* aShmem) override;
+
+ virtual bool AllocUnsafeShmem(size_t aSize,
+ ipc::SharedMemory::SharedMemoryType aType,
+ ipc::Shmem* aShmem) override;
+
+ virtual void DeallocShmem(ipc::Shmem& aShmem) override;
+
+ virtual bool IsSameProcess() const override;
+ bool HaveEventListener();
+
+ virtual void NotifyNotUsed(PTextureParent* aTexture, uint64_t aTransactionId) override;
+ virtual void SendAsyncMessage(const InfallibleTArray<AsyncParentMessageData>& aMessage) override;
+
protected:
~VRManagerParent();
+ virtual PTextureParent* AllocPTextureParent(const SurfaceDescriptor& aSharedData,
+ const LayersBackend& aLayersBackend,
+ const TextureFlags& aFlags,
+ const uint64_t& aSerial) override;
+ virtual bool DeallocPTextureParent(PTextureParent* actor) override;
+
+ virtual PVRLayerParent* AllocPVRLayerParent(const uint32_t& aDisplayID,
+ const float& aLeftEyeX,
+ const float& aLeftEyeY,
+ const float& aLeftEyeWidth,
+ const float& aLeftEyeHeight,
+ const float& aRightEyeX,
+ const float& aRightEyeY,
+ const float& aRightEyeWidth,
+ const float& aRightEyeHeight) override;
+ virtual bool DeallocPVRLayerParent(PVRLayerParent* actor) override;
+
virtual void ActorDestroy(ActorDestroyReason why) override;
void OnChannelConnected(int32_t pid) override;
- virtual bool RecvRefreshDevices() override;
- virtual bool RecvResetSensor(const uint32_t& aDeviceID) override;
- virtual bool RecvKeepSensorTracking(const uint32_t& aDeviceID) override;
- virtual bool RecvSetFOV(const uint32_t& aDeviceID,
- const VRFieldOfView& aFOVLeft,
- const VRFieldOfView& aFOVRight,
- const double& zNear,
- const double& zFar) override;
+ virtual bool RecvRefreshDisplays() override;
+ virtual bool RecvResetSensor(const uint32_t& aDisplayID) override;
+ virtual bool RecvGetSensorState(const uint32_t& aDisplayID, VRHMDSensorState* aState) override;
+ virtual bool RecvGetImmediateSensorState(const uint32_t& aDisplayID, VRHMDSensorState* aState) override;
+ virtual bool RecvSetHaveEventListener(const bool& aHaveEventListener) override;
private:
void RegisterWithManager();
void UnregisterFromManager();
void Bind(Endpoint<PVRManagerParent>&& aEndpoint);
static void RegisterVRManagerInCompositorThread(VRManagerParent* aVRManager);
@@ -64,14 +104,15 @@ private:
// deferred destruction of ourselves.
RefPtr<VRManagerParent> mSelfRef;
// Keep the compositor thread alive, until we have destroyed ourselves.
RefPtr<layers::CompositorThreadHolder> mCompositorThreadHolder;
// Keep the VRManager alive, until we have destroyed ourselves.
RefPtr<VRManager> mVRManagerHolder;
+ bool mHaveEventListener;
};
} // namespace mozilla
} // namespace gfx
#endif // MOZILLA_GFX_VR_VRMANAGERPARENT_H
--- a/gfx/vr/ipc/VRMessageUtils.h
+++ b/gfx/vr/ipc/VRMessageUtils.h
@@ -11,108 +11,60 @@
#include "mozilla/GfxMessageUtils.h"
#include "VRManager.h"
#include "gfxVR.h"
namespace IPC {
template<>
-struct ParamTraits<mozilla::gfx::VRHMDType> :
- public ContiguousEnumSerializer<mozilla::gfx::VRHMDType,
- mozilla::gfx::VRHMDType(0),
- mozilla::gfx::VRHMDType(mozilla::gfx::VRHMDType::NumHMDTypes)> {};
+struct ParamTraits<mozilla::gfx::VRDisplayType> :
+ public ContiguousEnumSerializer<mozilla::gfx::VRDisplayType,
+ mozilla::gfx::VRDisplayType(0),
+ mozilla::gfx::VRDisplayType(mozilla::gfx::VRDisplayType::NumVRDisplayTypes)> {};
template<>
struct ParamTraits<mozilla::gfx::VRDisplayCapabilityFlags> :
public BitFlagsEnumSerializer<mozilla::gfx::VRDisplayCapabilityFlags,
mozilla::gfx::VRDisplayCapabilityFlags::Cap_All> {};
template <>
-struct ParamTraits<mozilla::gfx::VRDisplayUpdate>
-{
- typedef mozilla::gfx::VRDisplayUpdate paramType;
-
- static void Write(Message* aMsg, const paramType& aParam)
- {
- WriteParam(aMsg, aParam.mDeviceInfo);
- WriteParam(aMsg, aParam.mSensorState);
- }
-
- static bool Read(const Message* aMsg, PickleIterator* aIter, paramType* aResult)
- {
- if (!ReadParam(aMsg, aIter, &(aResult->mDeviceInfo)) ||
- !ReadParam(aMsg, aIter, &(aResult->mSensorState))) {
- return false;
- }
- return true;
- }
-};
-
-template <>
-struct ParamTraits<mozilla::gfx::VRSensorUpdate>
-{
- typedef mozilla::gfx::VRSensorUpdate paramType;
-
- static void Write(Message* aMsg, const paramType& aParam)
- {
- WriteParam(aMsg, aParam.mDeviceID);
- WriteParam(aMsg, aParam.mSensorState);
- }
-
- static bool Read(const Message* aMsg, PickleIterator* aIter, paramType* aResult)
- {
- if (!ReadParam(aMsg, aIter, &(aResult->mDeviceID)) ||
- !ReadParam(aMsg, aIter, &(aResult->mSensorState))) {
- return false;
- }
- return true;
- }
-};
-
-template <>
struct ParamTraits<mozilla::gfx::VRDisplayInfo>
{
typedef mozilla::gfx::VRDisplayInfo paramType;
static void Write(Message* aMsg, const paramType& aParam)
{
WriteParam(aMsg, aParam.mType);
- WriteParam(aMsg, aParam.mDeviceID);
- WriteParam(aMsg, aParam.mDeviceName);
+ WriteParam(aMsg, aParam.mDisplayID);
+ WriteParam(aMsg, aParam.mDisplayName);
WriteParam(aMsg, aParam.mCapabilityFlags);
WriteParam(aMsg, aParam.mEyeResolution);
- WriteParam(aMsg, aParam.mScreenRect);
- WriteParam(aMsg, aParam.mIsFakeScreen);
+ WriteParam(aMsg, aParam.mIsConnected);
+ WriteParam(aMsg, aParam.mIsPresenting);
for (int i = 0; i < mozilla::gfx::VRDisplayInfo::NumEyes; i++) {
- WriteParam(aMsg, aParam.mMaximumEyeFOV[i]);
- WriteParam(aMsg, aParam.mRecommendedEyeFOV[i]);
WriteParam(aMsg, aParam.mEyeFOV[i]);
WriteParam(aMsg, aParam.mEyeTranslation[i]);
- WriteParam(aMsg, aParam.mEyeProjectionMatrix[i]);
}
}
static bool Read(const Message* aMsg, PickleIterator* aIter, paramType* aResult)
{
if (!ReadParam(aMsg, aIter, &(aResult->mType)) ||
- !ReadParam(aMsg, aIter, &(aResult->mDeviceID)) ||
- !ReadParam(aMsg, aIter, &(aResult->mDeviceName)) ||
+ !ReadParam(aMsg, aIter, &(aResult->mDisplayID)) ||
+ !ReadParam(aMsg, aIter, &(aResult->mDisplayName)) ||
!ReadParam(aMsg, aIter, &(aResult->mCapabilityFlags)) ||
!ReadParam(aMsg, aIter, &(aResult->mEyeResolution)) ||
- !ReadParam(aMsg, aIter, &(aResult->mScreenRect)) ||
- !ReadParam(aMsg, aIter, &(aResult->mIsFakeScreen))) {
+ !ReadParam(aMsg, aIter, &(aResult->mIsConnected)) ||
+ !ReadParam(aMsg, aIter, &(aResult->mIsPresenting))) {
return false;
}
for (int i = 0; i < mozilla::gfx::VRDisplayInfo::NumEyes; i++) {
- if (!ReadParam(aMsg, aIter, &(aResult->mMaximumEyeFOV[i])) ||
- !ReadParam(aMsg, aIter, &(aResult->mRecommendedEyeFOV[i])) ||
- !ReadParam(aMsg, aIter, &(aResult->mEyeFOV[i])) ||
- !ReadParam(aMsg, aIter, &(aResult->mEyeTranslation[i])) ||
- !ReadParam(aMsg, aIter, &(aResult->mEyeProjectionMatrix[i]))) {
+ if (!ReadParam(aMsg, aIter, &(aResult->mEyeFOV[i])) ||
+ !ReadParam(aMsg, aIter, &(aResult->mEyeTranslation[i]))) {
return false;
}
}
return true;
}
};
--- a/gfx/vr/moz.build
+++ b/gfx/vr/moz.build
@@ -1,38 +1,49 @@
# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
# vim: set filetype=python:
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
EXPORTS += [
'gfxVR.h',
+ 'ipc/VRLayerChild.h',
'ipc/VRManagerChild.h',
'ipc/VRManagerParent.h',
'ipc/VRMessageUtils.h',
- 'VRDisplayProxy.h',
+ 'VRDisplayClient.h',
+ 'VRDisplayPresentation.h',
'VRManager.h',
]
LOCAL_INCLUDES += [
'/gfx/thebes',
]
UNIFIED_SOURCES += [
'gfxVR.cpp',
- 'gfxVROculus.cpp',
'gfxVROSVR.cpp',
+ 'ipc/VRLayerChild.cpp',
+ 'ipc/VRLayerParent.cpp',
'ipc/VRManagerChild.cpp',
'ipc/VRManagerParent.cpp',
- 'VRDisplayProxy.cpp',
+ 'VRDisplayClient.cpp',
+ 'VRDisplayHost.cpp',
+ 'VRDisplayPresentation.cpp',
'VRManager.cpp',
]
+if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'windows':
+ SOURCES += [
+ 'gfxVROculus.cpp',
+ ]
+
IPDL_SOURCES = [
+ 'ipc/PVRLayer.ipdl',
'ipc/PVRManager.ipdl',
]
# For building with the real SDK instead of our local hack
#SOURCES += [
# 'OVR_CAPI_Util.cpp',
# 'OVR_CAPIShim.c',
# 'OVR_StereoProjection.cpp',
--- a/layout/base/nsDisplayList.cpp
+++ b/layout/base/nsDisplayList.cpp
@@ -14,17 +14,16 @@
#include <stdint.h>
#include <algorithm>
#include "gfxUtils.h"
#include "mozilla/dom/TabChild.h"
#include "mozilla/dom/KeyframeEffect.h"
#include "mozilla/gfx/2D.h"
-#include "VRDisplayProxy.h"
#include "mozilla/layers/PLayerTransaction.h"
#include "nsCSSRendering.h"
#include "nsRenderingContext.h"
#include "nsISelectionController.h"
#include "nsIPresShell.h"
#include "nsRegion.h"
#include "nsStyleStructInlines.h"
#include "nsStyleTransformMatrix.h"
--- a/widget/nsBaseWidget.cpp
+++ b/widget/nsBaseWidget.cpp
@@ -67,16 +67,17 @@
#include "WritingModes.h"
#include "InputData.h"
#include "FrameLayerBuilder.h"
#ifdef ACCESSIBILITY
#include "nsAccessibilityService.h"
#endif
#include "gfxConfig.h"
#include "mozilla/layers/CompositorSession.h"
+#include "VRManagerChild.h"
#ifdef DEBUG
#include "nsIObserver.h"
static void debug_RegisterPrefCallbacks();
#endif
@@ -347,16 +348,17 @@ nsBaseWidget::OnRenderingDeviceReset()
FrameLayerBuilder::InvalidateAllLayers(mLayerManager);
// Update the texture factory identifier.
clm->UpdateTextureFactoryIdentifier(identifier);
if (ShadowLayerForwarder* lf = clm->AsShadowForwarder()) {
lf->IdentifyTextureHost(identifier);
}
ImageBridgeChild::IdentifyCompositorTextureHost(identifier);
+ gfx::VRManagerChild::IdentifyTextureHost(identifier);
}
void
nsBaseWidget::FreeShutdownObserver()
{
if (mShutdownObserver) {
mShutdownObserver->Unregister();
}
@@ -1380,16 +1382,17 @@ void nsBaseWidget::CreateCompositor(int
DestroyCompositor();
mLayerManager = nullptr;
return;
}
lf->SetShadowManager(shadowManager);
lf->IdentifyTextureHost(textureFactoryIdentifier);
ImageBridgeChild::IdentifyCompositorTextureHost(textureFactoryIdentifier);
+ gfx::VRManagerChild::IdentifyTextureHost(textureFactoryIdentifier);
WindowUsesOMTC();
mLayerManager = lm.forget();
if (mWindowType == eWindowType_toplevel) {
// Only track compositors for top-level windows, since other window types
// may use the basic compositor.
gfxPlatform::GetPlatform()->NotifyCompositorCreated(mLayerManager->GetCompositorBackendType());