Bug 1179944, [webvr] support Oculus 0.6.0 runtime and rendering; r=mstange

---
 gfx/2d/Quaternion.h                              |  10 +
 gfx/layers/Layers.h                              |   6 +-
 gfx/layers/composite/ContainerLayerComposite.cpp | 161 +++++++---
 gfx/layers/composite/ContainerLayerComposite.h   |   3 +
 gfx/thebes/gfxPrefs.h                            |   2 +
 gfx/vr/gfxVR.cpp                                 |  56 ++++
 gfx/vr/gfxVR.h                                   |  37 +++
 gfx/vr/gfxVRCardboard.cpp                        |  54 +---
 gfx/vr/gfxVROculus.cpp                           | 367 +++++++++++++++--------
 gfx/vr/gfxVROculus.h                             |  14 +-
 gfx/vr/moz.build                                 |  10 +
 gfx/vr/ovr_capi_dynamic.h                        | 261 +++++++++++-----
 modules/libpref/init/all.js                      |   3 +
 13 files changed, 694 insertions(+), 290 deletions(-)
This commit is contained in:
Vladimir Vukicevic 2015-07-02 11:58:24 -04:00
parent 4661807fc2
commit ff08123863
13 changed files with 693 additions and 289 deletions

View File

@ -92,6 +92,16 @@ public:
{
return Conjugate().Normalize();
}
// Rotate aPoint by this quaternion using the optimized form
// p' = p + w*t + v x t, where t = 2*(v x p) and v = (x, y, z).
Point3D RotatePoint(const Point3D& aPoint) {
  const Float tx = Float(2.0) * (y * aPoint.z - z * aPoint.y);
  const Float ty = Float(2.0) * (z * aPoint.x - x * aPoint.z);
  const Float tz = Float(2.0) * (x * aPoint.y - y * aPoint.x);
  return Point3D(aPoint.x + w * tx + (y * tz - z * ty),
                 aPoint.y + w * ty + (z * tx - x * tz),
                 aPoint.z + w * tz + (x * ty - y * tx));
}
};
} // namespace gfx

View File

@ -1677,7 +1677,7 @@ public:
virtual gfx::Matrix4x4 ReplaceEffectiveTransform(const gfx::Matrix4x4& aNewEffectiveTransform) {
gfx::Matrix4x4 old = mEffectiveTransform;
mEffectiveTransform = aNewEffectiveTransform;
ComputeEffectiveTransformForMaskLayer(mEffectiveTransform);
ComputeEffectiveTransformForMaskLayers(mEffectiveTransform);
return old;
}
@ -2078,7 +2078,7 @@ public:
gfx::Matrix4x4 old = mEffectiveTransform;
mEffectiveTransform = aNewEffectiveTransform;
ComputeEffectiveTransformsForChildren(mEffectiveTransform);
ComputeEffectiveTransformForMaskLayer(mEffectiveTransform);
ComputeEffectiveTransformForMaskLayers(mEffectiveTransform);
return old;
}
@ -2287,6 +2287,8 @@ public:
mPreTransCallbackData = closureData;
}
const nsIntRect& GetBounds() const { return mBounds; }
protected:
void FirePreTransactionCallback()
{

View File

@ -40,6 +40,11 @@
#define CULLING_LOG(...)
// #define CULLING_LOG(...) printf_stderr("CULLING: " __VA_ARGS__)
#define DUMP(...) do { if (getenv("DUMP_DEBUG")) { printf_stderr(__VA_ARGS__); } } while(0)
#define XYWH(k) (k).x, (k).y, (k).width, (k).height
#define XY(k) (k).x, (k).y
#define WH(k) (k).width, (k).height
namespace mozilla {
namespace layers {
@ -141,69 +146,138 @@ ContainerRenderVR(ContainerT* aContainer,
RefPtr<CompositingRenderTarget> previousTarget = compositor->GetCurrentRenderTarget();
gfx::IntRect visibleRect = aContainer->GetEffectiveVisibleRegion().GetBounds();
float opacity = aContainer->GetEffectiveOpacity();
gfx::IntRect surfaceRect = gfx::IntRect(visibleRect.x, visibleRect.y,
visibleRect.width, visibleRect.height);
// we're about to create a framebuffer backed by textures to use as an intermediate
// surface. What to do if its size (as given by framebufferRect) would exceed the
// maximum texture size supported by the GL? The present code chooses the compromise
// of just clamping the framebuffer's size to the max supported size.
// This gives us a lower resolution rendering of the intermediate surface (children layers).
// See bug 827170 for a discussion.
// The size of each individual eye surface
gfx::IntSize eyeResolution = aHMD->SuggestedEyeResolution();
gfx::IntRect eyeRect[2];
eyeRect[0] = gfx::IntRect(0, 0, eyeResolution.width, eyeResolution.height);
eyeRect[1] = gfx::IntRect(eyeResolution.width, 0, eyeResolution.width, eyeResolution.height);
// The intermediate surface size; we're going to assume that we're not going to run
// into max texture size limits
gfx::IntRect surfaceRect = gfx::IntRect(0, 0, eyeResolution.width * 2, eyeResolution.height);
int32_t maxTextureSize = compositor->GetMaxTextureSize();
surfaceRect.width = std::min(maxTextureSize, surfaceRect.width);
surfaceRect.height = std::min(maxTextureSize, surfaceRect.height);
// use NONE here, because we draw black to clear below
surface = compositor->CreateRenderTarget(surfaceRect, INIT_MODE_NONE);
if (!surface) {
return;
gfx::VRHMDRenderingSupport *vrRendering = aHMD->GetRenderingSupport();
if (PR_GetEnv("NO_VR_RENDERING")) vrRendering = nullptr;
if (vrRendering) {
if (!aContainer->mVRRenderTargetSet || aContainer->mVRRenderTargetSet->size != surfaceRect.Size()) {
aContainer->mVRRenderTargetSet = vrRendering->CreateRenderTargetSet(compositor, surfaceRect.Size());
}
surface = aContainer->mVRRenderTargetSet->GetNextRenderTarget();
if (!surface) {
NS_WARNING("GetNextRenderTarget failed");
return;
}
} else {
surface = compositor->CreateRenderTarget(surfaceRect, INIT_MODE_CLEAR);
if (!surface) {
return;
}
}
gfx::IntRect rtBounds = previousTarget->GetRect();
DUMP("eyeResolution: %d %d targetRT: %d %d %d %d\n", WH(eyeResolution), XYWH(rtBounds));
compositor->SetRenderTarget(surface);
nsAutoTArray<Layer*, 12> children;
aContainer->SortChildrenBy3DZOrder(children);
/**
* Render this container's contents.
*/
gfx::IntRect surfaceClipRect(0, 0, surfaceRect.width, surfaceRect.height);
RenderTargetIntRect rtClipRect(0, 0, surfaceRect.width, surfaceRect.height);
gfx::Matrix4x4 origTransform = aContainer->GetEffectiveTransform();
for (uint32_t i = 0; i < children.Length(); i++) {
LayerComposite* layerToRender = static_cast<LayerComposite*>(children.ElementAt(i)->ImplData());
Layer* layer = layerToRender->GetLayer();
uint32_t contentFlags = layer->GetContentFlags();
if (layer->GetEffectiveVisibleRegion().IsEmpty() &&
!layer->AsContainerLayer()) {
continue;
}
RenderTargetIntRect clipRect = layer->CalculateScissorRect(rtClipRect);
if (clipRect.IsEmpty()) {
continue;
}
// We flip between pre-rendered and Gecko-rendered VR based on whether
// the child layer of this VR container layer has PRESERVE_3D or not.
if ((contentFlags & Layer::CONTENT_PRESERVE_3D) == 0) {
// This layer is native VR
DUMP("%p Switching to pre-rendered VR\n", aContainer);
layerToRender->Prepare(rtClipRect);
layerToRender->RenderLayer(surfaceClipRect);
}
// XXX we still need depth test here, but we have no way of preserving
// depth anyway in native VR layers until we have a way to save them
// from WebGL (and maybe depth video?)
compositor->SetRenderTarget(surface);
aContainer->ReplaceEffectiveTransform(origTransform);
// If this native-VR child layer does not have sizes that match
// the eye resolution (that is, returned by the recommended
// render rect from the HMD device), then we need to scale it
// up/down.
nsIntRect layerBounds;
// XXX this is a hack! Canvas layers aren't reporting the
// proper bounds here (visible region bounds are 0,0,0,0)
// and I'm not sure if this is the bounds we want anyway.
if (layer->GetType() == Layer::TYPE_CANVAS) {
layerBounds = static_cast<CanvasLayer*>(layer)->GetBounds();
} else {
layerBounds = layer->GetEffectiveVisibleRegion().GetBounds();
}
DUMP(" layer %p [type %d] bounds [%d %d %d %d] surfaceRect [%d %d %d %d]\n", layer, (int) layer->GetType(),
XYWH(layerBounds), XYWH(surfaceRect));
const gfx::Matrix4x4 childTransform = layer->GetEffectiveTransform();
bool restoreTransform = false;
if ((layerBounds.width != 0 && layerBounds.height != 0) &&
(layerBounds.width != surfaceRect.width ||
layerBounds.height != surfaceRect.height))
{
DUMP(" layer %p doesn't match, prescaling by %f %f\n", layer,
surfaceRect.width / float(layerBounds.width),
surfaceRect.height / float(layerBounds.height));
gfx::Matrix4x4 scaledChildTransform(childTransform);
scaledChildTransform.PreScale(surfaceRect.width / float(layerBounds.width),
surfaceRect.height / float(layerBounds.height),
1.0f);
// Unbind the current surface and rebind the previous one.
#ifdef MOZ_DUMP_PAINTING
if (gfxUtils::sDumpPainting) {
RefPtr<gfx::DataSourceSurface> surf = surface->Dump(aManager->GetCompositor());
if (surf) {
WriteSnapshotToDumpFile(aContainer, surf);
layer->ReplaceEffectiveTransform(scaledChildTransform);
restoreTransform = true;
}
// XXX these are both clip rects, which end up as scissor rects in the compositor. So we just
// pass the full target surface rect here.
layerToRender->Prepare(RenderTargetIntRect(surfaceRect.x, surfaceRect.y,
surfaceRect.width, surfaceRect.height));
layerToRender->RenderLayer(surfaceRect);
if (restoreTransform) {
layer->ReplaceEffectiveTransform(childTransform);
}
} else {
// Gecko-rendered CSS VR -- not supported yet, so just don't render this layer!
}
}
#endif
DUMP(" -- ContainerRenderVR [%p] after child layers\n", aContainer);
// Now put back the original transfom on this container
aContainer->ReplaceEffectiveTransform(origTransform);
// then bind the original target and draw with distortion
compositor->SetRenderTarget(previousTarget);
gfx::Rect rect(visibleRect.x, visibleRect.y, visibleRect.width, visibleRect.height);
if (vrRendering) {
vrRendering->SubmitFrame(aContainer->mVRRenderTargetSet);
DUMP("<<< ContainerRenderVR [used vrRendering] [%p]\n", aContainer);
if (!gfxPrefs::VRMirrorTextures()) {
return;
}
}
gfx::Rect rect(surfaceRect.x, surfaceRect.y, surfaceRect.width, surfaceRect.height);
gfx::Rect clipRect(aClipRect.x, aClipRect.y, aClipRect.width, aClipRect.height);
// The VR geometry may not cover the entire area; we need to fill with a solid color
@ -213,18 +287,29 @@ ContainerRenderVR(ContainerT* aContainer,
// the entire rect)
EffectChain solidEffect(aContainer);
solidEffect.mPrimaryEffect = new EffectSolidColor(Color(0.0, 0.0, 0.0, 1.0));
aManager->GetCompositor()->DrawQuad(rect, clipRect, solidEffect, opacity,
aContainer->GetEffectiveTransform());
aManager->GetCompositor()->DrawQuad(rect, rect, solidEffect, 1.0, gfx::Matrix4x4());
// draw the temporary surface with VR distortion to the original destination
gfx::Matrix4x4 scaleTransform = aContainer->GetEffectiveTransform();
EffectChain vrEffect(aContainer);
vrEffect.mPrimaryEffect = new EffectVRDistortion(aHMD, surface);
bool skipDistortion = vrRendering || PR_GetEnv("MOZ_GFX_VR_NO_DISTORTION");
if (skipDistortion) {
vrEffect.mPrimaryEffect = new EffectRenderTarget(surface);
scaleTransform.PreScale(rtBounds.width / float(surfaceRect.width),
rtBounds.height / float(surfaceRect.height),
1.0f);
} else {
vrEffect.mPrimaryEffect = new EffectVRDistortion(aHMD, surface);
// no need to scale, because the VRDistortion effect will sample from surface
}
// XXX we shouldn't use visibleRect here -- the VR distortion needs to know the
// full rect, not just the visible one. Luckily, right now, VR distortion is only
// rendered when the element is fullscreen, so the visibleRect will be right anyway.
aManager->GetCompositor()->DrawQuad(rect, clipRect, vrEffect, opacity,
aContainer->GetEffectiveTransform());
scaleTransform);
DUMP("<<< ContainerRenderVR [%p]\n", aContainer);
}
/* all of the prepared data that we need in RenderLayer() */

View File

@ -11,6 +11,7 @@
#include "mozilla/UniquePtr.h" // for UniquePtr
#include "mozilla/layers/LayerManagerComposite.h"
#include "mozilla/gfx/Rect.h"
#include "gfxVR.h"
namespace mozilla {
namespace layers {
@ -115,6 +116,7 @@ public:
UniquePtr<PreparedData> mPrepared;
RefPtr<CompositingRenderTarget> mLastIntermediateSurface;
RefPtr<gfx::VRHMDRenderingSupport::RenderTargetSet> mVRRenderTargetSet;
};
class RefLayerComposite : public RefLayer,
@ -180,6 +182,7 @@ public:
virtual const char* Name() const override { return "RefLayerComposite"; }
UniquePtr<PreparedData> mPrepared;
RefPtr<CompositingRenderTarget> mLastIntermediateSurface;
nsRefPtr<gfx::VRHMDRenderingSupport::RenderTargetSet> mVRRenderTargetSet;
};
} // namespace layers

View File

@ -247,6 +247,8 @@ private:
DECL_GFX_PREF(Once, "gfx.touch.resample.old-touch-threshold",TouchResampleOldTouchThreshold, int32_t, 17);
DECL_GFX_PREF(Once, "gfx.touch.resample.vsync-adjust", TouchVsyncSampleAdjust, int32_t, 5);
DECL_GFX_PREF(Once, "gfx.vr.mirror-textures", VRMirrorTextures, bool, false);
DECL_GFX_PREF(Live, "gfx.vsync.collect-scroll-transforms", CollectScrollTransforms, bool, false);
DECL_GFX_PREF(Once, "gfx.vsync.compositor", VsyncAlignedCompositor, bool, false);
// On b2g, in really bad cases, I've seen up to 80 ms delays between touch events and the main thread

View File

@ -18,6 +18,13 @@
#include "nsServiceManagerUtils.h"
#include "nsIScreenManager.h"
#include "mozilla/layers/Compositor.h"
#include "mozilla/layers/TextureHost.h"
#ifndef M_PI
# define M_PI 3.14159265358979323846
#endif
using namespace mozilla;
using namespace mozilla::gfx;
@ -134,3 +141,52 @@ VRHMDManager::AllocateDeviceIndex()
{
return ++sDeviceBase;
}
/* static */ already_AddRefed<nsIScreen>
VRHMDManager::MakeFakeScreen(int32_t x, int32_t y, uint32_t width, uint32_t height)
{
  // Hand back a synthetic nsIScreen covering the requested rect, for HMDs
  // that should not be tied to a real display.
  nsCOMPtr<nsIScreen> fakeScreen = new FakeScreen(IntRect(x, y, width, height));
  return fakeScreen.forget();
}
// Start the swap ring at slot 0; subclasses' GetNextRenderTarget()
// implementations advance from here.
VRHMDRenderingSupport::RenderTargetSet::RenderTargetSet()
: currentRenderTarget(0)
{
}
// Empty out-of-line definition of the (virtual, protected) destructor
// declared in gfxVR.h.
VRHMDRenderingSupport::RenderTargetSet::~RenderTargetSet()
{
}
// Build a perspective projection matrix from this (possibly asymmetric)
// field of view, expressed as per-edge half-angles in degrees.
// zNear/zFar are the clip-plane distances; rightHanded flips the Z axis.
Matrix4x4
VRFieldOfView::ConstructProjectionMatrix(float zNear, float zFar, bool rightHanded)
{
  // Per-edge tangents of the FOV half-angles.
  const float tanUp = tan(upDegrees * M_PI / 180.0);
  const float tanDown = tan(downDegrees * M_PI / 180.0);
  const float tanLeft = tan(leftDegrees * M_PI / 180.0);
  const float tanRight = tan(rightDegrees * M_PI / 180.0);

  const float handedness = rightHanded ? -1.0 : 1.0;

  // X/Y scale maps the asymmetric frustum onto [-1, 1]; the offsets
  // recenter it when the left/right (or up/down) angles differ.
  const float scaleX = 2.0f / (tanLeft + tanRight);
  const float offsetX = (tanLeft - tanRight) * scaleX * 0.5;
  const float scaleY = 2.0f / (tanUp + tanDown);
  const float offsetY = (tanUp - tanDown) * scaleY * 0.5;

  // Overwrite the projection slots through a flat view of the 16 floats
  // starting at _11; slots not written keep their default-constructed
  // values (presumably identity — confirm against Matrix4x4).
  Matrix4x4 proj;
  float *e = &proj._11;
  e[0*4+0] = scaleX;
  e[1*4+1] = scaleY;
  e[2*4+0] = offsetX * handedness;
  e[2*4+1] = -offsetY * handedness;
  e[2*4+2] = zFar / (zNear - zFar) * -handedness;
  e[2*4+3] = handedness;
  e[3*4+2] = (zFar * zNear) / (zNear - zFar);
  e[3*4+3] = 0.0f;  // a perspective matrix needs 0 here, not identity's 1

  return proj;
}

View File

@ -17,6 +17,11 @@
#include "mozilla/Atomics.h"
namespace mozilla {
namespace layers {
class Compositor;
class CompositingRenderTarget;
}
namespace gfx {
enum class VRHMDType : uint16_t {
@ -58,6 +63,8 @@ struct VRFieldOfView {
leftDegrees == 0.0;
}
Matrix4x4 ConstructProjectionMatrix(float zNear, float zFar, bool rightHanded);
double upDegrees;
double rightDegrees;
double downDegrees;
@ -125,6 +132,30 @@ struct VRHMDConfiguration {
VRFieldOfView fov[2];
};
class VRHMDRenderingSupport {
public:
struct RenderTargetSet {
RenderTargetSet();
NS_INLINE_DECL_REFCOUNTING(RenderTargetSet)
nsRefPtr<layers::Compositor> compositor;
IntSize size;
nsTArray<nsRefPtr<layers::CompositingRenderTarget>> renderTargets;
int32_t currentRenderTarget;
virtual already_AddRefed<layers::CompositingRenderTarget> GetNextRenderTarget() = 0;
protected:
virtual ~RenderTargetSet();
};
virtual already_AddRefed<RenderTargetSet> CreateRenderTargetSet(layers::Compositor *aCompositor, const IntSize& aSize) = 0;
virtual void DestroyRenderTargetSet(RenderTargetSet *aRTSet) = 0;
virtual void SubmitFrame(RenderTargetSet *aRTSet) = 0;
protected:
VRHMDRenderingSupport() { }
};
class VRHMDInfo {
public:
enum Eye {
@ -170,6 +201,11 @@ public:
virtual void ZeroSensor() = 0;
// if rendering is offloaded
virtual VRHMDRenderingSupport *GetRenderingSupport() { return nullptr; }
// distortion mesh stuff; we should implement renderingsupport for this
virtual void FillDistortionConstants(uint32_t whichEye,
const IntSize& textureSize, // the full size of the texture
const IntRect& eyeViewport, // the viewport within the texture for the current eye
@ -210,6 +246,7 @@ public:
static void ManagerDestroy();
static void GetAllHMDs(nsTArray<nsRefPtr<VRHMDInfo>>& aHMDResult);
static uint32_t AllocateDeviceIndex();
static already_AddRefed<nsIScreen> MakeFakeScreen(int32_t x, int32_t y, uint32_t width, uint32_t height);
protected:
typedef nsTArray<nsRefPtr<VRHMDManager>> VRHMDManagerArray;

View File

@ -26,10 +26,6 @@
#define LOG(...) do { } while(0)
#endif
#ifndef M_PI
# define M_PI 3.14159265358979323846
#endif
// 1/sqrt(2) (aka sqrt(2)/2)
#ifndef M_SQRT1_2
# define M_SQRT1_2 0.70710678118654752440
@ -99,6 +95,18 @@ HMDInfoCardboard::HMDInfoCardboard()
mMaximumEyeFOV[Eye_Right] = VRFieldOfView(45.0, 45.0, 45.0, 45.0);
SetFOV(mRecommendedEyeFOV[Eye_Left], mRecommendedEyeFOV[Eye_Right], 0.01, 10000.0);
#if 1
int32_t xcoord = 0;
if (getenv("FAKE_CARDBOARD_SCREEN")) {
const char *env = getenv("FAKE_CARDBOARD_SCREEN");
nsresult err;
xcoord = nsCString(env).ToInteger(&err);
if (err != NS_OK) xcoord = 0;
}
mScreen = VRHMDManager::MakeFakeScreen(xcoord, 0, 1920, 1080);
#endif
}
bool
@ -129,9 +137,9 @@ HMDInfoCardboard::Notify(const mozilla::hal::ScreenConfiguration& config)
mOrient = config.orientation();
if (mOrient == eScreenOrientation_LandscapePrimary) {
mScreenTransform = Quaternion(0.f, 0.f, M_SQRT1_2, M_SQRT1_2);
mScreenTransform = Quaternion(0.f, 0.f, (float) M_SQRT1_2, (float) M_SQRT1_2);
} else if (mOrient == eScreenOrientation_LandscapeSecondary) {
mScreenTransform = Quaternion(0.f, 0.f, -M_SQRT1_2, M_SQRT1_2);
mScreenTransform = Quaternion(0.f, 0.f, (float) -M_SQRT1_2, (float) M_SQRT1_2);
} else if (mOrient == eScreenOrientation_PortraitPrimary) {
mScreenTransform = Quaternion();
} else if (mOrient == eScreenOrientation_PortraitSecondary) {
@ -215,38 +223,6 @@ HMDInfoCardboard::ZeroSensor()
mSensorZeroInverse.Invert();
}
// Old file-local projection-matrix builder, superseded in this commit by
// VRFieldOfView::ConstructProjectionMatrix in gfxVR.cpp.
// NOTE(review): this version never writes m[3*4+3]; if Matrix4x4
// default-constructs to identity, that element stays 1.0 instead of the
// 0.0 the replacement explicitly sets — confirm this was intentional.
// NOTE(review): the off-diagonal indices here (m[0*4+2], m[1*4+2],
// m[2*4+3], m[3*4+2]) are the transpose of the replacement's
// (m[2*4+0], m[2*4+1], m[3*4+2], m[2*4+3]) — only one layout can match
// Matrix4x4's storage convention.
static Matrix4x4
ConstructProjectionMatrix(const VRFieldOfView& fov, bool rightHanded, double zNear, double zFar)
{
// Tangents of the per-edge FOV half-angles (degrees -> radians).
float upTan = tan(fov.upDegrees * M_PI / 180.0);
float downTan = tan(fov.downDegrees * M_PI / 180.0);
float leftTan = tan(fov.leftDegrees * M_PI / 180.0);
float rightTan = tan(fov.rightDegrees * M_PI / 180.0);
// -1 flips the Z axis for a right-handed projection.
float handednessScale = rightHanded ? -1.0 : 1.0;
// Scale/offset mapping the (possibly asymmetric) frustum onto [-1, 1].
float pxscale = 2.0f / (leftTan + rightTan);
float pxoffset = (leftTan - rightTan) * pxscale * 0.5;
float pyscale = 2.0f / (upTan + downTan);
float pyoffset = (upTan - downTan) * pyscale * 0.5;
Matrix4x4 mobj;
// Flat float view of the 16 matrix elements starting at _11.
float *m = &mobj._11;
m[0*4+0] = pxscale;
m[0*4+2] = pxoffset * handednessScale;
m[1*4+1] = pyscale;
m[1*4+2] = -pyoffset * handednessScale;
m[2*4+2] = zFar / (zNear - zFar) * -handednessScale;
m[2*4+3] = (zFar * zNear) / (zNear - zFar);
m[3*4+2] = handednessScale;
return mobj;
}
bool
HMDInfoCardboard::SetFOV(const VRFieldOfView& aFOVLeft,
const VRFieldOfView& aFOVRight,
@ -257,7 +233,7 @@ HMDInfoCardboard::SetFOV(const VRFieldOfView& aFOVLeft,
for (uint32_t eye = 0; eye < NumEyes; eye++) {
mEyeFOV[eye] = eye == Eye_Left ? aFOVLeft : aFOVRight;
mEyeTranslation[eye] = Point3D(standardIPD * (eye == Eye_Left ? -1.0 : 1.0), 0.0, 0.0);
mEyeProjectionMatrix[eye] = ConstructProjectionMatrix(mEyeFOV[eye], true, zNear, zFar);
mEyeProjectionMatrix[eye] = mEyeFOV[eye].ConstructProjectionMatrix(zNear, zFar, true);
mDistortionMesh[eye].mVertices.SetLength(4);
mDistortionMesh[eye].mIndices.SetLength(6);

View File

@ -12,6 +12,12 @@
#include "nsString.h"
#include "mozilla/Preferences.h"
#include "mozilla/gfx/Quaternion.h"
#ifdef XP_WIN
#include "../layers/d3d11/CompositorD3D11.h"
#endif
#include "gfxVROculus.h"
#include "nsServiceManagerUtils.h"
@ -29,32 +35,26 @@ namespace {
#ifdef OVR_CAPI_LIMITED_MOZILLA
static pfn_ovr_Initialize ovr_Initialize = nullptr;
static pfn_ovr_Shutdown ovr_Shutdown = nullptr;
static pfn_ovr_GetTimeInSeconds ovr_GetTimeInSeconds = nullptr;
static pfn_ovrHmd_Detect ovrHmd_Detect = nullptr;
static pfn_ovrHmd_Create ovrHmd_Create = nullptr;
static pfn_ovrHmd_Destroy ovrHmd_Destroy = nullptr;
static pfn_ovrHmd_CreateDebug ovrHmd_CreateDebug = nullptr;
static pfn_ovrHmd_GetLastError ovrHmd_GetLastError = nullptr;
static pfn_ovrHmd_AttachToWindow ovrHmd_AttachToWindow = nullptr;
static pfn_ovrHmd_GetEnabledCaps ovrHmd_GetEnabledCaps = nullptr;
static pfn_ovrHmd_SetEnabledCaps ovrHmd_SetEnabledCaps = nullptr;
static pfn_ovrHmd_Destroy ovrHmd_Destroy = nullptr;
static pfn_ovrHmd_ConfigureTracking ovrHmd_ConfigureTracking = nullptr;
static pfn_ovrHmd_RecenterPose ovrHmd_RecenterPose = nullptr;
static pfn_ovrHmd_GetTrackingState ovrHmd_GetTrackingState = nullptr;
static pfn_ovrHmd_GetFovTextureSize ovrHmd_GetFovTextureSize = nullptr;
static pfn_ovrHmd_GetRenderDesc ovrHmd_GetRenderDesc = nullptr;
static pfn_ovrHmd_CreateDistortionMesh ovrHmd_CreateDistortionMesh = nullptr;
static pfn_ovrHmd_DestroyDistortionMesh ovrHmd_DestroyDistortionMesh = nullptr;
static pfn_ovrHmd_GetRenderScaleAndOffset ovrHmd_GetRenderScaleAndOffset = nullptr;
static pfn_ovrHmd_GetFrameTiming ovrHmd_GetFrameTiming = nullptr;
static pfn_ovrHmd_BeginFrameTiming ovrHmd_BeginFrameTiming = nullptr;
static pfn_ovrHmd_EndFrameTiming ovrHmd_EndFrameTiming = nullptr;
static pfn_ovrHmd_ResetFrameTiming ovrHmd_ResetFrameTiming = nullptr;
static pfn_ovrHmd_GetEyePoses ovrHmd_GetEyePoses = nullptr;
static pfn_ovrHmd_GetHmdPosePerEye ovrHmd_GetHmdPosePerEye = nullptr;
static pfn_ovrHmd_GetEyeTimewarpMatrices ovrHmd_GetEyeTimewarpMatrices = nullptr;
static pfn_ovrMatrix4f_Projection ovrMatrix4f_Projection = nullptr;
static pfn_ovrMatrix4f_OrthoSubProjection ovrMatrix4f_OrthoSubProjection = nullptr;
static pfn_ovr_GetTimeInSeconds ovr_GetTimeInSeconds = nullptr;
static pfn_ovrHmd_DestroySwapTextureSet ovrHmd_DestroySwapTextureSet = nullptr;
static pfn_ovrHmd_SubmitFrame ovrHmd_SubmitFrame = nullptr;
#ifdef XP_WIN
static pfn_ovrHmd_CreateSwapTextureSetD3D11 ovrHmd_CreateSwapTextureSetD3D11 = nullptr;
#endif
static pfn_ovrHmd_CreateSwapTextureSetGL ovrHmd_CreateSwapTextureSetGL = nullptr;
#ifdef HAVE_64BIT_BUILD
#define BUILD_BITS 64
@ -62,9 +62,9 @@ static pfn_ovr_GetTimeInSeconds ovr_GetTimeInSeconds = nullptr;
#define BUILD_BITS 32
#endif
#define LIBOVR_PRODUCT_VERSION 0
#define LIBOVR_MAJOR_VERSION 5
#define LIBOVR_MINOR_VERSION 0
#define OVR_PRODUCT_VERSION 0
#define OVR_MAJOR_VERSION 6
#define OVR_MINOR_VERSION 0
static bool
InitializeOculusCAPI()
@ -90,26 +90,26 @@ InitializeOculusCAPI()
searchPath.SetLength(realLen);
libSearchPaths.AppendElement(searchPath);
}
libName.AppendPrintf("LibOVRRT%d_%d_%d.dll", BUILD_BITS, LIBOVR_PRODUCT_VERSION, LIBOVR_MAJOR_VERSION);
libName.AppendPrintf("LibOVRRT%d_%d_%d.dll", BUILD_BITS, OVR_PRODUCT_VERSION, OVR_MAJOR_VERSION);
#elif defined(__APPLE__)
searchPath.Truncate();
searchPath.AppendPrintf("/Library/Frameworks/LibOVRRT_%d.framework/Versions/%d", LIBOVR_PRODUCT_VERSION, LIBOVR_MAJOR_VERSION);
searchPath.AppendPrintf("/Library/Frameworks/LibOVRRT_%d.framework/Versions/%d", OVR_PRODUCT_VERSION, OVR_MAJOR_VERSION);
libSearchPaths.AppendElement(searchPath);
if (PR_GetEnv("HOME")) {
searchPath.Truncate();
searchPath.AppendPrintf("%s/Library/Frameworks/LibOVRRT_%d.framework/Versions/%d", PR_GetEnv("HOME"), LIBOVR_PRODUCT_VERSION, LIBOVR_MAJOR_VERSION);
searchPath.AppendPrintf("%s/Library/Frameworks/LibOVRRT_%d.framework/Versions/%d", PR_GetEnv("HOME"), OVR_PRODUCT_VERSION, OVR_MAJOR_VERSION);
libSearchPaths.AppendElement(searchPath);
}
// The following will match the va_list overload of AppendPrintf if the product version is 0
// That's bad times.
//libName.AppendPrintf("LibOVRRT_%d", LIBOVR_PRODUCT_VERSION);
//libName.AppendPrintf("LibOVRRT_%d", OVR_PRODUCT_VERSION);
libName.Append("LibOVRRT_");
libName.AppendInt(LIBOVR_PRODUCT_VERSION);
libName.AppendInt(OVR_PRODUCT_VERSION);
#else
libSearchPaths.AppendElement(nsCString("/usr/local/lib"));
libSearchPaths.AppendElement(nsCString("/usr/lib"));
libName.AppendPrintf("libOVRRT%d_%d.so.%d", BUILD_BITS, LIBOVR_PRODUCT_VERSION, LIBOVR_MAJOR_VERSION);
libName.AppendPrintf("libOVRRT%d_%d.so.%d", BUILD_BITS, OVR_PRODUCT_VERSION, OVR_MAJOR_VERSION);
#endif
// If the pref is present, we override libName
@ -167,33 +167,25 @@ InitializeOculusCAPI()
REQUIRE_FUNCTION(ovr_Initialize);
REQUIRE_FUNCTION(ovr_Shutdown);
REQUIRE_FUNCTION(ovr_GetTimeInSeconds);
REQUIRE_FUNCTION(ovrHmd_Detect);
REQUIRE_FUNCTION(ovrHmd_Create);
REQUIRE_FUNCTION(ovrHmd_Destroy);
REQUIRE_FUNCTION(ovrHmd_CreateDebug);
REQUIRE_FUNCTION(ovrHmd_GetLastError);
REQUIRE_FUNCTION(ovrHmd_AttachToWindow);
REQUIRE_FUNCTION(ovrHmd_GetEnabledCaps);
REQUIRE_FUNCTION(ovrHmd_SetEnabledCaps);
REQUIRE_FUNCTION(ovrHmd_Destroy);
REQUIRE_FUNCTION(ovrHmd_ConfigureTracking);
REQUIRE_FUNCTION(ovrHmd_RecenterPose);
REQUIRE_FUNCTION(ovrHmd_GetTrackingState);
REQUIRE_FUNCTION(ovrHmd_GetFovTextureSize);
REQUIRE_FUNCTION(ovrHmd_GetRenderDesc);
REQUIRE_FUNCTION(ovrHmd_CreateDistortionMesh);
REQUIRE_FUNCTION(ovrHmd_DestroyDistortionMesh);
REQUIRE_FUNCTION(ovrHmd_GetRenderScaleAndOffset);
REQUIRE_FUNCTION(ovrHmd_GetFrameTiming);
REQUIRE_FUNCTION(ovrHmd_BeginFrameTiming);
REQUIRE_FUNCTION(ovrHmd_EndFrameTiming);
REQUIRE_FUNCTION(ovrHmd_ResetFrameTiming);
REQUIRE_FUNCTION(ovrHmd_GetEyePoses);
REQUIRE_FUNCTION(ovrHmd_GetHmdPosePerEye);
REQUIRE_FUNCTION(ovrHmd_GetEyeTimewarpMatrices);
REQUIRE_FUNCTION(ovrMatrix4f_Projection);
REQUIRE_FUNCTION(ovrMatrix4f_OrthoSubProjection);
REQUIRE_FUNCTION(ovr_GetTimeInSeconds);
REQUIRE_FUNCTION(ovrHmd_DestroySwapTextureSet);
REQUIRE_FUNCTION(ovrHmd_SubmitFrame);
#ifdef XP_WIN
REQUIRE_FUNCTION(ovrHmd_CreateSwapTextureSetD3D11);
#endif
REQUIRE_FUNCTION(ovrHmd_CreateSwapTextureSetGL);
#undef REQUIRE_FUNCTION
@ -205,13 +197,35 @@ InitializeOculusCAPI()
}
#else
#include <OVR_Version.h>
// we're statically linked; it's available
static bool InitializeOculusCAPI()
{
return true;
}
#endif
// Compute the two per-eye poses from a head pose: rotate each eye's
// offset by the head orientation and add the head position. Each eye
// keeps the head's orientation unchanged.
static void
do_CalcEyePoses(ovrPosef headPose,
                const ovrVector3f hmdToEyeViewOffset[2],
                ovrPosef outEyePoses[2])
{
  if (!hmdToEyeViewOffset || !outEyePoses)
    return;

  // The head orientation is the same for both eyes; build it once.
  gfx::Quaternion headOrientation(headPose.Orientation.x, headPose.Orientation.y,
                                  headPose.Orientation.z, headPose.Orientation.w);

  for (uint32_t eye = 0; eye < 2; ++eye) {
    Point3D eyeOffset(hmdToEyeViewOffset[eye].x,
                      hmdToEyeViewOffset[eye].y,
                      hmdToEyeViewOffset[eye].z);
    Point3D worldOffset = headOrientation.RotatePoint(eyeOffset);

    outEyePoses[eye].Orientation = headPose.Orientation;
    outEyePoses[eye].Position.x = worldOffset.x + headPose.Position.x;
    outEyePoses[eye].Position.y = worldOffset.y + headPose.Position.y;
    outEyePoses[eye].Position.z = worldOffset.z + headPose.Position.z;
  }
}
ovrFovPort
ToFovPort(const VRFieldOfView& aFOV)
{
@ -262,12 +276,22 @@ HMDInfoOculus::HMDInfoOculus(ovrHmd aHMD)
SetFOV(mRecommendedEyeFOV[Eye_Left], mRecommendedEyeFOV[Eye_Right], 0.01, 10000.0);
nsCOMPtr<nsIScreenManager> screenmgr = do_GetService("@mozilla.org/gfx/screenmanager;1");
if (screenmgr) {
screenmgr->ScreenForRect(mHMD->WindowsPos.x, mHMD->WindowsPos.y,
mHMD->Resolution.w, mHMD->Resolution.h,
getter_AddRefs(mScreen));
#if 1
int32_t xcoord = 0;
if (getenv("FAKE_OCULUS_SCREEN")) {
const char *env = getenv("FAKE_OCULUS_SCREEN");
nsresult err;
xcoord = nsCString(env).ToInteger(&err);
if (err != NS_OK) xcoord = 0;
}
uint32_t w = mHMD->Resolution.w;
uint32_t h = mHMD->Resolution.h;
mScreen = VRHMDManager::MakeFakeScreen(xcoord, 0, std::max(w, h), std::min(w, h));
#ifdef DEBUG
printf_stderr("OCULUS SCREEN: %d %d %d %d\n", xcoord, 0, std::max(w, h), std::min(w, h));
#endif
#endif
}
void
@ -286,8 +310,6 @@ HMDInfoOculus::SetFOV(const VRFieldOfView& aFOVLeft, const VRFieldOfView& aFOVRi
float pixelsPerDisplayPixel = 1.0;
ovrSizei texSize[2];
uint32_t caps = ovrDistortionCap_Chromatic | ovrDistortionCap_Vignette; // XXX TODO add TimeWarp
// get eye parameters and create the mesh
for (uint32_t eye = 0; eye < NumEyes; eye++) {
mEyeFOV[eye] = eye == 0 ? aFOVLeft : aFOVRight;
@ -295,47 +317,13 @@ HMDInfoOculus::SetFOV(const VRFieldOfView& aFOVLeft, const VRFieldOfView& aFOVRi
ovrEyeRenderDesc renderDesc = ovrHmd_GetRenderDesc(mHMD, (ovrEyeType) eye, mFOVPort[eye]);
// these values are negated so that content can add the adjustment to its camera position,
// instead of subtracting
mEyeTranslation[eye] = Point3D(-renderDesc.HmdToEyeViewOffset.x, -renderDesc.HmdToEyeViewOffset.y, -renderDesc.HmdToEyeViewOffset.z);
// As of Oculus 0.6.0, the HmdToEyeViewOffset values are correct and don't need to be negated.
mEyeTranslation[eye] = Point3D(renderDesc.HmdToEyeViewOffset.x, renderDesc.HmdToEyeViewOffset.y, renderDesc.HmdToEyeViewOffset.z);
// note that we are using a right-handed coordinate system here, to match CSS
ovrMatrix4f projMatrix = ovrMatrix4f_Projection(mFOVPort[eye], zNear, zFar, true);
// XXX this is gross, we really need better methods on Matrix4x4
memcpy(&mEyeProjectionMatrix[eye], projMatrix.M, sizeof(ovrMatrix4f));
mEyeProjectionMatrix[eye].Transpose();
mEyeProjectionMatrix[eye] = mEyeFOV[eye].ConstructProjectionMatrix(zNear, zFar, true);
texSize[eye] = ovrHmd_GetFovTextureSize(mHMD, (ovrEyeType) eye, mFOVPort[eye], pixelsPerDisplayPixel);
ovrDistortionMesh mesh;
bool ok = ovrHmd_CreateDistortionMesh(mHMD, (ovrEyeType) eye, mFOVPort[eye], caps, &mesh);
if (!ok)
return false;
mDistortionMesh[eye].mVertices.SetLength(mesh.VertexCount);
mDistortionMesh[eye].mIndices.SetLength(mesh.IndexCount);
ovrDistortionVertex *srcv = mesh.pVertexData;
HMDInfoOculus::DistortionVertex *destv = reinterpret_cast<HMDInfoOculus::DistortionVertex*>(mDistortionMesh[eye].mVertices.Elements());
memset(destv, 0, mesh.VertexCount * sizeof(VRDistortionVertex));
for (uint32_t i = 0; i < mesh.VertexCount; ++i) {
destv[i].pos[0] = srcv[i].ScreenPosNDC.x;
destv[i].pos[1] = srcv[i].ScreenPosNDC.y;
destv[i].texR[0] = srcv[i].TanEyeAnglesR.x;
destv[i].texR[1] = srcv[i].TanEyeAnglesR.y;
destv[i].texG[0] = srcv[i].TanEyeAnglesG.x;
destv[i].texG[1] = srcv[i].TanEyeAnglesG.y;
destv[i].texB[0] = srcv[i].TanEyeAnglesB.x;
destv[i].texB[1] = srcv[i].TanEyeAnglesB.y;
destv[i].genericAttribs[0] = srcv[i].VignetteFactor;
destv[i].genericAttribs[1] = srcv[i].TimeWarpFactor;
}
memcpy(mDistortionMesh[eye].mIndices.Elements(), mesh.pIndexData, mesh.IndexCount * sizeof(uint16_t));
ovrHmd_DestroyDistortionMesh(&mesh);
}
// take the max of both for eye resolution
@ -348,8 +336,6 @@ HMDInfoOculus::SetFOV(const VRFieldOfView& aFOVLeft, const VRFieldOfView& aFOVRi
mConfiguration.fov[1] = aFOVRight;
return true;
//* need to call this during rendering each frame I think? */
//ovrHmd_GetRenderScaleAndOffset(fovPort, texSize, renderViewport, uvScaleOffsetOut);
}
void
@ -360,33 +346,6 @@ HMDInfoOculus::FillDistortionConstants(uint32_t whichEye,
const Rect& destRect,
VRDistortionConstants& values)
{
ovrSizei texSize = { textureSize.width, textureSize.height };
ovrRecti eyePort = { { eyeViewport.x, eyeViewport.y }, { eyeViewport.width, eyeViewport.height } };
ovrVector2f scaleOut[2];
ovrHmd_GetRenderScaleAndOffset(mFOVPort[whichEye], texSize, eyePort, scaleOut);
values.eyeToSourceScaleAndOffset[0] = scaleOut[1].x;
values.eyeToSourceScaleAndOffset[1] = scaleOut[1].y;
values.eyeToSourceScaleAndOffset[2] = scaleOut[0].x;
values.eyeToSourceScaleAndOffset[3] = scaleOut[0].y;
// These values are in clip space [-1..1] range, but we're providing
// scaling in the 0..2 space for sanity.
// this is the destRect in clip space
float x0 = destRect.x / destViewport.width * 2.0 - 1.0;
float x1 = (destRect.x + destRect.width) / destViewport.width * 2.0 - 1.0;
float y0 = destRect.y / destViewport.height * 2.0 - 1.0;
float y1 = (destRect.y + destRect.height) / destViewport.height * 2.0 - 1.0;
// offset
values.destinationScaleAndOffset[0] = (x0+x1) / 2.0;
values.destinationScaleAndOffset[1] = (y0+y1) / 2.0;
// scale
values.destinationScaleAndOffset[2] = destRect.width / destViewport.width;
values.destinationScaleAndOffset[3] = destRect.height / destViewport.height;
}
bool
@ -462,9 +421,160 @@ HMDInfoOculus::GetSensorState(double timeOffset)
result.linearAcceleration[2] = pose.LinearAcceleration.z;
}
mLastTrackingState = state;
return result;
}
// Backend-agnostic wrapper around an Oculus ovrSwapTextureSet: cycles
// through the runtime-owned textures as compositing render targets and
// destroys the swap set when the last reference goes away.
struct RenderTargetSetOculus : public VRHMDRenderingSupport::RenderTargetSet
{
RenderTargetSetOculus(const IntSize& aSize,
HMDInfoOculus *aHMD,
ovrSwapTextureSet *aTS)
: hmd(aHMD)
{
// textureSet/size live on the base RenderTargetSet.
textureSet = aTS;
size = aSize;
}
// Advance to the next slot in the swap ring, keep the ovr set's
// CurrentIndex in sync, and hand back that slot's render target
// (cleared on next bind).
already_AddRefed<layers::CompositingRenderTarget> GetNextRenderTarget() override {
currentRenderTarget = (currentRenderTarget + 1) % renderTargets.Length();
textureSet->CurrentIndex = currentRenderTarget;
renderTargets[currentRenderTarget]->ClearOnBind();
nsRefPtr<layers::CompositingRenderTarget> rt = renderTargets[currentRenderTarget];
return rt.forget();
}
// Idempotent teardown: releases the swap texture set back to the Oculus
// runtime (only while the ovrHmd is still alive) and drops our refs.
void Destroy() {
if (!hmd)
return;
if (hmd->GetOculusHMD()) {
// If the ovrHmd was already destroyed, so were all associated
// texture sets
ovrHmd_DestroySwapTextureSet(hmd->GetOculusHMD(), textureSet);
}
hmd = nullptr;
textureSet = nullptr;
}
~RenderTargetSetOculus() {
Destroy();
}
nsRefPtr<HMDInfoOculus> hmd;
// Owned by the Oculus runtime; released via ovrHmd_DestroySwapTextureSet.
ovrSwapTextureSet *textureSet;
};
#ifdef XP_WIN
// Minimal TextureSourceD3D11 wrapper around an existing D3D11 texture;
// used to expose Oculus-owned swap textures to the compositor.
class BasicTextureSourceD3D11 : public layers::TextureSourceD3D11
{
public:
// mTexture/mSize are inherited members, so they are assigned in the body
// rather than via an initializer list.
BasicTextureSourceD3D11(ID3D11Texture2D *aTexture, const IntSize& aSize) {
mTexture = aTexture;
mSize = aSize;
}
};
// D3D11 specialization: wraps each texture of the Oculus swap texture set
// in a CompositingRenderTargetD3D11 so the compositor can draw into it.
struct RenderTargetSetD3D11 : public RenderTargetSetOculus
{
RenderTargetSetD3D11(layers::CompositorD3D11 *aCompositor,
const IntSize& aSize,
HMDInfoOculus *aHMD,
ovrSwapTextureSet *aTS)
: RenderTargetSetOculus(aSize, aHMD, aTS)
{
compositor = aCompositor;
// One compositing render target per swap-set texture; start at the
// index the SDK reports as current so GetNextRenderTarget stays in sync.
renderTargets.SetLength(aTS->TextureCount);
currentRenderTarget = aTS->CurrentIndex;
for (int i = 0; i < aTS->TextureCount; ++i) {
ovrD3D11Texture *tex11;
nsRefPtr<layers::CompositingRenderTargetD3D11> rt;
// ovrD3D11Texture is a union over ovrTexture, so this cast exposes
// the D3D11-specific members of each swap-set entry.
tex11 = (ovrD3D11Texture*)&aTS->Textures[i];
rt = new layers::CompositingRenderTargetD3D11(tex11->D3D11.pTexture, IntPoint(0, 0));
rt->SetSize(size);
renderTargets[i] = rt;
}
}
};
#endif
// Create a render target set for this HMD using the given compositor.
// Currently only the D3D11 backend (Windows) is implemented; the OpenGL
// branch is a stub and all other backends return nullptr.
already_AddRefed<VRHMDRenderingSupport::RenderTargetSet>
HMDInfoOculus::CreateRenderTargetSet(layers::Compositor *aCompositor, const IntSize& aSize)
{
#ifdef XP_WIN
if (aCompositor->GetBackendType() == layers::LayersBackend::LAYERS_D3D11)
{
layers::CompositorD3D11 *comp11 = static_cast<layers::CompositorD3D11*>(aCompositor);
// BGRA8 texture usable both as a render target and as a shader resource
// (the Oculus compositor samples it when compositing the frame).
CD3D11_TEXTURE2D_DESC desc(DXGI_FORMAT_B8G8R8A8_UNORM, aSize.width, aSize.height, 1, 1,
D3D11_BIND_SHADER_RESOURCE | D3D11_BIND_RENDER_TARGET);
ovrSwapTextureSet *ts = nullptr;
ovrResult orv = ovrHmd_CreateSwapTextureSetD3D11(mHMD, comp11->GetDevice(), &desc, &ts);
if (orv != ovrSuccess) {
return nullptr;
}
nsRefPtr<RenderTargetSetD3D11> rts = new RenderTargetSetD3D11(comp11, aSize, this, ts);
return rts.forget();
}
#endif
// TODO: OpenGL backend support (ovrHmd_CreateSwapTextureSetGL) is not
// implemented yet.
if (aCompositor->GetBackendType() == layers::LayersBackend::LAYERS_OPENGL) {
}
return nullptr;
}
// Release the Oculus resources held by a render target set previously
// returned from CreateRenderTargetSet.
void
HMDInfoOculus::DestroyRenderTargetSet(RenderTargetSet *aRTSet)
{
// The downcast is safe: every RenderTargetSet this HMD hands out is a
// RenderTargetSetOculus (see CreateRenderTargetSet).
static_cast<RenderTargetSetOculus*>(aRTSet)->Destroy();
}
// Submit the current swap-set texture to the Oculus compositor as a single
// EyeFov layer: left eye renders into the left half of the texture, right
// eye into the right half.
void
HMDInfoOculus::SubmitFrame(RenderTargetSet *aRTSet)
{
RenderTargetSetOculus *rts = static_cast<RenderTargetSetOculus*>(aRTSet);
MOZ_ASSERT(rts->hmd != nullptr);
MOZ_ASSERT(rts->textureSet != nullptr);
ovrLayerEyeFov layer;
layer.Header.Type = ovrLayerType_EyeFov;
layer.Header.Flags = 0;
// Both eyes share one texture set; ColorTexture[1] == nullptr tells the
// SDK to take both eye viewports from ColorTexture[0].
layer.ColorTexture[0] = rts->textureSet;
layer.ColorTexture[1] = nullptr;
layer.Fov[0] = mFOVPort[0];
layer.Fov[1] = mFOVPort[1];
// Side-by-side layout: each eye gets half the texture width.
layer.Viewport[0].Pos.x = 0;
layer.Viewport[0].Pos.y = 0;
layer.Viewport[0].Size.w = rts->size.width / 2;
layer.Viewport[0].Size.h = rts->size.height;
layer.Viewport[1].Pos.x = rts->size.width / 2;
layer.Viewport[1].Pos.y = 0;
layer.Viewport[1].Size.w = rts->size.width / 2;
layer.Viewport[1].Size.h = rts->size.height;
// Per-eye render poses from the last head pose plus the per-eye offsets
// (do_CalcEyePoses is a local helper — presumably mirrors the SDK's
// ovr_CalcEyePoses).
const Point3D& l = rts->hmd->mEyeTranslation[0];
const Point3D& r = rts->hmd->mEyeTranslation[1];
const ovrVector3f hmdToEyeViewOffset[2] = { { l.x, l.y, l.z },
{ r.x, r.y, r.z } };
do_CalcEyePoses(rts->hmd->mLastTrackingState.HeadPose.ThePose, hmdToEyeViewOffset, layer.RenderPose);
ovrLayerHeader *layers = &layer.Header;
ovrResult orv = ovrHmd_SubmitFrame(mHMD, 0, nullptr, &layers, 1);
//printf_stderr("Submitted frame %d, result: %d\n", rts->textureSet->CurrentIndex, orv);
if (orv != ovrSuccess) {
// not visible? failed?
}
}
bool
VRHMDManagerOculus::PlatformInit()
{
@ -479,13 +589,13 @@ VRHMDManagerOculus::PlatformInit()
ovrInitParams params;
params.Flags = ovrInit_RequestVersion;
params.RequestedMinorVersion = LIBOVR_MINOR_VERSION;
params.RequestedMinorVersion = OVR_MINOR_VERSION;
params.LogCallback = nullptr;
params.ConnectionTimeoutMS = 0;
bool ok = ovr_Initialize(&params);
ovrResult orv = ovr_Initialize(&params);
if (!ok)
if (orv != ovrSuccess)
return false;
mOculusPlatformInitialized = true;
@ -501,11 +611,13 @@ VRHMDManagerOculus::Init()
if (!PlatformInit())
return false;
ovrResult orv;
int count = ovrHmd_Detect();
for (int i = 0; i < count; ++i) {
ovrHmd hmd = ovrHmd_Create(i);
if (hmd) {
ovrHmd hmd;
orv = ovrHmd_Create(i, &hmd);
if (orv == ovrSuccess) {
nsRefPtr<HMDInfoOculus> oc = new HMDInfoOculus(hmd);
mOculusHMDs.AppendElement(oc);
}
@ -516,8 +628,9 @@ VRHMDManagerOculus::Init()
if ((count == 0 && gfxPrefs::VRAddTestDevices() == 1) ||
(gfxPrefs::VRAddTestDevices() == 2))
{
ovrHmd hmd = ovrHmd_CreateDebug(ovrHmd_DK2);
if (hmd) {
ovrHmd hmd;
orv = ovrHmd_CreateDebug(ovrHmd_DK2, &hmd);
if (orv == ovrSuccess) {
nsRefPtr<HMDInfoOculus> oc = new HMDInfoOculus(hmd);
mOculusHMDs.AppendElement(oc);
}

View File

@ -15,13 +15,15 @@
#include "mozilla/EnumeratedArray.h"
#include "gfxVR.h"
//#include <OVR_CAPI.h>
//#include <OVR_CAPI_D3D.h>
#include "ovr_capi_dynamic.h"
namespace mozilla {
namespace gfx {
namespace impl {
class HMDInfoOculus : public VRHMDInfo {
class HMDInfoOculus : public VRHMDInfo, public VRHMDRenderingSupport {
public:
explicit HMDInfoOculus(ovrHmd aHMD);
@ -38,8 +40,17 @@ public:
const Size& destViewport, const Rect& destRect,
VRDistortionConstants& values) override;
VRHMDRenderingSupport* GetRenderingSupport() override { return this; }
void Destroy();
/* VRHMDRenderingSupport */
already_AddRefed<RenderTargetSet> CreateRenderTargetSet(layers::Compositor *aCompositor, const IntSize& aSize) override;
void DestroyRenderTargetSet(RenderTargetSet *aRTSet) override;
void SubmitFrame(RenderTargetSet *aRTSet) override;
ovrHmd GetOculusHMD() const { return mHMD; }
protected:
// must match the size of VRDistortionVertex
struct DistortionVertex {
@ -58,6 +69,7 @@ protected:
ovrHmd mHMD;
ovrFovPort mFOVPort[2];
uint32_t mStartCount;
ovrTrackingState mLastTrackingState;
};
} // namespace impl

View File

@ -18,6 +18,16 @@ UNIFIED_SOURCES += [
'gfxVROculus.cpp',
]
# For building with the real SDK instead of our local hack
#SOURCES += [
# 'OVR_CAPI_Util.cpp',
# 'OVR_CAPIShim.c',
# 'OVR_StereoProjection.cpp',
#]
#
#CXXFLAGS += ["-Ic:/proj/ovr/OculusSDK-0.6.0-beta/LibOVR/Include"]
#CFLAGS += ["-Ic:/proj/ovr/OculusSDK-0.6.0-beta/LibOVR/Include"]
CXXFLAGS += CONFIG['MOZ_CAIRO_CFLAGS']
CXXFLAGS += CONFIG['TK_CFLAGS']
CFLAGS += CONFIG['MOZ_CAIRO_CFLAGS']

View File

@ -11,7 +11,11 @@
*/
#ifdef OVR_CAPI_h
#warning OVR_CAPI.h included before ovr_capi_dynamic.h, skpping this
#ifdef _MSC_VER
#pragma message("ovr_capi_dynamic.h: OVR_CAPI.h included before ovr_capi_dynamic.h, skipping this")
#else
#warning OVR_CAPI.h included before ovr_capi_dynamic.h, skipping this
#endif
#define mozilla_ovr_capi_dynamic_h_
#else
@ -21,6 +25,14 @@
#define OVR_CAPI_LIMITED_MOZILLA 1
#ifdef HAVE_64BIT_BUILD
#define OVR_PTR_SIZE 8
#define OVR_ON64(x) x
#else
#define OVR_PTR_SIZE 4
#define OVR_ON64(x) /**/
#endif
#if defined(_WIN32)
#define OVR_PFN __cdecl
#else
@ -51,6 +63,7 @@
extern "C" {
#endif
typedef int32_t ovrResult;
typedef char ovrBool;
typedef struct { int x, y; } ovrVector2i;
typedef struct { int w, h; } ovrSizei;
@ -94,16 +107,11 @@ typedef enum {
} ovrHmdType;
typedef enum {
ovrHmdCap_Present = 0x0001,
ovrHmdCap_Available = 0x0002,
ovrHmdCap_Captured = 0x0004,
ovrHmdCap_ExtendDesktop = 0x0008,
ovrHmdCap_DebugDevice = 0x0010,
ovrHmdCap_DisplayOff = 0x0040,
ovrHmdCap_LowPersistence = 0x0080,
ovrHmdCap_DynamicPrediction = 0x0200,
ovrHmdCap_NoVSync = 0x1000,
ovrHmdCap_NoMirrorToWindow = 0x2000
ovrHmdCap_EnumSize = 0x7fffffff
} ovrHmdCapBits;
typedef enum
@ -115,22 +123,6 @@ typedef enum
ovrTrackingCap_EnumSize = 0x7fffffff
} ovrTrackingCaps;
typedef enum {
ovrDistortionCap_Chromatic = 0x01,
ovrDistortionCap_TimeWarp = 0x02,
ovrDistortionCap_Vignette = 0x08,
ovrDistortionCap_NoRestore = 0x10,
ovrDistortionCap_FlipInput = 0x20,
ovrDistortionCap_SRGB = 0x40,
ovrDistortionCap_Overdrive = 0x80,
ovrDistortionCap_HqDistortion = 0x100,
ovrDistortionCap_LinuxDevFullscreen = 0x200,
ovrDistortionCap_ComputeShader = 0x400,
ovrDistortionCap_TimewarpJitDelay = 0x1000,
ovrDistortionCap_ProfileNoSpinWaits = 0x10000,
ovrDistortionCap_EnumSize = 0x7fffffff
} ovrDistortionCaps;
typedef enum {
ovrEye_Left = 0,
ovrEye_Right = 1,
@ -138,10 +130,11 @@ typedef enum {
ovrEye_EnumSize = 0x7fffffff
} ovrEyeType;
typedef struct ovrHmdDesc_ {
typedef struct OVR_ALIGNAS(OVR_PTR_SIZE) {
void* Handle;
ovrHmdType Type;
const char* ProductName;
OVR_ON64(uint32_t pad0;)
const char* ProductName;
const char* Manufacturer;
short VendorId;
short ProductId;
@ -155,17 +148,12 @@ typedef struct ovrHmdDesc_ {
unsigned int HmdCaps;
unsigned int TrackingCaps;
unsigned int DistortionCaps;
ovrFovPort DefaultEyeFov[ovrEye_Count];
ovrFovPort MaxEyeFov[ovrEye_Count];
ovrEyeType EyeRenderOrder[ovrEye_Count];
ovrSizei Resolution;
ovrVector2i WindowsPos;
const char* DisplayDeviceName;
int DisplayId;
} ovrHmdDesc;
typedef const ovrHmdDesc* ovrHmd;
@ -179,7 +167,7 @@ typedef enum {
ovrStatus_EnumSize = 0x7fffffff
} ovrStatusBits;
typedef struct ovrSensorData_ {
typedef struct OVR_ALIGNAS(4) {
ovrVector3f Accelerometer;
ovrVector3f Gyro;
ovrVector3f Magnetometer;
@ -188,28 +176,24 @@ typedef struct ovrSensorData_ {
} ovrSensorData;
typedef struct ovrTrackingState_ {
typedef struct OVR_ALIGNAS(8) {
ovrPoseStatef HeadPose;
ovrPosef CameraPose;
ovrPosef LeveledCameraPose;
ovrSensorData RawSensorData;
unsigned int StatusFlags;
double LastVisionProcessingTime;
uint32_t LastCameraFrameCounter;
uint32_t Pad;
uint32_t pad0;
} ovrTrackingState;
typedef struct OVR_ALIGNAS(8) ovrFrameTiming_ {
float DeltaSeconds;
float Pad;
double ThisFrameSeconds;
double TimewarpPointSeconds;
double NextFrameSeconds;
double ScanoutMidpointSeconds;
double EyeScanoutSeconds[2];
typedef struct OVR_ALIGNAS(8) {
double DisplayMidpointSeconds;
double FrameIntervalSeconds;
unsigned AppFrameIndex;
unsigned DisplayFrameIndex;
} ovrFrameTiming;
typedef struct ovrEyeRenderDesc_ {
typedef struct OVR_ALIGNAS(4) {
ovrEyeType Eye;
ovrFovPort Fov;
ovrRecti DistortedViewport;
@ -217,72 +201,183 @@ typedef struct ovrEyeRenderDesc_ {
ovrVector3f HmdToEyeViewOffset;
} ovrEyeRenderDesc;
typedef struct ovrDistortionVertex_ {
ovrVector2f ScreenPosNDC;
float TimeWarpFactor;
float VignetteFactor;
ovrVector2f TanEyeAnglesR;
ovrVector2f TanEyeAnglesG;
ovrVector2f TanEyeAnglesB;
} ovrDistortionVertex;
typedef struct OVR_ALIGNAS(4) {
float Projection22;
float Projection23;
float Projection32;
} ovrTimewarpProjectionDesc;
typedef struct ovrDistortionMesh_ {
ovrDistortionVertex* pVertexData;
unsigned short* pIndexData;
unsigned int VertexCount;
unsigned int IndexCount;
} ovrDistortionMesh;
typedef struct OVR_ALIGNAS(4) {
ovrVector3f HmdToEyeViewOffset[ovrEye_Count];
float HmdSpaceToWorldScaleInMeters;
} ovrViewScaleDesc;
typedef enum {
ovrRenderAPI_None,
ovrRenderAPI_OpenGL,
ovrRenderAPI_Android_GLES,
ovrRenderAPI_D3D9_Obsolete,
ovrRenderAPI_D3D10_Obsolete,
ovrRenderAPI_D3D11,
ovrRenderAPI_Count,
ovrRenderAPI_EnumSize = 0x7fffffff
} ovrRenderAPIType;
typedef struct OVR_ALIGNAS(4) {
ovrRenderAPIType API;
ovrSizei TextureSize;
} ovrTextureHeader;
typedef struct OVR_ALIGNAS(OVR_PTR_SIZE) {
ovrTextureHeader Header;
OVR_ON64(uint32_t pad0;)
uintptr_t PlatformData[8];
} ovrTexture;
typedef struct OVR_ALIGNAS(OVR_PTR_SIZE) {
ovrTexture* Textures;
int TextureCount;
int CurrentIndex;
} ovrSwapTextureSet;
typedef enum {
ovrInit_Debug = 0x00000001,
ovrInit_ServerOptional = 0x00000002,
ovrInit_RequestVersion = 0x00000004,
ovrInit_ForceNoDebug = 0x00000008
ovrInit_ForceNoDebug = 0x00000008,
ovrInit_EnumSize = 0x7fffffff
} ovrInitFlags;
typedef enum {
ovrLogLevel_Debug = 0,
ovrLogLevel_Info = 1,
ovrLogLevel_Error = 2
ovrLogLevel_Error = 2,
ovrLogLevel_EnumSize = 0x7fffffff
} ovrLogLevel;
typedef enum {
ovrLayerType_Disabled = 0,
ovrLayerType_EyeFov = 1,
ovrLayerType_EyeFovDepth = 2,
ovrLayerType_QuadInWorld = 3,
ovrLayerType_QuadHeadLocked = 4,
ovrLayerType_Direct = 6,
ovrLayerType_EnumSize = 0x7fffffff
} ovrLayerType;
typedef enum {
ovrLayerFlag_HighQuality = 0x01,
ovrLayerFlag_TextureOriginAtBottomLeft = 0x02
} ovrLayerFlags;
typedef struct OVR_ALIGNAS(OVR_PTR_SIZE) {
ovrLayerType Type;
unsigned Flags;
} ovrLayerHeader;
typedef struct OVR_ALIGNAS(OVR_PTR_SIZE) {
ovrLayerHeader Header;
ovrSwapTextureSet* ColorTexture[ovrEye_Count];
ovrRecti Viewport[ovrEye_Count];
ovrFovPort Fov[ovrEye_Count];
ovrPosef RenderPose[ovrEye_Count];
} ovrLayerEyeFov;
typedef void (OVR_PFN *ovrLogCallback)(int level, const char* message);
typedef struct {
typedef struct OVR_ALIGNAS(8) {
uint32_t Flags;
uint32_t RequestedMinorVersion;
ovrLogCallback LogCallback;
uint32_t ConnectionTimeoutMS;
OVR_ON64(uint32_t pad0;)
} ovrInitParams;
typedef ovrBool (OVR_PFN *pfn_ovr_Initialize)(ovrInitParams const* params);
enum {
ovrSuccess = 0,
ovrError_MemoryAllocationFailure = -1000,
ovrError_SocketCreationFailure = -1001,
ovrError_InvalidHmd = -1002,
ovrError_Timeout = -1003,
ovrError_NotInitialized = -1004,
ovrError_InvalidParameter = -1005,
ovrError_ServiceError = -1006,
ovrError_NoHmd = -1007,
ovrError_AudioReservedBegin = -2000,
ovrError_AudioReservedEnd = -2999,
ovrError_Initialize = -3000,
ovrError_LibLoad = -3001,
ovrError_LibVersion = -3002,
ovrError_ServiceConnection = -3003,
ovrError_ServiceVersion = -3004,
ovrError_IncompatibleOS = -3005,
ovrError_DisplayInit = -3006,
ovrError_ServerStart = -3007,
ovrError_Reinitialization = -3008,
ovrError_InvalidBundleAdjustment = -4000,
ovrError_USBBandwidth = -4001
};
typedef ovrResult (OVR_PFN *pfn_ovr_Initialize)(ovrInitParams const* params);
typedef void (OVR_PFN *pfn_ovr_Shutdown)();
typedef int (OVR_PFN *pfn_ovrHmd_Detect)();
typedef ovrHmd (OVR_PFN *pfn_ovrHmd_Create)(int index);
typedef double (OVR_PFN *pfn_ovr_GetTimeInSeconds)();
typedef ovrResult (OVR_PFN *pfn_ovrHmd_Detect)();
typedef ovrResult (OVR_PFN *pfn_ovrHmd_Create)(int index, ovrHmd*);
typedef ovrResult (OVR_PFN *pfn_ovrHmd_CreateDebug)(ovrHmdType type, ovrHmd*);
typedef void (OVR_PFN *pfn_ovrHmd_Destroy)(ovrHmd hmd);
typedef ovrHmd (OVR_PFN *pfn_ovrHmd_CreateDebug)(ovrHmdType type);
typedef const char* (OVR_PFN *pfn_ovrHmd_GetLastError)(ovrHmd hmd);
typedef ovrBool (OVR_PFN *pfn_ovrHmd_AttachToWindow)(ovrHmd hmd, void* window, const ovrRecti* destMirrorRect, const ovrRecti* sourceRenderTargetRect);
typedef unsigned int (OVR_PFN *pfn_ovrHmd_GetEnabledCaps)(ovrHmd hmd);
typedef void (OVR_PFN *pfn_ovrHmd_SetEnabledCaps)(ovrHmd hmd, unsigned int hmdCaps);
typedef ovrBool (OVR_PFN *pfn_ovrHmd_ConfigureTracking)(ovrHmd hmd, unsigned int supportedTrackingCaps, unsigned int requiredTrackingCaps);
typedef ovrResult (OVR_PFN *pfn_ovrHmd_ConfigureTracking)(ovrHmd hmd, unsigned int supportedTrackingCaps, unsigned int requiredTrackingCaps);
typedef void (OVR_PFN *pfn_ovrHmd_RecenterPose)(ovrHmd hmd);
typedef ovrTrackingState (OVR_PFN *pfn_ovrHmd_GetTrackingState)(ovrHmd hmd, double absTime);
typedef ovrSizei (OVR_PFN *pfn_ovrHmd_GetFovTextureSize)(ovrHmd hmd, ovrEyeType eye, ovrFovPort fov, float pixelsPerDisplayPixel);
typedef ovrEyeRenderDesc (OVR_PFN *pfn_ovrHmd_GetRenderDesc)(ovrHmd hmd, ovrEyeType eyeType, ovrFovPort fov);
typedef ovrBool (OVR_PFN *pfn_ovrHmd_CreateDistortionMesh)(ovrHmd hmd, ovrEyeType eyeType, ovrFovPort fov, unsigned int distortionCaps, ovrDistortionMesh *meshData);
typedef void (OVR_PFN *pfn_ovrHmd_DestroyDistortionMesh)(ovrDistortionMesh* meshData);
typedef void (OVR_PFN *pfn_ovrHmd_GetRenderScaleAndOffset)(ovrFovPort fov, ovrSizei textureSize, ovrRecti renderViewport, ovrVector2f uvScaleOffsetOut[2]);
typedef ovrFrameTiming (OVR_PFN *pfn_ovrHmd_GetFrameTiming)(ovrHmd hmd, unsigned int frameIndex);
typedef ovrFrameTiming (OVR_PFN *pfn_ovrHmd_BeginFrameTiming)(ovrHmd hmd, unsigned int frameIndex);
typedef void (OVR_PFN *pfn_ovrHmd_EndFrameTiming)(ovrHmd hmd);
typedef void (OVR_PFN *pfn_ovrHmd_ResetFrameTiming)(ovrHmd hmd, unsigned int frameIndex, bool vsync);
typedef void (OVR_PFN *pfn_ovrHmd_GetEyePoses)(ovrHmd hmd, unsigned int frameIndex, ovrVector3f hmdToEyeViewOffset[2], ovrPosef outEyePoses[2], ovrTrackingState* outHmdTrackingState);
typedef ovrPosef (OVR_PFN *pfn_ovrHmd_GetHmdPosePerEye)(ovrHmd hmd, ovrEyeType eye);
typedef void (OVR_PFN *pfn_ovrHmd_GetEyeTimewarpMatrices)(ovrHmd hmd, ovrEyeType eye, ovrPosef renderPose, ovrMatrix4f twmOut[2]);
typedef ovrMatrix4f (OVR_PFN *pfn_ovrMatrix4f_Projection) (ovrFovPort fov, float znear, float zfar, ovrBool rightHanded );
typedef ovrMatrix4f (OVR_PFN *pfn_ovrMatrix4f_OrthoSubProjection) (ovrFovPort fov, ovrVector2f orthoScale, float orthoDistance, float eyeViewAdjustX);
typedef double (OVR_PFN *pfn_ovr_GetTimeInSeconds)();
typedef void (OVR_PFN *pfn_ovrHmd_DestroySwapTextureSet)(ovrHmd hmd, ovrSwapTextureSet* textureSet);
typedef ovrResult (OVR_PFN *pfn_ovrHmd_SubmitFrame)(ovrHmd hmd, unsigned int frameIndex,
const ovrViewScaleDesc* viewScaleDesc,
ovrLayerHeader const * const * layerPtrList, unsigned int layerCount);
#ifdef XP_WIN
struct D3D11_TEXTURE2D_DESC;
struct ID3D11Device;
struct ID3D11Texture2D;
struct ID3D11ShaderResourceView;
typedef struct OVR_ALIGNAS(OVR_PTR_SIZE) {
ovrTextureHeader Header;
OVR_ON64(uint32_t pad0;)
ID3D11Texture2D* pTexture;
ID3D11ShaderResourceView* pSRView;
} ovrD3D11TextureData;
typedef union {
ovrTexture Texture;
ovrD3D11TextureData D3D11;
} ovrD3D11Texture;
typedef ovrResult (OVR_PFN *pfn_ovrHmd_CreateSwapTextureSetD3D11)(ovrHmd hmd, ID3D11Device* device,
const D3D11_TEXTURE2D_DESC* desc,
ovrSwapTextureSet** outTextureSet);
#endif
typedef struct {
ovrTextureHeader Header;
uint32_t TexId;
} ovrGLTextureData;
typedef union {
ovrTexture Texture;
ovrGLTextureData OGL;
} ovrGLTexture;
typedef ovrResult (OVR_PFN *pfn_ovrHmd_CreateSwapTextureSetGL)(ovrHmd hmd, uint32_t format,
int width, int height,
ovrSwapTextureSet** outTextureSet);
#ifdef __cplusplus
}

View File

@ -4622,6 +4622,9 @@ pref("dom.placeholder.show_on_focus", true);
pref("dom.vr.enabled", false);
// 0 = never; 1 = only if real devices aren't there; 2 = always
pref("dom.vr.add-test-devices", 1);
// true = show the VR textures in our compositing output; false = don't.
// Enabling this may have a performance impact.
pref("gfx.vr.mirror-textures", false);
// MMS UA Profile settings
pref("wap.UAProf.url", "");