Diffstat (limited to 'Samples/OculusRoomTiny/Win32_OculusRoomTiny.cpp')
-rw-r--r-- | Samples/OculusRoomTiny/Win32_OculusRoomTiny.cpp | 334 |
1 file changed, 205 insertions, 129 deletions
diff --git a/Samples/OculusRoomTiny/Win32_OculusRoomTiny.cpp b/Samples/OculusRoomTiny/Win32_OculusRoomTiny.cpp
index 8caf3d5..48373ad 100644
--- a/Samples/OculusRoomTiny/Win32_OculusRoomTiny.cpp
+++ b/Samples/OculusRoomTiny/Win32_OculusRoomTiny.cpp
@@ -3,7 +3,7 @@
 Filename : Win32_OculusRoomTiny2.cpp
 Content  : First-person view test application for Oculus Rift
 Created  : October 4, 2012
-Authors  : Michael Antonov, Andrew Reisse
+Authors  : Michael Antonov, Andrew Reisse, Tom Heath, Volga Aksoy
 
 Copyright : Copyright 2012 Oculus, Inc. All Rights reserved.
 
@@ -27,78 +27,68 @@ limitations under the License.
 // 'W', 'S', 'A', 'D', 'F' - Move forward, back; strafe left/right, toggle freeze in timewarp.
 // The world right handed coordinate system is defined as Y -> Up, Z -> Back, X -> Right
 
-//Include the OculusVR SDK
+// Include the OculusVR SDK
 #include "OVR_CAPI.h"
 
-// ***** Choices and settings
+// Choose whether the SDK performs rendering/distortion, or the application.
+#define SDK_RENDER 1  //Do NOT switch until you have viewed and understood the Health and Safety message.
+                      //Disabling this makes it a non-compliant app, and not suitable for demonstration. In place for development only.
+const bool FullScreen = true; //Should be true for correct timing. Use false for debug only.
 
-// Whether the SDK performs rendering/distortion, or the app.
-#define SDK_RENDER 1
-
-const unsigned DistortionCaps = ovrDistortionCap_Chromatic | ovrDistortionCap_TimeWarp;
-const bool VSyncEnabled = true;
-const bool FullScreen = true;
 
 // Include Non-SDK supporting Utilities from other files
 #include "RenderTiny_D3D11_Device.h"
-
-RenderDevice * Util_InitWindowAndGraphics (Recti vp, int fullscreen, int multiSampleCount);
-void Util_ReleaseWindowAndGraphics (RenderDevice* pRender);
-bool Util_RespondToControls (float & EyeYaw, Vector3f & EyePos,
-                             float deltaTime, Quatf PoseOrientation);
-void PopulateRoomScene (Scene* scene, RenderDevice* render);
+HWND Util_InitWindowAndGraphics    (Recti vp, int fullscreen, int multiSampleCount, bool UseAppWindowFrame, RenderDevice ** pDevice);
+void Util_ReleaseWindowAndGraphics (RenderDevice* pRender);
+bool Util_RespondToControls        (float & EyeYaw, Vector3f & EyePos, Quatf PoseOrientation);
+void PopulateRoomScene             (Scene* scene, RenderDevice* render);
 
 //Structures for the application
-ovrHmd           HMD;
-ovrHmdDesc       HMDDesc;
-ovrEyeRenderDesc EyeRenderDesc[2];
-ovrRecti         EyeRenderViewport[2];
-RenderDevice*    pRender = 0;
-Texture*         pRendertargetTexture = 0;
-Scene*           pRoomScene = 0;
-
-// Specifics for whether the SDK or the app is doing the distortion.
+ovrHmd           HMD;
+ovrEyeRenderDesc EyeRenderDesc[2];
+ovrRecti         EyeRenderViewport[2];
+RenderDevice*    pRender = 0;
+Texture*         pRendertargetTexture = 0;
+Scene*           pRoomScene = 0;
+
+// Specifics for whether the SDK or the APP is doing the distortion.
 #if SDK_RENDER
 #define OVR_D3D_VERSION 11
 #include "OVR_CAPI_D3D.h"
-ovrD3D11Texture EyeTexture[2];
+    ovrD3D11Texture EyeTexture[2];
 #else
-    void DistortionMeshInit  (unsigned distortionCaps, ovrHmd HMD,
-                              ovrEyeRenderDesc eyeRenderDesc[2],
-                              ovrSizei textureSize, ovrRecti viewports[2],
-                              RenderDevice * pRender);
-    void DistortionMeshRender(unsigned distortionCaps, ovrHmd HMD,
-                              double timwarpTimePoint, ovrPosef eyeRenderPoses[2],
-                              RenderDevice * pRender, Texture* pRendertargetTexture);
+    ShaderSet *         Shaders;
+    ID3D11InputLayout * VertexIL;
+    Ptr<Buffer>         MeshVBs[2];
+    Ptr<Buffer>         MeshIBs[2];
+    ovrVector2f         UVScaleOffset[2][2];
 #endif
 
 //-------------------------------------------------------------------------------------
-
 int Init()
 {
-    // Initializes LibOVR.
+    // Initializes LibOVR, and the Rift
    ovr_Initialize();
-
    HMD = ovrHmd_Create(0);
    if (!HMD)
    {
        MessageBoxA(NULL,"Oculus Rift not detected.","", MB_OK);
        return(1);
    }
-   //Get more details about the HMD
-   ovrHmd_GetDesc(HMD, &HMDDesc);
-   if (HMDDesc.DisplayDeviceName[0] == '\0')
+   if (HMD->ProductName[0] == '\0')
        MessageBoxA(NULL,"Rift detected, display not enabled.","", MB_OK);
 
-   //Setup Window and Graphics
+   //Setup Window and Graphics - use window frame if relying on Oculus driver
    const int backBufferMultisample = 1;
-   pRender = Util_InitWindowAndGraphics(Recti(HMDDesc.WindowsPos, HMDDesc.Resolution),
-                                        FullScreen, backBufferMultisample);
-   if (!pRender) return 1;
-
+   bool UseAppWindowFrame = (HMD->HmdCaps & ovrHmdCap_ExtendDesktop) ? false : true;
+   HWND window = Util_InitWindowAndGraphics(Recti(HMD->WindowsPos, HMD->Resolution),
+                                            FullScreen, backBufferMultisample, UseAppWindowFrame,&pRender);
+   if (!window) return 1;
+   ovrHmd_AttachToWindow(HMD, window, NULL, NULL);
+
    //Configure Stereo settings.
-   Sizei recommenedTex0Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Left, HMDDesc.DefaultEyeFov[0], 1.0f);
-   Sizei recommenedTex1Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Right, HMDDesc.DefaultEyeFov[1], 1.0f);
+   Sizei recommenedTex0Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Left, HMD->DefaultEyeFov[0], 1.0f);
+   Sizei recommenedTex1Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Right, HMD->DefaultEyeFov[1], 1.0f);
    Sizei RenderTargetSize;
    RenderTargetSize.w = recommenedTex0Size.w + recommenedTex1Size.w;
    RenderTargetSize.h = max ( recommenedTex0Size.h, recommenedTex1Size.h );
@@ -111,91 +101,156 @@ int Init()
    RenderTargetSize.w = pRendertargetTexture->GetWidth();
    RenderTargetSize.h = pRendertargetTexture->GetHeight();
 
-    // Initialize eye rendering information for ovrHmd_Configure.
+    // Initialize eye rendering information.
    // The viewport sizes are re-computed in case RenderTargetSize changed due to HW limitations.
-    ovrFovPort eyeFov[2] = { HMDDesc.DefaultEyeFov[0], HMDDesc.DefaultEyeFov[1] } ;
+    ovrFovPort eyeFov[2] = { HMD->DefaultEyeFov[0], HMD->DefaultEyeFov[1] } ;
    EyeRenderViewport[0].Pos  = Vector2i(0,0);
    EyeRenderViewport[0].Size = Sizei(RenderTargetSize.w / 2, RenderTargetSize.h);
    EyeRenderViewport[1].Pos  = Vector2i((RenderTargetSize.w + 1) / 2, 0);
    EyeRenderViewport[1].Size = EyeRenderViewport[0].Size;
 
-#if SDK_RENDER
+    #if SDK_RENDER
    // Query D3D texture data.
-    Texture* rtt = (Texture*)pRendertargetTexture;
    EyeTexture[0].D3D11.Header.API            = ovrRenderAPI_D3D11;
    EyeTexture[0].D3D11.Header.TextureSize    = RenderTargetSize;
    EyeTexture[0].D3D11.Header.RenderViewport = EyeRenderViewport[0];
-    EyeTexture[0].D3D11.pTexture              = rtt->Tex.GetPtr();
-    EyeTexture[0].D3D11.pSRView               = rtt->TexSv.GetPtr();
+    EyeTexture[0].D3D11.pTexture              = pRendertargetTexture->Tex.GetPtr();
+    EyeTexture[0].D3D11.pSRView               = pRendertargetTexture->TexSv.GetPtr();
 
    // Right eye uses the same texture, but different rendering viewport.
    EyeTexture[1] = EyeTexture[0];
    EyeTexture[1].D3D11.Header.RenderViewport = EyeRenderViewport[1];
 
    // Configure d3d11.
-    RenderDevice* render = (RenderDevice*)pRender;
    ovrD3D11Config d3d11cfg;
    d3d11cfg.D3D11.Header.API         = ovrRenderAPI_D3D11;
-    d3d11cfg.D3D11.Header.RTSize      = Sizei(HMDDesc.Resolution.w, HMDDesc.Resolution.h);
+    d3d11cfg.D3D11.Header.RTSize      = Sizei(HMD->Resolution.w, HMD->Resolution.h);
    d3d11cfg.D3D11.Header.Multisample = backBufferMultisample;
-    d3d11cfg.D3D11.pDevice            = render->Device;
-    d3d11cfg.D3D11.pDeviceContext     = render->Context;
-    d3d11cfg.D3D11.pBackBufferRT      = render->BackBufferRT;
-    d3d11cfg.D3D11.pSwapChain         = render->SwapChain;
+    d3d11cfg.D3D11.pDevice            = pRender->Device;
+    d3d11cfg.D3D11.pDeviceContext     = pRender->Context;
+    d3d11cfg.D3D11.pBackBufferRT      = pRender->BackBufferRT;
+    d3d11cfg.D3D11.pSwapChain         = pRender->SwapChain;
 
    if (!ovrHmd_ConfigureRendering(HMD, &d3d11cfg.Config,
-                                   DistortionCaps,
-                                   eyeFov, EyeRenderDesc)) return(1);
-#else // !SDK_RENDER
-    EyeRenderDesc[0] = ovrHmd_GetRenderDesc(HMD, ovrEye_Left, eyeFov[0]);
-    EyeRenderDesc[1] = ovrHmd_GetRenderDesc(HMD, ovrEye_Right, eyeFov[1]);
-
-    // Create our own distortion mesh and shaders
-    DistortionMeshInit(DistortionCaps, HMD, EyeRenderDesc,
-                       RenderTargetSize, EyeRenderViewport, pRender);
-#endif
+                                   ovrDistortionCap_Chromatic | ovrDistortionCap_Vignette |
+                                   ovrDistortionCap_TimeWarp | ovrDistortionCap_Overdrive,
+                                   eyeFov, EyeRenderDesc)) return(1);
+    #else
+    //Shader vertex format
+    D3D11_INPUT_ELEMENT_DESC DistortionMeshVertexDesc[] = {
+        {"Position", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 0,  D3D11_INPUT_PER_VERTEX_DATA, 0},
+        {"Position", 1, DXGI_FORMAT_R32_FLOAT,    0, 8,  D3D11_INPUT_PER_VERTEX_DATA, 0},
+        {"Position", 2, DXGI_FORMAT_R32_FLOAT,    0, 12, D3D11_INPUT_PER_VERTEX_DATA, 0},
+        {"TexCoord", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 16, D3D11_INPUT_PER_VERTEX_DATA, 0},
+        {"TexCoord", 1, DXGI_FORMAT_R32G32_FLOAT, 0, 24, D3D11_INPUT_PER_VERTEX_DATA, 0},
+        {"TexCoord", 2, DXGI_FORMAT_R32G32_FLOAT, 0, 32, D3D11_INPUT_PER_VERTEX_DATA, 0}};
+
+    //Distortion vertex shader
+    const char* vertexShader =
+        "float2 EyeToSourceUVScale, EyeToSourceUVOffset;                                        \n"
+        "float4x4 EyeRotationStart, EyeRotationEnd;                                             \n"
+        "float2 TimewarpTexCoord(float2 TexCoord, float4x4 rotMat)                              \n"
+        "{                                                                                      \n"
+        // Vertex inputs are in TanEyeAngle space for the R,G,B channels (i.e. after chromatic
+        // aberration and distortion). These are now "real world" vectors in direction (x,y,1)
+        // relative to the eye of the HMD. Apply the 3x3 timewarp rotation to these vectors.
+        "    float3 transformed = float3( mul ( rotMat, float4(TexCoord.xy, 1, 1) ).xyz);       \n"
+        // Project them back onto the Z=1 plane of the rendered images.
+        "    float2 flattened = (transformed.xy / transformed.z);                               \n"
+        // Scale them into ([0,0.5],[0,1]) or ([0.5,0],[0,1]) UV lookup space (depending on eye)
+        "    return(EyeToSourceUVScale * flattened + EyeToSourceUVOffset);                      \n"
+        "}                                                                                      \n"
+        "void main(in float2 Position : POSITION, in float timewarpLerpFactor : POSITION1,      \n"
+        "          in float Vignette : POSITION2, in float2 TexCoord0 : TEXCOORD0,              \n"
+        "          in float2 TexCoord1 : TEXCOORD1, in float2 TexCoord2 : TEXCOORD2,            \n"
+        "          out float4 oPosition : SV_Position,                                          \n"
+        "          out float2 oTexCoord0 : TEXCOORD0, out float2 oTexCoord1 : TEXCOORD1,        \n"
+        "          out float2 oTexCoord2 : TEXCOORD2, out float oVignette : TEXCOORD3)          \n"
+        "{                                                                                      \n"
+        "    float4x4 lerpedEyeRot = lerp(EyeRotationStart, EyeRotationEnd, timewarpLerpFactor);\n"
+        "    oTexCoord0 = TimewarpTexCoord(TexCoord0,lerpedEyeRot);                             \n"
+        "    oTexCoord1 = TimewarpTexCoord(TexCoord1,lerpedEyeRot);                             \n"
+        "    oTexCoord2 = TimewarpTexCoord(TexCoord2,lerpedEyeRot);                             \n"
+        "    oPosition = float4(Position.xy, 0.5, 1.0);    oVignette = Vignette;                \n"
+        "}";
+
+    //Distortion pixel shader
+    const char* pixelShader =
+        "Texture2D Texture   : register(t0);                                                    \n"
+        "SamplerState Linear : register(s0);                                                    \n"
+        "float4 main(in float4 oPosition : SV_Position, in float2 oTexCoord0 : TEXCOORD0,       \n"
+        "            in float2 oTexCoord1 : TEXCOORD1, in float2 oTexCoord2 : TEXCOORD2,        \n"
+        "            in float oVignette : TEXCOORD3) : SV_Target                                \n"
+        "{                                                                                      \n"
+        // 3 samples for fixing chromatic aberrations
+        "    float R = Texture.Sample(Linear, oTexCoord0.xy).r;                                 \n"
+        "    float G = Texture.Sample(Linear, oTexCoord1.xy).g;                                 \n"
+        "    float B = Texture.Sample(Linear, oTexCoord2.xy).b;                                 \n"
+        "    return (oVignette*float4(R,G,B,1));                                                \n"
+        "}";
+    pRender->InitShaders(vertexShader, pixelShader, &Shaders, &VertexIL,DistortionMeshVertexDesc,6);
+
+    for ( int eyeNum = 0; eyeNum < 2; eyeNum++ )
+    {
+        // Allocate mesh vertices, registering with renderer using the OVR vertex format.
+        ovrDistortionMesh meshData;
+        ovrHmd_CreateDistortionMesh(HMD, (ovrEyeType) eyeNum, eyeFov[eyeNum],
+                                    ovrDistortionCap_Chromatic | ovrDistortionCap_TimeWarp, &meshData);
+        MeshVBs[eyeNum] = *pRender->CreateBuffer();
+        MeshVBs[eyeNum]->Data(Buffer_Vertex,meshData.pVertexData,sizeof(ovrDistortionVertex)*meshData.VertexCount);
+        MeshIBs[eyeNum] = *pRender->CreateBuffer();
+        MeshIBs[eyeNum]->Data(Buffer_Index,meshData.pIndexData,sizeof(unsigned short) * meshData.IndexCount);
+        ovrHmd_DestroyDistortionMesh( &meshData );
+
+        //Create eye render description for use later
+        EyeRenderDesc[eyeNum] = ovrHmd_GetRenderDesc(HMD, (ovrEyeType) eyeNum, eyeFov[eyeNum]);
+
+        //Do scale and offset
+        ovrHmd_GetRenderScaleAndOffset(eyeFov[eyeNum],RenderTargetSize, EyeRenderViewport[eyeNum], UVScaleOffset[eyeNum]);
+    }
 
-    ovrHmd_SetEnabledCaps(HMD, ovrHmdCap_LowPersistence |
-                               ovrHmdCap_LatencyTest);
+    #endif
+
+    ovrHmd_SetEnabledCaps(HMD, ovrHmdCap_LowPersistence | ovrHmdCap_DynamicPrediction);
 
    // Start the sensor which informs of the Rift's pose and motion
-    ovrHmd_StartSensor(HMD, ovrSensorCap_Orientation |
-                            ovrSensorCap_YawCorrection |
-                            ovrSensorCap_Position, 0);
+    ovrHmd_ConfigureTracking(HMD, ovrTrackingCap_Orientation |
+                                  ovrTrackingCap_MagYawCorrection |
+                                  ovrTrackingCap_Position, 0);
 
    // This creates lights and models.
    pRoomScene = new Scene;
-    PopulateRoomScene(pRoomScene, pRender);
+    PopulateRoomScene(pRoomScene, pRender);
 
-    return 0;
+    return (0);
 }
 
 //-------------------------------------------------------------------------------------
-
 void ProcessAndRender()
 {
-#if SDK_RENDER
-    ovrFrameTiming frameTiming = ovrHmd_BeginFrame(HMD, 0);
-#else
-    ovrFrameTiming frameTiming = ovrHmd_BeginFrameTiming(HMD, 0);
-#endif
-
-    //Adjust eye position and rotation from controls, maintaining y position from HMD.
-    static Vector3f EyePos(0.0f, 1.6f, -5.0f);
-    static float EyeYaw(3.141592f);
-
-    Transformf movePose = ovrHmd_GetSensorState(HMD, frameTiming.ScanoutMidpointSeconds).Predicted.Pose;
    static ovrPosef eyeRenderPose[2];
-    EyePos.y = ovrHmd_GetFloat(HMD, OVR_KEY_EYE_HEIGHT, EyePos.y);
-    bool freezeEyeRender = Util_RespondToControls(EyeYaw, EyePos,
-                                                  frameTiming.DeltaSeconds, movePose.Rotation);
-
-    pRender->BeginScene();
+    // Start timing
+    #if SDK_RENDER
+    ovrHmd_BeginFrame(HMD, 0);
+    #else
+    ovrHmd_BeginFrameTiming(HMD, 0);
+    // Retrieve data useful for handling the Health and Safety Warning - unused, but here for reference
+    ovrHSWDisplayState hswDisplayState;
+    ovrHmd_GetHSWDisplayState(HMD, &hswDisplayState);
+    #endif
+
+    // Adjust eye position and rotation from controls, maintaining y position from HMD.
+    static float    BodyYaw(3.141592f);
+    static Vector3f HeadPos(0.0f, 1.6f, -5.0f);
+    HeadPos.y = ovrHmd_GetFloat(HMD, OVR_KEY_EYE_HEIGHT, HeadPos.y);
+    bool freezeEyeRender = Util_RespondToControls(BodyYaw, HeadPos, eyeRenderPose[1].Orientation);
+
+    pRender->BeginScene();
 
-    //Render the two undistorted eye views into their render buffers.
+    // Render the two undistorted eye views into their render buffers.
+    if (!freezeEyeRender) // freeze to debug for time warp
    {
        pRender->SetRenderTarget ( pRendertargetTexture );
        pRender->SetViewport (Recti(0,0, pRendertargetTexture->GetWidth(),
@@ -203,49 +258,61 @@ void ProcessAndRender()
        pRender->Clear();
        for (int eyeIndex = 0; eyeIndex < ovrEye_Count; eyeIndex++)
        {
-            ovrEyeType eye = HMDDesc.EyeRenderOrder[eyeIndex];
-#if SDK_RENDER
-            eyeRenderPose[eye] = ovrHmd_BeginEyeRender(HMD, eye);
-#else
+            ovrEyeType eye = HMD->EyeRenderOrder[eyeIndex];
            eyeRenderPose[eye] = ovrHmd_GetEyePose(HMD, eye);
-#endif
 
-            // Get view matrix
-            Matrix4f rollPitchYaw      = Matrix4f::RotationY(EyeYaw);
+            // Get view and projection matrices
+            Matrix4f rollPitchYaw      = Matrix4f::RotationY(BodyYaw);
            Matrix4f finalRollPitchYaw = rollPitchYaw * Matrix4f(eyeRenderPose[eye].Orientation);
            Vector3f finalUp           = finalRollPitchYaw.Transform(Vector3f(0,1,0));
            Vector3f finalForward      = finalRollPitchYaw.Transform(Vector3f(0,0,-1));
-            Vector3f shiftedEyePos     = EyePos + rollPitchYaw.Transform(eyeRenderPose[eye].Position);
-
-            Matrix4f view = Matrix4f::LookAtRH(shiftedEyePos,
-                                               shiftedEyePos + finalForward, finalUp);
-
+            Vector3f shiftedEyePos     = HeadPos + rollPitchYaw.Transform(eyeRenderPose[eye].Position);
+            Matrix4f view = Matrix4f::LookAtRH(shiftedEyePos, shiftedEyePos + finalForward, finalUp);
            Matrix4f proj = ovrMatrix4f_Projection(EyeRenderDesc[eye].Fov, 0.01f, 10000.0f, true);
 
            pRender->SetViewport(Recti(EyeRenderViewport[eye]));
            pRender->SetProjection(proj);
            pRender->SetDepthMode(true, true);
            pRoomScene->Render(pRender, Matrix4f::Translation(EyeRenderDesc[eye].ViewAdjust) * view);
-
-            #if SDK_RENDER
-            ovrHmd_EndEyeRender(HMD, eye, eyeRenderPose[eye], &EyeTexture[eye].Texture);
-            #endif
        }
    }
    pRender->FinishScene();
 
-    // Now render the distorted view and finish.
-#if SDK_RENDER
-    // Let OVR do distortion rendering, Present and flush/sync
-    ovrHmd_EndFrame(HMD);
+    #if SDK_RENDER  // Let OVR do distortion rendering, Present and flush/sync
+    ovrHmd_EndFrame(HMD, eyeRenderPose, &EyeTexture[0].Texture);
+    #else
+    // Clear screen
+    pRender->SetDefaultRenderTarget();
+    pRender->SetFullViewport();
+    pRender->Clear(0.0f, 0.0f, 0.0f, 0.0f);
+
+    // Setup shader
+    ShaderFill distortionShaderFill(Shaders);
+    distortionShaderFill.SetTexture(0, pRendertargetTexture);
+    distortionShaderFill.SetInputLayout(VertexIL);
+
+    for(int eyeNum = 0; eyeNum < 2; eyeNum++)
+    {
+        // Get and set shader constants
+        Shaders->SetUniform2f("EyeToSourceUVScale",  UVScaleOffset[eyeNum][0].x, UVScaleOffset[eyeNum][0].y);
+        Shaders->SetUniform2f("EyeToSourceUVOffset", UVScaleOffset[eyeNum][1].x, UVScaleOffset[eyeNum][1].y);
+        ovrMatrix4f timeWarpMatrices[2];
+        ovrHmd_GetEyeTimewarpMatrices(HMD, (ovrEyeType)eyeNum, eyeRenderPose[eyeNum], timeWarpMatrices);
+        Shaders->SetUniform4x4f("EyeRotationStart", timeWarpMatrices[0]); //Nb transposed when set
+        Shaders->SetUniform4x4f("EyeRotationEnd",   timeWarpMatrices[1]); //Nb transposed when set
+        // Perform distortion
+        pRender->Render(&distortionShaderFill, MeshVBs[eyeNum], MeshIBs[eyeNum],sizeof(ovrDistortionVertex));
+    }
 
-#else
-    DistortionMeshRender(DistortionCaps, HMD, frameTiming.TimewarpPointSeconds,
-                         eyeRenderPose, pRender, pRendertargetTexture);
-    pRender->Present( VSyncEnabled );
-    pRender->WaitUntilGpuIdle();  //for lowest latency
-    ovrHmd_EndFrameTiming(HMD);
-#endif
+    pRender->SetDefaultRenderTarget();
+
+    pRender->Present( true ); // Vsync enabled
+
+    // Only flush GPU for ExtendDesktop; not needed in Direct App Renering with Oculus driver.
+    if (HMD->HmdCaps & ovrHmdCap_ExtendDesktop)
+        pRender->WaitUntilGpuIdle();
+    ovrHmd_EndFrameTiming(HMD);
+    #endif
 }
 
 /*
@@ -271,18 +338,27 @@ ovrHmd_EndFrame(hmd);
 //-------------------------------------------------------------------------------------
 void Release(void)
 {
-    if (pRendertargetTexture) {
-        pRendertargetTexture->Release();
-        pRendertargetTexture = 0;
+    if (pRendertargetTexture) pRendertargetTexture->Release();
+
+    #if !SDK_RENDER
+    for(int eyeNum = 0; eyeNum < 2; eyeNum++)
+    {
+        MeshVBs[eyeNum].Clear();
+        MeshIBs[eyeNum].Clear();
+    }
+    if (Shaders)
+    {
+        Shaders->UnsetShader(Shader_Vertex);
+        Shaders->UnsetShader(Shader_Pixel);
+        Shaders->Release();
    }
+    #endif
+
    ovrHmd_Destroy(HMD);
    Util_ReleaseWindowAndGraphics(pRender);
-    pRender = 0;
-    if (pRoomScene)
-    {
-        delete pRoomScene;
-        pRoomScene = 0;
-    }
+    if (pRoomScene) delete pRoomScene;
+
+    // No OVR functions involving memory are allowed after this.
    ovr_Shutdown();
 }
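For orientation when reading the patch, the SDK-distortion frame loop it introduces (the SDK_RENDER 1 path) can be reduced to the outline below. This is a minimal sketch, not part of the commit: it uses only LibOVR calls that appear in the diff (ovrHmd_BeginFrame, ovrHmd_GetEyePose, ovrHmd_EndFrame), assumes Init() has already created the HMD and configured D3D11 rendering via ovrHmd_ConfigureRendering, and replaces the sample's scene drawing with a hypothetical RenderSceneForEye() placeholder.

    // Minimal sketch of the SDK-rendered frame loop added by this patch (assumptions noted above).
    #include "OVR_CAPI.h"
    #define OVR_D3D_VERSION 11
    #include "OVR_CAPI_D3D.h"

    extern ovrHmd          HMD;            // created in Init() with ovrHmd_Create(0)
    extern ovrD3D11Texture EyeTexture[2];  // shared eye render target, filled in Init()

    // Hypothetical placeholder for the sample's per-eye scene rendering.
    void RenderSceneForEye(ovrEyeType eye, const ovrPosef& pose);

    void RenderOneFrame()
    {
        ovrHmd_BeginFrame(HMD, 0);                            // start SDK frame timing

        ovrPosef eyeRenderPose[2];
        for (int eyeIndex = 0; eyeIndex < ovrEye_Count; eyeIndex++)
        {
            ovrEyeType eye = HMD->EyeRenderOrder[eyeIndex];   // render in the HMD's preferred order
            eyeRenderPose[eye] = ovrHmd_GetEyePose(HMD, eye); // predicted head pose for this eye
            RenderSceneForEye(eye, eyeRenderPose[eye]);       // app draws into its half of the target
        }

        // SDK applies distortion/timewarp, presents, and syncs.
        ovrHmd_EndFrame(HMD, eyeRenderPose, &EyeTexture[0].Texture);
    }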
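The application-distortion path (SDK_RENDER 0) has to hand its own shaders the same per-eye information the SDK path computes internally. As a reference for that hand-off only, here is a hedged sketch of the per-eye constant computation using the two CAPI helpers the patch calls, ovrHmd_GetRenderScaleAndOffset and ovrHmd_GetEyeTimewarpMatrices; EyeDistortionConstants and ComputeEyeConstants() are illustrative names, and uploading the values into the sample's ShaderSet and drawing the distortion mesh are left out.

    // Sketch: gather the per-eye distortion shader constants used by the patch's #else path.
    #include "OVR_CAPI.h"

    struct EyeDistortionConstants            // illustrative container, not from the sample
    {
        ovrVector2f UVScaleOffset[2];        // [0] = EyeToSourceUVScale, [1] = EyeToSourceUVOffset
        ovrMatrix4f TimeWarpMatrices[2];     // start/end rotations, lerped in the vertex shader
    };

    EyeDistortionConstants ComputeEyeConstants(ovrHmd hmd, ovrEyeType eye, ovrFovPort fov,
                                               ovrSizei renderTargetSize, ovrRecti viewport,
                                               ovrPosef renderPose)
    {
        EyeDistortionConstants c;
        // Maps tan-eye-angle space into this eye's half of the shared render target.
        ovrHmd_GetRenderScaleAndOffset(fov, renderTargetSize, viewport, c.UVScaleOffset);
        // Head rotation at scanout start/end for timewarp interpolation.
        ovrHmd_GetEyeTimewarpMatrices(hmd, eye, renderPose, c.TimeWarpMatrices);
        return c;
    }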