//------------------------------------------------------------------------------------- int Init() { // Initializes LibOVR, and the Rift ovr_Initialize(); HMD = ovrHmd_Create(0); if (!HMD) { MessageBoxA(NULL,"Oculus Rift not detected.","", MB_OK); return(1); } if (HMD->ProductName[0] == '\0') MessageBoxA(NULL,"Rift detected, display not enabled.","", MB_OK); //Setup Window and Graphics - use window frame if relying on Oculus driver const int backBufferMultisample = 1; bool UseAppWindowFrame = (HMD->HmdCaps & ovrHmdCap_ExtendDesktop) ? false : true; HWND window = Util_InitWindowAndGraphics(Recti(HMD->WindowsPos, HMD->Resolution), FullScreen, backBufferMultisample, UseAppWindowFrame,&pRender); if (!window) return 1; ovrHmd_AttachToWindow(HMD, window, NULL, NULL); //Configure Stereo settings. Sizei recommenedTex0Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Left, HMD->DefaultEyeFov[0], 1.0f); Sizei recommenedTex1Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Right, HMD->DefaultEyeFov[1], 1.0f); Sizei RenderTargetSize; RenderTargetSize.w = recommenedTex0Size.w + recommenedTex1Size.w; RenderTargetSize.h = max ( recommenedTex0Size.h, recommenedTex1Size.h ); const int eyeRenderMultisample = 1; pRendertargetTexture = pRender->CreateTexture(Texture_RGBA | Texture_RenderTarget | eyeRenderMultisample, RenderTargetSize.w, RenderTargetSize.h, NULL); // The actual RT size may be different due to HW limits. RenderTargetSize.w = pRendertargetTexture->GetWidth(); RenderTargetSize.h = pRendertargetTexture->GetHeight(); // Initialize eye rendering information. // The viewport sizes are re-computed in case RenderTargetSize changed due to HW limitations. 
ovrFovPort eyeFov[2] = { HMD->DefaultEyeFov[0], HMD->DefaultEyeFov[1] } ; EyeRenderViewport[0].Pos = Vector2i(0,0); EyeRenderViewport[0].Size = Sizei(RenderTargetSize.w / 2, RenderTargetSize.h); EyeRenderViewport[1].Pos = Vector2i((RenderTargetSize.w + 1) / 2, 0); EyeRenderViewport[1].Size = EyeRenderViewport[0].Size; #if SDK_RENDER // Query D3D texture data. EyeTexture[0].D3D11.Header.API = ovrRenderAPI_D3D11; EyeTexture[0].D3D11.Header.TextureSize = RenderTargetSize; EyeTexture[0].D3D11.Header.RenderViewport = EyeRenderViewport[0]; EyeTexture[0].D3D11.pTexture = pRendertargetTexture->Tex.GetPtr(); EyeTexture[0].D3D11.pSRView = pRendertargetTexture->TexSv.GetPtr(); // Right eye uses the same texture, but different rendering viewport. EyeTexture[1] = EyeTexture[0]; EyeTexture[1].D3D11.Header.RenderViewport = EyeRenderViewport[1]; // Configure d3d11. ovrD3D11Config d3d11cfg; d3d11cfg.D3D11.Header.API = ovrRenderAPI_D3D11; d3d11cfg.D3D11.Header.RTSize = Sizei(HMD->Resolution.w, HMD->Resolution.h); d3d11cfg.D3D11.Header.Multisample = backBufferMultisample; d3d11cfg.D3D11.pDevice = pRender->Device; d3d11cfg.D3D11.pDeviceContext = pRender->Context; d3d11cfg.D3D11.pBackBufferRT = pRender->BackBufferRT; d3d11cfg.D3D11.pSwapChain = pRender->SwapChain; if (!ovrHmd_ConfigureRendering(HMD, &d3d11cfg.Config, ovrDistortionCap_Chromatic | ovrDistortionCap_Vignette | ovrDistortionCap_TimeWarp | ovrDistortionCap_Overdrive, eyeFov, EyeRenderDesc)) return(1); #else //Shader vertex format D3D11_INPUT_ELEMENT_DESC DistortionMeshVertexDesc[] = { {"Position", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0}, {"Position", 1, DXGI_FORMAT_R32_FLOAT, 0, 8, D3D11_INPUT_PER_VERTEX_DATA, 0}, {"Position", 2, DXGI_FORMAT_R32_FLOAT, 0, 12, D3D11_INPUT_PER_VERTEX_DATA, 0}, {"TexCoord", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 16, D3D11_INPUT_PER_VERTEX_DATA, 0}, {"TexCoord", 1, DXGI_FORMAT_R32G32_FLOAT, 0, 24, D3D11_INPUT_PER_VERTEX_DATA, 0}, {"TexCoord", 2, DXGI_FORMAT_R32G32_FLOAT, 0, 
32, D3D11_INPUT_PER_VERTEX_DATA, 0}}; //Distortion vertex shader const char* vertexShader = "float2 EyeToSourceUVScale, EyeToSourceUVOffset; \n" "float4x4 EyeRotationStart, EyeRotationEnd; \n" "float2 TimewarpTexCoord(float2 TexCoord, float4x4 rotMat) \n" "{ \n" // Vertex inputs are in TanEyeAngle space for the R,G,B channels (i.e. after chromatic // aberration and distortion). These are now "real world" vectors in direction (x,y,1) // relative to the eye of the HMD. Apply the 3x3 timewarp rotation to these vectors. " float3 transformed = float3( mul ( rotMat, float4(TexCoord.xy, 1, 1) ).xyz); \n" // Project them back onto the Z=1 plane of the rendered images. " float2 flattened = (transformed.xy / transformed.z); \n" // Scale them into ([0,0.5],[0,1]) or ([0.5,0],[0,1]) UV lookup space (depending on eye) " return(EyeToSourceUVScale * flattened + EyeToSourceUVOffset); \n" "} \n" "void main(in float2 Position : POSITION, in float timewarpLerpFactor : POSITION1, \n" " in float Vignette : POSITION2, in float2 TexCoord0 : TEXCOORD0, \n" " in float2 TexCoord1 : TEXCOORD1, in float2 TexCoord2 : TEXCOORD2, \n" " out float4 oPosition : SV_Position, \n" " out float2 oTexCoord0 : TEXCOORD0, out float2 oTexCoord1 : TEXCOORD1, \n" " out float2 oTexCoord2 : TEXCOORD2, out float oVignette : TEXCOORD3) \n" "{ \n" " float4x4 lerpedEyeRot = lerp(EyeRotationStart, EyeRotationEnd, timewarpLerpFactor);\n" " oTexCoord0 = TimewarpTexCoord(TexCoord0,lerpedEyeRot); \n" " oTexCoord1 = TimewarpTexCoord(TexCoord1,lerpedEyeRot); \n" " oTexCoord2 = TimewarpTexCoord(TexCoord2,lerpedEyeRot); \n" " oPosition = float4(Position.xy, 0.5, 1.0); oVignette = Vignette; \n" "}"; //Distortion pixel shader const char* pixelShader = "Texture2D Texture : register(t0); \n" "SamplerState Linear : register(s0); \n" "float4 main(in float4 oPosition : SV_Position, in float2 oTexCoord0 : TEXCOORD0, \n" " in float2 oTexCoord1 : TEXCOORD1, in float2 oTexCoord2 : TEXCOORD2, \n" " in float oVignette : TEXCOORD3) : 
SV_Target \n" "{ \n" // 3 samples for fixing chromatic aberrations " float R = Texture.Sample(Linear, oTexCoord0.xy).r; \n" " float G = Texture.Sample(Linear, oTexCoord1.xy).g; \n" " float B = Texture.Sample(Linear, oTexCoord2.xy).b; \n" " return (oVignette*float4(R,G,B,1)); \n" "}"; pRender->InitShaders(vertexShader, pixelShader, &Shaders, &VertexIL,DistortionMeshVertexDesc,6); for ( int eyeNum = 0; eyeNum < 2; eyeNum++ ) { // Allocate mesh vertices, registering with renderer using the OVR vertex format. ovrDistortionMesh meshData; ovrHmd_CreateDistortionMesh(HMD, (ovrEyeType) eyeNum, eyeFov[eyeNum], ovrDistortionCap_Chromatic | ovrDistortionCap_TimeWarp, &meshData); MeshVBs[eyeNum] = *pRender->CreateBuffer(); MeshVBs[eyeNum]->Data(Buffer_Vertex,meshData.pVertexData,sizeof(ovrDistortionVertex)*meshData.VertexCount); MeshIBs[eyeNum] = *pRender->CreateBuffer(); MeshIBs[eyeNum]->Data(Buffer_Index,meshData.pIndexData,sizeof(unsigned short) * meshData.IndexCount); ovrHmd_DestroyDistortionMesh( &meshData ); //Create eye render description for use later EyeRenderDesc[eyeNum] = ovrHmd_GetRenderDesc(HMD, (ovrEyeType) eyeNum, eyeFov[eyeNum]); //Do scale and offset ovrHmd_GetRenderScaleAndOffset(eyeFov[eyeNum],RenderTargetSize, EyeRenderViewport[eyeNum], UVScaleOffset[eyeNum]); } #endif ovrHmd_SetEnabledCaps(HMD, ovrHmdCap_LowPersistence | ovrHmdCap_DynamicPrediction); // Start the sensor which informs of the Rift's pose and motion ovrHmd_ConfigureTracking(HMD, ovrTrackingCap_Orientation | ovrTrackingCap_MagYawCorrection | ovrTrackingCap_Position, 0); // This creates lights and models. pRoomScene = new Scene; sbuilder.PopulateRoomScene(pRoomScene, pRender); return (0); }
int Init() { ovr_Initialize(); HMD = ovrHmd_Create(0); if (!HMD) { MessageBox(NULL, "Oculus Rift not detected.", "", MB_OK); return 1; } if (HMD->ProductName[0] == '\0') { MessageBox(NULL, "Rift detected, display not enabled.", "", MB_OK); } //Setup Window and Graphics - use window frame if relying on Oculus driver const int backBufferMultisample = 1; bool UseAppWindowFrame = true; HWND window = Util_InitWindowAndGraphics(Recti(HMD->WindowsPos, HMD->Resolution), FullScreen, backBufferMultisample, UseAppWindowFrame, &pRender); if (!window) return 1; ovrHmd_AttachToWindow(HMD, window, NULL, NULL); Sizei recommenedTex0Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Left, HMD->DefaultEyeFov[0], 1.0f); Sizei recommenedTex1Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Right, HMD->DefaultEyeFov[1], 1.0f); Sizei RenderTargetSize; RenderTargetSize.w = recommenedTex0Size.w + recommenedTex1Size.w; RenderTargetSize.h = max(recommenedTex0Size.h, recommenedTex1Size.h); RenderTargetSize.w = HMD->Resolution.w; RenderTargetSize.h = HMD->Resolution.h; //const int eyeRenderMultisample = 1; pRendertargetTexture = pRender->CreateRenderTarget(RenderTargetSize.w/2, RenderTargetSize.h/2); //pRendertargetTexture = pRender->CreateRenderTarget(512, 512); RenderTargetSize.w = pRendertargetTexture->Width; RenderTargetSize.h = pRendertargetTexture->Height; IDirect3DSurface9 *zb = 0; pRender->Device->GetDepthStencilSurface(&zb); D3DSURFACE_DESC d; zb->GetDesc(&d); // Initialize eye rendering information. // The viewport sizes are re-computed in case RenderTargetSize due to HW limitations. 
ovrFovPort eyeFov[2] = { HMD->DefaultEyeFov[0], HMD->DefaultEyeFov[1] }; EyeRenderViewport[0].Pos = Vector2i(0, 0); EyeRenderViewport[0].Size = Sizei(RenderTargetSize.w / 2, RenderTargetSize.h); EyeRenderViewport[1].Pos = Vector2i((RenderTargetSize.w + 1) / 2, 0); EyeRenderViewport[1].Size = EyeRenderViewport[0].Size; // --------------------- DistortionShaders = pRender->CreateShaderSet(); DistortionShaders->SetShader(pRender->LoadBuiltinShader(Shader_Vertex, VShader_Distortion)); DistortionShaders->SetShader(pRender->LoadBuiltinShader(Shader_Pixel, PShader_Distortion)); DistortionDecl = VertexDecl::GetDecl(VertexType_Distortion); for (int eyeNum = 0; eyeNum < 2; ++eyeNum) { ovrDistortionMesh meshData; ovrHmd_CreateDistortionMesh(HMD, (ovrEyeType)eyeNum, eyeFov[eyeNum], ovrDistortionCap_Chromatic | ovrDistortionCap_TimeWarp, &meshData); MeshVBs[eyeNum] = pRender->CreateVertexBuffer(); MeshVBs[eyeNum]->Data(meshData.pVertexData, sizeof(ovrDistortionVertex)*meshData.VertexCount); MeshIBs[eyeNum] = pRender->CreateIndexBuffer(); MeshIBs[eyeNum]->Data(meshData.pIndexData, sizeof(unsigned short)*meshData.IndexCount); MeshVBCnts[eyeNum] = meshData.VertexCount; MeshIBCnts[eyeNum] = meshData.IndexCount; ovrHmd_DestroyDistortionMesh(&meshData); EyeRenderDesc[eyeNum] = ovrHmd_GetRenderDesc(HMD, (ovrEyeType)eyeNum, eyeFov[eyeNum]); ovrHmd_GetRenderScaleAndOffset(eyeFov[eyeNum], RenderTargetSize, EyeRenderViewport[eyeNum], UVScaleOffset[eyeNum]); } ovrHmd_SetEnabledCaps(HMD, ovrHmdCap_LowPersistence | ovrHmdCap_DynamicPrediction); ovrHmd_ConfigureTracking(HMD, ovrTrackingCap_Orientation | ovrTrackingCap_MagYawCorrection | ovrTrackingCap_Position, 0); // --------------------- pRoomScene = new Scene; PopulateRoomScene(pRoomScene, pRender); // texture model ShaderSet* ss = pRender->CreateShaderSet(); ss->SetShader(pRender->LoadBuiltinShader(Shader_Vertex, VShader_MVP_UV)); ss->SetShader(pRender->LoadBuiltinShader(Shader_Pixel, PShader_UV)); Model<VertexXYZUV> *pModel2 = new 
Model<VertexXYZUV>(); pModel2->Decl = VertexDecl::GetDecl(VertexType_XYZUV); pModel2->Fill = new ShaderFill(ss); //Texture* ttt = new Texture(pRender); //ttt->LoadFromFile("face.tga"); pModel2->Fill->SetTexture(0, pRendertargetTexture); pModel2->AddVertex(VertexXYZUV(0.5f, -1.0f, 0.0f, 0.0f, 0.0f)); pModel2->AddVertex(VertexXYZUV(2.5f, -1.0f, 0.0f, 1.0f, 0.0f)); pModel2->AddVertex(VertexXYZUV(0.5f, 1.0f, 0.0f, 0.0f, 1.0f)); pModel2->AddVertex(VertexXYZUV(2.5f, 1.0f, 0.0f, 1.0f, 1.0f)); pModel2->AddVertex(VertexXYZUV(-1.0f, -1.5f, -1.0f, 0.0f, 0.0f)); pModel2->AddVertex(VertexXYZUV(1.0f, -1.5f, -1.0f, 1.0f, 0.0f)); pModel2->AddVertex(VertexXYZUV(-1.0f, -1.5f, 1.0f, 0.0f, 1.0f)); pModel2->AddVertex(VertexXYZUV(1.0f, -1.5f, 1.0f, 1.0f, 1.0f)); pModel2->AddTriangle(0, 1, 2); pModel2->AddTriangle(2, 1, 3); pModel2->AddTriangle(4, 5, 6); pModel2->AddTriangle(6, 5, 7); pScene = new Scene; pScene->World.Add(pModel2); return (0); }
//------------------------------------------------------------------------------------- int Init() { // Initializes LibOVR, and the Rift ovr_Initialize(); if (!HMD) { HMD = ovrHmd_Create(0); if (!HMD) { MessageBoxA(NULL, "Oculus Rift not detected.", "", MB_OK); return(1); } if (HMD->ProductName[0] == '\0') MessageBoxA(NULL, "Rift detected, display not enabled.", "", MB_OK); } //Setup Window and Graphics - use window frame if relying on Oculus driver const int backBufferMultisample = 1; bool UseAppWindowFrame = (HMD->HmdCaps & ovrHmdCap_ExtendDesktop) ? false : true; HWND window = Util_InitWindowAndGraphics(Recti(HMD->WindowsPos, HMD->Resolution), FullScreen, backBufferMultisample, UseAppWindowFrame,&pRender); if (!window) return 1; ovrHmd_AttachToWindow(HMD, window, NULL, NULL); //Configure Stereo settings. Sizei recommenedTex0Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Left, HMD->DefaultEyeFov[0], 1.0f); Sizei recommenedTex1Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Right, HMD->DefaultEyeFov[1], 1.0f); Sizei RenderTargetSize; RenderTargetSize.w = recommenedTex0Size.w + recommenedTex1Size.w; RenderTargetSize.h = max ( recommenedTex0Size.h, recommenedTex1Size.h ); const int eyeRenderMultisample = 1; pRendertargetTexture = pRender->CreateTexture(Texture_RGBA | Texture_RenderTarget | eyeRenderMultisample, RenderTargetSize.w, RenderTargetSize.h, NULL); // The actual RT size may be different due to HW limits. RenderTargetSize.w = pRendertargetTexture->GetWidth(); RenderTargetSize.h = pRendertargetTexture->GetHeight(); // Initialize eye rendering information. // The viewport sizes are re-computed in case RenderTargetSize changed due to HW limitations. 
ovrFovPort eyeFov[2] = { HMD->DefaultEyeFov[0], HMD->DefaultEyeFov[1] } ; EyeRenderViewport[0].Pos = Vector2i(0,0); EyeRenderViewport[0].Size = Sizei(RenderTargetSize.w / 2, RenderTargetSize.h); EyeRenderViewport[1].Pos = Vector2i((RenderTargetSize.w + 1) / 2, 0); EyeRenderViewport[1].Size = EyeRenderViewport[0].Size; #if SDK_RENDER // Query OGL texture data. EyeTexture[0].OGL.Header.API = ovrRenderAPI_OpenGL; EyeTexture[0].OGL.Header.TextureSize = RenderTargetSize; EyeTexture[0].OGL.Header.RenderViewport = EyeRenderViewport[0]; EyeTexture[0].OGL.TexId = pRendertargetTexture->TexId; // Right eye uses the same texture, but different rendering viewport. EyeTexture[1] = EyeTexture[0]; EyeTexture[1].OGL.Header.RenderViewport = EyeRenderViewport[1]; // Configure OpenGL. ovrGLConfig oglcfg; oglcfg.OGL.Header.API = ovrRenderAPI_OpenGL; oglcfg.OGL.Header.RTSize = Sizei(HMD->Resolution.w, HMD->Resolution.h); oglcfg.OGL.Header.Multisample = backBufferMultisample; oglcfg.OGL.Window = window; oglcfg.OGL.DC = GetDC(window); if (!ovrHmd_ConfigureRendering(HMD, &oglcfg.Config, ovrDistortionCap_Chromatic | ovrDistortionCap_Vignette | ovrDistortionCap_TimeWarp | ovrDistortionCap_Overdrive, eyeFov, EyeRenderDesc)) return(1); #else //Distortion vertex shader const char* vertexShader = "#version 110 \n" "uniform vec2 EyeToSourceUVScale; \n" "uniform vec2 EyeToSourceUVOffset; \n" "uniform mat4 EyeRotationStart; \n" "uniform mat4 EyeRotationEnd; \n" "attribute vec2 Position; \n" "attribute vec2 inTWLF_V; \n" "attribute vec2 inTexCoord0; \n" "attribute vec2 inTexCoord1; \n" "attribute vec2 inTexCoord2; \n" "varying vec4 oPosition; \n" "varying vec2 oTexCoord0; \n" "varying vec2 oTexCoord1; \n" "varying vec2 oTexCoord2; \n" "varying float oVignette; \n" "vec2 TexCoord0 = vec2(inTexCoord0.x, -inTexCoord0.y); \n" "vec2 TexCoord1 = vec2(inTexCoord1.x, -inTexCoord1.y); \n" "vec2 TexCoord2 = vec2(inTexCoord2.x, -inTexCoord2.y); \n" "float timewarpLerpFactor = inTWLF_V.x; \n" "float Vignette = 
inTWLF_V.y; \n" "vec2 TimewarpTexCoord( in vec2 TexCoord, in mat4 rotMat ) \n" "{ \n" // Vertex inputs are in TanEyeAngle space for the R,G,B channels (i.e. after chromatic // aberration and distortion). These are now "real world" vectors in direction (x,y,1) // relative to the eye of the HMD. Apply the 3x3 timewarp rotation to these vectors. " vec3 transformed = vec3( ( rotMat * vec4( TexCoord.xy , 1.00000, 1.00000) ).xyz ); \n" // Project them back onto the Z=1 plane of the rendered images. " vec2 flattened = (transformed.xy / transformed.z ); \n" // Scale them into ([0,0.5],[0,1]) or ([0.5,0],[0,1]) UV lookup space (depending on eye) " return ((EyeToSourceUVScale * flattened) + EyeToSourceUVOffset); \n" "} \n" "mat4 mat4_lerp( in mat4 x, in mat4 y, in mat4 s ) \n" "{ \n" " return mat4(mix(x[0],y[0],s[0]), mix(x[1],y[1],s[1]), mix(x[2],y[2],s[2]), mix(x[3],y[3],s[3]));\n" "} \n" "void main() \n" "{ \n" " mat4 lerpedEyeRot = mat4_lerp( EyeRotationStart, EyeRotationEnd, mat4( timewarpLerpFactor));\n" " oTexCoord0 = TimewarpTexCoord( TexCoord0, lerpedEyeRot); \n" " oTexCoord1 = TimewarpTexCoord( TexCoord1, lerpedEyeRot); \n" " oTexCoord2 = TimewarpTexCoord( TexCoord2, lerpedEyeRot); \n" " oPosition = vec4( Position.xy , 0.500000, 1.00000); \n" " oVignette = Vignette; \n" " gl_Position = oPosition; \n" "}"; //Distortion pixel shader const char* pixelShader = "#version 110 \n" "uniform sampler2D Texture0; \n" "varying vec4 oPosition; \n" "varying vec2 oTexCoord0; \n" "varying vec2 oTexCoord1; \n" "varying vec2 oTexCoord2; \n" "varying float oVignette; \n" "void main() \n" "{ \n" // 3 samples for fixing chromatic aberrations " float R = texture2D(Texture0, oTexCoord0.xy).r; \n" " float G = texture2D(Texture0, oTexCoord1.xy).g; \n" " float B = texture2D(Texture0, oTexCoord2.xy).b; \n" " gl_FragColor = (oVignette*vec4(R,G,B,1)); \n" "}"; pRender->InitShaders(vertexShader, pixelShader, &Shaders); for ( int eyeNum = 0; eyeNum < 2; eyeNum++ ) { // Allocate mesh vertices, 
registering with renderer using the OVR vertex format. ovrDistortionMesh meshData; ovrHmd_CreateDistortionMesh(HMD, (ovrEyeType) eyeNum, eyeFov[eyeNum], ovrDistortionCap_Chromatic | ovrDistortionCap_TimeWarp, &meshData); MeshVBs[eyeNum] = *pRender->CreateBuffer(); MeshVBs[eyeNum]->Data(Buffer_Vertex,meshData.pVertexData,sizeof(ovrDistortionVertex)*meshData.VertexCount); MeshIBs[eyeNum] = *pRender->CreateBuffer(); MeshIBs[eyeNum]->Data(Buffer_Index,meshData.pIndexData,sizeof(unsigned short) * meshData.IndexCount); ovrHmd_DestroyDistortionMesh( &meshData ); //Create eye render description for use later EyeRenderDesc[eyeNum] = ovrHmd_GetRenderDesc(HMD, (ovrEyeType) eyeNum, eyeFov[eyeNum]); //Do scale and offset ovrHmd_GetRenderScaleAndOffset(eyeFov[eyeNum],RenderTargetSize, EyeRenderViewport[eyeNum], UVScaleOffset[eyeNum]); } #endif ovrHmd_SetEnabledCaps(HMD, ovrHmdCap_LowPersistence | ovrHmdCap_DynamicPrediction); // Start the sensor which informs of the Rift's pose and motion ovrHmd_ConfigureTracking(HMD, ovrTrackingCap_Orientation | ovrTrackingCap_MagYawCorrection | ovrTrackingCap_Position, 0); // This creates lights and models. pRoomScene = new Scene; PopulateRoomScene(pRoomScene, pRender); return (0); }
// Exported entry point (GMO = host-callable export, e.g. for GameMaker): attaches
// the already-created HMD to an externally owned window handle, creates the render
// device and the shared eye render target, and configures SDK-side D3D11 distortion
// rendering plus head tracking.
// Returns 1 on success, -1 on setup failure, -2 if ovrHmd_ConfigureRendering fails.
GMO double linkWindowHandle(void* windowHandle)
{
    const int eyeRenderMultisample  = 1;
    const int backBufferMultisample = 1;

    HWND handle = (HWND) windowHandle;
    hWnd = handle;

    // FIX: HMD is created elsewhere before this call; the original dereferenced it
    // unconditionally below.  Fail cleanly if initialization order was violated.
    if (!HMD)
        return -1;

    ovrHmd_AttachToWindow(HMD, handle, NULL, NULL);

    // Both eyes share one render target, side by side.
    Sizei recommenedTex0Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Left,  HMD->DefaultEyeFov[0], 1.0f);
    Sizei recommenedTex1Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Right, HMD->DefaultEyeFov[1], 1.0f);
    Sizei RenderTargetSize;
    RenderTargetSize.w = recommenedTex0Size.w + recommenedTex1Size.w;
    RenderTargetSize.h = max ( recommenedTex0Size.h, recommenedTex1Size.h );

    // Always use a window frame here: the host owns the window, so we never rely on
    // the extended-desktop fullscreen path.
    bool UseAppWindowFrame = true;
    HWND window = Util_InitWindowAndGraphics(Recti(HMD->WindowsPos, HMD->Resolution),
                                             FullScreen, backBufferMultisample,
                                             UseAppWindowFrame, &pRender, handle);
    // FIX: the original ignored this result; on failure pRender would be used while
    // null below.
    if (!window)
        return -1;

    pRendertargetTexture = pRender->CreateTexture(Texture_RGBA | Texture_RenderTarget | eyeRenderMultisample,
                                                  RenderTargetSize.w, RenderTargetSize.h, NULL);
    // FIX: guard before dereferencing the render target.
    if (!pRendertargetTexture)
        return -1;
    // The actual RT size may be different due to HW limits.
    RenderTargetSize.w = pRendertargetTexture->GetWidth();
    RenderTargetSize.h = pRendertargetTexture->GetHeight();

    // Initialize eye rendering information.
    // The viewport sizes are re-computed in case RenderTargetSize changed due to HW limitations.
    ovrFovPort eyeFov[2] = { HMD->DefaultEyeFov[0], HMD->DefaultEyeFov[1] };
    EyeRenderViewport[0].Pos  = Vector2i(0,0);
    EyeRenderViewport[0].Size = Sizei(RenderTargetSize.w / 2, RenderTargetSize.h);
    EyeRenderViewport[1].Pos  = Vector2i((RenderTargetSize.w + 1) / 2, 0);
    EyeRenderViewport[1].Size = EyeRenderViewport[0].Size;

    EyeTexture[0].D3D11.Header.API            = ovrRenderAPI_D3D11;
    EyeTexture[0].D3D11.Header.TextureSize    = RenderTargetSize;
    EyeTexture[0].D3D11.Header.RenderViewport = EyeRenderViewport[0];
    EyeTexture[0].D3D11.pTexture              = pRendertargetTexture->Tex.GetPtr();
    EyeTexture[0].D3D11.pSRView               = pRendertargetTexture->TexSv.GetPtr();

    // Right eye uses the same texture, but different rendering viewport.
    EyeTexture[1] = EyeTexture[0];
    EyeTexture[1].D3D11.Header.RenderViewport = EyeRenderViewport[1];

    // Configure d3d11.
    ovrD3D11Config d3d11cfg;
    d3d11cfg.D3D11.Header.API         = ovrRenderAPI_D3D11;
    d3d11cfg.D3D11.Header.RTSize      = Sizei(HMD->Resolution.w, HMD->Resolution.h);
    d3d11cfg.D3D11.Header.Multisample = backBufferMultisample;
    d3d11cfg.D3D11.pDevice            = pRender->Device;
    d3d11cfg.D3D11.pDeviceContext     = pRender->Context;
    d3d11cfg.D3D11.pBackBufferRT      = pRender->BackBufferRT;
    d3d11cfg.D3D11.pSwapChain         = pRender->SwapChain;

    if (!ovrHmd_ConfigureRendering(HMD, &d3d11cfg.Config,
                                   ovrDistortionCap_Chromatic | ovrDistortionCap_Vignette |
                                   ovrDistortionCap_TimeWarp | ovrDistortionCap_Overdrive,
                                   eyeFov, EyeRenderDesc))
        return -2;

    // Some settings might be changed here lateron.
    ovrHmd_SetEnabledCaps(HMD, ovrHmdCap_LowPersistence | ovrHmdCap_DynamicPrediction);

    // Start the sensor which informs of the Rift's pose and motion
    ovrHmd_ConfigureTracking(HMD, ovrTrackingCap_Orientation | ovrTrackingCap_MagYawCorrection |
                                  ovrTrackingCap_Position, 0);

    return 1;
}