/**
 * Create a window suitable for Direct HMD mode and bind the OVR SDK to its
 * native handle.
 *
 * @param hmd      the headset the window will be attached to
 * @param outSize  requested window size; transposed on Linux (see below)
 * @return the newly created GLFW window
 */
GLFWwindow * createDirectHmdModeWindow(ovrHmd hmd, glm::uvec2 & outSize) {
    // On Linux it is recommended to leave the screen in its default portrait
    // orientation, and the SDK currently offers no way to query whether that
    // is the case, so direct mode simply transposes the requested dimensions.
    ON_LINUX([&] {
        std::swap(outSize.x, outSize.y);
    });

    // Direct HMD mode always runs at the native resolution (the user has no
    // control over it).  Prefer a secondary screen for the output window so a
    // dev environment on the primary screen stays usable for debugging.
    GLFWwindow * outputWindow = glfw::createSecondaryScreenWindow(outSize);

    // Hand the native window handle over to the OVR SDK.
    void * nativeHandle = glfw::getNativeWindowHandle(outputWindow);
    if (nativeHandle != nullptr) {
        ovrHmd_AttachToWindow(hmd, nativeHandle, nullptr, nullptr);
    }

    // Workaround: some 0.4.x SDK versions do not engage Direct Mode properly
    // until GetEyePoses has been called at least once.
    {
        static ovrVector3f offsets[2];
        static ovrPosef poses[2];
        ovrHmd_GetEyePoses(hmd, 0, offsets, poses, nullptr);
    }

    return outputWindow;
}
bool HeadMountedDisplay::attachToWindow( void* window, const ovrRecti* dst_mirror_rect, const ovrRecti* src_render_target_rect ) { ovrBool ret; KVS_OVR_CALL( ret = ovrHmd_AttachToWindow( m_handler, window, dst_mirror_rect, src_render_target_rect ) ); return ret == ovrTrue; }
// Construct the VR subsystem: create (or fake) the HMD, enable tracking,
// size and create the eye framebuffer, and configure SDK-side distortion
// rendering against the currently active window/GL context.
// Throws Error when neither a real nor a debug HMD can be created, or when
// distortion rendering cannot be configured.
VR::VR(Game &game) {
    // create HMD; fall back to a synthetic DK2 so the app still runs without hardware
    if (!(m_hmd = ovrHmd_Create(0))) {
        std::cerr << "couldn't create Oculus HMD, falling back to debug HMD" << std::endl;
        if (!(m_hmd = ovrHmd_CreateDebug(ovrHmd_DK2)))
            throw Error("couldn't create debug HMD");
    }
    orient_window(game);

    // enable position, rotation tracking (no capabilities are mandatory: last arg 0)
    ovrHmd_ConfigureTracking(m_hmd, ovrTrackingCap_Orientation | ovrTrackingCap_MagYawCorrection | ovrTrackingCap_Position, 0);

    // calculate framebuffer resolution and create framebuffer:
    // width is both eyes side by side, height is the taller of the two
    ovrSizei eye_res[2];
    eye_res[0] = ovrHmd_GetFovTextureSize(m_hmd, ovrEye_Left, m_hmd->DefaultEyeFov[0], 1.0);
    eye_res[1] = ovrHmd_GetFovTextureSize(m_hmd, ovrEye_Right, m_hmd->DefaultEyeFov[1], 1.0);
    m_fb_width = eye_res[0].w + eye_res[1].w;
    m_fb_height = eye_res[0].h > eye_res[1].h ? eye_res[0].h : eye_res[1].h;
    update_fb();

    // fill in ovrGLConfig (zeroed first so unset fields are deterministic)
    ovrGLConfig glcfg;
    memset(&glcfg, 0, sizeof glcfg);
    glcfg.OGL.Header.API = ovrRenderAPI_OpenGL;
    glcfg.OGL.Header.RTSize = m_hmd->Resolution;
    glcfg.OGL.Header.Multisample = 1;
    // bind to whatever window/DC is current on this thread
    glcfg.OGL.Window = GetActiveWindow();
    glcfg.OGL.DC = wglGetCurrentDC();

    // direct-to-HMD mode (no extended desktop) requires attaching to the window
    if (!(m_hmd->HmdCaps & ovrHmdCap_ExtendDesktop))
        ovrHmd_AttachToWindow(m_hmd, glcfg.OGL.Window, 0, 0);

    // enable HMD, distortion capabilities and enable SDK rendering
    ovrHmd_SetEnabledCaps(m_hmd, ovrHmdCap_LowPersistence | ovrHmdCap_DynamicPrediction);
    if (!ovrHmd_ConfigureRendering(m_hmd, &glcfg.Config, ovrDistortionCap_Chromatic | ovrDistortionCap_Vignette | ovrDistortionCap_TimeWarp | ovrDistortionCap_Overdrive, m_hmd->DefaultEyeFov, m_eye_rdesc))
        throw Error("failed to configure distortion rendering");

    // disable health/safety warning overlay
    ovrhmd_EnableHSWDisplaySDKRender(m_hmd, 0);
}
// Configure SDK-side distortion rendering against the application's D3D11
// back buffer, then attach the runtime to the output window.
void Oculus::ConfigureBackBufferRendering(ViewState* view_state, Texture* back_buffer) {
    ovrD3D11Config cfg;
    // Render-API header: D3D11 at the window's back-buffer size, no MSAA.
    cfg.D3D11.Header.API = ovrRenderAPI_D3D11;
    cfg.D3D11.Header.BackBufferSize = OculusHelper::ConvertArrayToSizei(view_state->window_details.screen_size);
    cfg.D3D11.Header.Multisample = 1;
    // Device objects the SDK's distortion renderer will draw with.
    cfg.D3D11.pDevice = view_state->device_interface;
    cfg.D3D11.pDeviceContext = view_state->device_context;
    cfg.D3D11.pBackBufferRT = back_buffer->GetRenderTargetView();
    cfg.D3D11.pSwapChain = view_state->swap_chain;

    // Chromatic-aberration correction, timewarp, and overdrive are handled by
    // the SDK during its distortion pass.
    ovrHmd_ConfigureRendering(head_mounted_display, &cfg.Config,
                              ovrDistortionCap_Chromatic | ovrDistortionCap_TimeWarp | ovrDistortionCap_Overdrive,
                              eye_fovs, eye_rendering_desc);
    ovrHmd_AttachToWindow(head_mounted_display, view_state->window_handler, NULL, NULL);
}
// Enable Oculus Rift rendering support.
// Returns 0 when the required GL float-texture support is missing, nonzero
// (true) on success.  Rebuilds eye FBOs, derives the positional-tracking
// camera frustum from the HMD description, and compiles the distortion and
// timewarp shader variants.
int32_t OVR_Enable(void) {
    int i;
    eyeScaleOffset_t camera;
    // Distortion rendering needs float render targets; bail out if unsupported.
    if (!glConfig.arb_texture_float)
        return 0;
    // Direct-to-HMD mode (no extended desktop): the SDK must be attached to
    // the game's native window (Windows-only code path).
    if (hmd && !(hmd->HmdCaps & ovrHmdCap_ExtendDesktop)) {
#ifdef WIN32
        ovrHmd_AttachToWindow(hmd,mainWindowInfo.info.win.window,NULL,NULL);
#endif
    }
    // Tear down any FBOs left over from a previous enable.
    for (i = 0; i < 2; i++) {
        if (renderInfo[i].eyeFBO.valid)
            R_DelFBO(&renderInfo[i].eyeFBO);
        if (offscreen[i].valid)
            R_DelFBO(&offscreen[i]);
    }
    // Build a projection for the positional-tracking camera from its
    // half-angle FOVs (scale = cot(fov/2), centered).
    camera.x.offset = 0.0;
    camera.x.scale = 1.0 / tanf(hmd->CameraFrustumHFovInRadians * 0.5);
    camera.y.offset = 0.0;
    camera.y.scale = 1.0 / tanf(hmd->CameraFrustumVFovInRadians * 0.5);
    R_MakePerspectiveFromScale(camera,hmd->CameraFrustumNearZInMeters, hmd->CameraFrustumFarZInMeters, cameraFrustum);
    // Static per-eye vertex buffers for the distortion meshes.
    R_CreateIVBO(&renderInfo[0].eye,GL_STATIC_DRAW);
    R_CreateIVBO(&renderInfo[1].eye,GL_STATIC_DRAW);
    //VR_FrameStart(1);
    // Compile the four shader variants: plain/chromatic x static/timewarp.
    VR_OVR_InitShader(&ovr_distortion_shaders[0],&ovr_shader_norm);
    VR_OVR_InitShader(&ovr_distortion_shaders[1],&ovr_shader_chrm);
    VR_OVR_InitShader(&ovr_timewarp_shaders[0],&ovr_shader_warp);
    VR_OVR_InitShader(&ovr_timewarp_shaders[1],&ovr_shader_chrm_warp);
    //OVR_FrameStart(true);
    // Publish the detected headset name to the console variable system.
    Cvar_ForceSet("vr_hmdstring",(char *)hmd->ProductName);
    return true;
}
/**
 * ERRORCODE 0 => OK
 * ERRORCODE 1 => Unable to configure OVR Render
 */
int init_render_ovr(){
    // Configure and Init rendering using the OVR render Core.
    // Input  => rendered 3D texture (Two passes: 1 Left, 1 Right)
    // Output => auto, on defined window

    // Configure rendering with OpenGL: full HMD resolution, no MSAA, bound to
    // the SDL window's native handle.
    ovrGLConfig cfg;
    cfg.OGL.Header.API = ovrRenderAPI_OpenGL;
    cfg.OGL.Header.RTSize = OVR::Sizei( hmd->Resolution.w, hmd->Resolution.h );
    cfg.OGL.Header.Multisample = 1;
    cfg.OGL.Window = sdl_window_info.info.win.window;
    // NOTE(review): GetWindowDC's DC is never released here (and GetDC is the
    // usual choice for client-area rendering) — confirm whether this leaks a
    // DC per call or the window class uses CS_OWNDC.
    cfg.OGL.DC = GetWindowDC(sdl_window_info.info.win.window);

    ovrFovPort eyesFov[2] = { hmd->DefaultEyeFov[0], hmd->DefaultEyeFov[1] };

    // Only engage SDK distortion rendering when actually driving the Rift.
    if ( mode == MODE_OCULUS ){
        if ( !ovrHmd_ConfigureRendering(hmd, &cfg.Config, hmd->DistortionCaps, eyesFov, eyesRenderDesc) )
            return 1;
        // Direct OVR SDK output to Oculus Display
        ovrHmd_AttachToWindow(hmd, sdl_window_info.info.win.window, nullptr, nullptr);
    }

    // Both eyes sample the same shared render texture; the left eye uses the
    // left half of the viewport, the right eye starts at x = tex0 width.
    EyeTexture[0].OGL.Header.API = ovrRenderAPI_OpenGL;
    EyeTexture[0].OGL.Header.TextureSize = renderTargetSize;
    EyeTexture[0].OGL.Header.RenderViewport.Size = recommendedTex0Size;
    EyeTexture[0].OGL.Header.RenderViewport.Pos.x = 0;
    EyeTexture[0].OGL.Header.RenderViewport.Pos.y = 0;
    EyeTexture[0].OGL.TexId = renderedTex;

    EyeTexture[1].OGL.Header.API = ovrRenderAPI_OpenGL;
    EyeTexture[1].OGL.Header.TextureSize = renderTargetSize;
    EyeTexture[1].OGL.Header.RenderViewport.Size = recommendedTex1Size;
    EyeTexture[1].OGL.Header.RenderViewport.Pos.x = recommendedTex0Size.w;
    EyeTexture[1].OGL.Header.RenderViewport.Pos.y = 0;
    EyeTexture[1].OGL.TexId = renderedTex;

    // Keep plain ovrTexture aliases for the EndFrame call.
    eyeTex[0] = EyeTexture[0].Texture;
    eyeTex[1] = EyeTexture[1].Texture;

    return 0;
}
//-------------------------------------------------------------------------------------
// Initialize LibOVR, the Rift, the output window/graphics device, the shared
// eye render target, and either SDK-side distortion rendering (SDK_RENDER)
// or an app-side distortion mesh + HLSL shader pipeline.
// Returns 0 on success, 1 on failure.
int Init()
{
    // Initializes LibOVR, and the Rift
    ovr_Initialize();
    HMD = ovrHmd_Create(0);
    if (!HMD)
    {
        MessageBoxA(NULL,"Oculus Rift not detected.","", MB_OK);
        return(1);
    }
    if (HMD->ProductName[0] == '\0')
        MessageBoxA(NULL,"Rift detected, display not enabled.","", MB_OK);

    // Setup Window and Graphics - use window frame if relying on Oculus driver
    // (i.e. direct mode; extended-desktop mode runs borderless on the Rift display)
    const int backBufferMultisample = 1;
    bool UseAppWindowFrame = (HMD->HmdCaps & ovrHmdCap_ExtendDesktop) ? false : true;
    HWND window = Util_InitWindowAndGraphics(Recti(HMD->WindowsPos, HMD->Resolution),
                                             FullScreen, backBufferMultisample, UseAppWindowFrame,&pRender);
    if (!window) return 1;
    ovrHmd_AttachToWindow(HMD, window, NULL, NULL);

    // Configure Stereo settings: both eyes side by side in one render target,
    // height is the taller of the two recommended sizes.
    Sizei recommenedTex0Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Left,  HMD->DefaultEyeFov[0], 1.0f);
    Sizei recommenedTex1Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Right, HMD->DefaultEyeFov[1], 1.0f);
    Sizei RenderTargetSize;
    RenderTargetSize.w = recommenedTex0Size.w + recommenedTex1Size.w;
    RenderTargetSize.h = max ( recommenedTex0Size.h, recommenedTex1Size.h );

    const int eyeRenderMultisample = 1;
    pRendertargetTexture = pRender->CreateTexture(Texture_RGBA | Texture_RenderTarget | eyeRenderMultisample,
                                                  RenderTargetSize.w, RenderTargetSize.h, NULL);
    // The actual RT size may be different due to HW limits.
    RenderTargetSize.w = pRendertargetTexture->GetWidth();
    RenderTargetSize.h = pRendertargetTexture->GetHeight();

    // Initialize eye rendering information.
    // The viewport sizes are re-computed in case RenderTargetSize changed due to HW limitations.
    ovrFovPort eyeFov[2] = { HMD->DefaultEyeFov[0], HMD->DefaultEyeFov[1] } ;

    // Left eye = left half, right eye = right half of the shared target.
    EyeRenderViewport[0].Pos  = Vector2i(0,0);
    EyeRenderViewport[0].Size = Sizei(RenderTargetSize.w / 2, RenderTargetSize.h);
    EyeRenderViewport[1].Pos  = Vector2i((RenderTargetSize.w + 1) / 2, 0);
    EyeRenderViewport[1].Size = EyeRenderViewport[0].Size;

#if SDK_RENDER
    // Query D3D texture data.
    EyeTexture[0].D3D11.Header.API            = ovrRenderAPI_D3D11;
    EyeTexture[0].D3D11.Header.TextureSize    = RenderTargetSize;
    EyeTexture[0].D3D11.Header.RenderViewport = EyeRenderViewport[0];
    EyeTexture[0].D3D11.pTexture              = pRendertargetTexture->Tex.GetPtr();
    EyeTexture[0].D3D11.pSRView               = pRendertargetTexture->TexSv.GetPtr();

    // Right eye uses the same texture, but different rendering viewport.
    EyeTexture[1] = EyeTexture[0];
    EyeTexture[1].D3D11.Header.RenderViewport = EyeRenderViewport[1];

    // Configure d3d11.
    ovrD3D11Config d3d11cfg;
    d3d11cfg.D3D11.Header.API         = ovrRenderAPI_D3D11;
    d3d11cfg.D3D11.Header.RTSize      = Sizei(HMD->Resolution.w, HMD->Resolution.h);
    d3d11cfg.D3D11.Header.Multisample = backBufferMultisample;
    d3d11cfg.D3D11.pDevice            = pRender->Device;
    d3d11cfg.D3D11.pDeviceContext     = pRender->Context;
    d3d11cfg.D3D11.pBackBufferRT      = pRender->BackBufferRT;
    d3d11cfg.D3D11.pSwapChain         = pRender->SwapChain;

    if (!ovrHmd_ConfigureRendering(HMD, &d3d11cfg.Config,
                                   ovrDistortionCap_Chromatic | ovrDistortionCap_Vignette |
                                   ovrDistortionCap_TimeWarp | ovrDistortionCap_Overdrive,
                                   eyeFov, EyeRenderDesc))
        return(1);
#else
    // App-rendered distortion: build the vertex layout, shaders, and per-eye
    // distortion meshes ourselves.

    //Shader vertex format
    D3D11_INPUT_ELEMENT_DESC DistortionMeshVertexDesc[] = {
        {"Position", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 0,  D3D11_INPUT_PER_VERTEX_DATA, 0},
        {"Position", 1, DXGI_FORMAT_R32_FLOAT,    0, 8,  D3D11_INPUT_PER_VERTEX_DATA, 0},
        {"Position", 2, DXGI_FORMAT_R32_FLOAT,    0, 12, D3D11_INPUT_PER_VERTEX_DATA, 0},
        {"TexCoord", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 16, D3D11_INPUT_PER_VERTEX_DATA, 0},
        {"TexCoord", 1, DXGI_FORMAT_R32G32_FLOAT, 0, 24, D3D11_INPUT_PER_VERTEX_DATA, 0},
        {"TexCoord", 2, DXGI_FORMAT_R32G32_FLOAT, 0, 32, D3D11_INPUT_PER_VERTEX_DATA, 0}};

    //Distortion vertex shader
    const char* vertexShader =
        "float2 EyeToSourceUVScale, EyeToSourceUVOffset;                                        \n"
        "float4x4 EyeRotationStart, EyeRotationEnd;                                             \n"
        "float2 TimewarpTexCoord(float2 TexCoord, float4x4 rotMat)                              \n"
        "{                                                                                      \n"
        // Vertex inputs are in TanEyeAngle space for the R,G,B channels (i.e. after chromatic
        // aberration and distortion). These are now "real world" vectors in direction (x,y,1)
        // relative to the eye of the HMD. Apply the 3x3 timewarp rotation to these vectors.
        "    float3 transformed = float3( mul ( rotMat, float4(TexCoord.xy, 1, 1) ).xyz);       \n"
        // Project them back onto the Z=1 plane of the rendered images.
        "    float2 flattened = (transformed.xy / transformed.z);                               \n"
        // Scale them into ([0,0.5],[0,1]) or ([0.5,0],[0,1]) UV lookup space (depending on eye)
        "    return(EyeToSourceUVScale * flattened + EyeToSourceUVOffset);                      \n"
        "}                                                                                      \n"
        "void main(in float2 Position : POSITION, in float timewarpLerpFactor : POSITION1,      \n"
        "          in float Vignette : POSITION2, in float2 TexCoord0 : TEXCOORD0,              \n"
        "          in float2 TexCoord1 : TEXCOORD1, in float2 TexCoord2 : TEXCOORD2,            \n"
        "          out float4 oPosition : SV_Position,                                          \n"
        "          out float2 oTexCoord0 : TEXCOORD0, out float2 oTexCoord1 : TEXCOORD1,        \n"
        "          out float2 oTexCoord2 : TEXCOORD2, out float oVignette : TEXCOORD3)          \n"
        "{                                                                                      \n"
        "    float4x4 lerpedEyeRot = lerp(EyeRotationStart, EyeRotationEnd, timewarpLerpFactor);\n"
        "    oTexCoord0 = TimewarpTexCoord(TexCoord0,lerpedEyeRot);                             \n"
        "    oTexCoord1 = TimewarpTexCoord(TexCoord1,lerpedEyeRot);                             \n"
        "    oTexCoord2 = TimewarpTexCoord(TexCoord2,lerpedEyeRot);                             \n"
        "    oPosition = float4(Position.xy, 0.5, 1.0);    oVignette = Vignette;                \n"
        "}";

    //Distortion pixel shader
    const char* pixelShader =
        "Texture2D Texture   : register(t0);                                                    \n"
        "SamplerState Linear : register(s0);                                                    \n"
        "float4 main(in float4 oPosition : SV_Position,  in float2 oTexCoord0 : TEXCOORD0,      \n"
        "            in float2 oTexCoord1 : TEXCOORD1, in float2 oTexCoord2 : TEXCOORD2,        \n"
        "            in float oVignette : TEXCOORD3)   : SV_Target                              \n"
        "{                                                                                      \n"
        // 3 samples for fixing chromatic aberrations
        "    float R = Texture.Sample(Linear, oTexCoord0.xy).r;                                 \n"
        "    float G = Texture.Sample(Linear, oTexCoord1.xy).g;                                 \n"
        "    float B = Texture.Sample(Linear, oTexCoord2.xy).b;                                 \n"
        "    return (oVignette*float4(R,G,B,1));                                                \n"
        "}";
    pRender->InitShaders(vertexShader, pixelShader, &Shaders, &VertexIL,DistortionMeshVertexDesc,6);

    for ( int eyeNum = 0; eyeNum < 2; eyeNum++ )
    {
        // Allocate mesh vertices, registering with renderer using the OVR vertex format.
        ovrDistortionMesh meshData;
        ovrHmd_CreateDistortionMesh(HMD, (ovrEyeType) eyeNum, eyeFov[eyeNum],
                                    ovrDistortionCap_Chromatic | ovrDistortionCap_TimeWarp, &meshData);
        MeshVBs[eyeNum] = *pRender->CreateBuffer();
        MeshVBs[eyeNum]->Data(Buffer_Vertex,meshData.pVertexData,sizeof(ovrDistortionVertex)*meshData.VertexCount);
        MeshIBs[eyeNum] = *pRender->CreateBuffer();
        MeshIBs[eyeNum]->Data(Buffer_Index,meshData.pIndexData,sizeof(unsigned short) * meshData.IndexCount);
        // SDK-owned mesh memory can be freed once copied into our buffers.
        ovrHmd_DestroyDistortionMesh( &meshData );

        //Create eye render description for use later
        EyeRenderDesc[eyeNum] = ovrHmd_GetRenderDesc(HMD, (ovrEyeType) eyeNum,  eyeFov[eyeNum]);

        //Do scale and offset
        ovrHmd_GetRenderScaleAndOffset(eyeFov[eyeNum],RenderTargetSize, EyeRenderViewport[eyeNum], UVScaleOffset[eyeNum]);
    }
#endif

    ovrHmd_SetEnabledCaps(HMD, ovrHmdCap_LowPersistence | ovrHmdCap_DynamicPrediction);

    // Start the sensor which informs of the Rift's pose and motion
    ovrHmd_ConfigureTracking(HMD, ovrTrackingCap_Orientation | ovrTrackingCap_MagYawCorrection |
                             ovrTrackingCap_Position, 0);

    // This creates lights and models.
    pRoomScene = new Scene;
    sbuilder.PopulateRoomScene(pRoomScene, pRender);

    return (0);
}
// (Re)initialize the OVR session after a renderer reset.
// In debug mode only the render-target size is extracted from the supplied
// API config (version-dependent field name) and no HMD is created.
// Otherwise: create a real HMD (falling back to a debug DK2), attach it to
// the native window handle, configure distortion rendering and tracking, and
// compute the combined eye render-target size.
// Returns true when the HMD is fully configured; false (with m_hmd destroyed)
// on any failure.
bool OVR::postReset(void* _nwh, ovrRenderAPIConfig* _config, bool _debug)
{
    if (_debug)
    {
        // Debug path: just record the back-buffer size for the active API.
        switch (_config->Header.API)
        {
#if BGFX_CONFIG_RENDERER_DIRECT3D11
        case ovrRenderAPI_D3D11:
            {
                ovrD3D11ConfigData* data = (ovrD3D11ConfigData*)_config;
#    if OVR_VERSION > OVR_VERSION_043
                m_rtSize = data->Header.BackBufferSize;
#    else
                m_rtSize = data->Header.RTSize;
#    endif // OVR_VERSION > OVR_VERSION_043
            }
            break;
#endif // BGFX_CONFIG_RENDERER_DIRECT3D11

#if BGFX_CONFIG_RENDERER_OPENGL
        case ovrRenderAPI_OpenGL:
            {
                ovrGLConfigData* data = (ovrGLConfigData*)_config;
#    if OVR_VERSION > OVR_VERSION_043
                m_rtSize = data->Header.BackBufferSize;
#    else
                m_rtSize = data->Header.RTSize;
#    endif // OVR_VERSION > OVR_VERSION_043
            }
            break;
#endif // BGFX_CONFIG_RENDERER_OPENGL

        case ovrRenderAPI_None:
        default:
            BX_CHECK(false, "You should not be here!");
            break;
        }

        m_debug = true;
        return false;
    }

    if (!m_initialized)
    {
        return false;
    }

    if (!_debug)
    {
        m_hmd = ovrHmd_Create(0);
    }

    // No physical headset: fall back to a synthetic DK2 device.
    if (NULL == m_hmd)
    {
        m_hmd = ovrHmd_CreateDebug(ovrHmd_DK2);
        BX_WARN(NULL != m_hmd, "Unable to initialize OVR.");

        if (NULL == m_hmd)
        {
            return false;
        }
    }

    BX_TRACE("HMD: %s, %s, firmware: %d.%d"
        , m_hmd->ProductName
        , m_hmd->Manufacturer
        , m_hmd->FirmwareMajor
        , m_hmd->FirmwareMinor
        );

    ovrBool result;
    result = ovrHmd_AttachToWindow(m_hmd, _nwh, NULL, NULL);
    if (!result) { goto ovrError; }

    ovrFovPort eyeFov[2] = { m_hmd->DefaultEyeFov[0], m_hmd->DefaultEyeFov[1] };
    result = ovrHmd_ConfigureRendering(m_hmd
            , _config
            , 0
#if OVR_VERSION < OVR_VERSION_050
            | ovrDistortionCap_Chromatic // permanently enabled >= v5.0
#endif
            | ovrDistortionCap_Vignette
            | ovrDistortionCap_TimeWarp
            | ovrDistortionCap_Overdrive
            | ovrDistortionCap_NoRestore
            | ovrDistortionCap_HqDistortion
            , eyeFov
            , m_erd
            );
    if (!result) { goto ovrError; }

    ovrHmd_SetEnabledCaps(m_hmd
            , 0
            | ovrHmdCap_LowPersistence
            | ovrHmdCap_DynamicPrediction
            );

    result = ovrHmd_ConfigureTracking(m_hmd
            , 0
            | ovrTrackingCap_Orientation
            | ovrTrackingCap_MagYawCorrection
            | ovrTrackingCap_Position
            , 0
            );

    if (!result)
    {
        // Shared failure path: destroy the HMD and report not-configured.
ovrError:
        BX_TRACE("Failed to initialize OVR.");
        ovrHmd_Destroy(m_hmd);
        m_hmd = NULL;
        return false;
    }

    // Combined render target: both eyes side by side, max of the two heights.
    ovrSizei sizeL = ovrHmd_GetFovTextureSize(m_hmd, ovrEye_Left,  m_hmd->DefaultEyeFov[0], 1.0f);
    ovrSizei sizeR = ovrHmd_GetFovTextureSize(m_hmd, ovrEye_Right, m_hmd->DefaultEyeFov[1], 1.0f);
    m_rtSize.w = sizeL.w + sizeR.w;
    m_rtSize.h = bx::uint32_max(sizeL.h, sizeR.h);

    m_warning = true;

    return true;
}
// Construct the Oculus wrapper: create the HMD, attach it to the app window,
// size and create the shared eye render target, build per-eye distortion
// meshes/shaders for app-side distortion rendering, and start tracking.
//
// @param latency  when true, enable dynamic prediction on the HMD.
//
// NOTE(review): m_isReady is set to true on entry and to false only after the
// full setup succeeds — the flag appears to mean "initialization incomplete /
// error" rather than "ready"; confirm against callers before relying on it.
COculusVR::COculusVR(bool latency)
{
    m_isReady = true;

    // Initializes LibOVR, and the Rift
    ovr_Initialize();

    Hmd = ovrHmd_Create(0);
    if (!Hmd)
    {
        MessageBoxA(NULL, "Oculus Rift not detected.", "", MB_OK);
        return;
    }
    if (Hmd->ProductName[0] == '\0')
        MessageBoxA(NULL, "Rift detected, display not enabled.", "", MB_OK);

    if (Hmd->HmdCaps & ovrHmdCap_ExtendDesktop) {
        // Extended-desktop mode: window must cover the Rift display.
        WindowSize = Hmd->Resolution;
    }
    else {
        // In Direct App-rendered mode, we can use smaller window size,
        // as it can have its own contents and isn't tied to the buffer.
        WindowSize = Sizei(1100, 618);//Sizei(960, 540); avoid rotated output bug.
    }

    ovrHmd_AttachToWindow(Hmd, wzGetWindowHandle(), NULL, NULL);

    // Configure Stereo settings: both eyes share one render target, side by
    // side, with the taller of the two recommended heights.
    Sizei recommenedTex0Size = ovrHmd_GetFovTextureSize(Hmd, ovrEye_Left,  Hmd->DefaultEyeFov[0], 1.0f);
    Sizei recommenedTex1Size = ovrHmd_GetFovTextureSize(Hmd, ovrEye_Right, Hmd->DefaultEyeFov[1], 1.0f);
    EyeRenderTargetSize.w = recommenedTex0Size.w + recommenedTex1Size.w;
    EyeRenderTargetSize.h = Alg::Max( recommenedTex0Size.h, recommenedTex1Size.h );

    //Create Framebuffer (color texture + depth renderbuffer)
    wzCreateRenderTarget(&m_screenRender);
    wzCreateRenderBufferDepth(&m_screenBuffer,EyeRenderTargetSize.w,EyeRenderTargetSize.h);
    wzCreateTexture(&m_screenTex,EyeRenderTargetSize.w,EyeRenderTargetSize.h,WZ_FORMATTYPE_RGB,NULL);
    //attach
    wzSetRenderBuffer(&m_screenRender,&m_screenBuffer);
    wzSetRenderTexture(&m_screenRender,&m_screenTex);

    // Initialize eye rendering information.
    // The viewport sizes are re-computed in case RenderTargetSize changed due to HW limitations.
    ovrFovPort eyeFov[2] = { Hmd->DefaultEyeFov[0], Hmd->DefaultEyeFov[1] } ;

    // Left eye = left half, right eye = right half of the shared target.
    EyeRenderViewport[0].Pos  = Vector2i(0,0);
    EyeRenderViewport[0].Size = Sizei(EyeRenderTargetSize.w / 2, EyeRenderTargetSize.h);
    EyeRenderViewport[1].Pos  = Vector2i((EyeRenderTargetSize.w + 1) / 2, 0);
    EyeRenderViewport[1].Size = EyeRenderViewport[0].Size;

    //Shader vertex format (matches the OVR distortion vertex layout below)
    wzVertexElements ve_var[] = {
        {WZVETYPE_FLOAT2,"position"},
        {WZVETYPE_FLOAT1,"timewarpLerpFactor"},
        {WZVETYPE_FLOAT1,"vignette"},
        {WZVETYPE_FLOAT2,"texCoord0"},
        {WZVETYPE_FLOAT2,"texCoord1"},
        {WZVETYPE_FLOAT2,"texCoord2"},
        WZVE_TMT()
    };
    //carete mesh
    for ( int eyeNum = 0; eyeNum < 2; eyeNum++ )
    {
        // Allocate mesh vertices, registering with renderer using the OVR vertex format.
        ovrDistortionMesh meshData;
        ovrHmd_CreateDistortionMesh(Hmd, (ovrEyeType) eyeNum, eyeFov[eyeNum],
                                    ovrDistortionCap_Chromatic | ovrDistortionCap_TimeWarp, &meshData);

        //Create datas: de-interleave the SDK's vertex struct into one array
        //per vertex attribute, as expected by wzCreateMesh.
        wzVector2* vertex_pos = new wzVector2[meshData.VertexCount];
        float* vertex_posTimewarp = new float[meshData.VertexCount];
        float* vertex_posVignette = new float[meshData.VertexCount];
        wzVector2* vertex_textanR = new wzVector2[meshData.VertexCount];
        wzVector2* vertex_textanG = new wzVector2[meshData.VertexCount];
        wzVector2* vertex_textanB = new wzVector2[meshData.VertexCount];

        //data copy
        for(unsigned int i = 0; i < meshData.VertexCount; i++) {
            vertex_pos[i].x = meshData.pVertexData[i].ScreenPosNDC.x;
            vertex_pos[i].y = meshData.pVertexData[i].ScreenPosNDC.y;
            vertex_posTimewarp[i] = meshData.pVertexData[i].TimeWarpFactor;
            vertex_posVignette[i] = meshData.pVertexData[i].VignetteFactor;
            vertex_textanR[i].x = meshData.pVertexData[i].TanEyeAnglesR.x;
            vertex_textanR[i].y = meshData.pVertexData[i].TanEyeAnglesR.y;
            vertex_textanG[i].x = meshData.pVertexData[i].TanEyeAnglesG.x;
            vertex_textanG[i].y = meshData.pVertexData[i].TanEyeAnglesG.y;
            vertex_textanB[i].x = meshData.pVertexData[i].TanEyeAnglesB.x;
            vertex_textanB[i].y = meshData.pVertexData[i].TanEyeAnglesB.y;
        }

        void* vertex_pointer[] = {vertex_pos,vertex_posTimewarp,vertex_posVignette,vertex_textanR,vertex_textanG,vertex_textanB};
        if(wzCreateMesh(&MeshBuffer[eyeNum], vertex_pointer, ve_var,
            meshData.pIndexData, meshData.VertexCount, meshData.IndexCount)) {
            MessageBoxA(NULL, "Lens Distort Mesh Error.", "", MB_OK);
            // Free the temporary attribute arrays before bailing out.
            delete[] vertex_pos;
            delete[] vertex_posTimewarp;
            delete[] vertex_posVignette;
            delete[] vertex_textanR;
            delete[] vertex_textanG;
            delete[] vertex_textanB;
            return; //error
        }
        wzChangeDrawMode(&MeshBuffer[eyeNum],WZ_MESH_DF_TRIANGLELIST);

        delete[] vertex_pos;
        delete[] vertex_posTimewarp;
        delete[] vertex_posVignette;
        delete[] vertex_textanR;
        delete[] vertex_textanG;
        delete[] vertex_textanB;

        // SDK-owned mesh memory can be freed once copied into our buffers.
        ovrHmd_DestroyDistortionMesh(&meshData);

        //Create eye render description for use later
        EyeRenderDesc[eyeNum] = ovrHmd_GetRenderDesc(Hmd, (ovrEyeType) eyeNum,  eyeFov[eyeNum]);

        //Do scale and offset
        ovrHmd_GetRenderScaleAndOffset(eyeFov[eyeNum],EyeRenderTargetSize, EyeRenderViewport[eyeNum], UVScaleOffset[eyeNum]);
    }

    //Create shader
    if(wzCreateShader(&LensShader, ols_vertexshader,ols_flagshader, ve_var)) {
        MessageBoxA(NULL, "Lens Shader Compile Error.", "", MB_OK);
        return;
    }

    if(latency)
        ovrHmd_SetEnabledCaps(Hmd, ovrHmdCap_DynamicPrediction);    //ovrHmdCap_LowPersistence

    // Start the sensor which informs of the Rift's pose and motion
    ovrHmd_ConfigureTracking(Hmd, ovrTrackingCap_Orientation | ovrTrackingCap_MagYawCorrection, 0);  //not use : ovrTrackingCap_Position

    m_isReady = false;
}
// Optionally enable Oculus Rift support (guarded by the "Renderer.OVR"
// config flag): create the HMD (or a fake DK2), allocate the shared eye
// render texture + depth texture and framebuffer, and configure SDK-side
// distortion rendering against the SDL window.
// Throws runtime_error on window-info, rendering-config, attach, framebuffer,
// or tracking failures.
void Renderer::initOVR()
{
    ovr_Initialize();

    Config& config = Core::get().config();

    // OVR support is opt-in via configuration; default off.
    if(!config.getBool("Renderer.OVR", false))
    {
        return;
    }

    hmd_ = ovrHmd_Create(0);

    if(!hmd_)
    {
        fprintf(stderr, "Failed to create OVR HMD, falling back to fake one\n");
        hmd_ = ovrHmd_CreateDebug(ovrHmd_DK2);
    }

    // Shared render texture: both eyes side by side, max of the two heights.
    ovrSizei leftEyeTexSize = ovrHmd_GetFovTextureSize(hmd_, ovrEye_Left, hmd_->DefaultEyeFov[ovrEye_Left], 1.0f);
    ovrSizei rightEyeTexSize = ovrHmd_GetFovTextureSize(hmd_, ovrEye_Right, hmd_->DefaultEyeFov[ovrEye_Right], 1.0f);

    renderTexSize_.w = leftEyeTexSize.w + rightEyeTexSize.w;
    renderTexSize_.h = max(leftEyeTexSize.h, rightEyeTexSize.h);

    glGenTextures(1, &renderTex_);
    glBindTexture(GL_TEXTURE_2D, renderTex_);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, renderTexSize_.w, renderTexSize_.h, 0, GL_RGBA, GL_UNSIGNED_BYTE, nullptr);

    // Read back the size actually allocated (driver may clamp to HW limits).
    glGetTexLevelParameteriv(GL_TEXTURE_2D, 0, GL_TEXTURE_WIDTH, &renderTexSize_.w);
    glGetTexLevelParameteriv(GL_TEXTURE_2D, 0, GL_TEXTURE_HEIGHT, &renderTexSize_.h);

    glGenTextures(1, &depthTex_);
    glBindTexture(GL_TEXTURE_2D, depthTex_);
    // NOTE(review): GL_UNSIGNED_BYTE as the client type for a 24-bit depth
    // texture is unusual (GL_UNSIGNED_INT is typical); with nullptr data most
    // drivers ignore it, but confirm this does not error on stricter drivers.
    glTexImage2D(GL_TEXTURE_2D, 0, GL_DEPTH_COMPONENT24, renderTexSize_.w, renderTexSize_.h, 0, GL_DEPTH_COMPONENT, GL_UNSIGNED_BYTE, nullptr);

    // Left eye renders into the left half, right eye into the right half.
    eyeViewport_[ovrEye_Left].Pos.x = 0;
    eyeViewport_[ovrEye_Left].Pos.y = 0;
    eyeViewport_[ovrEye_Left].Size.w = renderTexSize_.w / 2;
    eyeViewport_[ovrEye_Left].Size.h = renderTexSize_.h;

    eyeViewport_[ovrEye_Right].Pos.x = renderTexSize_.w / 2;
    eyeViewport_[ovrEye_Right].Pos.y = 0;
    eyeViewport_[ovrEye_Right].Size.w = renderTexSize_.w / 2;
    eyeViewport_[ovrEye_Right].Size.h = renderTexSize_.h;

    // Both eyes sample the same texture with different viewports.
    eyeTexture_[ovrEye_Left].OGL.Header.API = ovrRenderAPI_OpenGL;
    eyeTexture_[ovrEye_Left].OGL.Header.TextureSize = renderTexSize_;
    eyeTexture_[ovrEye_Left].OGL.Header.RenderViewport = eyeViewport_[ovrEye_Left];
    eyeTexture_[ovrEye_Left].OGL.TexId = renderTex_;

    eyeTexture_[ovrEye_Right].OGL.Header.API = ovrRenderAPI_OpenGL;
    eyeTexture_[ovrEye_Right].OGL.Header.TextureSize = renderTexSize_;
    eyeTexture_[ovrEye_Right].OGL.Header.RenderViewport = eyeViewport_[ovrEye_Right];
    eyeTexture_[ovrEye_Right].OGL.TexId = renderTex_;

    ovrSizei targetSize;
    SDL_GetWindowSize(window_, &targetSize.w, &targetSize.h);

    // Native window info is needed to hand the window/DC to the SDK.
    SDL_SysWMinfo wmInfo;
    SDL_VERSION(&wmInfo.version);

    if(!SDL_GetWindowWMInfo(window_, &wmInfo))
    {
        throw runtime_error("Failed to get window info");
    }

    ovrGLConfig cfg;
    cfg.OGL.Header.API = ovrRenderAPI_OpenGL;
    cfg.OGL.Header.RTSize = targetSize;
    cfg.OGL.Header.Multisample = 1; // yes?
#if defined(OVR_OS_WIN32)
    cfg.OGL.Window = wmInfo.info.win.window;
    cfg.OGL.DC = GetDC(wmInfo.info.win.window);
#elif defined(OVR_OS_MAC)
    // Mac does not have any fields
#else
#error Implement for this OS.
#endif

    unsigned int distortionCaps = ovrDistortionCap_Chromatic|ovrDistortionCap_TimeWarp|ovrDistortionCap_Overdrive;

    if(!ovrHmd_ConfigureRendering(hmd_, &cfg.Config, distortionCaps, hmd_->DefaultEyeFov, eyeRenderDesc_))
    {
        throw runtime_error("Failed to configure HMD rendering");
    }

#ifdef OVR_OS_WIN32
    // Direct mode on Windows requires attaching the SDK to the window.
    if(!ovrHmd_AttachToWindow(hmd_, wmInfo.info.win.window, nullptr, nullptr))
    {
        throw runtime_error("Failed to attach HMD to window");
    }
#endif

    // Framebuffer wrapping the color + depth textures created above.
    glGenFramebuffers(1, &framebuffer_);
    glBindFramebuffer(GL_FRAMEBUFFER, framebuffer_);
    glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, renderTex_, 0);
    glFramebufferTexture2D(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_TEXTURE_2D, depthTex_, 0);

    if(glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE)
    {
        throw runtime_error("Framebuffer not complete");
    }

    glBindFramebuffer(GL_FRAMEBUFFER, 0);

    unsigned int trackingCaps = ovrTrackingCap_Orientation|ovrTrackingCap_Position;

    if(!ovrHmd_ConfigureTracking(hmd_, trackingCaps, 0))
    {
        throw runtime_error("Failed to configure HMD tracking");
    }

    // warning will disappear as soon as the timeout expires
    ovrHmd_DismissHSWDisplay(hmd_);
}
// Initialize the Oculus camera component: create the HMD (or a fake DK2),
// start tracking, configure SDK-side distortion rendering against the game's
// GL window, attach to the window, and build the shared eye render target
// and per-eye texture descriptions.
void OcculusCameraComponent::init()
{
    ovr_Initialize();
    // This component presents through the SDK, so disable the engine's own
    // buffer swapping.
    parent->getStage()->getGame()->getGraphicsHandle()->setAutoBufferSwap( false );

    hmd = ovrHmd_Create(0);

    if (hmd)
    {
        ovrSizei resolution = hmd->Resolution;
        resolution;     // no-op: silences the unused-variable warning
    }
    else
    {
        // No physical headset: fall back to a synthetic DK2 device.
        hmd = ovrHmd_CreateDebug( ovrHmdType::ovrHmd_DK2 );
    }

    // Start the sensor which provides the Rift's pose and motion.
    //ovrHmd_SetEnabledCaps(hmd, ovrHmdCap_LowPersistence | ovrHmdCap_DynamicPrediction);
    ovrHmd_ConfigureTracking(hmd, ovrTrackingCap_Orientation | ovrTrackingCap_MagYawCorrection | ovrTrackingCap_Position, 0);
    ovrHmd_RecenterPose( hmd );

    ovrFovPort eyeFov[2] = { hmd->DefaultEyeFov[0], hmd->DefaultEyeFov[1] } ;

    // SDK rendering config: full HMD resolution on the game window/DC.
    ovrGLConfig oglConfig;
    oglConfig.OGL.Header.API = ovrRenderAPI_OpenGL;
    oglConfig.OGL.Header.RTSize = OVR::Sizei(hmd->Resolution.w, hmd->Resolution.h);
    oglConfig.OGL.Header.Multisample = 1;
    oglConfig.OGL.Window = parent->getStage()->getGame()->getGraphicsHandle()->getHandle();
    oglConfig.OGL.DC = parent->getStage()->getGame()->getGraphicsHandle()->getHDC();

// NOTE(review): linking the debug OVR lib from inside this function body is
// unusual; consider moving to the build system or a translation-unit scope.
#pragma comment(lib,"libovrd.lib")

    ovrBool result = ovrHmd_ConfigureRendering( hmd, &oglConfig.Config,
                                                ovrDistortionCap_Chromatic | ovrDistortionCap_Vignette |
                                                ovrDistortionCap_TimeWarp | ovrDistortionCap_Overdrive,
                                                eyeFov, EyeRenderDesc);
    result;     // no-op: result intentionally unchecked (silences warning)

    ovrHmd_AttachToWindow(hmd, oglConfig.OGL.Window, NULL, NULL);

    //Sets up FBOS
    // Configure Stereo settings: both eyes share one render target, side by
    // side, with the taller of the two recommended heights.
    OVR::Sizei recommenedTex0Size = ovrHmd_GetFovTextureSize(hmd, ovrEye_Left,  hmd->DefaultEyeFov[0], 1.0f);
    OVR::Sizei recommenedTex1Size = ovrHmd_GetFovTextureSize(hmd, ovrEye_Right, hmd->DefaultEyeFov[1], 1.0f);
    OVR::Sizei renderTargetSize;
    renderTargetSize.w = recommenedTex0Size.w + recommenedTex1Size.w;
    renderTargetSize.h = max ( recommenedTex0Size.h, recommenedTex1Size.h );

    renderTarget = FrameBufferObject::createFrameBuffer( renderTargetSize.w, renderTargetSize.h );

    //Set up viewports: left eye = left half, right eye = right half.
    EyeRenderViewport[0].Pos  = OVR::Vector2i(0,0);
    EyeRenderViewport[0].Size = OVR::Sizei(renderTarget->width / 2, renderTarget->height);
    EyeRenderViewport[1].Pos  = OVR::Vector2i((renderTarget->width + 1) / 2, 0);
    EyeRenderViewport[1].Size = EyeRenderViewport[0].Size;

    // Both eyes sample the FBO's color texture with different viewports.
    EyeTexture[0].OGL.Header.API            = ovrRenderAPI_OpenGL;
    EyeTexture[0].OGL.Header.TextureSize    = renderTargetSize;
    EyeTexture[0].OGL.Header.RenderViewport = EyeRenderViewport[0];
    EyeTexture[0].OGL.TexId                 = renderTarget->colorTexture->textureID;

    EyeTexture[1].OGL.Header.API            = ovrRenderAPI_OpenGL;
    EyeTexture[1].OGL.Header.TextureSize    = renderTargetSize;
    EyeTexture[1].OGL.Header.RenderViewport = EyeRenderViewport[1];
    EyeTexture[1].OGL.TexId                 = renderTarget->colorTexture->textureID;
}
//-------------------------------------------------------------------------------------
// Initialize LibOVR, the Rift, the output window/graphics device, the shared
// eye render target, and either SDK-side distortion rendering (SDK_RENDER)
// or an app-side distortion mesh + GLSL shader pipeline.
// Returns 0 on success, 1 on failure.
int Init()
{
    // Initializes LibOVR, and the Rift (HMD may already exist on re-init)
    ovr_Initialize();
    if (!HMD)
    {
        HMD = ovrHmd_Create(0);
        if (!HMD)
        {
            MessageBoxA(NULL, "Oculus Rift not detected.", "", MB_OK);
            return(1);
        }
        if (HMD->ProductName[0] == '\0')
            MessageBoxA(NULL, "Rift detected, display not enabled.", "", MB_OK);
    }

    //Setup Window and Graphics - use window frame if relying on Oculus driver
    // (i.e. direct mode; extended-desktop mode runs borderless on the Rift display)
    const int backBufferMultisample = 1;
    bool UseAppWindowFrame = (HMD->HmdCaps & ovrHmdCap_ExtendDesktop) ? false : true;
    HWND window = Util_InitWindowAndGraphics(Recti(HMD->WindowsPos, HMD->Resolution),
                                             FullScreen, backBufferMultisample, UseAppWindowFrame,&pRender);
    if (!window) return 1;
    ovrHmd_AttachToWindow(HMD, window, NULL, NULL);

    //Configure Stereo settings: both eyes side by side in one render target,
    //height is the taller of the two recommended sizes.
    Sizei recommenedTex0Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Left,  HMD->DefaultEyeFov[0], 1.0f);
    Sizei recommenedTex1Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Right, HMD->DefaultEyeFov[1], 1.0f);
    Sizei RenderTargetSize;
    RenderTargetSize.w = recommenedTex0Size.w + recommenedTex1Size.w;
    RenderTargetSize.h = max ( recommenedTex0Size.h, recommenedTex1Size.h );

    const int eyeRenderMultisample = 1;
    pRendertargetTexture = pRender->CreateTexture(Texture_RGBA | Texture_RenderTarget | eyeRenderMultisample,
                                                  RenderTargetSize.w, RenderTargetSize.h, NULL);
    // The actual RT size may be different due to HW limits.
    RenderTargetSize.w = pRendertargetTexture->GetWidth();
    RenderTargetSize.h = pRendertargetTexture->GetHeight();

    // Initialize eye rendering information.
    // The viewport sizes are re-computed in case RenderTargetSize changed due to HW limitations.
    ovrFovPort eyeFov[2] = { HMD->DefaultEyeFov[0], HMD->DefaultEyeFov[1] } ;

    // Left eye = left half, right eye = right half of the shared target.
    EyeRenderViewport[0].Pos  = Vector2i(0,0);
    EyeRenderViewport[0].Size = Sizei(RenderTargetSize.w / 2, RenderTargetSize.h);
    EyeRenderViewport[1].Pos  = Vector2i((RenderTargetSize.w + 1) / 2, 0);
    EyeRenderViewport[1].Size = EyeRenderViewport[0].Size;

#if SDK_RENDER
    // Query OGL texture data.
    EyeTexture[0].OGL.Header.API            = ovrRenderAPI_OpenGL;
    EyeTexture[0].OGL.Header.TextureSize    = RenderTargetSize;
    EyeTexture[0].OGL.Header.RenderViewport = EyeRenderViewport[0];
    EyeTexture[0].OGL.TexId                 = pRendertargetTexture->TexId;

    // Right eye uses the same texture, but different rendering viewport.
    EyeTexture[1] = EyeTexture[0];
    EyeTexture[1].OGL.Header.RenderViewport = EyeRenderViewport[1];

    // Configure OpenGL.
    ovrGLConfig oglcfg;
    oglcfg.OGL.Header.API         = ovrRenderAPI_OpenGL;
    oglcfg.OGL.Header.RTSize      = Sizei(HMD->Resolution.w, HMD->Resolution.h);
    oglcfg.OGL.Header.Multisample = backBufferMultisample;
    oglcfg.OGL.Window             = window;
    oglcfg.OGL.DC                 = GetDC(window);

    if (!ovrHmd_ConfigureRendering(HMD, &oglcfg.Config,
                                   ovrDistortionCap_Chromatic | ovrDistortionCap_Vignette |
                                   ovrDistortionCap_TimeWarp | ovrDistortionCap_Overdrive,
                                   eyeFov, EyeRenderDesc))
        return(1);
#else
    // App-rendered distortion: build the shaders and per-eye distortion
    // meshes ourselves.

    //Distortion vertex shader
    const char* vertexShader =
        "#version 110                                                                           \n"
        "uniform vec2 EyeToSourceUVScale;                                                       \n"
        "uniform vec2 EyeToSourceUVOffset;                                                      \n"
        "uniform mat4 EyeRotationStart;                                                         \n"
        "uniform mat4 EyeRotationEnd;                                                           \n"
        "attribute vec2 Position;                                                               \n"
        "attribute vec2 inTWLF_V;                                                               \n"
        "attribute vec2 inTexCoord0;                                                            \n"
        "attribute vec2 inTexCoord1;                                                            \n"
        "attribute vec2 inTexCoord2;                                                            \n"
        "varying vec4 oPosition;                                                                \n"
        "varying vec2 oTexCoord0;                                                               \n"
        "varying vec2 oTexCoord1;                                                               \n"
        "varying vec2 oTexCoord2;                                                               \n"
        "varying float oVignette;                                                               \n"
        "vec2 TexCoord0 = vec2(inTexCoord0.x, -inTexCoord0.y);                                  \n"
        "vec2 TexCoord1 = vec2(inTexCoord1.x, -inTexCoord1.y);                                  \n"
        "vec2 TexCoord2 = vec2(inTexCoord2.x, -inTexCoord2.y);                                  \n"
        "float timewarpLerpFactor = inTWLF_V.x;                                                 \n"
        "float Vignette = inTWLF_V.y;                                                           \n"
        "vec2 TimewarpTexCoord( in vec2 TexCoord, in mat4 rotMat )                              \n"
        "{                                                                                      \n"
        // Vertex inputs are in TanEyeAngle space for the R,G,B channels (i.e. after chromatic
        // aberration and distortion). These are now "real world" vectors in direction (x,y,1)
        // relative to the eye of the HMD. Apply the 3x3 timewarp rotation to these vectors.
        "   vec3 transformed = vec3( ( rotMat * vec4( TexCoord.xy , 1.00000, 1.00000) ).xyz );  \n"
        // Project them back onto the Z=1 plane of the rendered images.
        "   vec2 flattened = (transformed.xy / transformed.z );                                 \n"
        // Scale them into ([0,0.5],[0,1]) or ([0.5,0],[0,1]) UV lookup space (depending on eye)
        "   return ((EyeToSourceUVScale * flattened) + EyeToSourceUVOffset);                    \n"
        "}                                                                                      \n"
        "mat4 mat4_lerp( in mat4 x, in mat4 y, in mat4 s )                                      \n"
        "{                                                                                      \n"
        "    return mat4(mix(x[0],y[0],s[0]), mix(x[1],y[1],s[1]), mix(x[2],y[2],s[2]), mix(x[3],y[3],s[3]));\n"
        "}                                                                                      \n"
        "void main()                                                                            \n"
        "{                                                                                      \n"
        "   mat4 lerpedEyeRot = mat4_lerp( EyeRotationStart, EyeRotationEnd, mat4( timewarpLerpFactor));\n"
        "   oTexCoord0 = TimewarpTexCoord( TexCoord0, lerpedEyeRot);                            \n"
        "   oTexCoord1 = TimewarpTexCoord( TexCoord1, lerpedEyeRot);                            \n"
        "   oTexCoord2 = TimewarpTexCoord( TexCoord2, lerpedEyeRot);                            \n"
        "   oPosition = vec4( Position.xy , 0.500000, 1.00000);                                 \n"
        "   oVignette = Vignette;                                                               \n"
        "   gl_Position = oPosition;                                                            \n"
        "}";

    //Distortion pixel shader
    const char* pixelShader =
        "#version 110                                                                           \n"
        "uniform sampler2D Texture0;                                                            \n"
        "varying vec4 oPosition;                                                                \n"
        "varying vec2 oTexCoord0;                                                               \n"
        "varying vec2 oTexCoord1;                                                               \n"
        "varying vec2 oTexCoord2;                                                               \n"
        "varying float oVignette;                                                               \n"
        "void main()                                                                            \n"
        "{                                                                                      \n"
        // 3 samples for fixing chromatic aberrations
        "   float R = texture2D(Texture0, oTexCoord0.xy).r;                                     \n"
        "   float G = texture2D(Texture0, oTexCoord1.xy).g;                                     \n"
        "   float B = texture2D(Texture0, oTexCoord2.xy).b;                                     \n"
        "   gl_FragColor = (oVignette*vec4(R,G,B,1));                                           \n"
        "}";
    pRender->InitShaders(vertexShader, pixelShader, &Shaders);

    for ( int eyeNum = 0; eyeNum < 2; eyeNum++ )
    {
        // Allocate mesh vertices, registering with renderer using the OVR vertex format.
        ovrDistortionMesh meshData;
        ovrHmd_CreateDistortionMesh(HMD, (ovrEyeType) eyeNum, eyeFov[eyeNum],
                                    ovrDistortionCap_Chromatic | ovrDistortionCap_TimeWarp, &meshData);
        MeshVBs[eyeNum] = *pRender->CreateBuffer();
        MeshVBs[eyeNum]->Data(Buffer_Vertex,meshData.pVertexData,sizeof(ovrDistortionVertex)*meshData.VertexCount);
        MeshIBs[eyeNum] = *pRender->CreateBuffer();
        MeshIBs[eyeNum]->Data(Buffer_Index,meshData.pIndexData,sizeof(unsigned short) * meshData.IndexCount);
        // SDK-owned mesh memory can be freed once copied into our buffers.
        ovrHmd_DestroyDistortionMesh( &meshData );

        //Create eye render description for use later
        EyeRenderDesc[eyeNum] = ovrHmd_GetRenderDesc(HMD, (ovrEyeType) eyeNum,  eyeFov[eyeNum]);

        //Do scale and offset
        ovrHmd_GetRenderScaleAndOffset(eyeFov[eyeNum],RenderTargetSize, EyeRenderViewport[eyeNum], UVScaleOffset[eyeNum]);
    }
#endif

    ovrHmd_SetEnabledCaps(HMD, ovrHmdCap_LowPersistence | ovrHmdCap_DynamicPrediction);

    // Start the sensor which informs of the Rift's pose and motion
    ovrHmd_ConfigureTracking(HMD, ovrTrackingCap_Orientation | ovrTrackingCap_MagYawCorrection |
                             ovrTrackingCap_Position, 0);

    // This creates lights and models.
    pRoomScene = new Scene;
    PopulateRoomScene(pRoomScene, pRender);

    return (0);
}
//------------------------------------------------------------------------------------- int WINAPI WinMain(HINSTANCE hinst, HINSTANCE, LPSTR, int) { // Initializes LibOVR, and the Rift ovr_Initialize(); HMD = ovrHmd_Create(0); if (!HMD) { MessageBoxA(NULL,"Oculus Rift not detected.","", MB_OK); return(0); } if (HMD->ProductName[0] == '\0') MessageBoxA(NULL,"Rift detected, display not enabled.", "", MB_OK); // Setup Window and Graphics - use window frame if relying on Oculus driver bool windowed = (HMD->HmdCaps & ovrHmdCap_ExtendDesktop) ? false : true; if (!WND.InitWindowAndDevice(hinst, Recti(HMD->WindowsPos, HMD->Resolution), windowed)) return(0); WND.SetMaxFrameLatency(1); ovrHmd_AttachToWindow(HMD, WND.Window, NULL, NULL); ovrHmd_SetEnabledCaps(HMD, ovrHmdCap_LowPersistence | ovrHmdCap_DynamicPrediction); // Start the sensor which informs of the Rift's pose and motion ovrHmd_ConfigureTracking(HMD, ovrTrackingCap_Orientation | ovrTrackingCap_MagYawCorrection | ovrTrackingCap_Position, 0); // Make the eye render buffers (caution if actual size < requested due to HW limits). 
for (int eye=0; eye<2; eye++) { Sizei idealSize = ovrHmd_GetFovTextureSize(HMD, (ovrEyeType)eye, HMD->DefaultEyeFov[eye], 1.0f); pEyeRenderTexture[eye] = new ImageBuffer(true, false, idealSize); pEyeDepthBuffer[eye] = new ImageBuffer(true, true, pEyeRenderTexture[eye]->Size); EyeRenderViewport[eye].Pos = Vector2i(0, 0); EyeRenderViewport[eye].Size = pEyeRenderTexture[eye]->Size; } // Setup VR components #if SDK_RENDER #if RENDER_OPENGL ovrGLConfig oglcfg; oglcfg.OGL.Header.API = ovrRenderAPI_OpenGL; oglcfg.OGL.Header.BackBufferSize = Sizei(HMD->Resolution.w, HMD->Resolution.h); oglcfg.OGL.Header.Multisample = 1; oglcfg.OGL.Window = OGL.Window; oglcfg.OGL.DC = GetDC(OGL.Window); if (!ovrHmd_ConfigureRendering(HMD, &oglcfg.Config, ovrDistortionCap_Chromatic | ovrDistortionCap_Vignette | ovrDistortionCap_TimeWarp | ovrDistortionCap_Overdrive, HMD->DefaultEyeFov, EyeRenderDesc)) return(1); #else ovrD3D11Config d3d11cfg; d3d11cfg.D3D11.Header.API = ovrRenderAPI_D3D11; d3d11cfg.D3D11.Header.BackBufferSize = Sizei(HMD->Resolution.w, HMD->Resolution.h); d3d11cfg.D3D11.Header.Multisample = 1; d3d11cfg.D3D11.pDevice = WND.Device; d3d11cfg.D3D11.pDeviceContext = WND.Context; d3d11cfg.D3D11.pBackBufferRT = WND.BackBufferRT; d3d11cfg.D3D11.pSwapChain = WND.SwapChain; if (!ovrHmd_ConfigureRendering(HMD, &d3d11cfg.Config, ovrDistortionCap_Chromatic | ovrDistortionCap_Vignette | ovrDistortionCap_TimeWarp | ovrDistortionCap_Overdrive, HMD->DefaultEyeFov, EyeRenderDesc)) return(1); #endif #else APP_RENDER_SetupGeometryAndShaders(); #endif // Create the room model Scene roomScene(false); // Can simplify scene further with parameter if required. // Initialize Webcams and threads WebCamManager WebCamMngr(HMD); // MAIN LOOP // ========= while (!(WND.Key['Q'] && WND.Key[VK_CONTROL]) && !WND.Key[VK_ESCAPE]) { WND.HandleMessages(); float speed = 1.0f; // Can adjust the movement speed. int timesToRenderScene = 1; // Can adjust the render burden on the app. 
ovrVector3f useHmdToEyeViewOffset[2] = {EyeRenderDesc[0].HmdToEyeViewOffset, EyeRenderDesc[1].HmdToEyeViewOffset}; // Start timing #if SDK_RENDER ovrHmd_BeginFrame(HMD, 0); #else ovrHmd_BeginFrameTiming(HMD, 0); #endif // Handle key toggles for re-centering, meshes, FOV, etc. ExampleFeatures1(&speed, ×ToRenderScene, useHmdToEyeViewOffset); // Keyboard inputs to adjust player orientation if (WND.Key[VK_LEFT]) Yaw += 0.02f; if (WND.Key[VK_RIGHT]) Yaw -= 0.02f; // Keyboard inputs to adjust player position if (WND.Key['W']||WND.Key[VK_UP]) Pos+=Matrix4f::RotationY(Yaw).Transform(Vector3f(0,0,-speed*0.05f)); if (WND.Key['S']||WND.Key[VK_DOWN]) Pos+=Matrix4f::RotationY(Yaw).Transform(Vector3f(0,0,+speed*0.05f)); if (WND.Key['D']) Pos+=Matrix4f::RotationY(Yaw).Transform(Vector3f(+speed*0.05f,0,0)); if (WND.Key['A']) Pos+=Matrix4f::RotationY(Yaw).Transform(Vector3f(-speed*0.05f,0,0)); Pos.y = ovrHmd_GetFloat(HMD, OVR_KEY_EYE_HEIGHT, Pos.y); // Animate the cube if (speed) roomScene.Models[0]->Pos = Vector3f(9*sin(0.01f*clock),3,9*cos(0.01f*clock)); // Get both eye poses simultaneously, with IPD offset already included. ovrPosef temp_EyeRenderPose[2]; ovrHmd_GetEyePoses(HMD, 0, useHmdToEyeViewOffset, temp_EyeRenderPose, NULL); // Update textures with WebCams' frames WebCamMngr.Update(); // Render the two undistorted eye views into their render buffers. for (int eye = 0; eye < 2; eye++) { ImageBuffer * useBuffer = pEyeRenderTexture[eye]; ovrPosef * useEyePose = &EyeRenderPose[eye]; float * useYaw = &YawAtRender[eye]; bool clearEyeImage = true; bool updateEyeImage = true; // Handle key toggles for half-frame rendering, buffer resolution, etc. 
ExampleFeatures2(eye, &useBuffer, &useEyePose, &useYaw, &clearEyeImage, &updateEyeImage); if (clearEyeImage) #if RENDER_OPENGL WND.ClearAndSetRenderTarget(useBuffer, Recti(EyeRenderViewport[eye])); #else WND.ClearAndSetRenderTarget(useBuffer->TexRtv, pEyeDepthBuffer[eye], Recti(EyeRenderViewport[eye])); #endif if (updateEyeImage) { // Write in values actually used (becomes significant in Example features) *useEyePose = temp_EyeRenderPose[eye]; *useYaw = Yaw; // Get view and projection matrices (note near Z to reduce eye strain) Matrix4f rollPitchYaw = Matrix4f::RotationY(Yaw); Matrix4f finalRollPitchYaw = rollPitchYaw * Matrix4f(useEyePose->Orientation); Vector3f finalUp = finalRollPitchYaw.Transform(Vector3f(0, 1, 0)); Vector3f finalForward = finalRollPitchYaw.Transform(Vector3f(0, 0, -1)); Vector3f shiftedEyePos = Pos + rollPitchYaw.Transform(useEyePose->Position); Matrix4f view = Matrix4f::LookAtRH(shiftedEyePos, shiftedEyePos + finalForward, finalUp); Matrix4f proj = ovrMatrix4f_Projection(EyeRenderDesc[eye].Fov, 0.2f, 1000.0f, true); // Keyboard input to switch from "look through" to scene mode static bool bOldLookThrough = false; static bool bLookThrough = true; if (WND.Key['X'] && bOldLookThrough != WND.Key['X']) { bLookThrough = !bLookThrough; } bOldLookThrough = WND.Key['X']; if(!bLookThrough) { // Render the scene for (int t=0; t<timesToRenderScene; t++) roomScene.Render(view, proj.Transposed()); WebCamMngr.DrawBoard(view, proj.Transposed()); } else { WebCamMngr.DrawLookThrough(eye); } } } // Do distortion rendering, Present and flush/sync #if SDK_RENDER #if RENDER_OPENGL ovrGLTexture eyeTexture[2]; // Gather data for eye textures for (int eye = 0; eye<2; eye++) { eyeTexture[eye].OGL.Header.API = ovrRenderAPI_OpenGL; eyeTexture[eye].OGL.Header.TextureSize = pEyeRenderTexture[eye]->Size; eyeTexture[eye].OGL.Header.RenderViewport = EyeRenderViewport[eye]; eyeTexture[eye].OGL.TexId = pEyeRenderTexture[eye]->TexId; } #else ovrD3D11Texture eyeTexture[2]; // 
Gather data for eye textures for (int eye = 0; eye<2; eye++) { eyeTexture[eye].D3D11.Header.API = ovrRenderAPI_D3D11; eyeTexture[eye].D3D11.Header.TextureSize = pEyeRenderTexture[eye]->Size; eyeTexture[eye].D3D11.Header.RenderViewport = EyeRenderViewport[eye]; eyeTexture[eye].D3D11.pTexture = pEyeRenderTexture[eye]->Tex; eyeTexture[eye].D3D11.pSRView = pEyeRenderTexture[eye]->TexSv; } #endif ovrHmd_EndFrame(HMD, EyeRenderPose, &eyeTexture[0].Texture); #else APP_RENDER_DistortAndPresent(); #endif } WebCamMngr.StopCapture(); // Release and close down ovrHmd_Destroy(HMD); ovr_Shutdown(); WND.ReleaseWindow(hinst); return(0); }
int Init() { ovr_Initialize(); HMD = ovrHmd_Create(0); if (!HMD) { MessageBox(NULL, "Oculus Rift not detected.", "", MB_OK); return 1; } if (HMD->ProductName[0] == '\0') { MessageBox(NULL, "Rift detected, display not enabled.", "", MB_OK); } //Setup Window and Graphics - use window frame if relying on Oculus driver const int backBufferMultisample = 1; bool UseAppWindowFrame = true; HWND window = Util_InitWindowAndGraphics(Recti(HMD->WindowsPos, HMD->Resolution), FullScreen, backBufferMultisample, UseAppWindowFrame, &pRender); if (!window) return 1; ovrHmd_AttachToWindow(HMD, window, NULL, NULL); Sizei recommenedTex0Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Left, HMD->DefaultEyeFov[0], 1.0f); Sizei recommenedTex1Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Right, HMD->DefaultEyeFov[1], 1.0f); Sizei RenderTargetSize; RenderTargetSize.w = recommenedTex0Size.w + recommenedTex1Size.w; RenderTargetSize.h = max(recommenedTex0Size.h, recommenedTex1Size.h); RenderTargetSize.w = HMD->Resolution.w; RenderTargetSize.h = HMD->Resolution.h; //const int eyeRenderMultisample = 1; pRendertargetTexture = pRender->CreateRenderTarget(RenderTargetSize.w/2, RenderTargetSize.h/2); //pRendertargetTexture = pRender->CreateRenderTarget(512, 512); RenderTargetSize.w = pRendertargetTexture->Width; RenderTargetSize.h = pRendertargetTexture->Height; IDirect3DSurface9 *zb = 0; pRender->Device->GetDepthStencilSurface(&zb); D3DSURFACE_DESC d; zb->GetDesc(&d); // Initialize eye rendering information. // The viewport sizes are re-computed in case RenderTargetSize due to HW limitations. 
ovrFovPort eyeFov[2] = { HMD->DefaultEyeFov[0], HMD->DefaultEyeFov[1] }; EyeRenderViewport[0].Pos = Vector2i(0, 0); EyeRenderViewport[0].Size = Sizei(RenderTargetSize.w / 2, RenderTargetSize.h); EyeRenderViewport[1].Pos = Vector2i((RenderTargetSize.w + 1) / 2, 0); EyeRenderViewport[1].Size = EyeRenderViewport[0].Size; // --------------------- DistortionShaders = pRender->CreateShaderSet(); DistortionShaders->SetShader(pRender->LoadBuiltinShader(Shader_Vertex, VShader_Distortion)); DistortionShaders->SetShader(pRender->LoadBuiltinShader(Shader_Pixel, PShader_Distortion)); DistortionDecl = VertexDecl::GetDecl(VertexType_Distortion); for (int eyeNum = 0; eyeNum < 2; ++eyeNum) { ovrDistortionMesh meshData; ovrHmd_CreateDistortionMesh(HMD, (ovrEyeType)eyeNum, eyeFov[eyeNum], ovrDistortionCap_Chromatic | ovrDistortionCap_TimeWarp, &meshData); MeshVBs[eyeNum] = pRender->CreateVertexBuffer(); MeshVBs[eyeNum]->Data(meshData.pVertexData, sizeof(ovrDistortionVertex)*meshData.VertexCount); MeshIBs[eyeNum] = pRender->CreateIndexBuffer(); MeshIBs[eyeNum]->Data(meshData.pIndexData, sizeof(unsigned short)*meshData.IndexCount); MeshVBCnts[eyeNum] = meshData.VertexCount; MeshIBCnts[eyeNum] = meshData.IndexCount; ovrHmd_DestroyDistortionMesh(&meshData); EyeRenderDesc[eyeNum] = ovrHmd_GetRenderDesc(HMD, (ovrEyeType)eyeNum, eyeFov[eyeNum]); ovrHmd_GetRenderScaleAndOffset(eyeFov[eyeNum], RenderTargetSize, EyeRenderViewport[eyeNum], UVScaleOffset[eyeNum]); } ovrHmd_SetEnabledCaps(HMD, ovrHmdCap_LowPersistence | ovrHmdCap_DynamicPrediction); ovrHmd_ConfigureTracking(HMD, ovrTrackingCap_Orientation | ovrTrackingCap_MagYawCorrection | ovrTrackingCap_Position, 0); // --------------------- pRoomScene = new Scene; PopulateRoomScene(pRoomScene, pRender); // texture model ShaderSet* ss = pRender->CreateShaderSet(); ss->SetShader(pRender->LoadBuiltinShader(Shader_Vertex, VShader_MVP_UV)); ss->SetShader(pRender->LoadBuiltinShader(Shader_Pixel, PShader_UV)); Model<VertexXYZUV> *pModel2 = new 
Model<VertexXYZUV>(); pModel2->Decl = VertexDecl::GetDecl(VertexType_XYZUV); pModel2->Fill = new ShaderFill(ss); //Texture* ttt = new Texture(pRender); //ttt->LoadFromFile("face.tga"); pModel2->Fill->SetTexture(0, pRendertargetTexture); pModel2->AddVertex(VertexXYZUV(0.5f, -1.0f, 0.0f, 0.0f, 0.0f)); pModel2->AddVertex(VertexXYZUV(2.5f, -1.0f, 0.0f, 1.0f, 0.0f)); pModel2->AddVertex(VertexXYZUV(0.5f, 1.0f, 0.0f, 0.0f, 1.0f)); pModel2->AddVertex(VertexXYZUV(2.5f, 1.0f, 0.0f, 1.0f, 1.0f)); pModel2->AddVertex(VertexXYZUV(-1.0f, -1.5f, -1.0f, 0.0f, 0.0f)); pModel2->AddVertex(VertexXYZUV(1.0f, -1.5f, -1.0f, 1.0f, 0.0f)); pModel2->AddVertex(VertexXYZUV(-1.0f, -1.5f, 1.0f, 0.0f, 1.0f)); pModel2->AddVertex(VertexXYZUV(1.0f, -1.5f, 1.0f, 1.0f, 1.0f)); pModel2->AddTriangle(0, 1, 2); pModel2->AddTriangle(2, 1, 3); pModel2->AddTriangle(4, 5, 6); pModel2->AddTriangle(6, 5, 7); pScene = new Scene; pScene->World.Add(pModel2); return (0); }
// Attaches the Rift to an externally supplied native window handle (e.g. from a
// host application), creates the shared eye render target and configures D3D11
// SDK distortion rendering plus head tracking.
//
// Returns  1 on success,
//         -1 if window/graphics initialization fails (previously ignored),
//         -2 if ovrHmd_ConfigureRendering fails.
//
// FIX: the result of Util_InitWindowAndGraphics was silently discarded — a
// failure would crash later at pRender->CreateTexture. Removed the unused
// UseAppWindowFrame local (a literal was passed instead) and the dead
// commented-out debug code.
GMO double linkWindowHandle(void* windowHandle)
{
    const int eyeRenderMultisample  = 1;
    const int backBufferMultisample = 1;

    HWND handle = (HWND) windowHandle;
    hWnd = handle;
    ovrHmd_AttachToWindow(HMD, handle, NULL, NULL);

    // Recommended per-eye texture sizes, combined side-by-side into one target.
    Sizei recommenedTex0Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Left,  HMD->DefaultEyeFov[0], 1.0f);
    Sizei recommenedTex1Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Right, HMD->DefaultEyeFov[1], 1.0f);
    Sizei RenderTargetSize;
    RenderTargetSize.w = recommenedTex0Size.w + recommenedTex1Size.w;
    RenderTargetSize.h = max(recommenedTex0Size.h, recommenedTex1Size.h);

    HWND window = Util_InitWindowAndGraphics(Recti(HMD->WindowsPos, HMD->Resolution),
                                             FullScreen, backBufferMultisample, 1, &pRender, handle);
    if (!window)
        return -1;

    pRendertargetTexture = pRender->CreateTexture(Texture_RGBA | Texture_RenderTarget | eyeRenderMultisample,
                                                  RenderTargetSize.w, RenderTargetSize.h, NULL);
    // The actual RT size may be different due to HW limits.
    RenderTargetSize.w = pRendertargetTexture->GetWidth();
    RenderTargetSize.h = pRendertargetTexture->GetHeight();

    // Initialize eye rendering information.
    // The viewport sizes are re-computed in case RenderTargetSize changed due to HW limitations.
    ovrFovPort eyeFov[2] = { HMD->DefaultEyeFov[0], HMD->DefaultEyeFov[1] };

    EyeRenderViewport[0].Pos  = Vector2i(0, 0);
    EyeRenderViewport[0].Size = Sizei(RenderTargetSize.w / 2, RenderTargetSize.h);
    EyeRenderViewport[1].Pos  = Vector2i((RenderTargetSize.w + 1) / 2, 0);
    EyeRenderViewport[1].Size = EyeRenderViewport[0].Size;

    EyeTexture[0].D3D11.Header.API            = ovrRenderAPI_D3D11;
    EyeTexture[0].D3D11.Header.TextureSize    = RenderTargetSize;
    EyeTexture[0].D3D11.Header.RenderViewport = EyeRenderViewport[0];
    EyeTexture[0].D3D11.pTexture              = pRendertargetTexture->Tex.GetPtr();
    EyeTexture[0].D3D11.pSRView               = pRendertargetTexture->TexSv.GetPtr();

    // Right eye uses the same texture, but different rendering viewport.
    EyeTexture[1] = EyeTexture[0];
    EyeTexture[1].D3D11.Header.RenderViewport = EyeRenderViewport[1];

    // Configure d3d11.
    ovrD3D11Config d3d11cfg;
    d3d11cfg.D3D11.Header.API         = ovrRenderAPI_D3D11;
    d3d11cfg.D3D11.Header.RTSize      = Sizei(HMD->Resolution.w, HMD->Resolution.h);
    d3d11cfg.D3D11.Header.Multisample = backBufferMultisample;
    d3d11cfg.D3D11.pDevice            = pRender->Device;
    d3d11cfg.D3D11.pDeviceContext     = pRender->Context;
    d3d11cfg.D3D11.pBackBufferRT      = pRender->BackBufferRT;
    d3d11cfg.D3D11.pSwapChain         = pRender->SwapChain;

    if (!ovrHmd_ConfigureRendering(HMD, &d3d11cfg.Config,
                                   ovrDistortionCap_Chromatic | ovrDistortionCap_Vignette |
                                   ovrDistortionCap_TimeWarp | ovrDistortionCap_Overdrive,
                                   eyeFov, EyeRenderDesc))
        return -2;

    // Some settings might be changed here lateron.
    ovrHmd_SetEnabledCaps(HMD, ovrHmdCap_LowPersistence | ovrHmdCap_DynamicPrediction); // | ovrHmdCap_ExtendDesktop);

    // Start the sensor which informs of the Rift's pose and motion
    ovrHmd_ConfigureTracking(HMD, ovrTrackingCap_Orientation | ovrTrackingCap_MagYawCorrection |
                                  ovrTrackingCap_Position, 0);

    return 1;
}
// Lazily constructed singleton accessor: on first call, initializes LibOVR,
// the HMD (falling back to a debug DK2), GLFW window/context, the shared FBO
// with color texture and depth renderbuffer, per-eye textures, SDK distortion
// rendering and head tracking. Subsequent calls return the cached instance.
//
// FIXES: (1) the Linux branch wrote to an undeclared "l_Cfg" instead of the
// global "g_Cfg" used everywhere else (did not compile on Linux);
// (2) "return *oculusManager;" was inside the first-call branch, so every
// subsequent call fell off the end of a reference-returning function (UB).
OculusManager& OculusManager::getOculusManager()
{
    static OculusManager* oculusManager = NULL;
    if (oculusManager == NULL)
    {
        oculusManager = new OculusManager();

        if (!ovr_Initialize())
        {
            fprintf(stderr, "Failed to initialize the Oculus SDK");
        }

        g_Hmd = ovrHmd_Create(0);
        if (!g_Hmd)
        {
            printf("No Oculus Rift device attached, using virtual version...\n");
            g_Hmd = ovrHmd_CreateDebug(ovrHmd_DK2);
        }
        printf("initialized HMD: %s - %s\n", g_Hmd->Manufacturer, g_Hmd->ProductName);

        if (!glfwInit())
            exit(EXIT_FAILURE);

        if (l_MultiSampling)
            glfwWindowHint(GLFW_SAMPLES, 4);
        else
            glfwWindowHint(GLFW_SAMPLES, 0);

        // Direct mode == HMD not extending the desktop.
        bool l_DirectMode = ((g_Hmd->HmdCaps & ovrHmdCap_ExtendDesktop) == 0);

        GLFWmonitor* l_Monitor;
        ovrSizei l_ClientSize;
        if (l_DirectMode)
        {
            printf("Running in \"Direct\" mode...\n");
            l_Monitor = NULL;
            // Something reasonable, smaller, but maintain aspect ratio...
            l_ClientSize.w = g_Hmd->Resolution.w / 2;
            l_ClientSize.h = g_Hmd->Resolution.h / 2;
        }
        else // Extended Desktop mode...
        {
            printf("Running in \"Extended Desktop\" mode...\n");
            int l_Count;
            GLFWmonitor** l_Monitors = glfwGetMonitors(&l_Count);
            switch (l_Count)
            {
            case 0:
                printf("No monitors found, exiting...\n");
                exit(EXIT_FAILURE);
                break;
            case 1:
                printf("Two monitors expected, found only one, using primary...\n");
                l_Monitor = glfwGetPrimaryMonitor();
                break;
            case 2:
                printf("Two monitors found, using second monitor...\n");
                l_Monitor = l_Monitors[1];
                break;
            default:
                printf("More than two monitors found, using second monitor...\n");
                l_Monitor = l_Monitors[1];
            }
            l_ClientSize.w = g_Hmd->Resolution.w; // 1920 for DK2...
            l_ClientSize.h = g_Hmd->Resolution.h; // 1080 for DK2...
        }

        l_Window = glfwCreateWindow(l_ClientSize.w, l_ClientSize.h, "GLFW Oculus Rift Test", l_Monitor, NULL);
        if (!l_Window)
        {
            glfwTerminate();
            exit(EXIT_FAILURE);
        }

#if defined(_WIN32)
        if (l_DirectMode)
        {
            ovrBool l_AttachResult = ovrHmd_AttachToWindow(g_Hmd, glfwGetWin32Window(l_Window), NULL, NULL);
            if (!l_AttachResult)
            {
                printf("Could not attach to window...");
                exit(EXIT_FAILURE);
            }
        }
#endif

        glfwMakeContextCurrent(l_Window);

        glewExperimental = GL_TRUE;
        GLenum l_GlewResult = glewInit();
        if (l_GlewResult != GLEW_OK)
        {
            printf("glewInit() error.\n");
            exit(EXIT_FAILURE);
        }

        int l_Major   = glfwGetWindowAttrib(l_Window, GLFW_CONTEXT_VERSION_MAJOR);
        int l_Minor   = glfwGetWindowAttrib(l_Window, GLFW_CONTEXT_VERSION_MINOR);
        int l_Profile = glfwGetWindowAttrib(l_Window, GLFW_OPENGL_PROFILE);
        printf("OpenGL: %d.%d ", l_Major, l_Minor);
        if (l_Major >= 3) // Profiles introduced in OpenGL 3.0...
        {
            if (l_Profile == GLFW_OPENGL_COMPAT_PROFILE)
                printf("GLFW_OPENGL_COMPAT_PROFILE\n");
            else
                printf("GLFW_OPENGL_CORE_PROFILE\n");
        }
        printf("Vendor: %s\n", (char*)glGetString(GL_VENDOR));
        printf("Renderer: %s\n", (char*)glGetString(GL_RENDERER));

        ovrSizei l_EyeTextureSizes[2];
        l_EyeTextureSizes[ovrEye_Left]  = ovrHmd_GetFovTextureSize(g_Hmd, ovrEye_Left,  g_Hmd->MaxEyeFov[ovrEye_Left],  1.0f);
        l_EyeTextureSizes[ovrEye_Right] = ovrHmd_GetFovTextureSize(g_Hmd, ovrEye_Right, g_Hmd->MaxEyeFov[ovrEye_Right], 1.0f);

        // Combine for one texture for both eyes...
        g_RenderTargetSize.w = l_EyeTextureSizes[ovrEye_Left].w + l_EyeTextureSizes[ovrEye_Right].w;
        g_RenderTargetSize.h = (l_EyeTextureSizes[ovrEye_Left].h > l_EyeTextureSizes[ovrEye_Right].h
                                    ? l_EyeTextureSizes[ovrEye_Left].h
                                    : l_EyeTextureSizes[ovrEye_Right].h);

        // Create the FBO being a single one for both eyes (this is open for debate)...
        glGenFramebuffers(1, &l_FBOId);
        glBindFramebuffer(GL_FRAMEBUFFER, l_FBOId);

        // The texture we're going to render to...
        glGenTextures(1, &l_TextureId);
        // "Bind" the newly created texture : all future texture functions will modify this texture...
        glBindTexture(GL_TEXTURE_2D, l_TextureId);
        // Give an empty image to OpenGL (the last "0")
        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, g_RenderTargetSize.w, g_RenderTargetSize.h,
                     0, GL_RGBA, GL_UNSIGNED_BYTE, 0);
        // Linear filtering...
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);

        // Create Depth Buffer...
        glGenRenderbuffers(1, &l_DepthBufferId);
        glBindRenderbuffer(GL_RENDERBUFFER, l_DepthBufferId);
        glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT, g_RenderTargetSize.w, g_RenderTargetSize.h);
        glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, l_DepthBufferId);

        // Set the texture as our colour attachment #0...
        glFramebufferTexture(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, l_TextureId, 0);

        // Set the list of draw buffers...
        GLenum l_GLDrawBuffers[1] = { GL_COLOR_ATTACHMENT0 };
        glDrawBuffers(1, l_GLDrawBuffers); // "1" is the size of DrawBuffers

        // Check if everything is OK...
        GLenum l_Check = glCheckFramebufferStatus(GL_DRAW_FRAMEBUFFER);
        if (l_Check != GL_FRAMEBUFFER_COMPLETE)
        {
            printf("There is a problem with the FBO.\n");
            exit(EXIT_FAILURE);
        }

        // Unbind...
        glBindRenderbuffer(GL_RENDERBUFFER, 0);
        glBindTexture(GL_TEXTURE_2D, 0);
        glBindFramebuffer(GL_FRAMEBUFFER, 0);

        // Setup textures for each eye...

        // Left eye...
        g_EyeTextures[ovrEye_Left].Header.API                   = ovrRenderAPI_OpenGL;
        g_EyeTextures[ovrEye_Left].Header.TextureSize           = g_RenderTargetSize;
        g_EyeTextures[ovrEye_Left].Header.RenderViewport.Pos.x  = 0;
        g_EyeTextures[ovrEye_Left].Header.RenderViewport.Pos.y  = 0;
        g_EyeTextures[ovrEye_Left].Header.RenderViewport.Size   = l_EyeTextureSizes[ovrEye_Left];
        ((ovrGLTexture&)(g_EyeTextures[ovrEye_Left])).OGL.TexId = l_TextureId;

        // Right eye (mostly the same as left but with the viewport on the right side of the texture)...
        g_EyeTextures[ovrEye_Right] = g_EyeTextures[ovrEye_Left];
        g_EyeTextures[ovrEye_Right].Header.RenderViewport.Pos.x = (g_RenderTargetSize.w + 1) / 2;
        g_EyeTextures[ovrEye_Right].Header.RenderViewport.Pos.y = 0;

        // Oculus Rift eye configurations...
        g_Cfg.OGL.Header.API         = ovrRenderAPI_OpenGL;
        g_Cfg.OGL.Header.RTSize.w    = l_ClientSize.w;
        g_Cfg.OGL.Header.RTSize.h    = l_ClientSize.h;
        g_Cfg.OGL.Header.Multisample = (l_MultiSampling ? 1 : 0);
#if defined(_WIN32)
        g_Cfg.OGL.Window = glfwGetWin32Window(l_Window);
        g_Cfg.OGL.DC     = GetDC(g_Cfg.OGL.Window);
#elif defined(__linux__)
        g_Cfg.OGL.Win  = glfwGetX11Window(l_Window);  // was "l_Cfg" (undeclared)
        g_Cfg.OGL.Disp = glfwGetX11Display();
#endif

        // Enable capabilities...
        // ovrHmd_SetEnabledCaps(g_Hmd, ovrHmdCap_LowPersistence | ovrHmdCap_DynamicPrediction);

        ovrBool l_ConfigureResult = ovrHmd_ConfigureRendering(g_Hmd, &g_Cfg.Config, g_DistortionCaps,
                                                              g_Hmd->MaxEyeFov, g_EyeRenderDesc);
        glUseProgram(0); // Avoid OpenGL state leak in ovrHmd_ConfigureRendering...
        if (!l_ConfigureResult)
        {
            printf("Configure failed.\n");
            exit(EXIT_FAILURE);
        }

        // Start the sensor which provides the Rift's pose and motion...
        uint32_t l_SupportedSensorCaps = ovrTrackingCap_Orientation | ovrTrackingCap_MagYawCorrection |
                                         ovrTrackingCap_Position;
        uint32_t l_RequiredTrackingCaps = 0;
        ovrBool l_TrackingResult = ovrHmd_ConfigureTracking(g_Hmd, l_SupportedSensorCaps, l_RequiredTrackingCaps);
        if (!l_TrackingResult)
        {
            printf("Could not start tracking...");
            exit(EXIT_FAILURE);
        }

        // Projection matrici for each eye will not change at runtime, we can set them here...
        g_ProjectionMatrici[ovrEye_Left]  = ovrMatrix4f_Projection(g_EyeRenderDesc[ovrEye_Left].Fov,  0.3f, 100.0f, true);
        g_ProjectionMatrici[ovrEye_Right] = ovrMatrix4f_Projection(g_EyeRenderDesc[ovrEye_Right].Fov, 0.3f, 100.0f, true);

        // IPD offset values will not change at runtime, we can set them here...
        g_EyeOffsets[ovrEye_Left]  = g_EyeRenderDesc[ovrEye_Left].HmdToEyeViewOffset;
        g_EyeOffsets[ovrEye_Right] = g_EyeRenderDesc[ovrEye_Right].HmdToEyeViewOffset;

        ovrHmd_RecenterPose(g_Hmd);
    }
    // Return outside the first-call branch so cached calls are well-defined.
    return *oculusManager;
}
}