// Creates the HMD handle across the two supported Oculus SDK API versions.
// Returns true when an HMD handle was successfully obtained.
bool Ocudump::ovrHmdCreateVersioned()
{
#if defined(OVRSDK5)
    // SDK 0.5.x: ovrHmd_Create returns the handle directly (NULL on failure).
    hmd = ovrHmd_Create(0);
    return hmd ? true : false;
#elif defined(OVRSDK6)
    // SDK 0.6.x: ovrHmd_Create fills in the handle and returns an ovrResult.
    return (ovrHmd_Create(0, &hmd)==ovrSuccess);
#else
    // Bug fix: previously the function fell off the end (undefined behavior)
    // when neither OVRSDK5 nor OVRSDK6 was defined. Report failure instead.
    return false;
#endif
}
void OVRSDK06AppSkeleton::initHMD() { ovrInitParams initParams = { 0 }; if (ovrSuccess != ovr_Initialize(NULL)) { LOG_INFO("Failed to initialize the Oculus SDK"); } if (ovrSuccess != ovrHmd_Create(0, &m_Hmd)) { LOG_INFO("Could not create HMD"); if (ovrSuccess != ovrHmd_CreateDebug(ovrHmd_DK2, &m_Hmd)) { LOG_ERROR("Could not create Debug HMD"); } m_usingDebugHmd = true; } const ovrBool ret = ovrHmd_ConfigureTracking(m_Hmd, ovrTrackingCap_Orientation | ovrTrackingCap_MagYawCorrection | ovrTrackingCap_Position, ovrTrackingCap_Orientation); if (!OVR_SUCCESS(ret)) { LOG_ERROR("Error calling ovrHmd_ConfigureTracking"); } }
//====================================================================================== // Oculus Rift使用の初期化 //====================================================================================== int InitOculusRift() { // LibOVRの初期化 ovr_Initialize(); // 接続されている HMD を取得 HMD = ovrHmd_Create(0); if (!HMD) { // 存在しなかった MessageBoxA(NULL, "Oculus Riftがみつかりませんでした。", "", MB_OK); return(1); } // HMDの詳細情報を取得 ovrHmd_GetDesc(HMD, &HMDDesc); if (HMDDesc.DisplayDeviceName[0] == '\0') { // HMDは存在したが該当するディスプレイがなかった。 MessageBoxA(NULL, "Oculus Riftはみつかりましたが出力先ディスプレイがみつかりませんでした。", "", MB_OK); } return 0; }
/** * ERRORCODE 0 => OK * ERRORCODE 1 => No HMD found * ERRORCODE 2 => Sensor Start failed */ int init_ovr(){ // Init the OVR library ovr_Initialize(); // Create the software device and connect the physical device hmd = ovrHmd_Create(0); //if (!hmd) hmd = ovrHmd_CreateDebug(ovrHmd_DK1); if (!hmd) return 1; // Starts the sensor input with check required Capabilities if ( !ovrHmd_ConfigureTracking(hmd, hmd->TrackingCaps, hmd->TrackingCaps) ) return 2; if ( mode == MODE_DEBUG ){ recommendedTex0Size = OVR::Sizei ( 1280, 720 ); recommendedTex1Size = OVR::Sizei ( 1280, 720 ); renderTargetSize.w = 1280; renderTargetSize.h = 720; }else{ //Configuring the Texture size (bigger than screen for barreldistortion) recommendedTex0Size = ovrHmd_GetFovTextureSize(hmd, ovrEye_Left, hmd->DefaultEyeFov[0], 1.0f); recommendedTex1Size = ovrHmd_GetFovTextureSize(hmd, ovrEye_Right, hmd->DefaultEyeFov[1], 1.0f); renderTargetSize.w = recommendedTex0Size.w + recommendedTex1Size.w; renderTargetSize.h = std::max( recommendedTex0Size.h, recommendedTex1Size.h ); } return 0; }
int OVRInitialize(int debug) { unsigned int supportedTrackingCaps = ovrTrackingCap_Orientation| ovrTrackingCap_MagYawCorrection| ovrTrackingCap_Position; ovrHmdType debugHMDType = ovrHmd_None; if ( debug != 0 ) { debugHMDType = (debug == 1 ? ovrHmd_DK1 : debug == 2 ? ovrHmd_DK2 : ovrHmd_Other); } if ( ! ovr_Initialize() ) { return 0; } _OVRGlobals.HMD = ovrHmd_Create(0); // no HMD? check for vr_debug and attempt to create a debug HMD if ( ! _OVRGlobals.HMD && ( ! ( debugHMDType != ovrHmd_None ) || ! ( _OVRGlobals.HMD = ovrHmd_CreateDebug( debugHMDType ) ) ) ) { return 0; } if ( ! ovrHmd_ConfigureTracking( _OVRGlobals.HMD, supportedTrackingCaps, ovrTrackingCap_Orientation ) ) { return 0; } return 1; }
// Called once per frame: begins frame timing when an HMD handle exists,
// otherwise lazily retries HMD creation until a device becomes available.
void gkOculus::OnFrameBegin()
{
    if (HMD)
    {
        // Frame index 0 lets the SDK manage frame numbering itself.
        ovrHmd_BeginFrameTiming(HMD, 0);
    }
    else
    {
        // NOTE(review): the stock ovrHmd_Create takes a single int index;
        // the extra 0.01f argument implies a project-local overload or
        // wrapper — confirm against the declaration.
        HMD = ovrHmd_Create(0, 0.01f);
        if (!HMD)
        {
            // No headset attached; try again next frame.
            //MessageBoxA(NULL, "Oculus Rift not detected.", "", MB_OK);
            //return(1);
            return;
        }
        // An empty product name means the Rift was detected but its display
        // output is not enabled; defer initialization.
        if (HMD->ProductName[0] == '\0')
        {
            //MessageBoxA(NULL, "Rift detected, display not enabled.", "", MB_OK);
            return;
        }
        InitHMD();
    }
}
///@brief Initializes LibOVR's HMD handle (real or simulated), starts tracking,
/// sizes the shared render target, and configures SDK distortion rendering.
///@param _devicePixelAspect pixel-density scale forwarded to ovrHmd_GetFovTextureSize
void OculusInterface::initOculus(float _devicePixelAspect)
{
    m_devicePixelAspect=_devicePixelAspect;
    std::cout<<"setting device aspect "<<m_devicePixelAspect<<"\n";
    m_hmd = ovrHmd_Create(0);
    if (!m_hmd)
    {
        std::cerr<<"Unable to create HMD: "<< ovrHmd_GetLastError(NULL)<<std::endl;
        std::cerr<<"Attempting to run without HMD\n";
        // If we didn't detect an Hmd, create a simulated one for debugging.
        m_hmd = ovrHmd_CreateDebug(ovrHmd_DK1);
        if (!m_hmd)
        {
            // Failed Hmd creation.
            exit(EXIT_FAILURE);
        }
    }
    // Window dimensions come straight from the headset's native resolution.
    m_windowWidth=m_hmd->Resolution.w;
    m_windowHeight=m_hmd->Resolution.h;
    oculusDebug();
    // Start the sensor which provides the Rift's pose and motion.
    ovrHmd_ConfigureTracking(m_hmd, ovrTrackingCap_Orientation | ovrTrackingCap_MagYawCorrection | ovrTrackingCap_Position, 0);
    // Per-eye texture sizes at unit pixel density.
    m_eyeres[0] = ovrHmd_GetFovTextureSize(m_hmd, ovrEye_Left, m_hmd->DefaultEyeFov[0], 1.0);
    m_eyeres[1] = ovrHmd_GetFovTextureSize(m_hmd, ovrEye_Right, m_hmd->DefaultEyeFov[1],1.0);
    /* and create a single render target texture to encompass both eyes */
    //m_fbWidth = m_eyeres[0].w + m_eyeres[1].w;
    //m_fbHeight = m_eyeres[0].h > m_eyeres[1].h ? m_eyeres[0].h : m_eyeres[1].h;
    ovrSizei recommenedTex0Size = ovrHmd_GetFovTextureSize(m_hmd, ovrEye_Left,m_hmd->DefaultEyeFov[0], m_devicePixelAspect);
    ovrSizei recommenedTex1Size = ovrHmd_GetFovTextureSize(m_hmd, ovrEye_Right,m_hmd->DefaultEyeFov[1], m_devicePixelAspect);
    // Determine dimensions to fit into a single render target.
    ovrSizei renderTargetSize; // NOTE(review): declared but never used — sizes go to m_fbWidth/m_fbHeight
    // Shared target: eye widths side by side, height is the larger eye height.
    m_fbWidth = recommenedTex0Size.w + recommenedTex1Size.w;
    m_fbHeight = std::max ( recommenedTex0Size.h, recommenedTex1Size.h );
    createRenderTarget();
    createOVRGLConfig();
    createOVRTextureBuffers();
    /* enable low-persistence display and dynamic prediction for latency compensation */
    ovrHmd_SetEnabledCaps(m_hmd, ovrHmdCap_LowPersistence | ovrHmdCap_DynamicPrediction);
    /* configure SDK-rendering and enable chromatic aberration correction, vignetting, and
     * timewarp, which shifts the image before drawing to counter any latency between the call
     * to ovrHmd_GetEyePose and ovrHmd_EndFrame. */
    unsigned int dcaps = ovrDistortionCap_Chromatic | ovrDistortionCap_Vignette | ovrDistortionCap_TimeWarp | ovrDistortionCap_Overdrive;
    if(!ovrHmd_ConfigureRendering(m_hmd, &m_glcfg.Config, dcaps, m_hmd->DefaultEyeFov, m_eyeRdesc))
    {
        fprintf(stderr, "failed to configure distortion renderer\n");
    }
}
// Acquires the first HMD and, when present, snapshots its descriptor.
sensor()
{
    session = ovrHmd_Create(0);
    if (session != 0)
    {
        desc = *session;
    }
    //res = desc.Resolution;
}
// One-time Rift setup: device acquisition (with simulated-DK1 fallback),
// sensor start (orientation required), render-target sizing, per-eye texture
// descriptions, and SDK distortion-rendering configuration.
void RiftSetup()
{
    ovr_Initialize();

    // Prefer real hardware; fall back to a simulated DK1 for development.
    s_hmd = ovrHmd_Create(0);
    if (!s_hmd)
    {
        s_hmd = ovrHmd_CreateDebug(ovrHmd_DK1);
    }
    ovrHmd_GetDesc(s_hmd, &s_hmdDesc);
    DumpHMDInfo(s_hmdDesc);

    // Orientation is both requested and required; abort when unavailable.
    uint32_t supportedSensorCaps = ovrSensorCap_Orientation;
    uint32_t requiredSensorCaps = ovrSensorCap_Orientation;
    ovrBool success = ovrHmd_StartSensor(s_hmd, supportedSensorCaps, requiredSensorCaps);
    if (!success)
    {
        fprintf(stderr, "ERROR: HMD does not have required capabilities!\n");
        exit(2);
    }

    // Figure out dimensions of render target
    ovrSizei recommenedTex0Size = ovrHmd_GetFovTextureSize(s_hmd, ovrEye_Left, s_hmdDesc.DefaultEyeFov[0], 1.0f);
    ovrSizei recommenedTex1Size = ovrHmd_GetFovTextureSize(s_hmd, ovrEye_Right, s_hmdDesc.DefaultEyeFov[1], 1.0f);
    // One shared target: eye widths side by side, max of the eye heights.
    s_renderTargetSize.w = recommenedTex0Size.w + recommenedTex1Size.w;
    s_renderTargetSize.h = std::max(recommenedTex0Size.h, recommenedTex1Size.h);
    CreateRenderTarget(s_renderTargetSize.w, s_renderTargetSize.h);

    // Left eye renders to the left half of the shared texture...
    s_eyeTexture[0].Header.API = ovrRenderAPI_OpenGL;
    s_eyeTexture[0].Header.TextureSize = s_renderTargetSize;
    s_eyeTexture[0].Header.RenderViewport.Pos = {0, 0};
    s_eyeTexture[0].Header.RenderViewport.Size = {s_renderTargetSize.w / 2, s_renderTargetSize.h};
    ((ovrGLTexture*)(&s_eyeTexture[0]))->OGL.TexId = s_fboTex;

    // ...and the right eye to the right half of the same texture.
    s_eyeTexture[1].Header.API = ovrRenderAPI_OpenGL;
    s_eyeTexture[1].Header.TextureSize = s_renderTargetSize;
    s_eyeTexture[1].Header.RenderViewport.Pos = {s_renderTargetSize.w / 2, 0};
    s_eyeTexture[1].Header.RenderViewport.Size = {s_renderTargetSize.w / 2, s_renderTargetSize.h};
    ((ovrGLTexture*)(&s_eyeTexture[1]))->OGL.TexId = s_fboTex;

    // Configure ovr SDK Rendering
    ovrGLConfig cfg;
    memset(&cfg, 0, sizeof(ovrGLConfig));
    cfg.OGL.Header.API = ovrRenderAPI_OpenGL;
    cfg.OGL.Header.RTSize = {s_config->width, s_config->height};
    cfg.OGL.Header.Multisample = 0;

    // TODO: on windows need to set HWND, on Linux need to set other parameters
    if (!ovrHmd_ConfigureRendering(s_hmd, &cfg.Config, s_hmdDesc.DistortionCaps, s_hmdDesc.DefaultEyeFov,
                                   s_eyeRenderDesc))
    {
        fprintf(stderr, "ERROR: HMD configure rendering failed!\n");
        exit(3);
    }
}
// Constructs the demo: bring up LibOVR, obtain a real or simulated DK2,
// enable orientation + position tracking, and zero the viewer position.
HelloRift()
{
    ovr_Initialize();

    hmd = ovrHmd_Create(0);
    if (hmd == nullptr)
    {
        // No hardware attached: simulate a DK2 and remember we did so.
        hmd = ovrHmd_CreateDebug(ovrHmd_DK2);
        debugDevice = true;
    }

    // Neither capability is strictly required (0 = no required caps).
    ovrHmd_ConfigureTracking(hmd,
                             ovrTrackingCap_Orientation | ovrTrackingCap_Position,
                             0);
    resetPosition();
}
// Initializes LibOVR, opens the first connected headset, starts tracking,
// and configures the per-eye viewports.
void Oculus::Initialize()
{
    ovr_Initialize();
    head_mounted_display = ovrHmd_Create(0);

    // Bug fix: ovrHmd_Create returns NULL when no headset is attached;
    // previously ovrHmd_ConfigureTracking (and the viewport setup that
    // reads the HMD) ran unconditionally on the NULL handle.
    if (!head_mounted_display)
    {
        return;
    }

    bool tracking_setup = ovrHmd_ConfigureTracking(head_mounted_display,
        ovrTrackingCap_Orientation | ovrTrackingCap_MagYawCorrection | ovrTrackingCap_Position,
        0);
    (void)tracking_setup; // NOTE(review): result unused — consider logging failures

    ConfigureEyeViewportInformation();
}
// Constructs the renderer: SDK startup, headset acquisition, tracking
// configuration, and low-persistence display mode.
OvrSdkRenderer::OvrSdkRenderer()
{
    ovr_Initialize();

    hmd = ovrHmd_Create(0);
    if (hmd == nullptr)
    {
        // No headset: leave the renderer idle.
        return;
    }

    // Request full tracking; orientation is the only hard requirement.
    ovrHmd_ConfigureTracking(hmd,
                             ovrTrackingCap_Orientation |
                                 ovrTrackingCap_MagYawCorrection |
                                 ovrTrackingCap_Position,
                             ovrTrackingCap_Orientation);

    // Turn on low persistence on top of whatever caps are already enabled.
    const int enabledCaps = ovrHmd_GetEnabledCaps(hmd);
    ovrHmd_SetEnabledCaps(hmd, enabledCaps | ovrHmdCap_LowPersistence);
}
// Constructs the VR layer: acquires an HMD (real, or simulated DK2 as a
// fallback), starts tracking, sizes/creates the framebuffer, and hands
// distortion rendering to the SDK. Throws Error on unrecoverable failure.
VR::VR(Game &game)
{
    // create HMD
    if (!(m_hmd = ovrHmd_Create(0)))
    {
        std::cerr << "couldn't create Oculus HMD, falling back to debug HMD" << std::endl;
        if (!(m_hmd = ovrHmd_CreateDebug(ovrHmd_DK2)))
            throw Error("couldn't create debug HMD");
    }
    orient_window(game);

    // enable position, rotation tracking (none of the caps are required)
    ovrHmd_ConfigureTracking(m_hmd,
        ovrTrackingCap_Orientation | ovrTrackingCap_MagYawCorrection | ovrTrackingCap_Position,
        0);

    // calculate framebuffer resolution and create framebuffer:
    // side-by-side layout — eye widths add, height is the larger of the two.
    ovrSizei eye_res[2];
    eye_res[0] = ovrHmd_GetFovTextureSize(m_hmd, ovrEye_Left, m_hmd->DefaultEyeFov[0], 1.0);
    eye_res[1] = ovrHmd_GetFovTextureSize(m_hmd, ovrEye_Right, m_hmd->DefaultEyeFov[1], 1.0);
    m_fb_width = eye_res[0].w + eye_res[1].w;
    m_fb_height = eye_res[0].h > eye_res[1].h ? eye_res[0].h : eye_res[1].h;
    update_fb();

    // fill in ovrGLConfig for the SDK's OpenGL distortion renderer
    ovrGLConfig glcfg;
    memset(&glcfg, 0, sizeof glcfg);
    glcfg.OGL.Header.API = ovrRenderAPI_OpenGL;
    glcfg.OGL.Header.RTSize = m_hmd->Resolution;
    glcfg.OGL.Header.Multisample = 1;
    glcfg.OGL.Window = GetActiveWindow();
    glcfg.OGL.DC = wglGetCurrentDC();

    // Direct-to-HMD mode (no extended desktop): bind SDK output to our window.
    if (!(m_hmd->HmdCaps & ovrHmdCap_ExtendDesktop))
        ovrHmd_AttachToWindow(m_hmd, glcfg.OGL.Window, 0, 0);

    // enable HMD, distortion capabilities and enable SDK rendering
    ovrHmd_SetEnabledCaps(m_hmd, ovrHmdCap_LowPersistence | ovrHmdCap_DynamicPrediction);
    if (!ovrHmd_ConfigureRendering(m_hmd, &glcfg.Config,
            ovrDistortionCap_Chromatic | ovrDistortionCap_Vignette |
            ovrDistortionCap_TimeWarp | ovrDistortionCap_Overdrive,
            m_hmd->DefaultEyeFov, m_eye_rdesc))
        throw Error("failed to configure distortion rendering");

    // disable health/safety warning
    ovrhmd_EnableHSWDisplaySDKRender(m_hmd, 0);
}
// GL-context initialization: base class setup, clear color, SDK startup,
// HMD acquisition, and orientation tracking (FAILs on any missing piece).
void initGl()
{
    GlfwApp::initGl();
    glClearColor(0.1f, 0.1f, 0.1f, 1.0f);

    ovr_Initialize();
    hmd = ovrHmd_Create(0);
    if (nullptr == hmd)
    {
        FAIL("Unable to open HMD");
    }

    const bool trackingStarted =
        0 != ovrHmd_ConfigureTracking(hmd, ovrTrackingCap_Orientation, 0);
    if (!trackingStarted)
    {
        FAIL("Unable to locate Rift sensor device");
    }
}
///@brief Set this up early so we can get the HMD's display dimensions to create a window. void RiftAppSkeleton::initHMD() { ovr_Initialize(); m_Hmd = ovrHmd_Create(0); if (m_Hmd == NULL) { m_Hmd = ovrHmd_CreateDebug(ovrHmd_DK1); } m_ovrScene.SetHmdPointer(m_Hmd); m_ovrScene.SetChassisPosPointer(&m_chassisPos); m_ovrScene.SetChassisYawPointer(&m_chassisYaw); // Both ovrVector3f and glm::vec3 are at heart a float[3], so this works fine. m_fm.SetChassisPosPointer(reinterpret_cast<glm::vec3*>(&m_chassisPos)); m_fm.SetChassisYawPointer(&m_chassisYaw); }
int main(int argc, char *argv[]) { time_offset = kpSysTimeNs(); kp__X11Init(); #ifdef KP_OVR ovr_Initialize(); ovrHmd hmd = ovrHmd_Create(0); if (hmd) { KP__L("name: %s, manf: %s", hmd->ProductName, hmd->Manufacturer); KP__L("%d,%d %dx%d", hmd->WindowsPos.x, hmd->WindowsPos.y, hmd->Resolution.w, hmd->Resolution.h); } #endif kpuserAppCreate(argc, (const char**)argv); kp__X11Run(); kpuserAppDestroy(); return 0; }
/// Initializes LibOVR and probes for a usable Rift.
/// Returns 1 on success, 0 when no device is present,
/// and 2 when a Rift is detected but its display is disabled.
GMO double initialize()
{
    ovr_Initialize();

    HMD = ovrHmd_Create(0);
    if (HMD == 0)
    {
#if DEBUG
        MessageBoxA(NULL, "No oculus device found", "No oculus device found", MB_ICONWARNING);
#endif
        return 0; // no device found
    }

    // An empty product name means the display output is not enabled.
    const bool displayDisabled = (HMD->ProductName[0] == '\0');
    if (displayDisabled)
    {
#if DEBUG
        MessageBoxA(NULL, "Rift detected, display disabled", "Rift detected, display disabled", MB_ICONWARNING);
#endif
        return 2; // Rift detected, display disabled
    }

    return 1;
}
// Brings up the Oculus runtime (SDK 0.6+ API), creates the HMD, the GL mirror
// texture/FBO, and per-eye FBOs, then enables caps and starts tracking.
// Returns true on success; failures before the GL stage return false.
qboolean VR_Enable()
{
    int i;

    if( ovr_Initialize(NULL) != ovrSuccess ) {
        Con_Printf("Failed to Initialize Oculus SDK");
        return false;
    }

    if( ovrHmd_Create(0, &hmd) != ovrSuccess ) {
        Con_Printf("Failed to get HMD");
        return false;
    }

    if( !InitOpenGLExtensions() ) {
        Con_Printf("Failed to initialize OpenGL extensions");
        return false;
    }

    // The mirror texture lets the desktop window show what the HMD displays;
    // it is blitted via a read framebuffer with no depth attachment.
    ovrHmd_CreateMirrorTextureGL(hmd, GL_RGBA, glwidth, glheight, (ovrTexture**)&mirror_texture);
    glGenFramebuffersEXT(1, &mirror_fbo);
    glBindFramebufferEXT(GL_READ_FRAMEBUFFER_EXT, mirror_fbo);
    glFramebufferTexture2DEXT(GL_READ_FRAMEBUFFER_EXT, GL_COLOR_ATTACHMENT0_EXT, GL_TEXTURE_2D, mirror_texture->OGL.TexId, 0);
    glFramebufferRenderbufferEXT(GL_READ_FRAMEBUFFER_EXT, GL_DEPTH_ATTACHMENT_EXT, GL_RENDERBUFFER_EXT, 0);
    glBindFramebufferEXT(GL_READ_FRAMEBUFFER_EXT, 0);

    // Per-eye FBOs sized per the SDK-recommended FOV texture size; the FOV
    // angles (degrees) are recovered from the eye frustum tangents.
    for( i = 0; i < 2; i++ ) {
        ovrSizei size = ovrHmd_GetFovTextureSize(hmd, (ovrEyeType)i, hmd->DefaultEyeFov[i], 1);

        eyes[i].index = i;
        eyes[i].fbo = CreateFBO(size.w, size.h);
        eyes[i].render_desc = ovrHmd_GetRenderDesc(hmd, (ovrEyeType)i, hmd->DefaultEyeFov[i]);
        eyes[i].fov_x = (atan(hmd->DefaultEyeFov[i].LeftTan) + atan(hmd->DefaultEyeFov[i].RightTan)) / M_PI_DIV_180;
        eyes[i].fov_y = (atan(hmd->DefaultEyeFov[i].UpTan) + atan(hmd->DefaultEyeFov[i].DownTan)) / M_PI_DIV_180;
    }

    // Low persistence + dynamic prediction; tracking has no required caps.
    ovrHmd_SetEnabledCaps(hmd, ovrHmdCap_LowPersistence|ovrHmdCap_DynamicPrediction);
    ovrHmd_ConfigureTracking(hmd, ovrTrackingCap_Orientation|ovrTrackingCap_MagYawCorrection|ovrTrackingCap_Position, 0);

    wglSwapIntervalEXT(0); // Disable V-Sync

    vr_initialized = true;
    return true;
}
// Initializes the SDK and HMD, falling back to a simulated DK2 when no
// hardware is present, then starts the sensor (orientation required).
// Aborts the process if even the minimal sensor capability is unavailable.
void OculusInterface::init()
{
    try
    {
        ovr_Initialize();
        hmd = ovrHmd_Create(0);
        if(hmd)
            ovrHmd_GetDesc(hmd, &hmdDesc);
        else
            throw 0; // no device — handled below by creating a debug HMD
    }
    catch(int e)
    {
        cout << "Cannot get HMD" << endl;
        //for now.
        // initialized = false;
        // return;
        //TODO replace content of this exeption catch by creating a virtual debug HMD to run correctly
        hmd = ovrHmd_CreateDebug(ovrHmd_DK2);
        ovrHmd_GetDesc(hmd, &hmdDesc);
    }
    customReport();
    try
    {
        // Request orientation, yaw correction, and position; orientation
        // alone is the minimal required capability.
        if(!ovrHmd_StartSensor(hmd,ovrSensorCap_Orientation
                               |ovrSensorCap_YawCorrection
                               |ovrSensorCap_Position,ovrSensorCap_Orientation)) //minial required
            throw string("Unable to start sensor! The detected device by OVR is not capable to get sensor state. We cannot do anything with that...");
    }
    catch (string const& e)
    {
        // Unrecoverable: release the device and the SDK, then abort.
        cerr << e << endl;
        ovrHmd_Destroy(hmd);
        ovr_Shutdown();
        abort();
    }
    initialized = true;
}
RiftDisplay() { hmd = ovrHmd_Create(0); if (!hmd) { FAIL("Unable to detect Rift display"); } windowPosition = glm::ivec2( hmd->WindowsPos.x, hmd->WindowsPos.y); GLFWmonitor * hmdMonitor = GlfwApp::getMonitorAtPosition(windowPosition); const GLFWvidmode * videoMode = glfwGetVideoMode(hmdMonitor); windowSize = glm::uvec2( videoMode->width, videoMode->height); eyeSize = windowSize; eyeSize.x /= 2; }
// Creates a fixed 800x600 preview window, sets up the projection/modelview
// stacks, and starts Rift orientation tracking (required for this app).
virtual GLFWwindow * createRenderingTarget(glm::uvec2 & outSize, glm::ivec2 & outPosition)
{
    outSize = glm::uvec2(800, 600);
    outPosition = glm::ivec2(100, 100);

    // 60-degree vertical FOV with near/far planes suited to the demo scene.
    Stacks::projection().top() = glm::perspective(
        PI / 3.0f, aspect(outSize), 0.01f, 10000.0f);
    Stacks::modelview().top() = glm::lookAt(
        glm::vec3(0.0f, 0.0f, 3.5f), Vectors::ORIGIN, Vectors::UP);

    GLFWwindow * result = glfw::createWindow(outSize, outPosition);

    // Sensors are required even though rendering goes to a plain window.
    ovr_Initialize();
    hmd = ovrHmd_Create(0);
    const bool sensorsOk =
        hmd && ovrHmd_ConfigureTracking(hmd, ovrTrackingCap_Orientation, 0);
    if (!sensorsOk)
    {
        FAIL("Unable to locate Rift sensors");
    }
    return result;
}
// Enumerates all attached Rift devices, reports each product name, then
// exercises the simulated (debug) DK2 path, and shuts the SDK down.
virtual int run()
{
    SAY("Initializing SDK");
    ovr_Initialize();

    int hmdCount = ovrHmd_Detect();
    SAY("Found %d connected Rift device(s)", hmdCount);

    for (int i = 0; i < hmdCount; ++i)
    {
        ovrHmd hmd = ovrHmd_Create(i);
        // Bug fix: guard against a device vanishing between Detect and
        // Create, and pass the product name as an argument rather than as
        // the format string (a '%' in the name would be misinterpreted).
        if (hmd)
        {
            SAY("%s", hmd->ProductName);
            ovrHmd_Destroy(hmd);
        }
    }

    // Also exercise the simulated (debug) DK2 path.
    ovrHmd hmd = ovrHmd_CreateDebug(ovrHmd_DK2);
    SAY("%s", hmd->ProductName);
    ovrHmd_Destroy(hmd);

    ovr_Shutdown();
    SAY("Exiting");
    return 0;
}
bool OVR_SDL2_app::init_OVR() { ovr_Initialize(); // Use the first connected HMD. hmd = ovrHmd_Create(0); // Fall back on a DK1 debug configuration if no HMD is available. if (hmd == 0) hmd = ovrHmd_CreateDebug(ovrHmd_DK1); // Enable all tracking capabilities on this HMD. if (hmd) ovrHmd_ConfigureTracking(hmd, ovrTrackingCap_Orientation | ovrTrackingCap_MagYawCorrection | ovrTrackingCap_Position, 0); return (hmd != 0); }
void gkOculus::Init() { ovr_Initialize(); if (!HMD) { HMD = ovrHmd_Create(0); if (!HMD) { //MessageBoxA(NULL, "Oculus Rift not detected.", "", MB_OK); //return(1); return; } if (HMD->ProductName[0] == '\0') { //MessageBoxA(NULL, "Rift detected, display not enabled.", "", MB_OK); return; } InitHMD(); } }
// Constructs the interface: SDK startup, headset acquisition with console
// reporting, then display caps and tracking configuration.
OculusRift::OculusRift()
{
    //init
    ovr_Initialize();

    //create hmd
    mHmd = ovrHmd_Create(0);
    if (!mHmd)
    {
        // No headset present; leave the interface inert.
        console() << "No Oculus found! Check your settings." << endl;
        return;
    }

    // Report panel resolution and the recommended left-eye FOV tangent.
    ovrSizei resolution = mHmd->Resolution;
    console() << "hmd detected! " << mHmd->ProductName << " size: " << resolution.w
              << ", " << resolution.h << " reco Fov: " << mHmd->DefaultEyeFov[0].LeftTan << endl;

    ovrHmd_SetEnabledCaps(mHmd, ovrHmdCap_LowPersistence | ovrHmdCap_DynamicPrediction);
    ovrHmd_ConfigureTracking(mHmd,
                             ovrTrackingCap_Orientation |
                                 ovrTrackingCap_MagYawCorrection |
                                 ovrTrackingCap_Position,
                             0);
}
//-------------------------------------------------------------------------------------
// Initializes LibOVR and the Rift, creates the application window/graphics,
// the shared eye render target, and either SDK-side distortion rendering
// (SDK_RENDER) or client-side distortion meshes + shaders.
// Returns 0 on success, 1 on any failure.
int Init()
{
    // Initializes LibOVR, and the Rift
    ovr_Initialize();
    HMD = ovrHmd_Create(0);
    if (!HMD)
    {
        MessageBoxA(NULL,"Oculus Rift not detected.","", MB_OK);
        return(1);
    }
    if (HMD->ProductName[0] == '\0')
        MessageBoxA(NULL,"Rift detected, display not enabled.","", MB_OK);

    //Setup Window and Graphics - use window frame if relying on Oculus driver
    const int backBufferMultisample = 1;
    bool UseAppWindowFrame = (HMD->HmdCaps & ovrHmdCap_ExtendDesktop) ? false : true;
    HWND window = Util_InitWindowAndGraphics(Recti(HMD->WindowsPos, HMD->Resolution),
                                             FullScreen, backBufferMultisample, UseAppWindowFrame,&pRender);
    if (!window) return 1;
    ovrHmd_AttachToWindow(HMD, window, NULL, NULL);

    //Configure Stereo settings.
    Sizei recommenedTex0Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Left, HMD->DefaultEyeFov[0], 1.0f);
    Sizei recommenedTex1Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Right, HMD->DefaultEyeFov[1], 1.0f);
    Sizei RenderTargetSize;
    // One shared render target: eye widths side by side, max eye height.
    RenderTargetSize.w = recommenedTex0Size.w + recommenedTex1Size.w;
    RenderTargetSize.h = max ( recommenedTex0Size.h, recommenedTex1Size.h );

    const int eyeRenderMultisample = 1;
    pRendertargetTexture = pRender->CreateTexture(Texture_RGBA | Texture_RenderTarget | eyeRenderMultisample,
                                                  RenderTargetSize.w, RenderTargetSize.h, NULL);
    // The actual RT size may be different due to HW limits.
    RenderTargetSize.w = pRendertargetTexture->GetWidth();
    RenderTargetSize.h = pRendertargetTexture->GetHeight();

    // Initialize eye rendering information.
    // The viewport sizes are re-computed in case RenderTargetSize changed due to HW limitations.
    ovrFovPort eyeFov[2] = { HMD->DefaultEyeFov[0], HMD->DefaultEyeFov[1] } ;

    // Left eye renders into the left half, right eye into the right half.
    EyeRenderViewport[0].Pos = Vector2i(0,0);
    EyeRenderViewport[0].Size = Sizei(RenderTargetSize.w / 2, RenderTargetSize.h);
    EyeRenderViewport[1].Pos = Vector2i((RenderTargetSize.w + 1) / 2, 0);
    EyeRenderViewport[1].Size = EyeRenderViewport[0].Size;

#if SDK_RENDER
    // Query D3D texture data.
    EyeTexture[0].D3D11.Header.API = ovrRenderAPI_D3D11;
    EyeTexture[0].D3D11.Header.TextureSize = RenderTargetSize;
    EyeTexture[0].D3D11.Header.RenderViewport = EyeRenderViewport[0];
    EyeTexture[0].D3D11.pTexture = pRendertargetTexture->Tex.GetPtr();
    EyeTexture[0].D3D11.pSRView = pRendertargetTexture->TexSv.GetPtr();

    // Right eye uses the same texture, but different rendering viewport.
    EyeTexture[1] = EyeTexture[0];
    EyeTexture[1].D3D11.Header.RenderViewport = EyeRenderViewport[1];

    // Configure d3d11.
    ovrD3D11Config d3d11cfg;
    d3d11cfg.D3D11.Header.API = ovrRenderAPI_D3D11;
    d3d11cfg.D3D11.Header.RTSize = Sizei(HMD->Resolution.w, HMD->Resolution.h);
    d3d11cfg.D3D11.Header.Multisample = backBufferMultisample;
    d3d11cfg.D3D11.pDevice = pRender->Device;
    d3d11cfg.D3D11.pDeviceContext = pRender->Context;
    d3d11cfg.D3D11.pBackBufferRT = pRender->BackBufferRT;
    d3d11cfg.D3D11.pSwapChain = pRender->SwapChain;

    if (!ovrHmd_ConfigureRendering(HMD, &d3d11cfg.Config,
                                   ovrDistortionCap_Chromatic | ovrDistortionCap_Vignette |
                                   ovrDistortionCap_TimeWarp | ovrDistortionCap_Overdrive,
                                   eyeFov, EyeRenderDesc)) return(1);
#else
    //Shader vertex format
    D3D11_INPUT_ELEMENT_DESC DistortionMeshVertexDesc[] =
    {
        {"Position", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0},
        {"Position", 1, DXGI_FORMAT_R32_FLOAT, 0, 8, D3D11_INPUT_PER_VERTEX_DATA, 0},
        {"Position", 2, DXGI_FORMAT_R32_FLOAT, 0, 12, D3D11_INPUT_PER_VERTEX_DATA, 0},
        {"TexCoord", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 16, D3D11_INPUT_PER_VERTEX_DATA, 0},
        {"TexCoord", 1, DXGI_FORMAT_R32G32_FLOAT, 0, 24, D3D11_INPUT_PER_VERTEX_DATA, 0},
        {"TexCoord", 2, DXGI_FORMAT_R32G32_FLOAT, 0, 32, D3D11_INPUT_PER_VERTEX_DATA, 0}};

    //Distortion vertex shader
    const char* vertexShader =
        "float2 EyeToSourceUVScale, EyeToSourceUVOffset; \n"
        "float4x4 EyeRotationStart, EyeRotationEnd; \n"
        "float2 TimewarpTexCoord(float2 TexCoord, float4x4 rotMat) \n"
        "{ \n"
        // Vertex inputs are in TanEyeAngle space for the R,G,B channels (i.e. after chromatic
        // aberration and distortion). These are now "real world" vectors in direction (x,y,1)
        // relative to the eye of the HMD. Apply the 3x3 timewarp rotation to these vectors.
        " float3 transformed = float3( mul ( rotMat, float4(TexCoord.xy, 1, 1) ).xyz); \n"
        // Project them back onto the Z=1 plane of the rendered images.
        " float2 flattened = (transformed.xy / transformed.z); \n"
        // Scale them into ([0,0.5],[0,1]) or ([0.5,0],[0,1]) UV lookup space (depending on eye)
        " return(EyeToSourceUVScale * flattened + EyeToSourceUVOffset); \n"
        "} \n"
        "void main(in float2 Position : POSITION, in float timewarpLerpFactor : POSITION1, \n"
        " in float Vignette : POSITION2, in float2 TexCoord0 : TEXCOORD0, \n"
        " in float2 TexCoord1 : TEXCOORD1, in float2 TexCoord2 : TEXCOORD2, \n"
        " out float4 oPosition : SV_Position, \n"
        " out float2 oTexCoord0 : TEXCOORD0, out float2 oTexCoord1 : TEXCOORD1, \n"
        " out float2 oTexCoord2 : TEXCOORD2, out float oVignette : TEXCOORD3) \n"
        "{ \n"
        " float4x4 lerpedEyeRot = lerp(EyeRotationStart, EyeRotationEnd, timewarpLerpFactor);\n"
        " oTexCoord0 = TimewarpTexCoord(TexCoord0,lerpedEyeRot); \n"
        " oTexCoord1 = TimewarpTexCoord(TexCoord1,lerpedEyeRot); \n"
        " oTexCoord2 = TimewarpTexCoord(TexCoord2,lerpedEyeRot); \n"
        " oPosition = float4(Position.xy, 0.5, 1.0); oVignette = Vignette; \n"
        "}";

    //Distortion pixel shader
    const char* pixelShader =
        "Texture2D Texture : register(t0); \n"
        "SamplerState Linear : register(s0); \n"
        "float4 main(in float4 oPosition : SV_Position, in float2 oTexCoord0 : TEXCOORD0, \n"
        " in float2 oTexCoord1 : TEXCOORD1, in float2 oTexCoord2 : TEXCOORD2, \n"
        " in float oVignette : TEXCOORD3) : SV_Target \n"
        "{ \n"
        // 3 samples for fixing chromatic aberrations
        " float R = Texture.Sample(Linear, oTexCoord0.xy).r; \n"
        " float G = Texture.Sample(Linear, oTexCoord1.xy).g; \n"
        " float B = Texture.Sample(Linear, oTexCoord2.xy).b; \n"
        " return (oVignette*float4(R,G,B,1)); \n"
        "}";
    pRender->InitShaders(vertexShader, pixelShader, &Shaders, &VertexIL,DistortionMeshVertexDesc,6);

    // Build one distortion mesh + render description per eye.
    for ( int eyeNum = 0; eyeNum < 2; eyeNum++ )
    {
        // Allocate mesh vertices, registering with renderer using the OVR vertex format.
        ovrDistortionMesh meshData;
        ovrHmd_CreateDistortionMesh(HMD, (ovrEyeType) eyeNum, eyeFov[eyeNum],
                                    ovrDistortionCap_Chromatic | ovrDistortionCap_TimeWarp, &meshData);
        MeshVBs[eyeNum] = *pRender->CreateBuffer();
        MeshVBs[eyeNum]->Data(Buffer_Vertex,meshData.pVertexData,sizeof(ovrDistortionVertex)*meshData.VertexCount);
        MeshIBs[eyeNum] = *pRender->CreateBuffer();
        MeshIBs[eyeNum]->Data(Buffer_Index,meshData.pIndexData,sizeof(unsigned short) * meshData.IndexCount);
        ovrHmd_DestroyDistortionMesh( &meshData );

        //Create eye render description for use later
        EyeRenderDesc[eyeNum] = ovrHmd_GetRenderDesc(HMD, (ovrEyeType) eyeNum, eyeFov[eyeNum]);

        //Do scale and offset
        ovrHmd_GetRenderScaleAndOffset(eyeFov[eyeNum],RenderTargetSize, EyeRenderViewport[eyeNum], UVScaleOffset[eyeNum]);
    }
#endif

    ovrHmd_SetEnabledCaps(HMD, ovrHmdCap_LowPersistence | ovrHmdCap_DynamicPrediction);

    // Start the sensor which informs of the Rift's pose and motion
    ovrHmd_ConfigureTracking(HMD, ovrTrackingCap_Orientation | ovrTrackingCap_MagYawCorrection |
                             ovrTrackingCap_Position, 0);

    // This creates lights and models.
    pRoomScene = new Scene;
    sbuilder.PopulateRoomScene(pRoomScene, pRender);

    return (0);
}
// Configures OVR SDK distortion rendering after a renderer reset.
// In debug mode only the render-target size is captured from _config and the
// function returns false without touching the device. Returns true only when
// the HMD was attached, rendering configured, and tracking started.
bool OVR::postReset(void* _nwh, ovrRenderAPIConfig* _config, bool _debug)
{
    if (_debug)
    {
        // Debug path: read the backbuffer/RT size from the API-specific
        // config (field name changed after SDK 0.4.3) and bail out.
        switch (_config->Header.API)
        {
#if BGFX_CONFIG_RENDERER_DIRECT3D11
        case ovrRenderAPI_D3D11:
            {
                ovrD3D11ConfigData* data = (ovrD3D11ConfigData*)_config;
# if OVR_VERSION > OVR_VERSION_043
                m_rtSize = data->Header.BackBufferSize;
# else
                m_rtSize = data->Header.RTSize;
# endif // OVR_VERSION > OVR_VERSION_043
            }
            break;
#endif // BGFX_CONFIG_RENDERER_DIRECT3D11

#if BGFX_CONFIG_RENDERER_OPENGL
        case ovrRenderAPI_OpenGL:
            {
                ovrGLConfigData* data = (ovrGLConfigData*)_config;
# if OVR_VERSION > OVR_VERSION_043
                m_rtSize = data->Header.BackBufferSize;
# else
                m_rtSize = data->Header.RTSize;
# endif // OVR_VERSION > OVR_VERSION_043
            }
            break;
#endif // BGFX_CONFIG_RENDERER_OPENGL

        case ovrRenderAPI_None:
        default:
            BX_CHECK(false, "You should not be here!");
            break;
        }

        m_debug = true;
        return false;
    }

    if (!m_initialized)
    {
        return false;
    }

    if (!_debug)
    {
        m_hmd = ovrHmd_Create(0);
    }

    // Fall back to a simulated DK2 when no hardware could be opened.
    if (NULL == m_hmd)
    {
        m_hmd = ovrHmd_CreateDebug(ovrHmd_DK2);
        BX_WARN(NULL != m_hmd, "Unable to initialize OVR.");

        if (NULL == m_hmd)
        {
            return false;
        }
    }

    BX_TRACE("HMD: %s, %s, firmware: %d.%d"
        , m_hmd->ProductName
        , m_hmd->Manufacturer
        , m_hmd->FirmwareMajor
        , m_hmd->FirmwareMinor
        );

    ovrBool result;
    result = ovrHmd_AttachToWindow(m_hmd, _nwh, NULL, NULL);
    if (!result) { goto ovrError; }

    ovrFovPort eyeFov[2] = { m_hmd->DefaultEyeFov[0], m_hmd->DefaultEyeFov[1] };
    result = ovrHmd_ConfigureRendering(m_hmd
            , _config
            , 0
#if OVR_VERSION < OVR_VERSION_050
            | ovrDistortionCap_Chromatic // permanently enabled >= v5.0
#endif
            | ovrDistortionCap_Vignette
            | ovrDistortionCap_TimeWarp
            | ovrDistortionCap_Overdrive
            | ovrDistortionCap_NoRestore
            | ovrDistortionCap_HqDistortion
            , eyeFov
            , m_erd
            );
    if (!result) { goto ovrError; }

    ovrHmd_SetEnabledCaps(m_hmd
        , 0
        | ovrHmdCap_LowPersistence
        | ovrHmdCap_DynamicPrediction
        );

    result = ovrHmd_ConfigureTracking(m_hmd
        , 0
        | ovrTrackingCap_Orientation
        | ovrTrackingCap_MagYawCorrection
        | ovrTrackingCap_Position
        , 0
        );

    if (!result)
    {
        // Shared failure exit: earlier gotos land here to tear down the HMD.
ovrError:
        BX_TRACE("Failed to initialize OVR.");
        ovrHmd_Destroy(m_hmd);
        m_hmd = NULL;
        return false;
    }

    // Size the shared render target: eye widths side by side, max eye height.
    ovrSizei sizeL = ovrHmd_GetFovTextureSize(m_hmd, ovrEye_Left, m_hmd->DefaultEyeFov[0], 1.0f);
    ovrSizei sizeR = ovrHmd_GetFovTextureSize(m_hmd, ovrEye_Right, m_hmd->DefaultEyeFov[1], 1.0f);
    m_rtSize.w = sizeL.w + sizeR.w;
    m_rtSize.h = bx::uint32_max(sizeL.h, sizeR.h);

    m_warning = true;

    return true;
}
// Constructs the Oculus wrapper: SDK + HMD init, window sizing, the shared
// eye render target, client-side distortion meshes and shader, and tracking.
// NOTE(review): m_isReady is set true on entry and false after a fully
// successful init — the flag appears to mean "not ready"/"error state";
// confirm against its readers before relying on the name.
COculusVR::COculusVR(bool latency)
{
    m_isReady = true;

    // Initializes LibOVR, and the Rift
    ovr_Initialize();

    Hmd = ovrHmd_Create(0);
    if (!Hmd)
    {
        MessageBoxA(NULL, "Oculus Rift not detected.", "", MB_OK);
        return;
    }
    if (Hmd->ProductName[0] == '\0')
        MessageBoxA(NULL, "Rift detected, display not enabled.", "", MB_OK);

    if (Hmd->HmdCaps & ovrHmdCap_ExtendDesktop) {
        WindowSize = Hmd->Resolution;
    }
    else {
        // In Direct App-rendered mode, we can use smaller window size,
        // as it can have its own contents and isn't tied to the buffer.
        WindowSize = Sizei(1100, 618);//Sizei(960, 540); avoid rotated output bug.
    }

    ovrHmd_AttachToWindow(Hmd, wzGetWindowHandle(), NULL, NULL);

    // Configure Stereo settings.
    Sizei recommenedTex0Size = ovrHmd_GetFovTextureSize(Hmd, ovrEye_Left, Hmd->DefaultEyeFov[0], 1.0f);
    Sizei recommenedTex1Size = ovrHmd_GetFovTextureSize(Hmd, ovrEye_Right, Hmd->DefaultEyeFov[1], 1.0f);
    // Shared render target: eye widths add, height is the larger eye height.
    EyeRenderTargetSize.w = recommenedTex0Size.w + recommenedTex1Size.w;
    EyeRenderTargetSize.h = Alg::Max( recommenedTex0Size.h, recommenedTex1Size.h );

    //Create Framebuffer
    wzCreateRenderTarget(&m_screenRender);
    wzCreateRenderBufferDepth(&m_screenBuffer,EyeRenderTargetSize.w,EyeRenderTargetSize.h);
    wzCreateTexture(&m_screenTex,EyeRenderTargetSize.w,EyeRenderTargetSize.h,WZ_FORMATTYPE_RGB,NULL);
    //attach
    wzSetRenderBuffer(&m_screenRender,&m_screenBuffer);
    wzSetRenderTexture(&m_screenRender,&m_screenTex);

    // Initialize eye rendering information.
    // The viewport sizes are re-computed in case RenderTargetSize changed due to HW limitations.
    ovrFovPort eyeFov[2] = { Hmd->DefaultEyeFov[0], Hmd->DefaultEyeFov[1] } ;

    // Left eye renders to the left half, right eye to the right half.
    EyeRenderViewport[0].Pos = Vector2i(0,0);
    EyeRenderViewport[0].Size = Sizei(EyeRenderTargetSize.w / 2, EyeRenderTargetSize.h);
    EyeRenderViewport[1].Pos = Vector2i((EyeRenderTargetSize.w + 1) / 2, 0);
    EyeRenderViewport[1].Size = EyeRenderViewport[0].Size;

    //Shader vertex format
    wzVertexElements ve_var[] = {
        {WZVETYPE_FLOAT2,"position"},
        {WZVETYPE_FLOAT1,"timewarpLerpFactor"},
        {WZVETYPE_FLOAT1,"vignette"},
        {WZVETYPE_FLOAT2,"texCoord0"},
        {WZVETYPE_FLOAT2,"texCoord1"},
        {WZVETYPE_FLOAT2,"texCoord2"},
        WZVE_TMT()
    };
    //create one distortion mesh per eye
    for ( int eyeNum = 0; eyeNum < 2; eyeNum++ )
    {
        // Allocate mesh vertices, registering with renderer using the OVR vertex format.
        ovrDistortionMesh meshData;
        ovrHmd_CreateDistortionMesh(Hmd, (ovrEyeType) eyeNum, eyeFov[eyeNum],
                                    ovrDistortionCap_Chromatic | ovrDistortionCap_TimeWarp, &meshData);

        // De-interleave the SDK's vertex data into one stream per attribute.
        //Create datas
        wzVector2* vertex_pos = new wzVector2[meshData.VertexCount];
        float* vertex_posTimewarp = new float[meshData.VertexCount];
        float* vertex_posVignette = new float[meshData.VertexCount];
        wzVector2* vertex_textanR = new wzVector2[meshData.VertexCount];
        wzVector2* vertex_textanG = new wzVector2[meshData.VertexCount];
        wzVector2* vertex_textanB = new wzVector2[meshData.VertexCount];
        //data copy
        for(unsigned int i = 0; i < meshData.VertexCount; i++) {
            vertex_pos[i].x = meshData.pVertexData[i].ScreenPosNDC.x;
            vertex_pos[i].y = meshData.pVertexData[i].ScreenPosNDC.y;
            vertex_posTimewarp[i] = meshData.pVertexData[i].TimeWarpFactor;
            vertex_posVignette[i] = meshData.pVertexData[i].VignetteFactor;
            vertex_textanR[i].x = meshData.pVertexData[i].TanEyeAnglesR.x;
            vertex_textanR[i].y = meshData.pVertexData[i].TanEyeAnglesR.y;
            vertex_textanG[i].x = meshData.pVertexData[i].TanEyeAnglesG.x;
            vertex_textanG[i].y = meshData.pVertexData[i].TanEyeAnglesG.y;
            vertex_textanB[i].x = meshData.pVertexData[i].TanEyeAnglesB.x;
            vertex_textanB[i].y = meshData.pVertexData[i].TanEyeAnglesB.y;
        }

        void* vertex_pointer[] = {vertex_pos,vertex_posTimewarp,vertex_posVignette,vertex_textanR,vertex_textanG,vertex_textanB};
        if(wzCreateMesh(&MeshBuffer[eyeNum], vertex_pointer, ve_var,
                        meshData.pIndexData, meshData.VertexCount, meshData.IndexCount)) {
            MessageBoxA(NULL, "Lens Distort Mesh Error.", "", MB_OK);
            // Release the temporary attribute streams before bailing out.
            delete[] vertex_pos;
            delete[] vertex_posTimewarp;
            delete[] vertex_posVignette;
            delete[] vertex_textanR;
            delete[] vertex_textanG;
            delete[] vertex_textanB;
            return; //error
        }
        wzChangeDrawMode(&MeshBuffer[eyeNum],WZ_MESH_DF_TRIANGLELIST);

        delete[] vertex_pos;
        delete[] vertex_posTimewarp;
        delete[] vertex_posVignette;
        delete[] vertex_textanR;
        delete[] vertex_textanG;
        delete[] vertex_textanB;

        ovrHmd_DestroyDistortionMesh(&meshData);

        //Create eye render description for use later
        EyeRenderDesc[eyeNum] = ovrHmd_GetRenderDesc(Hmd, (ovrEyeType) eyeNum, eyeFov[eyeNum]);

        //Do scale and offset
        ovrHmd_GetRenderScaleAndOffset(eyeFov[eyeNum],EyeRenderTargetSize, EyeRenderViewport[eyeNum], UVScaleOffset[eyeNum]);
    }

    //Create shader
    if(wzCreateShader(&LensShader, ols_vertexshader,ols_flagshader, ve_var)) {
        MessageBoxA(NULL, "Lens Shader Compile Error.", "", MB_OK);
        return;
    }

    if(latency)
        ovrHmd_SetEnabledCaps(Hmd, ovrHmdCap_DynamicPrediction); //ovrHmdCap_LowPersistence

    // Start the sensor which informs of the Rift's pose and motion
    ovrHmd_ConfigureTracking(Hmd, ovrTrackingCap_Orientation | ovrTrackingCap_MagYawCorrection, 0); //not use : ovrTrackingCap_Position

    m_isReady = false;
}
// Constructor: acquires the Rift, starts head tracking, builds the per-eye
// Coin3D scene graphs/cameras, and configures OVR SDK distortion rendering.
// NOTE(review): the bare `throw;` statements below call std::terminate() when
// no exception is active — presumably intended as a hard abort; left as-is so
// callers' (non-)catch behavior is unchanged. Consider throwing a concrete
// exception type instead.
CoinRiftWidget::CoinRiftWidget() : QGLWidget()
{
    // Clear per-eye GL handles so teardown can distinguish "never created".
    for (int eye = 0; eye < 2; eye++) {
        reinterpret_cast<ovrGLTextureData*>(&eyeTexture[eye])->TexId = 0;
#ifdef USE_FRAMEBUFFER
        frameBufferID[eye] = 0;
        depthBufferID[eye] = 0;
#endif
    }

    // OVR will do the swapping.
    setAutoBufferSwap(false);

    hmd = ovrHmd_Create(0);
    if (!hmd) {
        qDebug() << "Could not find Rift device.";
        throw;
    }

    // Request and require full tracking (orientation, mag yaw, position).
    if (!ovrHmd_ConfigureTracking(hmd,
                                  ovrTrackingCap_Orientation
                                      | ovrTrackingCap_MagYawCorrection
                                      | ovrTrackingCap_Position,
                                  ovrTrackingCap_Orientation
                                      | ovrTrackingCap_MagYawCorrection
                                      | ovrTrackingCap_Position)) { // Capabilities we require.
        qDebug() << "Could not start Rift motion sensor.";
        throw;
    }

    resize(hmd->Resolution.w, hmd->Resolution.h);

    // Configure stereo settings: recommended render-target size per eye.
    ovrSizei recommenedTex0Size = ovrHmd_GetFovTextureSize(hmd, ovrEye_Left,
                                                           hmd->DefaultEyeFov[0], 1.0f);
    ovrSizei recommenedTex1Size = ovrHmd_GetFovTextureSize(hmd, ovrEye_Right,
                                                           hmd->DefaultEyeFov[1], 1.0f);

#ifdef USE_SO_OFFSCREEN_RENDERER
    // BUGFIX: the original compared each size against itself
    // (max(tex0.w, tex0.w) / max(tex1.h, tex1.h)); the renderer must be big
    // enough for the larger of the two eyes in each dimension.
    renderer = new SoOffscreenRenderer(
        SbViewportRegion(std::max(recommenedTex0Size.w, recommenedTex1Size.w),
                         std::max(recommenedTex0Size.h, recommenedTex1Size.h)));
    renderer->setComponents(SoOffscreenRenderer::RGB_TRANSPARENCY);
    BackgroundColor = SbColor(.0f, .0f, .8f);
    renderer->setBackgroundColor(BackgroundColor);
#endif
#ifdef USE_FRAMEBUFFER
    m_sceneManager = new SoSceneManager();
    // BUGFIX: same self-comparison as above — use the max across both eyes.
    m_sceneManager->setViewportRegion(
        SbViewportRegion(std::max(recommenedTex0Size.w, recommenedTex1Size.w),
                         std::max(recommenedTex0Size.h, recommenedTex1Size.h)));
    m_sceneManager->setBackgroundColor(SbColor(.0f, .0f, .8f));
#endif
    basePosition = SbVec3f(0.0f, 0.0f, -2.0f);

    // light handling
    SoDirectionalLight *light = new SoDirectionalLight();
    light->direction.setValue(1, -1, -1);
    SoDirectionalLight *light2 = new SoDirectionalLight();
    light2->direction.setValue(-1, -1, -1);
    light2->intensity.setValue(0.6);
    light2->color.setValue(0.8, 0.8, 1);

    scene = new SoSeparator(0); // Placeholder.

    // One root/camera per eye; both share the lights and the scene placeholder.
    for (int eye = 0; eye < 2; eye++) {
        rootScene[eye] = new SoSeparator();
        rootScene[eye]->ref();
        camera[eye] = new SoFrustumCamera();
        camera[eye]->position.setValue(basePosition);
        camera[eye]->focalDistance.setValue(5.0f);
        // The frustum is set explicitly below; don't let Coin adjust it.
        camera[eye]->viewportMapping.setValue(SoCamera::LEAVE_ALONE);
        rootScene[eye]->addChild(camera[eye]);
        rootScene[eye]->addChild(light);
        rootScene[eye]->addChild(light2);
        rootScene[eye]->addChild(scene);
    }

    // Populate ovrEyeDesc[2].
    eyeRenderDesc[0].Eye = ovrEye_Left;
    eyeRenderDesc[1].Eye = ovrEye_Right;
    eyeRenderDesc[0].Fov = hmd->DefaultEyeFov[0];
    eyeRenderDesc[1].Fov = hmd->DefaultEyeFov[1];
#ifdef USE_SO_OFFSCREEN_RENDERER
    // Both eyes share the single offscreen renderer's pixel size.
    eyeTexture[0].Header.TextureSize.w =
        renderer->getViewportRegion().getViewportSizePixels().getValue()[0];
    eyeTexture[0].Header.TextureSize.h =
        renderer->getViewportRegion().getViewportSizePixels().getValue()[1];
    eyeTexture[1].Header.TextureSize = eyeTexture[0].Header.TextureSize;
#endif
#ifdef USE_FRAMEBUFFER
    eyeTexture[0].Header.TextureSize = recommenedTex0Size;
    eyeTexture[1].Header.TextureSize = recommenedTex1Size;
#endif
    // Each eye renders to the full extent of its own texture.
    eyeTexture[0].Header.RenderViewport.Pos.x = 0;
    eyeTexture[0].Header.RenderViewport.Pos.y = 0;
    eyeTexture[0].Header.RenderViewport.Size = eyeTexture[0].Header.TextureSize;
    eyeTexture[1].Header.RenderViewport.Pos = eyeTexture[0].Header.RenderViewport.Pos;
    eyeTexture[1].Header.RenderViewport.Size = eyeTexture[1].Header.TextureSize;

    const int backBufferMultisample = 0; // TODO This is a guess?

    // Hand the SDK our GL window/DC so it can present the distorted frame.
    ovrGLConfig cfg;
    cfg.OGL.Header.API = ovrRenderAPI_OpenGL;
    cfg.OGL.Header.RTSize = hmd->Resolution;
    cfg.OGL.Header.Multisample = backBufferMultisample;
    cfg.OGL.Window = reinterpret_cast<HWND>(winId());
    makeCurrent();
    //cfg.OGL.WglContext = wglGetCurrentContext(); // http://stackoverflow.com/questions/17532033/qglwidget-get-gl-contextes-for-windows
    cfg.OGL.DC = wglGetCurrentDC();
    qDebug() << "Window:" << cfg.OGL.Window;
    //qDebug() << "Context:" << cfg.OGL.WglContext;
    qDebug() << "DC:" << cfg.OGL.DC;

    int DistortionCaps = 0;
    DistortionCaps |= ovrDistortionCap_Chromatic;
//    DistortionCaps |= ovrDistortionCap_TimeWarp; // Produces black screen...
    DistortionCaps |= ovrDistortionCap_Vignette;
    DistortionCaps |= ovrDistortionCap_HqDistortion;

    bool VSyncEnabled(false); // TODO This is a guess.

    // Fills eyeRenderDesc with the SDK's per-eye render parameters.
    if (!ovrHmd_ConfigureRendering(hmd,
                                   &cfg.Config,
                                   /*(VSyncEnabled ? 0 : ovrHmdCap_NoVSync),*/
                                   DistortionCaps,
                                   hmd->DefaultEyeFov, //eyes,
                                   eyeRenderDesc)) {
        qDebug() << "Could not configure OVR rendering.";
        throw;
    }

    // Build each eye's asymmetric view frustum from the SDK FOV tangents.
    static const float nearPlane = 0.01f;
    for (int eye = 0; eye < 2; eye++) {
        camera[eye]->aspectRatio.setValue(
            (eyeRenderDesc[eye].Fov.LeftTan + eyeRenderDesc[eye].Fov.RightTan) /
            (eyeRenderDesc[eye].Fov.UpTan + eyeRenderDesc[eye].Fov.DownTan));
        camera[eye]->nearDistance.setValue(nearPlane);
        camera[eye]->farDistance.setValue(10000.0f);
        camera[eye]->left.setValue(-eyeRenderDesc[eye].Fov.LeftTan * nearPlane);
        camera[eye]->right.setValue(eyeRenderDesc[eye].Fov.RightTan * nearPlane);
        camera[eye]->top.setValue(eyeRenderDesc[eye].Fov.UpTan * nearPlane);
        camera[eye]->bottom.setValue(-eyeRenderDesc[eye].Fov.DownTan * nearPlane);
    }
}
int setup_rift(struct weston_compositor *compositor) { struct oculus_rift *rift = compositor->rift; rift->enabled = 1; rift->screen_z = -5.0; rift->screen_scale = 1.0; weston_compositor_add_key_binding(compositor, KEY_5, MODIFIER_SUPER, toggle_sbs, compositor); weston_compositor_add_key_binding(compositor, KEY_6, MODIFIER_SUPER, toggle_rotate, compositor); weston_compositor_add_key_binding(compositor, KEY_7, MODIFIER_SUPER, move_in, compositor); weston_compositor_add_key_binding(compositor, KEY_8, MODIFIER_SUPER, move_out, compositor); weston_compositor_add_key_binding(compositor, KEY_9, MODIFIER_SUPER, scale_up, compositor); weston_compositor_add_key_binding(compositor, KEY_0, MODIFIER_SUPER, scale_down, compositor); /*// use this at some point in the future to detect and grab the rift display struct weston_output *output; wl_list_for_each(output, &compositor->output_list, link) { weston_log("Output (%i): %s\n\t%ix%i\n", output->id, output->name, output->width, output->height); }*/ rift->distortion_shader = calloc(1, sizeof *(rift->distortion_shader)); struct distortion_shader_ *d = rift->distortion_shader; d->program = CreateProgram(distortion_vertex_shader, distortion_fragment_shader); d->EyeToSourceUVScale = glGetUniformLocation(d->program, "EyeToSourceUVScale"); d->EyeToSourceUVOffset = glGetUniformLocation(d->program, "EyeToSourceUVOffset"); d->RightEye = glGetUniformLocation(d->program, "RightEye"); d->angle = glGetUniformLocation(d->program, "angle"); d->Position = glGetAttribLocation(d->program, "Position"); d->TexCoord0 = glGetAttribLocation(d->program, "TexCoord0"); d->TexCoordR = glGetAttribLocation(d->program, "TexCoordR"); d->TexCoordG = glGetAttribLocation(d->program, "TexCoordG"); d->TexCoordB = glGetAttribLocation(d->program, "TexCoordB"); d->eyeTexture = glGetAttribLocation(d->program, "Texture0"); rift->eye_shader = calloc(1, sizeof *(rift->eye_shader)); struct eye_shader_ *e = rift->eye_shader; e->program = CreateProgram(eye_vertex_shader, 
eye_fragment_shader); e->Position = glGetAttribLocation(d->program, "Position"); e->TexCoord0 = glGetAttribLocation(d->program, "TexCoord0"); e->Projection = glGetUniformLocation(e->program, "Projection"); e->ModelView = glGetUniformLocation(e->program, "ModelView"); e->virtualScreenTexture = glGetAttribLocation(d->program, "Texture0"); rift->scene = calloc(1, sizeof *(rift->scene)); glGenBuffers(1, &rift->scene->vertexBuffer); glBindBuffer(GL_ARRAY_BUFFER, rift->scene->vertexBuffer); static const GLfloat rectangle[] = {-1.0f, -1.0f, -0.5f, 1.0f, -1.0f, -0.5f, -1.0f, 1.0f, -0.5f, 1.0f, -1.0f, -0.5f, 1.0f, 1.0f, -0.5f, -1.0f, 1.0f, -0.5f}; glBufferData(GL_ARRAY_BUFFER, sizeof(rectangle), rectangle, GL_STATIC_DRAW); glGenBuffers(2, &rift->scene->SBSuvsBuffer[0]); glGenBuffers(1, &rift->scene->uvsBuffer); static const GLfloat uvs[3][12] = {{ 0.0, 0.0, 0.5, 0.0, 0.0, 1.0, 0.5, 0.0, 0.5, 1.0, 0.0, 1.0}, { 0.5, 0.0, 1.0, 0.0, 0.5, 1.0, 1.0, 0.0, 1.0, 1.0, 0.5, 1.0}, { 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0}}; glBindBuffer(GL_ARRAY_BUFFER, rift->scene->SBSuvsBuffer[0]); glBufferData(GL_ARRAY_BUFFER, sizeof(uvs[0]), uvs[0], GL_STATIC_DRAW); glBindBuffer(GL_ARRAY_BUFFER, rift->scene->SBSuvsBuffer[1]); glBufferData(GL_ARRAY_BUFFER, sizeof(uvs[1]), uvs[1], GL_STATIC_DRAW); glBindBuffer(GL_ARRAY_BUFFER, rift->scene->uvsBuffer); glBufferData(GL_ARRAY_BUFFER, sizeof(uvs[2]), uvs[2], GL_STATIC_DRAW); rift->width = 1920; rift->height = 1080; glGenTextures(1, &rift->fbTexture); glBindTexture(GL_TEXTURE_2D, rift->fbTexture); glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, rift->width, rift->height, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0); glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); glGenFramebuffers(1, &rift->redirectedFramebuffer); 
glBindFramebuffer(GL_FRAMEBUFFER, rift->redirectedFramebuffer); glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, rift->fbTexture, 0); show_error(); if(glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) { switch(glCheckFramebufferStatus(GL_FRAMEBUFFER)) { case GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT: weston_log("incomplete attachment\n"); break; case GL_FRAMEBUFFER_INCOMPLETE_DIMENSIONS: weston_log("incomplete dimensions\n"); break; case GL_FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT: weston_log("incomplete missing attachment\n"); break; case GL_FRAMEBUFFER_UNSUPPORTED: weston_log("unsupported\n"); break; } weston_log("framebuffer not working\n"); show_error(); exit(1); } glClear(GL_COLOR_BUFFER_BIT); /*EGLint pbufferAttributes[] = { EGL_WIDTH, rift->width, EGL_HEIGHT, rift->height, EGL_TEXTURE_FORMAT, EGL_TEXTURE_RGB, EGL_TEXTURE_TARGET, EGL_TEXTURE_2D, EGL_NONE }; rift->pbuffer = eglCreatePbufferSurface( rift->egl_display, rift->egl_config, pbufferAttributes); glGenTextures(1, &(rift->texture)); glBindTexture(GL_TEXTURE_2D, rift->texture); //glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, rift->width, rift->height, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0); glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); eglMakeCurrent(rift->egl_display, rift->pbuffer, rift->pbuffer, rift->egl_context); eglBindTexImage(rift->egl_display, rift->pbuffer, EGL_BACK_BUFFER); eglMakeCurrent(rift->egl_display, rift->orig_surface, rift->orig_surface, rift->egl_context);*/ ovr_Initialize(0); rift->hmd = ovrHmd_Create(0); if(rift->hmd == NULL) { rift->hmd = ovrHmd_CreateDebug(ovrHmd_DK2); } ovrHmd_ConfigureTracking(rift->hmd, ovrTrackingCap_Orientation | ovrTrackingCap_Position | ovrTrackingCap_MagYawCorrection, 0); ovrHmd_ResetFrameTiming(rift->hmd, 
0); int eye; for(eye = 0; eye < 2; eye++) { ovrFovPort fov = rift->hmd->DefaultEyeFov[eye]; ovrEyeRenderDesc renderDesc = ovrHmd_GetRenderDesc(rift->hmd, eye, fov); struct EyeArg *eyeArg = &rift->eyeArgs[eye]; eyeArg->projection = ovrMatrix4f_Projection(fov, 0.1, 100000, true); /*int j, k; for(k=0; k<4; k++) { for(j=0; j<4; j++) { printf("%f\t", eyeArg->projection.M[k][j]); } printf("\n"); }*/ rift->hmdToEyeOffsets[eye] = renderDesc.HmdToEyeViewOffset; ovrRecti texRect; texRect.Size = ovrHmd_GetFovTextureSize(rift->hmd, eye, rift->hmd->DefaultEyeFov[eye], 1.0f); texRect.Pos.x = texRect.Pos.y = 0; eyeArg->textureWidth = texRect.Size.w; eyeArg->textureHeight = texRect.Size.h; glGenTextures(1, &eyeArg->texture); glBindTexture(GL_TEXTURE_2D, eyeArg->texture); glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, eyeArg->textureWidth, eyeArg->textureHeight, 0, GL_RGBA, GL_UNSIGNED_BYTE, NULL); glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); glGenFramebuffers(1, &eyeArg->framebuffer); show_error(); glBindFramebuffer(GL_FRAMEBUFFER, eyeArg->framebuffer); show_error(); glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, eyeArg->texture, 0); show_error(); if(glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) { switch(glCheckFramebufferStatus(GL_FRAMEBUFFER)) { case GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT: weston_log("incomplete attachment\n"); break; case GL_FRAMEBUFFER_INCOMPLETE_DIMENSIONS: weston_log("incomplete dimensions\n"); break; case GL_FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT: weston_log("incomplete missing attachment\n"); break; case GL_FRAMEBUFFER_UNSUPPORTED: weston_log("unsupported\n"); break; } weston_log("framebuffer not working\n"); show_error(); exit(1); } if(eye) { glClearColor(1.0, 0.0, 0.0, 1.0); show_error(); 
} else { glClearColor(0.0, 1.0, 0.0, 1.0); show_error(); } glClear(GL_COLOR_BUFFER_BIT); show_error(); /*EGLint eyePbufferAttributes[] = { EGL_WIDTH, texRect.Size.w, EGL_HEIGHT, texRect.Size.h, EGL_TEXTURE_FORMAT, EGL_TEXTURE_RGB, EGL_TEXTURE_TARGET, EGL_TEXTURE_2D, EGL_NONE }; eyeArg.surface = eglCreatePbufferSurface( rift->egl_display, rift->egl_config, eyePbufferAttributes);*/ ovrVector2f scaleAndOffset[2]; ovrHmd_GetRenderScaleAndOffset(fov, texRect.Size, texRect, scaleAndOffset); eyeArg->scale = scaleAndOffset[0]; eyeArg->offset = scaleAndOffset[1]; ovrHmd_CreateDistortionMesh(rift->hmd, eye, fov, 0, &eyeArg->mesh); glGenBuffers(1, &eyeArg->indexBuffer); glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, eyeArg->indexBuffer); glBufferData(GL_ELEMENT_ARRAY_BUFFER, eyeArg->mesh.IndexCount * sizeof(unsigned short), eyeArg->mesh.pIndexData, GL_STATIC_DRAW); glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0); float vertices_buffer[eyeArg->mesh.VertexCount*2]; float uvs_buffer[3][eyeArg->mesh.VertexCount*2]; uint i; for(i=0; i<eyeArg->mesh.VertexCount; i++) { ovrDistortionVertex vertex = eyeArg->mesh.pVertexData[i]; vertices_buffer[i*2] = vertex.ScreenPosNDC.x; vertices_buffer[(i*2)+1] = vertex.ScreenPosNDC.y; uvs_buffer[0][i*2] = vertex.TanEyeAnglesR.x; uvs_buffer[0][(i*2)+1] = vertex.TanEyeAnglesR.y; uvs_buffer[1][i*2] = vertex.TanEyeAnglesG.x; uvs_buffer[1][(i*2)+1] = vertex.TanEyeAnglesG.y; uvs_buffer[2][i*2] = vertex.TanEyeAnglesB.x; uvs_buffer[2][(i*2)+1] = vertex.TanEyeAnglesB.y; } glGenBuffers(1, &eyeArg->vertexBuffer); glBindBuffer(GL_ARRAY_BUFFER, eyeArg->vertexBuffer); glBufferData(GL_ARRAY_BUFFER, eyeArg->mesh.VertexCount * sizeof(GL_FLOAT) * 2, vertices_buffer, GL_STATIC_DRAW); glGenBuffers(3, &eyeArg->uvsBuffer[0]); for(i=0; i<3; i++) { glBindBuffer(GL_ARRAY_BUFFER, eyeArg->uvsBuffer[i]); glBufferData(GL_ARRAY_BUFFER, eyeArg->mesh.VertexCount * sizeof(GL_FLOAT) * 2, uvs_buffer[i], GL_STATIC_DRAW); glBindBuffer(GL_ARRAY_BUFFER, 0); } } return 0; }