// Commits the default FOV layer plus any extra quad layers and submits the
// frame to the Oculus compositor.
// Returns false when the layer count is out of range, when submission fails,
// or when the session-status query fails. Also recenters the tracking origin
// when the runtime requests it.
DLL_EXPORT_API npBool xnOvrCommitFrame(xnOvrSession* session, int numberOfExtraLayers, xnOvrQuadLayer** extraLayers)
{
    // ovr_SubmitFrame accepts at most ovrMaxLayerCount layers. A fixed-size
    // array also replaces the variable-length array used previously, which is
    // a non-standard GCC extension and does not compile under MSVC.
    if (numberOfExtraLayers < 0 || 1 + numberOfExtraLayers > ovrMaxLayerCount)
    {
        return false;
    }
    ovrLayerHeader* layers[ovrMaxLayerCount];

    //add the default layer first
    layers[0] = &session->Layer.Header;
    //commit the default fov layer
    ovr_CommitTextureSwapChain(session->Session, session->SwapChain);

    for (auto i = 0; i < numberOfExtraLayers; i++)
    {
        //add further quad layers
        layers[i + 1] = &extraLayers[i]->Layer.Header;
        //also commit the quad layer
        ovr_CommitTextureSwapChain(session->Session, extraLayers[i]->SwapChain);
    }

    if (!OVR_SUCCESS(ovr_SubmitFrame(session->Session, 0, NULL, layers, 1 + numberOfExtraLayers)))
    {
        return false;
    }

    ovrSessionStatus status;
    if (!OVR_SUCCESS(ovr_GetSessionStatus(session->Session, &status)))
    {
        return false;
    }
    if (status.ShouldRecenter)
    {
        ovr_RecenterTrackingOrigin(session->Session);
    }

    return true;
}
// Re-centers the Oculus tracking origin on the current head pose.
// No-op when no HMD session is active.
void VRImplOVR::recenter()
{
    // nullptr instead of NULL for consistency with modern C++ usage elsewhere
    // in this file.
    if (nullptr != m_session)
    {
        ovr_RecenterTrackingOrigin(m_session);
    }
}
// Keyboard handler: SPACE re-centers the HMD tracking origin; all other keys
// are ignored.
void OculusVR::OnKeyPress(KeyCode key)
{
    switch (key)
    {
    case KEY_SPACE:
        ovr_RecenterTrackingOrigin(m_hmdSession);
        break;
    default:
        // Intentionally ignore every other key; the explicit default keeps
        // -Wswitch/-Wswitch-enum warnings quiet as KeyCode grows.
        break;
    }
}
// Exported helper: re-center the Oculus tracking origin for the given
// wrapped session.
DLL_EXPORT_API void xnOvrRecenter(xnOvrSession* session)
{
    ovrSession ovrHandle = session->Session;
    ovr_RecenterTrackingOrigin(ovrHandle);
}
int main(int argc, char **argv) { // Initialize SDL2's context SDL_Init(SDL_INIT_VIDEO); // Initialize Oculus' context ovrResult result = ovr_Initialize(nullptr); if (OVR_FAILURE(result)) { std::cout << "ERROR: Failed to initialize libOVR" << std::endl; SDL_Quit(); return -1; } ovrSession session; ovrGraphicsLuid luid; // Connect to the Oculus headset result = ovr_Create(&session, &luid); if (OVR_FAILURE(result)) { std::cout << "ERROR: Oculus Rift not detected" << std::endl; ovr_Shutdown(); SDL_Quit(); return -1; } int x = SDL_WINDOWPOS_CENTERED, y = SDL_WINDOWPOS_CENTERED; int winWidth = 1280; int winHeight = 720; Uint32 flags = SDL_WINDOW_OPENGL | SDL_WINDOW_SHOWN; // Create SDL2 Window SDL_Window* window = SDL_CreateWindow("OVR ZED App", x, y, winWidth, winHeight, flags); // Create OpenGL context SDL_GLContext glContext = SDL_GL_CreateContext(window); // Initialize GLEW glewInit(); // Turn off vsync to let the compositor do its magic SDL_GL_SetSwapInterval(0); // Initialize the ZED Camera sl::zed::Camera* zed = 0; zed = new sl::zed::Camera(sl::zed::HD720); sl::zed::ERRCODE zederr = zed->init(sl::zed::MODE::PERFORMANCE, 0); int zedWidth = zed->getImageSize().width; int zedHeight = zed->getImageSize().height; if (zederr != sl::zed::SUCCESS) { std::cout << "ERROR: " << sl::zed::errcode2str(zederr) << std::endl; ovr_Destroy(session); ovr_Shutdown(); SDL_GL_DeleteContext(glContext); SDL_DestroyWindow(window); SDL_Quit(); delete zed; return -1; } GLuint zedTextureID_L, zedTextureID_R; // Generate OpenGL texture for left images of the ZED camera glGenTextures(1, &zedTextureID_L); glBindTexture(GL_TEXTURE_2D, zedTextureID_L); glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, zedWidth, zedHeight, 0, GL_BGRA, GL_UNSIGNED_BYTE, NULL); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, 
GL_CLAMP_TO_EDGE); // Generate OpenGL texture for right images of the ZED camera glGenTextures(1, &zedTextureID_R); glBindTexture(GL_TEXTURE_2D, zedTextureID_R); glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, zedWidth, zedHeight, 0, GL_BGRA, GL_UNSIGNED_BYTE, NULL); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); glBindTexture(GL_TEXTURE_2D, 0); #if OPENGL_GPU_INTEROP cudaGraphicsResource* cimg_L; cudaGraphicsResource* cimg_R; cudaError_t errL, errR; errL = cudaGraphicsGLRegisterImage(&cimg_L, zedTextureID_L, GL_TEXTURE_2D, cudaGraphicsMapFlagsNone); errR = cudaGraphicsGLRegisterImage(&cimg_R, zedTextureID_R, GL_TEXTURE_2D, cudaGraphicsMapFlagsNone); if (errL != cudaSuccess || errR != cudaSuccess) { std::cout << "ERROR: cannot create CUDA texture : " << errL << "|" << errR << std::endl; } #endif ovrHmdDesc hmdDesc = ovr_GetHmdDesc(session); // Get the texture sizes of Oculus eyes ovrSizei textureSize0 = ovr_GetFovTextureSize(session, ovrEye_Left, hmdDesc.DefaultEyeFov[0], 1.0f); ovrSizei textureSize1 = ovr_GetFovTextureSize(session, ovrEye_Right, hmdDesc.DefaultEyeFov[1], 1.0f); // Compute the final size of the render buffer ovrSizei bufferSize; bufferSize.w = textureSize0.w + textureSize1.w; bufferSize.h = std::max(textureSize0.h, textureSize1.h); // Initialize OpenGL swap textures to render ovrTextureSwapChain textureChain = nullptr; // Description of the swap chain ovrTextureSwapChainDesc descTextureSwap = {}; descTextureSwap.Type = ovrTexture_2D; descTextureSwap.ArraySize = 1; descTextureSwap.Width = bufferSize.w; descTextureSwap.Height = bufferSize.h; descTextureSwap.MipLevels = 1; descTextureSwap.Format = OVR_FORMAT_R8G8B8A8_UNORM_SRGB; descTextureSwap.SampleCount = 1; descTextureSwap.StaticImage = ovrFalse; // Create the OpenGL texture swap chain 
result = ovr_CreateTextureSwapChainGL(session, &descTextureSwap, &textureChain); int length = 0; ovr_GetTextureSwapChainLength(session, textureChain, &length); if (OVR_SUCCESS(result)) { for (int i = 0; i < length; ++i) { GLuint chainTexId; ovr_GetTextureSwapChainBufferGL(session, textureChain, i, &chainTexId); glBindTexture(GL_TEXTURE_2D, chainTexId); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); } } else { std::cout << "ERROR: failed creating swap texture" << std::endl; ovr_Destroy(session); ovr_Shutdown(); SDL_GL_DeleteContext(glContext); SDL_DestroyWindow(window); SDL_Quit(); delete zed; return -1; } // Generate frame buffer to render GLuint fboID; glGenFramebuffers(1, &fboID); // Generate depth buffer of the frame buffer GLuint depthBuffID; glGenTextures(1, &depthBuffID); glBindTexture(GL_TEXTURE_2D, depthBuffID); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); GLenum internalFormat = GL_DEPTH_COMPONENT24; GLenum type = GL_UNSIGNED_INT; glTexImage2D(GL_TEXTURE_2D, 0, internalFormat, bufferSize.w, bufferSize.h, 0, GL_DEPTH_COMPONENT, type, NULL); // Create a mirror texture to display the render result in the SDL2 window ovrMirrorTextureDesc descMirrorTexture; memset(&descMirrorTexture, 0, sizeof(descMirrorTexture)); descMirrorTexture.Width = winWidth; descMirrorTexture.Height = winHeight; descMirrorTexture.Format = OVR_FORMAT_R8G8B8A8_UNORM_SRGB; ovrMirrorTexture mirrorTexture = nullptr; result = ovr_CreateMirrorTextureGL(session, &descMirrorTexture, &mirrorTexture); if (!OVR_SUCCESS(result)) { std::cout << "ERROR: Failed 
to create mirror texture" << std::endl; } GLuint mirrorTextureId; ovr_GetMirrorTextureBufferGL(session, mirrorTexture, &mirrorTextureId); GLuint mirrorFBOID; glGenFramebuffers(1, &mirrorFBOID); glBindFramebuffer(GL_READ_FRAMEBUFFER, mirrorFBOID); glFramebufferTexture2D(GL_READ_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, mirrorTextureId, 0); glFramebufferRenderbuffer(GL_READ_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, 0); glBindFramebuffer(GL_READ_FRAMEBUFFER, 0); // Frame index used by the compositor // it needs to be updated each new frame long long frameIndex = 0; // FloorLevel will give tracking poses where the floor height is 0 ovr_SetTrackingOriginType(session, ovrTrackingOrigin_FloorLevel); // Initialize a default Pose ovrPosef eyeRenderPose[2]; // Get the render description of the left and right "eyes" of the Oculus headset ovrEyeRenderDesc eyeRenderDesc[2]; eyeRenderDesc[0] = ovr_GetRenderDesc(session, ovrEye_Left, hmdDesc.DefaultEyeFov[0]); eyeRenderDesc[1] = ovr_GetRenderDesc(session, ovrEye_Right, hmdDesc.DefaultEyeFov[1]); // Get the Oculus view scale description ovrVector3f hmdToEyeOffset[2]; double sensorSampleTime; // Create and compile the shader's sources Shader shader(OVR_ZED_VS, OVR_ZED_FS); // Compute the ZED image field of view with the ZED parameters float zedFovH = atanf(zed->getImageSize().width / (zed->getParameters()->LeftCam.fx *2.f)) * 2.f; // Compute the Horizontal Oculus' field of view with its parameters float ovrFovH = (atanf(hmdDesc.DefaultEyeFov[0].LeftTan) + atanf(hmdDesc.DefaultEyeFov[0].RightTan)); // Compute the useful part of the ZED image unsigned int usefulWidth = zed->getImageSize().width * ovrFovH / zedFovH; // Compute the size of the final image displayed in the headset with the ZED image's aspect-ratio kept unsigned int widthFinal = bufferSize.w / 2; float heightGL = 1.f; float widthGL = 1.f; if (usefulWidth > 0.f) { unsigned int heightFinal = zed->getImageSize().height * widthFinal / usefulWidth; // Convert 
this size to OpenGL viewport's frame's coordinates heightGL = (heightFinal) / (float)(bufferSize.h); widthGL = ((zed->getImageSize().width * (heightFinal / (float)zed->getImageSize().height)) / (float)widthFinal); } else { std::cout << "WARNING: ZED parameters got wrong values." "Default vertical and horizontal FOV are used.\n" "Check your calibration file or check if your ZED is not too close to a surface or an object." << std::endl; } // Compute the Vertical Oculus' field of view with its parameters float ovrFovV = (atanf(hmdDesc.DefaultEyeFov[0].UpTan) + atanf(hmdDesc.DefaultEyeFov[0].DownTan)); // Compute the center of the optical lenses of the headset float offsetLensCenterX = ((atanf(hmdDesc.DefaultEyeFov[0].LeftTan)) / ovrFovH) * 2.f - 1.f; float offsetLensCenterY = ((atanf(hmdDesc.DefaultEyeFov[0].UpTan)) / ovrFovV) * 2.f - 1.f; // Create a rectangle with the computed coordinates and push it in GPU memory. struct GLScreenCoordinates { float left, up, right, down; } screenCoord; screenCoord.up = heightGL + offsetLensCenterY; screenCoord.down = heightGL - offsetLensCenterY; screenCoord.right = widthGL + offsetLensCenterX; screenCoord.left = widthGL - offsetLensCenterX; float rectVertices[12] = { -screenCoord.left, -screenCoord.up, 0, screenCoord.right, -screenCoord.up, 0, screenCoord.right, screenCoord.down, 0, -screenCoord.left, screenCoord.down, 0 }; GLuint rectVBO[3]; glGenBuffers(1, &rectVBO[0]); glBindBuffer(GL_ARRAY_BUFFER, rectVBO[0]); glBufferData(GL_ARRAY_BUFFER, sizeof(rectVertices), rectVertices, GL_STATIC_DRAW); float rectTexCoord[8] = { 0, 1, 1, 1, 1, 0, 0, 0 }; glGenBuffers(1, &rectVBO[1]); glBindBuffer(GL_ARRAY_BUFFER, rectVBO[1]); glBufferData(GL_ARRAY_BUFFER, sizeof(rectTexCoord), rectTexCoord, GL_STATIC_DRAW); unsigned int rectIndices[6] = { 0, 1, 2, 0, 2, 3 }; glGenBuffers(1, &rectVBO[2]); glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, rectVBO[2]); glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(rectIndices), rectIndices, GL_STATIC_DRAW); 
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0); glBindBuffer(GL_ARRAY_BUFFER, 0); // Initialize hit value float hit = 0.02f; // Initialize a boolean that will be used to stop the application’s loop and another one to pause/unpause rendering bool end = false; bool refresh = true; // SDL variable that will be used to store input events SDL_Event events; // Initialize time variables. They will be used to limit the number of frames rendered per second. // Frame counter unsigned int riftc = 0, zedc = 1; // Chronometer unsigned int rifttime = 0, zedtime = 0, zedFPS = 0; int time1 = 0, timePerFrame = 0; int frameRate = (int)(1000 / MAX_FPS); // This boolean is used to test if the application is focused bool isVisible = true; // Enable the shader glUseProgram(shader.getProgramId()); // Bind the Vertex Buffer Objects of the rectangle that displays ZED images // vertices glEnableVertexAttribArray(Shader::ATTRIB_VERTICES_POS); glBindBuffer(GL_ARRAY_BUFFER, rectVBO[0]); glVertexAttribPointer(Shader::ATTRIB_VERTICES_POS, 3, GL_FLOAT, GL_FALSE, 0, 0); // indices glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, rectVBO[2]); // texture coordinates glEnableVertexAttribArray(Shader::ATTRIB_TEXTURE2D_POS); glBindBuffer(GL_ARRAY_BUFFER, rectVBO[1]); glVertexAttribPointer(Shader::ATTRIB_TEXTURE2D_POS, 2, GL_FLOAT, GL_FALSE, 0, 0); // Main loop while (!end) { // Compute the time used to render the previous frame timePerFrame = SDL_GetTicks() - time1; // If the previous frame has been rendered too fast if (timePerFrame < frameRate) { // Pause the loop to have a max FPS equal to MAX_FPS SDL_Delay(frameRate - timePerFrame); timePerFrame = frameRate; } // Increment the ZED chronometer zedtime += timePerFrame; // If ZED chronometer reached 1 second if (zedtime > 1000) { zedFPS = zedc; zedc = 0; zedtime = 0; } // Increment the Rift chronometer and the Rift frame counter rifttime += timePerFrame; riftc++; // If Rift chronometer reached 200 milliseconds if (rifttime > 200) { // Display FPS std::cout << "\rRIFT 
FPS: " << 1000 / (rifttime / riftc) << " | ZED FPS: " << zedFPS; // Reset Rift chronometer rifttime = 0; // Reset Rift frame counter riftc = 0; } // Start frame chronometer time1 = SDL_GetTicks(); // While there is an event catched and not tested while (SDL_PollEvent(&events)) { // If a key is released if (events.type == SDL_KEYUP) { // If Q quit the application if (events.key.keysym.scancode == SDL_SCANCODE_Q) end = true; // If R reset the hit value else if (events.key.keysym.scancode == SDL_SCANCODE_R) hit = 0.0f; // If C pause/unpause rendering else if (events.key.keysym.scancode == SDL_SCANCODE_C) refresh = !refresh; } // If the mouse wheel is used if (events.type == SDL_MOUSEWHEEL) { // Increase or decrease hit value float s; events.wheel.y > 0 ? s = 1.0f : s = -1.0f; hit += 0.005f * s; } } // Get texture swap index where we must draw our frame GLuint curTexId; int curIndex; ovr_GetTextureSwapChainCurrentIndex(session, textureChain, &curIndex); ovr_GetTextureSwapChainBufferGL(session, textureChain, curIndex, &curTexId); // Call ovr_GetRenderDesc each frame to get the ovrEyeRenderDesc, as the returned values (e.g. HmdToEyeOffset) may change at runtime. 
eyeRenderDesc[0] = ovr_GetRenderDesc(session, ovrEye_Left, hmdDesc.DefaultEyeFov[0]); eyeRenderDesc[1] = ovr_GetRenderDesc(session, ovrEye_Right, hmdDesc.DefaultEyeFov[1]); hmdToEyeOffset[0] = eyeRenderDesc[0].HmdToEyeOffset; hmdToEyeOffset[1] = eyeRenderDesc[1].HmdToEyeOffset; // Get eye poses, feeding in correct IPD offset ovr_GetEyePoses(session, frameIndex, ovrTrue, hmdToEyeOffset, eyeRenderPose, &sensorSampleTime); // If the application is focused if (isVisible) { // If successful grab a new ZED image if (!zed->grab(sl::zed::SENSING_MODE::RAW, false, false)) { // Update the ZED frame counter zedc++; if (refresh) { #if OPENGL_GPU_INTEROP sl::zed::Mat m = zed->retrieveImage_gpu(sl::zed::SIDE::LEFT); cudaArray_t arrIm; cudaGraphicsMapResources(1, &cimg_L, 0); cudaGraphicsSubResourceGetMappedArray(&arrIm, cimg_L, 0, 0); cudaMemcpy2DToArray(arrIm, 0, 0, m.data, m.step, zedWidth * 4, zedHeight, cudaMemcpyDeviceToDevice); cudaGraphicsUnmapResources(1, &cimg_L, 0); m = zed->retrieveImage_gpu(sl::zed::SIDE::RIGHT); cudaGraphicsMapResources(1, &cimg_R, 0); cudaGraphicsSubResourceGetMappedArray(&arrIm, cimg_R, 0, 0); cudaMemcpy2DToArray(arrIm, 0, 0, m.data, m.step, zedWidth * 4, zedHeight, cudaMemcpyDeviceToDevice); // *4 = 4 channels * 1 bytes (uint) cudaGraphicsUnmapResources(1, &cimg_R, 0); #endif // Bind the frame buffer glBindFramebuffer(GL_FRAMEBUFFER, fboID); // Set its color layer 0 as the current swap texture glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, curTexId, 0); // Set its depth layer as our depth buffer glFramebufferTexture2D(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_TEXTURE_2D, depthBuffID, 0); // Clear the frame buffer glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); glClearColor(0, 0, 0, 1); // Render for each Oculus eye the equivalent ZED image for (int eye = 0; eye < 2; eye++) { // Set the left or right vertical half of the buffer as the viewport glViewport(eye == ovrEye_Left ? 
0 : bufferSize.w / 2, 0, bufferSize.w / 2, bufferSize.h); // Bind the left or right ZED image glBindTexture(GL_TEXTURE_2D, eye == ovrEye_Left ? zedTextureID_L : zedTextureID_R); #if !OPENGL_GPU_INTEROP glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, zedWidth, zedHeight, 0, GL_BGRA, GL_UNSIGNED_BYTE, zed->retrieveImage(eye == ovrEye_Left ? sl::zed::SIDE::LEFT : sl::zed::SIDE::RIGHT).data); #endif // Bind the hit value glUniform1f(glGetUniformLocation(shader.getProgramId(), "hit"), eye == ovrEye_Left ? hit : -hit); // Bind the isLeft value glUniform1ui(glGetUniformLocation(shader.getProgramId(), "isLeft"), eye == ovrEye_Left ? 1U : 0U); // Draw the ZED image glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0); } // Avoids an error when calling SetAndClearRenderSurface during next iteration. // Without this, during the next while loop iteration SetAndClearRenderSurface // would bind a framebuffer with an invalid COLOR_ATTACHMENT0 because the texture ID // associated with COLOR_ATTACHMENT0 had been unlocked by calling wglDXUnlockObjectsNV. glBindFramebuffer(GL_FRAMEBUFFER, fboID); glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, 0, 0); glFramebufferTexture2D(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_TEXTURE_2D, 0, 0); // Commit changes to the textures so they get picked up frame ovr_CommitTextureSwapChain(session, textureChain); } // Do not forget to increment the frameIndex! frameIndex++; } } /* Note: Even if we don't ask to refresh the framebuffer or if the Camera::grab() doesn't catch a new frame, we have to submit an image to the Rift; it needs 75Hz refresh. Else there will be jumbs, black frames and/or glitches in the headset. 
*/ ovrLayerEyeFov ld; ld.Header.Type = ovrLayerType_EyeFov; // Tell to the Oculus compositor that our texture origin is at the bottom left ld.Header.Flags = ovrLayerFlag_TextureOriginAtBottomLeft; // Because OpenGL | Disable head tracking // Set the Oculus layer eye field of view for each view for (int eye = 0; eye < 2; ++eye) { // Set the color texture as the current swap texture ld.ColorTexture[eye] = textureChain; // Set the viewport as the right or left vertical half part of the color texture ld.Viewport[eye] = OVR::Recti(eye == ovrEye_Left ? 0 : bufferSize.w / 2, 0, bufferSize.w / 2, bufferSize.h); // Set the field of view ld.Fov[eye] = hmdDesc.DefaultEyeFov[eye]; // Set the pose matrix ld.RenderPose[eye] = eyeRenderPose[eye]; } ld.SensorSampleTime = sensorSampleTime; ovrLayerHeader* layers = &ld.Header; // Submit the frame to the Oculus compositor // which will display the frame in the Oculus headset result = ovr_SubmitFrame(session, frameIndex, nullptr, &layers, 1); if (!OVR_SUCCESS(result)) { std::cout << "ERROR: failed to submit frame" << std::endl; glDeleteBuffers(3, rectVBO); ovr_DestroyTextureSwapChain(session, textureChain); ovr_DestroyMirrorTexture(session, mirrorTexture); ovr_Destroy(session); ovr_Shutdown(); SDL_GL_DeleteContext(glContext); SDL_DestroyWindow(window); SDL_Quit(); delete zed; return -1; } if (result == ovrSuccess && !isVisible) { std::cout << "The application is now shown in the headset." 
<< std::endl; } isVisible = (result == ovrSuccess); // This is not really needed for this application but it may be usefull for an more advanced application ovrSessionStatus sessionStatus; ovr_GetSessionStatus(session, &sessionStatus); if (sessionStatus.ShouldRecenter) { std::cout << "Recenter Tracking asked by Session" << std::endl; ovr_RecenterTrackingOrigin(session); } // Copy the frame to the mirror buffer // which will be drawn in the SDL2 image glBindFramebuffer(GL_READ_FRAMEBUFFER, mirrorFBOID); glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0); GLint w = winWidth; GLint h = winHeight; glBlitFramebuffer(0, h, w, 0, 0, 0, w, h, GL_COLOR_BUFFER_BIT, GL_NEAREST); glBindFramebuffer(GL_READ_FRAMEBUFFER, 0); // Swap the SDL2 window SDL_GL_SwapWindow(window); } // Disable all OpenGL buffer glDisableVertexAttribArray(Shader::ATTRIB_TEXTURE2D_POS); glDisableVertexAttribArray(Shader::ATTRIB_VERTICES_POS); glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0); glBindBuffer(GL_ARRAY_BUFFER, 0); glBindTexture(GL_TEXTURE_2D, 0); glUseProgram(0); glBindVertexArray(0); // Delete the Vertex Buffer Objects of the rectangle glDeleteBuffers(3, rectVBO); // Delete SDL, OpenGL, Oculus and ZED context ovr_DestroyTextureSwapChain(session, textureChain); ovr_DestroyMirrorTexture(session, mirrorTexture); ovr_Destroy(session); ovr_Shutdown(); SDL_GL_DeleteContext(glContext); SDL_DestroyWindow(window); SDL_Quit(); delete zed; // quit return 0; }
// return true to retry later (e.g. after display lost) static bool MainLoop(bool retryCreate) { // Initialize these to nullptr here to handle device lost failures cleanly ovrMirrorTexture mirrorTexture = nullptr; OculusEyeTexture* pEyeRenderTexture[2] = { nullptr, nullptr }; Scene* roomScene = nullptr; Camera* mainCam = nullptr; ovrMirrorTextureDesc mirrorDesc = {}; ovrSession session; ovrGraphicsLuid luid; ovrResult result = ovr_Create(&session, &luid); if (!OVR_SUCCESS(result)) return retryCreate; ovrHmdDesc hmdDesc = ovr_GetHmdDesc(session); // Setup Device and Graphics // Note: the mirror window can be any size, for this sample we use 1/2 the HMD resolution if (!DIRECTX.InitDevice(hmdDesc.Resolution.w / 2, hmdDesc.Resolution.h / 2, reinterpret_cast<LUID*>(&luid))) goto Done; // Make the eye render buffers (caution if actual size < requested due to HW limits). ovrRecti eyeRenderViewport[2]; for (int eye = 0; eye < 2; ++eye) { ovrSizei idealSize = ovr_GetFovTextureSize(session, (ovrEyeType)eye, hmdDesc.DefaultEyeFov[eye], 1.0f); pEyeRenderTexture[eye] = new OculusEyeTexture(); if (!pEyeRenderTexture[eye]->Init(session, idealSize.w, idealSize.h, true)) { if (retryCreate) goto Done; FATALERROR("Failed to create eye texture."); } eyeRenderViewport[eye].Pos.x = 0; eyeRenderViewport[eye].Pos.y = 0; eyeRenderViewport[eye].Size = idealSize; if (!pEyeRenderTexture[eye]->TextureChain) { if (retryCreate) goto Done; FATALERROR("Failed to create texture."); } } // Create a mirror to see on the monitor. 
mirrorDesc.Format = OVR_FORMAT_R8G8B8A8_UNORM_SRGB; mirrorDesc.Width = DIRECTX.WinSizeW; mirrorDesc.Height = DIRECTX.WinSizeH; result = ovr_CreateMirrorTextureDX(session, DIRECTX.CommandQueue, &mirrorDesc, &mirrorTexture); if (!OVR_SUCCESS(result)) { if (retryCreate) goto Done; FATALERROR("Failed to create mirror texture."); } // Create the room model roomScene = new Scene(false); // Create camera mainCam = new Camera(XMVectorSet(0.0f, 1.6f, 5.0f, 0), XMQuaternionIdentity()); // Setup VR components, filling out description ovrEyeRenderDesc eyeRenderDesc[2]; eyeRenderDesc[0] = ovr_GetRenderDesc(session, ovrEye_Left, hmdDesc.DefaultEyeFov[0]); eyeRenderDesc[1] = ovr_GetRenderDesc(session, ovrEye_Right, hmdDesc.DefaultEyeFov[1]); long long frameIndex = 0; bool drawMirror = true; DIRECTX.InitFrame(drawMirror); // Main loop while (DIRECTX.HandleMessages()) { ovrSessionStatus sessionStatus; ovr_GetSessionStatus(session, &sessionStatus); if (sessionStatus.ShouldQuit) { // Because the application is requested to quit, should not request retry retryCreate = false; break; } if (sessionStatus.ShouldRecenter) ovr_RecenterTrackingOrigin(session); if (sessionStatus.IsVisible) { XMVECTOR forward = XMVector3Rotate(XMVectorSet(0, 0, -0.05f, 0), mainCam->GetRotVec()); XMVECTOR right = XMVector3Rotate(XMVectorSet(0.05f, 0, 0, 0), mainCam->GetRotVec()); XMVECTOR mainCamPos = mainCam->GetPosVec(); XMVECTOR mainCamRot = mainCam->GetRotVec(); if (DIRECTX.Key['W'] || DIRECTX.Key[VK_UP]) mainCamPos = XMVectorAdd( mainCamPos, forward); if (DIRECTX.Key['S'] || DIRECTX.Key[VK_DOWN]) mainCamPos = XMVectorSubtract(mainCamPos, forward); if (DIRECTX.Key['D']) mainCamPos = XMVectorAdd( mainCamPos, right); if (DIRECTX.Key['A']) mainCamPos = XMVectorSubtract(mainCamPos, right); static float Yaw = 0; if (DIRECTX.Key[VK_LEFT]) mainCamRot = XMQuaternionRotationRollPitchYaw(0, Yaw += 0.02f, 0); if (DIRECTX.Key[VK_RIGHT]) mainCamRot = XMQuaternionRotationRollPitchYaw(0, Yaw -= 0.02f, 0); 
mainCam->SetPosVec(mainCamPos); mainCam->SetRotVec(mainCamRot); // Animate the cube static float cubeClock = 0; roomScene->Models[0]->Pos = XMFLOAT3(9 * sin(cubeClock), 3, 9 * cos(cubeClock += 0.015f)); // Get both eye poses simultaneously, with IPD offset already included. ovrPosef EyeRenderPose[2]; ovrVector3f HmdToEyeOffset[2] = { eyeRenderDesc[0].HmdToEyeOffset, eyeRenderDesc[1].HmdToEyeOffset }; double sensorSampleTime; // sensorSampleTime is fed into the layer later ovr_GetEyePoses(session, frameIndex, ovrTrue, HmdToEyeOffset, EyeRenderPose, &sensorSampleTime); // Render Scene to Eye Buffers for (int eye = 0; eye < 2; ++eye) { DIRECTX.SetActiveContext(eye == 0 ? DrawContext_EyeRenderLeft : DrawContext_EyeRenderRight); DIRECTX.SetActiveEye(eye); CD3DX12_RESOURCE_BARRIER resBar = CD3DX12_RESOURCE_BARRIER::Transition(pEyeRenderTexture[eye]->GetD3DResource(), D3D12_RESOURCE_STATE_PIXEL_SHADER_RESOURCE, D3D12_RESOURCE_STATE_RENDER_TARGET); DIRECTX.CurrentFrameResources().CommandLists[DIRECTX.ActiveContext]->ResourceBarrier(1, &resBar); DIRECTX.SetAndClearRenderTarget(pEyeRenderTexture[eye]->GetRtv(), pEyeRenderTexture[eye]->GetDsv()); DIRECTX.SetViewport((float)eyeRenderViewport[eye].Pos.x, (float)eyeRenderViewport[eye].Pos.y, (float)eyeRenderViewport[eye].Size.w, (float)eyeRenderViewport[eye].Size.h); //Get the pose information in XM format XMVECTOR eyeQuat = XMVectorSet(EyeRenderPose[eye].Orientation.x, EyeRenderPose[eye].Orientation.y, EyeRenderPose[eye].Orientation.z, EyeRenderPose[eye].Orientation.w); XMVECTOR eyePos = XMVectorSet(EyeRenderPose[eye].Position.x, EyeRenderPose[eye].Position.y, EyeRenderPose[eye].Position.z, 0); // Get view and projection matrices for the Rift camera Camera finalCam(XMVectorAdd(mainCamPos, XMVector3Rotate(eyePos, mainCamRot)), XMQuaternionMultiply(eyeQuat, mainCamRot)); XMMATRIX view = finalCam.GetViewMatrix(); ovrMatrix4f p = ovrMatrix4f_Projection(eyeRenderDesc[eye].Fov, 0.2f, 1000.0f, ovrProjection_None); XMMATRIX proj = 
XMMatrixSet(p.M[0][0], p.M[1][0], p.M[2][0], p.M[3][0], p.M[0][1], p.M[1][1], p.M[2][1], p.M[3][1], p.M[0][2], p.M[1][2], p.M[2][2], p.M[3][2], p.M[0][3], p.M[1][3], p.M[2][3], p.M[3][3]); XMMATRIX prod = XMMatrixMultiply(view, proj); roomScene->Render(&prod, 1, 1, 1, 1, true); resBar = CD3DX12_RESOURCE_BARRIER::Transition(pEyeRenderTexture[eye]->GetD3DResource(), D3D12_RESOURCE_STATE_RENDER_TARGET, D3D12_RESOURCE_STATE_PIXEL_SHADER_RESOURCE); DIRECTX.CurrentFrameResources().CommandLists[DIRECTX.ActiveContext]->ResourceBarrier(1, &resBar); // Commit rendering to the swap chain pEyeRenderTexture[eye]->Commit(); // kick off eye render command lists before ovr_SubmitFrame() DIRECTX.SubmitCommandList(DIRECTX.ActiveContext); } // Initialize our single full screen Fov layer. ovrLayerEyeFov ld = {}; ld.Header.Type = ovrLayerType_EyeFov; ld.Header.Flags = 0; for (int eye = 0; eye < 2; ++eye) { ld.ColorTexture[eye] = pEyeRenderTexture[eye]->TextureChain; ld.Viewport[eye] = eyeRenderViewport[eye]; ld.Fov[eye] = hmdDesc.DefaultEyeFov[eye]; ld.RenderPose[eye] = EyeRenderPose[eye]; ld.SensorSampleTime = sensorSampleTime; } ovrLayerHeader* layers = &ld.Header; result = ovr_SubmitFrame(session, frameIndex, nullptr, &layers, 1); // exit the rendering loop if submit returns an error, will retry on ovrError_DisplayLost if (!OVR_SUCCESS(result)) goto Done; frameIndex++; } if (drawMirror) { DIRECTX.SetActiveContext(DrawContext_Final); DIRECTX.SetViewport(0.0f, 0.0f, (float)hmdDesc.Resolution.w / 2, (float)hmdDesc.Resolution.h / 2); // Render mirror ID3D12Resource* mirrorTexRes = nullptr; ovr_GetMirrorTextureBufferDX(session, mirrorTexture, IID_PPV_ARGS(&mirrorTexRes)); //DIRECTX.SetAndClearRenderTarget(DIRECTX.CurrentFrameResources().SwapChainRtvHandle, nullptr, 1.0f, 0.5f, 0.0f, 1.0f); CD3DX12_RESOURCE_BARRIER preMirrorBlitBar[] = { CD3DX12_RESOURCE_BARRIER::Transition(DIRECTX.CurrentFrameResources().SwapChainBuffer, D3D12_RESOURCE_STATE_RENDER_TARGET, 
D3D12_RESOURCE_STATE_COPY_DEST), CD3DX12_RESOURCE_BARRIER::Transition(mirrorTexRes, D3D12_RESOURCE_STATE_RENDER_TARGET, D3D12_RESOURCE_STATE_COPY_SOURCE) }; // Indicate that the back buffer will now be copied into DIRECTX.CurrentFrameResources().CommandLists[DIRECTX.ActiveContext]->ResourceBarrier(ARRAYSIZE(preMirrorBlitBar), preMirrorBlitBar); DIRECTX.CurrentFrameResources().CommandLists[DIRECTX.ActiveContext]->CopyResource(DIRECTX.CurrentFrameResources().SwapChainBuffer, mirrorTexRes); CD3DX12_RESOURCE_BARRIER resBar = CD3DX12_RESOURCE_BARRIER::Transition(mirrorTexRes, D3D12_RESOURCE_STATE_COPY_SOURCE, D3D12_RESOURCE_STATE_RENDER_TARGET); DIRECTX.CurrentFrameResources().CommandLists[DIRECTX.ActiveContext]->ResourceBarrier(1, &resBar); } DIRECTX.SubmitCommandListAndPresent(drawMirror); } // Release resources Done: delete mainCam; delete roomScene; if (mirrorTexture) ovr_DestroyMirrorTexture(session, mirrorTexture); for (int eye = 0; eye < 2; ++eye) { delete pEyeRenderTexture[eye]; } DIRECTX.ReleaseDevice(); ovr_Destroy(session); // Retry on ovrError_DisplayLost return retryCreate || (result == ovrError_DisplayLost); }
// Re-center HMD tracking and reset the cached render pose to identity.
void OculusBaseDisplayPlugin::resetSensors()
{
    ovr_RecenterTrackingOrigin(_session);
    // identity pose until the next frame's tracking data arrives
    _currentRenderFrameInfo.renderPose = glm::mat4();
}
// AntTweakBar button callback: re-centers the HMD tracking origin.
// The client-data pointer is unused.
static void TW_CALL RecenterPoseCB(void*)
{
    ovr_RecenterTrackingOrigin(g_session);
}
void joystick_XboxController( int, // joyidx const float* pAxisStates, int numAxes, const unsigned char* pButtonStates, int numButtons, const char* pLastButtonStates) { //ASSERT(numAxes == 5); //ASSERT(numButtons == 14); if (numAxes != 5) return; if (numButtons != 14) return; // Xbox controller layout in glfw: // numAxes 5, numButtons 14 // 0 A (down position) // 1 B (right position) // 2 X (left position) // 3 Y (up position) // 4 L bumper // 5 R bumper // 6 Back (left center) // 7 Start (right center) // 8 Left stick push // 9 Right stick push // 10 Dpad Up // 11 Dpad right // 12 Dpad down // 13 Dpad left // Axis 0 1 Left stick x y // Axis 2 triggers, left positive right negative // Axis 3 4 right stick y x glm::vec3 joystickMove(0.0f, 0.0f, 0.0f); // Xbox controller Left stick controls movement if (numAxes >= 2) { const float x_move = pAxisStates[0]; const float y_move = pAxisStates[1]; const glm::vec3 forward(0.f, 0.f, -1.f); const glm::vec3 right(1.f, 0.f, 0.f); const float deadzone = 0.5f; if (fabs(x_move) > deadzone) joystickMove += x_move * right; if (fabs(y_move) > deadzone) joystickMove -= y_move * forward; } if (pButtonStates[0] == GLFW_PRESS) // A button joystickMove += glm::vec3(0.f, 1.f, 0.f); if (pButtonStates[1] == GLFW_PRESS) // B button joystickMove += glm::vec3(0.f, -1.f, 0.f); float mag = 1.f; if (numAxes > 2) { // Xbox left and right analog triggers control speed mag = pow(10.f, pAxisStates[2]); } m_joystickMove = mag * joystickMove; // Right stick controls yaw ///@todo Pitch, Roll(instant nausea!) 
if (numAxes > 3) { float x_move = pAxisStates[4]; const glm::vec3 up(0.f, 1.f, 0.f); const float deadzone = 0.2f; if (fabs(x_move) < deadzone) x_move = 0.f; m_joystickYaw = 0.75f * static_cast<float>(x_move); } // Check for recent button pushes const float f = 0.9f; for (int i = 0; i<numButtons; ++i) { const bool pressed = (pButtonStates[i] == GLFW_PRESS) && (pLastButtonStates[i] != GLFW_PRESS); const bool released = (pButtonStates[i] != GLFW_PRESS) && (pLastButtonStates[i] == GLFW_PRESS); if (pressed) { if (i == 13) // Dpad left { m_fboScale *= f; m_fboScale = std::max(.05f, m_fboScale); } if (i == 11) // Dpad right { m_fboScale /= f; m_fboScale = std::min(1.f, m_fboScale); } if (i == 10) // Dpad up { m_cinemaScope += 0.1f; m_cinemaScope = std::min(.95f, m_cinemaScope); } if (i == 12) // Dpad down { m_cinemaScope -= 0.1f; m_cinemaScope = std::max(0.f, m_cinemaScope); } if (i == 4) // Left Bumper { ovr_RecenterTrackingOrigin(g_session); } if (i == 5) // Right Bumper { m_chassisPos = glm::vec3(0.f, 1.f, 0.f); } if (i == 7) // Start { g_gallery.ToggleShaderWorld(); } if (i == 3) // Y button { g_tweakbarQuad.m_showQuadInWorld = !g_tweakbarQuad.m_showQuadInWorld; } } if (pressed || released) { if (i == 2) // X button { g_tweakbarQuad.MouseClick(pressed ? 1 : 0); } } } }
/// GLFW keyboard callback: records per-key state for the polling section
/// below, fires one-shot actions on press, and updates keyboard-driven
/// movement (m_keyboardMove) and yaw (m_keyboardYaw).
void keyboard(GLFWwindow* pWindow, int key, int codes, int action, int mods)
{
    (void)pWindow;
    (void)codes;

    // Remember the latest action for every valid key code.
    if ((key > -1) && (key <= GLFW_KEY_LAST))
        m_keyStates[key] = action;

    // Actions that respond to both press and release edges.
    if (key == GLFW_KEY_BACKSLASH)
    {
        if (action == GLFW_PRESS)
            g_tweakbarQuad.MouseClick(1);
        else if (action == GLFW_RELEASE)
            g_tweakbarQuad.MouseClick(0);
    }
    else if (key == GLFW_KEY_SLASH)
    {
        if (action == GLFW_PRESS)
            g_tweakbarQuad.SetHoldingFlag(m_eyePoses[0], true);
        else if (action == GLFW_RELEASE)
            g_tweakbarQuad.SetHoldingFlag(m_eyePoses[0], false);
    }

    // One-shot actions, fired only on the press edge.
    const float yawIncr = 0.3f;
    if (action == GLFW_PRESS)
    {
        if (key == GLFW_KEY_1)
        {
            if (m_snapTurn == true)
                m_chassisYaw -= yawIncr;
        }
        else if (key == GLFW_KEY_3)
        {
            if (m_snapTurn == true)
                m_chassisYaw += yawIncr;
        }
        else if (key == GLFW_KEY_SPACE)
        {
            ovr_RecenterTrackingOrigin(g_session);
        }
        else if (key == GLFW_KEY_R)
        {
            m_chassisPos = glm::vec3(0.f, 1.f, 0.f);
        }
        else if (key == GLFW_KEY_BACKSPACE)
        {
            TogglePerfHud();
        }
        else if (key == GLFW_KEY_TAB)
        {
            g_tweakbarQuad.m_showQuadInWorld = !g_tweakbarQuad.m_showQuadInWorld;
        }
        else if (key == GLFW_KEY_ENTER)
        {
            g_gallery.ToggleShaderWorld();
        }
        else if (key == GLFW_KEY_ESCAPE)
        {
            glfwSetWindowShouldClose(g_pMirrorWindow, 1);
        }
    }

    // Handle keyboard movement(WASD keys) by polling the recorded key states.
    const glm::vec3 forward(0.f, 0.f, -1.f);
    const glm::vec3 up(0.f, 1.f, 0.f);
    const glm::vec3 right(1.f, 0.f, 0.f);

    const auto held = [&](int k) { return m_keyStates[k] != GLFW_RELEASE; };

    glm::vec3 moveAccum(0.0f, 0.0f, 0.0f);
    float yawAccum = 0.f;
    // WASD and arrows both contribute; holding e.g. W and Up together
    // intentionally doubles the forward contribution, as before.
    if (held('W'))              moveAccum += forward;
    if (held('S'))              moveAccum -= forward;
    if (held('A'))              moveAccum -= right;
    if (held('D'))              moveAccum += right;
    if (held('Q'))              moveAccum -= up;
    if (held('E'))              moveAccum += up;
    if (held(GLFW_KEY_UP))      moveAccum += forward;
    if (held(GLFW_KEY_DOWN))    moveAccum -= forward;
    if (held(GLFW_KEY_LEFT))    moveAccum -= right;
    if (held(GLFW_KEY_RIGHT))   moveAccum += right;
    if (held('1'))              yawAccum -= 1.f;
    if (held('3'))              yawAccum += 1.f;

    // Shift slows movement down, Control speeds it up.
    float speed = 1.0f;
    if (held(GLFW_KEY_LEFT_SHIFT))
        speed *= 0.1f;
    if (held(GLFW_KEY_LEFT_CONTROL))
        speed *= 10.0f;

    m_keyboardMove = speed * moveAccum;
    m_keyboardYaw = speed * yawAccum;
}
// Display to an HMD with OVR SDK backend. void displayHMD() { ovrSessionStatus sessionStatus; ovr_GetSessionStatus(g_session, &sessionStatus); if (sessionStatus.HmdPresent == false) { displayMonitor(); return; } const ovrHmdDesc& hmdDesc = m_Hmd; double sensorSampleTime; // sensorSampleTime is fed into the layer later if (g_hmdVisible) { // Call ovr_GetRenderDesc each frame to get the ovrEyeRenderDesc, as the returned values (e.g. HmdToEyeOffset) may change at runtime. ovrEyeRenderDesc eyeRenderDesc[2]; eyeRenderDesc[0] = ovr_GetRenderDesc(g_session, ovrEye_Left, hmdDesc.DefaultEyeFov[0]); eyeRenderDesc[1] = ovr_GetRenderDesc(g_session, ovrEye_Right, hmdDesc.DefaultEyeFov[1]); // Get eye poses, feeding in correct IPD offset ovrVector3f HmdToEyeOffset[2] = { eyeRenderDesc[0].HmdToEyeOffset, eyeRenderDesc[1].HmdToEyeOffset }; #if 0 // Get both eye poses simultaneously, with IPD offset already included. double displayMidpointSeconds = ovr_GetPredictedDisplayTime(g_session, 0); ovrTrackingState hmdState = ovr_GetTrackingState(g_session, displayMidpointSeconds, ovrTrue); ovr_CalcEyePoses(hmdState.HeadPose.ThePose, HmdToEyeOffset, m_eyePoses); #else ovr_GetEyePoses(g_session, g_frameIndex, ovrTrue, HmdToEyeOffset, m_eyePoses, &sensorSampleTime); #endif storeHmdPose(m_eyePoses[0]); for (int eye = 0; eye < 2; ++eye) { const FBO& swapfbo = m_swapFBO[eye]; const ovrTextureSwapChain& chain = g_textureSwapChain[eye]; int curIndex; ovr_GetTextureSwapChainCurrentIndex(g_session, chain, &curIndex); GLuint curTexId; ovr_GetTextureSwapChainBufferGL(g_session, chain, curIndex, &curTexId); glBindFramebuffer(GL_FRAMEBUFFER, swapfbo.id); glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, curTexId, 0); glViewport(0, 0, swapfbo.w, swapfbo.h); glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); glEnable(GL_FRAMEBUFFER_SRGB); { glClearColor(0.3f, 0.3f, 0.3f, 0.f); glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); const ovrSizei& downSize = 
ovr_GetFovTextureSize(g_session, ovrEyeType(eye), hmdDesc.DefaultEyeFov[eye], m_fboScale); ovrRecti vp = { 0, 0, downSize.w, downSize.h }; const int texh = swapfbo.h; vp.Pos.y = (texh - vp.Size.h) / 2; glViewport(vp.Pos.x, vp.Pos.y, vp.Size.w, vp.Size.h); // Cinemascope - letterbox bars scissoring off pixels above and below vp center const float hc = .5f * m_cinemaScope; const int scisPx = static_cast<int>(hc * static_cast<float>(vp.Size.h)); ovrRecti sp = vp; sp.Pos.y += scisPx; sp.Size.h -= 2 * scisPx; glScissor(sp.Pos.x, sp.Pos.y, sp.Size.w, sp.Size.h); glEnable(GL_SCISSOR_TEST); glEnable(GL_DEPTH_TEST); // Render the scene for the current eye const ovrPosef& eyePose = m_eyePoses[eye]; const glm::mat4 mview = makeWorldToChassisMatrix() * makeMatrixFromPose(eyePose, m_headSize); const ovrMatrix4f ovrproj = ovrMatrix4f_Projection(hmdDesc.DefaultEyeFov[eye], 0.2f, 1000.0f, ovrProjection_None); const glm::mat4 proj = makeGlmMatrixFromOvrMatrix(ovrproj); g_pScene->RenderForOneEye(glm::value_ptr(glm::inverse(mview)), glm::value_ptr(proj)); const ovrTextureSwapChain& chain = g_textureSwapChain[eye]; const ovrResult commitres = ovr_CommitTextureSwapChain(g_session, chain); if (!OVR_SUCCESS(commitres)) { LOG_ERROR("ovr_CommitTextureSwapChain returned %d", commitres); return; } } glDisable(GL_SCISSOR_TEST); // Grab a copy of the left eye's undistorted render output for presentation // to the desktop window instead of the barrel distorted mirror texture. // This blit, while cheap, could cost some framerate to the HMD. // An over-the-shoulder view is another option, at a greater performance cost. 
if (0) { if (eye == ovrEyeType::ovrEye_Left) { BlitLeftEyeRenderToUndistortedMirrorTexture(); } } glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, 0, 0); glBindFramebuffer(GL_FRAMEBUFFER, 0); } } std::vector<const ovrLayerHeader*> layerHeaders; { // Do distortion rendering, Present and flush/sync ovrLayerEyeFov ld; ld.Header.Type = ovrLayerType_EyeFov; ld.Header.Flags = ovrLayerFlag_TextureOriginAtBottomLeft; // Because OpenGL. for (int eye = 0; eye < 2; ++eye) { const FBO& swapfbo = m_swapFBO[eye]; const ovrTextureSwapChain& chain = g_textureSwapChain[eye]; ld.ColorTexture[eye] = chain; const ovrSizei& downSize = ovr_GetFovTextureSize(g_session, ovrEyeType(eye), hmdDesc.DefaultEyeFov[eye], m_fboScale); ovrRecti vp = { 0, 0, downSize.w, downSize.h }; const int texh = swapfbo.h; vp.Pos.y = (texh - vp.Size.h) / 2; ld.Viewport[eye] = vp; ld.Fov[eye] = hmdDesc.DefaultEyeFov[eye]; ld.RenderPose[eye] = m_eyePoses[eye]; ld.SensorSampleTime = sensorSampleTime; } layerHeaders.push_back(&ld.Header); // Submit layers to HMD for display ovrLayerQuad ql; if (g_tweakbarQuad.m_showQuadInWorld) { ql.Header.Type = ovrLayerType_Quad; ql.Header.Flags = ovrLayerFlag_TextureOriginAtBottomLeft; // Because OpenGL. 
ql.ColorTexture = g_tweakbarQuad.m_swapChain; ovrRecti vp; vp.Pos.x = 0; vp.Pos.y = 0; vp.Size.w = 600; ///@todo vp.Size.h = 600; ///@todo ql.Viewport = vp; ql.QuadPoseCenter = g_tweakbarQuad.m_QuadPoseCenter; ql.QuadSize = { 1.f, 1.f }; ///@todo Pass in g_tweakbarQuad.SetHmdEyeRay(m_eyePoses[ovrEyeType::ovrEye_Left]); // Writes to m_layerQuad.QuadPoseCenter g_tweakbarQuad.DrawToQuad(); layerHeaders.push_back(&ql.Header); } } #if 0 ovrViewScaleDesc viewScaleDesc; viewScaleDesc.HmdToEyeOffset[0] = m_eyeOffsets[0]; viewScaleDesc.HmdToEyeOffset[1] = m_eyeOffsets[1]; viewScaleDesc.HmdSpaceToWorldScaleInMeters = 1.f; #endif const ovrResult result = ovr_SubmitFrame(g_session, g_frameIndex, nullptr, &layerHeaders[0], layerHeaders.size()); if (result == ovrSuccess) { g_hmdVisible = true; } else if (result == ovrSuccess_NotVisible) { g_hmdVisible = false; ///@todo Enter a lower-power, polling "no focus/HMD not worn" mode } else if (result == ovrError_DisplayLost) { LOG_INFO("ovr_SubmitFrame returned ovrError_DisplayLost"); g_hmdVisible = false; ///@todo Tear down textures and session and re-create } else { LOG_INFO("ovr_SubmitFrame returned %d", result); //g_hmdVisible = false; } // Handle OVR session events ovr_GetSessionStatus(g_session, &sessionStatus); if (sessionStatus.ShouldQuit) { glfwSetWindowShouldClose(g_pMirrorWindow, 1); } if (sessionStatus.ShouldRecenter) { ovr_RecenterTrackingOrigin(g_session); } // Blit mirror texture to monitor window if (g_hmdVisible) { glViewport(0, 0, g_mirrorWindowSz.x, g_mirrorWindowSz.y); const FBO& srcFBO = m_mirrorFBO; glBindFramebuffer(GL_READ_FRAMEBUFFER, srcFBO.id); glBlitFramebuffer( 0, srcFBO.h, srcFBO.w, 0, 0, 0, g_mirrorWindowSz.x, g_mirrorWindowSz.y, GL_COLOR_BUFFER_BIT, GL_NEAREST); glBindFramebuffer(GL_READ_FRAMEBUFFER, 0); } else { displayMonitor(); } ++g_frameIndex; #ifdef USE_ANTTWEAKBAR if (g_tweakbarQuad.m_showQuadInWorld) { TwDraw(); } #endif }