// Cycle the performance HUD to its next mode, wrapping back to Off after the
// last mode, and push the new mode into the LibOVR runtime via ovr_SetInt.
void TogglePerfHud()
{
    // BUGFIX: the original "++phm %= count" modifies phm twice in one
    // expression - unsequenced (undefined behavior) before C++17, and hard
    // to read in any standard. Use an explicit modular increment instead.
    const int next = (static_cast<int>(m_perfHudMode) + 1) % static_cast<int>(ovrPerfHud_Count);
    m_perfHudMode = static_cast<ovrPerfHudMode>(next);
    ovr_SetInt(g_session, OVR_PERF_HUD_MODE, m_perfHudMode);
}
void MainLoop() { Layer[0] = new VRLayer(HMD); while (HandleMessages()) { static float clock = 0; ++clock; // Toggle the debug HUD on and off, and which mode if (DIRECTX.Key['0']) ovr_SetInt(HMD, OVR_DEBUG_HUD_STEREO_MODE, int(ovrDebugHudStereo_Off)); if (DIRECTX.Key['1']) ovr_SetInt(HMD, OVR_DEBUG_HUD_STEREO_MODE, int(ovrDebugHudStereo_CrosshairAtInfinity)); if (DIRECTX.Key['2']) ovr_SetInt(HMD, OVR_DEBUG_HUD_STEREO_MODE, int(ovrDebugHudStereo_Quad)); // Vary some of the attributes of the DebugHUD, when number keys are pressed. float guideSize[2] = {1, 1}; float guidePosition[3] = {0, 0, -1.50f}; float guideRotation[3] = {0, 0, 0}; float guideColorRGBA[4] = {1, 0.5f, 0, 1}; if (DIRECTX.Key['3']) guideSize[0] = 1 + 0.5f * sin(0.02f * clock); // Vary width if (DIRECTX.Key['4']) guidePosition[0] = 0.5f * sin(0.02f * clock); // Vary X position if (DIRECTX.Key['5']) guideRotation[0] = 0.5f * sin(0.02f * clock); // Vary yaw if (DIRECTX.Key['6']) guideColorRGBA[1] = 0.5f + 0.5f * sin(0.1f * clock); // Vary green // Write in the new attributes into the SDK ovr_SetFloatArray(HMD, OVR_DEBUG_HUD_STEREO_GUIDE_SIZE, guideSize, 2); ovr_SetFloatArray(HMD, OVR_DEBUG_HUD_STEREO_GUIDE_POSITION, guidePosition, 3); ovr_SetFloatArray(HMD, OVR_DEBUG_HUD_STEREO_GUIDE_YAWPITCHROLL, guideRotation, 3); ovr_SetFloatArray(HMD, OVR_DEBUG_HUD_STEREO_GUIDE_COLOR, guideColorRGBA, 4); ActionFromInput(); Layer[0]->GetEyePoses(); for (int eye = 0; eye < 2; ++eye) { Layer[0]->RenderSceneToEyeBuffer(MainCam, RoomScene, eye); } Layer[0]->PrepareLayerHeader(); DistortAndPresent(1); } }
// Enable (or change) the runtime's performance HUD overlay.
// @param statsMode  ovrPerfHudMode value to display; ovrPerfHud_Off disables it.
// The HUD is only requested on real hardware; debug (headless) HMDs skip it.
void OculusVR::ShowPerfStats(ovrPerfHudMode statsMode)
{
    if (!IsDebugHMD())
    {
        // Use the SDK property-name constant instead of a raw "PerfHudMode"
        // string literal, consistent with the other ovr_SetInt call sites in
        // this codebase (OVR_PERF_HUD_MODE expands to the same string).
        ovr_SetInt(m_hmdSession, OVR_PERF_HUD_MODE, static_cast<int>(statsMode));
    }
    else
    {
        LOG_MESSAGE("Performance Hud not available for debug HMD.");
    }
}
/**
* Destructor.
* Turns the performance HUD off and destroys the Oculus session (if one was
* created), shuts the LibOVR runtime down, then releases the Win32 handles
* owned by this tracker.
***/
OculusTracker::~OculusTracker()
{
    if (m_hSession)
    {
        // set performance hud to zero
        ovr_SetInt(m_hSession, OVR_PERF_HUD_MODE, 0);
        ovr_Destroy(m_hSession);
    }
    // NOTE(review): ovr_Shutdown() runs unconditionally, even when m_hSession
    // is null - presumably ovr_Initialize() always succeeded earlier; confirm.
    ovr_Shutdown();
    // NOTE(review): if m_hFont is a GDI HFONT (and m_hBitmapControl a GDI
    // bitmap), the matching cleanup would be DeleteObject() rather than
    // CloseHandle() - confirm the actual handle types before changing.
    if (m_hBitmapControl) CloseHandle(m_hBitmapControl);
    if (m_hFont) CloseHandle(m_hFont);
}
// return true to retry later (e.g. after display lost) static bool MainLoop(bool retryCreate) { // Initialize these to nullptr here to handle device lost failures cleanly ovrMirrorTexture mirrorTexture = nullptr; OculusTexture * pEyeRenderTexture = nullptr; DepthBuffer * pEyeDepthBuffer = nullptr; Scene * roomScene = nullptr; Camera * mainCam = nullptr; ovrMirrorTextureDesc desc = {}; bool isVisible = true; long long frameIndex = 0; bool useInstancing = false; const int repeatDrawing = 1; ovrSession session; ovrGraphicsLuid luid; ovrResult result = ovr_Create(&session, &luid); if (!OVR_SUCCESS(result)) return retryCreate; ovrHmdDesc hmdDesc = ovr_GetHmdDesc(session); // Setup Device and Graphics // Note: the mirror window can be any size, for this sample we use 1/2 the HMD resolution if (!DIRECTX.InitDevice(hmdDesc.Resolution.w / 2, hmdDesc.Resolution.h / 2, reinterpret_cast<LUID*>(&luid))) goto Done; ovrRecti eyeRenderViewport[2]; // Make a single eye texture { ovrSizei eyeTexSizeL = ovr_GetFovTextureSize(session, ovrEye_Left, hmdDesc.DefaultEyeFov[0], 1.0f); ovrSizei eyeTexSizeR = ovr_GetFovTextureSize(session, ovrEye_Right, hmdDesc.DefaultEyeFov[1], 1.0f); ovrSizei textureSize; textureSize.w = eyeTexSizeL.w + eyeTexSizeR.w; textureSize.h = max(eyeTexSizeL.h, eyeTexSizeR.h); pEyeRenderTexture = new OculusTexture(); if (!pEyeRenderTexture->Init(session, textureSize.w, textureSize.h)) { if (retryCreate) goto Done; VALIDATE(OVR_SUCCESS(result), "Failed to create eye texture."); } pEyeDepthBuffer = new DepthBuffer(DIRECTX.Device, textureSize.w, textureSize.h); // set viewports eyeRenderViewport[0].Pos.x = 0; eyeRenderViewport[0].Pos.y = 0; eyeRenderViewport[0].Size = eyeTexSizeL; eyeRenderViewport[1].Pos.x = eyeTexSizeL.w; eyeRenderViewport[1].Pos.y = 0; eyeRenderViewport[1].Size = eyeTexSizeR; } if (!pEyeRenderTexture->TextureChain) { if (retryCreate) goto Done; VALIDATE(false, "Failed to create texture."); } // Create a mirror to see on the monitor. 
desc.Format = OVR_FORMAT_R8G8B8A8_UNORM_SRGB; desc.Width = DIRECTX.WinSizeW; desc.Height = DIRECTX.WinSizeH; result = ovr_CreateMirrorTextureDX(session, DIRECTX.Device, &desc, &mirrorTexture); if (!OVR_SUCCESS(result)) { if (retryCreate) goto Done; VALIDATE(false, "Failed to create mirror texture."); } // Create the room model roomScene = new Scene(false); // Create camera mainCam = new Camera(&XMVectorSet(0.0f, 1.6f, 5.0f, 0), &XMQuaternionIdentity()); // Setup VR components, filling out description ovrEyeRenderDesc eyeRenderDesc[2]; eyeRenderDesc[0] = ovr_GetRenderDesc(session, ovrEye_Left, hmdDesc.DefaultEyeFov[0]); eyeRenderDesc[1] = ovr_GetRenderDesc(session, ovrEye_Right, hmdDesc.DefaultEyeFov[1]); // Main loop while (DIRECTX.HandleMessages()) { XMVECTOR forward = XMVector3Rotate(XMVectorSet(0, 0, -0.05f, 0), mainCam->Rot); XMVECTOR right = XMVector3Rotate(XMVectorSet(0.05f, 0, 0, 0), mainCam->Rot); XMVECTOR up = XMVector3Rotate(XMVectorSet(0, 0.05f, 0, 0), mainCam->Rot); if (DIRECTX.Key['W'] || DIRECTX.Key[VK_UP]) mainCam->Pos = XMVectorAdd(mainCam->Pos, forward); if (DIRECTX.Key['S'] || DIRECTX.Key[VK_DOWN]) mainCam->Pos = XMVectorSubtract(mainCam->Pos, forward); if (DIRECTX.Key['D']) mainCam->Pos = XMVectorAdd(mainCam->Pos, right); if (DIRECTX.Key['A']) mainCam->Pos = XMVectorSubtract(mainCam->Pos, right); if (DIRECTX.Key['Q']) mainCam->Pos = XMVectorAdd(mainCam->Pos, up); if (DIRECTX.Key['E']) mainCam->Pos = XMVectorSubtract(mainCam->Pos, up); static float Yaw = 0; if (DIRECTX.Key[VK_LEFT]) mainCam->Rot = XMQuaternionRotationRollPitchYaw(0, Yaw += 0.02f, 0); if (DIRECTX.Key[VK_RIGHT]) mainCam->Rot = XMQuaternionRotationRollPitchYaw(0, Yaw -= 0.02f, 0); if (DIRECTX.Key['P']) ovr_SetInt(session, OVR_PERF_HUD_MODE, int(ovrPerfHud_AppRenderTiming)); else ovr_SetInt(session, OVR_PERF_HUD_MODE, int(ovrPerfHud_Off)); useInstancing = DIRECTX.Key['I']; // Animate the cube static float cubeClock = 0; roomScene->Models[0]->Pos = XMFLOAT3(9 * sin(cubeClock), 3, 9 * 
cos(cubeClock += 0.015f)); // Get both eye poses simultaneously, with IPD offset already included. ovrPosef EyeRenderPose[2]; ovrVector3f HmdToEyeOffset[2] = { eyeRenderDesc[0].HmdToEyeOffset, eyeRenderDesc[1].HmdToEyeOffset }; double sensorSampleTime; // sensorSampleTime is fed into the layer later ovr_GetEyePoses(session, frameIndex, ovrTrue, HmdToEyeOffset, EyeRenderPose, &sensorSampleTime); // Render scene to eye texture if (isVisible) { DIRECTX.SetAndClearRenderTarget(pEyeRenderTexture->GetRTV(), pEyeDepthBuffer); // calculate eye transforms XMMATRIX viewProjMatrix[2]; for (int eye = 0; eye < 2; ++eye) { //Get the pose information in XM format XMVECTOR eyeQuat = XMLoadFloat4((XMFLOAT4 *)&EyeRenderPose[eye].Orientation.x); XMVECTOR eyePos = XMVectorSet(EyeRenderPose[eye].Position.x, EyeRenderPose[eye].Position.y, EyeRenderPose[eye].Position.z, 0); // Get view and projection matrices for the Rift camera XMVECTOR CombinedPos = XMVectorAdd(mainCam->Pos, XMVector3Rotate(eyePos, mainCam->Rot)); Camera finalCam(&CombinedPos, &(XMQuaternionMultiply(eyeQuat, mainCam->Rot))); XMMATRIX view = finalCam.GetViewMatrix(); ovrMatrix4f p = ovrMatrix4f_Projection(eyeRenderDesc[eye].Fov, 0.1f, 100.0f, ovrProjection_None); XMMATRIX proj = XMMatrixSet(p.M[0][0], p.M[1][0], p.M[2][0], p.M[3][0], p.M[0][1], p.M[1][1], p.M[2][1], p.M[3][1], p.M[0][2], p.M[1][2], p.M[2][2], p.M[3][2], p.M[0][3], p.M[1][3], p.M[2][3], p.M[3][3]); if (useInstancing) { // scale and offset projection matrix to shift image to correct part of texture for each eye XMMATRIX scale = XMMatrixScaling(0.5f, 1.0f, 1.0f); XMMATRIX translate = XMMatrixTranslation((eye==0) ? 
-0.5f : 0.5f, 0.0f, 0.0f); proj = XMMatrixMultiply(proj, scale); proj = XMMatrixMultiply(proj, translate); } viewProjMatrix[eye] = XMMatrixMultiply(view, proj); } if (useInstancing) { // use instancing for stereo DIRECTX.SetViewport(0.0f, 0.0f, (float)eyeRenderViewport[0].Size.w + eyeRenderViewport[1].Size.w, (float)eyeRenderViewport[0].Size.h); // render scene for (int i = 0; i < repeatDrawing; i++) roomScene->RenderInstanced(&viewProjMatrix[0], 1, 1, 1, 1, true); } else { // non-instanced path for (int eye = 0; eye < 2; ++eye) { // set viewport DIRECTX.SetViewport((float)eyeRenderViewport[eye].Pos.x, (float)eyeRenderViewport[eye].Pos.y, (float)eyeRenderViewport[eye].Size.w, (float)eyeRenderViewport[eye].Size.h); // render scene for (int i = 0; i < repeatDrawing; i++) roomScene->Render(&viewProjMatrix[eye], 1, 1, 1, 1, true); } } // Commit rendering to the swap chain pEyeRenderTexture->Commit(); } // Initialize our single full screen Fov layer. ovrLayerEyeFov ld = {}; ld.Header.Type = ovrLayerType_EyeFov; ld.Header.Flags = 0; ld.SensorSampleTime = sensorSampleTime; for (int eye = 0; eye < 2; ++eye) { ld.ColorTexture[eye] = pEyeRenderTexture->TextureChain; ld.Viewport[eye] = eyeRenderViewport[eye]; ld.Fov[eye] = hmdDesc.DefaultEyeFov[eye]; ld.RenderPose[eye] = EyeRenderPose[eye]; } ovrLayerHeader* layers = &ld.Header; result = ovr_SubmitFrame(session, frameIndex, nullptr, &layers, 1); // exit the rendering loop if submit returns an error, will retry on ovrError_DisplayLost if (!OVR_SUCCESS(result)) goto Done; isVisible = (result == ovrSuccess); // Render mirror ID3D11Texture2D* tex = nullptr; ovr_GetMirrorTextureBufferDX(session, mirrorTexture, IID_PPV_ARGS(&tex)); DIRECTX.Context->CopyResource(DIRECTX.BackBuffer, tex); tex->Release(); DIRECTX.SwapChain->Present(0, 0); frameIndex++; } // Release resources Done: delete mainCam; delete roomScene; if (mirrorTexture) ovr_DestroyMirrorTexture(session, mirrorTexture); delete pEyeRenderTexture; delete pEyeDepthBuffer; 
DIRECTX.ReleaseDevice(); ovr_Destroy(session); // Retry on ovrError_DisplayLost return retryCreate || OVR_SUCCESS(result) || (result == ovrError_DisplayLost); }
// Sets up Ogre + the (SDK 0.6/0.7-era) Oculus runtime, builds the scene and
// per-eye render targets, spawns the physics thread, and runs the render loop
// until the window is closed. Returns EXIT_SUCCESS.
int OgreOculus::go(void)
{
    // Create Root object
    root = new Ogre::Root("plugin.cfg", "ogre.cfg");

    // OpenGL
    root->loadPlugin("RenderSystem_GL_d");
    root->setRenderSystem(root->getRenderSystemByName("OpenGL Rendering Subsystem"));

    // Initialize Root
    root->initialise(false);

    // Initialize Oculus; any setup failure exits the process with a distinct code.
    ovrHmd hmd;
    ovrHmdDesc hmdDesc;
    ovrGraphicsLuid luid;
    ovr_Initialize(nullptr);
    if(ovr_Create(&hmd, &luid) != ovrSuccess)
        exit(-1);
    hmdDesc = ovr_GetHmdDesc(hmd);
    if(ovr_ConfigureTracking(hmd,
            ovrTrackingCap_Orientation |ovrTrackingCap_MagYawCorrection |ovrTrackingCap_Position, 0) != ovrSuccess)
        exit(-2);

    // Turn off the performance HUD
    ovr_SetInt(hmd, "PerfHudMode", ovrPerfHud_Off);

    // Create a window sized to half the HMD resolution
    window = root->createRenderWindow("Ogre + Oculus = <3", hmdDesc.Resolution.w/2, hmdDesc.Resolution.h/2, false);

    // Create scene manager and cameras
    smgr = root->createSceneManager(Ogre::ST_GENERIC);

    // Load Ogre resource paths from config file
    Ogre::ConfigFile cf;
    cf.load("resources_d.cfg");

    // Go through all sections & settings in the file and add resources
    Ogre::ConfigFile::SectionIterator seci = cf.getSectionIterator();
    Ogre::String secName, typeName, archName;
    while (seci.hasMoreElements())
    {
        secName = seci.peekNextKey();
        Ogre::ConfigFile::SettingsMultiMap *settings = seci.getNext();
        Ogre::ConfigFile::SettingsMultiMap::iterator i;
        for (i = settings->begin(); i != settings->end(); ++i)
        {
            typeName = i->first;
            archName = i->second;
            Ogre::ResourceGroupManager::getSingleton().addResourceLocation(
                archName, typeName, secName);
        }
    }

    // Set resources
    Ogre::TextureManager::getSingleton().setDefaultNumMipmaps(5);
    Ogre::ResourceGroupManager::getSingleton().initialiseAllResourceGroups();

    // Create the model itself via OgreModel.cpp
    createOgreModel(smgr);

    // Create camera
    createCamera();

    // Set viewport and background color
    Ogre::Viewport* vp = window->addViewport(mCamera);
    // NOTE(review): the original comment said "Yellow", but ColourValue takes
    // 0..1 floats, so (34, 89, 0) saturates to solid green - confirm intent.
    vp->setBackgroundColour(Ogre::ColourValue(34, 89, 0));

    // Set aspect ratio
    mCamera->setAspectRatio(
        Ogre::Real(vp->getActualWidth()) / Ogre::Real(vp->getActualHeight()));

    // Initialize glew
    if(glewInit() != GLEW_OK)
        exit(-3);

    // Get per-eye texture sizes for the default FOVs
    ovrSizei texSizeL, texSizeR;
    texSizeL = ovr_GetFovTextureSize(hmd, ovrEye_Left, hmdDesc.DefaultEyeFov[left], 1);
    texSizeR = ovr_GetFovTextureSize(hmd, ovrEye_Right, hmdDesc.DefaultEyeFov[right], 1);

    // Calculate render buffer size (both eyes side by side)
    ovrSizei bufferSize;
    bufferSize.w = texSizeL.w + texSizeR.w;
    bufferSize.h = max(texSizeL.h, texSizeR.h);

    // Create render texture set (the swap-texture chain the compositor reads)
    ovrSwapTextureSet* textureSet;
    if(ovr_CreateSwapTextureSetGL(hmd, GL_RGB, bufferSize.w, bufferSize.h, &textureSet) != ovrSuccess)
        exit(-4);

    // Create Ogre render texture covering both eyes
    Ogre::GLTextureManager* textureManager =
        static_cast<Ogre::GLTextureManager*>(Ogre::GLTextureManager::getSingletonPtr());
    Ogre::TexturePtr rtt_texture(textureManager->createManual("RttTex",
        Ogre::ResourceGroupManager::DEFAULT_RESOURCE_GROUP_NAME, Ogre::TEX_TYPE_2D,
        bufferSize.w, bufferSize.h, 0, Ogre::PF_R8G8B8, Ogre::TU_RENDERTARGET));
    Ogre::RenderTexture* rttEyes = rtt_texture->getBuffer(0, 0)->getRenderTarget();
    Ogre::GLTexture* gltex = static_cast<Ogre::GLTexture*>(
        Ogre::GLTextureManager::getSingleton().getByName("RttTex").getPointer());
    GLuint renderTextureID = gltex->getGLID();

    // Put camera viewports on the ogre render texture: left eye left half, right eye right half
    Ogre::Viewport* vpts[nbEyes];
    vpts[left]=rttEyes->addViewport(cams[left], 0, 0, 0, 0.5f);
    vpts[right]=rttEyes->addViewport(cams[right], 1, 0.5f, 0, 0.5f);
    // NOTE(review): original comment said "Black background" for the same
    // (34, 89, 0) value used above - these clamp to green; confirm intent.
    vpts[left]->setBackgroundColour(Ogre::ColourValue(34, 89, 0));
    vpts[right]->setBackgroundColour(Ogre::ColourValue(34, 89, 0));

    // Create the mirror texture shown in the desktop window
    ovrTexture* mirrorTexture;
    if(ovr_CreateMirrorTextureGL(hmd, GL_RGB, hmdDesc.Resolution.w, hmdDesc.Resolution.h, &mirrorTexture) != ovrSuccess)
        exit(-5);
    Ogre::TexturePtr mirror_texture(textureManager->createManual("MirrorTex",
        Ogre::ResourceGroupManager::DEFAULT_RESOURCE_GROUP_NAME, Ogre::TEX_TYPE_2D,
        hmdDesc.Resolution.w, hmdDesc.Resolution.h, 0, Ogre::PF_R8G8B8,
        Ogre::TU_RENDERTARGET));

    // Get GL texture IDs for both sides of the mirror copy
    GLuint ogreMirrorTextureID = static_cast<Ogre::GLTexture*>(
        Ogre::GLTextureManager::getSingleton().getByName("MirrorTex").getPointer())->getGLID();
    GLuint oculusMirrorTextureID = ((ovrGLTexture*)mirrorTexture)->OGL.TexId;

    // Create EyeRenderDesc
    ovrEyeRenderDesc EyeRenderDesc[nbEyes];
    EyeRenderDesc[left] = ovr_GetRenderDesc(hmd, ovrEye_Left, hmdDesc.DefaultEyeFov[left]);
    EyeRenderDesc[right] = ovr_GetRenderDesc(hmd, ovrEye_Right, hmdDesc.DefaultEyeFov[right]);

    // Get per-eye view offsets (IPD)
    ovrVector3f offset[nbEyes];
    offset[left]=EyeRenderDesc[left].HmdToEyeViewOffset;
    offset[right]=EyeRenderDesc[right].HmdToEyeViewOffset;

    // Compositor layer: one EyeFov layer, both eyes sharing the texture set
    ovrLayerEyeFov layer;
    layer.Header.Type = ovrLayerType_EyeFov;
    layer.Header.Flags = 0;
    layer.ColorTexture[left] = textureSet;
    layer.ColorTexture[right] = textureSet;
    layer.Fov[left] = EyeRenderDesc[left].Fov;
    layer.Fov[right] = EyeRenderDesc[right].Fov;
    layer.Viewport[left] = OVR::Recti(0, 0, bufferSize.w/2, bufferSize.h);
    layer.Viewport[right] = OVR::Recti(bufferSize.w/2, 0, bufferSize.w/2, bufferSize.h);

    // Get projection matrices and hand them to the Ogre cameras
    for(size_t eyeIndex(0); eyeIndex < ovrEye_Count; eyeIndex++)
    {
        // Get the projection matrix (near 0.01, far 4000, right-handed flag set)
        OVR::Matrix4f proj = ovrMatrix4f_Projection(EyeRenderDesc[eyeIndex].Fov,
            static_cast<float>(0.01f), 4000, true);

        // Convert it to Ogre matrix
        Ogre::Matrix4 OgreProj;
        for(size_t x(0); x < 4; x++)
            for(size_t y(0); y < 4; y++)
                OgreProj[x][y] = proj.M[x][y];

        // Set the matrix
        cams[eyeIndex]->setCustomProjectionMatrix(true, OgreProj);
    }

    // Variables for render loop
    bool render(true);
    ovrFrameTiming hmdFrameTiming;
    ovrTrackingState ts;
    OVR::Posef pose;
    ovrLayerHeader* layers;

    // Create event listener for handling user input
    createEventListener();

    // Run physics loop in a new thread. The `message` map doubles as a
    // suspend flag: a non-empty map asks the physics thread to pause while
    // the render thread applies the queued scene mutations below.
    std::map<Ogre::Entity*, Ogre::Vector3> positionRequests;
    std::map<Ogre::Entity*, std::string> animationRequests;
    std::map<Ogre::Entity*, std::vector<int>> rotationRequests;
    std::map<std::string, std::string> message;
    // NOTE(review): these maps are shared with physicsThread without any
    // mutex; synchronization apparently relies on the message-map protocol -
    // confirm physicsLoop's side of the contract.
    std::thread physicsThread(physicsLoop, smgr, &message, &positionRequests,
        &animationRequests, &rotationRequests);

    // Render loop
    while(render)
    {
        // Suspend physics loop and perform requested movement/rotations/animations
        if(positionRequests.size() > 0 || animationRequests.size() > 0 || rotationRequests.size() > 0){
            message.insert(std::pair<std::string, std::string>("", ""));
            for(auto const &request : positionRequests)
            {
                Ogre::Vector3 pos = request.second;
                Ogre::SceneNode* sceneNode = request.first->getParentSceneNode();
                sceneNode->setPosition(pos);
            }
            for(auto const &request : animationRequests)
            {
                request.first->getAnimationState(request.second)->addTime(0.1);
            }
            for(auto const &request : rotationRequests)
            {
                Ogre::SceneNode* sceneNode = request.first->getParentSceneNode();
                sceneNode->roll(Ogre::Degree(request.second[0]));
                sceneNode->pitch(Ogre::Degree(request.second[1]));
                sceneNode->yaw(Ogre::Degree(request.second[2]));
            }
            positionRequests.clear();
            animationRequests.clear();
            rotationRequests.clear();

            // Resume physics loop
            message.clear();
        }

        // Update Ogre window
        Ogre::WindowEventUtilities::messagePump();

        // Advance textureset index to the next buffer in the swap chain
        textureSet->CurrentIndex = (textureSet->CurrentIndex + 1) % textureSet->TextureCount;

        // Capture user input
        mKeyboard->capture();
        mMouse->capture();

        // Movement calculations
        mPlayerNode->translate(mDirection, Ogre::Node::TS_LOCAL);

        // Sample head pose at the predicted mid-frame display time
        hmdFrameTiming = ovr_GetFrameTiming(hmd, 0);
        ts = ovr_GetTrackingState(hmd, hmdFrameTiming.DisplayMidpointSeconds);
        pose = ts.HeadPose.ThePose;
        ovr_CalcEyePoses(pose, offset, layer.RenderPose);
        oculusOrient = pose.Rotation;
        oculusPos = pose.Translation;

        // Apply head tracking (orientation relative to the startup orientation)
        mHeadNode->setOrientation(Ogre::Quaternion(oculusOrient.w, oculusOrient.x,
            oculusOrient.y, oculusOrient.z) * initialOculusOrientation.Inverse());
        mHeadNode->setPosition(headPositionTrackingSensitivity
            * Ogre::Vector3(oculusPos.x, oculusPos.y,oculusPos.z));

        // Update Ogre viewports
        root->_fireFrameRenderingQueued();
        vpts[left]->update();
        vpts[right]->update();

        // Copy the rendered image to the Oculus Swap Texture
        glCopyImageSubData(renderTextureID, GL_TEXTURE_2D, 0, 0, 0, 0,
            ((ovrGLTexture*)(&textureSet->Textures[textureSet->CurrentIndex]))->OGL.TexId,
            GL_TEXTURE_2D, 0, 0, 0, 0,
            bufferSize.w,bufferSize.h, 1);

        // Submit new frame to the Oculus and update window
        layers = &layer.Header;
        ovr_SubmitFrame(hmd, 0, nullptr, &layers, 1);
        window->update();

        // Exit loop when window is closed
        if(window->isClosed())
            render = false;
    }

    // Shut down Oculus
    ovr_Destroy(hmd);
    ovr_Shutdown();

    // Delete Ogre root and return
    delete root;
    return EXIT_SUCCESS;
}