SimpleScene() {
    // Pull the user's profile values from the HMD, falling back to SDK defaults.
    ipd = ovrHmd_GetFloat(hmd, OVR_KEY_IPD, OVR_DEFAULT_IPD);
    eyeHeight = ovrHmd_GetFloat(hmd, OVR_KEY_PLAYER_HEIGHT, OVR_DEFAULT_PLAYER_HEIGHT);
    if (!ovrHmd_ConfigureTracking(hmd,
      ovrTrackingCap_Orientation | ovrTrackingCap_Position | ovrTrackingCap_MagYawCorrection, 0)) {
      SAY_ERR("Could not attach to sensor device");
    }
    for_each_eye([&](ovrEyeType eye){
      textureIds[eye] = 0;
    });

    memset(eyeTextures, 0, 2 * sizeof(ovrGLTexture));
    // BUGFIX: a local `float eyeHeight = 1.5f;` previously shadowed the member
    // queried above, silently discarding the profile's player height.  The
    // initial camera placement now uses the member value.
    player = glm::inverse(glm::lookAt(
      glm::vec3(0, eyeHeight, 4),
      glm::vec3(0, eyeHeight, 0),
      glm::vec3(0, 1, 0)));

    for_each_eye([&](ovrEyeType eye){
      // Size each eye texture for the maximum FOV at 1.0 pixel density.
      ovrSizei eyeTextureSize = ovrHmd_GetFovTextureSize(hmd, eye, hmd->MaxEyeFov[eye], 1.0f);

      ovrTextureHeader & eyeTextureHeader = eyeTextures[eye].Header;
      eyeTextureHeader.TextureSize = eyeTextureSize;
      eyeTextureHeader.RenderViewport.Size = eyeTextureSize;
      eyeTextureHeader.API = ovrRenderAPI_OpenGL;
    });

    resetCamera();
  }
CubeScene::CubeScene() {
  // Seed IPD and eye height from the user's HMD profile; the SDK defaults
  // are used when no profile value has been stored on the device.
  ipd       = ovrHmd_GetFloat(hmd, OVR_KEY_IPD, OVR_DEFAULT_IPD);
  eyeHeight = ovrHmd_GetFloat(hmd, OVR_KEY_PLAYER_HEIGHT, OVR_DEFAULT_PLAYER_HEIGHT);

  resetCamera();
}
  CubeScene_Rift() {
    eyeHeight = ovrHmd_GetFloat(hmd, OVR_KEY_EYE_HEIGHT, eyeHeight);
    ipd = ovrHmd_GetFloat(hmd, OVR_KEY_IPD, ipd);

    Stacks::modelview().top() = glm::lookAt(
      vec3(0, eyeHeight, 0.5f),
      vec3(0, eyeHeight, 0),
      Vectors::UP);
  }
  CubeScene_RiftSensors() {
    eyeHeight = ovrHmd_GetFloat(hmd, OVR_KEY_EYE_HEIGHT, eyeHeight);
    ipd = ovrHmd_GetFloat(hmd, OVR_KEY_IPD, ipd);

    Stacks::modelview().top() = glm::lookAt(
      vec3(0, eyeHeight, 0.5f),
      vec3(0, eyeHeight, 0),
      Vectors::UP);

    if (!ovrHmd_ConfigureTracking(hmd,
      ovrTrackingCap_Orientation |
      ovrTrackingCap_Position, 0)) {
      SAY("Warning: Unable to locate Rift sensor device.  This demo is boring now.");
    }
  }
Esempio n. 5
0
void OculusVRDebug::OnRender(const ovrHmd hmd, const ovrTrackingState &trackingState, const ovrEyeRenderDesc *eyeRenderDescs, const ovrSizei &eyeTextureSize)
{
    // Draws the debug HUD: head pose, FPS, camera position, profile values,
    // averaged FOV/resolution and (DK2 only) motion-to-photon latency readings.
    glPolygonMode(GL_FRONT_AND_BACK, GL_FILL);
    glActiveTexture(GL_TEXTURE0);

    static const float xPos     = -0.3f;
    static const float ySpacing = 0.05f;
    std::stringstream statsStream;

    char buf[128];
    float hmdYaw, hmdPitch, hmdRoll;
    OVR::Quatf headOrientation(trackingState.HeadPose.ThePose.Orientation);
    headOrientation.GetEulerAngles<OVR::Axis_Y, OVR::Axis_X, OVR::Axis_Z>(&hmdYaw, &hmdPitch, &hmdRoll);

    // Euler angles come back in radians; PIdiv180inv converts to degrees.
    OVR::OVR_sprintf(buf, sizeof(buf), "HMD YPR:%2.0f %2.0f %2.0f  HMD: %s", hmdYaw * PIdiv180inv, hmdPitch * PIdiv180inv, hmdRoll * PIdiv180inv, hmd->ProductName);
    m_font->drawText(buf, xPos, 0.1f, 0.f);

    OVR::OVR_sprintf(buf, sizeof(buf), "FPS: %.1f  ms/frame: %.1f  Frame: %03d %d", m_fps, m_secondsPerFrame * 1000.0f, m_frameCounter, m_totalFrameCounter % 2);
    m_font->drawText(buf, xPos, 0.1f - ySpacing, 0.f);

    OVR::OVR_sprintf(buf, sizeof(buf), "Pos: %2.2f %2.2f %2.2f  Tracking: %s", g_cameraDirector.GetActiveCamera()->Position().m_x, 
                                                                 g_cameraDirector.GetActiveCamera()->Position().m_y,
                                                                 g_cameraDirector.GetActiveCamera()->Position().m_z,
                                                                 trackingState.StatusFlags & ovrStatus_PositionTracked ? "YES" : "NO");
    m_font->drawText(buf, xPos, 0.1f - ySpacing * 2.f, 0.f);


    OVR::OVR_sprintf(buf, sizeof(buf), "EyeHeight: %2.2f IPD: %2.1fmm", ovrHmd_GetFloat(hmd, OVR_KEY_EYE_HEIGHT, 0.f), ovrHmd_GetFloat(hmd, OVR_KEY_IPD, 0.f) * 1000.f);
    m_font->drawText(buf, xPos, 0.1f - ySpacing * 3.f, 0.f);

    // Average FOVs.
    OVR::FovPort leftFov  = eyeRenderDescs[0].Fov;
    OVR::FovPort rightFov = eyeRenderDescs[1].Fov;

    // Rendered size changes based on selected options & dynamic rendering.
    // FIX: the vertical-FOV average previously used a double literal (0.5)
    // while the horizontal one used 0.5f — use float literals consistently.
    OVR::OVR_sprintf(buf, sizeof(buf), "FOV %2.1fx%2.1f, Resolution: %ix%i", (leftFov.GetHorizontalFovDegrees() + rightFov.GetHorizontalFovDegrees()) * 0.5f,
                                                                             (leftFov.GetVerticalFovDegrees() + rightFov.GetVerticalFovDegrees()) * 0.5f,
                                                                             eyeTextureSize.w, eyeTextureSize.h);
    m_font->drawText(buf, xPos, 0.1f - ySpacing * 4.f, 0.f);

    // Latency readings — only shown when the HMD reports all five DK2 values.
    float latencies[5] = {};
    if (ovrHmd_GetFloatArray(hmd, "DK2Latency", latencies, 5) == 5)
    {
        char text[5][32];
        for (int i = 0; i < 5; ++i)
        {
            FormatLatencyReading(text[i], sizeof(text[i]), latencies[i]);
        }

        statsStream.str("");
        statsStream << "M2P Latency  Ren: " << text[0] << " TWrp: " << text[1];
        m_font->drawText(statsStream.str(), xPos, 0.1f - ySpacing * 5.f, 0.f);

        statsStream.str("");
        statsStream << "PostPresent: " << text[2] << " Err: " << text[3] << " " << text[4];
        m_font->drawText(statsStream.str(), xPos, 0.1f - ySpacing * 6.f, 0.f);
    }
}
Esempio n. 6
0
ngl::Mat4 OculusInterface::getViewMatrix(int _eye)
{
	// Builds the view matrix for one eye from the tracked head pose:
	// per-eye IPD offset * inverted head rotation * negated head position * eye-level drop.
	m_pose[_eye] = ovrHmd_GetEyePose(m_hmd,(ovrEyeType) _eye);
	ngl::Mat4 pos;
	// Per-eye translation (half-IPD offset) supplied by the SDK render description.
	pos.translate(m_eyeRdesc[_eye].ViewAdjust.x, m_eyeRdesc[_eye].ViewAdjust.y, m_eyeRdesc[_eye].ViewAdjust.z);
	ngl::Mat4 rotation ;
	ngl::Quaternion orientation(m_pose[_eye].Orientation.w,m_pose[_eye].Orientation.x,m_pose[_eye].Orientation.y,m_pose[_eye].Orientation.z);
	//	quat_to_matrix(&m_pose[_eye].Orientation.x, &rotation.m_m[0]);
	rotation=orientation.toMat4();
	// Transpose inverts the pure-rotation matrix: the world rotates opposite to the head.
	rotation.transpose();
	ngl::Mat4  eyePos;
	eyePos.translate(-m_pose[_eye].Position.x, -m_pose[_eye].Position.y, -m_pose[_eye].Position.z);
	ngl::Mat4 eyeLevel;
	// FIX: float literal 1.65f — ovrHmd_GetFloat's default parameter is a float,
	// so the bare double 1.65 caused an implicit narrowing conversion.
	eyeLevel.translate(0, -ovrHmd_GetFloat(m_hmd, OVR_KEY_EYE_HEIGHT, 1.65f), 0);
	// could optimize this
	return pos*rotation*eyePos*eyeLevel;

}
Esempio n. 7
0
// Ends the current VR frame: computes per-eye poses/view/projection (scene
// rendering itself is commented out here) and submits to SDK distortion.
// Returns 1 on completion (GM:Studio extension convention).
GMO double endFrame() {
	static ovrPosef eyeRenderPose[2]; 
	static float    BodyYaw(3.141592f);
	static Vector3f HeadPos(0.0f, 1.6f, -5.0f);
	// Keep the avatar's eye level in sync with the user's profile height.
	HeadPos.y = ovrHmd_GetFloat(HMD, OVR_KEY_EYE_HEIGHT, HeadPos.y);

    //pRender->SetRenderTarget ( pRendertargetTexture );
	
    //pRender->SetViewport (Recti(0,0, pRendertargetTexture->GetWidth(),
    //                                    pRendertargetTexture->GetHeight() ));  
    //pRender->Clear();
	// Query each eye in the HMD's preferred order and build its matrices.
	for (int eyeIndex = 0; eyeIndex < ovrEye_Count; eyeIndex++)
	{
        ovrEyeType eye = HMD->EyeRenderOrder[eyeIndex];
        eyeRenderPose[eye] = ovrHmd_GetEyePose(HMD, eye);

        // Get view and projection matrices
		Matrix4f rollPitchYaw       = Matrix4f::RotationY(BodyYaw);
		Matrix4f finalRollPitchYaw  = rollPitchYaw * Matrix4f(eyeRenderPose[eye].Orientation);
		Vector3f finalUp            = finalRollPitchYaw.Transform(Vector3f(0,1,0));
		Vector3f finalForward       = finalRollPitchYaw.Transform(Vector3f(0,0,-1));
		Vector3f shiftedEyePos      = HeadPos + rollPitchYaw.Transform(eyeRenderPose[eye].Position);
        Matrix4f view = Matrix4f::LookAtRH(shiftedEyePos, shiftedEyePos + finalForward, finalUp); 
		Matrix4f proj = ovrMatrix4f_Projection(EyeRenderDesc[eye].Fov, 0.01f, 10000.0f, true);

		//pRender->SetViewport(Recti(EyeRenderViewport[eye]));
		//pRender->SetProjection(proj);
		pRender->SetDepthMode(true, true);
		
		//pRoomScene->Render(pRender, Matrix4f::Translation(EyeRenderDesc[eye].ViewAdjust) * view);
	}
	//pRender->BlendState
	
	//pRender->Clear(0.0f, 0.0f, 0.0f, 0.0f);
	//pRender->Present( true );
	pRender->UpdateMonitorOutputs();
    pRender->FinishScene();
	// NOTE(review): this passes `headPose` (defined elsewhere in the file) rather
	// than the `eyeRenderPose` array filled above — confirm which pose set
	// ovrHmd_EndFrame is meant to receive here.
	ovrHmd_EndFrame(HMD, headPose, &EyeTexture[0].Texture);
	return 1;
}
		float GetIPD()
		{
			// Interpupillary distance (meters) from the user's profile,
			// or the SDK default when no profile value is stored.
			const float ipd = ovrHmd_GetFloat(m_device, OVR_KEY_IPD, OVR_DEFAULT_IPD);
			return ipd;
		}
		float GetEyeHeight()
		{
			// Eye height above the floor (meters) from the user's profile,
			// or the SDK default when no profile value is stored.
			const float eyeHeight = ovrHmd_GetFloat(m_device, OVR_KEY_EYE_HEIGHT, OVR_DEFAULT_EYE_HEIGHT);
			return eyeHeight;
		}
Esempio n. 10
0
// Renders the currently selected text overlay (stats, profiler, help pages).
// View port & 2D ortho projection must be set before call.
void OculusWorldDemoApp::RenderTextInfoHud(float textHeight)
{
    float hmdYaw, hmdPitch, hmdRoll;
    switch(TextScreen)
    {
    case Text_Info:
    {
        char buf[512], gpustat[256];

        // Average FOVs.
        FovPort leftFov  = EyeRenderDesc[0].Fov;
        FovPort rightFov = EyeRenderDesc[1].Fov;
        
        // Rendered size changes based on selected options & dynamic rendering.
        // With ForceZeroIpd both eyes share one viewport, so only eye 0 counts.
        int pixelSizeWidth = EyeTexture[0].Header.RenderViewport.Size.w +
                             ((!ForceZeroIpd) ?
                               EyeTexture[1].Header.RenderViewport.Size.w : 0);
        int pixelSizeHeight = ( EyeTexture[0].Header.RenderViewport.Size.h +
                                EyeTexture[1].Header.RenderViewport.Size.h ) / 2;

        // No DK2, no message.
        char latency2Text[128] = "";
        {
            //float latency2 = ovrHmd_GetMeasuredLatencyTest2(Hmd) * 1000.0f; // show it in ms
            //if (latency2 > 0)
            //    OVR_sprintf(latency2Text, sizeof(latency2Text), "%.2fms", latency2);

            float latencies[3] = { 0.0f, 0.0f, 0.0f };
            if (ovrHmd_GetFloatArray(Hmd, "DK2Latency", latencies, 3) == 3)
            {
                char latencyText0[32], latencyText1[32], latencyText2[32];
                FormatLatencyReading(latencyText0, sizeof(latencyText0), latencies[0]);
                FormatLatencyReading(latencyText1, sizeof(latencyText1), latencies[1]);
                FormatLatencyReading(latencyText2, sizeof(latencyText2), latencies[2]);

                OVR_sprintf(latency2Text, sizeof(latency2Text),
                            " DK2 Latency  Ren: %s  TWrp: %s\n"
                            " PostPresent: %s  ",
                            latencyText0, latencyText1, latencyText2);
            }
        }

        ThePlayer.HeadPose.Rotation.GetEulerAngles<Axis_Y, Axis_X, Axis_Z>(&hmdYaw, &hmdPitch, &hmdRoll);
        OVR_sprintf(buf, sizeof(buf),
                    " HMD YPR:%4.0f %4.0f %4.0f   Player Yaw: %4.0f\n"
                    " FPS: %.1f  ms/frame: %.1f Frame: %d\n"
                    " Pos: %3.2f, %3.2f, %3.2f  HMD: %s\n"
                    " EyeHeight: %3.2f, IPD: %3.1fmm\n" //", Lens: %s\n"
                    " FOV %3.1fx%3.1f, Resolution: %ix%i\n"
                    "%s",
                    RadToDegree(hmdYaw), RadToDegree(hmdPitch), RadToDegree(hmdRoll),
                    RadToDegree(ThePlayer.BodyYaw.Get()),
                    FPS, SecondsPerFrame * 1000.0f, FrameCounter,
                    ThePlayer.BodyPos.x, ThePlayer.BodyPos.y, ThePlayer.BodyPos.z,
                    //GetDebugNameHmdType ( TheHmdRenderInfo.HmdType ),
                    HmdDesc.ProductName,
                    ThePlayer.UserEyeHeight,
                    ovrHmd_GetFloat(Hmd, OVR_KEY_IPD, 0) * 1000.0f,
                    //( EyeOffsetFromNoseLeft + EyeOffsetFromNoseRight ) * 1000.0f,
                    //GetDebugNameEyeCupType ( TheHmdRenderInfo.EyeCups ),  // Lens/EyeCup not exposed
                    
                    (leftFov.GetHorizontalFovDegrees() + rightFov.GetHorizontalFovDegrees()) * 0.5f,
                    (leftFov.GetVerticalFovDegrees() + rightFov.GetVerticalFovDegrees()) * 0.5f,

                    pixelSizeWidth, pixelSizeHeight,

                    latency2Text
                    );

        // BUGFIX: bytes-per-MiB is 1048576 (1024*1024); the previous divisor
        // 1058576 was a typo and overstated MiB slightly.
        // BUGFIX: texMemInMB is size_t — printing it with %u is undefined on
        // LP64 platforms, so cast explicitly.
        size_t texMemInMB = pRender->GetTotalTextureMemoryUsage() / 1048576;
        if (texMemInMB)
        {
            OVR_sprintf(gpustat, sizeof(gpustat), " GPU Tex: %u MB", (unsigned)texMemInMB);
            OVR_strcat(buf, sizeof(buf), gpustat);
        }
        
        DrawTextBox(pRender, 0.0f, 0.0f, textHeight, buf, DrawText_Center);
    }
    break;
    
    case Text_Timing:    
        Profiler.DrawOverlay(pRender);    
    break;
    
    case Text_Help1:
        DrawTextBox(pRender, 0.0f, 0.0f, textHeight, HelpText1, DrawText_Center);
        break;
    case Text_Help2:
        DrawTextBox(pRender, 0.0f, 0.0f, textHeight, HelpText2, DrawText_Center);
        break;
    
    case Text_None:
        break;

    default:
        OVR_ASSERT ( !"Missing text screen" );
        break;    
    }
}
Esempio n. 11
0
// Application startup: initializes LibOVR, creates (or simulates) the HMD,
// sets up the window/renderer, configures stereo values and loads the scene.
// Returns 0 on success, nonzero on a fatal setup failure.
int OculusWorldDemoApp::OnStartup(int argc, const char** argv)
{

    // *** Oculus HMD & Sensor Initialization

    // Create DeviceManager and first available HMDDevice from it.
    // Sensor object is created from the HMD, to ensure that it is on the
    // correct device.

    ovr_Initialize();

    Hmd = ovrHmd_Create(0);
    
    if (!Hmd)
    {
        // If we didn't detect an Hmd, create a simulated one for debugging.
        Hmd           = ovrHmd_CreateDebug(ovrHmd_DK1);
        UsingDebugHmd = true; 
        if (!Hmd)
        {   // Failed Hmd creation.
            return 1;
        }
    }

    // Get more details about the HMD.
    ovrHmd_GetDesc(Hmd, &HmdDesc);

    // Match the window to the HMD's native resolution.
    WindowSize = HmdDesc.Resolution;


    // ***** Setup System Window & rendering.

    if (!SetupWindowAndRendering(argc, argv))
        return 1;

    // Initialize FovSideTanMax, which allows us to change all Fov sides at once - Fov
    // starts at default and is clamped to this value.
    FovSideTanLimit = FovPort::Max(HmdDesc.MaxEyeFov[0], HmdDesc.MaxEyeFov[1]).GetMaxSideTan();
    FovSideTanMax   = FovPort::Max(HmdDesc.DefaultEyeFov[0], HmdDesc.DefaultEyeFov[1]).GetMaxSideTan();

    PositionTrackingEnabled = (HmdDesc.SensorCaps & ovrSensorCap_Position) ? true : false;


    // *** Configure HMD Stereo settings.
    
    CalculateHmdValues();

    // Query eye height.
    ThePlayer.UserEyeHeight = ovrHmd_GetFloat(Hmd, OVR_KEY_EYE_HEIGHT, ThePlayer.UserEyeHeight);
    ThePlayer.BodyPos.y     = ThePlayer.UserEyeHeight;
    // Center pupil for customization; real game shouldn't need to adjust this.
    CenterPupilDepthMeters  = ovrHmd_GetFloat(Hmd, "CenterPupilDepth", 0.0f);


    ThePlayer.bMotionRelativeToBody = false;  // Default to head-steering for DK1
    
    // Pick a startup popup based on what hardware was actually found.
    if (UsingDebugHmd)
        Menu.SetPopupMessage("NO HMD DETECTED");
    else if (!(ovrHmd_GetSensorState(Hmd, 0.0f).StatusFlags & ovrStatus_OrientationTracked))
        Menu.SetPopupMessage("NO SENSOR DETECTED");    
    else
        Menu.SetPopupMessage("Press F9 for Full-Screen on Rift");
    // Give first message 10 sec timeout, add border lines.
    Menu.SetPopupTimeout(10.0f, true);

    PopulateOptionMenu();

    // *** Identify Scene File & Prepare for Loading

    InitMainFilePath();  
    PopulatePreloadScene();
    
    // Baseline timestamp for the first frame's delta-time computation.
    LastUpdate = ovr_GetTimeInSeconds();
	
    return 0;
}
//-------------------------------------------------------------------------------------
// Per-frame update and render: polls eye poses, renders both eye views into
// the shared render target, then performs distortion either via the SDK
// (SDK_RENDER) or with the app's own distortion-mesh shader pass.
void ProcessAndRender()
{
    static ovrPosef eyeRenderPose[2]; 

	// Start timing
    #if SDK_RENDER
	ovrHmd_BeginFrame(HMD, 0); 
    #else
	ovrHmd_BeginFrameTiming(HMD, 0); 
    // Retrieve data useful for handling the Health and Safety Warning - unused, but here for reference
    ovrHSWDisplayState hswDisplayState;
    ovrHmd_GetHSWDisplayState(HMD, &hswDisplayState);
    #endif

	// Adjust eye position and rotation from controls, maintaining y position from HMD.
	static float    BodyYaw(3.141592f);
	static Vector3f HeadPos(0.0f, 1.6f, -5.0f);
    static ovrTrackingState HmdState;

    // Both eye poses are fetched in one call so they share the same timestamp.
    ovrVector3f hmdToEyeViewOffset[2] = { EyeRenderDesc[0].HmdToEyeViewOffset, EyeRenderDesc[1].HmdToEyeViewOffset };
    ovrHmd_GetEyePoses(HMD, 0, hmdToEyeViewOffset, eyeRenderPose, &HmdState);

	HeadPos.y = ovrHmd_GetFloat(HMD, OVR_KEY_EYE_HEIGHT, HeadPos.y);
	bool freezeEyeRender = Util_RespondToControls(BodyYaw, HeadPos, HmdState.HeadPose.ThePose.Orientation);

    pRender->BeginScene();
    
	// Render the two undistorted eye views into their render buffers.
    if (!freezeEyeRender) // freeze to debug for time warp
    {
        pRender->SetRenderTarget ( pRendertargetTexture );
        pRender->SetViewport (Recti(0,0, pRendertargetTexture->GetWidth(),
                                         pRendertargetTexture->GetHeight() ));  
        pRender->Clear();
		for (int eyeIndex = 0; eyeIndex < ovrEye_Count; eyeIndex++)
		{
            ovrEyeType eye = HMD->EyeRenderOrder[eyeIndex];

            // Get view and projection matrices
            Matrix4f rollPitchYaw       = Matrix4f::RotationY(BodyYaw);
            Matrix4f finalRollPitchYaw  = rollPitchYaw * Matrix4f(eyeRenderPose[eye].Orientation);
            Vector3f finalUp            = finalRollPitchYaw.Transform(Vector3f(0,1,0));
            Vector3f finalForward       = finalRollPitchYaw.Transform(Vector3f(0,0,-1));
            Vector3f shiftedEyePos      = HeadPos + rollPitchYaw.Transform(eyeRenderPose[eye].Position);
            Matrix4f view = Matrix4f::LookAtRH(shiftedEyePos, shiftedEyePos + finalForward, finalUp); 
			Matrix4f proj = ovrMatrix4f_Projection(EyeRenderDesc[eye].Fov, 0.01f, 10000.0f, true);

			pRender->SetViewport(Recti(EyeRenderViewport[eye]));
			pRender->SetProjection(proj);
			pRender->SetDepthMode(true, true);
			pRoomScene->Render(pRender, view);
		}
    }
    pRender->FinishScene();

    #if SDK_RENDER	// Let OVR do distortion rendering, Present and flush/sync
	ovrHmd_EndFrame(HMD, eyeRenderPose, &EyeTexture[0].Texture);
    #else
	// Clear screen
	pRender->SetDefaultRenderTarget();
	pRender->SetFullViewport();
	pRender->Clear(0.0f, 0.0f, 0.0f, 0.0f);

	// Setup shader
	ShaderFill distortionShaderFill(Shaders);
	distortionShaderFill.SetTexture(0, pRendertargetTexture);

	// Client-side distortion: render the per-eye distortion meshes with
	// timewarp matrices supplied by the SDK.
	for(int eyeNum = 0; eyeNum < 2; eyeNum++)
	{
		// Get and set shader constants
		Shaders->SetUniform2f("EyeToSourceUVScale",   UVScaleOffset[eyeNum][0].x, UVScaleOffset[eyeNum][0].y);
		Shaders->SetUniform2f("EyeToSourceUVOffset",  UVScaleOffset[eyeNum][1].x, UVScaleOffset[eyeNum][1].y);
 		ovrMatrix4f timeWarpMatrices[2];
		ovrHmd_GetEyeTimewarpMatrices(HMD, (ovrEyeType)eyeNum, eyeRenderPose[eyeNum], timeWarpMatrices);
		Shaders->SetUniform4x4f("EyeRotationStart", timeWarpMatrices[0]);  //Nb transposed when set
		Shaders->SetUniform4x4f("EyeRotationEnd",   timeWarpMatrices[1]);  //Nb transposed when set
		// Perform distortion
		pRender->Render(&distortionShaderFill, MeshVBs[eyeNum], MeshIBs[eyeNum]);
	}
/*
    unsigned char latencyColor[3];
    ovrBool drawDk2LatencyQuad = ovrHmd_GetLatencyTest2DrawColor(HMD, latencyColor);
    if(drawDk2LatencyQuad)
    {
        const int latencyQuadSize = 20; // only needs to be 1-pixel, but larger helps visual debugging
        pRender->SetViewport(HMD->Resolution.w - latencyQuadSize, 0, latencyQuadSize, latencyQuadSize);
        pRender->Clear(latencyColor[0] / 255.0f, latencyColor[1] / 255.0f, latencyColor[2] / 255.0f, 0.0f);
    }
*/
	pRender->SetDefaultRenderTarget();

	pRender->Present( true ); // Vsync enabled

    // Only flush GPU for ExtendDesktop; not needed in Direct App Renering with Oculus driver.
    if (HMD->HmdCaps & ovrHmdCap_ExtendDesktop)
		pRender->WaitUntilGpuIdle();
  
	ovrHmd_EndFrameTiming(HMD);
    #endif
}
//-------------------------------------------------------------------------------------
// Application entry point: initializes LibOVR and the Rift, creates the
// window/device and per-eye render buffers, configures SDK (or app) distortion
// rendering, then runs the main loop (input, webcam update, per-eye render,
// distortion/present) until the user quits.  Returns 0.
int WINAPI WinMain(HINSTANCE hinst, HINSTANCE, LPSTR, int)
{
    // Initializes LibOVR, and the Rift
    ovr_Initialize();
    HMD = ovrHmd_Create(0);

    if (!HMD)                       { MessageBoxA(NULL,"Oculus Rift not detected.","", MB_OK); return(0); }
    if (HMD->ProductName[0] == '\0')  MessageBoxA(NULL,"Rift detected, display not enabled.", "", MB_OK);

    // Setup Window and Graphics - use window frame if relying on Oculus driver
    bool windowed = (HMD->HmdCaps & ovrHmdCap_ExtendDesktop) ? false : true;    
    if (!WND.InitWindowAndDevice(hinst, Recti(HMD->WindowsPos, HMD->Resolution), windowed))
        return(0);

    WND.SetMaxFrameLatency(1);
    ovrHmd_AttachToWindow(HMD, WND.Window, NULL, NULL);
    ovrHmd_SetEnabledCaps(HMD, ovrHmdCap_LowPersistence | ovrHmdCap_DynamicPrediction);

    // Start the sensor which informs of the Rift's pose and motion
    ovrHmd_ConfigureTracking(HMD, ovrTrackingCap_Orientation | ovrTrackingCap_MagYawCorrection |
                                  ovrTrackingCap_Position, 0);

    // Make the eye render buffers (caution if actual size < requested due to HW limits). 
    for (int eye=0; eye<2; eye++)
    {
        Sizei idealSize             = ovrHmd_GetFovTextureSize(HMD, (ovrEyeType)eye,
                                                               HMD->DefaultEyeFov[eye], 1.0f);
        pEyeRenderTexture[eye]      = new ImageBuffer(true, false, idealSize);
        pEyeDepthBuffer[eye]        = new ImageBuffer(true, true, pEyeRenderTexture[eye]->Size);
        EyeRenderViewport[eye].Pos  = Vector2i(0, 0);
        EyeRenderViewport[eye].Size = pEyeRenderTexture[eye]->Size;
    }

    // Setup VR components
    // SDK_RENDER selects SDK-side distortion; RENDER_OPENGL picks GL vs D3D11 config.
#if SDK_RENDER
	#if RENDER_OPENGL
    ovrGLConfig oglcfg;
    oglcfg.OGL.Header.API				= ovrRenderAPI_OpenGL;
    oglcfg.OGL.Header.BackBufferSize	= Sizei(HMD->Resolution.w, HMD->Resolution.h);
    oglcfg.OGL.Header.Multisample		= 1;
	oglcfg.OGL.Window					= OGL.Window;
	oglcfg.OGL.DC						= GetDC(OGL.Window);

    if (!ovrHmd_ConfigureRendering(HMD, &oglcfg.Config,
		                           ovrDistortionCap_Chromatic | ovrDistortionCap_Vignette |
                                   ovrDistortionCap_TimeWarp | ovrDistortionCap_Overdrive,
								   HMD->DefaultEyeFov, EyeRenderDesc))	
		return(1);
	#else
    ovrD3D11Config d3d11cfg;
    d3d11cfg.D3D11.Header.API            = ovrRenderAPI_D3D11;
    d3d11cfg.D3D11.Header.BackBufferSize = Sizei(HMD->Resolution.w, HMD->Resolution.h);
    d3d11cfg.D3D11.Header.Multisample    = 1;
    d3d11cfg.D3D11.pDevice               = WND.Device;
    d3d11cfg.D3D11.pDeviceContext        = WND.Context;
    d3d11cfg.D3D11.pBackBufferRT         = WND.BackBufferRT;
    d3d11cfg.D3D11.pSwapChain            = WND.SwapChain;

    if (!ovrHmd_ConfigureRendering(HMD, &d3d11cfg.Config,
                                   ovrDistortionCap_Chromatic | ovrDistortionCap_Vignette |
                                   ovrDistortionCap_TimeWarp | ovrDistortionCap_Overdrive,
                                   HMD->DefaultEyeFov, EyeRenderDesc))
        return(1);
	#endif
#else
    APP_RENDER_SetupGeometryAndShaders();
#endif

    // Create the room model
    Scene roomScene(false); // Can simplify scene further with parameter if required.

    // Initialize Webcams and threads
	WebCamManager WebCamMngr(HMD);

    // MAIN LOOP
    // =========
    while (!(WND.Key['Q'] && WND.Key[VK_CONTROL]) && !WND.Key[VK_ESCAPE])
    {
        WND.HandleMessages();
        
        float       speed                    = 1.0f; // Can adjust the movement speed. 
        int         timesToRenderScene       = 1;    // Can adjust the render burden on the app.
		ovrVector3f useHmdToEyeViewOffset[2] = {EyeRenderDesc[0].HmdToEyeViewOffset,
			                                    EyeRenderDesc[1].HmdToEyeViewOffset};
        // Start timing
    #if SDK_RENDER
        ovrHmd_BeginFrame(HMD, 0);
    #else
        ovrHmd_BeginFrameTiming(HMD, 0);
    #endif

        // Handle key toggles for re-centering, meshes, FOV, etc.
        ExampleFeatures1(&speed, &timesToRenderScene, useHmdToEyeViewOffset);

        // Keyboard inputs to adjust player orientation
        if (WND.Key[VK_LEFT])  Yaw += 0.02f;
        if (WND.Key[VK_RIGHT]) Yaw -= 0.02f;

        // Keyboard inputs to adjust player position
        if (WND.Key['W']||WND.Key[VK_UP])   Pos+=Matrix4f::RotationY(Yaw).Transform(Vector3f(0,0,-speed*0.05f));
        if (WND.Key['S']||WND.Key[VK_DOWN]) Pos+=Matrix4f::RotationY(Yaw).Transform(Vector3f(0,0,+speed*0.05f));
        if (WND.Key['D'])                    Pos+=Matrix4f::RotationY(Yaw).Transform(Vector3f(+speed*0.05f,0,0));
        if (WND.Key['A'])                    Pos+=Matrix4f::RotationY(Yaw).Transform(Vector3f(-speed*0.05f,0,0));
        // Keep the player's eye level pinned to the profile height each frame.
        Pos.y = ovrHmd_GetFloat(HMD, OVR_KEY_EYE_HEIGHT, Pos.y);
  
        // Animate the cube
        if (speed)
            roomScene.Models[0]->Pos = Vector3f(9*sin(0.01f*clock),3,9*cos(0.01f*clock));

		// Get both eye poses simultaneously, with IPD offset already included. 
		ovrPosef temp_EyeRenderPose[2];
		ovrHmd_GetEyePoses(HMD, 0, useHmdToEyeViewOffset, temp_EyeRenderPose, NULL);

		// Update textures with WebCams' frames
		WebCamMngr.Update();	

        // Render the two undistorted eye views into their render buffers.  
        for (int eye = 0; eye < 2; eye++)
        {
            ImageBuffer * useBuffer      = pEyeRenderTexture[eye];  
            ovrPosef    * useEyePose     = &EyeRenderPose[eye];
            float       * useYaw         = &YawAtRender[eye];
            bool          clearEyeImage  = true;
            bool          updateEyeImage = true;

            // Handle key toggles for half-frame rendering, buffer resolution, etc.
            ExampleFeatures2(eye, &useBuffer, &useEyePose, &useYaw, &clearEyeImage, &updateEyeImage);

            if (clearEyeImage)
			#if RENDER_OPENGL
				WND.ClearAndSetRenderTarget(useBuffer, Recti(EyeRenderViewport[eye]));
			#else
                WND.ClearAndSetRenderTarget(useBuffer->TexRtv,
                                             pEyeDepthBuffer[eye], Recti(EyeRenderViewport[eye]));	
			#endif

            if (updateEyeImage)
            {
                // Write in values actually used (becomes significant in Example features)
                *useEyePose = temp_EyeRenderPose[eye];
                *useYaw     = Yaw;

                // Get view and projection matrices (note near Z to reduce eye strain)
                Matrix4f rollPitchYaw       = Matrix4f::RotationY(Yaw);
                Matrix4f finalRollPitchYaw  = rollPitchYaw * Matrix4f(useEyePose->Orientation);
                Vector3f finalUp            = finalRollPitchYaw.Transform(Vector3f(0, 1, 0));
                Vector3f finalForward       = finalRollPitchYaw.Transform(Vector3f(0, 0, -1));
                Vector3f shiftedEyePos      = Pos + rollPitchYaw.Transform(useEyePose->Position);

                Matrix4f view = Matrix4f::LookAtRH(shiftedEyePos, shiftedEyePos + finalForward, finalUp);
                Matrix4f proj = ovrMatrix4f_Projection(EyeRenderDesc[eye].Fov, 0.2f, 1000.0f, true); 

				// Keyboard input to switch from "look through" to scene mode
				static bool bOldLookThrough	= false;
				static bool bLookThrough	= true;
				if (WND.Key['X'] && bOldLookThrough != WND.Key['X']) { bLookThrough = !bLookThrough; }
				bOldLookThrough = WND.Key['X'];

				if(!bLookThrough)
				{
					// Render the scene
					for (int t=0; t<timesToRenderScene; t++)
						roomScene.Render(view, proj.Transposed());

					WebCamMngr.DrawBoard(view, proj.Transposed());
				}
				else { WebCamMngr.DrawLookThrough(eye); }
            }
        }

        // Do distortion rendering, Present and flush/sync
    #if SDK_RENDER
		#if RENDER_OPENGL
		ovrGLTexture eyeTexture[2]; // Gather data for eye textures 
        for (int eye = 0; eye<2; eye++)
        {
            eyeTexture[eye].OGL.Header.API				= ovrRenderAPI_OpenGL;
            eyeTexture[eye].OGL.Header.TextureSize		= pEyeRenderTexture[eye]->Size;
            eyeTexture[eye].OGL.Header.RenderViewport	= EyeRenderViewport[eye];
            eyeTexture[eye].OGL.TexId					= pEyeRenderTexture[eye]->TexId;
        }
		#else
        ovrD3D11Texture eyeTexture[2]; // Gather data for eye textures 
        for (int eye = 0; eye<2; eye++)
        {
            eyeTexture[eye].D3D11.Header.API            = ovrRenderAPI_D3D11;
            eyeTexture[eye].D3D11.Header.TextureSize    = pEyeRenderTexture[eye]->Size;
            eyeTexture[eye].D3D11.Header.RenderViewport = EyeRenderViewport[eye];
            eyeTexture[eye].D3D11.pTexture              = pEyeRenderTexture[eye]->Tex;
            eyeTexture[eye].D3D11.pSRView               = pEyeRenderTexture[eye]->TexSv;
        }
		#endif
		ovrHmd_EndFrame(HMD, EyeRenderPose, &eyeTexture[0].Texture);
    #else
        APP_RENDER_DistortAndPresent();
    #endif
    }

	WebCamMngr.StopCapture();

    // Release and close down
    ovrHmd_Destroy(HMD);
    ovr_Shutdown();
	WND.ReleaseWindow(hinst);

    return(0);
}
Esempio n. 14
0
// Configures the Oculus SDK renderer: builds HMD/distortion capability masks
// from the boolean flags, applies the requested IPD, configures SDK
// distortion rendering, and precomputes (transposed) per-eye projection and
// ortho matrices.  `ipd` is given in millimeters.  Returns 1 on success, 0 if
// ovrHmd_ConfigureRendering fails.
int OVRConfigureRenderer(int width, int height, float znear, float zfar, float ipd, float multisample, int lowpersistence, int dynamicprediction, int vsync, int chromatic, int timewarp, int vignette, int state, int flip, int srgb, int overdrive, int profile)
{
    unsigned hmdCaps;
	unsigned int distortionCaps;
    ovrFovPort eyeFov[EYE_ALL] = { _OVRGlobals.HMD->DefaultEyeFov[EYE_LEFT], _OVRGlobals.HMD->DefaultEyeFov[EYE_RIGHT] };
    float FovSideTanMax   = OVR::FovPort::Max(_OVRGlobals.HMD->DefaultEyeFov[EYE_LEFT], _OVRGlobals.HMD->DefaultEyeFov[EYE_RIGHT]).GetMaxSideTan();
	//float FovSideTanLimit = OVR::FovPort::Max(_OVRGlobals.HMD->MaxEyeFov[EYE_LEFT], _OVRGlobals.HMD->MaxEyeFov[EYE_RIGHT]).GetMaxSideTan();
	ovrBool didSetIPD = 0;

	// generate the HMD and distortion caps
	hmdCaps = (lowpersistence ? ovrHmdCap_LowPersistence : 0) |
	          (dynamicprediction ? ovrHmdCap_DynamicPrediction : 0) |
	          (vsync ? 0 : ovrHmdCap_NoVSync);

	distortionCaps = (chromatic ? ovrDistortionCap_Chromatic : 0) |
	                 (timewarp ? ovrDistortionCap_TimeWarp : 0) |
	                 (vignette ? ovrDistortionCap_Vignette : 0) |
					(state ? 0 : ovrDistortionCap_NoRestore) |
					(flip ? ovrDistortionCap_FlipInput : 0) |
					(srgb ? ovrDistortionCap_SRGB : 0) |
					(overdrive ? ovrDistortionCap_Overdrive : 0) |
					(profile ? ovrDistortionCap_ProfileNoTimewarpSpinWaits : 0);

	// FIX: use a float literal for the mm -> m conversion; `ipd * 0.001`
	// promoted the expression to double before narrowing back to float.
	didSetIPD = ovrHmd_SetFloat( _OVRGlobals.HMD, OVR_KEY_IPD, ipd * 0.001f );

	ovrHmd_SetEnabledCaps( _OVRGlobals.HMD, hmdCaps );

	ovrRenderAPIConfig config = ovrRenderAPIConfig();
	config.Header.API = ovrRenderAPI_OpenGL;
	config.Header.RTSize.w = width;
	config.Header.RTSize.h = height;
	config.Header.Multisample = multisample > 1 ? 1 : 0;

	// clamp fov
    eyeFov[EYE_LEFT] = OVR::FovPort::Min(eyeFov[EYE_LEFT], OVR::FovPort(FovSideTanMax));
    eyeFov[EYE_RIGHT] = OVR::FovPort::Min(eyeFov[EYE_RIGHT], OVR::FovPort(FovSideTanMax));

    if ( !ovrHmd_ConfigureRendering( _OVRGlobals.HMD, &config, distortionCaps, eyeFov, _OVRGlobals.EyeRenderDesc ) ) {
        return 0;
    }

	// Suppress the Health & Safety Warning during development builds.
#ifdef DEBUG
	ovrhmd_EnableHSWDisplaySDKRender( _OVRGlobals.HMD, false );
#else
	ovrHmd_DismissHSWDisplay( _OVRGlobals.HMD );
#endif

	// Read back the IPD actually in effect (the set above may not stick).
	_OVRGlobals.IPD = ovrHmd_GetFloat( _OVRGlobals.HMD, OVR_KEY_IPD, ipd * 0.001f );

	// create the projection
	_OVRGlobals.Eye[EYE_LEFT].Projection =
		ovrMatrix4f_Projection( _OVRGlobals.EyeRenderDesc[EYE_LEFT].Fov, znear, zfar, true );
    _OVRGlobals.Eye[EYE_RIGHT].Projection =
		ovrMatrix4f_Projection( _OVRGlobals.EyeRenderDesc[EYE_RIGHT].Fov, znear, zfar, true );

	// transpose the projection (ovrMatrix4f is row-major; GL expects column-major)
	OVR::Matrix4 <float>transposeLeft = _OVRGlobals.Eye[EYE_LEFT].Projection;
	OVR::Matrix4 <float>transposeRight = _OVRGlobals.Eye[EYE_RIGHT].Projection;

	_OVRGlobals.Eye[EYE_LEFT].Projection = transposeLeft.Transposed();
	_OVRGlobals.Eye[EYE_RIGHT].Projection = transposeRight.Transposed();

	// TODO: ortho
	{
		float    orthoDistance = 0.8f; // 2D is 0.8 meter from camera
		OVR::Vector2f orthoScale0   = OVR::Vector2f(1.0f) / OVR::Vector2f(_OVRGlobals.EyeRenderDesc[EYE_LEFT].PixelsPerTanAngleAtCenter);
		OVR::Vector2f orthoScale1   = OVR::Vector2f(1.0f) / OVR::Vector2f(_OVRGlobals.EyeRenderDesc[EYE_RIGHT].PixelsPerTanAngleAtCenter);

		_OVRGlobals.Eye[EYE_LEFT].OrthoProjection =
			ovrMatrix4f_OrthoSubProjection(_OVRGlobals.Eye[EYE_LEFT].Projection, orthoScale0, orthoDistance, _OVRGlobals.EyeRenderDesc[EYE_LEFT].ViewAdjust.x);

		_OVRGlobals.Eye[EYE_RIGHT].OrthoProjection =
			ovrMatrix4f_OrthoSubProjection(_OVRGlobals.Eye[EYE_RIGHT].Projection, orthoScale1, orthoDistance, _OVRGlobals.EyeRenderDesc[EYE_RIGHT].ViewAdjust.x);

		OVR::Matrix4 <float>transposeLeftOrtho = _OVRGlobals.Eye[EYE_LEFT].OrthoProjection;
		OVR::Matrix4 <float>transposeRightOrtho = _OVRGlobals.Eye[EYE_RIGHT].OrthoProjection;

		_OVRGlobals.Eye[EYE_LEFT].OrthoProjection = transposeLeftOrtho.Transposed();
		_OVRGlobals.Eye[EYE_RIGHT].OrthoProjection = transposeRightOrtho.Transposed();
	}

	return 1;
}
//-------------------------------------------------------------------------------------
void ProcessAndRender()
{
	// Render one complete stereo frame: draw the scene once per eye into a shared
	// offscreen render target, then run the SDK distortion meshes over that target
	// (with timewarp correction) and present.
	// Relies on globals initialized elsewhere: HMD, pRender, pRendertargetTexture,
	// EyeRenderDesc, EyeRenderViewport, UVScaleOffset, DistortionShaders,
	// DistortionDecl, MeshVBs/MeshIBs/MeshVBCnts/MeshIBCnts, pRoomScene.
	// (Dead `#if 0` and commented-out legacy render paths removed.)

	// Eye poses persist across frames so the timewarp pass below can compare the
	// pose each eye was rendered with against the latest predicted pose.
	static ovrPosef eyeRenderPose[2];
	ovrHmd_BeginFrameTiming(HMD, 0);

	// Adjust eye position and rotation from controls, maintaining y position from HMD.
	static float BodyYaw(3.141592f);
	static Vector3f HeadPos(0.0f, 1.6f, -5.0f);
	HeadPos.y = ovrHmd_GetFloat(HMD, OVR_KEY_EYE_HEIGHT, HeadPos.y);
	bool freezeEyeRender = false;  // debug switch: skip the scene pass, reuse the last render target

	pRender->BeginScene();

	if (!freezeEyeRender)
	{
		pRender->SetRenderTarget(pRendertargetTexture);
		pRender->SetViewport(Recti(0, 0, pRendertargetTexture->Width, pRendertargetTexture->Height));
		pRender->Clear();
		// Render eyes in the order recommended by the SDK (HMD->EyeRenderOrder).
		for (int eyeIndex = 0; eyeIndex < ovrEye_Count; ++eyeIndex)
		{
			ovrEyeType eye = HMD->EyeRenderOrder[eyeIndex];
			eyeRenderPose[eye] = ovrHmd_GetEyePose(HMD, eye);

			// Compose body yaw with the tracked head orientation to derive the
			// per-eye view basis and camera position.
			Matrix4f rollPitchYaw = Matrix4f::RotationY(BodyYaw);
			Matrix4f finalRollPitchYaw = rollPitchYaw * Matrix4f(eyeRenderPose[eye].Orientation);
			Vector3f finalUp = finalRollPitchYaw.Transform(Vector3f(0, 1, 0));
			Vector3f finalForward = finalRollPitchYaw.Transform(Vector3f(0, 0, -1));
			Vector3f shiftedEyePos = HeadPos + rollPitchYaw.Transform(eyeRenderPose[eye].Position);
			// NOTE(review): left-handed view with rightHanded=false projection —
			// presumably the renderer expects LH conventions; confirm against pRender.
			Matrix4f view = Matrix4f::LookAtLH(shiftedEyePos, shiftedEyePos + finalForward, finalUp);
			Matrix4f proj = ovrMatrix4f_Projection(EyeRenderDesc[eye].Fov, 0.01f, 10000.0f, false);

			pRender->SetViewport(Recti(EyeRenderViewport[eye]));
			pRender->SetProjection(proj);
			pRender->SetDepthMode(true, true);
			// ViewAdjust applies the per-eye IPD translation on top of the tracked view.
			pRoomScene->Render(pRender, Matrix4f::Translation(EyeRenderDesc[eye].ViewAdjust) * view);
		}
	}

	// Distortion pass: draw each eye's SDK-generated distortion mesh into the
	// backbuffer, sampling the shared render target.
	pRender->SetDefaultRenderTarget();
	pRender->SetFullViewport();
	pRender->Clear(0.0f, 0.0f, 0.0f, 0.0f);

	ShaderFill distortionShaderFill(DistortionShaders);
	distortionShaderFill.SetTexture(0, pRendertargetTexture);

	for (int eyeNum = 0; eyeNum < ovrEye_Count; eyeNum++)
	{
		// Per-eye shader constants: UV scale/offset map mesh UVs into the eye's
		// sub-rectangle of the render target; timewarp matrices re-orient the image
		// to the latest predicted head pose.
		DistortionShaders->SetUniform2f("EyeToSourceUVScale", UVScaleOffset[eyeNum][0].x, UVScaleOffset[eyeNum][0].y);
		DistortionShaders->SetUniform2f("EyeToSourceUVOffset", UVScaleOffset[eyeNum][1].x, UVScaleOffset[eyeNum][1].y);
		ovrMatrix4f timeWarpMatrices[2];
		ovrHmd_GetEyeTimewarpMatrices(HMD, (ovrEyeType)eyeNum, eyeRenderPose[eyeNum], timeWarpMatrices);
		DistortionShaders->SetUniform4x4f("EyeRotationStart", timeWarpMatrices[0]);  //Nb transposed when set
		DistortionShaders->SetUniform4x4f("EyeRotationEnd", timeWarpMatrices[1]);  //Nb transposed when set
		// Perform distortion
		pRender->Render(&distortionShaderFill, DistortionDecl, MeshVBs[eyeNum], MeshIBs[eyeNum],
			sizeof(ovrDistortionVertex), Matrix4f(), MeshVBCnts[eyeNum], MeshIBCnts[eyeNum], Prim_Triangles);
	}

	pRender->EndScene();
	pRender->Present();

	ovrHmd_EndFrameTiming(HMD);
}