Code example #1
File: HelloRift.cpp  Project: ritwik1993/OculusMatlab
  void onKey(int key, int scancode, int action, int mods) {
    if (oria::clearHSW(hmd)) {
      return;
    }

    if (CameraControl::instance().onKey(key, scancode, action, mods)) {
      return;
    }

    if (GLFW_PRESS == action) {  // act on the initial key press only
      int caps = ovrHmd_GetEnabledCaps(hmd);
      switch (key) {
      case GLFW_KEY_V:
        if (caps & ovrHmdCap_NoVSync) {
          ovrHmd_SetEnabledCaps(hmd, caps & ~ovrHmdCap_NoVSync);
        } else {
          ovrHmd_SetEnabledCaps(hmd, caps | ovrHmdCap_NoVSync);
        }
        return;
      case GLFW_KEY_P:
        if (caps & ovrHmdCap_LowPersistence) {
          ovrHmd_SetEnabledCaps(hmd, caps & ~ovrHmdCap_LowPersistence);
        } else {
          ovrHmd_SetEnabledCaps(hmd, caps | ovrHmdCap_LowPersistence);
        }
        return;
      case GLFW_KEY_R:
        resetPosition();
        return;
      }
    }

    GlfwApp::onKey(key, scancode, action, mods);
  }
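The V and P branches above share the same read-modify-write toggle. A minimal sketch that factors it out, assuming the 0.4.x-era C API used in this example (toggleHmdCap is a hypothetical helper, not an SDK function):

static void toggleHmdCap(ovrHmd hmd, unsigned int cap) {
  unsigned int caps = ovrHmd_GetEnabledCaps(hmd); // read the currently enabled cap bits
  ovrHmd_SetEnabledCaps(hmd, caps ^ cap);         // XOR flips exactly the requested bit
}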
Code example #2
 virtual void onKey(int key, int scancode, int action, int mods) {
   if (!CameraControl::instance().onKey(key, scancode, action, mods)) {
     static const float ROOT_2 = sqrt(2.0f);
     static const float INV_ROOT_2 = 1.0f / ROOT_2;
     if (action == GLFW_PRESS) {
       switch (key) {
       case GLFW_KEY_HOME:
         if (0 == perEyeDelay) {
           perEyeDelay = 1;
         } else {
           perEyeDelay <<= 1;
         }
         return;
       case GLFW_KEY_END:
         perEyeDelay >>= 1;
         return;
       case GLFW_KEY_R:
         resetCamera();
         return;
       case GLFW_KEY_P:
         {
           int caps = ovrHmd_GetEnabledCaps(hmd);
           if (caps & ovrHmdCap_LowPersistence) {
             ovrHmd_SetEnabledCaps(hmd, caps & ~ovrHmdCap_LowPersistence);
           } else {
             ovrHmd_SetEnabledCaps(hmd, caps | ovrHmdCap_LowPersistence);
           }
         }
         return;
       }
     }
     RiftGlfwApp::onKey(key, scancode, action, mods);
   }
 }
Code example #3
COculusVR::~COculusVR()
{
	// m_isReady stays true until initialization completes (see the constructor
	// in example #19), so skip teardown if the HMD was never fully set up.
	if(m_isReady)
		return;

	//release
	wzDeleteRenderTarget(&m_screenRender);
	wzDeleteRenderBuffer(&m_screenBuffer);
	wzDeleteTexture(&m_screenTex);

	wzDeleteShader(&LensShader);
	wzDeleteMesh(&MeshBuffer[0]);
	wzDeleteMesh(&MeshBuffer[1]);

	ovrHmd_SetEnabledCaps(Hmd, ovrHmdCap_DynamicPrediction);	//ovrHmdCap_LowPersistence

	if (Hmd)
    {
        ovrHmd_Destroy(Hmd);
        Hmd = 0;
    }

	ovr_Shutdown();

	m_isReady = true;
}
Code example #4
File: OculusInterface.cpp  Project: NCCA/SponzaRift
void OculusInterface::initOculus(float _devicePixelAspect)
{

  m_devicePixelAspect=_devicePixelAspect;
  std::cout<<"setting device aspect "<<m_devicePixelAspect<<"\n";
  m_hmd = ovrHmd_Create(0);
  if (!m_hmd)
  {
    std::cerr<<"Unable to create HMD: "<< ovrHmd_GetLastError(NULL)<<std::endl;
    std::cerr<<"Attempting to run without HMD\n";
    // If we didn't detect an Hmd, create a simulated one for debugging.
    m_hmd = ovrHmd_CreateDebug(ovrHmd_DK1);
    if (!m_hmd)
    {   // Failed Hmd creation.
      exit(EXIT_FAILURE);
    }
  }
  m_windowWidth=m_hmd->Resolution.w;
  m_windowHeight=m_hmd->Resolution.h;

  oculusDebug();
  // Start the sensor which provides the Rift’s pose and motion.
  ovrHmd_ConfigureTracking(m_hmd, ovrTrackingCap_Orientation | ovrTrackingCap_MagYawCorrection | ovrTrackingCap_Position, 0);
  // let's fill in some info about oculus
  m_eyeres[0] = ovrHmd_GetFovTextureSize(m_hmd, ovrEye_Left, m_hmd->DefaultEyeFov[0], 1.0);
  m_eyeres[1] = ovrHmd_GetFovTextureSize(m_hmd, ovrEye_Right, m_hmd->DefaultEyeFov[1],1.0);

	/* and create a single render target texture to encompass both eyes */
	//m_fbWidth = m_eyeres[0].w + m_eyeres[1].w;
	//m_fbHeight = m_eyeres[0].h > m_eyeres[1].h ? m_eyeres[0].h : m_eyeres[1].h;

	ovrSizei recommenedTex0Size = ovrHmd_GetFovTextureSize(m_hmd, ovrEye_Left,m_hmd->DefaultEyeFov[0], m_devicePixelAspect);
	ovrSizei recommenedTex1Size = ovrHmd_GetFovTextureSize(m_hmd, ovrEye_Right,m_hmd->DefaultEyeFov[1], m_devicePixelAspect);

	// Determine dimensions to fit both eyes into a single render target.
	m_fbWidth = recommenedTex0Size.w + recommenedTex1Size.w;
	m_fbHeight = std::max ( recommenedTex0Size.h, recommenedTex1Size.h );


	createRenderTarget();
	createOVRGLConfig();
	createOVRTextureBuffers();
	/* enable low-persistence display and dynamic prediction for latency compensation */
	ovrHmd_SetEnabledCaps(m_hmd, ovrHmdCap_LowPersistence | ovrHmdCap_DynamicPrediction);

	/* configure SDK-rendering and enable chromatic aberration correction, vignetting, and
	 * timewarp, which shifts the image before drawing to counter any latency between the call
	 * to ovrHmd_GetEyePose and ovrHmd_EndFrame.
	 */
	unsigned int dcaps = ovrDistortionCap_Chromatic | ovrDistortionCap_Vignette | ovrDistortionCap_TimeWarp |
		ovrDistortionCap_Overdrive;
	if(!ovrHmd_ConfigureRendering(m_hmd, &m_glcfg.Config, dcaps, m_hmd->DefaultEyeFov, m_eyeRdesc))
	{
		fprintf(stderr, "failed to configure distortion renderer\n");
	}


}
Code example #5
		void SetLowPresistenceMode(bool on)
		{
			unsigned caps = ovrHmd_GetEnabledCaps(m_device);
			if (on)
				caps |= ovrHmdCap_LowPersistence;
			else caps &= ~ovrHmdCap_LowPersistence;
			ovrHmd_SetEnabledCaps(m_device,caps);
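			// Note: this caches the HMD caps in StartTrackingCaps, the same field
			// that UpdateOVRParams (example #12) compares against tracking caps.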
			StartTrackingCaps = caps;
		}
Code example #6
File: vr.cpp  Project: nikki93/thesis-2015
VR::VR(Game &game)
{
    // create HMD
    if (!(m_hmd = ovrHmd_Create(0)))
    {
        std::cerr << "couldn't create Oculus HMD, falling back to debug HMD"
            << std::endl;
        if (!(m_hmd = ovrHmd_CreateDebug(ovrHmd_DK2)))
            throw Error("couldn't create debug HMD");
    }
    orient_window(game);

    // enable position, rotation tracking
    ovrHmd_ConfigureTracking(m_hmd, ovrTrackingCap_Orientation
        | ovrTrackingCap_MagYawCorrection
        | ovrTrackingCap_Position, 0);

    // calculate framebuffer resolution and create framebuffer
    ovrSizei eye_res[2];
    eye_res[0] = ovrHmd_GetFovTextureSize(m_hmd, ovrEye_Left,
        m_hmd->DefaultEyeFov[0], 1.0);
    eye_res[1] = ovrHmd_GetFovTextureSize(m_hmd, ovrEye_Right,
        m_hmd->DefaultEyeFov[1], 1.0);
    m_fb_width = eye_res[0].w + eye_res[1].w;
    m_fb_height = eye_res[0].h > eye_res[1].h ? eye_res[0].h : eye_res[1].h;
    update_fb();

    // fill in ovrGLConfig
    ovrGLConfig glcfg;
    memset(&glcfg, 0, sizeof glcfg);
    glcfg.OGL.Header.API = ovrRenderAPI_OpenGL;
    glcfg.OGL.Header.RTSize = m_hmd->Resolution;
    glcfg.OGL.Header.Multisample = 1;
    glcfg.OGL.Window = GetActiveWindow();
    glcfg.OGL.DC = wglGetCurrentDC();
    if (!(m_hmd->HmdCaps & ovrHmdCap_ExtendDesktop))
        ovrHmd_AttachToWindow(m_hmd, glcfg.OGL.Window, 0, 0);

    // enable HMD capabilities and configure SDK distortion rendering
    ovrHmd_SetEnabledCaps(m_hmd, ovrHmdCap_LowPersistence
        | ovrHmdCap_DynamicPrediction);
    if (!ovrHmd_ConfigureRendering(m_hmd, &glcfg.Config,
        ovrDistortionCap_Chromatic | ovrDistortionCap_Vignette
        | ovrDistortionCap_TimeWarp | ovrDistortionCap_Overdrive,
        m_hmd->DefaultEyeFov, m_eye_rdesc))
        throw Error("failed to configure distortion rendering");

    // disable health/safety warning
    ovrhmd_EnableHSWDisplaySDKRender(m_hmd, 0);
}
Code example #7
File: gl_ovrisolator.cpp  Project: CarlKenner/gz3doom
OvrSdkRenderer::OvrSdkRenderer()
{
	ovr_Initialize();
	hmd = ovrHmd_Create(0);
	if (hmd) {
		ovrHmd_ConfigureTracking(hmd,
			ovrTrackingCap_Orientation | ovrTrackingCap_MagYawCorrection | ovrTrackingCap_Position, // supported
			ovrTrackingCap_Orientation); // required

		// Set low persistence mode
		int hmdCaps = ovrHmd_GetEnabledCaps(hmd);
		ovrHmd_SetEnabledCaps(hmd, hmdCaps | ovrHmdCap_LowPersistence);
	}
}
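Unlike the other examples here, this one passes a non-zero third argument to ovrHmd_ConfigureTracking: ovrTrackingCap_Orientation is required, so configuration fails when orientation tracking is unavailable, while yaw correction and position tracking remain optional.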
Code example #8
File: vr.c  Project: isuker/Quakespasm-Rift
qboolean VR_Enable()
{
	int i;
	if( ovr_Initialize(NULL) != ovrSuccess ) {
		Con_Printf("Failed to Initialize Oculus SDK");
		return false;
	}

	if( ovrHmd_Create(0, &hmd) != ovrSuccess ) {
		Con_Printf("Failed to get HMD");
		return false;
	}

	if( !InitOpenGLExtensions() ) {
		Con_Printf("Failed to initialize OpenGL extensions");
		return false;
	}

	ovrHmd_CreateMirrorTextureGL(hmd, GL_RGBA, glwidth, glheight, (ovrTexture**)&mirror_texture);
	glGenFramebuffersEXT(1, &mirror_fbo);
	glBindFramebufferEXT(GL_READ_FRAMEBUFFER_EXT, mirror_fbo);
	glFramebufferTexture2DEXT(GL_READ_FRAMEBUFFER_EXT, GL_COLOR_ATTACHMENT0_EXT, GL_TEXTURE_2D, mirror_texture->OGL.TexId, 0);
	glFramebufferRenderbufferEXT(GL_READ_FRAMEBUFFER_EXT, GL_DEPTH_ATTACHMENT_EXT, GL_RENDERBUFFER_EXT, 0);
	glBindFramebufferEXT(GL_READ_FRAMEBUFFER_EXT, 0);

	for( i = 0; i < 2; i++ ) {
		ovrSizei size = ovrHmd_GetFovTextureSize(hmd, (ovrEyeType)i, hmd->DefaultEyeFov[i], 1);

		eyes[i].index = i;
		eyes[i].fbo = CreateFBO(size.w, size.h);
		eyes[i].render_desc = ovrHmd_GetRenderDesc(hmd, (ovrEyeType)i, hmd->DefaultEyeFov[i]);
		eyes[i].fov_x = (atan(hmd->DefaultEyeFov[i].LeftTan) + atan(hmd->DefaultEyeFov[i].RightTan)) / M_PI_DIV_180;
		eyes[i].fov_y = (atan(hmd->DefaultEyeFov[i].UpTan) + atan(hmd->DefaultEyeFov[i].DownTan)) / M_PI_DIV_180;
	}

	ovrHmd_SetEnabledCaps(hmd, ovrHmdCap_LowPersistence|ovrHmdCap_DynamicPrediction);
	ovrHmd_ConfigureTracking(hmd, ovrTrackingCap_Orientation|ovrTrackingCap_MagYawCorrection|ovrTrackingCap_Position, 0);
	
	wglSwapIntervalEXT(0); // Disable V-Sync

	vr_initialized = true;
	return true;
}
Code example #9
File: vr_ovr.c  Project: fourks/quake2vr
void VR_OVR_FrameStart()
{

	const char *results = ovrHmd_GetLatencyTestResult(hmd);
	if (results && strncmp(results,"",1))
	{
		static float lastms = 0;
		float ms;
		if (sscanf(results,"RESULT=%f ",&ms) && ms != lastms)
		{
			Cvar_SetInteger("vr_prediction",(int) ms);
			lastms = ms;
		}
	}

	if (vr_ovr_lowpersistence->modified)
	{
		unsigned int caps = 0;
		if (hmd->HmdCaps & ovrHmdCap_DynamicPrediction)
			caps |= ovrHmdCap_DynamicPrediction;

		if (hmd->HmdCaps & ovrHmdCap_LowPersistence && vr_ovr_lowpersistence->value)
			caps |= ovrHmdCap_LowPersistence;
		
		ovrHmd_SetEnabledCaps(hmd,caps);
		vr_ovr_lowpersistence->modified = false;
	}

	if (!withinFrame)
	{
		frameTime = ovrHmd_BeginFrameTiming(hmd,0);
	}
	else
	{
		ovrHmd_EndFrameTiming(hmd);
		ovrHmd_ResetFrameTiming(hmd,0);
		frameTime = ovrHmd_BeginFrameTiming(hmd,0);
	}
	withinFrame = true;
}
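Example #9 only requests a cap after checking that hmd->HmdCaps reports device support for it. A minimal sketch of that masking idiom, assuming the same 0.4.x API (maskSupportedCaps is a hypothetical helper, not an SDK call):

static unsigned int maskSupportedCaps(ovrHmd hmd, unsigned int wanted)
{
	return wanted & hmd->HmdCaps; /* drop bits the device/SDK cannot honor */
}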
Code example #10
File: OculusRift.cpp  Project: peter158/dewVRplayer
		OculusRift::OculusRift()
		{
			//init
			ovr_Initialize();

			//create hmd 
			mHmd = ovrHmd_Create(0);
			if (mHmd)
			{
				// Get more details about the HMD.
				ovrSizei resolution = mHmd->Resolution;
				console() << "hmd detected! " << mHmd->ProductName << " size: " << resolution.w << ", " << resolution.h << " reco Fov: " << mHmd->DefaultEyeFov[0].LeftTan << endl;
			}
			else
			{
				//no oculus
				console() << "No Oculus found! Check your settings." << endl;
				return;
			}

			ovrHmd_SetEnabledCaps(mHmd, ovrHmdCap_LowPersistence | ovrHmdCap_DynamicPrediction);

			ovrHmd_ConfigureTracking(mHmd, ovrTrackingCap_Orientation | ovrTrackingCap_MagYawCorrection | ovrTrackingCap_Position, 0);
		}
Code example #11
void HeadMountedDisplay::setEnabledCaps( kvs::UInt32 caps )
{
    KVS_OVR_CALL( ovrHmd_SetEnabledCaps( m_handler, caps ) );
}
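A hedged usage sketch for this wrapper; the construction is an assumption based on the snippet, while the cap bits are the same ones the raw C API takes:

// Assumed usage: setEnabledCaps() forwards its argument to ovrHmd_SetEnabledCaps.
HeadMountedDisplay hmd; // hypothetical construction; real setup omitted
hmd.setEnabledCaps( ovrHmdCap_LowPersistence | ovrHmdCap_DynamicPrediction );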
Code example #12
		void UpdateOVRParams()
		{


			m_data.eyeFov[0] = m_device->DefaultEyeFov[0];
			m_data.eyeFov[1] = m_device->DefaultEyeFov[1];

			float DesiredPixelDensity = 1;
			// Configure Stereo settings. Default pixel density is 1.0f.
			Sizei recommenedTex0Size = ovrHmd_GetFovTextureSize(m_device, ovrEye_Left, m_data.eyeFov[0], DesiredPixelDensity);
			Sizei recommenedTex1Size = ovrHmd_GetFovTextureSize(m_device, ovrEye_Right, m_data.eyeFov[1], DesiredPixelDensity);

			Sizei  rtSize(recommenedTex0Size.w + recommenedTex1Size.w,
				Alg::Max(recommenedTex0Size.h, recommenedTex1Size.h));

			m_data.texSize.set(rtSize.w, rtSize.h);
			Sizei eyeRenderSize[2];

			// Don't draw more than the recommended size; this also ensures that the
			// resolution reported in the overlay HUD is updated correctly for FOV/pixel density changes.
			eyeRenderSize[0] = Sizei::Min(Sizei(rtSize.w / 2, rtSize.h), recommenedTex0Size);
			eyeRenderSize[1] = Sizei::Min(Sizei(rtSize.w / 2, rtSize.h), recommenedTex1Size);
			m_data.eyeRenderSize[0].set(eyeRenderSize[0].w, eyeRenderSize[0].h);
			m_data.eyeRenderSize[1].set(eyeRenderSize[1].w, eyeRenderSize[1].h);

			m_data.hmdResolution.x = m_device->Resolution.w;
			m_data.hmdResolution.y = m_device->Resolution.h;


			bool IsLowPersistence = true;
			bool DynamicPrediction = false;
			bool VsyncEnabled = false;

			// Hmd caps.
			unsigned hmdCaps = (VsyncEnabled ? 0 : ovrHmdCap_NoVSync);
			if (IsLowPersistence)
				hmdCaps |= ovrHmdCap_LowPersistence;

			// ovrHmdCap_DynamicPrediction - enables internal latency feedback
			if (DynamicPrediction)
				hmdCaps |= ovrHmdCap_DynamicPrediction;

			// ovrHmdCap_DisplayOff - turns off the display
			//if (DisplaySleep)
			hmdCaps |= ovrHmdCap_DisplayOff;

			//if (!MirrorToWindow)
			hmdCaps |= ovrHmdCap_NoMirrorToWindow;

			ovrHmd_SetEnabledCaps(m_device, hmdCaps);
			

			unsigned sensorCaps = ovrTrackingCap_Orientation | ovrTrackingCap_MagYawCorrection;
			if (!IsTrackingConnected())
				printf("Oculus tracking device not detected!\n");
			sensorCaps |= ovrTrackingCap_Position;

			if (StartTrackingCaps != sensorCaps)
			{
				ovrHmd_ConfigureTracking(m_device, sensorCaps, 0);
				StartTrackingCaps = sensorCaps;
			}
		}
Code example #13
File: gkOculus.cpp  Project: CheryJazz/gkEngine
void gkOculus::InitHMD()
{
	ovrHmd_SetEnabledCaps(HMD, ovrHmdCap_LowPersistence);

	// Start the sensor which informs of the Rift's pose and motion
	ovrHmd_ConfigureTracking(HMD,   ovrTrackingCap_Orientation |
		ovrTrackingCap_MagYawCorrection |
		ovrTrackingCap_Position, 0);

	OVR::Sizei recommenedTex0Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Left,  HMD->DefaultEyeFov[0], 1.0f);
	OVR::Sizei recommenedTex1Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Right, HMD->DefaultEyeFov[1], 1.0f);

	gEnv->pRenderer->SetOverrideSize( recommenedTex0Size.w, recommenedTex0Size.h, true );

	gkTexturePtr tex = gEnv->pSystem->getTextureMngPtr()->getByName(_T("RT_BACKBUFFER_STEREOOUT"));

	for (int eyenum=0; eyenum < 2; ++eyenum)
	{
		m_disortation_renderable_eyes[eyenum] = new gkOculusDisortationRenderable(this);
		m_disortation_renderable_eyes[eyenum]->m_eye_index = eyenum;
		m_disortation_renderable_eyes[eyenum]->HMD = HMD;

		gkNameValuePairList createlist;

		createlist[_T("file")] = _T("engine/assets/meshs/oculus_disort.mtl");

		TCHAR buffer[255];
		_stprintf( buffer, _T("$OculusDisortation_%d"), eyenum );

		gkMaterialPtr mat = gEnv->pSystem->getMaterialMngPtr()->create( buffer, _T("stereo"), &createlist );
		mat->load();
		mat->setTexture( tex , 0);


		m_disortation_renderable_eyes[eyenum]->m_material = mat;

		_stprintf( buffer, _T("$OculusDisortationMesh_%d"), eyenum );

		createlist[_T("type")] =	_T("Pt2T2T2T2T2");
		gkMeshPtr mesh = gEnv->pSystem->getMeshMngPtr()->create( buffer, _T("stereo"), &createlist );
		mesh->load();
		m_disortation_renderable_eyes[eyenum]->m_mesh = mesh;

		ovrFovPort eyeFov[2] = { HMD->DefaultEyeFov[0], HMD->DefaultEyeFov[1] } ;

		float fovy = atan( eyeFov[0].UpTan ) * 2;
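		// Note: atan(UpTan) * 2 assumes a symmetric FOV; in general the vertical
		// FOV is atan(UpTan) + atan(DownTan), as computed in example #8.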
		gEnv->p3DEngine->getMainCamera()->setFOVy( fovy );

		ovrDistortionMesh meshData;
		ovrHmd_CreateDistortionMesh(HMD, (ovrEyeType)eyenum, eyeFov[eyenum],
			ovrDistortionCap_Chromatic | ovrDistortionCap_TimeWarp, &meshData);

		mesh->getVertexBuffer()->resizeDiscard( meshData.VertexCount );
		mesh->getIndexBuffer()->resizeDiscard( meshData.IndexCount );

		memcpy( mesh->getVertexBuffer()->data, meshData.pVertexData, meshData.VertexCount * sizeof(ovrDistortionVertex) );
		memcpy( mesh->getIndexBuffer()->data, meshData.pIndexData, meshData.IndexCount * sizeof(unsigned short) );

		ovrHmd_DestroyDistortionMesh( &meshData );

		//Create eye render description for use later
		EyeRenderDesc[eyenum] = ovrHmd_GetRenderDesc(HMD, (ovrEyeType)eyenum,  eyeFov[eyenum]);

		//Do scale and offset
		OVR::Sizei RenderTargetSize;
		RenderTargetSize.w = recommenedTex0Size.w;
		RenderTargetSize.h = recommenedTex1Size.h;

		ovrRecti EyeRenderViewport[2];
		EyeRenderViewport[0].Pos  = OVR::Vector2i(0,0);
		EyeRenderViewport[0].Size = OVR::Sizei(RenderTargetSize.w / 2, RenderTargetSize.h);
		EyeRenderViewport[1].Pos  = OVR::Vector2i((RenderTargetSize.w + 1) / 2, 0);
		EyeRenderViewport[1].Size = EyeRenderViewport[0].Size;

		ovrHmd_GetRenderScaleAndOffset(eyeFov[eyenum],RenderTargetSize, EyeRenderViewport[eyenum], m_disortation_renderable_eyes[eyenum]->UVScaleOffset[eyenum]);
	}
}
Code example #14
File: viewmat.c  Project: sborar/opengl-examples
/** Initialize the Oculus HMD.
 *
 * @param pos The position that we want the Oculus HMD to start at.
 */
static void viewmat_init_hmd_oculus(const float pos[3])
{
#ifdef MISSING_OVR
	msg(MSG_FATAL, "Oculus support is missing: You have not compiled this code against the LibOVR library.\n");
	exit(EXIT_FAILURE);
#else
	ovr_Initialize(NULL);

	int useDebugMode = 0;
	hmd = ovrHmd_Create(0);
	if(!hmd)
	{
		msg(MSG_WARNING, "Failed to open Oculus HMD. Is ovrd running? Is libOVRRT*.so.* in /usr/lib, /usr/local/lib, or the current directory?\n");
		msg(MSG_WARNING, "Press any key to proceed with Oculus debugging window.\n");
		char c; 
		if(fscanf(stdin, "%c", &c) < 0)
		{
			msg(MSG_ERROR, "fscanf error.\n");
			exit(EXIT_FAILURE);
		}

		hmd = ovrHmd_CreateDebug(ovrHmd_DK2);
		useDebugMode = 1;
		if(!hmd)
		{
			msg(MSG_ERROR, "Oculus: Failed to create virtual debugging HMD\n");
			exit(EXIT_FAILURE);
		}
	}
	
	msg(MSG_INFO, "Initialized HMD: %s - %s\n", hmd->Manufacturer, hmd->ProductName);

#if 0
	printf("default fov tangents left eye:\n");
	printf("up=%f\n", hmd->DefaultEyeFov[ovrEye_Left].UpTan);
	printf("up=%f\n", hmd->DefaultEyeFov[ovrEye_Left].DownTan);
	printf("up=%f\n", hmd->DefaultEyeFov[ovrEye_Left].LeftTan);
	printf("up=%f\n", hmd->DefaultEyeFov[ovrEye_Left].RightTan);
#endif
	

	/* pixelDensity can range from 0 to 1 (where 1 gives the highest
	 * resolution). Using smaller values will result in smaller
	 * textures that each eye is rendered into. */
	float pixelDensity = 1;
	/* Number of MSAA samples to use while rendering the scene
	 * for each eye. */
	GLint msaa_samples = 2;
	recommendTexSizeL = ovrHmd_GetFovTextureSize(hmd, ovrEye_Left,  hmd->DefaultEyeFov[ovrEye_Left],  pixelDensity);
	recommendTexSizeR = ovrHmd_GetFovTextureSize(hmd, ovrEye_Right, hmd->DefaultEyeFov[ovrEye_Right], pixelDensity);
	
	GLuint leftTextureAA,rightTextureAA;
	leftFramebufferAA  = kuhl_gen_framebuffer_msaa(recommendTexSizeL.w, recommendTexSizeL.h, &leftTextureAA, NULL, msaa_samples);
	rightFramebufferAA = kuhl_gen_framebuffer_msaa(recommendTexSizeR.w, recommendTexSizeR.h, &rightTextureAA, NULL, msaa_samples);
	GLuint leftTexture,rightTexture;
	leftFramebuffer  = kuhl_gen_framebuffer(recommendTexSizeL.w, recommendTexSizeL.h, &leftTexture,  NULL);
	rightFramebuffer = kuhl_gen_framebuffer(recommendTexSizeR.w, recommendTexSizeR.h, &rightTexture, NULL);
	//printf("Left recommended texture size: %d %d\n", recommendTexSizeL.w, recommendTexSizeL.h);
	//printf("Right recommended texture size: %d %d\n", recommendTexSizeR.w, recommendTexSizeR.h);

	EyeTexture[0].OGL.Header.API = ovrRenderAPI_OpenGL;
	EyeTexture[0].OGL.Header.TextureSize.w = recommendTexSizeL.w;
	EyeTexture[0].OGL.Header.TextureSize.h = recommendTexSizeL.h;
	EyeTexture[0].OGL.Header.RenderViewport.Pos.x = 0;
	EyeTexture[0].OGL.Header.RenderViewport.Pos.y = 0;
	EyeTexture[0].OGL.Header.RenderViewport.Size.w = recommendTexSizeL.w;
	EyeTexture[0].OGL.Header.RenderViewport.Size.h = recommendTexSizeL.h;

	EyeTexture[1].OGL.Header.API = ovrRenderAPI_OpenGL;
	EyeTexture[1].OGL.Header.TextureSize.w = recommendTexSizeR.w;
	EyeTexture[1].OGL.Header.TextureSize.h = recommendTexSizeR.h;
	EyeTexture[1].OGL.Header.RenderViewport.Pos.x = 0;
	EyeTexture[1].OGL.Header.RenderViewport.Pos.y = 0;
	EyeTexture[1].OGL.Header.RenderViewport.Size.w = recommendTexSizeR.w;
	EyeTexture[1].OGL.Header.RenderViewport.Size.h = recommendTexSizeR.h;

	EyeTexture[0].OGL.TexId = leftTexture;
	EyeTexture[1].OGL.TexId = rightTexture;

	union ovrGLConfig glcfg;
	memset(&glcfg, 0, sizeof(glcfg));
	glcfg.OGL.Header.API=ovrRenderAPI_OpenGL;
	glcfg.OGL.Header.Multisample = 0;
	glcfg.OGL.Disp = glXGetCurrentDisplay();
	
	if(hmd->Type == ovrHmd_DK2 && useDebugMode == 0)
	{
		/* Since the DK2 monitor is rotated, we need to swap the width
		 * and height here so that the final image correctly fills the
		 * entire screen. */
		glcfg.OGL.Header.BackBufferSize.h=hmd->Resolution.w;
		glcfg.OGL.Header.BackBufferSize.w=hmd->Resolution.h;
	} else
	{
		glcfg.OGL.Header.BackBufferSize.h=hmd->Resolution.h;
		glcfg.OGL.Header.BackBufferSize.w=hmd->Resolution.w;
	}
// interferes with PROJAT_FULLSCREEN
//	glutReshapeWindow(glcfg.OGL.Header.BackBufferSize.w,
//	                  glcfg.OGL.Header.BackBufferSize.h);

	unsigned int trackingcap = 0;
	trackingcap |= ovrTrackingCap_Orientation; // orientation tracking
	trackingcap |= ovrTrackingCap_Position;    // position tracking
	trackingcap |= ovrTrackingCap_MagYawCorrection; // use magnetic compass
	ovrHmd_ConfigureTracking(hmd, trackingcap, 0);

	
	unsigned int hmd_caps = 0;
	hmd_caps |= ovrHmdCap_DynamicPrediction; // enable internal latency feedback
	
	/* Disable vsync, allowing a frame rate higher than the display refresh
	   rate; this can cause tearing. On some windowing systems this setting
	   reduces issues with overrunning the time budget, and tearing still
	   does not occur. */
	hmd_caps |= ovrHmdCap_NoVSync;
	hmd_caps |= ovrHmdCap_LowPersistence; // Less blur during rotation; dimmer screen
	
	ovrHmd_SetEnabledCaps(hmd, hmd_caps);

	/* Distortion options
	 * See OVR_CAPI.h for additional options
	 */
	unsigned int distort_caps = 0;
	distort_caps |= ovrDistortionCap_LinuxDevFullscreen; // Screen rotation for DK2
	// distort_caps |= ovrDistortionCap_Chromatic; // Chromatic aberration correction - Necessary for 0.4.4, turned on permanently in 0.5.0.1
	distort_caps |= ovrDistortionCap_Vignette; // Apply gradient to edge of image
	// distort_caps |= ovrDistortionCap_Overdrive; // Overdrive brightness transitions to compensate for DK2 artifacts

	/* Shift image based on time difference between
	 * ovrHmd_GetEyePose() and ovrHmd_EndFrame(). This option seems to
	 * reduce FPS on at least one machine. */
	//distort_caps |= ovrDistortionCap_TimeWarp; 
	
	if(!ovrHmd_ConfigureRendering(hmd, &glcfg.Config, distort_caps, hmd->DefaultEyeFov, eye_rdesc)) {
		msg(MSG_FATAL, "Failed to configure distortion renderer.\n");
		exit(EXIT_FAILURE);
	}

	/* disable health and safety warning */
	ovrHmd_DismissHSWDisplay(hmd);

	vec3f_copy(oculus_initialPos, pos);

	// Try to connect to VRPN
	viewmat_init_vrpn();

	
	// TODO: We are supposed to do these things when we are done:
	//ovrHmd_Destroy(hmd);
	//ovr_Shutdown();
#endif
}
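A minimal sketch of the teardown that the TODO above defers, mirroring the calls used in example #3 (the function name is hypothetical):

static void viewmat_exit_hmd_oculus(void)
{
	if(hmd)
	{
		ovrHmd_Destroy(hmd);
		hmd = NULL;
	}
	ovr_Shutdown();
}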
Code example #15
int Init()
{
	ovr_Initialize();
	HMD = ovrHmd_Create(0);
	if (!HMD)
	{
		MessageBox(NULL, "Oculus Rift not detected.", "", MB_OK);
		return 1;
	}
	if (HMD->ProductName[0] == '\0')
	{
		MessageBox(NULL, "Rift detected, display not enabled.", "", MB_OK);
	}

	//Setup Window and Graphics - use window frame if relying on Oculus driver
	const int backBufferMultisample = 1;
	bool UseAppWindowFrame = true;
	HWND window = Util_InitWindowAndGraphics(Recti(HMD->WindowsPos, HMD->Resolution), 
		FullScreen, backBufferMultisample, UseAppWindowFrame, &pRender);
	if (!window) return 1;
	ovrHmd_AttachToWindow(HMD, window, NULL, NULL);

	Sizei recommenedTex0Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Left, HMD->DefaultEyeFov[0], 1.0f);
	Sizei recommenedTex1Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Right, HMD->DefaultEyeFov[1], 1.0f);
	Sizei RenderTargetSize;
	RenderTargetSize.w = recommenedTex0Size.w + recommenedTex1Size.w;
	RenderTargetSize.h = max(recommenedTex0Size.h, recommenedTex1Size.h);

	RenderTargetSize.w = HMD->Resolution.w;
	RenderTargetSize.h = HMD->Resolution.h;

	//const int eyeRenderMultisample = 1;
	pRendertargetTexture = pRender->CreateRenderTarget(RenderTargetSize.w/2, RenderTargetSize.h/2);
	//pRendertargetTexture = pRender->CreateRenderTarget(512, 512);
	RenderTargetSize.w = pRendertargetTexture->Width;
	RenderTargetSize.h = pRendertargetTexture->Height;

	IDirect3DSurface9 *zb = 0;
	pRender->Device->GetDepthStencilSurface(&zb);
	D3DSURFACE_DESC d;
	zb->GetDesc(&d);

	// Initialize eye rendering information.
	// The viewport sizes are re-computed in case RenderTargetSize changed due to HW limitations.
	ovrFovPort eyeFov[2] = { HMD->DefaultEyeFov[0], HMD->DefaultEyeFov[1] };

	EyeRenderViewport[0].Pos  = Vector2i(0, 0);
	EyeRenderViewport[0].Size = Sizei(RenderTargetSize.w / 2, RenderTargetSize.h);
	EyeRenderViewport[1].Pos  = Vector2i((RenderTargetSize.w + 1) / 2, 0);
	EyeRenderViewport[1].Size = EyeRenderViewport[0].Size;

	// ---------------------

	DistortionShaders = pRender->CreateShaderSet();
	DistortionShaders->SetShader(pRender->LoadBuiltinShader(Shader_Vertex, VShader_Distortion));
	DistortionShaders->SetShader(pRender->LoadBuiltinShader(Shader_Pixel, PShader_Distortion));
	DistortionDecl = VertexDecl::GetDecl(VertexType_Distortion);

	for (int eyeNum = 0; eyeNum < 2; ++eyeNum)
	{
		ovrDistortionMesh meshData;
		ovrHmd_CreateDistortionMesh(HMD, (ovrEyeType)eyeNum, eyeFov[eyeNum],
			ovrDistortionCap_Chromatic | ovrDistortionCap_TimeWarp, &meshData);
		MeshVBs[eyeNum] = pRender->CreateVertexBuffer();
		MeshVBs[eyeNum]->Data(meshData.pVertexData, sizeof(ovrDistortionVertex)*meshData.VertexCount);
		MeshIBs[eyeNum] = pRender->CreateIndexBuffer();
		MeshIBs[eyeNum]->Data(meshData.pIndexData, sizeof(unsigned short)*meshData.IndexCount);

		MeshVBCnts[eyeNum] = meshData.VertexCount;
		MeshIBCnts[eyeNum] = meshData.IndexCount;
		ovrHmd_DestroyDistortionMesh(&meshData);

		EyeRenderDesc[eyeNum] = ovrHmd_GetRenderDesc(HMD, (ovrEyeType)eyeNum, eyeFov[eyeNum]);

		ovrHmd_GetRenderScaleAndOffset(eyeFov[eyeNum], RenderTargetSize, EyeRenderViewport[eyeNum], UVScaleOffset[eyeNum]);
	}

	ovrHmd_SetEnabledCaps(HMD, ovrHmdCap_LowPersistence | ovrHmdCap_DynamicPrediction);

	ovrHmd_ConfigureTracking(HMD,
		ovrTrackingCap_Orientation | ovrTrackingCap_MagYawCorrection | ovrTrackingCap_Position, 0);

	// ---------------------

	pRoomScene = new Scene;
	PopulateRoomScene(pRoomScene, pRender);

	// texture model
	ShaderSet* ss = pRender->CreateShaderSet();
	ss->SetShader(pRender->LoadBuiltinShader(Shader_Vertex, VShader_MVP_UV));
	ss->SetShader(pRender->LoadBuiltinShader(Shader_Pixel, PShader_UV));

	Model<VertexXYZUV> *pModel2 = new Model<VertexXYZUV>();
	pModel2->Decl = VertexDecl::GetDecl(VertexType_XYZUV);
	pModel2->Fill = new ShaderFill(ss);

	//Texture* ttt = new Texture(pRender);
	//ttt->LoadFromFile("face.tga");
	pModel2->Fill->SetTexture(0, pRendertargetTexture);

	pModel2->AddVertex(VertexXYZUV(0.5f, -1.0f, 0.0f, 0.0f, 0.0f));
	pModel2->AddVertex(VertexXYZUV(2.5f, -1.0f, 0.0f, 1.0f, 0.0f));
	pModel2->AddVertex(VertexXYZUV(0.5f, 1.0f, 0.0f, 0.0f, 1.0f));
	pModel2->AddVertex(VertexXYZUV(2.5f, 1.0f, 0.0f, 1.0f, 1.0f));

	pModel2->AddVertex(VertexXYZUV(-1.0f, -1.5f, -1.0f, 0.0f, 0.0f));
	pModel2->AddVertex(VertexXYZUV(1.0f, -1.5f, -1.0f, 1.0f, 0.0f));
	pModel2->AddVertex(VertexXYZUV(-1.0f, -1.5f, 1.0f, 0.0f, 1.0f));
	pModel2->AddVertex(VertexXYZUV(1.0f, -1.5f, 1.0f, 1.0f, 1.0f));

	pModel2->AddTriangle(0, 1, 2);
	pModel2->AddTriangle(2, 1, 3);
	pModel2->AddTriangle(4, 5, 6);
	pModel2->AddTriangle(6, 5, 7);

	pScene = new Scene;
	pScene->World.Add(pModel2);

    return (0);
}
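Note that the recommended per-eye sizes computed above are immediately overwritten with the HMD's full resolution, and the render target is then created at half that size; the code reads back the texture's actual width and height, so the eye viewports are derived from what was really allocated.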
Code example #16
int OVRConfigureRenderer(int width, int height, float znear, float zfar, float ipd, float multisample, int lowpersistence, int dynamicprediction, int vsync, int chromatic, int timewarp, int vignette, int state, int flip, int srgb, int overdrive, int profile)
{
    unsigned hmdCaps;
	unsigned int distortionCaps;
    ovrFovPort eyeFov[EYE_ALL] = { _OVRGlobals.HMD->DefaultEyeFov[EYE_LEFT], _OVRGlobals.HMD->DefaultEyeFov[EYE_RIGHT] };
    float FovSideTanMax   = OVR::FovPort::Max(_OVRGlobals.HMD->DefaultEyeFov[EYE_LEFT], _OVRGlobals.HMD->DefaultEyeFov[EYE_RIGHT]).GetMaxSideTan();
	//float FovSideTanLimit = OVR::FovPort::Max(_OVRGlobals.HMD->MaxEyeFov[EYE_LEFT], _OVRGlobals.HMD->MaxEyeFov[EYE_RIGHT]).GetMaxSideTan();
	ovrBool didSetIPD = 0;

	// generate the HMD and distortion caps
	hmdCaps = (lowpersistence ? ovrHmdCap_LowPersistence : 0) |
	          (dynamicprediction ? ovrHmdCap_DynamicPrediction : 0) |
	          (vsync ? 0 : ovrHmdCap_NoVSync);

	distortionCaps = (chromatic ? ovrDistortionCap_Chromatic : 0) |
	                 (timewarp ? ovrDistortionCap_TimeWarp : 0) |
	                 (vignette ? ovrDistortionCap_Vignette : 0) |
					(state ? 0 : ovrDistortionCap_NoRestore) |
					(flip ? ovrDistortionCap_FlipInput : 0) |
					(srgb ? ovrDistortionCap_SRGB : 0) |
					(overdrive ? ovrDistortionCap_Overdrive : 0) |
					(profile ? ovrDistortionCap_ProfileNoTimewarpSpinWaits : 0);

	didSetIPD = ovrHmd_SetFloat( _OVRGlobals.HMD, OVR_KEY_IPD, ipd * 0.001 );

	ovrHmd_SetEnabledCaps( _OVRGlobals.HMD, hmdCaps );

	ovrRenderAPIConfig config = ovrRenderAPIConfig();
	config.Header.API = ovrRenderAPI_OpenGL;
	config.Header.RTSize.w = width;
	config.Header.RTSize.h = height;
	config.Header.Multisample = multisample > 1 ? 1 : 0;

	// clamp fov
    eyeFov[EYE_LEFT] = OVR::FovPort::Min(eyeFov[EYE_LEFT], OVR::FovPort(FovSideTanMax));
    eyeFov[EYE_RIGHT] = OVR::FovPort::Min(eyeFov[EYE_RIGHT], OVR::FovPort(FovSideTanMax));

    if ( !ovrHmd_ConfigureRendering( _OVRGlobals.HMD, &config, distortionCaps, eyeFov, _OVRGlobals.EyeRenderDesc ) ) {
        return 0;
    }

#ifdef DEBUG
	ovrhmd_EnableHSWDisplaySDKRender( _OVRGlobals.HMD, false );
#else
	ovrHmd_DismissHSWDisplay( _OVRGlobals.HMD );
#endif

	_OVRGlobals.IPD = ovrHmd_GetFloat( _OVRGlobals.HMD, OVR_KEY_IPD, ipd * 0.001 );

	// create the projection
	_OVRGlobals.Eye[EYE_LEFT].Projection =
		ovrMatrix4f_Projection( _OVRGlobals.EyeRenderDesc[EYE_LEFT].Fov, znear, zfar, true );
    _OVRGlobals.Eye[EYE_RIGHT].Projection =
		ovrMatrix4f_Projection( _OVRGlobals.EyeRenderDesc[EYE_RIGHT].Fov, znear, zfar, true );

	// transpose the projection
	OVR::Matrix4 <float>transposeLeft = _OVRGlobals.Eye[EYE_LEFT].Projection;
	OVR::Matrix4 <float>transposeRight = _OVRGlobals.Eye[EYE_RIGHT].Projection;

	_OVRGlobals.Eye[EYE_LEFT].Projection = transposeLeft.Transposed();
	_OVRGlobals.Eye[EYE_RIGHT].Projection = transposeRight.Transposed();

	// TODO: ortho
	{
		float    orthoDistance = 0.8f; // 2D is 0.8 meter from camera
		OVR::Vector2f orthoScale0   = OVR::Vector2f(1.0f) / OVR::Vector2f(_OVRGlobals.EyeRenderDesc[EYE_LEFT].PixelsPerTanAngleAtCenter);
		OVR::Vector2f orthoScale1   = OVR::Vector2f(1.0f) / OVR::Vector2f(_OVRGlobals.EyeRenderDesc[EYE_RIGHT].PixelsPerTanAngleAtCenter);

		_OVRGlobals.Eye[EYE_LEFT].OrthoProjection =
			ovrMatrix4f_OrthoSubProjection(_OVRGlobals.Eye[EYE_LEFT].Projection, orthoScale0, orthoDistance, _OVRGlobals.EyeRenderDesc[EYE_LEFT].ViewAdjust.x);

		_OVRGlobals.Eye[EYE_RIGHT].OrthoProjection =
			ovrMatrix4f_OrthoSubProjection(_OVRGlobals.Eye[EYE_RIGHT].Projection, orthoScale1, orthoDistance, _OVRGlobals.EyeRenderDesc[EYE_RIGHT].ViewAdjust.x);

		OVR::Matrix4 <float>transposeLeftOrtho = _OVRGlobals.Eye[EYE_LEFT].OrthoProjection;
		OVR::Matrix4 <float>transposeRightOrtho = _OVRGlobals.Eye[EYE_RIGHT].OrthoProjection;

		_OVRGlobals.Eye[EYE_LEFT].OrthoProjection = transposeLeftOrtho.Transposed();
		_OVRGlobals.Eye[EYE_RIGHT].OrthoProjection = transposeRightOrtho.Transposed();
	}

	return 1;
}
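An illustrative call showing how the boolean parameters map onto cap bits (all values are made up for the example; ipd is in millimeters, matching the ipd * 0.001 conversion inside the function):

// Hypothetical invocation: low persistence, dynamic prediction, vsync, and the
// chromatic/timewarp/vignette distortion caps enabled; flip/srgb/overdrive/profile off.
OVRConfigureRenderer(1280, 800, 0.1f, 1000.0f, 64.0f, 1.0f,
                     1 /*lowpersistence*/, 1 /*dynamicprediction*/, 1 /*vsync*/,
                     1 /*chromatic*/, 1 /*timewarp*/, 1 /*vignette*/,
                     1 /*state*/, 0 /*flip*/, 0 /*srgb*/, 0 /*overdrive*/, 0 /*profile*/);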
Code example #17
File: main.cpp  Project: Dean-Jansen/GMOculus
GMO double linkWindowHandle(void* windowHandle) {
	const int eyeRenderMultisample = 1;
	const int backBufferMultisample = 1;

	//HWND handle = GetWindow((HWND)(int)windowHandle, GW_OWNER);
	//HWND handle = (HWND) (int) windowHandle;
	HWND handle = (HWND) windowHandle;

	/*
	 * This block shows the passed window's title. Just for debugging / testing.
	LPWSTR title;
	GetWindowText(handle, title, GetWindowTextLength(handle) + 1);
	MessageBox(NULL, (LPCWSTR)title, (LPCWSTR)title, MB_ICONWARNING);
	MessageBoxA(NULL, (LPCSTR)title, (LPCSTR)title, MB_ICONWARNING);
	*/
	hWnd = handle;
	ovrHmd_AttachToWindow(HMD, handle, NULL, NULL);

	Sizei recommenedTex0Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Left,  HMD->DefaultEyeFov[0], 1.0f);
    Sizei recommenedTex1Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Right, HMD->DefaultEyeFov[1], 1.0f);
	Sizei RenderTargetSize;
    RenderTargetSize.w = recommenedTex0Size.w + recommenedTex1Size.w;
    RenderTargetSize.h = max ( recommenedTex0Size.h, recommenedTex1Size.h );

	bool UseAppWindowFrame = true;//(HMD->HmdCaps & ovrHmdCap_ExtendDesktop) ? false : true;
	HWND window = Util_InitWindowAndGraphics(Recti(HMD->WindowsPos, HMD->Resolution), FullScreen, backBufferMultisample, 1,&pRender, handle);
    pRendertargetTexture = pRender->CreateTexture(Texture_RGBA | Texture_RenderTarget |
                                                  eyeRenderMultisample,
                                                  RenderTargetSize.w, RenderTargetSize.h, NULL);
    // The actual RT size may be different due to HW limits.
    RenderTargetSize.w = pRendertargetTexture->GetWidth();
    RenderTargetSize.h = pRendertargetTexture->GetHeight();

    // Initialize eye rendering information.
    // The viewport sizes are re-computed in case RenderTargetSize changed due to HW limitations.
    ovrFovPort eyeFov[2] = { HMD->DefaultEyeFov[0], HMD->DefaultEyeFov[1] } ;

    EyeRenderViewport[0].Pos  = Vector2i(0,0);
    EyeRenderViewport[0].Size = Sizei(RenderTargetSize.w / 2, RenderTargetSize.h);
    EyeRenderViewport[1].Pos  = Vector2i((RenderTargetSize.w + 1) / 2, 0);
    EyeRenderViewport[1].Size = EyeRenderViewport[0].Size;

	EyeTexture[0].D3D11.Header.API            = ovrRenderAPI_D3D11;
    EyeTexture[0].D3D11.Header.TextureSize    = RenderTargetSize;
    EyeTexture[0].D3D11.Header.RenderViewport = EyeRenderViewport[0];
    EyeTexture[0].D3D11.pTexture              = pRendertargetTexture->Tex.GetPtr();
    EyeTexture[0].D3D11.pSRView               = pRendertargetTexture->TexSv.GetPtr();

    // Right eye uses the same texture, but different rendering viewport.
    EyeTexture[1] = EyeTexture[0];
    EyeTexture[1].D3D11.Header.RenderViewport = EyeRenderViewport[1];

    // Configure d3d11.
    ovrD3D11Config d3d11cfg;
    d3d11cfg.D3D11.Header.API         = ovrRenderAPI_D3D11;
    d3d11cfg.D3D11.Header.RTSize      = Sizei(HMD->Resolution.w, HMD->Resolution.h);
    d3d11cfg.D3D11.Header.Multisample = backBufferMultisample;
    d3d11cfg.D3D11.pDevice            = pRender->Device;
    d3d11cfg.D3D11.pDeviceContext     = pRender->Context;
    d3d11cfg.D3D11.pBackBufferRT      = pRender->BackBufferRT;
    d3d11cfg.D3D11.pSwapChain         = pRender->SwapChain;
	
    if (!ovrHmd_ConfigureRendering(HMD, &d3d11cfg.Config,
		                           ovrDistortionCap_Chromatic | ovrDistortionCap_Vignette |
                                   ovrDistortionCap_TimeWarp | ovrDistortionCap_Overdrive,
								   eyeFov, EyeRenderDesc)) return -2;

	// Some settings might be changed here later on.
	ovrHmd_SetEnabledCaps(HMD, ovrHmdCap_LowPersistence | ovrHmdCap_DynamicPrediction);// | ovrHmdCap_ExtendDesktop);

	// Start the sensor which informs of the Rift's pose and motion
    ovrHmd_ConfigureTracking(HMD, ovrTrackingCap_Orientation |
                            ovrTrackingCap_MagYawCorrection |
                            ovrTrackingCap_Position, 0);
	return 1;
}
Code example #18
//-------------------------------------------------------------------------------------
int Init()
{
    // Initializes LibOVR, and the Rift
    ovr_Initialize();
	HMD = ovrHmd_Create(0);
    if (!HMD)
    {
        MessageBoxA(NULL,"Oculus Rift not detected.","", MB_OK);
        return(1);
    }
	if (HMD->ProductName[0] == '\0') 
        MessageBoxA(NULL,"Rift detected, display not enabled.","", MB_OK);

	//Setup Window and Graphics - use window frame if relying on Oculus driver
	const int backBufferMultisample = 1;
    bool UseAppWindowFrame = (HMD->HmdCaps & ovrHmdCap_ExtendDesktop) ? false : true;
    HWND window = Util_InitWindowAndGraphics(Recti(HMD->WindowsPos, HMD->Resolution),
                                         FullScreen, backBufferMultisample, UseAppWindowFrame,&pRender);
	if (!window) return 1;
	ovrHmd_AttachToWindow(HMD, window, NULL, NULL);

    //Configure Stereo settings.
    Sizei recommenedTex0Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Left,  HMD->DefaultEyeFov[0], 1.0f);
    Sizei recommenedTex1Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Right, HMD->DefaultEyeFov[1], 1.0f);
	Sizei RenderTargetSize;
    RenderTargetSize.w = recommenedTex0Size.w + recommenedTex1Size.w;
    RenderTargetSize.h = max ( recommenedTex0Size.h, recommenedTex1Size.h );

    const int eyeRenderMultisample = 1;
    pRendertargetTexture = pRender->CreateTexture(Texture_RGBA | Texture_RenderTarget |
                                                  eyeRenderMultisample,
                                                  RenderTargetSize.w, RenderTargetSize.h, NULL);
    // The actual RT size may be different due to HW limits.
    RenderTargetSize.w = pRendertargetTexture->GetWidth();
    RenderTargetSize.h = pRendertargetTexture->GetHeight();

    // Initialize eye rendering information.
    // The viewport sizes are re-computed in case RenderTargetSize changed due to HW limitations.
    ovrFovPort eyeFov[2] = { HMD->DefaultEyeFov[0], HMD->DefaultEyeFov[1] } ;

    EyeRenderViewport[0].Pos  = Vector2i(0,0);
    EyeRenderViewport[0].Size = Sizei(RenderTargetSize.w / 2, RenderTargetSize.h);
    EyeRenderViewport[1].Pos  = Vector2i((RenderTargetSize.w + 1) / 2, 0);
    EyeRenderViewport[1].Size = EyeRenderViewport[0].Size;

    #if SDK_RENDER
	// Query D3D texture data.
    EyeTexture[0].D3D11.Header.API            = ovrRenderAPI_D3D11;
    EyeTexture[0].D3D11.Header.TextureSize    = RenderTargetSize;
    EyeTexture[0].D3D11.Header.RenderViewport = EyeRenderViewport[0];
    EyeTexture[0].D3D11.pTexture              = pRendertargetTexture->Tex.GetPtr();
    EyeTexture[0].D3D11.pSRView               = pRendertargetTexture->TexSv.GetPtr();

    // Right eye uses the same texture, but different rendering viewport.
    EyeTexture[1] = EyeTexture[0];
    EyeTexture[1].D3D11.Header.RenderViewport = EyeRenderViewport[1];

    // Configure d3d11.
    ovrD3D11Config d3d11cfg;
    d3d11cfg.D3D11.Header.API         = ovrRenderAPI_D3D11;
    d3d11cfg.D3D11.Header.RTSize      = Sizei(HMD->Resolution.w, HMD->Resolution.h);
    d3d11cfg.D3D11.Header.Multisample = backBufferMultisample;
    d3d11cfg.D3D11.pDevice            = pRender->Device;
    d3d11cfg.D3D11.pDeviceContext     = pRender->Context;
    d3d11cfg.D3D11.pBackBufferRT      = pRender->BackBufferRT;
    d3d11cfg.D3D11.pSwapChain         = pRender->SwapChain;

    if (!ovrHmd_ConfigureRendering(HMD, &d3d11cfg.Config,
		                           ovrDistortionCap_Chromatic | ovrDistortionCap_Vignette |
                                   ovrDistortionCap_TimeWarp | ovrDistortionCap_Overdrive,
								   eyeFov, EyeRenderDesc))	return(1);
    #else
	//Shader vertex format
	D3D11_INPUT_ELEMENT_DESC DistortionMeshVertexDesc[] = {
		{"Position", 0, DXGI_FORMAT_R32G32_FLOAT,   0, 0,  D3D11_INPUT_PER_VERTEX_DATA, 0},
		{"Position", 1, DXGI_FORMAT_R32_FLOAT,      0, 8,  D3D11_INPUT_PER_VERTEX_DATA, 0},
		{"Position", 2, DXGI_FORMAT_R32_FLOAT,      0, 12, D3D11_INPUT_PER_VERTEX_DATA, 0},
		{"TexCoord", 0, DXGI_FORMAT_R32G32_FLOAT,   0, 16, D3D11_INPUT_PER_VERTEX_DATA, 0},
		{"TexCoord", 1, DXGI_FORMAT_R32G32_FLOAT,   0, 24, D3D11_INPUT_PER_VERTEX_DATA, 0},
		{"TexCoord", 2, DXGI_FORMAT_R32G32_FLOAT,   0, 32, D3D11_INPUT_PER_VERTEX_DATA, 0}};
	
	//Distortion vertex shader
	const char* vertexShader = 
		"float2 EyeToSourceUVScale, EyeToSourceUVOffset;                                        \n"
		"float4x4 EyeRotationStart, EyeRotationEnd;                                             \n"
		"float2 TimewarpTexCoord(float2 TexCoord, float4x4 rotMat)                              \n"
		"{                                                                                      \n"
		// Vertex inputs are in TanEyeAngle space for the R,G,B channels (i.e. after chromatic 
		// aberration and distortion). These are now "real world" vectors in direction (x,y,1) 
		// relative to the eye of the HMD.	Apply the 3x3 timewarp rotation to these vectors.
		"    float3 transformed = float3( mul ( rotMat, float4(TexCoord.xy, 1, 1) ).xyz);       \n"
		// Project them back onto the Z=1 plane of the rendered images.
		"    float2 flattened = (transformed.xy / transformed.z);                               \n"
		// Scale them into ([0,0.5],[0,1]) or ([0.5,0],[0,1]) UV lookup space (depending on eye)
		"    return(EyeToSourceUVScale * flattened + EyeToSourceUVOffset);                      \n"
		"}                                                                                      \n"
		"void main(in float2  Position   : POSITION,  in float timewarpLerpFactor : POSITION1,  \n"
		"          in float   Vignette   : POSITION2, in float2 TexCoord0         : TEXCOORD0,  \n"
		"          in float2  TexCoord1  : TEXCOORD1, in float2 TexCoord2         : TEXCOORD2,  \n"
		"          out float4 oPosition  : SV_Position,                                         \n"
		"          out float2 oTexCoord0 : TEXCOORD0, out float2 oTexCoord1 : TEXCOORD1,        \n"
		"          out float2 oTexCoord2 : TEXCOORD2, out float  oVignette  : TEXCOORD3)        \n"
		"{                                                                                      \n"
		"    float4x4 lerpedEyeRot = lerp(EyeRotationStart, EyeRotationEnd, timewarpLerpFactor);\n"
		"    oTexCoord0  = TimewarpTexCoord(TexCoord0,lerpedEyeRot);                            \n"
		"    oTexCoord1  = TimewarpTexCoord(TexCoord1,lerpedEyeRot);                            \n"
		"    oTexCoord2  = TimewarpTexCoord(TexCoord2,lerpedEyeRot);                            \n"
		"    oPosition = float4(Position.xy, 0.5, 1.0);    oVignette = Vignette;                \n"
		"}";

	//Distortion pixel shader
	const char* pixelShader = 
		"Texture2D Texture   : register(t0);                                                    \n"
		"SamplerState Linear : register(s0);                                                    \n"
		"float4 main(in float4 oPosition  : SV_Position,  in float2 oTexCoord0 : TEXCOORD0,     \n"
		"            in float2 oTexCoord1 : TEXCOORD1,    in float2 oTexCoord2 : TEXCOORD2,     \n"
		"            in float  oVignette  : TEXCOORD3)    : SV_Target                           \n"
		"{                                                                                      \n"
		// 3 samples for fixing chromatic aberrations
		"    float R = Texture.Sample(Linear, oTexCoord0.xy).r;                                 \n"
		"    float G = Texture.Sample(Linear, oTexCoord1.xy).g;                                 \n"
		"    float B = Texture.Sample(Linear, oTexCoord2.xy).b;                                 \n"
		"    return (oVignette*float4(R,G,B,1));                                                \n"
		"}";
	pRender->InitShaders(vertexShader, pixelShader, &Shaders, &VertexIL,DistortionMeshVertexDesc,6);

    for ( int eyeNum = 0; eyeNum < 2; eyeNum++ )
    {
        // Allocate mesh vertices, registering with renderer using the OVR vertex format.
        ovrDistortionMesh meshData;
        ovrHmd_CreateDistortionMesh(HMD, (ovrEyeType) eyeNum, eyeFov[eyeNum],
			                        ovrDistortionCap_Chromatic | ovrDistortionCap_TimeWarp, &meshData);
        MeshVBs[eyeNum] = *pRender->CreateBuffer();
        MeshVBs[eyeNum]->Data(Buffer_Vertex,meshData.pVertexData,sizeof(ovrDistortionVertex)*meshData.VertexCount);
        MeshIBs[eyeNum] = *pRender->CreateBuffer();
        MeshIBs[eyeNum]->Data(Buffer_Index,meshData.pIndexData,sizeof(unsigned short) * meshData.IndexCount);
        ovrHmd_DestroyDistortionMesh( &meshData );

		//Create eye render description for use later
		EyeRenderDesc[eyeNum] = ovrHmd_GetRenderDesc(HMD, (ovrEyeType) eyeNum,  eyeFov[eyeNum]);

		//Do scale and offset
		ovrHmd_GetRenderScaleAndOffset(eyeFov[eyeNum],RenderTargetSize, EyeRenderViewport[eyeNum], UVScaleOffset[eyeNum]);
	}

    #endif

    ovrHmd_SetEnabledCaps(HMD, ovrHmdCap_LowPersistence | ovrHmdCap_DynamicPrediction);

	// Start the sensor which informs of the Rift's pose and motion
    ovrHmd_ConfigureTracking(HMD, ovrTrackingCap_Orientation |
                            ovrTrackingCap_MagYawCorrection |
                            ovrTrackingCap_Position, 0);

    // This creates lights and models.
  	pRoomScene = new Scene;
	sbuilder.PopulateRoomScene(pRoomScene, pRender);

    return (0);
}
Code example #19
COculusVR::COculusVR(bool latency)
{
	m_isReady = true;

	// Initializes LibOVR, and the Rift
    ovr_Initialize();

    Hmd = ovrHmd_Create(0);
    if (!Hmd)
    {
        MessageBoxA(NULL, "Oculus Rift not detected.", "", MB_OK);
        return;
    }
    if (Hmd->ProductName[0] == '\0')
        MessageBoxA(NULL, "Rift detected, display not enabled.", "", MB_OK);

    if (Hmd->HmdCaps & ovrHmdCap_ExtendDesktop)
    {
        WindowSize = Hmd->Resolution;
    }
    else
    {
        // In Direct App-rendered mode, we can use a smaller window size,
        // as it can have its own contents and isn't tied to the buffer.
        WindowSize = Sizei(1100, 618);//Sizei(960, 540); avoid rotated output bug.
    }

	ovrHmd_AttachToWindow(Hmd, wzGetWindowHandle(), NULL, NULL);

	// Configure Stereo settings.
	Sizei recommenedTex0Size = ovrHmd_GetFovTextureSize(Hmd, ovrEye_Left, Hmd->DefaultEyeFov[0], 1.0f);
	Sizei recommenedTex1Size = ovrHmd_GetFovTextureSize(Hmd, ovrEye_Right, Hmd->DefaultEyeFov[1], 1.0f);

    EyeRenderTargetSize.w = recommenedTex0Size.w + recommenedTex1Size.w;
    EyeRenderTargetSize.h = Alg::Max( recommenedTex0Size.h, recommenedTex1Size.h );

	//Create Framebuffer
	wzCreateRenderTarget(&m_screenRender);
	wzCreateRenderBufferDepth(&m_screenBuffer,EyeRenderTargetSize.w,EyeRenderTargetSize.h);
	wzCreateTexture(&m_screenTex,EyeRenderTargetSize.w,EyeRenderTargetSize.h,WZ_FORMATTYPE_RGB,NULL);
	//attach
	wzSetRenderBuffer(&m_screenRender,&m_screenBuffer);
	wzSetRenderTexture(&m_screenRender,&m_screenTex);

    // Initialize eye rendering information.
    // The viewport sizes are re-computed in case RenderTargetSize changed due to HW limitations.
    ovrFovPort eyeFov[2] = { Hmd->DefaultEyeFov[0], Hmd->DefaultEyeFov[1] } ;

    EyeRenderViewport[0].Pos  = Vector2i(0,0);
    EyeRenderViewport[0].Size = Sizei(EyeRenderTargetSize.w / 2, EyeRenderTargetSize.h);
    EyeRenderViewport[1].Pos  = Vector2i((EyeRenderTargetSize.w + 1) / 2, 0);
    EyeRenderViewport[1].Size = EyeRenderViewport[0].Size;

	//Shader vertex format
	wzVertexElements ve_var[] = {
		{WZVETYPE_FLOAT2,"position"},
		{WZVETYPE_FLOAT1,"timewarpLerpFactor"},
		{WZVETYPE_FLOAT1,"vignette"},
		{WZVETYPE_FLOAT2,"texCoord0"},
		{WZVETYPE_FLOAT2,"texCoord1"},
		{WZVETYPE_FLOAT2,"texCoord2"},
		WZVE_TMT()
	};

	//create mesh
	for ( int eyeNum = 0; eyeNum < 2; eyeNum++ )
	{
		// Allocate mesh vertices, registering with renderer using the OVR vertex format.
		ovrDistortionMesh meshData;
		ovrHmd_CreateDistortionMesh(Hmd, (ovrEyeType) eyeNum, eyeFov[eyeNum],
									ovrDistortionCap_Chromatic | ovrDistortionCap_TimeWarp, &meshData);
		//Create data arrays
		wzVector2* vertex_pos = new wzVector2[meshData.VertexCount];
		float* vertex_posTimewarp = new float[meshData.VertexCount];
		float* vertex_posVignette = new float[meshData.VertexCount];
		wzVector2* vertex_textanR = new wzVector2[meshData.VertexCount];
		wzVector2* vertex_textanG = new wzVector2[meshData.VertexCount];
		wzVector2* vertex_textanB = new wzVector2[meshData.VertexCount];

		//data copy
		for(unsigned int i = 0; i < meshData.VertexCount; i++) {
			vertex_pos[i].x = meshData.pVertexData[i].ScreenPosNDC.x;
			vertex_pos[i].y = meshData.pVertexData[i].ScreenPosNDC.y;
			vertex_posTimewarp[i] = meshData.pVertexData[i].TimeWarpFactor;
			vertex_posVignette[i] = meshData.pVertexData[i].VignetteFactor;
			vertex_textanR[i].x = meshData.pVertexData[i].TanEyeAnglesR.x;
			vertex_textanR[i].y = meshData.pVertexData[i].TanEyeAnglesR.y;
			vertex_textanG[i].x = meshData.pVertexData[i].TanEyeAnglesG.x;
			vertex_textanG[i].y = meshData.pVertexData[i].TanEyeAnglesG.y;
			vertex_textanB[i].x = meshData.pVertexData[i].TanEyeAnglesB.x;
			vertex_textanB[i].y = meshData.pVertexData[i].TanEyeAnglesB.y;
		}

		void* vertex_pointer[] = {vertex_pos,vertex_posTimewarp,vertex_posVignette,vertex_textanR,vertex_textanG,vertex_textanB};

		if(wzCreateMesh(&MeshBuffer[eyeNum], vertex_pointer, ve_var,
			meshData.pIndexData, meshData.VertexCount, meshData.IndexCount)) {
				MessageBoxA(NULL, "Lens Distort Mesh Error.", "", MB_OK);
				
			delete[] vertex_pos;
			delete[] vertex_posTimewarp;
			delete[] vertex_posVignette;
			delete[] vertex_textanR;
			delete[] vertex_textanG;
			delete[] vertex_textanB;

			return;	//error
		}
		wzChangeDrawMode(&MeshBuffer[eyeNum],WZ_MESH_DF_TRIANGLELIST);

		delete[] vertex_pos;
		delete[] vertex_posTimewarp;
		delete[] vertex_posVignette;
		delete[] vertex_textanR;
		delete[] vertex_textanG;
		delete[] vertex_textanB;

		ovrHmd_DestroyDistortionMesh(&meshData);

		//Create eye render description for use later
		EyeRenderDesc[eyeNum] = ovrHmd_GetRenderDesc(Hmd, (ovrEyeType) eyeNum,  eyeFov[eyeNum]);

		//Do scale and offset
		ovrHmd_GetRenderScaleAndOffset(eyeFov[eyeNum],EyeRenderTargetSize, EyeRenderViewport[eyeNum], UVScaleOffset[eyeNum]);
	}

	//Create shader
	if(wzCreateShader(&LensShader, ols_vertexshader,ols_flagshader, ve_var)) {
		MessageBoxA(NULL, "Lens Shader Compile Error.", "", MB_OK);
		return;
	}

    if(latency) ovrHmd_SetEnabledCaps(Hmd, ovrHmdCap_DynamicPrediction);	//ovrHmdCap_LowPersistence
	// Start the sensor which informs of the Rift's pose and motion
	ovrHmd_ConfigureTracking(Hmd, ovrTrackingCap_Orientation |
								ovrTrackingCap_MagYawCorrection, 0);		//not use : ovrTrackingCap_Position

	m_isReady = false;
}
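Note the inverted flag convention: m_isReady is set true on entry and flipped to false only after every setup step succeeds, which is why the destructor in example #3 returns early (skipping teardown) while m_isReady is still true.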
Code example #20
//-------------------------------------------------------------------------------------
int WINAPI WinMain(HINSTANCE hinst, HINSTANCE, LPSTR, int)
{
    // Initializes LibOVR, and the Rift
    ovr_Initialize();
    HMD = ovrHmd_Create(0);

    if (!HMD)                       { MessageBoxA(NULL,"Oculus Rift not detected.","", MB_OK); return(0); }
    if (HMD->ProductName[0] == '\0')  MessageBoxA(NULL,"Rift detected, display not enabled.", "", MB_OK);

    // Setup Window and Graphics - use window frame if relying on Oculus driver
    bool windowed = (HMD->HmdCaps & ovrHmdCap_ExtendDesktop) ? false : true;    
    if (!WND.InitWindowAndDevice(hinst, Recti(HMD->WindowsPos, HMD->Resolution), windowed))
        return(0);

    WND.SetMaxFrameLatency(1);
    ovrHmd_AttachToWindow(HMD, WND.Window, NULL, NULL);
    ovrHmd_SetEnabledCaps(HMD, ovrHmdCap_LowPersistence | ovrHmdCap_DynamicPrediction);

    // Start the sensor which informs of the Rift's pose and motion
    ovrHmd_ConfigureTracking(HMD, ovrTrackingCap_Orientation | ovrTrackingCap_MagYawCorrection |
                                  ovrTrackingCap_Position, 0);

    // Make the eye render buffers (caution if actual size < requested due to HW limits). 
    for (int eye=0; eye<2; eye++)
    {
        Sizei idealSize             = ovrHmd_GetFovTextureSize(HMD, (ovrEyeType)eye,
                                                               HMD->DefaultEyeFov[eye], 1.0f);
        pEyeRenderTexture[eye]      = new ImageBuffer(true, false, idealSize);
        pEyeDepthBuffer[eye]        = new ImageBuffer(true, true, pEyeRenderTexture[eye]->Size);
        EyeRenderViewport[eye].Pos  = Vector2i(0, 0);
        EyeRenderViewport[eye].Size = pEyeRenderTexture[eye]->Size;
    }

    // Setup VR components
#if SDK_RENDER
	#if RENDER_OPENGL
    ovrGLConfig oglcfg;
    oglcfg.OGL.Header.API				= ovrRenderAPI_OpenGL;
    oglcfg.OGL.Header.BackBufferSize	= Sizei(HMD->Resolution.w, HMD->Resolution.h);
    oglcfg.OGL.Header.Multisample		= 1;
	oglcfg.OGL.Window					= OGL.Window;
	oglcfg.OGL.DC						= GetDC(OGL.Window);

    if (!ovrHmd_ConfigureRendering(HMD, &oglcfg.Config,
		                           ovrDistortionCap_Chromatic | ovrDistortionCap_Vignette |
                                   ovrDistortionCap_TimeWarp | ovrDistortionCap_Overdrive,
								   HMD->DefaultEyeFov, EyeRenderDesc))	
		return(1);
	#else
    ovrD3D11Config d3d11cfg;
    d3d11cfg.D3D11.Header.API            = ovrRenderAPI_D3D11;
    d3d11cfg.D3D11.Header.BackBufferSize = Sizei(HMD->Resolution.w, HMD->Resolution.h);
    d3d11cfg.D3D11.Header.Multisample    = 1;
    d3d11cfg.D3D11.pDevice               = WND.Device;
    d3d11cfg.D3D11.pDeviceContext        = WND.Context;
    d3d11cfg.D3D11.pBackBufferRT         = WND.BackBufferRT;
    d3d11cfg.D3D11.pSwapChain            = WND.SwapChain;

    if (!ovrHmd_ConfigureRendering(HMD, &d3d11cfg.Config,
                                   ovrDistortionCap_Chromatic | ovrDistortionCap_Vignette |
                                   ovrDistortionCap_TimeWarp | ovrDistortionCap_Overdrive,
                                   HMD->DefaultEyeFov, EyeRenderDesc))
        return(1);
	#endif
#else
    APP_RENDER_SetupGeometryAndShaders();
#endif

    // Create the room model
    Scene roomScene(false); // Can simplify scene further with parameter if required.

    // Initialize Webcams and threads
	WebCamManager WebCamMngr(HMD);

    // MAIN LOOP
    // =========
    while (!(WND.Key['Q'] && WND.Key[VK_CONTROL]) && !WND.Key[VK_ESCAPE])
    {
        WND.HandleMessages();
        
        float       speed                    = 1.0f; // Can adjust the movement speed. 
        int         timesToRenderScene       = 1;    // Can adjust the render burden on the app.
		ovrVector3f useHmdToEyeViewOffset[2] = {EyeRenderDesc[0].HmdToEyeViewOffset,
			                                    EyeRenderDesc[1].HmdToEyeViewOffset};
        // Start timing
    #if SDK_RENDER
        ovrHmd_BeginFrame(HMD, 0);
    #else
        ovrHmd_BeginFrameTiming(HMD, 0);
    #endif

        // Handle key toggles for re-centering, meshes, FOV, etc.
        ExampleFeatures1(&speed, &timesToRenderScene, useHmdToEyeViewOffset);

        // Keyboard inputs to adjust player orientation
        if (WND.Key[VK_LEFT])  Yaw += 0.02f;
        if (WND.Key[VK_RIGHT]) Yaw -= 0.02f;

        // Keyboard inputs to adjust player position
        if (WND.Key['W']||WND.Key[VK_UP])   Pos+=Matrix4f::RotationY(Yaw).Transform(Vector3f(0,0,-speed*0.05f));
        if (WND.Key['S']||WND.Key[VK_DOWN]) Pos+=Matrix4f::RotationY(Yaw).Transform(Vector3f(0,0,+speed*0.05f));
        if (WND.Key['D'])                    Pos+=Matrix4f::RotationY(Yaw).Transform(Vector3f(+speed*0.05f,0,0));
        if (WND.Key['A'])                    Pos+=Matrix4f::RotationY(Yaw).Transform(Vector3f(-speed*0.05f,0,0));
        Pos.y = ovrHmd_GetFloat(HMD, OVR_KEY_EYE_HEIGHT, Pos.y);
  
        // Animate the cube
        if (speed)
            roomScene.Models[0]->Pos = Vector3f(9*sin(0.01f*clock),3,9*cos(0.01f*clock));

		// Get both eye poses simultaneously, with IPD offset already included. 
		ovrPosef temp_EyeRenderPose[2];
		ovrHmd_GetEyePoses(HMD, 0, useHmdToEyeViewOffset, temp_EyeRenderPose, NULL);

		// Update textures with WebCams' frames
		WebCamMngr.Update();	

        // Render the two undistorted eye views into their render buffers.  
        for (int eye = 0; eye < 2; eye++)
        {
            ImageBuffer * useBuffer      = pEyeRenderTexture[eye];  
            ovrPosef    * useEyePose     = &EyeRenderPose[eye];
            float       * useYaw         = &YawAtRender[eye];
            bool          clearEyeImage  = true;
            bool          updateEyeImage = true;

            // Handle key toggles for half-frame rendering, buffer resolution, etc.
            ExampleFeatures2(eye, &useBuffer, &useEyePose, &useYaw, &clearEyeImage, &updateEyeImage);

            if (clearEyeImage)
			#if RENDER_OPENGL
				WND.ClearAndSetRenderTarget(useBuffer, Recti(EyeRenderViewport[eye]));
			#else
                WND.ClearAndSetRenderTarget(useBuffer->TexRtv,
                                             pEyeDepthBuffer[eye], Recti(EyeRenderViewport[eye]));	
			#endif

            if (updateEyeImage)
            {
                // Write in values actually used (becomes significant in Example features)
                *useEyePose = temp_EyeRenderPose[eye];
                *useYaw     = Yaw;

                // Get view and projection matrices (note near Z to reduce eye strain)
                Matrix4f rollPitchYaw       = Matrix4f::RotationY(Yaw);
                Matrix4f finalRollPitchYaw  = rollPitchYaw * Matrix4f(useEyePose->Orientation);
                Vector3f finalUp            = finalRollPitchYaw.Transform(Vector3f(0, 1, 0));
                Vector3f finalForward       = finalRollPitchYaw.Transform(Vector3f(0, 0, -1));
                Vector3f shiftedEyePos      = Pos + rollPitchYaw.Transform(useEyePose->Position);

                Matrix4f view = Matrix4f::LookAtRH(shiftedEyePos, shiftedEyePos + finalForward, finalUp);
                Matrix4f proj = ovrMatrix4f_Projection(EyeRenderDesc[eye].Fov, 0.2f, 1000.0f, true); 

				// Keyboard input to switch from "look through" to scene mode
				static bool bOldLookThrough	= false;
				static bool bLookThrough	= true;
				if (WND.Key['X'] && bOldLookThrough != WND.Key['X']) { bLookThrough = !bLookThrough; }
				bOldLookThrough = WND.Key['X'];

				if(!bLookThrough)
				{
					// Render the scene
					for (int t=0; t<timesToRenderScene; t++)
						roomScene.Render(view, proj.Transposed());

					WebCamMngr.DrawBoard(view, proj.Transposed());
				}
				else { WebCamMngr.DrawLookThrough(eye); }
            }
        }

        // Do distortion rendering, Present and flush/sync
    #if SDK_RENDER
		#if RENDER_OPENGL
		ovrGLTexture eyeTexture[2]; // Gather data for eye textures 
        for (int eye = 0; eye<2; eye++)
        {
            eyeTexture[eye].OGL.Header.API				= ovrRenderAPI_OpenGL;
            eyeTexture[eye].OGL.Header.TextureSize		= pEyeRenderTexture[eye]->Size;
            eyeTexture[eye].OGL.Header.RenderViewport	= EyeRenderViewport[eye];
            eyeTexture[eye].OGL.TexId					= pEyeRenderTexture[eye]->TexId;
        }
		#else
        ovrD3D11Texture eyeTexture[2]; // Gather data for eye textures 
        for (int eye = 0; eye<2; eye++)
        {
            eyeTexture[eye].D3D11.Header.API            = ovrRenderAPI_D3D11;
            eyeTexture[eye].D3D11.Header.TextureSize    = pEyeRenderTexture[eye]->Size;
            eyeTexture[eye].D3D11.Header.RenderViewport = EyeRenderViewport[eye];
            eyeTexture[eye].D3D11.pTexture              = pEyeRenderTexture[eye]->Tex;
            eyeTexture[eye].D3D11.pSRView               = pEyeRenderTexture[eye]->TexSv;
        }
		#endif
		ovrHmd_EndFrame(HMD, EyeRenderPose, &eyeTexture[0].Texture);
    #else
        APP_RENDER_DistortAndPresent();
    #endif
    }

	WebCamMngr.StopCapture();

    // Release and close down
    ovrHmd_Destroy(HMD);
    ovr_Shutdown();
	WND.ReleaseWindow(hinst);

    return(0);
}
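The #else branch above leaves distortion to the app. As a rough sketch only (not the sample's actual implementation), a client-distortion APP_RENDER_DistortAndPresent() would wait for the timewarp point, fetch the per-eye timewarp matrices, draw the distortion meshes, present, and close the frame timing. DrawDistortionMesh() and Present() below are hypothetical app-side helpers:

// Sketch of a client-distortion present step (SDK_RENDER == 0).
// DrawDistortionMesh() and Present() are hypothetical app helpers.
void APP_RENDER_DistortAndPresent()
{
    ovrFrameTiming timing = ovrHmd_GetFrameTiming(HMD, 0);
    ovr_WaitTillTime(timing.TimewarpPointSeconds);    // run distortion as late as possible

    for (int eye = 0; eye < 2; eye++)
    {
        ovrMatrix4f timeWarpMatrices[2];
        ovrHmd_GetEyeTimewarpMatrices(HMD, (ovrEyeType)eye,
                                      EyeRenderPose[eye], timeWarpMatrices);
        DrawDistortionMesh(eye, timeWarpMatrices);    // bind uniforms, draw this eye's mesh
    }

    Present();                     // swap buffers
    ovrHmd_EndFrameTiming(HMD);    // pairs with ovrHmd_BeginFrameTiming above
}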
Code Example #21
File: ovr.cpp  Project: DanielAeolusLaude/mame
	bool OVR::postReset(void* _nwh, ovrRenderAPIConfig* _config, bool _debug)
	{
		if (_debug)
		{
			switch (_config->Header.API)
			{
#if BGFX_CONFIG_RENDERER_DIRECT3D11
			case ovrRenderAPI_D3D11:
				{
					ovrD3D11ConfigData* data = (ovrD3D11ConfigData*)_config;
#	if OVR_VERSION > OVR_VERSION_043
					m_rtSize = data->Header.BackBufferSize;
#	else
					m_rtSize = data->Header.RTSize;
#	endif // OVR_VERSION > OVR_VERSION_043
				}
				break;
#endif // BGFX_CONFIG_RENDERER_DIRECT3D11

#if BGFX_CONFIG_RENDERER_OPENGL
			case ovrRenderAPI_OpenGL:
				{
					ovrGLConfigData* data = (ovrGLConfigData*)_config;
#	if OVR_VERSION > OVR_VERSION_043
					m_rtSize = data->Header.BackBufferSize;
#	else
					m_rtSize = data->Header.RTSize;
#	endif // OVR_VERSION > OVR_VERSION_043
				}
				break;
#endif // BGFX_CONFIG_RENDERER_OPENGL

			case ovrRenderAPI_None:
			default:
				BX_CHECK(false, "You should not be here!");
				break;
			}

			m_debug = true;
			return false;
		}

		if (!m_initialized)
		{
			return false;
		}

		if (!_debug)
		{
			m_hmd = ovrHmd_Create(0);
		}

		if (NULL == m_hmd)
		{
			m_hmd = ovrHmd_CreateDebug(ovrHmd_DK2);
			BX_WARN(NULL != m_hmd, "Unable to initialize OVR.");

			if (NULL == m_hmd)
			{
				return false;
			}
		}

		BX_TRACE("HMD: %s, %s, firmware: %d.%d"
			, m_hmd->ProductName
			, m_hmd->Manufacturer
			, m_hmd->FirmwareMajor
			, m_hmd->FirmwareMinor
			);

		ovrBool result;
		result = ovrHmd_AttachToWindow(m_hmd, _nwh, NULL, NULL);
		if (!result) { goto ovrError; }

		ovrFovPort eyeFov[2] = { m_hmd->DefaultEyeFov[0], m_hmd->DefaultEyeFov[1] };
		result = ovrHmd_ConfigureRendering(m_hmd
			, _config
			, 0
#if OVR_VERSION < OVR_VERSION_050
			| ovrDistortionCap_Chromatic // permanently enabled >= v5.0
#endif
			| ovrDistortionCap_Vignette
			| ovrDistortionCap_TimeWarp
			| ovrDistortionCap_Overdrive
			| ovrDistortionCap_NoRestore
			| ovrDistortionCap_HqDistortion
			, eyeFov
			, m_erd
			);
		if (!result) { goto ovrError; }

		ovrHmd_SetEnabledCaps(m_hmd
			, 0
			| ovrHmdCap_LowPersistence
			| ovrHmdCap_DynamicPrediction
			);

		result = ovrHmd_ConfigureTracking(m_hmd
			, 0
			| ovrTrackingCap_Orientation
			| ovrTrackingCap_MagYawCorrection
			| ovrTrackingCap_Position
			, 0
			);

		if (!result)
		{
ovrError:
			BX_TRACE("Failed to initialize OVR.");
			ovrHmd_Destroy(m_hmd);
			m_hmd = NULL;
			return false;
		}

		ovrSizei sizeL = ovrHmd_GetFovTextureSize(m_hmd, ovrEye_Left,  m_hmd->DefaultEyeFov[0], 1.0f);
		ovrSizei sizeR = ovrHmd_GetFovTextureSize(m_hmd, ovrEye_Right, m_hmd->DefaultEyeFov[1], 1.0f);
		m_rtSize.w = sizeL.w + sizeR.w;
		m_rtSize.h = bx::uint32_max(sizeL.h, sizeR.h);

		m_warning = true;

		return true;
	}
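The combined m_rtSize computed above is normally carved into two side-by-side eye viewports. A minimal sketch of that split, assuming the conventional half-width layout the samples below also use (eyeViewport is a hypothetical local here):

	// Sketch: splitting the shared render target into per-eye viewports.
	ovrRecti eyeViewport[2];
	eyeViewport[0].Pos.x  = 0;
	eyeViewport[0].Pos.y  = 0;
	eyeViewport[0].Size.w = m_rtSize.w / 2;
	eyeViewport[0].Size.h = m_rtSize.h;
	eyeViewport[1].Pos.x  = (m_rtSize.w + 1) / 2;   // rounds up so the halves never overlap
	eyeViewport[1].Pos.y  = 0;
	eyeViewport[1].Size   = eyeViewport[0].Size;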
Code Example #22
//-------------------------------------------------------------------------------------
int Init()
{
    // Initializes LibOVR, and the Rift
    ovr_Initialize();
    if (!HMD)
    {
        HMD = ovrHmd_Create(0);
        if (!HMD)
        {
            MessageBoxA(NULL, "Oculus Rift not detected.", "", MB_OK);
            return(1);
        }
        if (HMD->ProductName[0] == '\0')
            MessageBoxA(NULL, "Rift detected, display not enabled.", "", MB_OK);
    }

	//Setup Window and Graphics - use window frame if relying on Oculus driver
	const int backBufferMultisample = 1;
    bool UseAppWindowFrame = (HMD->HmdCaps & ovrHmdCap_ExtendDesktop) ? false : true;
    HWND window = Util_InitWindowAndGraphics(Recti(HMD->WindowsPos, HMD->Resolution),
                                         FullScreen, backBufferMultisample, UseAppWindowFrame,&pRender);
	if (!window) return 1;
	ovrHmd_AttachToWindow(HMD, window, NULL, NULL);

    //Configure Stereo settings.
    Sizei recommenedTex0Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Left,  HMD->DefaultEyeFov[0], 1.0f);
    Sizei recommenedTex1Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Right, HMD->DefaultEyeFov[1], 1.0f);
	Sizei RenderTargetSize;
    RenderTargetSize.w = recommenedTex0Size.w + recommenedTex1Size.w;
    RenderTargetSize.h = max ( recommenedTex0Size.h, recommenedTex1Size.h );

    const int eyeRenderMultisample = 1;
    pRendertargetTexture = pRender->CreateTexture(Texture_RGBA | Texture_RenderTarget |
                                                  eyeRenderMultisample,
                                                  RenderTargetSize.w, RenderTargetSize.h, NULL);
    // The actual RT size may be different due to HW limits.
    RenderTargetSize.w = pRendertargetTexture->GetWidth();
    RenderTargetSize.h = pRendertargetTexture->GetHeight();

    // Initialize eye rendering information.
    // The viewport sizes are re-computed in case RenderTargetSize changed due to HW limitations.
    ovrFovPort eyeFov[2] = { HMD->DefaultEyeFov[0], HMD->DefaultEyeFov[1] } ;

    EyeRenderViewport[0].Pos  = Vector2i(0,0);
    EyeRenderViewport[0].Size = Sizei(RenderTargetSize.w / 2, RenderTargetSize.h);
    EyeRenderViewport[1].Pos  = Vector2i((RenderTargetSize.w + 1) / 2, 0);
    EyeRenderViewport[1].Size = EyeRenderViewport[0].Size;

    #if SDK_RENDER
	// Query OGL texture data.
	EyeTexture[0].OGL.Header.API			= ovrRenderAPI_OpenGL;
	EyeTexture[0].OGL.Header.TextureSize	= RenderTargetSize;
	EyeTexture[0].OGL.Header.RenderViewport = EyeRenderViewport[0];
	EyeTexture[0].OGL.TexId					= pRendertargetTexture->TexId;

    // Right eye uses the same texture, but different rendering viewport.
    EyeTexture[1]							= EyeTexture[0];
    EyeTexture[1].OGL.Header.RenderViewport	= EyeRenderViewport[1];

    // Configure OpenGL.
    ovrGLConfig oglcfg;
    oglcfg.OGL.Header.API					= ovrRenderAPI_OpenGL;
    oglcfg.OGL.Header.RTSize				= Sizei(HMD->Resolution.w, HMD->Resolution.h);
    oglcfg.OGL.Header.Multisample			= backBufferMultisample;
	oglcfg.OGL.Window						= window;
	oglcfg.OGL.DC							= GetDC(window);

    if (!ovrHmd_ConfigureRendering(HMD, &oglcfg.Config,
		                           ovrDistortionCap_Chromatic | ovrDistortionCap_Vignette |
                                   ovrDistortionCap_TimeWarp | ovrDistortionCap_Overdrive,
								   eyeFov, EyeRenderDesc))	return(1);	

    #else
	//Distortion vertex shader
	const char* vertexShader =
		"#version 110																			\n"
		"uniform vec2 EyeToSourceUVScale;														\n"
		"uniform vec2 EyeToSourceUVOffset;														\n"
		"uniform mat4 EyeRotationStart;															\n"
		"uniform mat4 EyeRotationEnd;															\n"
		"attribute vec2 Position;																\n"
		"attribute vec2 inTWLF_V;																\n"		
		"attribute vec2 inTexCoord0;															\n"
		"attribute vec2 inTexCoord1;															\n"
		"attribute vec2 inTexCoord2;															\n"
		"varying vec4 oPosition;																\n"
		"varying vec2 oTexCoord0;																\n"
		"varying vec2 oTexCoord1;																\n"
		"varying vec2 oTexCoord2;																\n"
		"varying float oVignette;																\n"
		"vec2 TexCoord0 = vec2(inTexCoord0.x, -inTexCoord0.y);									\n"
		"vec2 TexCoord1 = vec2(inTexCoord1.x, -inTexCoord1.y);									\n"
		"vec2 TexCoord2 = vec2(inTexCoord2.x, -inTexCoord2.y);									\n"
		"float timewarpLerpFactor = inTWLF_V.x;													\n"
		"float Vignette = inTWLF_V.y;															\n"
		"vec2 TimewarpTexCoord( in vec2 TexCoord, in mat4 rotMat )								\n"
		"{																						\n"
		// Vertex inputs are in TanEyeAngle space for the R,G,B channels (i.e. after chromatic 
		// aberration and distortion). These are now "real world" vectors in direction (x,y,1) 
		// relative to the eye of the HMD.	Apply the 3x3 timewarp rotation to these vectors.
		"   vec3 transformed = vec3( ( rotMat * vec4( TexCoord.xy , 1.00000, 1.00000) ).xyz );	\n"
		// Project them back onto the Z=1 plane of the rendered images.
		"   vec2 flattened = (transformed.xy  / transformed.z );								\n"
		// Scale them into ([0,0.5],[0,1]) or ([0.5,0],[0,1]) UV lookup space (depending on eye)
		"   return ((EyeToSourceUVScale * flattened) + EyeToSourceUVOffset);					\n"
		"}																						\n"
		"mat4 mat4_lerp( in mat4 x, in mat4 y, in mat4 s )										\n"
		"{																						\n"
		"	return mat4(mix(x[0],y[0],s[0]), mix(x[1],y[1],s[1]), mix(x[2],y[2],s[2]), mix(x[3],y[3],s[3]));\n"
		"}																						\n"
		"void main()																			\n"
		"{																						\n"
		"   mat4 lerpedEyeRot = mat4_lerp( EyeRotationStart, EyeRotationEnd, mat4( timewarpLerpFactor));\n"
		"   oTexCoord0 = TimewarpTexCoord( TexCoord0, lerpedEyeRot);							\n"
		"   oTexCoord1 = TimewarpTexCoord( TexCoord1, lerpedEyeRot);							\n"
		"   oTexCoord2 = TimewarpTexCoord( TexCoord2, lerpedEyeRot);							\n"
		"   oPosition = vec4( Position.xy , 0.500000, 1.00000);									\n"
		"   oVignette = Vignette;																\n"
		"   gl_Position = oPosition;															\n"
		"}";

	//Distortion pixel shader
	const char* pixelShader = 
		"#version 110																			\n"
		"uniform sampler2D Texture0;															\n"
		"varying vec4 oPosition;																\n"
		"varying vec2 oTexCoord0;																\n"
		"varying vec2 oTexCoord1;																\n"
		"varying vec2 oTexCoord2;																\n"
		"varying float oVignette;																\n"
		"void main()																			\n"
		"{																						\n"
		// 3 samples for fixing chromatic aberrations
		"   float R = texture2D(Texture0, oTexCoord0.xy).r;										\n"
		"   float G = texture2D(Texture0, oTexCoord1.xy).g;										\n"
		"   float B = texture2D(Texture0, oTexCoord2.xy).b;										\n"
		"   gl_FragColor = (oVignette*vec4(R,G,B,1));											\n"
		"}";

	pRender->InitShaders(vertexShader, pixelShader, &Shaders);

    for ( int eyeNum = 0; eyeNum < 2; eyeNum++ )
    {
        // Allocate mesh vertices, registering with renderer using the OVR vertex format.
        ovrDistortionMesh meshData;
        ovrHmd_CreateDistortionMesh(HMD, (ovrEyeType) eyeNum, eyeFov[eyeNum],
			                        ovrDistortionCap_Chromatic | ovrDistortionCap_TimeWarp, &meshData);
        MeshVBs[eyeNum] = *pRender->CreateBuffer();
        MeshVBs[eyeNum]->Data(Buffer_Vertex,meshData.pVertexData,sizeof(ovrDistortionVertex)*meshData.VertexCount);
        MeshIBs[eyeNum] = *pRender->CreateBuffer();
        MeshIBs[eyeNum]->Data(Buffer_Index,meshData.pIndexData,sizeof(unsigned short) * meshData.IndexCount);
        ovrHmd_DestroyDistortionMesh( &meshData );

		//Create eye render description for use later
		EyeRenderDesc[eyeNum] = ovrHmd_GetRenderDesc(HMD, (ovrEyeType) eyeNum,  eyeFov[eyeNum]);

		//Do scale and offset
		ovrHmd_GetRenderScaleAndOffset(eyeFov[eyeNum],RenderTargetSize, EyeRenderViewport[eyeNum], UVScaleOffset[eyeNum]);
	}

    #endif

    ovrHmd_SetEnabledCaps(HMD, ovrHmdCap_LowPersistence | ovrHmdCap_DynamicPrediction);

	// Start the sensor that reports the Rift's pose and motion
    ovrHmd_ConfigureTracking(HMD,   ovrTrackingCap_Orientation |
                                    ovrTrackingCap_MagYawCorrection |
                                    ovrTrackingCap_Position, 0);

    // This creates lights and models.
  	pRoomScene = new Scene;
	PopulateRoomScene(pRoomScene, pRender);

    return (0);
}
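For reference, the UVScaleOffset values and mesh buffers built in the #else branch are consumed each frame by the distortion pass. A sketch of that pass follows; the SetUniform2f/SetUniform4x4f names assume the SDK sample renderer's shader-set helpers and should be treated as assumptions:

// Sketch: binding the uniforms the distortion vertex shader above declares,
// then drawing each eye's mesh (renderer wrapper names are assumptions).
for (int eyeNum = 0; eyeNum < 2; eyeNum++)
{
    ovrMatrix4f timeWarpMatrices[2];
    ovrHmd_GetEyeTimewarpMatrices(HMD, (ovrEyeType)eyeNum,
                                  EyeRenderPose[eyeNum], timeWarpMatrices);

    Shaders->SetUniform2f("EyeToSourceUVScale",  UVScaleOffset[eyeNum][0].x, UVScaleOffset[eyeNum][0].y);
    Shaders->SetUniform2f("EyeToSourceUVOffset", UVScaleOffset[eyeNum][1].x, UVScaleOffset[eyeNum][1].y);
    Shaders->SetUniform4x4f("EyeRotationStart", Matrix4f(timeWarpMatrices[0]));
    Shaders->SetUniform4x4f("EyeRotationEnd",   Matrix4f(timeWarpMatrices[1]));

    // Draw MeshVBs[eyeNum] / MeshIBs[eyeNum] with these shaders bound; the exact
    // Render() call depends on the sample's renderer wrapper.
}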
Code Example #23
HRESULT InitOculusRiftObjects(unsigned distortionCaps, ovrHmd HMD)
{
	HRESULT hr = S_OK;

	// Get the stereo rendering settings.
	// Needed to determine the render target size to create.
	// Note: the DK1 returns rather odd sizes here, so it is unclear whether they can be trusted as-is.
	ovrSizei recommenedTex0Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Left, HMDDesc.DefaultEyeFov[0], 1.0f);
	ovrSizei recommenedTex1Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Right, HMDDesc.DefaultEyeFov[1], 1.0f);
	ovrSizei RenderTargetSize;
	RenderTargetSize.w = recommenedTex0Size.w + recommenedTex1Size.w;
	RenderTargetSize.h = max(recommenedTex0Size.h, recommenedTex1Size.h);

	{
		// Create the texture for the render target.
		// The scene will be rendered into it.

		D3D11_TEXTURE2D_DESC desc;
		ZeroMemory(&desc, sizeof(desc));
		desc.Width = RenderTargetSize.w;
		desc.Height = RenderTargetSize.h;
		desc.MipLevels = 1;
		desc.ArraySize = 1;
		desc.Format = DXGI_FORMAT_R8G8B8A8_UNORM;
		desc.SampleDesc.Count = 1;
		desc.Usage = D3D11_USAGE_DEFAULT;
		desc.BindFlags = D3D11_BIND_RENDER_TARGET | D3D11_BIND_SHADER_RESOURCE;
		// Create the texture
		g_pd3dDevice->CreateTexture2D(&desc, NULL, &g_pTextureOculus);
		// A shader resource view lets the texture be sampled when drawing
		g_pd3dDevice->CreateShaderResourceView(g_pTextureOculus, NULL, &g_pShaderResViewOculus);
		// Create the render target view
		g_pd3dDevice->CreateRenderTargetView(g_pTextureOculus, NULL, &g_pRenderTargetViewOculus);

		// Read back the size that was actually created (it may differ)
		g_pTextureOculus->GetDesc(&desc);
		RenderTargetSize.w = desc.Width;
		RenderTargetSize.h = desc.Height;

		// Create the depth buffer (with stencil)
		D3D11_TEXTURE2D_DESC descDepth;
		ZeroMemory(&descDepth, sizeof(descDepth));
		descDepth.Width = RenderTargetSize.w;
		descDepth.Height = RenderTargetSize.h;
		descDepth.MipLevels = 1;
		descDepth.ArraySize = 1;
		descDepth.Format = DXGI_FORMAT_D24_UNORM_S8_UINT;
		descDepth.SampleDesc.Count = 1;
		descDepth.SampleDesc.Quality = 0;
		descDepth.Usage = D3D11_USAGE_DEFAULT;
		descDepth.BindFlags = D3D11_BIND_DEPTH_STENCIL;
		descDepth.CPUAccessFlags = 0;
		descDepth.MiscFlags = 0;
		g_pd3dDevice->CreateTexture2D(&descDepth, NULL, &g_pDepthStencilOculus);

		// Create the depth-stencil view
		D3D11_DEPTH_STENCIL_VIEW_DESC descDSV;
		ZeroMemory(&descDSV, sizeof(descDSV));
		descDSV.Format = descDepth.Format;
		descDSV.ViewDimension = D3D11_DSV_DIMENSION_TEXTURE2D;
		descDSV.Texture2D.MipSlice = 0;
		g_pd3dDevice->CreateDepthStencilView(g_pDepthStencilOculus, &descDSV, &g_pDepthStencilViewOculus);
	}


	// Get the rendering information for each eye.
	ovrFovPort eyeFov[2] = { HMDDesc.DefaultEyeFov[0], HMDDesc.DefaultEyeFov[1] };
	EyeRenderDesc[0] = ovrHmd_GetRenderDesc(HMD, ovrEye_Left, eyeFov[0]);
	EyeRenderDesc[1] = ovrHmd_GetRenderDesc(HMD, ovrEye_Right, eyeFov[1]);

	// Set the HMD capabilities to use.
	ovrHmd_SetEnabledCaps(HMD, ovrHmdCap_LowPersistence |
		ovrHmdCap_LatencyTest);

	// Start using the sensor.
	// Enables YawCorrection, Orientation, and Position tracking.
	ovrHmd_StartSensor(HMD, ovrSensorCap_Orientation |
		ovrSensorCap_YawCorrection |
		ovrSensorCap_Position, 0);

	// Store the viewport information.
	EyeRenderViewport[0].Pos.x = 0;
	EyeRenderViewport[0].Pos.y = 0;
	EyeRenderViewport[0].Size.w = RenderTargetSize.w / 2;
	EyeRenderViewport[0].Size.h = RenderTargetSize.h;
	EyeRenderViewport[1].Pos.x = (RenderTargetSize.w + 1) / 2;
	EyeRenderViewport[1].Pos.y = 0;
	EyeRenderViewport[1].Size = EyeRenderViewport[0].Size;



	// Create a distortion mesh for each eye
	for (int eyeNum = 0; eyeNum < 2; eyeNum++)
	{
		// Get the mesh data
		ovrDistortionMesh meshData;
		ovrHmd_CreateDistortionMesh(HMD,
			EyeRenderDesc[eyeNum].Eye, EyeRenderDesc[eyeNum].Fov,
			distortionCaps, &meshData);

		ovrHmd_GetRenderScaleAndOffset(EyeRenderDesc[eyeNum].Fov,
			RenderTargetSize, EyeRenderViewport[eyeNum],
			(ovrVector2f*)UVScaleOffset[eyeNum]);

		// Convert into the vertex format we defined on our side.
		DistortionVertex * pVBVerts = (DistortionVertex*)OVR_ALLOC(
			sizeof(DistortionVertex)* meshData.VertexCount);
		DistortionVertex * v = pVBVerts;
		ovrDistortionVertex * ov = meshData.pVertexData;
		for (unsigned vertNum = 0; vertNum < meshData.VertexCount; vertNum++)
		{
			v->Pos.x = ov->Pos.x;
			v->Pos.y = ov->Pos.y;
			v->TexR = (*(ovrVector2f*)&ov->TexR);
			v->TexG = (*(ovrVector2f*)&ov->TexG);
			v->TexB = (*(ovrVector2f*)&ov->TexB);
			v->Col[0] = v->Col[1] = v->Col[2] = (BYTE)(ov->VignetteFactor*255.99f);
			v->Col[3] = (BYTE)(ov->TimeWarpFactor*255.99f);
			v++; ov++;
		}

		// Create a vertex buffer from the mesh vertex data.
		D3D11_BUFFER_DESC bd = { 0 };
		bd.Usage = D3D11_USAGE_DEFAULT;
		bd.ByteWidth = sizeof(DistortionVertex)* meshData.VertexCount;
		bd.BindFlags = D3D11_BIND_VERTEX_BUFFER;
		bd.CPUAccessFlags = 0;
		D3D11_SUBRESOURCE_DATA InitData = { 0 };
		InitData.pSysMem = pVBVerts;
		hr = g_pd3dDevice->CreateBuffer(&bd, &InitData, &g_pVertexBufferOculus[eyeNum]);
		if (FAILED(hr))
			return hr;

		// Likewise, create an index buffer.
		bd.ByteWidth = sizeof(unsigned short)* meshData.IndexCount;
		bd.BindFlags = D3D11_BIND_INDEX_BUFFER;
		bd.CPUAccessFlags = 0;
		InitData.pSysMem = meshData.pIndexData;
		hr = g_pd3dDevice->CreateBuffer(&bd, &InitData, &g_pIndexBufferOculus[eyeNum]);
		if (FAILED(hr))
			return hr;
		oculusIndexCount = meshData.IndexCount;

		OVR_FREE(pVBVerts);
		ovrHmd_DestroyDistortionMesh(&meshData);
	}


	{
		// Compile the vertex shader
		ID3DBlob* pBlob = NULL;
		hr = CompileShaderFromFile("OculusRift.hlsl", "VS_TimeWarp", "vs_4_0", &pBlob);
		if (FAILED(hr))
		{
			MessageBox(NULL, "Failed to load the vertex shader.", "Error", MB_OK);
			return hr;
		}

		// Create the vertex shader object from the compiled blob
		hr = g_pd3dDevice->CreateVertexShader(pBlob->GetBufferPointer(), pBlob->GetBufferSize(), NULL, &g_pVertexShaderOculus);
		if (FAILED(hr))
			return hr;

		// Input layout description
		static D3D11_INPUT_ELEMENT_DESC DistortionMeshVertexDesc[] =
		{
			{ "Position", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0 },
			{ "TexCoord", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 8, D3D11_INPUT_PER_VERTEX_DATA, 0 },
			{ "TexCoord", 1, DXGI_FORMAT_R32G32_FLOAT, 0, 16, D3D11_INPUT_PER_VERTEX_DATA, 0 },
			{ "TexCoord", 2, DXGI_FORMAT_R32G32_FLOAT, 0, 24, D3D11_INPUT_PER_VERTEX_DATA, 0 },
			{ "Color", 0, DXGI_FORMAT_R8G8B8A8_UNORM, 0, 32, D3D11_INPUT_PER_VERTEX_DATA, 0 },
		};

		// Create the input layout
		hr = g_pd3dDevice->CreateInputLayout(DistortionMeshVertexDesc, ARRAYSIZE(DistortionMeshVertexDesc), pBlob->GetBufferPointer(),
			pBlob->GetBufferSize(), &g_pVertexLayoutOculus);
		if (FAILED(hr))
			return hr;

		pBlob->Release();
	}

	{
		// Compile the pixel shader
		ID3DBlob* pBlob = NULL;
		hr = CompileShaderFromFile("OculusRift.hlsl", "PS_Oculus", "ps_4_0", &pBlob);
		if (FAILED(hr))
		{
			MessageBox(NULL, "Failed to load the pixel shader.", "Error", MB_OK);
			return hr;
		}

		// Create the pixel shader object from the compiled blob
		hr = g_pd3dDevice->CreatePixelShader(pBlob->GetBufferPointer(), pBlob->GetBufferSize(), NULL, &g_pPixelShaderOculus);
		pBlob->Release();
		if (FAILED(hr))
			return hr;
	}

	// Create the constant buffer
	// Holds the shader settings for drawing the distortion mesh
	{
		D3D11_BUFFER_DESC bd = { 0 };
		bd.Usage = D3D11_USAGE_DEFAULT;
		bd.ByteWidth = sizeof(OculusRiftSettings);
		bd.BindFlags = D3D11_BIND_CONSTANT_BUFFER;
		bd.CPUAccessFlags = 0;
		hr = g_pd3dDevice->CreateBuffer(&bd, NULL, &g_pConstantBufferOculus);
		if (FAILED(hr))
			return hr;
	}

	return hr;
}
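Once these objects exist, the per-frame distortion pass binds them and draws both meshes into the back buffer. A sketch under the assumption of a g_pImmediateContext and a back-buffer RTV from the app's D3D11 setup (both hypothetical here):

// Sketch: per-frame distortion-mesh draw using the objects created above.
// g_pImmediateContext and g_pBackBufferRTV are assumed app globals.
g_pImmediateContext->OMSetRenderTargets(1, &g_pBackBufferRTV, NULL);
g_pImmediateContext->IASetInputLayout(g_pVertexLayoutOculus);
g_pImmediateContext->VSSetShader(g_pVertexShaderOculus, NULL, 0);
g_pImmediateContext->PSSetShader(g_pPixelShaderOculus, NULL, 0);
g_pImmediateContext->PSSetShaderResources(0, 1, &g_pShaderResViewOculus);  // the eye render texture
g_pImmediateContext->VSSetConstantBuffers(0, 1, &g_pConstantBufferOculus);
g_pImmediateContext->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST);

for (int eye = 0; eye < 2; eye++)
{
    // Update g_pConstantBufferOculus with UVScaleOffset[eye] and this eye's
    // timewarp matrices (see OculusRift.hlsl) before issuing the draw.
    UINT stride = sizeof(DistortionVertex), offset = 0;
    g_pImmediateContext->IASetVertexBuffers(0, 1, &g_pVertexBufferOculus[eye], &stride, &offset);
    g_pImmediateContext->IASetIndexBuffer(g_pIndexBufferOculus[eye], DXGI_FORMAT_R16_UINT, 0);
    g_pImmediateContext->DrawIndexed(oculusIndexCount, 0, 0);
}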
Code Example #24
void OculusWorldDemoApp::CalculateHmdValues()
{
    // Initialize eye rendering information for ovrHmd_Configure.
    // The viewport sizes are re-computed in case RenderTargetSize changed due to HW limitations.
    ovrFovPort eyeFov[2];
    eyeFov[0] = HmdDesc.DefaultEyeFov[0];
    eyeFov[1] = HmdDesc.DefaultEyeFov[1];

    // Clamp Fov based on our dynamically adjustable FovSideTanMax.
    // Most apps should use the default, but reducing Fov does reduce rendering cost.
    eyeFov[0] = FovPort::Min(eyeFov[0], FovPort(FovSideTanMax));
    eyeFov[1] = FovPort::Min(eyeFov[1], FovPort(FovSideTanMax));


    if (ForceZeroIpd)
    {
        // ForceZeroIpd does three things:
        //  1) Sets FOV to maximum symmetrical FOV based on both eyes
        //  2) Sets eye ViewAdjust values to 0.0 (effective IPD == 0)
        //  3) Uses only the Left texture for rendering.
        
        eyeFov[0] = FovPort::Max(eyeFov[0], eyeFov[1]);
        eyeFov[1] = eyeFov[0];

        Sizei recommenedTexSize = ovrHmd_GetFovTextureSize(Hmd, ovrEye_Left,
                                                           eyeFov[0], DesiredPixelDensity);

        Sizei textureSize = EnsureRendertargetAtLeastThisBig(Rendertarget_Left,  recommenedTexSize);

        EyeRenderSize[0] = Sizei::Min(textureSize, recommenedTexSize);
        EyeRenderSize[1] = EyeRenderSize[0];

        // Store texture pointers that will be passed for rendering.
        EyeTexture[0]                       = RenderTargets[Rendertarget_Left].Tex;
        EyeTexture[0].Header.TextureSize    = textureSize;
        EyeTexture[0].Header.RenderViewport = Recti(EyeRenderSize[0]);
        // Right eye is the same.
        EyeTexture[1] = EyeTexture[0];
    }

    else
    {
        // Configure Stereo settings. Default pixel density is 1.0f.
        Sizei recommenedTex0Size = ovrHmd_GetFovTextureSize(Hmd, ovrEye_Left,  eyeFov[0], DesiredPixelDensity);
        Sizei recommenedTex1Size = ovrHmd_GetFovTextureSize(Hmd, ovrEye_Right, eyeFov[1], DesiredPixelDensity);

        if (RendertargetIsSharedByBothEyes)
        {
            Sizei  rtSize(recommenedTex0Size.w + recommenedTex1Size.w,
                          Alg::Max(recommenedTex0Size.h, recommenedTex1Size.h));

            // Use returned size as the actual RT size may be different due to HW limits.
            rtSize = EnsureRendertargetAtLeastThisBig(Rendertarget_BothEyes, rtSize);

            // Don't draw more than the recommended size; this also ensures that the
            // resolution reported in the overlay HUD is updated correctly on FOV/pixel-density changes.
            EyeRenderSize[0] = Sizei::Min(Sizei(rtSize.w/2, rtSize.h), recommenedTex0Size);
            EyeRenderSize[1] = Sizei::Min(Sizei(rtSize.w/2, rtSize.h), recommenedTex1Size);

            // Store texture pointers that will be passed for rendering.
            // Same texture is used, but with different viewports.
            EyeTexture[0]                       = RenderTargets[Rendertarget_BothEyes].Tex;
            EyeTexture[0].Header.TextureSize    = rtSize;
            EyeTexture[0].Header.RenderViewport = Recti(Vector2i(0), EyeRenderSize[0]);
            EyeTexture[1]                       = RenderTargets[Rendertarget_BothEyes].Tex;
            EyeTexture[1].Header.TextureSize    = rtSize;
            EyeTexture[1].Header.RenderViewport = Recti(Vector2i((rtSize.w+1)/2, 0), EyeRenderSize[1]);
        }

        else
        {
            Sizei tex0Size = EnsureRendertargetAtLeastThisBig(Rendertarget_Left,  recommenedTex0Size);
            Sizei tex1Size = EnsureRendertargetAtLeastThisBig(Rendertarget_Right, recommenedTex1Size);

            EyeRenderSize[0] = Sizei::Min(tex0Size, recommenedTex0Size);
            EyeRenderSize[1] = Sizei::Min(tex1Size, recommenedTex1Size);

            // Store texture pointers and viewports that will be passed for rendering.
            EyeTexture[0]                       = RenderTargets[Rendertarget_Left].Tex;
            EyeTexture[0].Header.TextureSize    = tex0Size;
            EyeTexture[0].Header.RenderViewport = Recti(EyeRenderSize[0]);
            EyeTexture[1]                       = RenderTargets[Rendertarget_Right].Tex;
            EyeTexture[1].Header.TextureSize    = tex1Size;
            EyeTexture[1].Header.RenderViewport = Recti(EyeRenderSize[1]);
        }
    }

    // Hmd caps.
    unsigned hmdCaps = (VsyncEnabled ? 0 : ovrHmdCap_NoVSync) |
                       ovrHmdCap_LatencyTest;
    if (IsLowPersistence)
        hmdCaps |= ovrHmdCap_LowPersistence;
    if (DynamicPrediction)
        hmdCaps |= ovrHmdCap_DynamicPrediction;

    ovrHmd_SetEnabledCaps(Hmd, hmdCaps);


    ovrRenderAPIConfig config         = pRender->Get_ovrRenderAPIConfig();
    unsigned           distortionCaps = ovrDistortionCap_Chromatic;
    if (TimewarpEnabled)
        distortionCaps |= ovrDistortionCap_TimeWarp;

    if (!ovrHmd_ConfigureRendering( Hmd, &config, distortionCaps,
                                    eyeFov, EyeRenderDesc ))
    {
        // Fail exit? TBD
        return;
    }

    if (ForceZeroIpd)
    {
        // Remove IPD adjust
        EyeRenderDesc[0].ViewAdjust = Vector3f(0);
        EyeRenderDesc[1].ViewAdjust = Vector3f(0);
    }

    // ovrHmdCap_LatencyTest - enables internal latency feedback
    unsigned sensorCaps = ovrSensorCap_Orientation|ovrSensorCap_YawCorrection;
    if (PositionTrackingEnabled)
        sensorCaps |= ovrSensorCap_Position;
      
    if (StartSensorCaps != sensorCaps)
    {
        ovrHmd_StartSensor(Hmd, sensorCaps, 0);
        StartSensorCaps = sensorCaps;
    }    

    // Calculate projections
    Projection[0] = ovrMatrix4f_Projection(EyeRenderDesc[0].Fov,  0.01f, 10000.0f, true);
    Projection[1] = ovrMatrix4f_Projection(EyeRenderDesc[1].Fov,  0.01f, 10000.0f, true);

    float    orthoDistance = 0.8f; // 2D is 0.8 meter from camera
    Vector2f orthoScale0   = Vector2f(1.0f) / Vector2f(EyeRenderDesc[0].PixelsPerTanAngleAtCenter);
    Vector2f orthoScale1   = Vector2f(1.0f) / Vector2f(EyeRenderDesc[1].PixelsPerTanAngleAtCenter);
    
    OrthoProjection[0] = ovrMatrix4f_OrthoSubProjection(Projection[0], orthoScale0, orthoDistance,
                                                        EyeRenderDesc[0].ViewAdjust.x);
    OrthoProjection[1] = ovrMatrix4f_OrthoSubProjection(Projection[1], orthoScale1, orthoDistance,
                                                        EyeRenderDesc[1].ViewAdjust.x);
}
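A closing note on ViewAdjust: it is the per-eye translation that produces the stereo separation, which is why ForceZeroIpd clears it above. A sketch of how it is typically folded into each eye's view matrix, with hmdView standing in for a head-tracked center-eye view (a hypothetical local):

    // Sketch: applying the per-eye IPD translation to a center-eye view matrix.
    Matrix4f view[2];
    for (int eye = 0; eye < 2; eye++)
        view[eye] = Matrix4f::Translation(EyeRenderDesc[eye].ViewAdjust) * hmdView;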