ClientSideDistortionExample() {
   ovrHmd_StartSensor(hmd, 0, 0);
   player = glm::inverse(glm::lookAt(
     glm::vec3(0, eyeHeight, ipd * 4),  // Position of the camera
     glm::vec3(0, eyeHeight, 0),  // Where the camera is looking
     GlUtils::Y_AXIS));           // Camera up axis
 }
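
The constructor above points the player camera at the origin from ipd * 4 meters back; taking the inverse of a lookAt matrix yields the camera's pose rather than its view. A minimal per-frame sketch of how that pose might be combined with the tracked head orientation (glm and the 0.3.x-era ovrHmd_GetSensorState / ovr_GetTimeInSeconds calls are real; getHeadView is a hypothetical helper, and hmd/player are the members set up above):

// Hypothetical helper; requires <glm/gtc/quaternion.hpp> for glm::mat4_cast.
glm::mat4 getHeadView(ovrHmd hmd, const glm::mat4 & player) {
  // Predicted head orientation from the sensor fusion state.
  ovrSensorState state = ovrHmd_GetSensorState(hmd, ovr_GetTimeInSeconds());
  ovrQuatf q = state.Predicted.Pose.Orientation;
  // glm quaternions take (w, x, y, z); convert to a rotation matrix.
  glm::mat4 head = glm::mat4_cast(glm::quat(q.w, q.x, q.y, q.z));
  // The view matrix is the inverse of the combined camera transform.
  return glm::inverse(player * head);
}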
Example No. 2
void RiftSetup()
{
    ovr_Initialize();

    s_hmd = ovrHmd_Create(0);
    if (!s_hmd)
    {
        s_hmd = ovrHmd_CreateDebug(ovrHmd_DK1);
    }

    ovrHmd_GetDesc(s_hmd, &s_hmdDesc);
    DumpHMDInfo(s_hmdDesc);

    uint32_t supportedSensorCaps = ovrSensorCap_Orientation;
    uint32_t requiredSensorCaps = ovrSensorCap_Orientation;
    ovrBool success = ovrHmd_StartSensor(s_hmd, supportedSensorCaps, requiredSensorCaps);
    if (!success) {
        fprintf(stderr, "ERROR: HMD does not have required capabilities!\n");
        exit(2);
    }

    // Figure out the dimensions of the render target
    ovrSizei recommendedTex0Size = ovrHmd_GetFovTextureSize(s_hmd, ovrEye_Left, s_hmdDesc.DefaultEyeFov[0], 1.0f);
    ovrSizei recommendedTex1Size = ovrHmd_GetFovTextureSize(s_hmd, ovrEye_Right, s_hmdDesc.DefaultEyeFov[1], 1.0f);
    s_renderTargetSize.w = recommendedTex0Size.w + recommendedTex1Size.w;
    s_renderTargetSize.h = std::max(recommendedTex0Size.h, recommendedTex1Size.h);

    CreateRenderTarget(s_renderTargetSize.w, s_renderTargetSize.h);

    s_eyeTexture[0].Header.API = ovrRenderAPI_OpenGL;
    s_eyeTexture[0].Header.TextureSize = s_renderTargetSize;
    s_eyeTexture[0].Header.RenderViewport.Pos = {0, 0};
    s_eyeTexture[0].Header.RenderViewport.Size = {s_renderTargetSize.w / 2, s_renderTargetSize.h};
    ((ovrGLTexture*)(&s_eyeTexture[0]))->OGL.TexId = s_fboTex;

    s_eyeTexture[1].Header.API = ovrRenderAPI_OpenGL;
    s_eyeTexture[1].Header.TextureSize = s_renderTargetSize;
    s_eyeTexture[1].Header.RenderViewport.Pos = {s_renderTargetSize.w / 2, 0};
    s_eyeTexture[1].Header.RenderViewport.Size = {s_renderTargetSize.w / 2, s_renderTargetSize.h};
    ((ovrGLTexture*)(&s_eyeTexture[1]))->OGL.TexId = s_fboTex;

    // Configure ovr SDK Rendering
    ovrGLConfig cfg;
    memset(&cfg, 0, sizeof(ovrGLConfig));
    cfg.OGL.Header.API = ovrRenderAPI_OpenGL;
    cfg.OGL.Header.RTSize = {s_config->width, s_config->height};
    cfg.OGL.Header.Multisample = 0;
    // TODO: on windows need to set HWND, on Linux need to set other parameters
    if (!ovrHmd_ConfigureRendering(s_hmd, &cfg.Config, s_hmdDesc.DistortionCaps, s_hmdDesc.DefaultEyeFov, s_eyeRenderDesc))
    {
        fprintf(stderr, "ERROR: HMD configure rendering failed!\n");
        exit(3);
    }
}
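
RiftSetup() hands distortion off to the SDK via ovrHmd_ConfigureRendering, so each frame only has to render both eyes into the shared target and pass the textures back. A sketch of that loop against the 0.3.x SDK-rendered path (ovrHmd_BeginFrame / ovrHmd_BeginEyeRender / ovrHmd_EndEyeRender / ovrHmd_EndFrame existed in this era, though exact entry points shifted between point releases; s_fbo and DrawScene are assumed app-side names):

void RenderFrame()
{
    ovrHmd_BeginFrame(s_hmd, 0);
    glBindFramebuffer(GL_FRAMEBUFFER, s_fbo);  // FBO backing s_fboTex (assumed)
    for (int i = 0; i < ovrEye_Count; i++)
    {
        ovrEyeType eye = s_hmdDesc.EyeRenderOrder[i];
        ovrPosef pose = ovrHmd_BeginEyeRender(s_hmd, eye);
        const ovrRecti& vp = s_eyeTexture[eye].Header.RenderViewport;
        glViewport(vp.Pos.x, vp.Pos.y, vp.Size.w, vp.Size.h);
        DrawScene(eye, pose);  // app-specific scene rendering (assumed)
        ovrHmd_EndEyeRender(s_hmd, eye, pose, &s_eyeTexture[eye]);
    }
    glBindFramebuffer(GL_FRAMEBUFFER, 0);
    ovrHmd_EndFrame(s_hmd);  // the SDK draws the distortion pass and swaps
}

Example No. 3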
void OculusInterface::init()
{
    try
    {
        ovr_Initialize();
        hmd = ovrHmd_Create(0);

        if(hmd)
            ovrHmd_GetDesc(hmd, &hmdDesc);
        else
            throw 0;
    }
    catch(int e)
    {
        cout << "Cannot get HMD" << endl;
        // For now, instead of setting initialized = false and returning,
        // fall back to a virtual debug HMD so the app can still run without real hardware.

        hmd = ovrHmd_CreateDebug(ovrHmd_DK2);
        ovrHmd_GetDesc(hmd, &hmdDesc);
    }

    customReport();
    
    try
    {
        if (!ovrHmd_StartSensor(hmd, ovrSensorCap_Orientation | ovrSensorCap_YawCorrection | ovrSensorCap_Position, ovrSensorCap_Orientation)) // orientation is the minimal requirement
            throw string("Unable to start sensor! The device detected by OVR cannot report sensor state, so we cannot do anything useful with it.");
    }
    catch (string const& e)
    {
        cerr << e << endl;
        ovrHmd_Destroy(hmd);
        ovr_Shutdown();
        abort();
    }   
    
    initialized = true;
}
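
Once the sensor is started with ovrSensorCap_Orientation, tracking data is polled rather than pushed. A small sketch of reading it back with the same 0.3.x API (printOrientation is a hypothetical addition to the class, not part of the original):

void OculusInterface::printOrientation()
{
    // Query the predicted pose for "now"; both calls are 0.3.x SDK functions.
    ovrSensorState state = ovrHmd_GetSensorState(hmd, ovr_GetTimeInSeconds());
    if (state.StatusFlags & ovrStatus_OrientationTracked)
    {
        ovrQuatf q = state.Predicted.Pose.Orientation;
        cout << "orientation (x, y, z, w): " << q.x << ", " << q.y
             << ", " << q.z << ", " << q.w << endl;
    }
}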
Example No. 4
HRESULT InitOculusRiftObjects(unsigned distortionCaps, ovrHmd HMD)
{
	HRESULT hr = S_OK;

	// Query the stereo rendering settings.
	// They are needed to choose the size of the render target we create.
	// Note: the DK1 returns a rather odd size here, so it is unclear whether
	// the value can be trusted as-is.
	ovrSizei recommendedTex0Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Left, HMDDesc.DefaultEyeFov[0], 1.0f);
	ovrSizei recommendedTex1Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Right, HMDDesc.DefaultEyeFov[1], 1.0f);
	ovrSizei RenderTargetSize;
	RenderTargetSize.w = recommendedTex0Size.w + recommendedTex1Size.w;
	RenderTargetSize.h = max(recommendedTex0Size.h, recommendedTex1Size.h);

	{
		// Create the texture used as the render target.
		// The scene is rendered into this texture.

		D3D11_TEXTURE2D_DESC desc;
		ZeroMemory(&desc, sizeof(desc));
		desc.Width = RenderTargetSize.w;
		desc.Height = RenderTargetSize.h;
		desc.MipLevels = 1;
		desc.ArraySize = 1;
		desc.Format = DXGI_FORMAT_R8G8B8A8_UNORM;
		desc.SampleDesc.Count = 1;
		desc.Usage = D3D11_USAGE_DEFAULT;
		desc.BindFlags = D3D11_BIND_RENDER_TARGET | D3D11_BIND_SHADER_RESOURCE;
		// Create the texture
		g_pd3dDevice->CreateTexture2D(&desc, NULL, &g_pTextureOculus);
		// Creating a shader resource view lets the texture be sampled as a render source
		g_pd3dDevice->CreateShaderResourceView(g_pTextureOculus, NULL, &g_pShaderResViewOculus);
		// Create the render target view
		g_pd3dDevice->CreateRenderTargetView(g_pTextureOculus, NULL, &g_pRenderTargetViewOculus);

		// Read back the size that was actually allocated (in case of a mismatch)
		g_pTextureOculus->GetDesc(&desc);
		RenderTargetSize.w = desc.Width;
		RenderTargetSize.h = desc.Height;

		// Create the depth buffer (with stencil)
		D3D11_TEXTURE2D_DESC descDepth;
		ZeroMemory(&descDepth, sizeof(descDepth));
		descDepth.Width = RenderTargetSize.w;
		descDepth.Height = RenderTargetSize.h;
		descDepth.MipLevels = 1;
		descDepth.ArraySize = 1;
		descDepth.Format = DXGI_FORMAT_D24_UNORM_S8_UINT;
		descDepth.SampleDesc.Count = 1;
		descDepth.SampleDesc.Quality = 0;
		descDepth.Usage = D3D11_USAGE_DEFAULT;
		descDepth.BindFlags = D3D11_BIND_DEPTH_STENCIL;
		descDepth.CPUAccessFlags = 0;
		descDepth.MiscFlags = 0;
		g_pd3dDevice->CreateTexture2D(&descDepth, NULL, &g_pDepthStencilOculus);

		// Create the depth-stencil view
		D3D11_DEPTH_STENCIL_VIEW_DESC descDSV;
		ZeroMemory(&descDSV, sizeof(descDSV));
		descDSV.Format = descDepth.Format;
		descDSV.ViewDimension = D3D11_DSV_DIMENSION_TEXTURE2D;
		descDSV.Texture2D.MipSlice = 0;
		g_pd3dDevice->CreateDepthStencilView(g_pDepthStencilOculus, &descDSV, &g_pDepthStencilViewOculus);
	}


	// Obtain the rendering parameters for each eye.
	ovrFovPort eyeFov[2] = { HMDDesc.DefaultEyeFov[0], HMDDesc.DefaultEyeFov[1] };
	EyeRenderDesc[0] = ovrHmd_GetRenderDesc(HMD, ovrEye_Left, eyeFov[0]);
	EyeRenderDesc[1] = ovrHmd_GetRenderDesc(HMD, ovrEye_Right, eyeFov[1]);

	// Set the HMD capabilities we want to use.
	ovrHmd_SetEnabledCaps(HMD, ovrHmdCap_LowPersistence |
		ovrHmdCap_LatencyTest);

	// Start the sensor.
	// Request yaw correction, orientation, and position tracking.
	ovrHmd_StartSensor(HMD, ovrSensorCap_Orientation |
		ovrSensorCap_YawCorrection |
		ovrSensorCap_Position, 0);

	// Fill in the viewport information for each eye.
	EyeRenderViewport[0].Pos.x = 0;
	EyeRenderViewport[0].Pos.y = 0;
	EyeRenderViewport[0].Size.w = RenderTargetSize.w / 2;
	EyeRenderViewport[0].Size.h = RenderTargetSize.h;
	EyeRenderViewport[1].Pos.x = (RenderTargetSize.w + 1) / 2;
	EyeRenderViewport[1].Pos.y = 0;
	EyeRenderViewport[1].Size = EyeRenderViewport[0].Size;



	// Create the distortion mesh for each eye.
	for (int eyeNum = 0; eyeNum < 2; eyeNum++)
	{
		// Fetch the mesh data
		ovrDistortionMesh meshData;
		ovrHmd_CreateDistortionMesh(HMD,
			EyeRenderDesc[eyeNum].Eye, EyeRenderDesc[eyeNum].Fov,
			distortionCaps, &meshData);

		ovrHmd_GetRenderScaleAndOffset(EyeRenderDesc[eyeNum].Fov,
			RenderTargetSize, EyeRenderViewport[eyeNum],
			(ovrVector2f*)UVScaleOffset[eyeNum]);

		// Convert the vertices to the vertex format we defined on our side.
		DistortionVertex * pVBVerts = (DistortionVertex*)OVR_ALLOC(
			sizeof(DistortionVertex)* meshData.VertexCount);
		DistortionVertex * v = pVBVerts;
		ovrDistortionVertex * ov = meshData.pVertexData;
		for (unsigned vertNum = 0; vertNum < meshData.VertexCount; vertNum++)
		{
			v->Pos.x = ov->Pos.x;
			v->Pos.y = ov->Pos.y;
			v->TexR = (*(ovrVector2f*)&ov->TexR);
			v->TexG = (*(ovrVector2f*)&ov->TexG);
			v->TexB = (*(ovrVector2f*)&ov->TexB);
			v->Col[0] = v->Col[1] = v->Col[2] = (BYTE)(ov->VignetteFactor*255.99f);
			v->Col[3] = (BYTE)(ov->TimeWarpFactor*255.99f);
			v++; ov++;
		}

		// Create a vertex buffer from the mesh vertex data.
		D3D11_BUFFER_DESC bd = { 0 };
		bd.Usage = D3D11_USAGE_DEFAULT;
		bd.ByteWidth = sizeof(DistortionVertex)* meshData.VertexCount;
		bd.BindFlags = D3D11_BIND_VERTEX_BUFFER;
		bd.CPUAccessFlags = 0;
		D3D11_SUBRESOURCE_DATA InitData = { 0 };
		InitData.pSysMem = pVBVerts;
		hr = g_pd3dDevice->CreateBuffer(&bd, &InitData, &g_pVertexBufferOculus[eyeNum]);
		if (FAILED(hr))
			return hr;

		// Create the index buffer the same way.
		bd.ByteWidth = sizeof(unsigned short)* meshData.IndexCount;
		bd.BindFlags = D3D11_BIND_INDEX_BUFFER;
		bd.CPUAccessFlags = 0;
		InitData.pSysMem = meshData.pIndexData;
		hr = g_pd3dDevice->CreateBuffer(&bd, &InitData, &g_pIndexBufferOculus[eyeNum]);
		if (FAILED(hr))
			return hr;
		oculusIndexCount = meshData.IndexCount;

		OVR_FREE(pVBVerts);
		ovrHmd_DestroyDistortionMesh(&meshData);
	}


	{
		// Compile the vertex shader
		ID3DBlob* pBlob = NULL;
		hr = CompileShaderFromFile("OculusRift.hlsl", "VS_TimeWarp", "vs_4_0", &pBlob);
		if (FAILED(hr))
		{
			MessageBox(NULL, "Failed to load the vertex shader.", "Error", MB_OK);
			return hr;
		}

		// Create the vertex shader object from the compiled blob
		hr = g_pd3dDevice->CreateVertexShader(pBlob->GetBufferPointer(), pBlob->GetBufferSize(), NULL, &g_pVertexShaderOculus);
		if (FAILED(hr))
			return hr;

		// Input layout description
		static D3D11_INPUT_ELEMENT_DESC DistortionMeshVertexDesc[] =
		{
			{ "Position", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0 },
			{ "TexCoord", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 8, D3D11_INPUT_PER_VERTEX_DATA, 0 },
			{ "TexCoord", 1, DXGI_FORMAT_R32G32_FLOAT, 0, 16, D3D11_INPUT_PER_VERTEX_DATA, 0 },
			{ "TexCoord", 2, DXGI_FORMAT_R32G32_FLOAT, 0, 24, D3D11_INPUT_PER_VERTEX_DATA, 0 },
			{ "Color", 0, DXGI_FORMAT_R8G8B8A8_UNORM, 0, 32, D3D11_INPUT_PER_VERTEX_DATA, 0 },
		};

		// Create the input layout
		hr = g_pd3dDevice->CreateInputLayout(DistortionMeshVertexDesc, ARRAYSIZE(DistortionMeshVertexDesc), pBlob->GetBufferPointer(),
			pBlob->GetBufferSize(), &g_pVertexLayoutOculus);
		if (FAILED(hr))
			return hr;

		pBlob->Release();
	}

	{
		// Compile the pixel shader
		ID3DBlob* pBlob = NULL;
		hr = CompileShaderFromFile("OculusRift.hlsl", "PS_Oculus", "ps_4_0", &pBlob);
		if (FAILED(hr))
		{
			MessageBox(NULL, "Failed to load the pixel shader.", "Error", MB_OK);
			return hr;
		}

		// Create the pixel shader object from the compiled blob
		hr = g_pd3dDevice->CreatePixelShader(pBlob->GetBufferPointer(), pBlob->GetBufferSize(), NULL, &g_pPixelShaderOculus);
		pBlob->Release();
		if (FAILED(hr))
			return hr;
	}

	// Create the constant buffer
	// holding the shader settings used to render the distortion mesh
	{
		D3D11_BUFFER_DESC bd = { 0 };
		bd.Usage = D3D11_USAGE_DEFAULT;
		bd.ByteWidth = sizeof(OculusRiftSettings);
		bd.BindFlags = D3D11_BIND_CONSTANT_BUFFER;
		bd.CPUAccessFlags = 0;
		hr = g_pd3dDevice->CreateBuffer(&bd, NULL, &g_pConstantBufferOculus);
		if (FAILED(hr))
			return hr;
	}

	return hr;
}
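
At draw time, the meshes, shaders, and constant buffer created above are bound and drawn once per eye. A hypothetical companion sketch (g_pImmediateContext is an assumed global device context, and the OculusRiftSettings field names are assumed; when timewarp is enabled, the matrices from ovrHmd_GetEyeTimewarpMatrices would go into the same buffer):

void DrawDistortionMeshes()
{
	UINT stride = sizeof(DistortionVertex);
	UINT offset = 0;
	g_pImmediateContext->IASetInputLayout(g_pVertexLayoutOculus);
	g_pImmediateContext->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST);
	g_pImmediateContext->VSSetShader(g_pVertexShaderOculus, NULL, 0);
	g_pImmediateContext->PSSetShader(g_pPixelShaderOculus, NULL, 0);
	// The scene rendered into g_pTextureOculus is the distortion pass's source.
	g_pImmediateContext->PSSetShaderResources(0, 1, &g_pShaderResViewOculus);

	for (int eyeNum = 0; eyeNum < 2; eyeNum++)
	{
		// Per-eye UV scale/offset computed by ovrHmd_GetRenderScaleAndOffset above.
		OculusRiftSettings cb;  // field names assumed
		cb.EyeToSourceUVScale  = UVScaleOffset[eyeNum][0];
		cb.EyeToSourceUVOffset = UVScaleOffset[eyeNum][1];
		g_pImmediateContext->UpdateSubresource(g_pConstantBufferOculus, 0, NULL, &cb, 0, 0);
		g_pImmediateContext->VSSetConstantBuffers(0, 1, &g_pConstantBufferOculus);

		g_pImmediateContext->IASetVertexBuffers(0, 1, &g_pVertexBufferOculus[eyeNum], &stride, &offset);
		g_pImmediateContext->IASetIndexBuffer(g_pIndexBufferOculus[eyeNum], DXGI_FORMAT_R16_UINT, 0);
		g_pImmediateContext->DrawIndexed(oculusIndexCount, 0, 0);
	}
}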
Example No. 5
void OculusWorldDemoApp::CalculateHmdValues()
{
    // Initialize eye rendering information for ovrHmd_Configure.
    // The viewport sizes are re-computed in case RenderTargetSize changed due to HW limitations.
    ovrFovPort eyeFov[2];
    eyeFov[0] = HmdDesc.DefaultEyeFov[0];
    eyeFov[1] = HmdDesc.DefaultEyeFov[1];

    // Clamp Fov based on our dynamically adjustable FovSideTanMax.
    // Most apps should use the default, but reducing Fov does reduce rendering cost.
    eyeFov[0] = FovPort::Min(eyeFov[0], FovPort(FovSideTanMax));
    eyeFov[1] = FovPort::Min(eyeFov[1], FovPort(FovSideTanMax));


    if (ForceZeroIpd)
    {
        // ForceZeroIpd does three things:
        //  1) Sets FOV to maximum symmetrical FOV based on both eyes
        //  2) Sets eye ViewAdjust values to 0.0 (effective IPD == 0)
        //  3) Uses only the Left texture for rendering.
        
        eyeFov[0] = FovPort::Max(eyeFov[0], eyeFov[1]);
        eyeFov[1] = eyeFov[0];

        Sizei recommendedTexSize = ovrHmd_GetFovTextureSize(Hmd, ovrEye_Left,
                                                            eyeFov[0], DesiredPixelDensity);

        Sizei textureSize = EnsureRendertargetAtLeastThisBig(Rendertarget_Left, recommendedTexSize);

        EyeRenderSize[0] = Sizei::Min(textureSize, recommendedTexSize);
        EyeRenderSize[1] = EyeRenderSize[0];

        // Store texture pointers that will be passed for rendering.
        EyeTexture[0]                       = RenderTargets[Rendertarget_Left].Tex;
        EyeTexture[0].Header.TextureSize    = textureSize;
        EyeTexture[0].Header.RenderViewport = Recti(EyeRenderSize[0]);
        // Right eye is the same.
        EyeTexture[1] = EyeTexture[0];
    }

    else
    {
        // Configure Stereo settings. Default pixel density is 1.0f.
        Sizei recommendedTex0Size = ovrHmd_GetFovTextureSize(Hmd, ovrEye_Left,  eyeFov[0], DesiredPixelDensity);
        Sizei recommendedTex1Size = ovrHmd_GetFovTextureSize(Hmd, ovrEye_Right, eyeFov[1], DesiredPixelDensity);

        if (RendertargetIsSharedByBothEyes)
        {
            Sizei  rtSize(recommendedTex0Size.w + recommendedTex1Size.w,
                          Alg::Max(recommendedTex0Size.h, recommendedTex1Size.h));

            // Use returned size as the actual RT size may be different due to HW limits.
            rtSize = EnsureRendertargetAtLeastThisBig(Rendertarget_BothEyes, rtSize);

            // Don't draw more than the recommended size; this also ensures that the resolution
            // reported in the overlay HUD is updated correctly on FOV/pixel-density changes.
            EyeRenderSize[0] = Sizei::Min(Sizei(rtSize.w/2, rtSize.h), recommendedTex0Size);
            EyeRenderSize[1] = Sizei::Min(Sizei(rtSize.w/2, rtSize.h), recommendedTex1Size);

            // Store texture pointers that will be passed for rendering.
            // Same texture is used, but with different viewports.
            EyeTexture[0]                       = RenderTargets[Rendertarget_BothEyes].Tex;
            EyeTexture[0].Header.TextureSize    = rtSize;
            EyeTexture[0].Header.RenderViewport = Recti(Vector2i(0), EyeRenderSize[0]);
            EyeTexture[1]                       = RenderTargets[Rendertarget_BothEyes].Tex;
            EyeTexture[1].Header.TextureSize    = rtSize;
            EyeTexture[1].Header.RenderViewport = Recti(Vector2i((rtSize.w+1)/2, 0), EyeRenderSize[1]);
        }

        else
        {
            Sizei tex0Size = EnsureRendertargetAtLeastThisBig(Rendertarget_Left,  recommendedTex0Size);
            Sizei tex1Size = EnsureRendertargetAtLeastThisBig(Rendertarget_Right, recommendedTex1Size);

            EyeRenderSize[0] = Sizei::Min(tex0Size, recommendedTex0Size);
            EyeRenderSize[1] = Sizei::Min(tex1Size, recommendedTex1Size);

            // Store texture pointers and viewports that will be passed for rendering.
            EyeTexture[0]                       = RenderTargets[Rendertarget_Left].Tex;
            EyeTexture[0].Header.TextureSize    = tex0Size;
            EyeTexture[0].Header.RenderViewport = Recti(EyeRenderSize[0]);
            EyeTexture[1]                       = RenderTargets[Rendertarget_Right].Tex;
            EyeTexture[1].Header.TextureSize    = tex1Size;
            EyeTexture[1].Header.RenderViewport = Recti(EyeRenderSize[1]);
        }
    }

    // Hmd caps.
    unsigned hmdCaps = (VsyncEnabled ? 0 : ovrHmdCap_NoVSync) |
                       ovrHmdCap_LatencyTest;
    if (IsLowPersistence)
        hmdCaps |= ovrHmdCap_LowPersistence;
    if (DynamicPrediction)
        hmdCaps |= ovrHmdCap_DynamicPrediction;

    ovrHmd_SetEnabledCaps(Hmd, hmdCaps);


    ovrRenderAPIConfig config         = pRender->Get_ovrRenderAPIConfig();
    unsigned           distortionCaps = ovrDistortionCap_Chromatic;
    if (TimewarpEnabled)
        distortionCaps |= ovrDistortionCap_TimeWarp;

    if (!ovrHmd_ConfigureRendering( Hmd, &config, distortionCaps,
                                    eyeFov, EyeRenderDesc ))
    {
        // Fail exit? TBD
        return;
    }

    if (ForceZeroIpd)
    {
        // Remove IPD adjust
        EyeRenderDesc[0].ViewAdjust = Vector3f(0);
        EyeRenderDesc[1].ViewAdjust = Vector3f(0);
    }

    // ovrHmdCap_LatencyTest - enables internal latency feedback
    unsigned sensorCaps = ovrSensorCap_Orientation|ovrSensorCap_YawCorrection;
    if (PositionTrackingEnabled)
        sensorCaps |= ovrSensorCap_Position;
      
    if (StartSensorCaps != sensorCaps)
    {
        ovrHmd_StartSensor(Hmd, sensorCaps, 0);
        StartSensorCaps = sensorCaps;
    }    

    // Calculate projections
    Projection[0] = ovrMatrix4f_Projection(EyeRenderDesc[0].Fov,  0.01f, 10000.0f, true);
    Projection[1] = ovrMatrix4f_Projection(EyeRenderDesc[1].Fov,  0.01f, 10000.0f, true);

    float    orthoDistance = 0.8f; // 2D is 0.8 meter from camera
    Vector2f orthoScale0   = Vector2f(1.0f) / Vector2f(EyeRenderDesc[0].PixelsPerTanAngleAtCenter);
    Vector2f orthoScale1   = Vector2f(1.0f) / Vector2f(EyeRenderDesc[1].PixelsPerTanAngleAtCenter);
    
    OrthoProjection[0] = ovrMatrix4f_OrthoSubProjection(Projection[0], orthoScale0, orthoDistance,
                                                        EyeRenderDesc[0].ViewAdjust.x);
    OrthoProjection[1] = ovrMatrix4f_OrthoSubProjection(Projection[1], orthoScale1, orthoDistance,
                                                        EyeRenderDesc[1].ViewAdjust.x);
}
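
The projections and ViewAdjust vectors computed at the end of CalculateHmdValues() are consumed when rendering each eye: ViewAdjust is a per-eye translation applied in view space on top of the head-pose view matrix. A minimal sketch using the demo's OVR math types (ViewForEye is a hypothetical helper; viewFromHeadPose is assumed to be built elsewhere from the tracked pose):

Matrix4f ViewForEye(int eye, const Matrix4f& viewFromHeadPose)
{
    // Shift the view laterally by half the IPD for this eye,
    // then render with Projection[eye].
    return Matrix4f::Translation(EyeRenderDesc[eye].ViewAdjust) * viewFromHeadPose;
}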