void DistortionRenderer::SubmitEye(int eyeId, const ovrTexture* eyeTexture)
{
	// Not much happens here; we just record per-eye state for use by the distortion pass.
	const ovrD3D9Texture* tex = (const ovrD3D9Texture*)eyeTexture;

	// Record the submitted texture
    eachEye[eyeId].texture = tex->D3D9.pTexture;

	// It's only at this point that we discover the texture's viewport,
	// presumably because users are allowed to adjust the render resolution at runtime.
    eachEye[eyeId].TextureSize    = tex->D3D9.Header.TextureSize;
    eachEye[eyeId].RenderViewport = tex->D3D9.Header.RenderViewport;

    const ovrEyeRenderDesc& erd = RState.EyeRenderDesc[eyeId];
    
    ovrHmd_GetRenderScaleAndOffset( erd.Fov,
                                    eachEye[eyeId].TextureSize, eachEye[eyeId].RenderViewport,
                                    eachEye[eyeId].UVScaleOffset );

	if (RState.DistortionCaps & ovrDistortionCap_FlipInput)
	{
		eachEye[eyeId].UVScaleOffset[0].y = -eachEye[eyeId].UVScaleOffset[0].y;
		eachEye[eyeId].UVScaleOffset[1].y = 1.0f - eachEye[eyeId].UVScaleOffset[1].y;
	}
}
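A note on the FlipInput branch above: negating the V scale and re-anchoring the V offset makes the same linear mapping land inside a vertically flipped source texture. The distortion shader applies the two vectors as a map from tan-eye-angle space to UV space; a minimal sketch of that mapping (helper name hypothetical):

	// uvScaleOffset[0] is EyeToSourceUVScale, uvScaleOffset[1] is EyeToSourceUVOffset.
	static ovrVector2f TanEyeAngleToUV(ovrVector2f tanEyeAngle, const ovrVector2f uvScaleOffset[2])
	{
		ovrVector2f uv;
		uv.x = tanEyeAngle.x * uvScaleOffset[0].x + uvScaleOffset[1].x;
		uv.y = tanEyeAngle.y * uvScaleOffset[0].y + uvScaleOffset[1].y;
		return uv;
	}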
Code Example #2
  void initGl() {
    RiftGlfwApp::initGl();
    for_each_eye([&](ovrEyeType eye){
      EyeArg & eyeArg = eyeArgs[eye];
      ovrFovPort fov = hmdDesc.DefaultEyeFov[eye];
      ovrEyeRenderDesc renderDesc = ovrHmd_GetRenderDesc(hmd, eye, fov);

      // Set up the per-eye projection matrix
      eyeArg.projection = Rift::fromOvr(
        ovrMatrix4f_Projection(fov, 0.01, 100000, true));
      eyeArg.viewOffset = glm::translate(glm::mat4(), Rift::fromOvr(renderDesc.ViewAdjust));
      ovrRecti texRect;
      texRect.Size = ovrHmd_GetFovTextureSize(hmd, eye,
        hmdDesc.DefaultEyeFov[eye], 1.0f);
      texRect.Pos.x = texRect.Pos.y = 0;

      eyeArg.frameBuffer.init(Rift::fromOvr(texRect.Size));

      ovrVector2f scaleAndOffset[2];
      ovrHmd_GetRenderScaleAndOffset(fov, texRect.Size,
        texRect, scaleAndOffset);
      eyeArg.scale = Rift::fromOvr(scaleAndOffset[0]);
      eyeArg.offset = Rift::fromOvr(scaleAndOffset[1]);

      ovrHmd_CreateDistortionMesh(hmd, eye, fov, 0, &eyeArg.mesh);

      eyeArg.meshVao = gl::VertexArrayPtr(new gl::VertexArray());
      eyeArg.meshVao->bind();

      eyeArg.meshIbo = gl::IndexBufferPtr(new gl::IndexBuffer());
      eyeArg.meshIbo->bind();
      size_t bufferSize = eyeArg.mesh.IndexCount * sizeof(unsigned short);
      eyeArg.meshIbo->load(bufferSize, eyeArg.mesh.pIndexData);

      eyeArg.meshVbo = gl::VertexBufferPtr(new gl::VertexBuffer());
      eyeArg.meshVbo->bind();
      bufferSize = eyeArg.mesh.VertexCount * sizeof(ovrDistortionVertex);
      eyeArg.meshVbo->load(bufferSize, eyeArg.mesh.pVertexData);

      size_t stride = sizeof(ovrDistortionVertex);
      size_t offset = offsetof(ovrDistortionVertex, Pos);
      glEnableVertexAttribArray(gl::Attribute::Position);
      glVertexAttribPointer(gl::Attribute::Position, 2, GL_FLOAT, GL_FALSE,
        stride, (void*)offset);

      offset = offsetof(ovrDistortionVertex, TexG);
      glEnableVertexAttribArray(gl::Attribute::TexCoord0);
      glVertexAttribPointer(gl::Attribute::TexCoord0, 2, GL_FLOAT, GL_FALSE,
        stride, (void*)offset);

      gl::VertexArray::unbind();
      gl::Program::clear();
    });

    distortionProgram = GlUtils::getProgram(
      Resource::SHADERS_DISTORTION_VS,
      Resource::SHADERS_DISTORTION_FS
    );
  }
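To connect this setup to a frame, here is a hedged sketch of the per-eye draw that consumes eyeArg.scale and eyeArg.offset (the uniform names follow the EyeToSourceUVScale/Offset convention of the SDK shaders shown later on this page; prog and the texture binding are assumptions, not part of the original app):

    // Assumed: GLuint prog is the GL handle behind distortionProgram, and the
    // eye framebuffer's color texture is already bound to texture unit 0.
    glUseProgram(prog);
    glUniform2f(glGetUniformLocation(prog, "EyeToSourceUVScale"),
                eyeArg.scale.x, eyeArg.scale.y);
    glUniform2f(glGetUniformLocation(prog, "EyeToSourceUVOffset"),
                eyeArg.offset.x, eyeArg.offset.y);
    eyeArg.meshVao->bind();
    glDrawElements(GL_TRIANGLES, eyeArg.mesh.IndexCount, GL_UNSIGNED_SHORT, 0);
    gl::VertexArray::unbind();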
Code Example #3
File: RiftAppSkeleton.cpp  Project: cleoag/RiftRay
int RiftAppSkeleton::ConfigureClientRendering()
{
    if (m_Hmd == NULL)
        return 1;

    ovrSizei l_TextureSizeLeft = ovrHmd_GetFovTextureSize(m_Hmd, ovrEye_Left, m_Hmd->DefaultEyeFov[0], 1.0f);
    ovrSizei l_TextureSizeRight = ovrHmd_GetFovTextureSize(m_Hmd, ovrEye_Right, m_Hmd->DefaultEyeFov[1], 1.0f);
    ovrSizei l_TextureSize;
    l_TextureSize.w = l_TextureSizeLeft.w + l_TextureSizeRight.w;
    l_TextureSize.h = std::max(l_TextureSizeLeft.h, l_TextureSizeRight.h);

    // Renderbuffer init - we can use smaller subsets of it easily
    deallocateFBO(m_renderBuffer);
    allocateFBO(m_renderBuffer, l_TextureSize.w, l_TextureSize.h);


    l_EyeTexture[0].OGL.Header.API = ovrRenderAPI_OpenGL;
    l_EyeTexture[0].OGL.Header.TextureSize.w = l_TextureSize.w;
    l_EyeTexture[0].OGL.Header.TextureSize.h = l_TextureSize.h;
    l_EyeTexture[0].OGL.Header.RenderViewport.Pos.x = 0;
    l_EyeTexture[0].OGL.Header.RenderViewport.Pos.y = 0;
    l_EyeTexture[0].OGL.Header.RenderViewport.Size.w = l_TextureSize.w/2;
    l_EyeTexture[0].OGL.Header.RenderViewport.Size.h = l_TextureSize.h;
    l_EyeTexture[0].OGL.TexId = m_renderBuffer.tex;

    // Right eye the same, except for the x-position in the texture...
    l_EyeTexture[1] = l_EyeTexture[0];
    l_EyeTexture[1].OGL.Header.RenderViewport.Pos.x = (l_TextureSize.w+1) / 2;

    m_RenderViewports[0] = l_EyeTexture[0].OGL.Header.RenderViewport;
    m_RenderViewports[1] = l_EyeTexture[1].OGL.Header.RenderViewport;

    const int distortionCaps =
        ovrDistortionCap_Chromatic |
        ovrDistortionCap_TimeWarp |
        ovrDistortionCap_Vignette;

    // Generate distortion mesh for each eye
    for (int eyeNum = 0; eyeNum < 2; eyeNum++)
    {
        // Allocate & generate distortion mesh vertices.
        ovrHmd_CreateDistortionMesh(
            m_Hmd,
            m_EyeRenderDesc[eyeNum].Eye,
            m_EyeRenderDesc[eyeNum].Fov,
            distortionCaps,
            &m_DistMeshes[eyeNum]);

        ovrHmd_GetRenderScaleAndOffset(
            m_EyeRenderDesc[eyeNum].Fov,
            l_TextureSize,
            m_RenderViewports[eyeNum],
            &m_uvScaleOffsetOut[2*eyeNum]);
    }
    return 0;
}
Code Example #4
		void GetRenderScaleAndOffset(ovrFovPort fov, const math::vector2di& textureSize, const math::recti& renderVP, math::vector2d& scale, math::vector2d& offset)
		{
			ovrSizei sz;
			sz.w = textureSize.x;
			sz.h = textureSize.y;
			ovrRecti r;
			r.Pos.x = renderVP.ULPoint.x;
			r.Pos.y = renderVP.ULPoint.y;
			r.Size.w = renderVP.getWidth();
			r.Size.h = renderVP.getHeight();
			ovrVector2f so[2];
			ovrHmd_GetRenderScaleAndOffset(fov, sz, r, so);
			scale.set(so[0].x, so[0].y);
			offset.set(so[1].x, so[1].y);
		}
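A possible call site for the wrapper above, assuming the math:: types construct as shown (the constructor signatures and numbers are illustrative, not taken from the engine):

		// Illustrative only: a 1024x1024 eye texture rendered into its left half.
		ovrFovPort fov = hmd->DefaultEyeFov[ovrEye_Left];
		math::vector2di textureSize(1024, 1024);
		math::recti renderVP(0, 0, 512, 1024);   // assumed (x, y, width, height) constructor
		math::vector2d scale, offset;
		GetRenderScaleAndOffset(fov, textureSize, renderVP, scale, offset);
		// scale and offset now feed the distortion shader's EyeToSourceUVScale/Offset uniforms.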
Code Example #5
HRESULT InitOculusRiftObjects(unsigned distortionCaps, ovrHmd HMD)
{
	HRESULT hr = S_OK;

	// Get the stereo rendering settings.
	// Needed to determine the size when creating the render target.
	// Note: the DK1 returns a rather odd size here, so it is unclear whether it can be trusted as-is.
	ovrSizei recommenedTex0Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Left, HMDDesc.DefaultEyeFov[0], 1.0f);
	ovrSizei recommenedTex1Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Right, HMDDesc.DefaultEyeFov[1], 1.0f);
	ovrSizei RenderTargetSize;
	RenderTargetSize.w = recommenedTex0Size.w + recommenedTex1Size.w;
	RenderTargetSize.h = max(recommenedTex0Size.h, recommenedTex1Size.h);

	{
		// Create the texture for the render target.
		// The scene is rendered into this.

		D3D11_TEXTURE2D_DESC desc;
		ZeroMemory(&desc, sizeof(desc));
		desc.Width = RenderTargetSize.w;
		desc.Height = RenderTargetSize.h;
		desc.MipLevels = 1;
		desc.ArraySize = 1;
		desc.Format = DXGI_FORMAT_R8G8B8A8_UNORM;
		desc.SampleDesc.Count = 1;
		desc.Usage = D3D11_USAGE_DEFAULT;
		desc.BindFlags = D3D11_BIND_RENDER_TARGET | D3D11_BIND_SHADER_RESOURCE;
		// Create the texture
		g_pd3dDevice->CreateTexture2D(&desc, NULL, &g_pTextureOculus);
		// Creating a shader resource view lets the texture be used as a shader input
		g_pd3dDevice->CreateShaderResourceView(g_pTextureOculus, NULL, &g_pShaderResViewOculus);
		// Create the render target view
		g_pd3dDevice->CreateRenderTargetView(g_pTextureOculus, NULL, &g_pRenderTargetViewOculus);

		// Retrieve the size that was actually created (it may differ from the request)
		g_pTextureOculus->GetDesc(&desc);
		RenderTargetSize.w = desc.Width;
		RenderTargetSize.h = desc.Height;

		// Create the depth buffer (with stencil)
		D3D11_TEXTURE2D_DESC descDepth;
		ZeroMemory(&descDepth, sizeof(descDepth));
		descDepth.Width = RenderTargetSize.w;
		descDepth.Height = RenderTargetSize.h;
		descDepth.MipLevels = 1;
		descDepth.ArraySize = 1;
		descDepth.Format = DXGI_FORMAT_D24_UNORM_S8_UINT;
		descDepth.SampleDesc.Count = 1;
		descDepth.SampleDesc.Quality = 0;
		descDepth.Usage = D3D11_USAGE_DEFAULT;
		descDepth.BindFlags = D3D11_BIND_DEPTH_STENCIL;
		descDepth.CPUAccessFlags = 0;
		descDepth.MiscFlags = 0;
		g_pd3dDevice->CreateTexture2D(&descDepth, NULL, &g_pDepthStencilOculus);

		// Create the depth-stencil view
		D3D11_DEPTH_STENCIL_VIEW_DESC descDSV;
		ZeroMemory(&descDSV, sizeof(descDSV));
		descDSV.Format = descDepth.Format;
		descDSV.ViewDimension = D3D11_DSV_DIMENSION_TEXTURE2D;
		descDSV.Texture2D.MipSlice = 0;
		g_pd3dDevice->CreateDepthStencilView(g_pDepthStencilOculus, &descDSV, &g_pDepthStencilViewOculus);
	}


	// Retrieve the rendering information for each eye.
	ovrFovPort eyeFov[2] = { HMDDesc.DefaultEyeFov[0], HMDDesc.DefaultEyeFov[1] };
	EyeRenderDesc[0] = ovrHmd_GetRenderDesc(HMD, ovrEye_Left, eyeFov[0]);
	EyeRenderDesc[1] = ovrHmd_GetRenderDesc(HMD, ovrEye_Right, eyeFov[1]);

	// Set the capabilities to enable on the HMD.
	ovrHmd_SetEnabledCaps(HMD, ovrHmdCap_LowPersistence |
		ovrHmdCap_LatencyTest);

	// Start using the sensor.
	// Enables yaw correction, orientation, and position tracking.
	ovrHmd_StartSensor(HMD, ovrSensorCap_Orientation |
		ovrSensorCap_YawCorrection |
		ovrSensorCap_Position, 0);

	// Store the viewport information.
	EyeRenderViewport[0].Pos.x = 0;
	EyeRenderViewport[0].Pos.y = 0;
	EyeRenderViewport[0].Size.w = RenderTargetSize.w / 2;
	EyeRenderViewport[0].Size.h = RenderTargetSize.h;
	EyeRenderViewport[1].Pos.x = (RenderTargetSize.w + 1) / 2;
	EyeRenderViewport[1].Pos.y = 0;
	EyeRenderViewport[1].Size = EyeRenderViewport[0].Size;



	// Create a distortion mesh for each eye
	for (int eyeNum = 0; eyeNum < 2; eyeNum++)
	{
		// Retrieve the mesh data
		ovrDistortionMesh meshData;
		ovrHmd_CreateDistortionMesh(HMD,
			EyeRenderDesc[eyeNum].Eye, EyeRenderDesc[eyeNum].Fov,
			distortionCaps, &meshData);

		ovrHmd_GetRenderScaleAndOffset(EyeRenderDesc[eyeNum].Fov,
			RenderTargetSize, EyeRenderViewport[eyeNum],
			(ovrVector2f*)UVScaleOffset[eyeNum]);

		// Parse it into the vertex format we have prepared.
		DistortionVertex * pVBVerts = (DistortionVertex*)OVR_ALLOC(
			sizeof(DistortionVertex)* meshData.VertexCount);
		DistortionVertex * v = pVBVerts;
		ovrDistortionVertex * ov = meshData.pVertexData;
		for (unsigned vertNum = 0; vertNum < meshData.VertexCount; vertNum++)
		{
			v->Pos.x = ov->Pos.x;
			v->Pos.y = ov->Pos.y;
			v->TexR = (*(ovrVector2f*)&ov->TexR);
			v->TexG = (*(ovrVector2f*)&ov->TexG);
			v->TexB = (*(ovrVector2f*)&ov->TexB);
			v->Col[0] = v->Col[1] = v->Col[2] = (BYTE)(ov->VignetteFactor*255.99f);
			v->Col[3] = (BYTE)(ov->TimeWarpFactor*255.99f);
			v++; ov++;
		}

		// Create a vertex buffer from the mesh vertex data.
		D3D11_BUFFER_DESC bd = { 0 };
		bd.Usage = D3D11_USAGE_DEFAULT;
		bd.ByteWidth = sizeof(DistortionVertex)* meshData.VertexCount;
		bd.BindFlags = D3D11_BIND_VERTEX_BUFFER;
		bd.CPUAccessFlags = 0;
		D3D11_SUBRESOURCE_DATA InitData = { 0 };
		InitData.pSysMem = pVBVerts;
		hr = g_pd3dDevice->CreateBuffer(&bd, &InitData, &g_pVertexBufferOculus[eyeNum]);
		if (FAILED(hr))
			return hr;

		// Likewise, create the index buffer.
		bd.ByteWidth = sizeof(unsigned short)* meshData.IndexCount;
		bd.BindFlags = D3D11_BIND_INDEX_BUFFER;
		bd.CPUAccessFlags = 0;
		InitData.pSysMem = meshData.pIndexData;
		hr = g_pd3dDevice->CreateBuffer(&bd, &InitData, &g_pIndexBufferOculus[eyeNum]);
		if (FAILED(hr))
			return hr;
		oculusIndexCount = meshData.IndexCount;

		OVR_FREE(pVBVerts);
		ovrHmd_DestroyDistortionMesh(&meshData);
	}


	{
		// Compile the vertex shader
		ID3DBlob* pBlob = NULL;
		hr = CompileShaderFromFile("OculusRift.hlsl", "VS_TimeWarp", "vs_4_0", &pBlob);
		if (FAILED(hr))
		{
			MessageBox(NULL, "ピクセルシェーダーを読み込めませんでした。", "Error", MB_OK);
			return hr;
		}

		// Create the vertex shader object from the compiled blob
		hr = g_pd3dDevice->CreateVertexShader(pBlob->GetBufferPointer(), pBlob->GetBufferSize(), NULL, &g_pVertexShaderOculus);
		if (FAILED(hr))
			return hr;

		// Input layout format
		static D3D11_INPUT_ELEMENT_DESC DistortionMeshVertexDesc[] =
		{
			{ "Position", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0 },
			{ "TexCoord", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 8, D3D11_INPUT_PER_VERTEX_DATA, 0 },
			{ "TexCoord", 1, DXGI_FORMAT_R32G32_FLOAT, 0, 16, D3D11_INPUT_PER_VERTEX_DATA, 0 },
			{ "TexCoord", 2, DXGI_FORMAT_R32G32_FLOAT, 0, 24, D3D11_INPUT_PER_VERTEX_DATA, 0 },
			{ "Color", 0, DXGI_FORMAT_R8G8B8A8_UNORM, 0, 32, D3D11_INPUT_PER_VERTEX_DATA, 0 },
		};

		// Create the input layout
		hr = g_pd3dDevice->CreateInputLayout(DistortionMeshVertexDesc, ARRAYSIZE(DistortionMeshVertexDesc), pBlob->GetBufferPointer(),
			pBlob->GetBufferSize(), &g_pVertexLayoutOculus);
		if (FAILED(hr))
			return hr;

		pBlob->Release();
	}

	{
		// Compile the pixel shader
		ID3DBlob* pBlob = NULL;
		hr = CompileShaderFromFile("OculusRift.hlsl", "PS_Oculus", "ps_4_0", &pBlob);
		if (FAILED(hr))
		{
			MessageBox(NULL, "ピクセルシェーダーを読み込めませんでした。", "Error", MB_OK);
			return hr;
		}

		// Create the pixel shader object from the compiled blob
		hr = g_pd3dDevice->CreatePixelShader(pBlob->GetBufferPointer(), pBlob->GetBufferSize(), NULL, &g_pPixelShaderOculus);
		pBlob->Release();
		if (FAILED(hr))
			return hr;
	}

	// Create the constant buffer.
	// Settings for the shader that renders the distortion mesh.
	{
		D3D11_BUFFER_DESC bd = { 0 };
		bd.Usage = D3D11_USAGE_DEFAULT;
		bd.ByteWidth = sizeof(OculusRiftSettings);
		bd.BindFlags = D3D11_BIND_CONSTANT_BUFFER;
		bd.CPUAccessFlags = 0;
		hr = g_pd3dDevice->CreateBuffer(&bd, NULL, &g_pConstantBufferOculus);
		if (FAILED(hr))
			return hr;
	}

	return hr;
}
Code Example #6
/**
* Render the Oculus Rift View.
***/
void* OculusRenderer::Provoke(void* pThis, int eD3D, int eD3DInterface, int eD3DMethod, DWORD dwNumberConnected, int& nProvokerIndex)	
{
	// return if wrong call
	if ((eD3D >= (int)AQU_DirectXVersion::DirectX_9_0) &&
		(eD3D <= (int)AQU_DirectXVersion::DirectX_9_29))
	{
		if (((eD3DInterface == INTERFACE_IDIRECT3DDEVICE9) &&
			(eD3DMethod == METHOD_IDIRECT3DDEVICE9_PRESENT)) || 
			((eD3DInterface == INTERFACE_IDIRECT3DDEVICE9) &&
			(eD3DMethod == METHOD_IDIRECT3DDEVICE9_ENDSCENE)) ||
			((eD3DInterface == INTERFACE_IDIRECT3DSWAPCHAIN9) &&
			(eD3DMethod == METHOD_IDIRECT3DSWAPCHAIN9_PRESENT)))
		{
			(pThis);
		}
		else return nullptr;
	}
	else
		return nullptr;

	// get input data
	if (m_paInput[(int)ORN_Decommanders::LeftTexture])
		m_pcTextureLeft = *(LPDIRECT3DTEXTURE9*)m_paInput[(int)ORN_Decommanders::LeftTexture];
	else 
		m_pcTextureLeft = nullptr;
	if (m_paInput[(int)ORN_Decommanders::RightTexture])
		m_pcTextureRight = *(LPDIRECT3DTEXTURE9*)m_paInput[(int)ORN_Decommanders::RightTexture];
	else m_pcTextureRight = nullptr;

	if (m_paInput[(int)ORN_Decommanders::DistortionVertexBufferLeft])
		if (*(LPDIRECT3DVERTEXBUFFER9**)m_paInput[(int)ORN_Decommanders::DistortionVertexBufferLeft])
			m_pcDistortionVertexBufferLeft = **(LPDIRECT3DVERTEXBUFFER9**)m_paInput[(int)ORN_Decommanders::DistortionVertexBufferLeft];
		else m_pcDistortionVertexBufferLeft = nullptr;
	else m_pcDistortionVertexBufferLeft = nullptr;
	if (m_paInput[(int)ORN_Decommanders::DistortionVertexBufferRight])
		if (*(LPDIRECT3DVERTEXBUFFER9**)m_paInput[(int)ORN_Decommanders::DistortionVertexBufferRight])
			m_pcDistortionVertexBufferRight = **(LPDIRECT3DVERTEXBUFFER9**)m_paInput[(int)ORN_Decommanders::DistortionVertexBufferRight];
		else m_pcDistortionVertexBufferRight = nullptr;
	else m_pcDistortionVertexBufferRight = nullptr;
	if (m_paInput[(int)ORN_Decommanders::DistortionIndexBufferLeft])
		if (*(LPDIRECT3DINDEXBUFFER9**)m_paInput[(int)ORN_Decommanders::DistortionIndexBufferLeft])
			m_pcDistortionIndexBufferLeft = **(LPDIRECT3DINDEXBUFFER9**)m_paInput[(int)ORN_Decommanders::DistortionIndexBufferLeft];
		else m_pcDistortionIndexBufferLeft = nullptr;
	else m_pcDistortionIndexBufferLeft = nullptr;
	if (m_paInput[(int)ORN_Decommanders::DistortionIndexBufferRight])
		if (*(LPDIRECT3DINDEXBUFFER9**)m_paInput[(int)ORN_Decommanders::DistortionIndexBufferRight])
			m_pcDistortionIndexBufferRight = **(LPDIRECT3DINDEXBUFFER9**)m_paInput[(int)ORN_Decommanders::DistortionIndexBufferRight];
		else m_pcDistortionIndexBufferRight = nullptr;
	else m_pcDistortionIndexBufferRight = nullptr;
	if (m_paInput[(int)ORN_Decommanders::OculusVertexDeclaration])
		if (*(LPDIRECT3DVERTEXDECLARATION9**)m_paInput[(int)ORN_Decommanders::OculusVertexDeclaration])
			m_pcVertexDecl = **(LPDIRECT3DVERTEXDECLARATION9**)m_paInput[(int)ORN_Decommanders::OculusVertexDeclaration];
		else m_pcVertexDecl = nullptr;
	else m_pcVertexDecl = nullptr;
	if (m_paInput[(int)ORN_Decommanders::DefaultEyeFovLeft])
		if (*(ovrFovPort**)m_paInput[(int)ORN_Decommanders::DefaultEyeFovLeft])
			m_psFOVPortLeft = *(ovrFovPort**)m_paInput[(int)ORN_Decommanders::DefaultEyeFovLeft];
		else m_psFOVPortLeft = nullptr;
	else m_psFOVPortLeft = nullptr;
	if (m_paInput[(int)ORN_Decommanders::DefaultEyeFovRight])
		if (*(ovrFovPort**)m_paInput[(int)ORN_Decommanders::DefaultEyeFovRight])
			m_psFOVPortRight = *(ovrFovPort**)m_paInput[(int)ORN_Decommanders::DefaultEyeFovRight];
		else m_psFOVPortRight = nullptr;
	else m_psFOVPortRight = nullptr;

	// get device 
	LPDIRECT3DDEVICE9 pcDevice = nullptr;
	bool bReleaseDevice = false;
	if (eD3DInterface == INTERFACE_IDIRECT3DDEVICE9)
	{
		pcDevice = (LPDIRECT3DDEVICE9)pThis;
	}
	else if (eD3DInterface == INTERFACE_IDIRECT3DSWAPCHAIN9)
	{
		LPDIRECT3DSWAPCHAIN9 pSwapChain = (LPDIRECT3DSWAPCHAIN9)pThis;
		if (!pSwapChain) 
		{
			OutputDebugString(L"Oculus Renderer Node : No swapchain !");
			return nullptr;
		}
		pSwapChain->GetDevice(&pcDevice);
		bReleaseDevice = true;
	}
	if (!pcDevice)
	{
		OutputDebugString(L"Oculus Renderer Node : No device !");
		return nullptr;
	}

	// Original code (LibOVR) :
	// pShaderCode = ShaderCompile("precompiledVertexShaderSrc",VertexShaderSrc,"vs_2_0");
	// pShaderCode = ShaderCompile("precompiledVertexShaderTimewarpSrc",VertexShaderTimewarpSrc,"vs_3_0");
	// pShaderCode = ShaderCompile("precompiledPixelShaderSrc",PixelShaderSrc,"ps_3_0");

	// pixel shader created ?
	if (!m_pcOculusPixelShader)
	{
		LPD3DXBUFFER pShader;

		// compile and create shader
		if (SUCCEEDED(D3DXCompileShader(PixelShaderSrc,strlen(PixelShaderSrc),NULL,NULL,"main","ps_3_0",NULL,&pShader,NULL,&m_pcOculusPixelShaderCT)))
		{
			OutputDebugString(L"Pixel shader compiled!");
			pcDevice->CreatePixelShader((DWORD*)pShader->GetBufferPointer(), &m_pcOculusPixelShader);
		}
	}

	// vertex shader created ?
	if (!m_pcOculusVertexShader)
	{
		LPD3DXBUFFER pShader;

		// compile and create shader
		if (SUCCEEDED(D3DXCompileShader(VertexShaderSrc,strlen(VertexShaderSrc),NULL,NULL,"main","vs_2_0",NULL,&pShader,NULL,&m_pcOculusVertexShaderCT)))
		{
			OutputDebugString(L"Vertex shader compiled!");
			pcDevice->CreateVertexShader((DWORD*)pShader->GetBufferPointer(), &m_pcOculusVertexShader);
		}
	}

	// side by side pixel shader ?
	if (!m_pcSideBySidePixelShader)
	{
		LPD3DXBUFFER pShader;

		// compile and create shader
		if (SUCCEEDED(D3DXCompileShader(PixelShaderSrcSideBySide,strlen(PixelShaderSrcSideBySide),NULL,NULL,"SBS","ps_2_0",NULL,&pShader,NULL,&m_pcSideBySidePixelShaderCT)))
		{
			OutputDebugString(L"Pixel shader compiled!");
			pcDevice->CreatePixelShader((DWORD*)pShader->GetBufferPointer(), &m_pcSideBySidePixelShader);
		}

	}

	// test textures created ?
	if ((!m_pcTextureRightTest) || (!m_pcTextureLeftTest))
	{
		HMODULE hModule = GetModuleHandle(L"OculusRenderer.dll");

		if (!m_pcTextureLeftTest)
		{
			// create a test texture
			if (SUCCEEDED(D3DXCreateTextureFromResource(pcDevice, hModule, MAKEINTRESOURCE(IMG_BACKGROUND01), &m_pcTextureLeftTest)))
				OutputDebugString(L"Texture created !");
			else m_pcTextureLeftTest = nullptr;
		}
		if (!m_pcTextureRightTest)
		{
			// create a test texture
			if (SUCCEEDED(D3DXCreateTextureFromResource(pcDevice, hModule, MAKEINTRESOURCE(IMG_BACKGROUND02), &m_pcTextureRightTest)))
				OutputDebugString(L"Texture created !");
			else m_pcTextureRightTest = nullptr;
		}
	}

	// default vertex buffer ?
	if (!m_pcVertexBufferDefault)
	{
		InitDefaultVertexBuffer(pcDevice);
		if (!m_pcVertexBufferDefault) return nullptr;
	}

	// vertex buffers ? index buffers ? not all present ?
	if ((!m_pcDistortionVertexBufferLeft) || (!m_pcDistortionVertexBufferRight) ||
		(!m_pcDistortionIndexBufferLeft) || (!m_pcDistortionIndexBufferRight) ||
		(!m_pcVertexDecl))
	{
		if (m_bBuffersConnected)
		{
			// clear all descriptions for safety
			ZeroMemory(&m_sVertexBufferDescLeft, sizeof(D3DVERTEXBUFFER_DESC));
			ZeroMemory(&m_sVertexBufferDescRight, sizeof(D3DVERTEXBUFFER_DESC));
			ZeroMemory(&m_sIndexBufferDescLeft, sizeof(D3DINDEXBUFFER_DESC));
			ZeroMemory(&m_sIndexBufferDescRight, sizeof(D3DINDEXBUFFER_DESC));

			m_bBuffersConnected = false;
		}
	}
	else
	{
		m_bBuffersConnected = true;

		// vertex buffer description ? left
		if (!m_sVertexBufferDescLeft.Size)
			m_pcDistortionVertexBufferLeft->GetDesc(&m_sVertexBufferDescLeft);

		// vertex buffer length matches oculus vertex type ?
		if ((m_sVertexBufferDescLeft.Size % sizeof(ovrDistortionVertex)) != 0)
		{
			OutputDebugString(L"OculusRenderer Node : Connected vertex buffer size mismatch !");
			return nullptr;
		}

		// and right
		if (!m_sVertexBufferDescRight.Size)
			m_pcDistortionVertexBufferRight->GetDesc(&m_sVertexBufferDescRight);

		// vertex buffer length matches oculus vertex type ?
		if ((m_sVertexBufferDescRight.Size % sizeof(ovrDistortionVertex)) != 0)
		{
			OutputDebugString(L"OculusRenderer Node : Connected vertex buffer size mismatch !");
			return nullptr;
		}

		// index buffer ?
		if ((!m_pcDistortionIndexBufferLeft) || (!m_pcDistortionIndexBufferRight))
			return nullptr;

		// index buffer description ?
		if (!m_sIndexBufferDescLeft.Size)
		{
			m_pcDistortionIndexBufferLeft->GetDesc(&m_sIndexBufferDescLeft);

			// index buffer length matches vertex buffer size ? TODO !!
			/*if ()
			{
			OutputDebugString(L"OculusRenderer Node : Connected index buffer size mismatch !");
			return nullptr;
			}*/
		}
		if (!m_sIndexBufferDescRight.Size)
		{
			m_pcDistortionIndexBufferRight->GetDesc(&m_sIndexBufferDescRight);

			// index buffer length matches vertex buffer size ? TODO !!
			/*if ()
			{
			OutputDebugString(L"OculusRenderer Node : Connected index buffer size mismatch !");
			return nullptr;
			}*/
		}
	}

	// start to render
	pcDevice->BeginScene();

	// save states
	IDirect3DStateBlock9* pStateBlock = nullptr;
	pcDevice->CreateStateBlock(D3DSBT_ALL, &pStateBlock);

	// set ALL render states to default
	SetAllRenderStatesDefault(pcDevice);

	// set states
	pcDevice->SetTextureStageState(0, D3DTSS_COLOROP, D3DTOP_SELECTARG1);
	pcDevice->SetTextureStageState(0, D3DTSS_COLORARG1, D3DTA_TEXTURE);
	pcDevice->SetTextureStageState(0, D3DTSS_ALPHAOP, D3DTOP_SELECTARG1);
	pcDevice->SetTextureStageState(0, D3DTSS_ALPHAARG1, D3DTA_CONSTANT);
	pcDevice->SetTextureStageState(0, D3DTSS_CONSTANT, 0xffffffff);
	pcDevice->SetRenderState(D3DRS_ALPHABLENDENABLE, TRUE);
	pcDevice->SetRenderState(D3DRS_ZENABLE, D3DZB_FALSE);
	pcDevice->SetRenderState(D3DRS_ZWRITEENABLE, FALSE);
	pcDevice->SetSamplerState(0, D3DSAMP_SRGBTEXTURE, 0);
	pcDevice->SetSamplerState(0, D3DSAMP_ADDRESSU, D3DTADDRESS_CLAMP);
	pcDevice->SetSamplerState(0, D3DSAMP_ADDRESSV, D3DTADDRESS_CLAMP);
	pcDevice->SetSamplerState(0, D3DSAMP_ADDRESSW, D3DTADDRESS_CLAMP);
	pcDevice->SetSamplerState(0, D3DSAMP_MAGFILTER, D3DTEXF_ANISOTROPIC);
	pcDevice->SetSamplerState(0, D3DSAMP_MINFILTER, D3DTEXF_ANISOTROPIC);
	pcDevice->SetSamplerState(0, D3DSAMP_MIPFILTER, D3DTEXF_NONE);

	D3DCOLOR clearColor = D3DCOLOR_RGBA(0, 0, 0, 0);

	pcDevice->Clear(0, NULL, D3DCLEAR_TARGET, clearColor, 0, 0);

	// required fields
	D3DSURFACE_DESC sSurfaceDesc;
	ovrSizei sTextureSize;
	ovrRecti sRenderViewport;
	ovrVector2f UVScaleOffset[2];

	// LEFT EYE :

	// left eye, first set stream source, indices
	if (m_bBuffersConnected)
	{
		// no timewarp here, use standard vertex shader, set pixel shader, set vertex declaration
		pcDevice->SetVertexShader( m_pcOculusVertexShader);
		pcDevice->SetPixelShader( m_pcOculusPixelShader );
		pcDevice->SetVertexDeclaration( m_pcVertexDecl );

		// set texture
		if (m_pcTextureLeft)
			pcDevice->SetTexture( 0, m_pcTextureLeft );
		else if (m_pcTextureLeftTest)
			pcDevice->SetTexture( 0, m_pcTextureLeftTest );
		else pcDevice->SetTexture( 0, 0);

		// get texture size
		if (m_pcTextureLeft)
			m_pcTextureLeft->GetLevelDesc(0, &sSurfaceDesc);
		else if (m_pcTextureLeftTest)
			m_pcTextureLeftTest->GetLevelDesc(0, &sSurfaceDesc);
		else ZeroMemory(&sSurfaceDesc, sizeof(D3DSURFACE_DESC));
		sTextureSize.w = (int)sSurfaceDesc.Width;
		sTextureSize.h = (int)sSurfaceDesc.Height;

		// set render viewport size the same size as the texture size (!)
		sRenderViewport.Pos.x = 0;
		sRenderViewport.Pos.y = 0;
		sRenderViewport.Size.w = sTextureSize.w;
		sRenderViewport.Size.h = sTextureSize.h;

		// get and set scale and offset
		if (m_psFOVPortLeft)
			ovrHmd_GetRenderScaleAndOffset(*m_psFOVPortLeft, sTextureSize, sRenderViewport, UVScaleOffset);
		else
			ovrHmd_GetRenderScaleAndOffset(m_sDefaultFOVPortLeft, sTextureSize, sRenderViewport, UVScaleOffset);
		pcDevice->SetVertexShaderConstantF( 0, ( FLOAT* )&UVScaleOffset[0], 1 );
		pcDevice->SetVertexShaderConstantF( 2, ( FLOAT* )&UVScaleOffset[1], 1 );

		pcDevice->SetStreamSource( 0, m_pcDistortionVertexBufferLeft,0, sizeof(ovrDistortionVertex) );
		pcDevice->SetIndices( m_pcDistortionIndexBufferLeft);

		// draw
		pcDevice->DrawIndexedPrimitive( D3DPT_TRIANGLELIST,0,0, m_sVertexBufferDescLeft.Size / sizeof(ovrDistortionVertex) , 0, m_sIndexBufferDescLeft.Size / 6 );
	}
	else
	{
		pcDevice->SetSamplerState(1, D3DSAMP_SRGBTEXTURE, 0);
		pcDevice->SetSamplerState(1, D3DSAMP_ADDRESSU, D3DTADDRESS_CLAMP);
		pcDevice->SetSamplerState(1, D3DSAMP_ADDRESSV, D3DTADDRESS_CLAMP);
		pcDevice->SetSamplerState(1, D3DSAMP_ADDRESSW, D3DTADDRESS_CLAMP);
		pcDevice->SetSamplerState(1, D3DSAMP_MAGFILTER, D3DTEXF_ANISOTROPIC);
		pcDevice->SetSamplerState(1, D3DSAMP_MINFILTER, D3DTEXF_ANISOTROPIC);
		pcDevice->SetSamplerState(1, D3DSAMP_MIPFILTER, D3DTEXF_NONE);

		// side by side render
		pcDevice->SetVertexShader(NULL);
		pcDevice->SetPixelShader(m_pcSideBySidePixelShader);
		pcDevice->SetVertexDeclaration( NULL );
		pcDevice->SetFVF(D3DFVF_TEXVERTEX);

		// set textures
		if (m_pcTextureLeft)
			pcDevice->SetTexture( 0, m_pcTextureLeft );
		else if (m_pcTextureLeftTest)
			pcDevice->SetTexture( 0, m_pcTextureLeftTest );
		else pcDevice->SetTexture( 0, 0);
		if (m_pcTextureRight)
			pcDevice->SetTexture( 1, m_pcTextureRight );
		else if (m_pcTextureRightTest)
			pcDevice->SetTexture( 1, m_pcTextureRightTest );
		else pcDevice->SetTexture( 1, 0);

		// set stream and draw
		pcDevice->SetStreamSource( 0, m_pcVertexBufferDefault,0, sizeof(TEXVERTEX) );
		pcDevice->DrawPrimitive(D3DPT_TRIANGLEFAN, 0, 2);
	}

	// RIGHT EYE:
	if (m_bBuffersConnected)
	{
		// set texture
		if (m_pcTextureRight)
			pcDevice->SetTexture( 0, m_pcTextureRight );
		else if (m_pcTextureRightTest)
			pcDevice->SetTexture( 0, m_pcTextureRightTest );
		else pcDevice->SetTexture( 0, 0);

		// get texture size
		if (m_pcTextureRight)
			m_pcTextureRight->GetLevelDesc(0, &sSurfaceDesc);
		else if (m_pcTextureRightTest)
			m_pcTextureRightTest->GetLevelDesc(0, &sSurfaceDesc);
		else ZeroMemory(&sSurfaceDesc, sizeof(D3DSURFACE_DESC));
		sTextureSize.w = (int)sSurfaceDesc.Width;
		sTextureSize.h = (int)sSurfaceDesc.Height;

		// get and set scale and offset
		if (m_psFOVPortRight)
			ovrHmd_GetRenderScaleAndOffset(*m_psFOVPortRight, sTextureSize, sRenderViewport, UVScaleOffset);
		else
			ovrHmd_GetRenderScaleAndOffset(m_sDefaultFOVPortRight, sTextureSize, sRenderViewport, UVScaleOffset);
		pcDevice->SetVertexShaderConstantF( 0, ( FLOAT* )&UVScaleOffset[0], 1 );
		pcDevice->SetVertexShaderConstantF( 2, ( FLOAT* )&UVScaleOffset[1], 1 );

		// set stream source, indices, draw
		pcDevice->SetStreamSource( 0, m_pcDistortionVertexBufferRight,0, sizeof(ovrDistortionVertex) );
		pcDevice->SetIndices( m_pcDistortionIndexBufferRight);

		// draw
		pcDevice->DrawIndexedPrimitive( D3DPT_TRIANGLELIST,0,0, m_sVertexBufferDescRight.Size / sizeof(ovrDistortionVertex) , 0, m_sIndexBufferDescRight.Size / 6 );
	}

	pcDevice->EndScene();

	// apply and release state block
	if (pStateBlock)
	{
		pStateBlock->Apply();
		pStateBlock->Release();
	}

	// release device if provided by swapchain
	if (bReleaseDevice) pcDevice->Release();

	return nullptr;
}
Code Example #7
//-------------------------------------------------------------------------------------
int Init()
{
    // Initializes LibOVR, and the Rift
    ovr_Initialize();
	HMD = ovrHmd_Create(0);
    if (!HMD)
    {
        MessageBoxA(NULL,"Oculus Rift not detected.","", MB_OK);
        return(1);
    }
	if (HMD->ProductName[0] == '\0') 
        MessageBoxA(NULL,"Rift detected, display not enabled.","", MB_OK);

	//Setup Window and Graphics - use window frame if relying on Oculus driver
	const int backBufferMultisample = 1;
    bool UseAppWindowFrame = (HMD->HmdCaps & ovrHmdCap_ExtendDesktop) ? false : true;
    HWND window = Util_InitWindowAndGraphics(Recti(HMD->WindowsPos, HMD->Resolution),
                                         FullScreen, backBufferMultisample, UseAppWindowFrame,&pRender);
	if (!window) return 1;
	ovrHmd_AttachToWindow(HMD, window, NULL, NULL);

    //Configure Stereo settings.
    Sizei recommenedTex0Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Left,  HMD->DefaultEyeFov[0], 1.0f);
    Sizei recommenedTex1Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Right, HMD->DefaultEyeFov[1], 1.0f);
	Sizei RenderTargetSize;
    RenderTargetSize.w = recommenedTex0Size.w + recommenedTex1Size.w;
    RenderTargetSize.h = max ( recommenedTex0Size.h, recommenedTex1Size.h );

    const int eyeRenderMultisample = 1;
    pRendertargetTexture = pRender->CreateTexture(Texture_RGBA | Texture_RenderTarget |
                                                  eyeRenderMultisample,
                                                  RenderTargetSize.w, RenderTargetSize.h, NULL);
    // The actual RT size may be different due to HW limits.
    RenderTargetSize.w = pRendertargetTexture->GetWidth();
    RenderTargetSize.h = pRendertargetTexture->GetHeight();

    // Initialize eye rendering information.
    // The viewport sizes are re-computed in case RenderTargetSize changed due to HW limitations.
    ovrFovPort eyeFov[2] = { HMD->DefaultEyeFov[0], HMD->DefaultEyeFov[1] } ;

    EyeRenderViewport[0].Pos  = Vector2i(0,0);
    EyeRenderViewport[0].Size = Sizei(RenderTargetSize.w / 2, RenderTargetSize.h);
    EyeRenderViewport[1].Pos  = Vector2i((RenderTargetSize.w + 1) / 2, 0);
    EyeRenderViewport[1].Size = EyeRenderViewport[0].Size;

    #if SDK_RENDER
	// Query D3D texture data.
    EyeTexture[0].D3D11.Header.API            = ovrRenderAPI_D3D11;
    EyeTexture[0].D3D11.Header.TextureSize    = RenderTargetSize;
    EyeTexture[0].D3D11.Header.RenderViewport = EyeRenderViewport[0];
    EyeTexture[0].D3D11.pTexture              = pRendertargetTexture->Tex.GetPtr();
    EyeTexture[0].D3D11.pSRView               = pRendertargetTexture->TexSv.GetPtr();

    // Right eye uses the same texture, but different rendering viewport.
    EyeTexture[1] = EyeTexture[0];
    EyeTexture[1].D3D11.Header.RenderViewport = EyeRenderViewport[1];

    // Configure d3d11.
    ovrD3D11Config d3d11cfg;
    d3d11cfg.D3D11.Header.API         = ovrRenderAPI_D3D11;
    d3d11cfg.D3D11.Header.RTSize      = Sizei(HMD->Resolution.w, HMD->Resolution.h);
    d3d11cfg.D3D11.Header.Multisample = backBufferMultisample;
    d3d11cfg.D3D11.pDevice            = pRender->Device;
    d3d11cfg.D3D11.pDeviceContext     = pRender->Context;
    d3d11cfg.D3D11.pBackBufferRT      = pRender->BackBufferRT;
    d3d11cfg.D3D11.pSwapChain         = pRender->SwapChain;

    if (!ovrHmd_ConfigureRendering(HMD, &d3d11cfg.Config,
		                           ovrDistortionCap_Chromatic | ovrDistortionCap_Vignette |
                                   ovrDistortionCap_TimeWarp | ovrDistortionCap_Overdrive,
								   eyeFov, EyeRenderDesc))	return(1);
    #else
	//Shader vertex format
	D3D11_INPUT_ELEMENT_DESC DistortionMeshVertexDesc[] = {
		{"Position", 0, DXGI_FORMAT_R32G32_FLOAT,   0, 0,  D3D11_INPUT_PER_VERTEX_DATA, 0},
		{"Position", 1, DXGI_FORMAT_R32_FLOAT,      0, 8,  D3D11_INPUT_PER_VERTEX_DATA, 0},
		{"Position", 2, DXGI_FORMAT_R32_FLOAT,      0, 12, D3D11_INPUT_PER_VERTEX_DATA, 0},
		{"TexCoord", 0, DXGI_FORMAT_R32G32_FLOAT,   0, 16, D3D11_INPUT_PER_VERTEX_DATA, 0},
		{"TexCoord", 1, DXGI_FORMAT_R32G32_FLOAT,   0, 24, D3D11_INPUT_PER_VERTEX_DATA, 0},
		{"TexCoord", 2, DXGI_FORMAT_R32G32_FLOAT,   0, 32, D3D11_INPUT_PER_VERTEX_DATA, 0}};
	
	//Distortion vertex shader
	const char* vertexShader = 
		"float2 EyeToSourceUVScale, EyeToSourceUVOffset;                                        \n"
		"float4x4 EyeRotationStart, EyeRotationEnd;                                             \n"
		"float2 TimewarpTexCoord(float2 TexCoord, float4x4 rotMat)                              \n"
		"{                                                                                      \n"
		// Vertex inputs are in TanEyeAngle space for the R,G,B channels (i.e. after chromatic 
		// aberration and distortion). These are now "real world" vectors in direction (x,y,1) 
		// relative to the eye of the HMD.	Apply the 3x3 timewarp rotation to these vectors.
		"    float3 transformed = float3( mul ( rotMat, float4(TexCoord.xy, 1, 1) ).xyz);       \n"
		// Project them back onto the Z=1 plane of the rendered images.
		"    float2 flattened = (transformed.xy / transformed.z);                               \n"
		// Scale them into ([0,0.5],[0,1]) or ([0.5,0],[0,1]) UV lookup space (depending on eye)
		"    return(EyeToSourceUVScale * flattened + EyeToSourceUVOffset);                      \n"
		"}                                                                                      \n"
		"void main(in float2  Position   : POSITION,  in float timewarpLerpFactor : POSITION1,  \n"
		"          in float   Vignette   : POSITION2, in float2 TexCoord0         : TEXCOORD0,  \n"
		"          in float2  TexCoord1  : TEXCOORD1, in float2 TexCoord2         : TEXCOORD2,  \n"
		"          out float4 oPosition  : SV_Position,                                         \n"
		"          out float2 oTexCoord0 : TEXCOORD0, out float2 oTexCoord1 : TEXCOORD1,        \n"
		"          out float2 oTexCoord2 : TEXCOORD2, out float  oVignette  : TEXCOORD3)        \n"
		"{                                                                                      \n"
		"    float4x4 lerpedEyeRot = lerp(EyeRotationStart, EyeRotationEnd, timewarpLerpFactor);\n"
		"    oTexCoord0  = TimewarpTexCoord(TexCoord0,lerpedEyeRot);                            \n"
		"    oTexCoord1  = TimewarpTexCoord(TexCoord1,lerpedEyeRot);                            \n"
		"    oTexCoord2  = TimewarpTexCoord(TexCoord2,lerpedEyeRot);                            \n"
		"    oPosition = float4(Position.xy, 0.5, 1.0);    oVignette = Vignette;                \n"
		"}";

	//Distortion pixel shader
	const char* pixelShader = 
		"Texture2D Texture   : register(t0);                                                    \n"
		"SamplerState Linear : register(s0);                                                    \n"
		"float4 main(in float4 oPosition  : SV_Position,  in float2 oTexCoord0 : TEXCOORD0,     \n"
		"            in float2 oTexCoord1 : TEXCOORD1,    in float2 oTexCoord2 : TEXCOORD2,     \n"
		"            in float  oVignette  : TEXCOORD3)    : SV_Target                           \n"
		"{                                                                                      \n"
		// 3 samples for fixing chromatic aberrations
		"    float R = Texture.Sample(Linear, oTexCoord0.xy).r;                                 \n"
		"    float G = Texture.Sample(Linear, oTexCoord1.xy).g;                                 \n"
		"    float B = Texture.Sample(Linear, oTexCoord2.xy).b;                                 \n"
		"    return (oVignette*float4(R,G,B,1));                                                \n"
		"}";
	pRender->InitShaders(vertexShader, pixelShader, &Shaders, &VertexIL,DistortionMeshVertexDesc,6);

    for ( int eyeNum = 0; eyeNum < 2; eyeNum++ )
    {
        // Allocate mesh vertices, registering with renderer using the OVR vertex format.
        ovrDistortionMesh meshData;
        ovrHmd_CreateDistortionMesh(HMD, (ovrEyeType) eyeNum, eyeFov[eyeNum],
			                        ovrDistortionCap_Chromatic | ovrDistortionCap_TimeWarp, &meshData);
        MeshVBs[eyeNum] = *pRender->CreateBuffer();
        MeshVBs[eyeNum]->Data(Buffer_Vertex,meshData.pVertexData,sizeof(ovrDistortionVertex)*meshData.VertexCount);
        MeshIBs[eyeNum] = *pRender->CreateBuffer();
        MeshIBs[eyeNum]->Data(Buffer_Index,meshData.pIndexData,sizeof(unsigned short) * meshData.IndexCount);
        ovrHmd_DestroyDistortionMesh( &meshData );

		//Create eye render description for use later
		EyeRenderDesc[eyeNum] = ovrHmd_GetRenderDesc(HMD, (ovrEyeType) eyeNum,  eyeFov[eyeNum]);

		//Do scale and offset
		ovrHmd_GetRenderScaleAndOffset(eyeFov[eyeNum],RenderTargetSize, EyeRenderViewport[eyeNum], UVScaleOffset[eyeNum]);
	}

    #endif

    ovrHmd_SetEnabledCaps(HMD, ovrHmdCap_LowPersistence | ovrHmdCap_DynamicPrediction);

	// Start the sensor which informs of the Rift's pose and motion
    ovrHmd_ConfigureTracking(HMD, ovrTrackingCap_Orientation |
                            ovrTrackingCap_MagYawCorrection |
                            ovrTrackingCap_Position, 0);

    // This creates lights and models.
  	pRoomScene = new Scene;
	sbuilder.PopulateRoomScene(pRoomScene, pRender);

    return (0);
}
Code Example #8
File: r_vr_ovr.c  Project: Nephatrine/nephq2
void OVR_FrameStart(int32_t changeBackBuffers)
{
	if (vr_ovr_maxfov->modified)
	{
		int newValue =  vr_ovr_maxfov->value ? 1 : 0;
		if (newValue != (int)vr_ovr_maxfov->value)
			Cvar_SetInteger("vr_ovr_maxfov",newValue);
		changeBackBuffers = 1;
		vr_ovr_maxfov->modified = (qboolean) false;
	}

	if (vr_ovr_supersample->modified)
	{
		if (vr_ovr_supersample->value < 1.0)
			Cvar_Set("vr_ovr_supersample", "1.0");
		else if (vr_ovr_supersample->value > 2.0)
			Cvar_Set("vr_ovr_supersample", "2.0");
		changeBackBuffers = 1;
		vr_ovr_supersample->modified = false;
	}
	if (useChroma != (qboolean) !!vr_chromatic->value)
	{
		useChroma = (qboolean) !!vr_chromatic->value;
	}

	if (vr_ovr_lumoverdrive->modified)
	{
		changeBackBuffers = 1;
		currentFrame = 0;
		vr_ovr_lumoverdrive->modified = false;
	}

	if (changeBackBuffers)
	{
		int i;
		float width, height;
		float ovrScale;

		OVR_CalculateState(&currentState);


		width = glConfig.render_width / (float) hmd->Resolution.w;
		height = glConfig.render_height / (float) hmd->Resolution.h;
		ovrScale = (width + height) / 2.0;
		ovrScale *= R_AntialiasGetScale() * vr_ovr_supersample->value;
		if (vr_ovr_debug->value)
			Com_Printf("VR_OVR: Set render target scale to %.2f\n",ovrScale);
		for (i = 0; i < 2; i++)
		{
			ovrRecti viewport = {{0,0}, {0,0}};
			renderInfo[i].renderTarget = ovrHmd_GetFovTextureSize(hmd, (ovrEyeType) i, renderInfo[i].eyeFov, ovrScale);
			viewport.Size.w = renderInfo[i].renderTarget.w;
			viewport.Size.h = renderInfo[i].renderTarget.h;
			ovrHmd_GetRenderScaleAndOffset(renderInfo[i].eyeFov, renderInfo[i].renderTarget, viewport, (ovrVector2f*) renderInfo[i].UVScaleOffset);

			if (renderInfo[i].renderTarget.w != renderInfo[i].eyeFBO.width || renderInfo[i].renderTarget.h != renderInfo[i].eyeFBO.height)
			{
				if (vr_ovr_debug->value)
					Com_Printf("VR_OVR: Set buffer %i to size %i x %i\n",i,renderInfo[i].renderTarget.w, renderInfo[i].renderTarget.h);
				R_ResizeFBO(renderInfo[i].renderTarget.w, renderInfo[i].renderTarget.h, 1, GL_RGBA8, &renderInfo[i].eyeFBO);
				R_ClearFBO(&renderInfo[i].eyeFBO);
			}

		}
	}
}
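For reference, the scale passed to ovrHmd_GetFovTextureSize above acts as a pixels-per-display-pixel density, so shrinking the back buffer shrinks the recommended eye buffers proportionally. A quick worked example of the arithmetic (numbers illustrative):

	/* A 1280x800 back buffer on a 1920x1080 panel, no AA scale, supersample 1.5: */
	float width    = 1280.0f / 1920.0f;        /* ~0.667 */
	float height   =  800.0f / 1080.0f;        /* ~0.741 */
	float ovrScale = (width + height) / 2.0f;  /* ~0.704 */
	ovrScale      *= 1.0f * 1.5f;              /* ~1.056, the pixelsPerDisplayPixel argument */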
Code Example #9
COculusVR::COculusVR(bool latency)
{
	m_isReady = true;

	// Initializes LibOVR, and the Rift
    ovr_Initialize();

    Hmd = ovrHmd_Create(0);
    if (!Hmd)
    {
        MessageBoxA(NULL, "Oculus Rift not detected.", "", MB_OK);
        return;
    }
    if (Hmd->ProductName[0] == '\0')
        MessageBoxA(NULL, "Rift detected, display not enabled.", "", MB_OK);

    if (Hmd->HmdCaps & ovrHmdCap_ExtendDesktop)
    {
        WindowSize = Hmd->Resolution;
    }
    else
    {
        // In Direct App-rendered mode, we can use smaller window size,
        // as it can have its own contents and isn't tied to the buffer.
        WindowSize = Sizei(1100, 618);  // was Sizei(960, 540); changed to avoid a rotated-output bug.
    }

	ovrHmd_AttachToWindow(Hmd, wzGetWindowHandle(), NULL, NULL);

	// Configure Stereo settings.
	Sizei recommenedTex0Size = ovrHmd_GetFovTextureSize(Hmd, ovrEye_Left, Hmd->DefaultEyeFov[0], 1.0f);
	Sizei recommenedTex1Size = ovrHmd_GetFovTextureSize(Hmd, ovrEye_Right, Hmd->DefaultEyeFov[1], 1.0f);

    EyeRenderTargetSize.w = recommenedTex0Size.w + recommenedTex1Size.w;
    EyeRenderTargetSize.h = Alg::Max( recommenedTex0Size.h, recommenedTex1Size.h );

	//Create Framebuffer
	wzCreateRenderTarget(&m_screenRender);
	wzCreateRenderBufferDepth(&m_screenBuffer,EyeRenderTargetSize.w,EyeRenderTargetSize.h);
	wzCreateTexture(&m_screenTex,EyeRenderTargetSize.w,EyeRenderTargetSize.h,WZ_FORMATTYPE_RGB,NULL);
	//attach
	wzSetRenderBuffer(&m_screenRender,&m_screenBuffer);
	wzSetRenderTexture(&m_screenRender,&m_screenTex);

    // Initialize eye rendering information.
    // The viewport sizes are re-computed in case RenderTargetSize changed due to HW limitations.
    ovrFovPort eyeFov[2] = { Hmd->DefaultEyeFov[0], Hmd->DefaultEyeFov[1] } ;

    EyeRenderViewport[0].Pos  = Vector2i(0,0);
    EyeRenderViewport[0].Size = Sizei(EyeRenderTargetSize.w / 2, EyeRenderTargetSize.h);
    EyeRenderViewport[1].Pos  = Vector2i((EyeRenderTargetSize.w + 1) / 2, 0);
    EyeRenderViewport[1].Size = EyeRenderViewport[0].Size;

	//Shader vertex format
	wzVertexElements ve_var[] = {
		{WZVETYPE_FLOAT2,"position"},
		{WZVETYPE_FLOAT1,"timewarpLerpFactor"},
		{WZVETYPE_FLOAT1,"vignette"},
		{WZVETYPE_FLOAT2,"texCoord0"},
		{WZVETYPE_FLOAT2,"texCoord1"},
		{WZVETYPE_FLOAT2,"texCoord2"},
		WZVE_TMT()
	};

	// create a mesh for each eye
	for ( int eyeNum = 0; eyeNum < 2; eyeNum++ )
	{
		// Allocate mesh vertices, registering with renderer using the OVR vertex format.
		ovrDistortionMesh meshData;
		ovrHmd_CreateDistortionMesh(Hmd, (ovrEyeType) eyeNum, eyeFov[eyeNum],
									ovrDistortionCap_Chromatic | ovrDistortionCap_TimeWarp, &meshData);
		//Create datas
		wzVector2* vertex_pos = new wzVector2[meshData.VertexCount];
		float* vertex_posTimewarp = new float[meshData.VertexCount];
		float* vertex_posVignette = new float[meshData.VertexCount];
		wzVector2* vertex_textanR = new wzVector2[meshData.VertexCount];
		wzVector2* vertex_textanG = new wzVector2[meshData.VertexCount];
		wzVector2* vertex_textanB = new wzVector2[meshData.VertexCount];

		//data copy
		for(unsigned int i = 0; i < meshData.VertexCount; i++) {
			vertex_pos[i].x = meshData.pVertexData[i].ScreenPosNDC.x;
			vertex_pos[i].y = meshData.pVertexData[i].ScreenPosNDC.y;
			vertex_posTimewarp[i] = meshData.pVertexData[i].TimeWarpFactor;
			vertex_posVignette[i] = meshData.pVertexData[i].VignetteFactor;
			vertex_textanR[i].x = meshData.pVertexData[i].TanEyeAnglesR.x;
			vertex_textanR[i].y = meshData.pVertexData[i].TanEyeAnglesR.y;
			vertex_textanG[i].x = meshData.pVertexData[i].TanEyeAnglesG.x;
			vertex_textanG[i].y = meshData.pVertexData[i].TanEyeAnglesG.y;
			vertex_textanB[i].x = meshData.pVertexData[i].TanEyeAnglesB.x;
			vertex_textanB[i].y = meshData.pVertexData[i].TanEyeAnglesB.y;
		}

		void* vertex_pointer[] = {vertex_pos,vertex_posTimewarp,vertex_posVignette,vertex_textanR,vertex_textanG,vertex_textanB};

		if(wzCreateMesh(&MeshBuffer[eyeNum], vertex_pointer, ve_var,
			meshData.pIndexData, meshData.VertexCount, meshData.IndexCount)) {
				MessageBoxA(NULL, "Lens Distort Mesh Error.", "", MB_OK);
				
			delete[] vertex_pos;
			delete[] vertex_posTimewarp;
			delete[] vertex_posVignette;
			delete[] vertex_textanR;
			delete[] vertex_textanG;
			delete[] vertex_textanB;

			return;	//error
		}
		wzChangeDrawMode(&MeshBuffer[eyeNum],WZ_MESH_DF_TRIANGLELIST);

		delete[] vertex_pos;
		delete[] vertex_posTimewarp;
		delete[] vertex_posVignette;
		delete[] vertex_textanR;
		delete[] vertex_textanG;
		delete[] vertex_textanB;

		ovrHmd_DestroyDistortionMesh(&meshData);

		//Create eye render description for use later
		EyeRenderDesc[eyeNum] = ovrHmd_GetRenderDesc(Hmd, (ovrEyeType) eyeNum,  eyeFov[eyeNum]);

		//Do scale and offset
		ovrHmd_GetRenderScaleAndOffset(eyeFov[eyeNum],EyeRenderTargetSize, EyeRenderViewport[eyeNum], UVScaleOffset[eyeNum]);
	}

	//Create shader
	if(wzCreateShader(&LensShader, ols_vertexshader,ols_flagshader, ve_var)) {
		MessageBoxA(NULL, "Lens Shader Compile Error.", "", MB_OK);
		return;
	}

    if(latency) ovrHmd_SetEnabledCaps(Hmd, ovrHmdCap_DynamicPrediction);	//ovrHmdCap_LowPersistence
	// Start the sensor which informs of the Rift's pose and motion
	ovrHmd_ConfigureTracking(Hmd, ovrTrackingCap_Orientation |
								ovrTrackingCap_MagYawCorrection, 0);		//not use : ovrTrackingCap_Position

	m_isReady = false;
}
Code Example #10
void DistortionMeshInit(unsigned distortionCaps, ovrHmd HMD,
                        ovrEyeRenderDesc eyeRenderDesc[2],
                        ovrSizei textureSize, ovrRecti viewports[2],
                        RenderDevice* pRender)
{
    //Generate distortion mesh for each eye
    for ( int eyeNum = 0; eyeNum < 2; eyeNum++ )
    {
        // Allocate & generate distortion mesh vertices.
        ovrDistortionMesh meshData;
        ovrHmd_CreateDistortionMesh(HMD,
                                    eyeRenderDesc[eyeNum].Eye, eyeRenderDesc[eyeNum].Fov,
                                    distortionCaps, &meshData);

        ovrHmd_GetRenderScaleAndOffset(eyeRenderDesc[eyeNum].Fov,
                                       textureSize, viewports[eyeNum],
                                       (ovrVector2f*) DistortionData.UVScaleOffset[eyeNum]);

        // Now parse the vertex data and create a render ready vertex buffer from it
        DistortionVertex * pVBVerts = (DistortionVertex*)OVR_ALLOC( 
                                       sizeof(DistortionVertex) * meshData.VertexCount );
        DistortionVertex * v        = pVBVerts;
        ovrDistortionVertex * ov    = meshData.pVertexData;
        for ( unsigned vertNum = 0; vertNum < meshData.VertexCount; vertNum++ )
        {
            v->Pos.x = ov->Pos.x;
            v->Pos.y = ov->Pos.y;
            v->TexR  = (*(Vector2f*)&ov->TexR);
            v->TexG  = (*(Vector2f*)&ov->TexG);
            v->TexB  = (*(Vector2f*)&ov->TexB);
            v->Col.R = v->Col.G = v->Col.B = (OVR::UByte)( ov->VignetteFactor * 255.99f );
            v->Col.A = (OVR::UByte)( ov->TimeWarpFactor * 255.99f );
            v++; ov++;
        }
        //Register this mesh with the renderer
        DistortionData.MeshVBs[eyeNum] = *pRender->CreateBuffer();
        DistortionData.MeshVBs[eyeNum]->Data ( Buffer_Vertex, pVBVerts,
                                               sizeof(DistortionVertex) * meshData.VertexCount );
        DistortionData.MeshIBs[eyeNum] = *pRender->CreateBuffer();
        DistortionData.MeshIBs[eyeNum]->Data ( Buffer_Index, meshData.pIndexData,
                                                sizeof(unsigned short) * meshData.IndexCount );

        OVR_FREE ( pVBVerts );
        ovrHmd_DestroyDistortionMesh( &meshData );
    }

	// Pixel shader for the mesh
	//-------------------------------------------------------------------------------------------
    const char* pixelShader =
		"Texture2D Texture   : register(t0);                                                    \n"
		"SamplerState Linear : register(s0);                                                    \n"

		"float4 main(in float4 oPosition  : SV_Position, in float4 oColor : COLOR,              \n"
		"            in float2 oTexCoord0 : TEXCOORD0,   in float2 oTexCoord1 : TEXCOORD1,      \n"
		"            in float2 oTexCoord2 : TEXCOORD2)   : SV_Target                            \n"
		"{                                                                                      \n"
		// 3 samples for fixing chromatic aberrations
		"    float ResultR = Texture.Sample(Linear, oTexCoord0.xy).r;                           \n"
		"    float ResultG = Texture.Sample(Linear, oTexCoord1.xy).g;                           \n"
		"    float ResultB = Texture.Sample(Linear, oTexCoord2.xy).b;                           \n"
		"    return float4(ResultR * oColor.r, ResultG * oColor.g, ResultB * oColor.b, 1.0);    \n"
		"}";


    // Choose the vertex shader, according to if you have timewarp enabled
	if (distortionCaps & ovrDistortionCap_TimeWarp)
	{   // TIMEWARP
		//--------------------------------------------------------------------------------------------
		const char* vertexShader = 
			"float2 EyeToSourceUVScale;                                                                \n"
			"float2 EyeToSourceUVOffset;                                                               \n"
			"float4x4 EyeRotationStart;                                                             \n"
			"float4x4 EyeRotationEnd;                                                               \n"
			"float2 TimewarpTexCoord(float2 TexCoord, float4x4 rotMat)                              \n"
			"{                                                                                      \n"
			// Vertex inputs are in TanEyeAngle space for the R,G,B channels (i.e. after chromatic 
			// aberration and distortion). These are now "real world" vectors in direction (x,y,1) 
			// relative to the eye of the HMD.	Apply the 3x3 timewarp rotation to these vectors.
			"    float3 transformed = float3( mul ( rotMat, float4(TexCoord.xy, 1, 1) ).xyz);       \n"
			// Project them back onto the Z=1 plane of the rendered images.
			"    float2 flattened = (transformed.xy / transformed.z);                               \n"
			// Scale them into ([0,0.5],[0,1]) or ([0.5,0],[0,1]) UV lookup space (depending on eye)
			"    return(EyeToSourceUVScale * flattened + EyeToSourceUVOffset);                             \n"
			"}                                                                                      \n"
			"void main(in float2 Position    : POSITION,    in float4 Color       : COLOR0,         \n"
			"          in float2 TexCoord0   : TEXCOORD0,   in float2 TexCoord1   : TEXCOORD1,      \n"
			"          in float2 TexCoord2   : TEXCOORD2,                                           \n"
			"          out float4 oPosition  : SV_Position, out float4 oColor     : COLOR,          \n"
			"          out float2 oTexCoord0 : TEXCOORD0,   out float2 oTexCoord1 : TEXCOORD1,      \n"
			"          out float2 oTexCoord2 : TEXCOORD2)                                           \n"
			"{                                                                                      \n"
			"    float timewarpLerpFactor = Color.a;                                                \n"
			"    float4x4 lerpedEyeRot = lerp(EyeRotationStart, EyeRotationEnd, timewarpLerpFactor);\n"
			"    oTexCoord0  = TimewarpTexCoord(TexCoord0,lerpedEyeRot);                            \n"
			"    oTexCoord1  = TimewarpTexCoord(TexCoord1,lerpedEyeRot);                            \n"
			"    oTexCoord2  = TimewarpTexCoord(TexCoord2,lerpedEyeRot);                            \n"
			"    oPosition = float4(Position.xy, 0.5, 1.0);                                         \n"
			"    oColor = Color.r;  /*For vignette fade*/                                           \n"
			"}";
		
		pRender->InitShaders(vertexShader, pixelShader, &DistortionData.Shaders,
                             &DistortionData.VertexIL,DistortionMeshVertexDesc,5);
	}
	else
	{
		//-------------------------------------------------------------------------------------------
		const char* vertexShader = 
			"float2 EyeToSourceUVScale;                                                                \n"
			"float2 EyeToSourceUVOffset;                                                               \n"
			"void main(in float2 Position    : POSITION,    in float4 Color       : COLOR0,         \n"
			"          in float2 TexCoord0   : TEXCOORD0,   in float2 TexCoord1   : TEXCOORD1,      \n"
			"          in float2 TexCoord2   : TEXCOORD2,                                           \n"
			"          out float4 oPosition  : SV_Position, out float4 oColor     : COLOR,          \n"
			"          out float2 oTexCoord0 : TEXCOORD0,   out float2 oTexCoord1 : TEXCOORD1,      \n"
			"          out float2 oTexCoord2 : TEXCOORD2)                                           \n"
			"{                                                                                      \n"
			// Scale them into ([0,0.5],[0,1]) or ([0.5,0],[0,1]) UV lookup space (depending on eye)
			"    oTexCoord0  = EyeToSourceUVScale * TexCoord0 + EyeToSourceUVOffset;                      \n"
			"    oTexCoord1  = EyeToSourceUVScale * TexCoord1 + EyeToSourceUVOffset;                      \n"
			"    oTexCoord2  = EyeToSourceUVScale * TexCoord2 + EyeToSourceUVOffset;                      \n"
			"    oPosition = float4(Position.xy, 0.5, 1.0);                                         \n"
			"    oColor = Color.r;  /*For vignette fade*/                                           \n"
			"}";
		
		pRender->InitShaders(vertexShader, pixelShader, &DistortionData.Shaders,
                             &DistortionData.VertexIL,DistortionMeshVertexDesc,5);
	}
}
Code Example #11
int
setup_rift(struct weston_compositor *compositor)
{
  struct oculus_rift *rift = compositor->rift;

  rift->enabled = 1;

  rift->screen_z = -5.0;
  rift->screen_scale = 1.0;

  weston_compositor_add_key_binding(compositor, KEY_5, MODIFIER_SUPER, 
      toggle_sbs, compositor);
  weston_compositor_add_key_binding(compositor, KEY_6, MODIFIER_SUPER, 
      toggle_rotate, compositor);
  weston_compositor_add_key_binding(compositor, KEY_7, MODIFIER_SUPER, 
      move_in, compositor);
  weston_compositor_add_key_binding(compositor, KEY_8, MODIFIER_SUPER, 
      move_out, compositor);
  weston_compositor_add_key_binding(compositor, KEY_9, MODIFIER_SUPER, 
      scale_up, compositor);
  weston_compositor_add_key_binding(compositor, KEY_0, MODIFIER_SUPER, 
      scale_down, compositor);

  /*// use this at some point in the future to detect and grab the rift display
  struct weston_output *output;
  wl_list_for_each(output, &compositor->output_list, link)
  {
    weston_log("Output (%i): %s\n\t%ix%i\n", output->id, output->name,
        output->width, output->height);
  }*/

  rift->distortion_shader = calloc(1, sizeof *(rift->distortion_shader));
  struct distortion_shader_ *d = rift->distortion_shader;
  d->program = CreateProgram(distortion_vertex_shader, distortion_fragment_shader);
  d->EyeToSourceUVScale = glGetUniformLocation(d->program, "EyeToSourceUVScale");
  d->EyeToSourceUVOffset = glGetUniformLocation(d->program, "EyeToSourceUVOffset");
  d->RightEye = glGetUniformLocation(d->program, "RightEye");
  d->angle = glGetUniformLocation(d->program, "angle");
  d->Position = glGetAttribLocation(d->program, "Position");
  d->TexCoord0 = glGetAttribLocation(d->program, "TexCoord0");
  d->TexCoordR = glGetAttribLocation(d->program, "TexCoordR");
  d->TexCoordG = glGetAttribLocation(d->program, "TexCoordG");
  d->TexCoordB = glGetAttribLocation(d->program, "TexCoordB");
  d->eyeTexture = glGetAttribLocation(d->program, "Texture0");

  rift->eye_shader = calloc(1, sizeof *(rift->eye_shader));
  struct eye_shader_ *e = rift->eye_shader;
  e->program = CreateProgram(eye_vertex_shader, eye_fragment_shader);
  e->Position = glGetAttribLocation(d->program, "Position");
  e->TexCoord0 = glGetAttribLocation(d->program, "TexCoord0");
  e->Projection = glGetUniformLocation(e->program, "Projection");
  e->ModelView = glGetUniformLocation(e->program, "ModelView");
  e->virtualScreenTexture = glGetAttribLocation(d->program, "Texture0");

  rift->scene = calloc(1, sizeof *(rift->scene));
  glGenBuffers(1, &rift->scene->vertexBuffer);
  glBindBuffer(GL_ARRAY_BUFFER, rift->scene->vertexBuffer);
  static const GLfloat rectangle[] = 
    {-1.0f, -1.0f, -0.5f, 
      1.0f, -1.0f, -0.5f, 
     -1.0f, 1.0f, -0.5f,
      1.0f, -1.0f, -0.5f, 
      1.0f, 1.0f, -0.5f,
     -1.0f, 1.0f, -0.5f};
  glBufferData(GL_ARRAY_BUFFER, sizeof(rectangle), rectangle, GL_STATIC_DRAW);

  glGenBuffers(2, &rift->scene->SBSuvsBuffer[0]);
  glGenBuffers(1, &rift->scene->uvsBuffer);
  static const GLfloat uvs[3][12] = 
   {{ 0.0, 0.0,
      0.5, 0.0,
      0.0, 1.0,
      0.5, 0.0,
      0.5, 1.0,
      0.0, 1.0},
   {  0.5, 0.0,
      1.0, 0.0,
      0.5, 1.0,
      1.0, 0.0,
      1.0, 1.0,
      0.5, 1.0},
   {  0.0, 0.0,
      1.0, 0.0,
      0.0, 1.0,
      1.0, 0.0,
      1.0, 1.0,
      0.0, 1.0}};
  glBindBuffer(GL_ARRAY_BUFFER, rift->scene->SBSuvsBuffer[0]);
  glBufferData(GL_ARRAY_BUFFER, sizeof(uvs[0]), uvs[0], GL_STATIC_DRAW);

  glBindBuffer(GL_ARRAY_BUFFER, rift->scene->SBSuvsBuffer[1]);
  glBufferData(GL_ARRAY_BUFFER, sizeof(uvs[1]), uvs[1], GL_STATIC_DRAW);

  glBindBuffer(GL_ARRAY_BUFFER, rift->scene->uvsBuffer);
  glBufferData(GL_ARRAY_BUFFER, sizeof(uvs[2]), uvs[2], GL_STATIC_DRAW);

  rift->width = 1920;
  rift->height = 1080;

  glGenTextures(1, &rift->fbTexture);
  glBindTexture(GL_TEXTURE_2D, rift->fbTexture);
  glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, rift->width, rift->height, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0);
  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

  glGenFramebuffers(1, &rift->redirectedFramebuffer);
  glBindFramebuffer(GL_FRAMEBUFFER, rift->redirectedFramebuffer);
  glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, rift->fbTexture, 0); show_error();
  if(glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE)
  {
    switch(glCheckFramebufferStatus(GL_FRAMEBUFFER))
    {
      case GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT: weston_log("incomplete attachment\n"); break;
      case GL_FRAMEBUFFER_INCOMPLETE_DIMENSIONS: weston_log("incomplete dimensions\n"); break;
      case GL_FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT: weston_log("incomplete missing attachment\n"); break;
      case GL_FRAMEBUFFER_UNSUPPORTED: weston_log("unsupported\n"); break;
    }

    weston_log("framebuffer not working\n");
    show_error();
    exit(1);
  }
  glClear(GL_COLOR_BUFFER_BIT);

  /*EGLint pbufferAttributes[] = {
     EGL_WIDTH,           rift->width,
     EGL_HEIGHT,          rift->height,
     EGL_TEXTURE_FORMAT,  EGL_TEXTURE_RGB,
     EGL_TEXTURE_TARGET,  EGL_TEXTURE_2D,
     EGL_NONE
  };

  rift->pbuffer = eglCreatePbufferSurface(
      rift->egl_display, rift->egl_config, 
      pbufferAttributes);

  glGenTextures(1, &(rift->texture));
  glBindTexture(GL_TEXTURE_2D, rift->texture);
  //glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, rift->width, rift->height, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0);
  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

  eglMakeCurrent(rift->egl_display, rift->pbuffer, rift->pbuffer, rift->egl_context);
  eglBindTexImage(rift->egl_display, rift->pbuffer, EGL_BACK_BUFFER);
  eglMakeCurrent(rift->egl_display, rift->orig_surface, rift->orig_surface, rift->egl_context);*/

  ovr_Initialize(0);
  rift->hmd = ovrHmd_Create(0);
  if(rift->hmd == NULL)
  {
    rift->hmd = ovrHmd_CreateDebug(ovrHmd_DK2);
  }
  ovrHmd_ConfigureTracking(rift->hmd, ovrTrackingCap_Orientation | 
      ovrTrackingCap_Position | ovrTrackingCap_MagYawCorrection, 0);
  ovrHmd_ResetFrameTiming(rift->hmd, 0);

  int eye;
  for(eye = 0; eye < 2; eye++)
  {
    ovrFovPort fov = rift->hmd->DefaultEyeFov[eye];
    ovrEyeRenderDesc renderDesc = ovrHmd_GetRenderDesc(rift->hmd, eye, fov);
    struct EyeArg *eyeArg = &rift->eyeArgs[eye];

    eyeArg->projection = ovrMatrix4f_Projection(fov, 0.1, 100000, true);
    /*int j, k;
    for(k=0; k<4; k++)
    {
      for(j=0; j<4; j++)
      {
        printf("%f\t", eyeArg->projection.M[k][j]);
      }
      printf("\n");
    }*/
    rift->hmdToEyeOffsets[eye] = renderDesc.HmdToEyeViewOffset;
    ovrRecti texRect;
    texRect.Size = ovrHmd_GetFovTextureSize(rift->hmd, eye, rift->hmd->DefaultEyeFov[eye],
        1.0f);
    texRect.Pos.x = texRect.Pos.y = 0;
    eyeArg->textureWidth = texRect.Size.w;
    eyeArg->textureHeight = texRect.Size.h;

    glGenTextures(1, &eyeArg->texture);
    glBindTexture(GL_TEXTURE_2D, eyeArg->texture);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, eyeArg->textureWidth, eyeArg->textureHeight, 0, GL_RGBA, GL_UNSIGNED_BYTE, NULL);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

    glGenFramebuffers(1, &eyeArg->framebuffer); show_error();
    glBindFramebuffer(GL_FRAMEBUFFER, eyeArg->framebuffer); show_error();
    glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, eyeArg->texture, 0); show_error();
    if(glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE)
    {
      switch(glCheckFramebufferStatus(GL_FRAMEBUFFER))
      {
        case GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT: weston_log("incomplete attachment\n"); break;
        case GL_FRAMEBUFFER_INCOMPLETE_DIMENSIONS: weston_log("incomplete dimensions\n"); break;
        case GL_FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT: weston_log("incomplete missing attachment\n"); break;
        case GL_FRAMEBUFFER_UNSUPPORTED: weston_log("unsupported\n"); break;
      }

      weston_log("framebuffer not working\n");
      show_error();
      exit(1);
    }
    if(eye)
    {
      glClearColor(1.0, 0.0, 0.0, 1.0); show_error();
    }
    else
    {
      glClearColor(0.0, 1.0, 0.0, 1.0); show_error();
    }
    glClear(GL_COLOR_BUFFER_BIT); show_error();

    /*EGLint eyePbufferAttributes[] = {
       EGL_WIDTH,           texRect.Size.w,
       EGL_HEIGHT,          texRect.Size.h,
       EGL_TEXTURE_FORMAT,  EGL_TEXTURE_RGB,
       EGL_TEXTURE_TARGET,  EGL_TEXTURE_2D,
       EGL_NONE
    };

    eyeArg.surface = eglCreatePbufferSurface(
        rift->egl_display, rift->egl_config, 
        eyePbufferAttributes);*/

    ovrVector2f scaleAndOffset[2];
    ovrHmd_GetRenderScaleAndOffset(fov, texRect.Size, texRect, scaleAndOffset);
    eyeArg->scale = scaleAndOffset[0];
    eyeArg->offset = scaleAndOffset[1];

    ovrHmd_CreateDistortionMesh(rift->hmd, eye, fov, 0, &eyeArg->mesh);

    glGenBuffers(1, &eyeArg->indexBuffer);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, eyeArg->indexBuffer);
    glBufferData(GL_ELEMENT_ARRAY_BUFFER, eyeArg->mesh.IndexCount * sizeof(unsigned short), eyeArg->mesh.pIndexData, GL_STATIC_DRAW);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);

    float vertices_buffer[eyeArg->mesh.VertexCount*2];
    float uvs_buffer[3][eyeArg->mesh.VertexCount*2];
    unsigned int i;
    for(i=0; i<eyeArg->mesh.VertexCount; i++)
    {
      ovrDistortionVertex vertex = eyeArg->mesh.pVertexData[i];
      vertices_buffer[i*2] = vertex.ScreenPosNDC.x;
      vertices_buffer[(i*2)+1] = vertex.ScreenPosNDC.y;
      uvs_buffer[0][i*2] = vertex.TanEyeAnglesR.x;
      uvs_buffer[0][(i*2)+1] = vertex.TanEyeAnglesR.y;
      uvs_buffer[1][i*2] = vertex.TanEyeAnglesG.x;
      uvs_buffer[1][(i*2)+1] = vertex.TanEyeAnglesG.y;
      uvs_buffer[2][i*2] = vertex.TanEyeAnglesB.x;
      uvs_buffer[2][(i*2)+1] = vertex.TanEyeAnglesB.y;
    }

    glGenBuffers(1, &eyeArg->vertexBuffer);
    glBindBuffer(GL_ARRAY_BUFFER, eyeArg->vertexBuffer);
    glBufferData(GL_ARRAY_BUFFER, eyeArg->mesh.VertexCount * sizeof(GLfloat) * 2, vertices_buffer, GL_STATIC_DRAW);
    glGenBuffers(3, &eyeArg->uvsBuffer[0]);
    for(i=0; i<3; i++)
    {
      glBindBuffer(GL_ARRAY_BUFFER, eyeArg->uvsBuffer[i]);
      glBufferData(GL_ARRAY_BUFFER, eyeArg->mesh.VertexCount * sizeof(GLfloat) * 2, uvs_buffer[i], GL_STATIC_DRAW);
      glBindBuffer(GL_ARRAY_BUFFER, 0);
    }
  }

  return 0;
}
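
setup_rift() only builds the resources; the per-frame draw that consumes them lies outside this excerpt. As a hedged illustration, a companion routine might look like the following C sketch. draw_eye_distortion is a hypothetical name, the sketch assumes eyeArg->mesh stays alive after setup (it is never destroyed above), and it leaves the angle uniform and the TexCoord0 attribute unset.

/* Hypothetical companion to setup_rift(): draw one eye's distortion mesh
 * using the buffers and uniform locations created above. */
static void
draw_eye_distortion(struct oculus_rift *rift, int eye)
{
  struct distortion_shader_ *d = rift->distortion_shader;
  struct EyeArg *eyeArg = &rift->eyeArgs[eye];
  GLint uv_locs[3] = { d->TexCoordR, d->TexCoordG, d->TexCoordB };
  int i;

  glUseProgram(d->program);
  glUniform2f(d->EyeToSourceUVScale, eyeArg->scale.x, eyeArg->scale.y);
  glUniform2f(d->EyeToSourceUVOffset, eyeArg->offset.x, eyeArg->offset.y);
  glUniform1i(d->RightEye, eye);

  glActiveTexture(GL_TEXTURE0);
  glBindTexture(GL_TEXTURE_2D, eyeArg->texture);
  glUniform1i(d->eyeTexture, 0);

  glBindBuffer(GL_ARRAY_BUFFER, eyeArg->vertexBuffer);
  glEnableVertexAttribArray(d->Position);
  glVertexAttribPointer(d->Position, 2, GL_FLOAT, GL_FALSE, 0, 0);

  /* one UV stream per colour channel, filled from the distortion mesh */
  for (i = 0; i < 3; i++)
  {
    glBindBuffer(GL_ARRAY_BUFFER, eyeArg->uvsBuffer[i]);
    glEnableVertexAttribArray(uv_locs[i]);
    glVertexAttribPointer(uv_locs[i], 2, GL_FLOAT, GL_FALSE, 0, 0);
  }

  glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, eyeArg->indexBuffer);
  glDrawElements(GL_TRIANGLES, eyeArg->mesh.IndexCount, GL_UNSIGNED_SHORT, 0);
}
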
Code example #12
osg::Geode* OculusDevice::distortionMesh(Eye eye, osg::Program* program, int x, int y, int w, int h, bool splitViewport) {
	osg::ref_ptr<osg::Geode> geode = new osg::Geode;
	// Allocate & generate distortion mesh vertices.
	ovrDistortionMesh meshData;
	ovrHmd_CreateDistortionMesh(m_hmdDevice, m_eyeRenderDesc[eye].Eye, m_eyeRenderDesc[eye].Fov, ovrDistortionCap_Chromatic | ovrDistortionCap_TimeWarp, &meshData);
	
	// Now parse the vertex data and create a render ready vertex buffer from it
	ovrDistortionVertex* ov = meshData.pVertexData;
	osg::Vec2Array* positionArray = new osg::Vec2Array;
	osg::Vec4Array* colorArray = new osg::Vec4Array;
	osg::Vec2Array* textureRArray = new osg::Vec2Array;
	osg::Vec2Array* textureGArray = new osg::Vec2Array;
	osg::Vec2Array* textureBArray = new osg::Vec2Array;

	for (unsigned vertNum = 0; vertNum < meshData.VertexCount; ++vertNum)
	{
		if (splitViewport) {
			// Positions need to be scaled and translated if we are using one viewport per eye
			if (eye == LEFT) {
				positionArray->push_back(osg::Vec2f(2 * ov[vertNum].ScreenPosNDC.x + 1.0, ov[vertNum].ScreenPosNDC.y));
			}
			else if (eye == RIGHT) {
				positionArray->push_back(osg::Vec2f(2 * ov[vertNum].ScreenPosNDC.x - 1.0, ov[vertNum].ScreenPosNDC.y));
			}
		}
		else {
			positionArray->push_back(osg::Vec2f(ov[vertNum].ScreenPosNDC.x, ov[vertNum].ScreenPosNDC.y));
		}
		
		colorArray->push_back(osg::Vec4f(ov[vertNum].VignetteFactor, ov[vertNum].VignetteFactor, ov[vertNum].VignetteFactor, ov[vertNum].TimeWarpFactor));
		textureRArray->push_back(osg::Vec2f(ov[vertNum].TanEyeAnglesR.x, ov[vertNum].TanEyeAnglesR.y));
		textureGArray->push_back(osg::Vec2f(ov[vertNum].TanEyeAnglesG.x, ov[vertNum].TanEyeAnglesG.y));
		textureBArray->push_back(osg::Vec2f(ov[vertNum].TanEyeAnglesB.x, ov[vertNum].TanEyeAnglesB.y));
	}
	
	// Get triangle indices
	osg::UShortArray* indexArray = new osg::UShortArray;
	unsigned short* index = meshData.pIndexData;
	for (unsigned indexNum = 0; indexNum < meshData.IndexCount; ++indexNum) {
		indexArray->push_back(index[indexNum]);
	}

	// Deallocate the mesh data
	ovrHmd_DestroyDistortionMesh(&meshData);
	
	osg::ref_ptr<osg::Geometry> geometry = new osg::Geometry;
	geometry->setUseDisplayList( false );
	geometry->setUseVertexBufferObjects(true);
	osg::ref_ptr<osg::DrawElementsUShort> drawElement = new osg::DrawElementsUShort(osg::PrimitiveSet::TRIANGLES, indexArray->size(), (GLushort*) indexArray->getDataPointer());
	geometry->addPrimitiveSet(drawElement);

	GLuint positionLoc = 0;
	GLuint colorLoc = 1;
	GLuint texCoord0Loc = 2;
	GLuint texCoord1Loc = 3;
	GLuint texCoord2Loc = 4;

	program->addBindAttribLocation("Position", positionLoc);
	geometry->setVertexAttribArray(positionLoc, positionArray);
	geometry->setVertexAttribBinding(positionLoc, osg::Geometry::BIND_PER_VERTEX);

	program->addBindAttribLocation("Color", colorLoc);
	geometry->setVertexAttribArray(colorLoc, colorArray);
	geometry->setVertexAttribBinding(colorLoc, osg::Geometry::BIND_PER_VERTEX);

	program->addBindAttribLocation("TexCoord0", texCoord0Loc);
	geometry->setVertexAttribArray(texCoord0Loc, textureRArray);
	geometry->setVertexAttribBinding(texCoord0Loc, osg::Geometry::BIND_PER_VERTEX);

	program->addBindAttribLocation("TexCoord1", texCoord1Loc);
	geometry->setVertexAttribArray(texCoord1Loc, textureGArray);
	geometry->setVertexAttribBinding(texCoord1Loc, osg::Geometry::BIND_PER_VERTEX);

	program->addBindAttribLocation("TexCoord2", texCoord2Loc);
	geometry->setVertexAttribArray(texCoord2Loc, textureBArray);
	geometry->setVertexAttribBinding(texCoord2Loc, osg::Geometry::BIND_PER_VERTEX);

	// Compute UV scale and offset
	ovrRecti eyeRenderViewport;
	eyeRenderViewport.Pos.x = x;
	eyeRenderViewport.Pos.y = y;
	eyeRenderViewport.Size.w = w;
	eyeRenderViewport.Size.h = h;
	ovrSizei renderTargetSize;
	renderTargetSize.w = m_renderTargetSize.w / 2;
	renderTargetSize.h = m_renderTargetSize.h;
	ovrHmd_GetRenderScaleAndOffset(m_eyeRenderDesc[eye].Fov, renderTargetSize, eyeRenderViewport, m_UVScaleOffset[eye]);
	geode->addDrawable(geometry);
	return geode.release();
}
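
distortionMesh() computes m_UVScaleOffset but leaves its delivery to the shader implicit. A minimal sketch of that step, placed inside the function just before geode.release(), assuming the SDK's uniform naming convention ("EyeToSourceUVScale" / "EyeToSourceUVOffset") and standard osg::Uniform plumbing:

	// Sketch only: expose the scale/offset computed above as shader uniforms.
	osg::StateSet* stateSet = geode->getOrCreateStateSet();
	stateSet->addUniform(new osg::Uniform("EyeToSourceUVScale",
		osg::Vec2(m_UVScaleOffset[eye][0].x, m_UVScaleOffset[eye][0].y)));
	stateSet->addUniform(new osg::Uniform("EyeToSourceUVOffset",
		osg::Vec2(m_UVScaleOffset[eye][1].x, m_UVScaleOffset[eye][1].y)));
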
Code example #13
//-------------------------------------------------------------------------------------
int Init()
{
    // Initializes LibOVR, and the Rift
    ovr_Initialize();
    if (!HMD)
    {
        HMD = ovrHmd_Create(0);
        if (!HMD)
        {
            MessageBoxA(NULL, "Oculus Rift not detected.", "", MB_OK);
            return(1);
        }
        if (HMD->ProductName[0] == '\0')
            MessageBoxA(NULL, "Rift detected, display not enabled.", "", MB_OK);
    }

	//Setup Window and Graphics - use window frame if relying on Oculus driver
	const int backBufferMultisample = 1;
    bool UseAppWindowFrame = (HMD->HmdCaps & ovrHmdCap_ExtendDesktop) ? false : true;
    HWND window = Util_InitWindowAndGraphics(Recti(HMD->WindowsPos, HMD->Resolution),
                                         FullScreen, backBufferMultisample, UseAppWindowFrame,&pRender);
	if (!window) return 1;
	ovrHmd_AttachToWindow(HMD, window, NULL, NULL);

    //Configure Stereo settings.
    Sizei recommenedTex0Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Left,  HMD->DefaultEyeFov[0], 1.0f);
    Sizei recommenedTex1Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Right, HMD->DefaultEyeFov[1], 1.0f);
	Sizei RenderTargetSize;
    RenderTargetSize.w = recommenedTex0Size.w + recommenedTex1Size.w;
    RenderTargetSize.h = max ( recommenedTex0Size.h, recommenedTex1Size.h );

    const int eyeRenderMultisample = 1;
    pRendertargetTexture = pRender->CreateTexture(Texture_RGBA | Texture_RenderTarget |
                                                  eyeRenderMultisample,
                                                  RenderTargetSize.w, RenderTargetSize.h, NULL);
    // The actual RT size may be different due to HW limits.
    RenderTargetSize.w = pRendertargetTexture->GetWidth();
    RenderTargetSize.h = pRendertargetTexture->GetHeight();

    // Initialize eye rendering information.
    // The viewport sizes are re-computed in case RenderTargetSize changed due to HW limitations.
    ovrFovPort eyeFov[2] = { HMD->DefaultEyeFov[0], HMD->DefaultEyeFov[1] } ;

    EyeRenderViewport[0].Pos  = Vector2i(0,0);
    EyeRenderViewport[0].Size = Sizei(RenderTargetSize.w / 2, RenderTargetSize.h);
    EyeRenderViewport[1].Pos  = Vector2i((RenderTargetSize.w + 1) / 2, 0);
    EyeRenderViewport[1].Size = EyeRenderViewport[0].Size;

    #if SDK_RENDER
	// Query OGL texture data.
	EyeTexture[0].OGL.Header.API			= ovrRenderAPI_OpenGL;
	EyeTexture[0].OGL.Header.TextureSize	= RenderTargetSize;
	EyeTexture[0].OGL.Header.RenderViewport = EyeRenderViewport[0];
	EyeTexture[0].OGL.TexId					= pRendertargetTexture->TexId;

    // Right eye uses the same texture, but different rendering viewport.
    EyeTexture[1]							= EyeTexture[0];
    EyeTexture[1].OGL.Header.RenderViewport	= EyeRenderViewport[1];

    // Configure OpenGL.
    ovrGLConfig oglcfg;
    oglcfg.OGL.Header.API					= ovrRenderAPI_OpenGL;
    oglcfg.OGL.Header.RTSize				= Sizei(HMD->Resolution.w, HMD->Resolution.h);
    oglcfg.OGL.Header.Multisample			= backBufferMultisample;
	oglcfg.OGL.Window						= window;
	oglcfg.OGL.DC							= GetDC(window);

    if (!ovrHmd_ConfigureRendering(HMD, &oglcfg.Config,
		                           ovrDistortionCap_Chromatic | ovrDistortionCap_Vignette |
                                   ovrDistortionCap_TimeWarp | ovrDistortionCap_Overdrive,
								   eyeFov, EyeRenderDesc))	return(1);	

    #else
	//Distortion vertex shader
	const char* vertexShader =
		"#version 110																			\n"
		"uniform vec2 EyeToSourceUVScale;														\n"
		"uniform vec2 EyeToSourceUVOffset;														\n"
		"uniform mat4 EyeRotationStart;															\n"
		"uniform mat4 EyeRotationEnd;															\n"
		"attribute vec2 Position;																\n"
		"attribute vec2 inTWLF_V;																\n"		
		"attribute vec2 inTexCoord0;															\n"
		"attribute vec2 inTexCoord1;															\n"
		"attribute vec2 inTexCoord2;															\n"
		"varying vec4 oPosition;																\n"
		"varying vec2 oTexCoord0;																\n"
		"varying vec2 oTexCoord1;																\n"
		"varying vec2 oTexCoord2;																\n"
		"varying float oVignette;																\n"
		"vec2 TexCoord0 = vec2(inTexCoord0.x, -inTexCoord0.y);									\n"
		"vec2 TexCoord1 = vec2(inTexCoord1.x, -inTexCoord1.y);									\n"
		"vec2 TexCoord2 = vec2(inTexCoord2.x, -inTexCoord2.y);									\n"
		"float timewarpLerpFactor = inTWLF_V.x;													\n"
		"float Vignette = inTWLF_V.y;															\n"
		"vec2 TimewarpTexCoord( in vec2 TexCoord, in mat4 rotMat )								\n"
		"{																						\n"
		// Vertex inputs are in TanEyeAngle space for the R,G,B channels (i.e. after chromatic 
		// aberration and distortion). These are now "real world" vectors in direction (x,y,1) 
		// relative to the eye of the HMD.	Apply the 3x3 timewarp rotation to these vectors.
		"   vec3 transformed = vec3( ( rotMat * vec4( TexCoord.xy , 1.00000, 1.00000) ).xyz );	\n"
		// Project them back onto the Z=1 plane of the rendered images.
		"   vec2 flattened = (transformed.xy  / transformed.z );								\n"
		// Scale them into ([0,0.5],[0,1]) or ([0.5,0],[0,1]) UV lookup space (depending on eye)
		"   return ((EyeToSourceUVScale * flattened) + EyeToSourceUVOffset);					\n"
		"}																						\n"
		"mat4 mat4_lerp( in mat4 x, in mat4 y, in mat4 s )										\n"
		"{																						\n"
		"	return mat4(mix(x[0],y[0],s[0]), mix(x[1],y[1],s[1]), mix(x[2],y[2],s[2]), mix(x[3],y[3],s[3]));\n"
		"}																						\n"
		"void main()																			\n"
		"{																						\n"
		"   mat4 lerpedEyeRot = mat4_lerp( EyeRotationStart, EyeRotationEnd, mat4( timewarpLerpFactor));\n"
		"   oTexCoord0 = TimewarpTexCoord( TexCoord0, lerpedEyeRot);							\n"
		"   oTexCoord1 = TimewarpTexCoord( TexCoord1, lerpedEyeRot);							\n"
		"   oTexCoord2 = TimewarpTexCoord( TexCoord2, lerpedEyeRot);							\n"
		"   oPosition = vec4( Position.xy , 0.500000, 1.00000);									\n"
		"   oVignette = Vignette;																\n"
		"   gl_Position = oPosition;															\n"
		"}";

	//Distortion pixel shader
	const char* pixelShader = 
		"#version 110																			\n"
		"uniform sampler2D Texture0;															\n"
		"varying vec4 oPosition;																\n"
		"varying vec2 oTexCoord0;																\n"
		"varying vec2 oTexCoord1;																\n"
		"varying vec2 oTexCoord2;																\n"
		"varying float oVignette;																\n"
		"void main()																			\n"
		"{																						\n"
		// 3 samples for fixing chromatic aberrations
		"   float R = texture2D(Texture0, oTexCoord0.xy).r;										\n"
		"   float G = texture2D(Texture0, oTexCoord1.xy).g;										\n"
		"   float B = texture2D(Texture0, oTexCoord2.xy).b;										\n"
		"   gl_FragColor = (oVignette*vec4(R,G,B,1));											\n"
		"}";

	pRender->InitShaders(vertexShader, pixelShader, &Shaders);

    for ( int eyeNum = 0; eyeNum < 2; eyeNum++ )
    {
        // Allocate mesh vertices, registering with renderer using the OVR vertex format.
        ovrDistortionMesh meshData;
        ovrHmd_CreateDistortionMesh(HMD, (ovrEyeType) eyeNum, eyeFov[eyeNum],
			                        ovrDistortionCap_Chromatic | ovrDistortionCap_TimeWarp, &meshData);
        MeshVBs[eyeNum] = *pRender->CreateBuffer();
        MeshVBs[eyeNum]->Data(Buffer_Vertex,meshData.pVertexData,sizeof(ovrDistortionVertex)*meshData.VertexCount);
        MeshIBs[eyeNum] = *pRender->CreateBuffer();
        MeshIBs[eyeNum]->Data(Buffer_Index,meshData.pIndexData,sizeof(unsigned short) * meshData.IndexCount);
        ovrHmd_DestroyDistortionMesh( &meshData );

		//Create eye render description for use later
		EyeRenderDesc[eyeNum] = ovrHmd_GetRenderDesc(HMD, (ovrEyeType) eyeNum,  eyeFov[eyeNum]);

		//Do scale and offset
		ovrHmd_GetRenderScaleAndOffset(eyeFov[eyeNum],RenderTargetSize, EyeRenderViewport[eyeNum], UVScaleOffset[eyeNum]);
	}

    #endif

    ovrHmd_SetEnabledCaps(HMD, ovrHmdCap_LowPersistence | ovrHmdCap_DynamicPrediction);

	// Start the sensor which informs of the Rift's pose and motion
    ovrHmd_ConfigureTracking(HMD,   ovrTrackingCap_Orientation |
                                    ovrTrackingCap_MagYawCorrection |
                                    ovrTrackingCap_Position, 0);

    // This creates lights and models.
  	pRoomScene = new Scene;
	PopulateRoomScene(pRoomScene, pRender);

    return (0);
}
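
The #else branch above only builds the meshes and compiles the shaders; at present time each eye's uniforms still have to be filled in. A sketch of that pass, assuming the sample framework's ShaderFill/SetUniform helpers and an eyeRenderPoses[2] array holding this frame's ovrPosef per eye (both assumptions, not shown in this excerpt):

    // Sketch: client-distortion present pass, one eye at a time.
    ShaderFill distortionShaderFill(Shaders);
    distortionShaderFill.SetTexture(0, pRendertargetTexture);

    for (int eyeNum = 0; eyeNum < 2; eyeNum++)
    {
        // Scale/offset computed in Init() above.
        Shaders->SetUniform2f("EyeToSourceUVScale",
                              UVScaleOffset[eyeNum][0].x, UVScaleOffset[eyeNum][0].y);
        Shaders->SetUniform2f("EyeToSourceUVOffset",
                              UVScaleOffset[eyeNum][1].x, UVScaleOffset[eyeNum][1].y);

        // Timewarp rotations bracketing this eye's render pose; transposed
        // to match the GL column-major convention.
        ovrMatrix4f timeWarpMatrices[2];
        ovrHmd_GetEyeTimewarpMatrices(HMD, (ovrEyeType)eyeNum,
                                      eyeRenderPoses[eyeNum], timeWarpMatrices);
        Shaders->SetUniform4x4f("EyeRotationStart", Matrix4f(timeWarpMatrices[0]).Transposed());
        Shaders->SetUniform4x4f("EyeRotationEnd",   Matrix4f(timeWarpMatrices[1]).Transposed());

        pRender->Render(&distortionShaderFill, MeshVBs[eyeNum], MeshIBs[eyeNum]);
    }
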
Code example #14
int Init()
{
	ovr_Initialize();
	HMD = ovrHmd_Create(0);
	if (!HMD)
	{
		MessageBox(NULL, "Oculus Rift not detected.", "", MB_OK);
		return 1;
	}
	if (HMD->ProductName[0] == '\0')
	{
		MessageBox(NULL, "Rift detected, display not enabled.", "", MB_OK);
	}

	//Setup Window and Graphics - use window frame if relying on Oculus driver
	const int backBufferMultisample = 1;
	bool UseAppWindowFrame = true;
	HWND window = Util_InitWindowAndGraphics(Recti(HMD->WindowsPos, HMD->Resolution), 
		FullScreen, backBufferMultisample, UseAppWindowFrame, &pRender);
	if (!window) return 1;
	ovrHmd_AttachToWindow(HMD, window, NULL, NULL);

	Sizei recommenedTex0Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Left, HMD->DefaultEyeFov[0], 1.0f);
	Sizei recommenedTex1Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Right, HMD->DefaultEyeFov[1], 1.0f);
	Sizei RenderTargetSize;
	RenderTargetSize.w = recommenedTex0Size.w + recommenedTex1Size.w;
	RenderTargetSize.h = max(recommenedTex0Size.h, recommenedTex1Size.h);

	// Override the recommended sizes with the panel resolution; this port
	// then allocates a reduced-size render target below.
	RenderTargetSize.w = HMD->Resolution.w;
	RenderTargetSize.h = HMD->Resolution.h;

	//const int eyeRenderMultisample = 1;
	pRendertargetTexture = pRender->CreateRenderTarget(RenderTargetSize.w/2, RenderTargetSize.h/2);
	//pRendertargetTexture = pRender->CreateRenderTarget(512, 512);
	RenderTargetSize.w = pRendertargetTexture->Width;
	RenderTargetSize.h = pRendertargetTexture->Height;

	IDirect3DSurface9 *zb = 0;
	pRender->Device->GetDepthStencilSurface(&zb);
	D3DSURFACE_DESC d;
	zb->GetDesc(&d);	// queried for inspection only; 'd' is not used below
	zb->Release();		// GetDepthStencilSurface adds a reference; release it to avoid a leak

	// Initialize eye rendering information.
	// The viewport sizes are re-computed in case RenderTargetSize changed due to HW limitations.
	ovrFovPort eyeFov[2] = { HMD->DefaultEyeFov[0], HMD->DefaultEyeFov[1] };

	EyeRenderViewport[0].Pos  = Vector2i(0, 0);
	EyeRenderViewport[0].Size = Sizei(RenderTargetSize.w / 2, RenderTargetSize.h);
	EyeRenderViewport[1].Pos  = Vector2i((RenderTargetSize.w + 1) / 2, 0);
	EyeRenderViewport[1].Size = EyeRenderViewport[0].Size;

	// ---------------------

	DistortionShaders = pRender->CreateShaderSet();
	DistortionShaders->SetShader(pRender->LoadBuiltinShader(Shader_Vertex, VShader_Distortion));
	DistortionShaders->SetShader(pRender->LoadBuiltinShader(Shader_Pixel, PShader_Distortion));
	DistortionDecl = VertexDecl::GetDecl(VertexType_Distortion);

	for (int eyeNum = 0; eyeNum < 2; ++eyeNum)
	{
		ovrDistortionMesh meshData;
		ovrHmd_CreateDistortionMesh(HMD, (ovrEyeType)eyeNum, eyeFov[eyeNum],
			ovrDistortionCap_Chromatic | ovrDistortionCap_TimeWarp, &meshData);
		MeshVBs[eyeNum] = pRender->CreateVertexBuffer();
		MeshVBs[eyeNum]->Data(meshData.pVertexData, sizeof(ovrDistortionVertex)*meshData.VertexCount);
		MeshIBs[eyeNum] = pRender->CreateIndexBuffer();
		MeshIBs[eyeNum]->Data(meshData.pIndexData, sizeof(unsigned short)*meshData.IndexCount);

		MeshVBCnts[eyeNum] = meshData.VertexCount;
		MeshIBCnts[eyeNum] = meshData.IndexCount;
		ovrHmd_DestroyDistortionMesh(&meshData);

		EyeRenderDesc[eyeNum] = ovrHmd_GetRenderDesc(HMD, (ovrEyeType)eyeNum, eyeFov[eyeNum]);

		ovrHmd_GetRenderScaleAndOffset(eyeFov[eyeNum], RenderTargetSize, EyeRenderViewport[eyeNum], UVScaleOffset[eyeNum]);
	}

	ovrHmd_SetEnabledCaps(HMD, ovrHmdCap_LowPersistence | ovrHmdCap_DynamicPrediction);

	ovrHmd_ConfigureTracking(HMD,
		ovrTrackingCap_Orientation | ovrTrackingCap_MagYawCorrection | ovrTrackingCap_Position, 0);

	// ---------------------

	pRoomScene = new Scene;
	PopulateRoomScene(pRoomScene, pRender);

	// texture model
	ShaderSet* ss = pRender->CreateShaderSet();
	ss->SetShader(pRender->LoadBuiltinShader(Shader_Vertex, VShader_MVP_UV));
	ss->SetShader(pRender->LoadBuiltinShader(Shader_Pixel, PShader_UV));

	Model<VertexXYZUV> *pModel2 = new Model<VertexXYZUV>();
	pModel2->Decl = VertexDecl::GetDecl(VertexType_XYZUV);
	pModel2->Fill = new ShaderFill(ss);

	//Texture* ttt = new Texture(pRender);
	//ttt->LoadFromFile("face.tga");
	pModel2->Fill->SetTexture(0, pRendertargetTexture);

	pModel2->AddVertex(VertexXYZUV(0.5f, -1.0f, 0.0f, 0.0f, 0.0f));
	pModel2->AddVertex(VertexXYZUV(2.5f, -1.0f, 0.0f, 1.0f, 0.0f));
	pModel2->AddVertex(VertexXYZUV(0.5f, 1.0f, 0.0f, 0.0f, 1.0f));
	pModel2->AddVertex(VertexXYZUV(2.5f, 1.0f, 0.0f, 1.0f, 1.0f));

	pModel2->AddVertex(VertexXYZUV(-1.0f, -1.5f, -1.0f, 0.0f, 0.0f));
	pModel2->AddVertex(VertexXYZUV(1.0f, -1.5f, -1.0f, 1.0f, 0.0f));
	pModel2->AddVertex(VertexXYZUV(-1.0f, -1.5f, 1.0f, 0.0f, 1.0f));
	pModel2->AddVertex(VertexXYZUV(1.0f, -1.5f, 1.0f, 1.0f, 1.0f));

	pModel2->AddTriangle(0, 1, 2);
	pModel2->AddTriangle(2, 1, 3);
	pModel2->AddTriangle(4, 5, 6);
	pModel2->AddTriangle(6, 5, 7);

	pScene = new Scene;
	pScene->World.Add(pModel2);

    return (0);
}
Code example #15
File: gkOculus.cpp Project: CheryJazz/gkEngine
void gkOculus::InitHMD()
{
	ovrHmd_SetEnabledCaps(HMD, ovrHmdCap_LowPersistence);

	// Start the sensor which informs of the Rift's pose and motion
	ovrHmd_ConfigureTracking(HMD,   ovrTrackingCap_Orientation |
		ovrTrackingCap_MagYawCorrection |
		ovrTrackingCap_Position, 0);

	OVR::Sizei recommenedTex0Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Left,  HMD->DefaultEyeFov[0], 1.0f);
	OVR::Sizei recommenedTex1Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Right, HMD->DefaultEyeFov[1], 1.0f);

	gEnv->pRenderer->SetOverrideSize( recommenedTex0Size.w, recommenedTex0Size.h, true );

	gkTexturePtr tex = gEnv->pSystem->getTextureMngPtr()->getByName(_T("RT_BACKBUFFER_STEREOOUT"));

	for (int eyenum=0; eyenum < 2; ++eyenum)
	{
		m_disortation_renderable_eyes[eyenum] = new gkOculusDisortationRenderable(this);
		m_disortation_renderable_eyes[eyenum]->m_eye_index = eyenum;
		m_disortation_renderable_eyes[eyenum]->HMD = HMD;

		gkNameValuePairList createlist;

		createlist[_T("file")] = _T("engine/assets/meshs/oculus_disort.mtl");

		TCHAR buffer[255];
		_stprintf( buffer, _T("$OculusDisortation_%d"), eyenum );

		gkMaterialPtr mat = gEnv->pSystem->getMaterialMngPtr()->create( buffer, _T("stereo"), &createlist );
		mat->load();
		mat->setTexture( tex , 0);


		m_disortation_renderable_eyes[eyenum]->m_material = mat;

		_stprintf( buffer, _T("$OculusDisortationMesh_%d"), eyenum );

		createlist[_T("type")] =	_T("Pt2T2T2T2T2");
		gkMeshPtr mesh = gEnv->pSystem->getMeshMngPtr()->create( buffer, _T("stereo"), &createlist );
		mesh->load();
		m_disortation_renderable_eyes[eyenum]->m_mesh = mesh;

		ovrFovPort eyeFov[2] = { HMD->DefaultEyeFov[0], HMD->DefaultEyeFov[1] } ;

		// Derive vertical FOV from the up tangent; this assumes a symmetric FOV
		// (in general fovy = atan(UpTan) + atan(DownTan)).
		float fovy = atan( eyeFov[0].UpTan ) * 2;
		gEnv->p3DEngine->getMainCamera()->setFOVy( fovy );

		ovrDistortionMesh meshData;
		ovrHmd_CreateDistortionMesh(HMD, (ovrEyeType)eyenum, eyeFov[eyenum],
			ovrDistortionCap_Chromatic | ovrDistortionCap_TimeWarp, &meshData);

		mesh->getVertexBuffer()->resizeDiscard( meshData.VertexCount );
		mesh->getIndexBuffer()->resizeDiscard( meshData.IndexCount );

		memcpy( mesh->getVertexBuffer()->data, meshData.pVertexData, meshData.VertexCount * sizeof(ovrDistortionVertex) );
		memcpy( mesh->getIndexBuffer()->data, meshData.pIndexData, meshData.IndexCount * sizeof(unsigned short) );

		ovrHmd_DestroyDistortionMesh( &meshData );

		//Create eye render description for use later
		EyeRenderDesc[eyenum] = ovrHmd_GetRenderDesc(HMD, (ovrEyeType)eyenum,  eyeFov[eyenum]);

		//Do scale and offset
		OVR::Sizei RenderTargetSize;
		RenderTargetSize.w = recommenedTex0Size.w;
		RenderTargetSize.h = recommenedTex1Size.h;

		ovrRecti EyeRenderViewport[2];
		EyeRenderViewport[0].Pos  = OVR::Vector2i(0,0);
		EyeRenderViewport[0].Size = OVR::Sizei(RenderTargetSize.w / 2, RenderTargetSize.h);
		EyeRenderViewport[1].Pos  = OVR::Vector2i((RenderTargetSize.w + 1) / 2, 0);
		EyeRenderViewport[1].Size = EyeRenderViewport[0].Size;

		ovrHmd_GetRenderScaleAndOffset(eyeFov[eyenum],RenderTargetSize, EyeRenderViewport[eyenum], m_disortation_renderable_eyes[eyenum]->UVScaleOffset[eyenum]);
	}
}
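
InitHMD() captures EyeRenderDesc but does not show how the eye poses are obtained each frame. The following is a hedged sketch under SDK 0.4.x assumptions: ovrHmd_GetEyePoses is available (added in the 0.4 line), and the offset field is named HmdToEyeViewOffset (earlier revisions called it ViewAdjust).

	// Sketch, not gkEngine code: fetch per-eye poses and build matching
	// projections from the render descriptions captured in InitHMD().
	ovrVector3f hmdToEyeViewOffset[2] = { EyeRenderDesc[0].HmdToEyeViewOffset,
	                                      EyeRenderDesc[1].HmdToEyeViewOffset };
	ovrPosef eyePoses[2];
	ovrHmd_GetEyePoses(HMD, 0 /* frame index */, hmdToEyeViewOffset, eyePoses, NULL);

	for (int eyenum = 0; eyenum < 2; ++eyenum)
	{
		ovrMatrix4f proj = ovrMatrix4f_Projection(EyeRenderDesc[eyenum].Fov,
			0.1f, 1000.0f, true);
		// eyePoses[eyenum] already folds in the per-eye offset; convert it to a
		// view matrix and hand proj/view to the engine camera for this eye.
	}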