void DistortionRenderer::Create_Distortion_Models(void)
{
    // Make the distortion models
    for (int eye = 0; eye < 2; eye++)
    {
        FOR_EACH_EYE* e = &eachEye[eye];

        ovrDistortionMesh meshData;
        ovrHmd_CreateDistortionMesh(HMD, RState.EyeRenderDesc[eye].Eye,
                                    RState.EyeRenderDesc[eye].Fov,
                                    distortionCaps, &meshData);

        e->numVerts   = meshData.VertexCount;
        e->numIndices = meshData.IndexCount;

        device->CreateVertexBuffer((e->numVerts) * sizeof(ovrDistortionVertex), 0, 0,
                                   D3DPOOL_MANAGED, &e->dxVerts, NULL);
        ovrDistortionVertex* dxv;
        e->dxVerts->Lock(0, 0, (void**)&dxv, 0);
        for (int v = 0; v < e->numVerts; v++)
            dxv[v] = meshData.pVertexData[v];
        e->dxVerts->Unlock();

        device->CreateIndexBuffer((e->numIndices) * sizeof(u_short), 0,
                                  D3DFMT_INDEX16, D3DPOOL_MANAGED, &e->dxIndices, NULL);
        unsigned short* dxi;
        e->dxIndices->Lock(0, 0, (void**)&dxi, 0);
        for (int i = 0; i < e->numIndices; i++)
            dxi[i] = meshData.pIndexData[i];
        e->dxIndices->Unlock();

        ovrHmd_DestroyDistortionMesh(&meshData);
    }
}
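// A minimal per-eye draw sketch for the buffers created above, assuming a D3D9 device,
// a vertex declaration matching ovrDistortionVertex already set, and shader constants
// (UV scale/offset, timewarp matrices) bound elsewhere. Draw_Distortion_Models is an
// illustrative member name, not declared in the original class.
void DistortionRenderer::Draw_Distortion_Models(void)
{
    for (int eye = 0; eye < 2; eye++)
    {
        FOR_EACH_EYE* e = &eachEye[eye];
        device->SetStreamSource(0, e->dxVerts, 0, sizeof(ovrDistortionVertex));
        device->SetIndices(e->dxIndices);
        // The mesh is a plain indexed triangle list: 3 indices per triangle.
        device->DrawIndexedPrimitive(D3DPT_TRIANGLELIST, 0, 0,
                                     e->numVerts, 0, e->numIndices / 3);
    }
}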
HRESULT InitOculusRiftObjects(unsigned distortionCaps, ovrHmd HMD)
{
    HRESULT hr = S_OK;

    // Query the stereo rendering settings.
    // Needed to decide the size of the render target we create.
    // Note: the DK1 returns a rather odd size here, so it is unclear
    // whether the value can be trusted as-is.
    ovrSizei recommenedTex0Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Left,  HMDDesc.DefaultEyeFov[0], 1.0f);
    ovrSizei recommenedTex1Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Right, HMDDesc.DefaultEyeFov[1], 1.0f);
    ovrSizei RenderTargetSize;
    RenderTargetSize.w = recommenedTex0Size.w + recommenedTex1Size.w;
    RenderTargetSize.h = max(recommenedTex0Size.h, recommenedTex1Size.h);

    {
        // Create the texture used as the render target.
        // The scene is rendered into this texture.
        D3D11_TEXTURE2D_DESC desc;
        ZeroMemory(&desc, sizeof(desc));
        desc.Width            = RenderTargetSize.w;
        desc.Height           = RenderTargetSize.h;
        desc.MipLevels        = 1;
        desc.ArraySize        = 1;
        desc.Format           = DXGI_FORMAT_R8G8B8A8_UNORM;
        desc.SampleDesc.Count = 1;
        desc.Usage            = D3D11_USAGE_DEFAULT;
        desc.BindFlags        = D3D11_BIND_RENDER_TARGET | D3D11_BIND_SHADER_RESOURCE;

        // Create the texture.
        g_pd3dDevice->CreateTexture2D(&desc, NULL, &g_pTextureOculus);
        // Creating a shader resource view lets us use it as a source for drawing.
        g_pd3dDevice->CreateShaderResourceView(g_pTextureOculus, NULL, &g_pShaderResViewOculus);
        // Create the render target view.
        g_pd3dDevice->CreateRenderTargetView(g_pTextureOculus, NULL, &g_pRenderTargetViewOculus);

        // Read back the size that was actually allocated (it may differ).
        g_pTextureOculus->GetDesc(&desc);
        RenderTargetSize.w = desc.Width;
        RenderTargetSize.h = desc.Height;

        // Create the depth buffer (with stencil).
        D3D11_TEXTURE2D_DESC descDepth;
        ZeroMemory(&descDepth, sizeof(descDepth));
        descDepth.Width              = RenderTargetSize.w;
        descDepth.Height             = RenderTargetSize.h;
        descDepth.MipLevels          = 1;
        descDepth.ArraySize          = 1;
        descDepth.Format             = DXGI_FORMAT_D24_UNORM_S8_UINT;
        descDepth.SampleDesc.Count   = 1;
        descDepth.SampleDesc.Quality = 0;
        descDepth.Usage              = D3D11_USAGE_DEFAULT;
        descDepth.BindFlags          = D3D11_BIND_DEPTH_STENCIL;
        descDepth.CPUAccessFlags     = 0;
        descDepth.MiscFlags          = 0;
        g_pd3dDevice->CreateTexture2D(&descDepth, NULL, &g_pDepthStencilOculus);

        // Create the depth-stencil view.
        D3D11_DEPTH_STENCIL_VIEW_DESC descDSV;
        ZeroMemory(&descDSV, sizeof(descDSV));
        descDSV.Format             = descDepth.Format;
        descDSV.ViewDimension      = D3D11_DSV_DIMENSION_TEXTURE2D;
        descDSV.Texture2D.MipSlice = 0;
        g_pd3dDevice->CreateDepthStencilView(g_pDepthStencilOculus, &descDSV, &g_pDepthStencilViewOculus);
    }

    // Query the rendering information for each eye.
    ovrFovPort eyeFov[2] = { HMDDesc.DefaultEyeFov[0], HMDDesc.DefaultEyeFov[1] };
    EyeRenderDesc[0] = ovrHmd_GetRenderDesc(HMD, ovrEye_Left,  eyeFov[0]);
    EyeRenderDesc[1] = ovrHmd_GetRenderDesc(HMD, ovrEye_Right, eyeFov[1]);

    // Enable the HMD capabilities we use.
    ovrHmd_SetEnabledCaps(HMD, ovrHmdCap_LowPersistence | ovrHmdCap_LatencyTest);

    // Start the sensor. Enables Orientation (attitude) and Position tracking,
    // plus yaw drift correction (YawCorrection).
    ovrHmd_StartSensor(HMD, ovrSensorCap_Orientation | ovrSensorCap_YawCorrection | ovrSensorCap_Position, 0);

    // Store the viewport information.
    EyeRenderViewport[0].Pos.x  = 0;
    EyeRenderViewport[0].Pos.y  = 0;
    EyeRenderViewport[0].Size.w = RenderTargetSize.w / 2;
    EyeRenderViewport[0].Size.h = RenderTargetSize.h;
    EyeRenderViewport[1].Pos.x  = (RenderTargetSize.w + 1) / 2;
    EyeRenderViewport[1].Pos.y  = 0;
    EyeRenderViewport[1].Size   = EyeRenderViewport[0].Size;

    // Create the distortion mesh for each eye.
    for (int eyeNum = 0; eyeNum < 2; eyeNum++)
    {
        // Fetch the mesh data.
        ovrDistortionMesh meshData;
        ovrHmd_CreateDistortionMesh(HMD, EyeRenderDesc[eyeNum].Eye, EyeRenderDesc[eyeNum].Fov,
                                    distortionCaps, &meshData);
        ovrHmd_GetRenderScaleAndOffset(EyeRenderDesc[eyeNum].Fov, RenderTargetSize,
                                       EyeRenderViewport[eyeNum], (ovrVector2f*)UVScaleOffset[eyeNum]);

        // Parse the SDK vertices into the vertex format we prepared.
        DistortionVertex* pVBVerts = (DistortionVertex*)OVR_ALLOC(sizeof(DistortionVertex) * meshData.VertexCount);
        DistortionVertex* v = pVBVerts;
        ovrDistortionVertex* ov = meshData.pVertexData;
        for (unsigned vertNum = 0; vertNum < meshData.VertexCount; vertNum++)
        {
            v->Pos.x = ov->Pos.x;
            v->Pos.y = ov->Pos.y;
            v->TexR  = (*(ovrVector2f*)&ov->TexR);
            v->TexG  = (*(ovrVector2f*)&ov->TexG);
            v->TexB  = (*(ovrVector2f*)&ov->TexB);
            v->Col[0] = v->Col[1] = v->Col[2] = (BYTE)(ov->VignetteFactor * 255.99f);
            v->Col[3] = (BYTE)(ov->TimeWarpFactor * 255.99f);
            v++; ov++;
        }

        // Create the vertex buffer from the mesh vertex data.
        D3D11_BUFFER_DESC bd = { 0 };
        bd.Usage          = D3D11_USAGE_DEFAULT;
        bd.ByteWidth      = sizeof(DistortionVertex) * meshData.VertexCount;
        bd.BindFlags      = D3D11_BIND_VERTEX_BUFFER;
        bd.CPUAccessFlags = 0;
        D3D11_SUBRESOURCE_DATA InitData = { 0 };
        InitData.pSysMem = pVBVerts;
        hr = g_pd3dDevice->CreateBuffer(&bd, &InitData, &g_pVertexBufferOculus[eyeNum]);
        if (FAILED(hr))
            return hr;

        // Likewise, create the index buffer.
        bd.ByteWidth      = sizeof(unsigned short) * meshData.IndexCount;
        bd.BindFlags      = D3D11_BIND_INDEX_BUFFER;
        bd.CPUAccessFlags = 0;
        InitData.pSysMem  = meshData.pIndexData;
        hr = g_pd3dDevice->CreateBuffer(&bd, &InitData, &g_pIndexBufferOculus[eyeNum]);
        if (FAILED(hr))
            return hr;
        oculusIndexCount = meshData.IndexCount;

        OVR_FREE(pVBVerts);
        ovrHmd_DestroyDistortionMesh(&meshData);
    }

    {
        // Compile the vertex shader.
        ID3DBlob* pBlob = NULL;
        hr = CompileShaderFromFile("OculusRift.hlsl", "VS_TimeWarp", "vs_4_0", &pBlob);
        if (FAILED(hr))
        {
            MessageBox(NULL, "Could not load the vertex shader.", "Error", MB_OK);
            return hr;
        }
        // Create the vertex shader object from the compiled blob.
        hr = g_pd3dDevice->CreateVertexShader(pBlob->GetBufferPointer(), pBlob->GetBufferSize(),
                                              NULL, &g_pVertexShaderOculus);
        if (FAILED(hr))
            return hr;

        // Input layout format.
        static D3D11_INPUT_ELEMENT_DESC DistortionMeshVertexDesc[] =
        {
            { "Position", 0, DXGI_FORMAT_R32G32_FLOAT,   0,  0, D3D11_INPUT_PER_VERTEX_DATA, 0 },
            { "TexCoord", 0, DXGI_FORMAT_R32G32_FLOAT,   0,  8, D3D11_INPUT_PER_VERTEX_DATA, 0 },
            { "TexCoord", 1, DXGI_FORMAT_R32G32_FLOAT,   0, 16, D3D11_INPUT_PER_VERTEX_DATA, 0 },
            { "TexCoord", 2, DXGI_FORMAT_R32G32_FLOAT,   0, 24, D3D11_INPUT_PER_VERTEX_DATA, 0 },
            { "Color",    0, DXGI_FORMAT_R8G8B8A8_UNORM, 0, 32, D3D11_INPUT_PER_VERTEX_DATA, 0 },
        };
        // Create the input layout.
        hr = g_pd3dDevice->CreateInputLayout(DistortionMeshVertexDesc, ARRAYSIZE(DistortionMeshVertexDesc),
                                             pBlob->GetBufferPointer(), pBlob->GetBufferSize(),
                                             &g_pVertexLayoutOculus);
        if (FAILED(hr))
            return hr;
        pBlob->Release();
    }

    {
        // Compile the pixel shader.
        ID3DBlob* pBlob = NULL;
        hr = CompileShaderFromFile("OculusRift.hlsl", "PS_Oculus", "ps_4_0", &pBlob);
        if (FAILED(hr))
        {
            MessageBox(NULL, "Could not load the pixel shader.", "Error", MB_OK);
            return hr;
        }
        // Create the pixel shader object from the compiled blob.
        hr = g_pd3dDevice->CreatePixelShader(pBlob->GetBufferPointer(), pBlob->GetBufferSize(),
                                             NULL, &g_pPixelShaderOculus);
        pBlob->Release();
        if (FAILED(hr))
            return hr;
    }

    // Create the constant buffer.
    // Settings for the shaders that draw the distortion mesh.
    {
        D3D11_BUFFER_DESC bd = { 0 };
        bd.Usage          = D3D11_USAGE_DEFAULT;
        bd.ByteWidth      = sizeof(OculusRiftSettings);
        bd.BindFlags      = D3D11_BIND_CONSTANT_BUFFER;
        bd.CPUAccessFlags = 0;
        hr = g_pd3dDevice->CreateBuffer(&bd, NULL, &g_pConstantBufferOculus);
        if (FAILED(hr))
            return hr;
    }
    return hr;
}
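// A sketch of the per-frame update for the constant buffer created above. The field
// names in OculusRiftSettings (UVScaleOffset, EyeRotationStart/End) are assumptions:
// the real layout lives in OculusRift.hlsl, which this snippet does not show.
// ovrHmd_GetEyeTimewarpMatrices is the SDK call that supplies the start/end rotations
// consumed by the lerp in VS_TimeWarp.
void UpdateOculusConstantBuffer(ovrHmd HMD, ID3D11DeviceContext* ctx, int eyeNum, ovrPosef renderPose)
{
    OculusRiftSettings cb;                           // assumed layout, see note above
    cb.UVScaleOffset[0] = UVScaleOffset[eyeNum][0];  // EyeToSourceUVScale
    cb.UVScaleOffset[1] = UVScaleOffset[eyeNum][1];  // EyeToSourceUVOffset

    ovrMatrix4f timeWarpMatrices[2];
    ovrHmd_GetEyeTimewarpMatrices(HMD, (ovrEyeType)eyeNum, renderPose, timeWarpMatrices);
    cb.EyeRotationStart = timeWarpMatrices[0];       // HLSL may require a transpose
    cb.EyeRotationEnd   = timeWarpMatrices[1];

    ctx->UpdateSubresource(g_pConstantBufferOculus, 0, NULL, &cb, 0, 0);
    ctx->VSSetConstantBuffers(0, 1, &g_pConstantBufferOculus);
}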
//-------------------------------------------------------------------------------------
int Init()
{
    // Initializes LibOVR, and the Rift
    ovr_Initialize();
    HMD = ovrHmd_Create(0);
    if (!HMD)
    {
        MessageBoxA(NULL, "Oculus Rift not detected.", "", MB_OK);
        return(1);
    }
    if (HMD->ProductName[0] == '\0')
        MessageBoxA(NULL, "Rift detected, display not enabled.", "", MB_OK);

    // Setup Window and Graphics - use window frame if relying on Oculus driver
    const int backBufferMultisample = 1;
    bool UseAppWindowFrame = (HMD->HmdCaps & ovrHmdCap_ExtendDesktop) ? false : true;
    HWND window = Util_InitWindowAndGraphics(Recti(HMD->WindowsPos, HMD->Resolution),
                                             FullScreen, backBufferMultisample,
                                             UseAppWindowFrame, &pRender);
    if (!window)
        return 1;
    ovrHmd_AttachToWindow(HMD, window, NULL, NULL);

    // Configure Stereo settings.
    Sizei recommenedTex0Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Left,  HMD->DefaultEyeFov[0], 1.0f);
    Sizei recommenedTex1Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Right, HMD->DefaultEyeFov[1], 1.0f);
    Sizei RenderTargetSize;
    RenderTargetSize.w = recommenedTex0Size.w + recommenedTex1Size.w;
    RenderTargetSize.h = max(recommenedTex0Size.h, recommenedTex1Size.h);

    const int eyeRenderMultisample = 1;
    pRendertargetTexture = pRender->CreateTexture(Texture_RGBA | Texture_RenderTarget | eyeRenderMultisample,
                                                  RenderTargetSize.w, RenderTargetSize.h, NULL);
    // The actual RT size may be different due to HW limits.
    RenderTargetSize.w = pRendertargetTexture->GetWidth();
    RenderTargetSize.h = pRendertargetTexture->GetHeight();

    // Initialize eye rendering information.
    // The viewport sizes are re-computed in case RenderTargetSize changed due to HW limitations.
    ovrFovPort eyeFov[2] = { HMD->DefaultEyeFov[0], HMD->DefaultEyeFov[1] };

    EyeRenderViewport[0].Pos  = Vector2i(0, 0);
    EyeRenderViewport[0].Size = Sizei(RenderTargetSize.w / 2, RenderTargetSize.h);
    EyeRenderViewport[1].Pos  = Vector2i((RenderTargetSize.w + 1) / 2, 0);
    EyeRenderViewport[1].Size = EyeRenderViewport[0].Size;

#if SDK_RENDER
    // Query D3D texture data.
    EyeTexture[0].D3D11.Header.API            = ovrRenderAPI_D3D11;
    EyeTexture[0].D3D11.Header.TextureSize    = RenderTargetSize;
    EyeTexture[0].D3D11.Header.RenderViewport = EyeRenderViewport[0];
    EyeTexture[0].D3D11.pTexture              = pRendertargetTexture->Tex.GetPtr();
    EyeTexture[0].D3D11.pSRView               = pRendertargetTexture->TexSv.GetPtr();

    // Right eye uses the same texture, but different rendering viewport.
    EyeTexture[1] = EyeTexture[0];
    EyeTexture[1].D3D11.Header.RenderViewport = EyeRenderViewport[1];

    // Configure d3d11.
    ovrD3D11Config d3d11cfg;
    d3d11cfg.D3D11.Header.API         = ovrRenderAPI_D3D11;
    d3d11cfg.D3D11.Header.RTSize      = Sizei(HMD->Resolution.w, HMD->Resolution.h);
    d3d11cfg.D3D11.Header.Multisample = backBufferMultisample;
    d3d11cfg.D3D11.pDevice            = pRender->Device;
    d3d11cfg.D3D11.pDeviceContext     = pRender->Context;
    d3d11cfg.D3D11.pBackBufferRT      = pRender->BackBufferRT;
    d3d11cfg.D3D11.pSwapChain         = pRender->SwapChain;

    if (!ovrHmd_ConfigureRendering(HMD, &d3d11cfg.Config,
                                   ovrDistortionCap_Chromatic | ovrDistortionCap_Vignette |
                                   ovrDistortionCap_TimeWarp | ovrDistortionCap_Overdrive,
                                   eyeFov, EyeRenderDesc))
        return(1);
#else
    // Shader vertex format
    D3D11_INPUT_ELEMENT_DESC DistortionMeshVertexDesc[] =
    {
        {"Position", 0, DXGI_FORMAT_R32G32_FLOAT, 0,  0, D3D11_INPUT_PER_VERTEX_DATA, 0},
        {"Position", 1, DXGI_FORMAT_R32_FLOAT,    0,  8, D3D11_INPUT_PER_VERTEX_DATA, 0},
        {"Position", 2, DXGI_FORMAT_R32_FLOAT,    0, 12, D3D11_INPUT_PER_VERTEX_DATA, 0},
        {"TexCoord", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 16, D3D11_INPUT_PER_VERTEX_DATA, 0},
        {"TexCoord", 1, DXGI_FORMAT_R32G32_FLOAT, 0, 24, D3D11_INPUT_PER_VERTEX_DATA, 0},
        {"TexCoord", 2, DXGI_FORMAT_R32G32_FLOAT, 0, 32, D3D11_INPUT_PER_VERTEX_DATA, 0}
    };

    // Distortion vertex shader
    const char* vertexShader =
        "float2 EyeToSourceUVScale, EyeToSourceUVOffset;                                        \n"
        "float4x4 EyeRotationStart, EyeRotationEnd;                                             \n"
        "float2 TimewarpTexCoord(float2 TexCoord, float4x4 rotMat)                              \n"
        "{                                                                                      \n"
        // Vertex inputs are in TanEyeAngle space for the R,G,B channels (i.e. after chromatic
        // aberration and distortion). These are now "real world" vectors in direction (x,y,1)
        // relative to the eye of the HMD. Apply the 3x3 timewarp rotation to these vectors.
        "    float3 transformed = float3( mul ( rotMat, float4(TexCoord.xy, 1, 1) ).xyz);       \n"
        // Project them back onto the Z=1 plane of the rendered images.
        "    float2 flattened = (transformed.xy / transformed.z);                               \n"
        // Scale them into ([0,0.5],[0,1]) or ([0.5,0],[0,1]) UV lookup space (depending on eye)
        "    return(EyeToSourceUVScale * flattened + EyeToSourceUVOffset);                      \n"
        "}                                                                                      \n"
        "void main(in float2 Position : POSITION, in float timewarpLerpFactor : POSITION1,      \n"
        "          in float Vignette : POSITION2, in float2 TexCoord0 : TEXCOORD0,              \n"
        "          in float2 TexCoord1 : TEXCOORD1, in float2 TexCoord2 : TEXCOORD2,            \n"
        "          out float4 oPosition : SV_Position,                                          \n"
        "          out float2 oTexCoord0 : TEXCOORD0, out float2 oTexCoord1 : TEXCOORD1,        \n"
        "          out float2 oTexCoord2 : TEXCOORD2, out float oVignette : TEXCOORD3)          \n"
        "{                                                                                      \n"
        "    float4x4 lerpedEyeRot = lerp(EyeRotationStart, EyeRotationEnd, timewarpLerpFactor);\n"
        "    oTexCoord0 = TimewarpTexCoord(TexCoord0,lerpedEyeRot);                             \n"
        "    oTexCoord1 = TimewarpTexCoord(TexCoord1,lerpedEyeRot);                             \n"
        "    oTexCoord2 = TimewarpTexCoord(TexCoord2,lerpedEyeRot);                             \n"
        "    oPosition = float4(Position.xy, 0.5, 1.0); oVignette = Vignette;                   \n"
        "}";

    // Distortion pixel shader
    const char* pixelShader =
        "Texture2D Texture : register(t0);                                                  \n"
        "SamplerState Linear : register(s0);                                                \n"
        "float4 main(in float4 oPosition : SV_Position, in float2 oTexCoord0 : TEXCOORD0,   \n"
        "            in float2 oTexCoord1 : TEXCOORD1, in float2 oTexCoord2 : TEXCOORD2,    \n"
        "            in float oVignette : TEXCOORD3) : SV_Target                            \n"
        "{                                                                                  \n"
        // 3 samples for fixing chromatic aberrations
        "    float R = Texture.Sample(Linear, oTexCoord0.xy).r;                             \n"
        "    float G = Texture.Sample(Linear, oTexCoord1.xy).g;                             \n"
        "    float B = Texture.Sample(Linear, oTexCoord2.xy).b;                             \n"
        "    return (oVignette*float4(R,G,B,1));                                            \n"
        "}";

    pRender->InitShaders(vertexShader, pixelShader, &Shaders, &VertexIL, DistortionMeshVertexDesc, 6);

    for (int eyeNum = 0; eyeNum < 2; eyeNum++)
    {
        // Allocate mesh vertices, registering with renderer using the OVR vertex format.
        ovrDistortionMesh meshData;
        ovrHmd_CreateDistortionMesh(HMD, (ovrEyeType)eyeNum, eyeFov[eyeNum],
                                    ovrDistortionCap_Chromatic | ovrDistortionCap_TimeWarp, &meshData);
        MeshVBs[eyeNum] = *pRender->CreateBuffer();
        MeshVBs[eyeNum]->Data(Buffer_Vertex, meshData.pVertexData, sizeof(ovrDistortionVertex) * meshData.VertexCount);
        MeshIBs[eyeNum] = *pRender->CreateBuffer();
        MeshIBs[eyeNum]->Data(Buffer_Index, meshData.pIndexData, sizeof(unsigned short) * meshData.IndexCount);
        ovrHmd_DestroyDistortionMesh(&meshData);

        // Create eye render description for use later
        EyeRenderDesc[eyeNum] = ovrHmd_GetRenderDesc(HMD, (ovrEyeType)eyeNum, eyeFov[eyeNum]);

        // Do scale and offset
        ovrHmd_GetRenderScaleAndOffset(eyeFov[eyeNum], RenderTargetSize, EyeRenderViewport[eyeNum], UVScaleOffset[eyeNum]);
    }
#endif

    ovrHmd_SetEnabledCaps(HMD, ovrHmdCap_LowPersistence | ovrHmdCap_DynamicPrediction);

    // Start the sensor which informs of the Rift's pose and motion
    ovrHmd_ConfigureTracking(HMD, ovrTrackingCap_Orientation | ovrTrackingCap_MagYawCorrection |
                                  ovrTrackingCap_Position, 0);

    // This creates lights and models.
    pRoomScene = new Scene;
    sbuilder.PopulateRoomScene(pRoomScene, pRender);

    return (0);
}
void OVR_CalculateState(vr_param_t *state)
{
    vr_param_t ovrState;
    float ovrScale = vr_ovr_supersample->value;
    int eye = 0;

    for (eye = 0; eye < 2; eye++)
    {
        ovrDistortionMesh meshData;
        ovr_vert_t *mesh = NULL;
        ovr_vert_t *v = NULL;
        ovrDistortionVertex *ov = NULL;
        unsigned int i = 0;
        float vignette_factor;

        if (vr_ovr_maxfov->value)
        {
            renderInfo[eye].eyeFov = hmd->MaxEyeFov[eye];
        }
        else
        {
            renderInfo[eye].eyeFov = hmd->DefaultEyeFov[eye];
        }

        ovrState.eyeFBO[eye] = &renderInfo[eye].eyeFBO;

        ovrState.renderParams[eye].projection.x.scale  = 2.0f / (renderInfo[eye].eyeFov.LeftTan + renderInfo[eye].eyeFov.RightTan);
        ovrState.renderParams[eye].projection.x.offset = (renderInfo[eye].eyeFov.LeftTan - renderInfo[eye].eyeFov.RightTan) * ovrState.renderParams[eye].projection.x.scale * 0.5f;
        ovrState.renderParams[eye].projection.y.scale  = 2.0f / (renderInfo[eye].eyeFov.UpTan + renderInfo[eye].eyeFov.DownTan);
        ovrState.renderParams[eye].projection.y.offset = (renderInfo[eye].eyeFov.UpTan - renderInfo[eye].eyeFov.DownTan) * ovrState.renderParams[eye].projection.y.scale * 0.5f;

        // set up rendering info
        eyeDesc[eye] = ovrHmd_GetRenderDesc(hmd, (ovrEyeType) eye, renderInfo[eye].eyeFov);

        VectorSet(ovrState.renderParams[eye].viewOffset,
                  -eyeDesc[eye].HmdToEyeViewOffset.x,
                  eyeDesc[eye].HmdToEyeViewOffset.y,
                  eyeDesc[eye].HmdToEyeViewOffset.z);

        ovrHmd_CreateDistortionMesh(hmd, eyeDesc[eye].Eye, eyeDesc[eye].Fov,
                                    ovrDistortionCap_Chromatic | ovrDistortionCap_SRGB |
                                    ovrDistortionCap_TimeWarp | ovrDistortionCap_Vignette,
                                    &meshData);

        mesh = (ovr_vert_t *) Z_TagMalloc(sizeof(ovr_vert_t) * meshData.VertexCount, TAG_RENDERER);

        v = mesh;
        ov = meshData.pVertexData;
        for (i = 0; i < meshData.VertexCount; i++)
        {
            // DK2 display not rotated - rotate the coordinates manually
            if (vid.width < vid.height)
            {
                v->pos.x = -ov->ScreenPosNDC.y;
                v->pos.y = ov->ScreenPosNDC.x;
            }
            else
            {
                v->pos.x = ov->ScreenPosNDC.x;
                v->pos.y = ov->ScreenPosNDC.y;
            }

            v->texR = (*(ovrVector2f*)&ov->TanEyeAnglesR);
            v->texG = (*(ovrVector2f*)&ov->TanEyeAnglesG);
            v->texB = (*(ovrVector2f*)&ov->TanEyeAnglesB);

            vignette_factor = ov->VignetteFactor;
            if (vignette_factor < 0)
                vignette_factor = 0;
            v->color[0] = v->color[1] = v->color[2] = (GLubyte)(vignette_factor * 255.99f);
            v->color[3] = (GLubyte)(ov->TimeWarpFactor * 255.99f);
            v++; ov++;
        }

        R_BindIVBO(&renderInfo[eye].eye, NULL, 0);
        R_VertexData(&renderInfo[eye].eye, sizeof(ovr_vert_t) * meshData.VertexCount, mesh);
        R_IndexData(&renderInfo[eye].eye, GL_TRIANGLES, GL_UNSIGNED_SHORT,
                    meshData.IndexCount, sizeof(uint16_t) * meshData.IndexCount, meshData.pIndexData);
        R_ReleaseIVBO();

        Z_Free(mesh);
        ovrHmd_DestroyDistortionMesh(&meshData);
    }

    {
        // calculate this to give the engine a rough idea of the fov
        float combinedTanHalfFovHorizontal = max(max(renderInfo[0].eyeFov.LeftTan, renderInfo[0].eyeFov.RightTan),
                                                 max(renderInfo[1].eyeFov.LeftTan, renderInfo[1].eyeFov.RightTan));
        float combinedTanHalfFovVertical   = max(max(renderInfo[0].eyeFov.UpTan, renderInfo[0].eyeFov.DownTan),
                                                 max(renderInfo[1].eyeFov.UpTan, renderInfo[1].eyeFov.DownTan));
        float horizontalFullFovInRadians   = 2.0f * atanf(combinedTanHalfFovHorizontal);
        float fovX = RAD2DEG(horizontalFullFovInRadians);
        float fovY = RAD2DEG(2.0 * atanf(combinedTanHalfFovVertical));

        ovrState.aspect     = combinedTanHalfFovHorizontal / combinedTanHalfFovVertical;
        ovrState.viewFovY   = fovY;
        ovrState.viewFovX   = fovX;
        ovrState.pixelScale = ovrScale * vid.width / (float) hmd->Resolution.w;
    }

    *state = ovrState;
}
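// The projection.x/y scale and offset computed above are the standard off-center
// projection terms for an asymmetric FOV: scale maps the tan-angle range onto [-1,1]
// and offset recenters it. A sketch of the equivalent full matrix, matching what the
// SDK's ovrMatrix4f_Projection produces for a right-handed, D3D-style [0,1] clip z;
// the function name is illustrative and sign conventions differ between engines.
static void OVR_ProjectionFromFov(const ovrFovPort *fov, float znear, float zfar, float out[4][4])
{
    float xscale  = 2.0f / (fov->LeftTan + fov->RightTan);
    float xoffset = (fov->LeftTan - fov->RightTan) * xscale * 0.5f;
    float yscale  = 2.0f / (fov->UpTan + fov->DownTan);
    float yoffset = (fov->UpTan - fov->DownTan) * yscale * 0.5f;
    int r, c;

    for (r = 0; r < 4; r++)
        for (c = 0; c < 4; c++)
            out[r][c] = 0.0f;

    out[0][0] = xscale;
    out[0][2] = -xoffset;               // verify the sign against your handedness
    out[1][1] = yscale;
    out[1][2] = yoffset;
    out[2][2] = zfar / (znear - zfar);
    out[2][3] = (zfar * znear) / (znear - zfar);
    out[3][2] = -1.0f;
}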
COculusVR::COculusVR(bool latency)
{
    m_isReady = true;

    // Initializes LibOVR, and the Rift
    ovr_Initialize();

    Hmd = ovrHmd_Create(0);
    if (!Hmd)
    {
        MessageBoxA(NULL, "Oculus Rift not detected.", "", MB_OK);
        return;
    }
    if (Hmd->ProductName[0] == '\0')
        MessageBoxA(NULL, "Rift detected, display not enabled.", "", MB_OK);

    if (Hmd->HmdCaps & ovrHmdCap_ExtendDesktop)
    {
        WindowSize = Hmd->Resolution;
    }
    else
    {
        // In Direct App-rendered mode, we can use smaller window size,
        // as it can have its own contents and isn't tied to the buffer.
        WindowSize = Sizei(1100, 618); //Sizei(960, 540); avoid rotated output bug.
    }

    ovrHmd_AttachToWindow(Hmd, wzGetWindowHandle(), NULL, NULL);

    // Configure Stereo settings.
    Sizei recommenedTex0Size = ovrHmd_GetFovTextureSize(Hmd, ovrEye_Left,  Hmd->DefaultEyeFov[0], 1.0f);
    Sizei recommenedTex1Size = ovrHmd_GetFovTextureSize(Hmd, ovrEye_Right, Hmd->DefaultEyeFov[1], 1.0f);
    EyeRenderTargetSize.w = recommenedTex0Size.w + recommenedTex1Size.w;
    EyeRenderTargetSize.h = Alg::Max(recommenedTex0Size.h, recommenedTex1Size.h);

    // Create framebuffer
    wzCreateRenderTarget(&m_screenRender);
    wzCreateRenderBufferDepth(&m_screenBuffer, EyeRenderTargetSize.w, EyeRenderTargetSize.h);
    wzCreateTexture(&m_screenTex, EyeRenderTargetSize.w, EyeRenderTargetSize.h, WZ_FORMATTYPE_RGB, NULL);

    // Attach
    wzSetRenderBuffer(&m_screenRender, &m_screenBuffer);
    wzSetRenderTexture(&m_screenRender, &m_screenTex);

    // Initialize eye rendering information.
    // The viewport sizes are re-computed in case RenderTargetSize changed due to HW limitations.
    ovrFovPort eyeFov[2] = { Hmd->DefaultEyeFov[0], Hmd->DefaultEyeFov[1] };

    EyeRenderViewport[0].Pos  = Vector2i(0, 0);
    EyeRenderViewport[0].Size = Sizei(EyeRenderTargetSize.w / 2, EyeRenderTargetSize.h);
    EyeRenderViewport[1].Pos  = Vector2i((EyeRenderTargetSize.w + 1) / 2, 0);
    EyeRenderViewport[1].Size = EyeRenderViewport[0].Size;

    // Shader vertex format
    wzVertexElements ve_var[] = {
        {WZVETYPE_FLOAT2, "position"},
        {WZVETYPE_FLOAT1, "timewarpLerpFactor"},
        {WZVETYPE_FLOAT1, "vignette"},
        {WZVETYPE_FLOAT2, "texCoord0"},
        {WZVETYPE_FLOAT2, "texCoord1"},
        {WZVETYPE_FLOAT2, "texCoord2"},
        WZVE_TMT()
    };

    // Create the mesh for each eye
    for (int eyeNum = 0; eyeNum < 2; eyeNum++)
    {
        // Allocate mesh vertices, registering with renderer using the OVR vertex format.
        ovrDistortionMesh meshData;
        ovrHmd_CreateDistortionMesh(Hmd, (ovrEyeType)eyeNum, eyeFov[eyeNum],
                                    ovrDistortionCap_Chromatic | ovrDistortionCap_TimeWarp, &meshData);

        // Create the per-attribute arrays
        wzVector2* vertex_pos         = new wzVector2[meshData.VertexCount];
        float*     vertex_posTimewarp = new float[meshData.VertexCount];
        float*     vertex_posVignette = new float[meshData.VertexCount];
        wzVector2* vertex_textanR     = new wzVector2[meshData.VertexCount];
        wzVector2* vertex_textanG     = new wzVector2[meshData.VertexCount];
        wzVector2* vertex_textanB     = new wzVector2[meshData.VertexCount];

        // Copy the data
        for (unsigned int i = 0; i < meshData.VertexCount; i++)
        {
            vertex_pos[i].x       = meshData.pVertexData[i].ScreenPosNDC.x;
            vertex_pos[i].y       = meshData.pVertexData[i].ScreenPosNDC.y;
            vertex_posTimewarp[i] = meshData.pVertexData[i].TimeWarpFactor;
            vertex_posVignette[i] = meshData.pVertexData[i].VignetteFactor;
            vertex_textanR[i].x   = meshData.pVertexData[i].TanEyeAnglesR.x;
            vertex_textanR[i].y   = meshData.pVertexData[i].TanEyeAnglesR.y;
            vertex_textanG[i].x   = meshData.pVertexData[i].TanEyeAnglesG.x;
            vertex_textanG[i].y   = meshData.pVertexData[i].TanEyeAnglesG.y;
            vertex_textanB[i].x   = meshData.pVertexData[i].TanEyeAnglesB.x;
            vertex_textanB[i].y   = meshData.pVertexData[i].TanEyeAnglesB.y;
        }

        void* vertex_pointer[] = { vertex_pos, vertex_posTimewarp, vertex_posVignette,
                                   vertex_textanR, vertex_textanG, vertex_textanB };

        if (wzCreateMesh(&MeshBuffer[eyeNum], vertex_pointer, ve_var,
                         meshData.pIndexData, meshData.VertexCount, meshData.IndexCount))
        {
            MessageBoxA(NULL, "Lens Distort Mesh Error.", "", MB_OK);
            delete[] vertex_pos;
            delete[] vertex_posTimewarp;
            delete[] vertex_posVignette;
            delete[] vertex_textanR;
            delete[] vertex_textanG;
            delete[] vertex_textanB;
            return; // error
        }
        wzChangeDrawMode(&MeshBuffer[eyeNum], WZ_MESH_DF_TRIANGLELIST);

        delete[] vertex_pos;
        delete[] vertex_posTimewarp;
        delete[] vertex_posVignette;
        delete[] vertex_textanR;
        delete[] vertex_textanG;
        delete[] vertex_textanB;

        ovrHmd_DestroyDistortionMesh(&meshData);

        // Create eye render description for use later
        EyeRenderDesc[eyeNum] = ovrHmd_GetRenderDesc(Hmd, (ovrEyeType)eyeNum, eyeFov[eyeNum]);

        // Do scale and offset
        ovrHmd_GetRenderScaleAndOffset(eyeFov[eyeNum], EyeRenderTargetSize, EyeRenderViewport[eyeNum], UVScaleOffset[eyeNum]);
    }

    // Create the shader
    if (wzCreateShader(&LensShader, ols_vertexshader, ols_flagshader, ve_var))
    {
        MessageBoxA(NULL, "Lens Shader Compile Error.", "", MB_OK);
        return;
    }

    if (latency)
        ovrHmd_SetEnabledCaps(Hmd, ovrHmdCap_DynamicPrediction); //ovrHmdCap_LowPersistence

    // Start the sensor which informs of the Rift's pose and motion
    ovrHmd_ConfigureTracking(Hmd, ovrTrackingCap_Orientation | ovrTrackingCap_MagYawCorrection, 0); // not used: ovrTrackingCap_Position

    m_isReady = false;
}
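// For reference, the two vectors written by ovrHmd_GetRenderScaleAndOffset above are
// applied in the vertex shader as uv = TanEyeAngles * UVScaleOffset[0] + UVScaleOffset[1].
// A minimal CPU-side restatement of that mapping (pure SDK types, no wz* calls; the
// helper name is illustrative):
static ovrVector2f TanAnglesToUV(const ovrVector2f uvScaleOffset[2], ovrVector2f tanEyeAngles)
{
    ovrVector2f uv;
    uv.x = tanEyeAngles.x * uvScaleOffset[0].x + uvScaleOffset[1].x;
    uv.y = tanEyeAngles.y * uvScaleOffset[0].y + uvScaleOffset[1].y;
    return uv; // should land inside this eye's viewport in [0,1] texture space
}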
void DistortionMeshInit(unsigned distortionCaps, ovrHmd HMD, ovrEyeRenderDesc eyeRenderDesc[2],
                        ovrSizei textureSize, ovrRecti viewports[2], RenderDevice* pRender)
{
    // Generate distortion mesh for each eye
    for (int eyeNum = 0; eyeNum < 2; eyeNum++)
    {
        // Allocate & generate distortion mesh vertices.
        ovrDistortionMesh meshData;
        ovrHmd_CreateDistortionMesh(HMD, eyeRenderDesc[eyeNum].Eye, eyeRenderDesc[eyeNum].Fov,
                                    distortionCaps, &meshData);
        ovrHmd_GetRenderScaleAndOffset(eyeRenderDesc[eyeNum].Fov, textureSize, viewports[eyeNum],
                                       (ovrVector2f*) DistortionData.UVScaleOffset[eyeNum]);

        // Now parse the vertex data and create a render ready vertex buffer from it
        DistortionVertex* pVBVerts = (DistortionVertex*)OVR_ALLOC(sizeof(DistortionVertex) * meshData.VertexCount);
        DistortionVertex* v = pVBVerts;
        ovrDistortionVertex* ov = meshData.pVertexData;
        for (unsigned vertNum = 0; vertNum < meshData.VertexCount; vertNum++)
        {
            v->Pos.x = ov->Pos.x;
            v->Pos.y = ov->Pos.y;
            v->TexR  = (*(Vector2f*)&ov->TexR);
            v->TexG  = (*(Vector2f*)&ov->TexG);
            v->TexB  = (*(Vector2f*)&ov->TexB);
            v->Col.R = v->Col.G = v->Col.B = (OVR::UByte)(ov->VignetteFactor * 255.99f);
            v->Col.A = (OVR::UByte)(ov->TimeWarpFactor * 255.99f);
            v++; ov++;
        }

        // Register this mesh with the renderer
        DistortionData.MeshVBs[eyeNum] = *pRender->CreateBuffer();
        DistortionData.MeshVBs[eyeNum]->Data(Buffer_Vertex, pVBVerts, sizeof(DistortionVertex) * meshData.VertexCount);
        DistortionData.MeshIBs[eyeNum] = *pRender->CreateBuffer();
        DistortionData.MeshIBs[eyeNum]->Data(Buffer_Index, meshData.pIndexData, sizeof(unsigned short) * meshData.IndexCount);

        OVR_FREE(pVBVerts);
        ovrHmd_DestroyDistortionMesh(&meshData);
    }

    // Pixel shader for the mesh
    //-------------------------------------------------------------------------------------------
    const char* pixelShader =
        "Texture2D Texture : register(t0);                                                  \n"
        "SamplerState Linear : register(s0);                                                \n"
        "float4 main(in float4 oPosition : SV_Position, in float4 oColor : COLOR,           \n"
        "            in float2 oTexCoord0 : TEXCOORD0, in float2 oTexCoord1 : TEXCOORD1,    \n"
        "            in float2 oTexCoord2 : TEXCOORD2) : SV_Target                          \n"
        "{                                                                                  \n"
        // 3 samples for fixing chromatic aberrations
        "    float ResultR = Texture.Sample(Linear, oTexCoord0.xy).r;                       \n"
        "    float ResultG = Texture.Sample(Linear, oTexCoord1.xy).g;                       \n"
        "    float ResultB = Texture.Sample(Linear, oTexCoord2.xy).b;                       \n"
        "    return float4(ResultR * oColor.r, ResultG * oColor.g, ResultB * oColor.b, 1.0);\n"
        "}";

    // Choose the vertex shader, according to whether timewarp is enabled
    if (distortionCaps & ovrDistortionCap_TimeWarp)
    {
        // TIMEWARP
        //--------------------------------------------------------------------------------------------
        const char* vertexShader =
            "float2 EyeToSourceUVScale;                                                             \n"
            "float2 EyeToSourceUVOffset;                                                            \n"
            "float4x4 EyeRotationStart;                                                             \n"
            "float4x4 EyeRotationEnd;                                                               \n"
            "float2 TimewarpTexCoord(float2 TexCoord, float4x4 rotMat)                              \n"
            "{                                                                                      \n"
            // Vertex inputs are in TanEyeAngle space for the R,G,B channels (i.e. after chromatic
            // aberration and distortion). These are now "real world" vectors in direction (x,y,1)
            // relative to the eye of the HMD. Apply the 3x3 timewarp rotation to these vectors.
            "    float3 transformed = float3( mul ( rotMat, float4(TexCoord.xy, 1, 1) ).xyz);       \n"
            // Project them back onto the Z=1 plane of the rendered images.
            "    float2 flattened = (transformed.xy / transformed.z);                               \n"
            // Scale them into ([0,0.5],[0,1]) or ([0.5,0],[0,1]) UV lookup space (depending on eye)
            "    return(EyeToSourceUVScale * flattened + EyeToSourceUVOffset);                      \n"
            "}                                                                                      \n"
            "void main(in float2 Position : POSITION, in float4 Color : COLOR0,                     \n"
            "          in float2 TexCoord0 : TEXCOORD0, in float2 TexCoord1 : TEXCOORD1,            \n"
            "          in float2 TexCoord2 : TEXCOORD2,                                             \n"
            "          out float4 oPosition : SV_Position, out float4 oColor : COLOR,               \n"
            "          out float2 oTexCoord0 : TEXCOORD0, out float2 oTexCoord1 : TEXCOORD1,        \n"
            "          out float2 oTexCoord2 : TEXCOORD2)                                           \n"
            "{                                                                                      \n"
            "    float timewarpLerpFactor = Color.a;                                                \n"
            "    float4x4 lerpedEyeRot = lerp(EyeRotationStart, EyeRotationEnd, timewarpLerpFactor);\n"
            "    oTexCoord0 = TimewarpTexCoord(TexCoord0,lerpedEyeRot);                             \n"
            "    oTexCoord1 = TimewarpTexCoord(TexCoord1,lerpedEyeRot);                             \n"
            "    oTexCoord2 = TimewarpTexCoord(TexCoord2,lerpedEyeRot);                             \n"
            "    oPosition = float4(Position.xy, 0.5, 1.0);                                         \n"
            "    oColor = Color.r; /*For vignette fade*/                                            \n"
            "}";

        pRender->InitShaders(vertexShader, pixelShader, &DistortionData.Shaders,
                             &DistortionData.VertexIL, DistortionMeshVertexDesc, 5);
    }
    else
    {
        //-------------------------------------------------------------------------------------------
        const char* vertexShader =
            "float2 EyeToSourceUVScale;                                                      \n"
            "float2 EyeToSourceUVOffset;                                                     \n"
            "void main(in float2 Position : POSITION, in float4 Color : COLOR0,              \n"
            "          in float2 TexCoord0 : TEXCOORD0, in float2 TexCoord1 : TEXCOORD1,     \n"
            "          in float2 TexCoord2 : TEXCOORD2,                                      \n"
            "          out float4 oPosition : SV_Position, out float4 oColor : COLOR,        \n"
            "          out float2 oTexCoord0 : TEXCOORD0, out float2 oTexCoord1 : TEXCOORD1, \n"
            "          out float2 oTexCoord2 : TEXCOORD2)                                    \n"
            "{                                                                               \n"
            // Scale them into ([0,0.5],[0,1]) or ([0.5,0],[0,1]) UV lookup space (depending on eye)
            "    oTexCoord0 = EyeToSourceUVScale * TexCoord0 + EyeToSourceUVOffset;          \n"
            "    oTexCoord1 = EyeToSourceUVScale * TexCoord1 + EyeToSourceUVOffset;          \n"
            "    oTexCoord2 = EyeToSourceUVScale * TexCoord2 + EyeToSourceUVOffset;          \n"
            "    oPosition = float4(Position.xy, 0.5, 1.0);                                  \n"
            "    oColor = Color.r; /*For vignette fade*/                                     \n"
            "}";

        pRender->InitShaders(vertexShader, pixelShader, &DistortionData.Shaders,
                             &DistortionData.VertexIL, DistortionMeshVertexDesc, 5);
    }
}
osg::Geode* OculusDevice::distortionMesh(Eye eye, osg::Program* program, int x, int y, int w, int h, bool splitViewport)
{
    osg::ref_ptr<osg::Geode> geode = new osg::Geode;

    // Allocate & generate distortion mesh vertices.
    ovrDistortionMesh meshData;
    ovrHmd_CreateDistortionMesh(m_hmdDevice, m_eyeRenderDesc[eye].Eye, m_eyeRenderDesc[eye].Fov,
                                ovrDistortionCap_Chromatic | ovrDistortionCap_TimeWarp, &meshData);

    // Now parse the vertex data and create a render ready vertex buffer from it
    ovrDistortionVertex* ov = meshData.pVertexData;
    osg::Vec2Array* positionArray = new osg::Vec2Array;
    osg::Vec4Array* colorArray    = new osg::Vec4Array;
    osg::Vec2Array* textureRArray = new osg::Vec2Array;
    osg::Vec2Array* textureGArray = new osg::Vec2Array;
    osg::Vec2Array* textureBArray = new osg::Vec2Array;

    for (unsigned vertNum = 0; vertNum < meshData.VertexCount; ++vertNum)
    {
        if (splitViewport)
        {
            // Positions need to be scaled and translated if we are using one viewport per eye
            if (eye == LEFT)
            {
                positionArray->push_back(osg::Vec2f(2 * ov[vertNum].ScreenPosNDC.x + 1.0, ov[vertNum].ScreenPosNDC.y));
            }
            else if (eye == RIGHT)
            {
                positionArray->push_back(osg::Vec2f(2 * ov[vertNum].ScreenPosNDC.x - 1.0, ov[vertNum].ScreenPosNDC.y));
            }
        }
        else
        {
            positionArray->push_back(osg::Vec2f(ov[vertNum].ScreenPosNDC.x, ov[vertNum].ScreenPosNDC.y));
        }

        colorArray->push_back(osg::Vec4f(ov[vertNum].VignetteFactor, ov[vertNum].VignetteFactor,
                                         ov[vertNum].VignetteFactor, ov[vertNum].TimeWarpFactor));
        textureRArray->push_back(osg::Vec2f(ov[vertNum].TanEyeAnglesR.x, ov[vertNum].TanEyeAnglesR.y));
        textureGArray->push_back(osg::Vec2f(ov[vertNum].TanEyeAnglesG.x, ov[vertNum].TanEyeAnglesG.y));
        textureBArray->push_back(osg::Vec2f(ov[vertNum].TanEyeAnglesB.x, ov[vertNum].TanEyeAnglesB.y));
    }

    // Get triangle indices
    osg::UShortArray* indexArray = new osg::UShortArray;
    unsigned short* index = meshData.pIndexData;
    for (unsigned indexNum = 0; indexNum < meshData.IndexCount; ++indexNum)
    {
        indexArray->push_back(index[indexNum]);
    }

    // Deallocate the mesh data
    ovrHmd_DestroyDistortionMesh(&meshData);

    osg::ref_ptr<osg::Geometry> geometry = new osg::Geometry;
    geometry->setUseDisplayList(false);
    geometry->setUseVertexBufferObjects(true);
    osg::ref_ptr<osg::DrawElementsUShort> drawElement =
        new osg::DrawElementsUShort(osg::PrimitiveSet::TRIANGLES, indexArray->size(),
                                    (GLushort*)indexArray->getDataPointer());
    geometry->addPrimitiveSet(drawElement);

    GLuint positionLoc  = 0;
    GLuint colorLoc     = 1;
    GLuint texCoord0Loc = 2;
    GLuint texCoord1Loc = 3;
    GLuint texCoord2Loc = 4;

    program->addBindAttribLocation("Position", positionLoc);
    geometry->setVertexAttribArray(positionLoc, positionArray);
    geometry->setVertexAttribBinding(positionLoc, osg::Geometry::BIND_PER_VERTEX);

    program->addBindAttribLocation("Color", colorLoc);
    geometry->setVertexAttribArray(colorLoc, colorArray);
    geometry->setVertexAttribBinding(colorLoc, osg::Geometry::BIND_PER_VERTEX);

    program->addBindAttribLocation("TexCoord0", texCoord0Loc);
    geometry->setVertexAttribArray(texCoord0Loc, textureRArray);
    geometry->setVertexAttribBinding(texCoord0Loc, osg::Geometry::BIND_PER_VERTEX);

    program->addBindAttribLocation("TexCoord1", texCoord1Loc);
    geometry->setVertexAttribArray(texCoord1Loc, textureGArray);
    geometry->setVertexAttribBinding(texCoord1Loc, osg::Geometry::BIND_PER_VERTEX);

    program->addBindAttribLocation("TexCoord2", texCoord2Loc);
    geometry->setVertexAttribArray(texCoord2Loc, textureBArray);
    geometry->setVertexAttribBinding(texCoord2Loc, osg::Geometry::BIND_PER_VERTEX);

    // Compute UV scale and offset
    ovrRecti eyeRenderViewport;
    eyeRenderViewport.Pos.x  = x;
    eyeRenderViewport.Pos.y  = y;
    eyeRenderViewport.Size.w = w;
    eyeRenderViewport.Size.h = h;

    ovrSizei renderTargetSize;
    renderTargetSize.w = m_renderTargetSize.w / 2;
    renderTargetSize.h = m_renderTargetSize.h;

    ovrHmd_GetRenderScaleAndOffset(m_eyeRenderDesc[eye].Fov, renderTargetSize, eyeRenderViewport, m_UVScaleOffset[eye]);

    geode->addDrawable(geometry);
    return geode.release();
}
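// Usage sketch: the mesh positions above are already in NDC, so the returned geode can
// be drawn by a post-render camera with identity view and projection matrices. The
// helper name createDistortionCamera is illustrative, not part of the original class.
osg::Camera* createDistortionCamera(osg::Geode* distortionGeode)
{
    osg::Camera* camera = new osg::Camera;
    camera->setReferenceFrame(osg::Transform::ABSOLUTE_RF);   // ignore parent transforms
    camera->setProjectionMatrix(osg::Matrix::identity());     // vertices are NDC already
    camera->setViewMatrix(osg::Matrix::identity());
    camera->setRenderOrder(osg::Camera::POST_RENDER);         // run after the scene passes
    camera->setClearMask(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    camera->addChild(distortionGeode);
    return camera;
}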
//-------------------------------------------------------------------------------------
int Init()
{
    // Initializes LibOVR, and the Rift
    ovr_Initialize();
    if (!HMD)
    {
        HMD = ovrHmd_Create(0);
        if (!HMD)
        {
            MessageBoxA(NULL, "Oculus Rift not detected.", "", MB_OK);
            return(1);
        }
        if (HMD->ProductName[0] == '\0')
            MessageBoxA(NULL, "Rift detected, display not enabled.", "", MB_OK);
    }

    // Setup Window and Graphics - use window frame if relying on Oculus driver
    const int backBufferMultisample = 1;
    bool UseAppWindowFrame = (HMD->HmdCaps & ovrHmdCap_ExtendDesktop) ? false : true;
    HWND window = Util_InitWindowAndGraphics(Recti(HMD->WindowsPos, HMD->Resolution),
                                             FullScreen, backBufferMultisample,
                                             UseAppWindowFrame, &pRender);
    if (!window)
        return 1;
    ovrHmd_AttachToWindow(HMD, window, NULL, NULL);

    // Configure Stereo settings.
    Sizei recommenedTex0Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Left,  HMD->DefaultEyeFov[0], 1.0f);
    Sizei recommenedTex1Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Right, HMD->DefaultEyeFov[1], 1.0f);
    Sizei RenderTargetSize;
    RenderTargetSize.w = recommenedTex0Size.w + recommenedTex1Size.w;
    RenderTargetSize.h = max(recommenedTex0Size.h, recommenedTex1Size.h);

    const int eyeRenderMultisample = 1;
    pRendertargetTexture = pRender->CreateTexture(Texture_RGBA | Texture_RenderTarget | eyeRenderMultisample,
                                                  RenderTargetSize.w, RenderTargetSize.h, NULL);
    // The actual RT size may be different due to HW limits.
    RenderTargetSize.w = pRendertargetTexture->GetWidth();
    RenderTargetSize.h = pRendertargetTexture->GetHeight();

    // Initialize eye rendering information.
    // The viewport sizes are re-computed in case RenderTargetSize changed due to HW limitations.
    ovrFovPort eyeFov[2] = { HMD->DefaultEyeFov[0], HMD->DefaultEyeFov[1] };

    EyeRenderViewport[0].Pos  = Vector2i(0, 0);
    EyeRenderViewport[0].Size = Sizei(RenderTargetSize.w / 2, RenderTargetSize.h);
    EyeRenderViewport[1].Pos  = Vector2i((RenderTargetSize.w + 1) / 2, 0);
    EyeRenderViewport[1].Size = EyeRenderViewport[0].Size;

#if SDK_RENDER
    // Query OGL texture data.
    EyeTexture[0].OGL.Header.API            = ovrRenderAPI_OpenGL;
    EyeTexture[0].OGL.Header.TextureSize    = RenderTargetSize;
    EyeTexture[0].OGL.Header.RenderViewport = EyeRenderViewport[0];
    EyeTexture[0].OGL.TexId                 = pRendertargetTexture->TexId;

    // Right eye uses the same texture, but different rendering viewport.
    EyeTexture[1] = EyeTexture[0];
    EyeTexture[1].OGL.Header.RenderViewport = EyeRenderViewport[1];

    // Configure OpenGL.
    ovrGLConfig oglcfg;
    oglcfg.OGL.Header.API         = ovrRenderAPI_OpenGL;
    oglcfg.OGL.Header.RTSize      = Sizei(HMD->Resolution.w, HMD->Resolution.h);
    oglcfg.OGL.Header.Multisample = backBufferMultisample;
    oglcfg.OGL.Window             = window;
    oglcfg.OGL.DC                 = GetDC(window);

    if (!ovrHmd_ConfigureRendering(HMD, &oglcfg.Config,
                                   ovrDistortionCap_Chromatic | ovrDistortionCap_Vignette |
                                   ovrDistortionCap_TimeWarp | ovrDistortionCap_Overdrive,
                                   eyeFov, EyeRenderDesc))
        return(1);
#else
    // Distortion vertex shader
    const char* vertexShader =
        "#version 110                                                                                \n"
        "uniform vec2 EyeToSourceUVScale;                                                            \n"
        "uniform vec2 EyeToSourceUVOffset;                                                           \n"
        "uniform mat4 EyeRotationStart;                                                              \n"
        "uniform mat4 EyeRotationEnd;                                                                \n"
        "attribute vec2 Position;                                                                    \n"
        "attribute vec2 inTWLF_V;                                                                    \n"
        "attribute vec2 inTexCoord0;                                                                 \n"
        "attribute vec2 inTexCoord1;                                                                 \n"
        "attribute vec2 inTexCoord2;                                                                 \n"
        "varying vec4 oPosition;                                                                     \n"
        "varying vec2 oTexCoord0;                                                                    \n"
        "varying vec2 oTexCoord1;                                                                    \n"
        "varying vec2 oTexCoord2;                                                                    \n"
        "varying float oVignette;                                                                    \n"
        "vec2 TexCoord0 = vec2(inTexCoord0.x, -inTexCoord0.y);                                       \n"
        "vec2 TexCoord1 = vec2(inTexCoord1.x, -inTexCoord1.y);                                       \n"
        "vec2 TexCoord2 = vec2(inTexCoord2.x, -inTexCoord2.y);                                       \n"
        "float timewarpLerpFactor = inTWLF_V.x;                                                      \n"
        "float Vignette = inTWLF_V.y;                                                                \n"
        "vec2 TimewarpTexCoord( in vec2 TexCoord, in mat4 rotMat )                                   \n"
        "{                                                                                           \n"
        // Vertex inputs are in TanEyeAngle space for the R,G,B channels (i.e. after chromatic
        // aberration and distortion). These are now "real world" vectors in direction (x,y,1)
        // relative to the eye of the HMD. Apply the 3x3 timewarp rotation to these vectors.
        "    vec3 transformed = vec3( ( rotMat * vec4( TexCoord.xy , 1.00000, 1.00000) ).xyz );      \n"
        // Project them back onto the Z=1 plane of the rendered images.
        "    vec2 flattened = (transformed.xy / transformed.z );                                     \n"
        // Scale them into ([0,0.5],[0,1]) or ([0.5,0],[0,1]) UV lookup space (depending on eye)
        "    return ((EyeToSourceUVScale * flattened) + EyeToSourceUVOffset);                        \n"
        "}                                                                                           \n"
        "mat4 mat4_lerp( in mat4 x, in mat4 y, in mat4 s )                                           \n"
        "{                                                                                           \n"
        "    return mat4(mix(x[0],y[0],s[0]), mix(x[1],y[1],s[1]), mix(x[2],y[2],s[2]), mix(x[3],y[3],s[3]));\n"
        "}                                                                                           \n"
        "void main()                                                                                 \n"
        "{                                                                                           \n"
        "    mat4 lerpedEyeRot = mat4_lerp( EyeRotationStart, EyeRotationEnd, mat4( timewarpLerpFactor));\n"
        "    oTexCoord0 = TimewarpTexCoord( TexCoord0, lerpedEyeRot);                                \n"
        "    oTexCoord1 = TimewarpTexCoord( TexCoord1, lerpedEyeRot);                                \n"
        "    oTexCoord2 = TimewarpTexCoord( TexCoord2, lerpedEyeRot);                                \n"
        "    oPosition = vec4( Position.xy , 0.500000, 1.00000);                                     \n"
        "    oVignette = Vignette;                                                                   \n"
        "    gl_Position = oPosition;                                                                \n"
        "}";

    // Distortion pixel shader
    const char* pixelShader =
        "#version 110                                            \n"
        "uniform sampler2D Texture0;                             \n"
        "varying vec4 oPosition;                                 \n"
        "varying vec2 oTexCoord0;                                \n"
        "varying vec2 oTexCoord1;                                \n"
        "varying vec2 oTexCoord2;                                \n"
        "varying float oVignette;                                \n"
        "void main()                                             \n"
        "{                                                       \n"
        // 3 samples for fixing chromatic aberrations
        "    float R = texture2D(Texture0, oTexCoord0.xy).r;     \n"
        "    float G = texture2D(Texture0, oTexCoord1.xy).g;     \n"
        "    float B = texture2D(Texture0, oTexCoord2.xy).b;     \n"
        "    gl_FragColor = (oVignette*vec4(R,G,B,1));           \n"
        "}";

    pRender->InitShaders(vertexShader, pixelShader, &Shaders);

    for (int eyeNum = 0; eyeNum < 2; eyeNum++)
    {
        // Allocate mesh vertices, registering with renderer using the OVR vertex format.
        ovrDistortionMesh meshData;
        ovrHmd_CreateDistortionMesh(HMD, (ovrEyeType)eyeNum, eyeFov[eyeNum],
                                    ovrDistortionCap_Chromatic | ovrDistortionCap_TimeWarp, &meshData);
        MeshVBs[eyeNum] = *pRender->CreateBuffer();
        MeshVBs[eyeNum]->Data(Buffer_Vertex, meshData.pVertexData, sizeof(ovrDistortionVertex) * meshData.VertexCount);
        MeshIBs[eyeNum] = *pRender->CreateBuffer();
        MeshIBs[eyeNum]->Data(Buffer_Index, meshData.pIndexData, sizeof(unsigned short) * meshData.IndexCount);
        ovrHmd_DestroyDistortionMesh(&meshData);

        // Create eye render description for use later
        EyeRenderDesc[eyeNum] = ovrHmd_GetRenderDesc(HMD, (ovrEyeType)eyeNum, eyeFov[eyeNum]);

        // Do scale and offset
        ovrHmd_GetRenderScaleAndOffset(eyeFov[eyeNum], RenderTargetSize, EyeRenderViewport[eyeNum], UVScaleOffset[eyeNum]);
    }
#endif

    ovrHmd_SetEnabledCaps(HMD, ovrHmdCap_LowPersistence | ovrHmdCap_DynamicPrediction);

    // Start the sensor which informs of the Rift's pose and motion
    ovrHmd_ConfigureTracking(HMD, ovrTrackingCap_Orientation | ovrTrackingCap_MagYawCorrection |
                                  ovrTrackingCap_Position, 0);

    // This creates lights and models.
    pRoomScene = new Scene;
    PopulateRoomScene(pRoomScene, pRender);

    return (0);
}
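// Per-frame sketch for the manual (non-SDK_RENDER) GL path above: wait until the
// timewarp point, then feed the uniforms the vertex shader declares and draw each
// eye's mesh. Raw GL calls are used here instead of the sample's RenderDevice
// wrappers; 'prog' (the linked distortion program) is an assumption of this sketch.
void DrawDistortionGL(GLuint prog, ovrFrameTiming frameTiming, ovrPosef eyePoses[2])
{
    ovr_WaitTillTime(frameTiming.TimewarpPointSeconds); // minimize timewarp latency

    glUseProgram(prog);
    for (int eyeNum = 0; eyeNum < 2; eyeNum++)
    {
        glUniform2f(glGetUniformLocation(prog, "EyeToSourceUVScale"),
                    UVScaleOffset[eyeNum][0].x, UVScaleOffset[eyeNum][0].y);
        glUniform2f(glGetUniformLocation(prog, "EyeToSourceUVOffset"),
                    UVScaleOffset[eyeNum][1].x, UVScaleOffset[eyeNum][1].y);

        ovrMatrix4f timeWarpMatrices[2];
        ovrHmd_GetEyeTimewarpMatrices(HMD, (ovrEyeType)eyeNum, eyePoses[eyeNum], timeWarpMatrices);
        // ovrMatrix4f is row-major; GL expects column-major, hence GL_TRUE to transpose.
        glUniformMatrix4fv(glGetUniformLocation(prog, "EyeRotationStart"), 1, GL_TRUE,
                           &timeWarpMatrices[0].M[0][0]);
        glUniformMatrix4fv(glGetUniformLocation(prog, "EyeRotationEnd"), 1, GL_TRUE,
                           &timeWarpMatrices[1].M[0][0]);

        // ... bind pRendertargetTexture, this eye's VB/IB and attribute pointers,
        //     then glDrawElements(GL_TRIANGLES, indexCount, GL_UNSIGNED_SHORT, 0); ...
    }
}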
int Init()
{
    ovr_Initialize();
    HMD = ovrHmd_Create(0);
    if (!HMD)
    {
        MessageBox(NULL, "Oculus Rift not detected.", "", MB_OK);
        return 1;
    }
    if (HMD->ProductName[0] == '\0')
    {
        MessageBox(NULL, "Rift detected, display not enabled.", "", MB_OK);
    }

    // Setup Window and Graphics - use window frame if relying on Oculus driver
    const int backBufferMultisample = 1;
    bool UseAppWindowFrame = true;
    HWND window = Util_InitWindowAndGraphics(Recti(HMD->WindowsPos, HMD->Resolution),
                                             FullScreen, backBufferMultisample,
                                             UseAppWindowFrame, &pRender);
    if (!window)
        return 1;
    ovrHmd_AttachToWindow(HMD, window, NULL, NULL);

    Sizei recommenedTex0Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Left,  HMD->DefaultEyeFov[0], 1.0f);
    Sizei recommenedTex1Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Right, HMD->DefaultEyeFov[1], 1.0f);
    Sizei RenderTargetSize;
    RenderTargetSize.w = recommenedTex0Size.w + recommenedTex1Size.w;
    RenderTargetSize.h = max(recommenedTex0Size.h, recommenedTex1Size.h);
    RenderTargetSize.w = HMD->Resolution.w;
    RenderTargetSize.h = HMD->Resolution.h;

    //const int eyeRenderMultisample = 1;
    pRendertargetTexture = pRender->CreateRenderTarget(RenderTargetSize.w / 2, RenderTargetSize.h / 2);
    //pRendertargetTexture = pRender->CreateRenderTarget(512, 512);
    RenderTargetSize.w = pRendertargetTexture->Width;
    RenderTargetSize.h = pRendertargetTexture->Height;

    IDirect3DSurface9* zb = 0;
    pRender->Device->GetDepthStencilSurface(&zb);
    D3DSURFACE_DESC d;
    zb->GetDesc(&d);

    // Initialize eye rendering information.
    // The viewport sizes are re-computed in case RenderTargetSize changed due to HW limitations.
    ovrFovPort eyeFov[2] = { HMD->DefaultEyeFov[0], HMD->DefaultEyeFov[1] };

    EyeRenderViewport[0].Pos  = Vector2i(0, 0);
    EyeRenderViewport[0].Size = Sizei(RenderTargetSize.w / 2, RenderTargetSize.h);
    EyeRenderViewport[1].Pos  = Vector2i((RenderTargetSize.w + 1) / 2, 0);
    EyeRenderViewport[1].Size = EyeRenderViewport[0].Size;

    // ---------------------
    DistortionShaders = pRender->CreateShaderSet();
    DistortionShaders->SetShader(pRender->LoadBuiltinShader(Shader_Vertex, VShader_Distortion));
    DistortionShaders->SetShader(pRender->LoadBuiltinShader(Shader_Pixel, PShader_Distortion));
    DistortionDecl = VertexDecl::GetDecl(VertexType_Distortion);

    for (int eyeNum = 0; eyeNum < 2; ++eyeNum)
    {
        ovrDistortionMesh meshData;
        ovrHmd_CreateDistortionMesh(HMD, (ovrEyeType)eyeNum, eyeFov[eyeNum],
                                    ovrDistortionCap_Chromatic | ovrDistortionCap_TimeWarp, &meshData);
        MeshVBs[eyeNum] = pRender->CreateVertexBuffer();
        MeshVBs[eyeNum]->Data(meshData.pVertexData, sizeof(ovrDistortionVertex) * meshData.VertexCount);
        MeshIBs[eyeNum] = pRender->CreateIndexBuffer();
        MeshIBs[eyeNum]->Data(meshData.pIndexData, sizeof(unsigned short) * meshData.IndexCount);
        MeshVBCnts[eyeNum] = meshData.VertexCount;
        MeshIBCnts[eyeNum] = meshData.IndexCount;
        ovrHmd_DestroyDistortionMesh(&meshData);

        EyeRenderDesc[eyeNum] = ovrHmd_GetRenderDesc(HMD, (ovrEyeType)eyeNum, eyeFov[eyeNum]);
        ovrHmd_GetRenderScaleAndOffset(eyeFov[eyeNum], RenderTargetSize, EyeRenderViewport[eyeNum], UVScaleOffset[eyeNum]);
    }

    ovrHmd_SetEnabledCaps(HMD, ovrHmdCap_LowPersistence | ovrHmdCap_DynamicPrediction);
    ovrHmd_ConfigureTracking(HMD, ovrTrackingCap_Orientation | ovrTrackingCap_MagYawCorrection |
                                  ovrTrackingCap_Position, 0);

    // ---------------------
    pRoomScene = new Scene;
    PopulateRoomScene(pRoomScene, pRender);

    // texture model
    ShaderSet* ss = pRender->CreateShaderSet();
    ss->SetShader(pRender->LoadBuiltinShader(Shader_Vertex, VShader_MVP_UV));
    ss->SetShader(pRender->LoadBuiltinShader(Shader_Pixel, PShader_UV));

    Model<VertexXYZUV>* pModel2 = new Model<VertexXYZUV>();
    pModel2->Decl = VertexDecl::GetDecl(VertexType_XYZUV);
    pModel2->Fill = new ShaderFill(ss);
    //Texture* ttt = new Texture(pRender);
    //ttt->LoadFromFile("face.tga");
    pModel2->Fill->SetTexture(0, pRendertargetTexture);

    // One quad standing upright showing the render target...
    pModel2->AddVertex(VertexXYZUV( 0.5f, -1.0f,  0.0f, 0.0f, 0.0f));
    pModel2->AddVertex(VertexXYZUV( 2.5f, -1.0f,  0.0f, 1.0f, 0.0f));
    pModel2->AddVertex(VertexXYZUV( 0.5f,  1.0f,  0.0f, 0.0f, 1.0f));
    pModel2->AddVertex(VertexXYZUV( 2.5f,  1.0f,  0.0f, 1.0f, 1.0f));
    // ...and one lying flat below it.
    pModel2->AddVertex(VertexXYZUV(-1.0f, -1.5f, -1.0f, 0.0f, 0.0f));
    pModel2->AddVertex(VertexXYZUV( 1.0f, -1.5f, -1.0f, 1.0f, 0.0f));
    pModel2->AddVertex(VertexXYZUV(-1.0f, -1.5f,  1.0f, 0.0f, 1.0f));
    pModel2->AddVertex(VertexXYZUV( 1.0f, -1.5f,  1.0f, 1.0f, 1.0f));
    pModel2->AddTriangle(0, 1, 2);
    pModel2->AddTriangle(2, 1, 3);
    pModel2->AddTriangle(4, 5, 6);
    pModel2->AddTriangle(6, 5, 7);

    pScene = new Scene;
    pScene->World.Add(pModel2);

    return (0);
}
void gkOculus::InitHMD()
{
    ovrHmd_SetEnabledCaps(HMD, ovrHmdCap_LowPersistence);

    // Start the sensor which informs of the Rift's pose and motion
    ovrHmd_ConfigureTracking(HMD, ovrTrackingCap_Orientation | ovrTrackingCap_MagYawCorrection |
                                  ovrTrackingCap_Position, 0);

    OVR::Sizei recommenedTex0Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Left,  HMD->DefaultEyeFov[0], 1.0f);
    OVR::Sizei recommenedTex1Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Right, HMD->DefaultEyeFov[1], 1.0f);

    gEnv->pRenderer->SetOverrideSize(recommenedTex0Size.w, recommenedTex0Size.h, true);

    gkTexturePtr tex = gEnv->pSystem->getTextureMngPtr()->getByName(_T("RT_BACKBUFFER_STEREOOUT"));

    for (int eyenum = 0; eyenum < 2; ++eyenum)
    {
        m_disortation_renderable_eyes[eyenum] = new gkOculusDisortationRenderable(this);
        m_disortation_renderable_eyes[eyenum]->m_eye_index = eyenum;
        m_disortation_renderable_eyes[eyenum]->HMD = HMD;

        gkNameValuePairList createlist;
        createlist[_T("file")] = _T("engine/assets/meshs/oculus_disort.mtl");

        TCHAR buffer[255];
        _stprintf(buffer, _T("$OculusDisortation_%d"), eyenum);
        gkMaterialPtr mat = gEnv->pSystem->getMaterialMngPtr()->create(buffer, _T("stereo"), &createlist);
        mat->load();
        mat->setTexture(tex, 0);
        m_disortation_renderable_eyes[eyenum]->m_material = mat;

        _stprintf(buffer, _T("$OculusDisortationMesh_%d"), eyenum);
        createlist[_T("type")] = _T("Pt2T2T2T2T2");
        gkMeshPtr mesh = gEnv->pSystem->getMeshMngPtr()->create(buffer, _T("stereo"), &createlist);
        mesh->load();
        m_disortation_renderable_eyes[eyenum]->m_mesh = mesh;

        ovrFovPort eyeFov[2] = { HMD->DefaultEyeFov[0], HMD->DefaultEyeFov[1] };

        float fovy = atan(eyeFov[0].UpTan) * 2;
        gEnv->p3DEngine->getMainCamera()->setFOVy(fovy);

        ovrDistortionMesh meshData;
        ovrHmd_CreateDistortionMesh(HMD, (ovrEyeType)eyenum, eyeFov[eyenum],
                                    ovrDistortionCap_Chromatic | ovrDistortionCap_TimeWarp, &meshData);

        mesh->getVertexBuffer()->resizeDiscard(meshData.VertexCount);
        mesh->getIndexBuffer()->resizeDiscard(meshData.IndexCount);

        memcpy(mesh->getVertexBuffer()->data, meshData.pVertexData, meshData.VertexCount * sizeof(ovrDistortionVertex));
        memcpy(mesh->getIndexBuffer()->data, meshData.pIndexData, meshData.IndexCount * sizeof(unsigned short));

        ovrHmd_DestroyDistortionMesh(&meshData);

        // Create eye render description for use later
        EyeRenderDesc[eyenum] = ovrHmd_GetRenderDesc(HMD, (ovrEyeType)eyenum, eyeFov[eyenum]);

        // Do scale and offset
        OVR::Sizei RenderTargetSize;
        RenderTargetSize.w = recommenedTex0Size.w;
        RenderTargetSize.h = recommenedTex1Size.h;

        ovrRecti EyeRenderViewport[2];
        EyeRenderViewport[0].Pos  = OVR::Vector2i(0, 0);
        EyeRenderViewport[0].Size = OVR::Sizei(RenderTargetSize.w / 2, RenderTargetSize.h);
        EyeRenderViewport[1].Pos  = OVR::Vector2i((RenderTargetSize.w + 1) / 2, 0);
        EyeRenderViewport[1].Size = EyeRenderViewport[0].Size;

        ovrHmd_GetRenderScaleAndOffset(eyeFov[eyenum], RenderTargetSize, EyeRenderViewport[eyenum],
                                       m_disortation_renderable_eyes[eyenum]->UVScaleOffset[eyenum]);
    }
}
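// Note on the setFOVy call above: atan(UpTan) * 2 only equals the full vertical FOV
// when the eye frustum is vertically symmetric (UpTan == DownTan). A sketch of the
// general form, with an illustrative helper name:
static float FullVerticalFovRadians(const ovrFovPort& fov)
{
    // Asymmetric frusta need both half-angles summed, not one doubled.
    return atanf(fov.UpTan) + atanf(fov.DownTan);
}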