Example #1
void OculusBaseDisplayPlugin::activate() {
    _session = acquireOculusSession();

    _hmdDesc = ovr_GetHmdDesc(_session);

    _ipd = ovr_GetFloat(_session, OVR_KEY_IPD, _ipd);

    glm::uvec2 eyeSizes[2];
    _viewScaleDesc.HmdSpaceToWorldScaleInMeters = 1.0f;

    ovr_for_each_eye([&](ovrEyeType eye) {
        _eyeFovs[eye] = _hmdDesc.DefaultEyeFov[eye];
        ovrEyeRenderDesc& erd = _eyeRenderDescs[eye] = ovr_GetRenderDesc(_session, eye, _eyeFovs[eye]);
        ovrMatrix4f ovrPerspectiveProjection =
            ovrMatrix4f_Projection(erd.Fov, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, ovrProjection_RightHanded);
        _eyeProjections[eye] = toGlm(ovrPerspectiveProjection);
        _eyeOffsets[eye] = glm::translate(mat4(), toGlm(erd.HmdToEyeViewOffset));
        eyeSizes[eye] = toGlm(ovr_GetFovTextureSize(_session, eye, erd.Fov, 1.0f));
        _viewScaleDesc.HmdToEyeViewOffset[eye] = erd.HmdToEyeViewOffset;
    });

    auto combinedFov = _eyeFovs[0];
    combinedFov.LeftTan = combinedFov.RightTan = std::max(combinedFov.LeftTan, combinedFov.RightTan);
    _cullingProjection = toGlm(ovrMatrix4f_Projection(combinedFov, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, ovrProjection_RightHanded));

    _renderTargetSize = uvec2(
        eyeSizes[0].x + eyeSizes[1].x,
        std::max(eyeSizes[0].y, eyeSizes[1].y));

    if (!OVR_SUCCESS(ovr_ConfigureTracking(_session,
        ovrTrackingCap_Orientation | ovrTrackingCap_Position | ovrTrackingCap_MagYawCorrection, 0))) {
        qFatal("Could not attach to sensor device");
    }

    // Parent class relies on our _session initialization, so it must come after that.
    memset(&_sceneLayer, 0, sizeof(ovrLayerEyeFov));
    _sceneLayer.Header.Type = ovrLayerType_EyeFov;
    _sceneLayer.Header.Flags = ovrLayerFlag_TextureOriginAtBottomLeft;
    ovr_for_each_eye([&](ovrEyeType eye) {
        ovrFovPort & fov = _sceneLayer.Fov[eye] = _eyeRenderDescs[eye].Fov;
        ovrSizei & size = _sceneLayer.Viewport[eye].Size = ovr_GetFovTextureSize(_session, eye, fov, 1.0f);
        _sceneLayer.Viewport[eye].Pos = { eye == ovrEye_Left ? 0 : size.w, 0 };
    });

    // This must come after the initialization, so that the values calculated 
    // above are available during the customizeContext call (when not running
    // in threaded present mode)
    HmdDisplayPlugin::activate();
}
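
The toGlm() helpers used above come from the host application, not the OVR SDK. ovrMatrix4f stores its elements row-major while glm::mat4 is column-major, so the matrix overload must transpose. A minimal sketch of what these helpers typically look like (names matched to the example, signatures assumed):

#include <OVR_CAPI.h>
#include <glm/glm.hpp>
#include <glm/gtc/type_ptr.hpp>

// Assumed conversion helpers matching the calls in the example above.
inline glm::mat4 toGlm(const ovrMatrix4f& m) {
    // glm::make_mat4 reads column-major data, so transpose OVR's row-major M.
    return glm::transpose(glm::make_mat4(&m.M[0][0]));
}

inline glm::vec3 toGlm(const ovrVector3f& v) {
    return glm::vec3(v.x, v.y, v.z);
}

inline glm::uvec2 toGlm(const ovrSizei& s) {
    return glm::uvec2(s.w, s.h);
}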
Example #2
void gkOculus::Flush()
{
	if(HMD)
	{
		// OVERRIDE MAIN CAMERA

		ICamera* cam = gEnv->p3DEngine->getMainCamera();

		static ovrTrackingState HmdState;

		ovrVector3f hmdToEyeViewOffset[2] = { EyeRenderDesc[0].HmdToEyeViewOffset, EyeRenderDesc[1].HmdToEyeViewOffset };
		ovrHmd_GetEyePoses(HMD, 0, hmdToEyeViewOffset, eyeRenderPose, &HmdState);

		Quat camOrientation;
		camOrientation.v.x = eyeRenderPose->Orientation.x;
		camOrientation.v.y = -eyeRenderPose->Orientation.z;
		camOrientation.v.z = eyeRenderPose->Orientation.y;
		camOrientation.w = eyeRenderPose->Orientation.w;

		cam->setAdditionalOrientation( camOrientation );

		Quat dRot = cam->getDerivedOrientation();

		Vec3 eyeOffsetL = Vec3(eyeRenderPose[0].Position.x, -eyeRenderPose[0].Position.z, eyeRenderPose[0].Position.y);
		Vec3 eyeOffsetR = Vec3(eyeRenderPose[1].Position.x, -eyeRenderPose[1].Position.z, eyeRenderPose[1].Position.y);

		cam->setStereoOffset( dRot * eyeOffsetL, dRot * eyeOffsetR );
		
		Matrix44 leftMat,rightMat;
		
		OVR::Matrix4f projLeft;
		OVR::Matrix4f projRight;

		projLeft = ovrMatrix4f_Projection(EyeRenderDesc[0].Fov, 0.01f, 10000.0f, true);
		projRight = ovrMatrix4f_Projection(EyeRenderDesc[1].Fov, 0.01f, 10000.0f, true);

		leftMat = *((Matrix44*)(&projLeft));
		rightMat = *((Matrix44*)(&projRight));

		leftMat.Transpose();
		rightMat.Transpose();

		cam->setStereoProjMatrix( leftMat, rightMat );

		IRenderSequence* rs = gEnv->pRenderer->RT_GetRenderSequence();
		if (rs)
		{
			rs->addToRenderSequence( m_disortation_renderable_eyes[0], RENDER_LAYER_STEREO_DEVICE );
			rs->addToRenderSequence( m_disortation_renderable_eyes[1], RENDER_LAYER_STEREO_DEVICE );
		}
	}

}
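
The component shuffle above, (x, y, z) -> (x, -z, y), converts poses from the Rift SDK's right-handed, Y-up frame into the engine's Z-up frame. A sketch of that remap as standalone helpers; Vec3 and Quat are the engine's math types from the snippet, not SDK types:

// Assumed axis-remap helpers: OVR right-handed Y-up -> engine Z-up.
inline Vec3 ovrToEngine(const ovrVector3f& p)
{
	return Vec3(p.x, -p.z, p.y);
}

inline Quat ovrToEngine(const ovrQuatf& q)
{
	Quat out;
	out.v.x = q.x;
	out.v.y = -q.z;
	out.v.z = q.y;
	out.w = q.w;
	return out;
}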
Example #3
// Generates an appropriate stereo ortho projection matrix.
void HSWDisplay::GetOrthoProjection(const HMDRenderState& RenderState, Matrix4f OrthoProjection[2])
{
    Matrix4f perspectiveProjection[2];
    perspectiveProjection[0] = ovrMatrix4f_Projection(RenderState.EyeRenderDesc[0].Fov, 0.01f, 10000.f, true);
    perspectiveProjection[1] = ovrMatrix4f_Projection(RenderState.EyeRenderDesc[1].Fov, 0.01f, 10000.f, true);

    const float    orthoDistance = HSWDISPLAY_DISTANCE; // This is the distance in meters from the camera (viewer) at which we place the ortho plane.
    const Vector2f orthoScale0   = Vector2f(1.f) / Vector2f(RenderState.EyeRenderDesc[0].PixelsPerTanAngleAtCenter);
    const Vector2f orthoScale1   = Vector2f(1.f) / Vector2f(RenderState.EyeRenderDesc[1].PixelsPerTanAngleAtCenter);
    
    OrthoProjection[0] = ovrMatrix4f_OrthoSubProjection(perspectiveProjection[0], orthoScale0, orthoDistance, RenderState.EyeRenderDesc[0].ViewAdjust.x);
    OrthoProjection[1] = ovrMatrix4f_OrthoSubProjection(perspectiveProjection[1], orthoScale1, orthoDistance, RenderState.EyeRenderDesc[1].ViewAdjust.x);
}
Example #4
	DLL_EXPORT_API void xnOvrGetFrameProperties(xnOvrSession* session, xnOvrFrameProperties* properties)
	{
		auto leftProj = ovrMatrix4f_Projection(session->Layer.Fov[0], properties->Near, properties->Far, 0);
		auto rightProj = ovrMatrix4f_Projection(session->Layer.Fov[1], properties->Near, properties->Far, 0);

		memcpy(properties->ProjLeft, &leftProj, sizeof(float) * 16);
		memcpy(properties->PosLeft, &session->Layer.RenderPose[0].Position, sizeof(float) * 3);
		memcpy(properties->RotLeft, &session->Layer.RenderPose[0].Orientation, sizeof(float) * 4);
		
		memcpy(properties->ProjRight, &rightProj, sizeof(float) * 16);
		memcpy(properties->PosRight, &session->Layer.RenderPose[1].Position, sizeof(float) * 3);
		memcpy(properties->RotRight, &session->Layer.RenderPose[1].Orientation, sizeof(float) * 4);
	}
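
The 0 passed as the last argument here is ovrProjection_None. That parameter changed across SDK versions: 0.x samples pass a bool rightHanded (the bare true / 1 seen in several other examples), while 1.x passes a bitmask of ovrProjection_* flags and produces a right-handed matrix by default. A small sketch for a 1.x OpenGL target:

#include <OVR_CAPI.h>

// Sketch: 1.x-style projection for OpenGL. ovrProjection_ClipRangeOpenGL
// selects GL's [-1, 1] clip-space depth range instead of D3D's [0, 1].
ovrMatrix4f makeGlProjection(ovrFovPort fov, float zNear, float zFar)
{
	return ovrMatrix4f_Projection(fov, zNear, zFar, ovrProjection_ClipRangeOpenGL);
}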
Example #5
void VR_SetMatrices() {
	vec3_t temp, orientation, position;
	ovrMatrix4f projection;

	// Calculate HMD projection matrix and view offset position
	projection = TransposeMatrix(ovrMatrix4f_Projection(hmd->DefaultEyeFov[current_eye->index], 4, gl_farclip.value, ovrProjection_RightHanded));
	
	// We need to scale the view offset position to quake units and rotate it by the current input angles (viewangle - eye orientation)
	QuatToYawPitchRoll(current_eye->pose.Orientation, orientation);
	temp[0] = -current_eye->pose.Position.z * meters_to_units;
	temp[1] = -current_eye->pose.Position.x * meters_to_units;
	temp[2] = current_eye->pose.Position.y * meters_to_units;
	Vec3RotateZ(temp, (r_refdef.viewangles[YAW]-orientation[YAW])*M_PI_DIV_180, position);


	// Set OpenGL projection and view matrices
	glMatrixMode(GL_PROJECTION);
	glLoadMatrixf((GLfloat*)projection.M);

	glMatrixMode(GL_MODELVIEW);
	glLoadIdentity ();
	
	glRotatef (-90,  1, 0, 0); // put Z going up
	glRotatef (90,  0, 0, 1); // put Z going up
		
	glRotatef (-r_refdef.viewangles[PITCH],  0, 1, 0);
	glRotatef (-r_refdef.viewangles[ROLL],  1, 0, 0);
	glRotatef (-r_refdef.viewangles[YAW],  0, 0, 1);
	
	glTranslatef (-r_refdef.vieworg[0] -position[0],  -r_refdef.vieworg[1]-position[1],  -r_refdef.vieworg[2]-position[2]);
}
  virtual void initGl() {
    CubeScene::initGl();

    ovrRenderAPIConfig cfg;
    memset(&cfg, 0, sizeof(cfg));
    cfg.Header.API = ovrRenderAPI_OpenGL;
    cfg.Header.RTSize = hmd->Resolution;
    cfg.Header.Multisample = 1;

    int distortionCaps = ovrDistortionCap_Chromatic;
    ovrEyeRenderDesc eyeRenderDescs[2];
    int configResult = ovrHmd_ConfigureRendering(hmd, &cfg,
      distortionCaps, hmd->DefaultEyeFov, eyeRenderDescs);

    for_each_eye([&](ovrEyeType eye){
      PerEyeArg & eyeArg = eyes[eye];
      ovrFovPort fov = hmd->DefaultEyeFov[eye];
      ovrTextureHeader & textureHeader = textures[eye].Header;
      ovrSizei texSize = ovrHmd_GetFovTextureSize(hmd, eye, fov, 1.0f);
      textureHeader.API = ovrRenderAPI_OpenGL;
      textureHeader.TextureSize = texSize;
      textureHeader.RenderViewport.Size = texSize;
      textureHeader.RenderViewport.Pos.x = 0;
      textureHeader.RenderViewport.Pos.y = 0;
      eyeArg.frameBuffer.init(Rift::fromOvr(texSize));
      ((ovrGLTexture&)textures[eye]).OGL.TexId = eyeArg.frameBuffer.color->texture;

      ovrVector3f offset = eyeRenderDescs[eye].ViewAdjust;
      ovrMatrix4f projection = ovrMatrix4f_Projection(fov, 0.01f, 100, true);

      eyeArg.projection = Rift::fromOvr(projection);
      eyeArg.modelviewOffset = glm::translate(glm::mat4(), Rift::fromOvr(offset));
    });
  }
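Example #7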
  void initGl() {
    RiftGlfwApp::initGl();
    for_each_eye([&](ovrEyeType eye){
      EyeArg & eyeArg = eyeArgs[eye];
      ovrFovPort fov = hmdDesc.DefaultEyeFov[eye];
      ovrEyeRenderDesc renderDesc = ovrHmd_GetRenderDesc(hmd, eye, fov);

      // Set up the per-eye projection matrix
      eyeArg.projection = Rift::fromOvr(
        ovrMatrix4f_Projection(fov, 0.01, 100000, true));
      eyeArg.viewOffset = glm::translate(glm::mat4(), Rift::fromOvr(renderDesc.ViewAdjust));
      ovrRecti texRect;
      texRect.Size = ovrHmd_GetFovTextureSize(hmd, eye,
        hmdDesc.DefaultEyeFov[eye], 1.0f);
      texRect.Pos.x = texRect.Pos.y = 0;

      eyeArg.frameBuffer.init(Rift::fromOvr(texRect.Size));

      ovrVector2f scaleAndOffset[2];
      ovrHmd_GetRenderScaleAndOffset(fov, texRect.Size,
        texRect, scaleAndOffset);
      eyeArg.scale = Rift::fromOvr(scaleAndOffset[0]);
      eyeArg.offset = Rift::fromOvr(scaleAndOffset[1]);

      ovrHmd_CreateDistortionMesh(hmd, eye, fov, 0, &eyeArg.mesh);

      eyeArg.meshVao = gl::VertexArrayPtr(new gl::VertexArray());
      eyeArg.meshVao->bind();

      eyeArg.meshIbo = gl::IndexBufferPtr(new gl::IndexBuffer());
      eyeArg.meshIbo->bind();
      size_t bufferSize = eyeArg.mesh.IndexCount * sizeof(unsigned short);
      eyeArg.meshIbo->load(bufferSize, eyeArg.mesh.pIndexData);

      eyeArg.meshVbo = gl::VertexBufferPtr(new gl::VertexBuffer());
      eyeArg.meshVbo->bind();
      bufferSize = eyeArg.mesh.VertexCount * sizeof(ovrDistortionVertex);
      eyeArg.meshVbo->load(bufferSize, eyeArg.mesh.pVertexData);

      size_t stride = sizeof(ovrDistortionVertex);
      size_t offset = offsetof(ovrDistortionVertex, Pos);
      glEnableVertexAttribArray(gl::Attribute::Position);
      glVertexAttribPointer(gl::Attribute::Position, 2, GL_FLOAT, GL_FALSE,
        stride, (void*)offset);

      offset = offsetof(ovrDistortionVertex, TexG);
      glEnableVertexAttribArray(gl::Attribute::TexCoord0);
      glVertexAttribPointer(gl::Attribute::TexCoord0, 2, GL_FLOAT, GL_FALSE,
        stride, (void*)offset);

      gl::VertexArray::unbind();
      gl::Program::clear();
    });

    distortionProgram = GlUtils::getProgram(
      Resource::SHADERS_DISTORTION_VS,
      Resource::SHADERS_DISTORTION_FS
    );
  }
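Example #8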
bool OculusBaseDisplayPlugin::internalActivate() {
    _session = acquireOculusSession();
    if (!_session) {
        return false;
    }

    _hmdDesc = ovr_GetHmdDesc(_session);

    glm::uvec2 eyeSizes[2];
    _viewScaleDesc.HmdSpaceToWorldScaleInMeters = 1.0f;

    _ipd = 0;
    ovr_for_each_eye([&](ovrEyeType eye) {
        _eyeFovs[eye] = _hmdDesc.DefaultEyeFov[eye];
        ovrEyeRenderDesc& erd = _eyeRenderDescs[eye] = ovr_GetRenderDesc(_session, eye, _eyeFovs[eye]);
        ovrMatrix4f ovrPerspectiveProjection =
            ovrMatrix4f_Projection(erd.Fov, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, ovrProjection_ClipRangeOpenGL);
        _eyeProjections[eye] = toGlm(ovrPerspectiveProjection);
        _eyeOffsets[eye] = glm::translate(mat4(), toGlm(erd.HmdToEyeOffset));
        eyeSizes[eye] = toGlm(ovr_GetFovTextureSize(_session, eye, erd.Fov, 1.0f));
        _viewScaleDesc.HmdToEyeOffset[eye] = erd.HmdToEyeOffset;
        _ipd += glm::abs(glm::length(toGlm(erd.HmdToEyeOffset)));
    });

    auto combinedFov = _eyeFovs[0];
    combinedFov.LeftTan = combinedFov.RightTan = std::max(combinedFov.LeftTan, combinedFov.RightTan);
    _cullingProjection = toGlm(ovrMatrix4f_Projection(combinedFov, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, ovrProjection_ClipRangeOpenGL));

    _renderTargetSize = uvec2(
        eyeSizes[0].x + eyeSizes[1].x,
        std::max(eyeSizes[0].y, eyeSizes[1].y));

    memset(&_sceneLayer, 0, sizeof(ovrLayerEyeFov));
    _sceneLayer.Header.Type = ovrLayerType_EyeFov;
    _sceneLayer.Header.Flags = ovrLayerFlag_TextureOriginAtBottomLeft;
    ovr_for_each_eye([&](ovrEyeType eye) {
        ovrFovPort & fov = _sceneLayer.Fov[eye] = _eyeRenderDescs[eye].Fov;
        ovrSizei & size = _sceneLayer.Viewport[eye].Size = ovr_GetFovTextureSize(_session, eye, fov, 1.0f);
        _sceneLayer.Viewport[eye].Pos = { eye == ovrEye_Left ? 0 : size.w, 0 };
    });

    // This must come after the initialization, so that the values calculated 
    // above are available during the customizeContext call (when not running
    // in threaded present mode)
    return Parent::internalActivate();
}
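
The combinedFov trick in Examples 1 and 8 widens one eye's FOV into a symmetric frustum that covers the asymmetric per-eye frusta, so a single CPU culling pass can serve both eyes. A slightly generalized sketch that also folds in the right eye and the vertical extents:

#include <algorithm>
#include <OVR_CAPI.h>

// Assumed helper: a symmetric FOV covering both eye frusta, for culling only.
ovrFovPort combinedCullingFov(const ovrFovPort eyeFovs[2])
{
    ovrFovPort combined = eyeFovs[0];
    combined.LeftTan = std::max(
        std::max(eyeFovs[0].LeftTan, eyeFovs[0].RightTan),
        std::max(eyeFovs[1].LeftTan, eyeFovs[1].RightTan));
    combined.RightTan = combined.LeftTan;
    combined.UpTan   = std::max(eyeFovs[0].UpTan,   eyeFovs[1].UpTan);
    combined.DownTan = std::max(eyeFovs[0].DownTan, eyeFovs[1].DownTan);
    return combined;
}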
Example #9
ngl::Mat4 OculusInterface::getPerspectiveMatrix(int _eye)
{
	ovrMatrix4f proj;
	proj = ovrMatrix4f_Projection(m_hmd->DefaultEyeFov[_eye], 0.5, 2500.0, 1);
	ngl::Mat4 mat;
	memcpy(&mat.m_openGL[0],&proj.M,sizeof(float)*16 );
	mat.transpose();
	return mat;
}
Example #10
  void initGl() {
    GlfwApp::initGl();

    ovrFovPort eyeFovPorts[2];
    for_each_eye([&](ovrEyeType eye){
      ovrTextureHeader & eyeTextureHeader = textures[eye].Header;
      eyeFovPorts[eye] = hmd->DefaultEyeFov[eye];
      eyeTextureHeader.TextureSize = ovrHmd_GetFovTextureSize(hmd, eye, hmd->DefaultEyeFov[eye], 1.0f);
      eyeTextureHeader.RenderViewport.Size = eyeTextureHeader.TextureSize;
      eyeTextureHeader.RenderViewport.Pos.x = 0;
      eyeTextureHeader.RenderViewport.Pos.y = 0;
      eyeTextureHeader.API = ovrRenderAPI_OpenGL;

      eyeFramebuffers[eye] = FramebufferWrapperPtr(new FramebufferWrapper());
      eyeFramebuffers[eye]->init(ovr::toGlm(eyeTextureHeader.TextureSize));
      ((ovrGLTexture&)textures[eye]).OGL.TexId = oglplus::GetName(eyeFramebuffers[eye]->color);
    });

    ovrGLConfig cfg;
    memset(&cfg, 0, sizeof(ovrGLConfig));
    cfg.OGL.Header.API = ovrRenderAPI_OpenGL;
    cfg.OGL.Header.Multisample = 1;

    /**
     * In the Direct3D examples in the Oculus SDK, they make the point that the
     * onscreen window size does not need to match the Rift resolution.  However
     * this doesn't currently work in OpenGL, so we have to create the window at
     * the full resolution of the Rift and ensure that we use the same
     * size here when setting the BackBufferSize.
     */
    cfg.OGL.Header.BackBufferSize = ovr::fromGlm(getSize());

    ON_LINUX([&]{
      cfg.OGL.Disp = (Display*)glfw::getNativeDisplay(getWindow());
    });

    int distortionCaps = 0
        | ovrDistortionCap_TimeWarp
        | ovrDistortionCap_Vignette;

    ON_LINUX([&]{
      distortionCaps |= ovrDistortionCap_LinuxDevFullscreen;
    });

    ovrEyeRenderDesc              eyeRenderDescs[2];
    int configResult = ovrHmd_ConfigureRendering(hmd, &cfg.Config,
        distortionCaps, eyeFovPorts, eyeRenderDescs);
    if (!configResult) {
      FAIL("Unable to configure SDK based distortion rendering");
    }

    for_each_eye([&](ovrEyeType eye){
      eyeOffsets[eye] = eyeRenderDescs[eye].HmdToEyeViewOffset;
      eyeProjections[eye] = ovr::toGlm(
          ovrMatrix4f_Projection(eyeFovPorts[eye], 0.01f, 1000.0f, true));
    });
  }
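Example #11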
const OVR::Matrix4f OculusVR::OnEyeRender(int eyeIndex)
{
    m_renderBuffer->OnRender(m_eyeLayer.Viewport[eyeIndex]);

    m_projectionMatrix[eyeIndex] = OVR::Matrix4f(ovrMatrix4f_Projection(m_eyeRenderDesc[eyeIndex].Fov, 0.01f, 10000.0f, ovrProjection_None));
    m_eyeOrientation[eyeIndex]   = OVR::Matrix4f(OVR::Quatf(m_eyeRenderPose[eyeIndex].Orientation).Inverted());
    m_eyePose[eyeIndex]          = OVR::Matrix4f::Translation(-OVR::Vector3f(m_eyeRenderPose[eyeIndex].Position));

    return m_projectionMatrix[eyeIndex] * m_eyeOrientation[eyeIndex] * m_eyePose[eyeIndex];
}
Example #12
void GuardianSystemDemo::Render()
{
    // Get current eye pose for rendering
    double eyePoseTime = 0;
    ovrPosef eyePose[ovrEye_Count] = {};
    ovr_GetEyePoses(mSession, mFrameIndex, ovrTrue, mHmdToEyeOffset, eyePose, &eyePoseTime);

    // Render each eye
    for (int i = 0; i < ovrEye_Count; ++i) {
        int renderTargetIndex = 0;
        ovr_GetTextureSwapChainCurrentIndex(mSession, mTextureChain[i], &renderTargetIndex);
        ID3D11RenderTargetView* renderTargetView = mEyeRenderTargets[i][renderTargetIndex];
        ID3D11DepthStencilView* depthTargetView = mEyeDepthTarget[i];

        // Clear and set render/depth target and viewport
        DIRECTX.SetAndClearRenderTarget(renderTargetView, depthTargetView, 0.2f, 0.2f, 0.2f, 1.0f);
        DIRECTX.SetViewport((float)mEyeRenderViewport[i].Pos.x, (float)mEyeRenderViewport[i].Pos.y, 
            (float)mEyeRenderViewport[i].Size.w, (float)mEyeRenderViewport[i].Size.h);

        // Eye
        XMVECTOR eyeRot = XMVectorSet(eyePose[i].Orientation.x, eyePose[i].Orientation.y, 
            eyePose[i].Orientation.z, eyePose[i].Orientation.w);
        XMVECTOR eyePos = XMVectorSet(eyePose[i].Position.x, eyePose[i].Position.y, eyePose[i].Position.z, 0);
        XMVECTOR eyeForward = XMVector3Rotate(XMVectorSet(0, 0, -1, 0), eyeRot);

        // Matrices
        XMMATRIX viewMat = XMMatrixLookAtRH(eyePos, XMVectorAdd(eyePos, eyeForward), 
            XMVector3Rotate(XMVectorSet(0.0f, 1.0f, 0.0f, 0.0f), eyeRot));
        ovrMatrix4f proj = ovrMatrix4f_Projection(mEyeRenderLayer.Fov[i], 0.001f, 1000.0f, ovrProjection_None);
        XMMATRIX projMat = XMMatrixTranspose(XMMATRIX(&proj.M[0][0]));
        XMMATRIX viewProjMat = XMMatrixMultiply(viewMat, projMat);

        // Render and commit to swap chain
        mDynamicScene.Render(&viewProjMat, 1.0f, 1.0f, 1.0f, 1.0f, true);
        ovr_CommitTextureSwapChain(mSession, mTextureChain[i]);

        // Update eye layer
        mEyeRenderLayer.ColorTexture[i] = mTextureChain[i];
        mEyeRenderLayer.RenderPose[i] = eyePose[i];
        mEyeRenderLayer.SensorSampleTime = eyePoseTime;
    }

    // Submit frames
    ovrLayerHeader* layers = &mEyeRenderLayer.Header;
    ovrResult result = ovr_SubmitFrame(mSession, mFrameIndex++, nullptr, &layers, 1);
    if (!OVR_SUCCESS(result)) {
        printf("ovr_SubmitFrame failed"); exit(-1);
    }
}
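Example #13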
void RiftRenderingApp::initializeRiftRendering() {
    ovrGLConfig cfg;
    memset(&cfg, 0, sizeof(cfg));
    cfg.OGL.Header.API = ovrRenderAPI_OpenGL;
    cfg.OGL.Header.BackBufferSize = ovr::fromGlm(hmdNativeResolution);
    cfg.OGL.Header.Multisample = 1;

    ON_WINDOWS([&]{
      cfg.OGL.Window = (HWND)getNativeWindow();
    });

    int distortionCaps = 0
      | ovrDistortionCap_Vignette
      | ovrDistortionCap_Overdrive
      | ovrDistortionCap_TimeWarp;

    ON_LINUX([&]{
      distortionCaps |= ovrDistortionCap_LinuxDevFullscreen;
    });

    ovrEyeRenderDesc eyeRenderDescs[2];
    int configResult = ovrHmd_ConfigureRendering(hmd, &cfg.Config,
      distortionCaps, hmd->MaxEyeFov, eyeRenderDescs);
    assert(configResult);

    for_each_eye([&](ovrEyeType eye){
      const ovrEyeRenderDesc & erd = eyeRenderDescs[eye];
      ovrMatrix4f ovrPerspectiveProjection = ovrMatrix4f_Projection(erd.Fov, 0.01f, 100000.0f, true);
      projections[eye] = ovr::toGlm(ovrPerspectiveProjection);
      eyeOffsets[eye] = erd.HmdToEyeViewOffset;
    });

    // Allocate the frameBuffer that will hold the scene, and then be
    // re-rendered to the screen with distortion
    glm::uvec2 frameBufferSize = ovr::toGlm(eyeTextures[0].Header.TextureSize);
    for_each_eye([&](ovrEyeType eye) {
      eyeFramebuffers[eye] = FramebufferWrapperPtr(new FramebufferWrapper());
      eyeFramebuffers[eye]->init(frameBufferSize);
      ((ovrGLTexture&)(eyeTextures[eye])).OGL.TexId =
        oglplus::GetName(eyeFramebuffers[eye]->color);
    });
  }
Example #14
void VR::draw(const std::function<void()>& drawer) const
{
    // draw onto our framebuffer, clearing it first
    glBindFramebuffer(GL_FRAMEBUFFER, m_fbo);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    // render per-eye
    ovrPosef pose[2];
    auto eye_trans = eye_transforms(pose);
    ovrHmd_BeginFrame(m_hmd, 0);
    for (auto i = 0; i < 2; ++i)
    {
        auto eye = m_hmd->EyeRenderOrder[i];

        // viewport transform: select left/right based on eye
        glViewport(eye == ovrEye_Left ? 0 : m_fb_width / 2, 0,
            m_fb_width / 2, m_fb_height);

        // projection transform: just use OVR's eye matrix
        auto proj = ovrMatrix4f_Projection(m_hmd->DefaultEyeFov[eye],
                                           0.5, 500.0, 1);
        glMatrixMode(GL_PROJECTION);
        glLoadTransposeMatrixf(&proj.M[0][0]); // GL uses column-major

        // view transform: use inverse eye transform
        auto view = inverse(eye_trans[eye]);
        glMatrixMode(GL_MODELVIEW);
        glLoadIdentity();
        glLoadMatrixf(value_ptr(view));

        // draw!
        drawer();
    }

    // draw onto display
    glBindFramebuffer(GL_FRAMEBUFFER, 0);
    ovrHmd_EndFrame(m_hmd, pose, &m_ovr_tex[0].Texture);

    glUseProgram(0); // OVR doesn't restore shader program when done
}
Example #15
	bool VRImplOVR::updateTracking(HMD& _hmd)
	{
		if (NULL == m_session)
		{
			return false;
		}

		ovr_GetEyePoses(m_session, 0, ovrTrue, m_viewScale.HmdToEyeOffset, m_renderLayer.RenderPose, &m_renderLayer.SensorSampleTime);

		for (int eye = 0; eye < 2; ++eye)
		{
			const ovrPosef& pose = m_renderLayer.RenderPose[eye];
			HMD::Eye& hmdEye = _hmd.eye[eye];

			hmdEye.rotation[0] = pose.Orientation.x;
			hmdEye.rotation[1] = pose.Orientation.y;
			hmdEye.rotation[2] = pose.Orientation.z;
			hmdEye.rotation[3] = pose.Orientation.w;
			hmdEye.translation[0] = pose.Position.x;
			hmdEye.translation[1] = pose.Position.y;
			hmdEye.translation[2] = pose.Position.z;
			hmdEye.viewOffset[0] = -m_viewScale.HmdToEyeOffset[eye].x;
			hmdEye.viewOffset[1] = -m_viewScale.HmdToEyeOffset[eye].y;
			hmdEye.viewOffset[2] = -m_viewScale.HmdToEyeOffset[eye].z;

			hmdEye.pixelsPerTanAngle[0] = m_pixelsPerTanAngleAtCenter[eye].x;
			hmdEye.pixelsPerTanAngle[1] = m_pixelsPerTanAngleAtCenter[eye].y;

			ovrMatrix4f projection = ovrMatrix4f_Projection(m_eyeFov[eye], 0.1f, 1000.0f, ovrProjection_LeftHanded);
			for (uint32_t ii = 0; ii < 4; ++ii)
			{
				for (uint32_t jj = 0; jj < 4; ++jj)
				{
					hmdEye.projection[4*ii + jj] = projection.M[jj][ii];
				}
			}
		}

		return true;
	}
Example #16
GMO double endFrame() {
	static ovrPosef eyeRenderPose[2]; 
	static float    BodyYaw(3.141592f);
	static Vector3f HeadPos(0.0f, 1.6f, -5.0f);
	HeadPos.y = ovrHmd_GetFloat(HMD, OVR_KEY_EYE_HEIGHT, HeadPos.y);

    //pRender->SetRenderTarget ( pRendertargetTexture );
	
    //pRender->SetViewport (Recti(0,0, pRendertargetTexture->GetWidth(),
    //                                    pRendertargetTexture->GetHeight() ));  
    //pRender->Clear();
	for (int eyeIndex = 0; eyeIndex < ovrEye_Count; eyeIndex++)
	{
        ovrEyeType eye = HMD->EyeRenderOrder[eyeIndex];
        eyeRenderPose[eye] = ovrHmd_GetEyePose(HMD, eye);

        // Get view and projection matrices
		Matrix4f rollPitchYaw       = Matrix4f::RotationY(BodyYaw);
		Matrix4f finalRollPitchYaw  = rollPitchYaw * Matrix4f(eyeRenderPose[eye].Orientation);
		Vector3f finalUp            = finalRollPitchYaw.Transform(Vector3f(0,1,0));
		Vector3f finalForward       = finalRollPitchYaw.Transform(Vector3f(0,0,-1));
		Vector3f shiftedEyePos      = HeadPos + rollPitchYaw.Transform(eyeRenderPose[eye].Position);
        Matrix4f view = Matrix4f::LookAtRH(shiftedEyePos, shiftedEyePos + finalForward, finalUp); 
		Matrix4f proj = ovrMatrix4f_Projection(EyeRenderDesc[eye].Fov, 0.01f, 10000.0f, true);

		//pRender->SetViewport(Recti(EyeRenderViewport[eye]));
		//pRender->SetProjection(proj);
		pRender->SetDepthMode(true, true);
		
		//pRoomScene->Render(pRender, Matrix4f::Translation(EyeRenderDesc[eye].ViewAdjust) * view);
	}
	//pRender->BlendState
	
	//pRender->Clear(0.0f, 0.0f, 0.0f, 0.0f);
	//pRender->Present( true );
	pRender->UpdateMonitorOutputs();
    pRender->FinishScene();
	ovrHmd_EndFrame(HMD, headPose, &EyeTexture[0].Texture);
	return 1;
}
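Example #17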
  void initGl() {
    RiftGlfwApp::initGl();

    ovrGLConfig cfg;
    memset(&cfg, 0, sizeof(cfg));
    cfg.OGL.Header.API = ovrRenderAPI_OpenGL;
    cfg.OGL.Header.RTSize = Rift::toOvr(windowSize);
    cfg.OGL.Header.Multisample = 1;

    int distortionCaps = 0
      | ovrDistortionCap_Vignette
      | ovrDistortionCap_Chromatic
      | ovrDistortionCap_TimeWarp
      ;

    int configResult = ovrHmd_ConfigureRendering(hmd, &cfg.Config,
      distortionCaps, hmd->MaxEyeFov, eyeRenderDescs);

#ifdef _DEBUG
    ovrhmd_EnableHSWDisplaySDKRender(hmd, false);
#endif

    for_each_eye([&](ovrEyeType eye){
      const ovrEyeRenderDesc & erd = eyeRenderDescs[eye];
      ovrMatrix4f ovrPerspectiveProjection = ovrMatrix4f_Projection(erd.Fov, 0.01f, 100000.0f, true);
      projections[eye] = Rift::fromOvr(ovrPerspectiveProjection);
    });

    ///////////////////////////////////////////////////////////////////////////
    // Initialize OpenGL settings and variables
    glEnable(GL_BLEND);

    ovrLock.lock();
    renderWindow = glfwCreateWindow(100, 100, "Offscreen", nullptr, window);

    threadPtr = std::unique_ptr<std::thread>(new std::thread(&SimpleScene::runOvrThread, this));
    glfwMakeContextCurrent(window);
  }
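Example #18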
  virtual void initGl() {
    RiftGlfwApp::initGl();

    ovrRenderAPIConfig cfg;
    memset(&cfg, 0, sizeof(cfg));
    cfg.Header.API = ovrRenderAPI_OpenGL;
    cfg.Header.BackBufferSize = ovr::fromGlm(getSize());
    cfg.Header.Multisample = 1;

    int distortionCaps = ovrDistortionCap_Vignette;
    ovrEyeRenderDesc eyeRenderDescs[2];
    int configResult = ovrHmd_ConfigureRendering(hmd, &cfg,
        distortionCaps, hmd->DefaultEyeFov, eyeRenderDescs);

    for_each_eye([&](ovrEyeType eye){
      PerEyeArg & eyeArgs = eyes[eye];
      ovrFovPort fov = hmd->DefaultEyeFov[eye];
      ovrSizei texSize = ovrHmd_GetFovTextureSize(hmd, eye, fov, 1.0f);
      eyeArgs.framebuffer = FramebufferWrapperPtr(new FramebufferWrapper());
      eyeArgs.framebuffer->init(ovr::toGlm(texSize));

      ovrTextureHeader & textureHeader = eyeTextures[eye].Header;
      textureHeader.API = ovrRenderAPI_OpenGL;
      textureHeader.TextureSize = texSize;
      textureHeader.RenderViewport.Size = texSize;
      textureHeader.RenderViewport.Pos.x = 0;
      textureHeader.RenderViewport.Pos.y = 0;
      ((ovrGLTextureData&)eyeTextures[eye]).TexId =
        oglplus::GetName(eyeArgs.framebuffer->color);

      eyeArgs.modelviewOffset = glm::translate(glm::mat4(), 
        ovr::toGlm(eyeRenderDescs[eye].HmdToEyeViewOffset));

      ovrMatrix4f projection = ovrMatrix4f_Projection(fov, 0.01f, 100, true);
      eyeArgs.projection = ovr::toGlm(projection);
    });
  }
Example #19
void Renderer::renderOVR(fp_t interp)
{
    UNUSED(interp);

    ovrHmd_BeginFrame(hmd_, 0);

    glBindFramebuffer(GL_FRAMEBUFFER, framebuffer_);
    glClear(GL_DEPTH_BUFFER_BIT);

    ovrPosef eyePose[ovrEye_Count];

    for(int i = 0; i < ovrEye_Count; i++)
    {
        ovrEyeType eye = hmd_->EyeRenderOrder[i];

        glViewport(eyeViewport_[eye].Pos.x, eyeViewport_[eye].Pos.y,
                   eyeViewport_[eye].Size.w, eyeViewport_[eye].Size.h);

        glm::mat4 projectionMat = convertOvrMatrix4f(ovrMatrix4f_Projection(eyeRenderDesc_[eye].Fov, 0.1f, 1000.0f, /*rightHanded*/ true));

        eyePose[eye] = ovrHmd_GetEyePose(hmd_, eye);
        Core::get().camera().setHeadPosition(convertOvrVector3f(eyePose[eye].Position));
        Core::get().camera().setHeadRotation(glm::conjugate(convertOvrQuatf(eyePose[eye].Orientation)));

        glm::mat4 viewMat = glm::translate(glm::mat4{}, convertOvrVector3f(eyeRenderDesc_[eye].ViewAdjust));
        viewMat = viewMat * Core::get().camera().viewMatrix();

        skyRenderer_->render();
        landRenderer_->render(projectionMat, viewMat);
        structureRenderer_->render(projectionMat, viewMat);
        modelRenderer_->render(projectionMat, viewMat);
    }

    glBindFramebuffer(GL_FRAMEBUFFER, 0);

    ovrHmd_EndFrame(hmd_, eyePose, (ovrTexture*)eyeTexture_);
}
Example #20
void VR::nextTracking()
{
#if defined(_DEBUG)
	// make sure we are only called once per frame:
	static vector<bool> called;
	if (xapp->getFramenum() < 50000) {
		size_t framenum = (size_t) xapp->getFramenum();
		assert(called.size() <= framenum);
		called.push_back(true);
		assert(called.size() == framenum+1);
	}
#endif

	// Get both eye poses simultaneously, with IPD offset already included. 
	ovrVector3f useHmdToEyeViewOffset[2] = { eyeRenderDesc[0].HmdToEyeOffset, eyeRenderDesc[1].HmdToEyeOffset };
	//ovrPosef temp_EyeRenderPose[2];
	double displayMidpointSeconds = ovr_GetPredictedDisplayTime(session, 0);
	ovrTrackingState ts = ovr_GetTrackingState(session, displayMidpointSeconds, false);
	ovr_CalcEyePoses(ts.HeadPose.ThePose, useHmdToEyeViewOffset, layer.RenderPose);
	ovrResult result;
	ovrBoundaryTestResult btest;
	ovrBool visible;
	result = ovr_GetBoundaryVisible(session, &visible);
	if (0) {
		Log("visible = " << (visible == ovrTrue) << endl);

		result = ovr_TestBoundary(session, ovrTrackedDevice_HMD, ovrBoundary_Outer, &btest);
		if (OVR_SUCCESS(result)) {
			//Log("boundary success");
			if (result == ovrSuccess) Log("success" << endl);
			if (result == ovrSuccess_BoundaryInvalid) Log("success boundary invalid" << endl);
			if (result == ovrSuccess_DeviceUnavailable) Log("success device unavailable" << endl);
		}
	}
	layer.Fov[0] = eyeRenderDesc[0].Fov;
	layer.Fov[1] = eyeRenderDesc[1].Fov;

	// Render the two undistorted eye views into their render buffers.  
	for (int eye = 0; eye < 2; eye++)
	{
		ovrPosef    * useEyePose = &EyeRenderPose[eye];
		float       * useYaw = &YawAtRender[eye];
		float Yaw = XM_PI;
		*useEyePose = layer.RenderPose[eye];
		*useYaw = Yaw;

		// Get view and projection matrices (note near Z to reduce eye strain)
		Matrix4f rollPitchYaw = Matrix4f::RotationY(Yaw);
		Matrix4f finalRollPitchYaw = rollPitchYaw * Matrix4f(useEyePose->Orientation);
		// fix finalRollPitchYaw for LH coordinate system:
		Matrix4f s = Matrix4f::Scaling(1.0f, -1.0f, -1.0f);  // 1 1 -1
		finalRollPitchYaw = s * finalRollPitchYaw * s;

		Vector3f finalUp = finalRollPitchYaw.Transform(Vector3f(0, 1, 0));
		Vector3f finalForward = finalRollPitchYaw.Transform(Vector3f(0, 0, -1));//0 0 1
		Vector3f Posf;
		Posf.x = xapp->camera.pos.x;
		Posf.y = xapp->camera.pos.y;
		Posf.z = xapp->camera.pos.z;
		Vector3f diff = rollPitchYaw.Transform(useEyePose->Position);
		//diff /= 10.0f;
		//diff.x = 0.0f;
		//diff.y = 0.0f;
		//diff.z = 0.0f;
		Vector3f shiftedEyePos;
		shiftedEyePos.x = Posf.x - diff.x;
		shiftedEyePos.y = Posf.y + diff.y;
		shiftedEyePos.z = Posf.z + diff.z;
		xapp->camera.look.x = finalForward.x;
		xapp->camera.look.y = finalForward.y;
		xapp->camera.look.z = finalForward.z;

		Matrix4f view = Matrix4f::LookAtLH(shiftedEyePos, shiftedEyePos + finalForward, finalUp);
		Matrix4f projO = ovrMatrix4f_Projection(eyeRenderDesc[eye].Fov, 0.2f, 2000.0f,  ovrProjection_LeftHanded);
		Matrix4fToXM(this->viewOVR[eye], view.Transposed());
		Matrix4fToXM(this->projOVR[eye], projO.Transposed());
	}
}
Example #21
//-------------------------------------------------------------------------------------
void ProcessAndRender()
{
    static ovrPosef eyeRenderPose[2]; 

	// Start timing
    #if SDK_RENDER
	ovrHmd_BeginFrame(HMD, 0); 
    #else
	ovrHmd_BeginFrameTiming(HMD, 0); 
    // Retrieve data useful for handling the Health and Safety Warning - unused, but here for reference
    ovrHSWDisplayState hswDisplayState;
    ovrHmd_GetHSWDisplayState(HMD, &hswDisplayState);
    #endif

	// Adjust eye position and rotation from controls, maintaining y position from HMD.
	static float    BodyYaw(3.141592f);
	static Vector3f HeadPos(0.0f, 1.6f, -5.0f);
//	HeadPos.y = ovrHmd_GetFloat(HMD, OVR_KEY_EYE_HEIGHT, HeadPos.y);
	bool freezeEyeRender = Util_RespondToControls(BodyYaw, HeadPos, eyeRenderPose[1].Orientation);

     pRender->BeginScene();
    
	// Render the two undistorted eye views into their render buffers.
    if (!freezeEyeRender) // freeze to debug for time warp
    {
        pRender->SetRenderTarget ( pRendertargetTexture );
        pRender->SetViewport (Recti(0,0, pRendertargetTexture->GetWidth(),
                                         pRendertargetTexture->GetHeight() ));  
        pRender->Clear();
		for (int eyeIndex = 0; eyeIndex < ovrEye_Count; eyeIndex++)
		{
            ovrEyeType eye = HMD->EyeRenderOrder[eyeIndex];
            eyeRenderPose[eye] = ovrHmd_GetEyePose(HMD, eye);

            // Get view and projection matrices
			Matrix4f rollPitchYaw       = Matrix4f::RotationY(BodyYaw);
			Matrix4f finalRollPitchYaw  = rollPitchYaw * Matrix4f(eyeRenderPose[eye].Orientation);
			Vector3f finalUp            = finalRollPitchYaw.Transform(Vector3f(0,1,0));
			Vector3f finalForward       = finalRollPitchYaw.Transform(Vector3f(0,0,-1));
			Vector3f shiftedEyePos      = HeadPos + rollPitchYaw.Transform(eyeRenderPose[eye].Position);
            Matrix4f view = Matrix4f::LookAtRH(shiftedEyePos, shiftedEyePos + finalForward, finalUp); 
			Matrix4f proj = ovrMatrix4f_Projection(EyeRenderDesc[eye].Fov, 0.01f, 10000.0f, true);

			pRender->SetViewport(Recti(EyeRenderViewport[eye]));
			pRender->SetProjection(proj);
			pRender->SetDepthMode(true, true);
			pRoomScene->Render(pRender, Matrix4f::Translation(EyeRenderDesc[eye].ViewAdjust) * view);
		}
    }
    pRender->FinishScene();

    #if SDK_RENDER	// Let OVR do distortion rendering, Present and flush/sync
	ovrHmd_EndFrame(HMD, eyeRenderPose, &EyeTexture[0].Texture);
    #else
	// Clear screen
	pRender->SetDefaultRenderTarget();
	pRender->SetFullViewport();
	pRender->Clear(0.0f, 0.0f, 0.0f, 0.0f);

	// Setup shader
	ShaderFill distortionShaderFill(Shaders);
	distortionShaderFill.SetTexture(0, pRendertargetTexture);
	distortionShaderFill.SetInputLayout(VertexIL);

	for(int eyeNum = 0; eyeNum < 2; eyeNum++)
	{
		// Get and set shader constants
		Shaders->SetUniform2f("EyeToSourceUVScale",   UVScaleOffset[eyeNum][0].x, UVScaleOffset[eyeNum][0].y);
		Shaders->SetUniform2f("EyeToSourceUVOffset",  UVScaleOffset[eyeNum][1].x, UVScaleOffset[eyeNum][1].y);
 		ovrMatrix4f timeWarpMatrices[2];
		ovrHmd_GetEyeTimewarpMatrices(HMD, (ovrEyeType)eyeNum, eyeRenderPose[eyeNum], timeWarpMatrices);
		Shaders->SetUniform4x4f("EyeRotationStart", timeWarpMatrices[0]);  //Nb transposed when set
		Shaders->SetUniform4x4f("EyeRotationEnd",   timeWarpMatrices[1]);  //Nb transposed when set
		// Perform distortion
		pRender->Render(&distortionShaderFill, MeshVBs[eyeNum], MeshIBs[eyeNum],sizeof(ovrDistortionVertex));
	}

	pRender->SetDefaultRenderTarget();

	pRender->Present( true ); // Vsync enabled

    // Only flush GPU for ExtendDesktop; not needed in Direct App Rendering with Oculus driver.
    if (HMD->HmdCaps & ovrHmdCap_ExtendDesktop)
		pRender->WaitUntilGpuIdle();  
	ovrHmd_EndFrameTiming(HMD);
    #endif
}
Example #22
void Render(float dt)
{
    static unsigned int frameIndex = 0;
    frameIndex++;
    ovrFrameTiming timing = ovrHmd_BeginFrame(s_hmd, 0);

    // ovrSensorState ss = ovrHmd_GetSensorState(s_hmd, timing.ScanoutMidpointSeconds);
    // TODO: Use this for head tracking...
    // TODO: Use player height from SDK

    glBindBuffer(GL_ARRAY_BUFFER, 0);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);

    // render into fbo
    glBindFramebuffer(GL_FRAMEBUFFER, s_fbo);

    // TODO: enable this when we have more complex rendering.
    glEnable(GL_DEPTH_TEST);
    glDisable(GL_CULL_FACE);
    glEnable(GL_BLEND);
    glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);

    static float t = 0.0;
    t += dt;

    // clear render target
    glViewport(0, 0, s_renderTargetSize.w, s_renderTargetSize.h);
    glClearColor(s_clearColor.x, s_clearColor.y, s_clearColor.z, s_clearColor.w);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    for (int i = 0; i < 2; i++)
    {
        ovrEyeType eye = s_hmdDesc.EyeRenderOrder[i];
        ovrPosef pose = ovrHmd_BeginEyeRender(s_hmd, eye);

        glViewport(s_eyeTexture[eye].Header.RenderViewport.Pos.x,
                   s_eyeTexture[eye].Header.RenderViewport.Pos.y,
                   s_eyeTexture[eye].Header.RenderViewport.Size.w,
                   s_eyeTexture[eye].Header.RenderViewport.Size.h);

        Quatf q(pose.Orientation.x, pose.Orientation.y, pose.Orientation.z, pose.Orientation.w);
        Vector3f p(pose.Position.x, pose.Position.y, pose.Position.z);

        Matrixf cameraMatrix = Matrixf::QuatTrans(q, s_cameraPos);
        Matrixf viewCenter = cameraMatrix.OrthoInverse();

        // let ovr compute projection matrix, cause it's hard.
        ovrMatrix4f ovrProj = ovrMatrix4f_Projection(s_eyeRenderDesc[eye].Fov, 0.1f, 10000.0f, true);

        // convert to abaci matrix
        Matrixf projMatrix = Matrixf::Rows(Vector4f(ovrProj.M[0][0], ovrProj.M[0][1], ovrProj.M[0][2], ovrProj.M[0][3]),
                                           Vector4f(ovrProj.M[1][0], ovrProj.M[1][1], ovrProj.M[1][2], ovrProj.M[1][3]),
                                           Vector4f(ovrProj.M[2][0], ovrProj.M[2][1], ovrProj.M[2][2], ovrProj.M[2][3]),
                                           Vector4f(ovrProj.M[3][0], ovrProj.M[3][1], ovrProj.M[3][2], ovrProj.M[3][3]));

        // use EyeRenderDesc.ViewAdjust to do eye offset.
        Matrixf viewMatrix = viewCenter * Matrixf::Trans(Vector3f(s_eyeRenderDesc[eye].ViewAdjust.x,
                                                                  s_eyeRenderDesc[eye].ViewAdjust.y,
                                                                  s_eyeRenderDesc[eye].ViewAdjust.z));

        // compute model matrix for terminal
        const float kTermScale = 0.001f;
        const Vector3f termOrigin(-2 * kFeetToMeters, 6.75f * kFeetToMeters, -2.5 * kFeetToMeters);
        Matrixf modelMatrix = Matrixf::ScaleQuatTrans(Vector3f(kTermScale, -kTermScale, kTermScale),
                                                      Quatf::AxisAngle(Vector3f(0, 1, 0), 0),
                                                      termOrigin);
        RenderBegin();

        RenderFloor(projMatrix, viewMatrix, 0.0f);

        RenderTextBegin(projMatrix, viewMatrix, modelMatrix);
        for (int j = 0; j < win_get_text_count(); j++)
        {
            gb::Text* text = (gb::Text*)win_get_text(j);
            if (text)
            {
                RenderText(text->GetQuadVec());
            }
        }
        RenderTextEnd();

        RenderEnd();
        ovrHmd_EndEyeRender(s_hmd, eye, pose, &s_eyeTexture[eye]);
    }

    ovrHmd_EndFrame(s_hmd);
}
Example #23
// return true to retry later (e.g. after display lost)
static bool MainLoop(bool retryCreate)
{
    // Initialize these to nullptr here to handle device lost failures cleanly
    ovrMirrorTexture            mirrorTexture = nullptr;
    OculusEyeTexture*           pEyeRenderTexture[2] = { nullptr, nullptr };
    Scene*                      roomScene = nullptr; 
    Camera*                     mainCam = nullptr;
    ovrMirrorTextureDesc        mirrorDesc = {};

    ovrSession session;
    ovrGraphicsLuid luid;
    ovrResult result = ovr_Create(&session, &luid);
    if (!OVR_SUCCESS(result))
        return retryCreate;

    ovrHmdDesc hmdDesc = ovr_GetHmdDesc(session);

    // Setup Device and Graphics
    // Note: the mirror window can be any size; for this sample we use 1/2 the HMD resolution
    if (!DIRECTX.InitDevice(hmdDesc.Resolution.w / 2, hmdDesc.Resolution.h / 2, reinterpret_cast<LUID*>(&luid)))
        goto Done;

    // Make the eye render buffers (caution if actual size < requested due to HW limits). 
    ovrRecti eyeRenderViewport[2];

    for (int eye = 0; eye < 2; ++eye)
    {
        ovrSizei idealSize = ovr_GetFovTextureSize(session, (ovrEyeType)eye, hmdDesc.DefaultEyeFov[eye], 1.0f);
        pEyeRenderTexture[eye] = new OculusEyeTexture();
        if (!pEyeRenderTexture[eye]->Init(session, idealSize.w, idealSize.h, true))
        {
            if (retryCreate) goto Done;
            FATALERROR("Failed to create eye texture.");
        }

        eyeRenderViewport[eye].Pos.x = 0;
        eyeRenderViewport[eye].Pos.y = 0;
        eyeRenderViewport[eye].Size = idealSize;
        if (!pEyeRenderTexture[eye]->TextureChain)
        {
            if (retryCreate) goto Done;
            FATALERROR("Failed to create texture.");
        }
    }

    // Create a mirror to see on the monitor.
    mirrorDesc.Format = OVR_FORMAT_R8G8B8A8_UNORM_SRGB;
    mirrorDesc.Width = DIRECTX.WinSizeW;
    mirrorDesc.Height = DIRECTX.WinSizeH;
    result = ovr_CreateMirrorTextureDX(session, DIRECTX.CommandQueue, &mirrorDesc, &mirrorTexture);
    if (!OVR_SUCCESS(result))
    {
        if (retryCreate) goto Done;
        FATALERROR("Failed to create mirror texture.");
    }

    // Create the room model
    roomScene = new Scene(false);

    // Create camera
    mainCam = new Camera(XMVectorSet(0.0f, 1.6f, 5.0f, 0), XMQuaternionIdentity());

    // Setup VR components, filling out description
    ovrEyeRenderDesc eyeRenderDesc[2];
    eyeRenderDesc[0] = ovr_GetRenderDesc(session, ovrEye_Left, hmdDesc.DefaultEyeFov[0]);
    eyeRenderDesc[1] = ovr_GetRenderDesc(session, ovrEye_Right, hmdDesc.DefaultEyeFov[1]);

    long long frameIndex = 0;

    bool drawMirror = true;

    DIRECTX.InitFrame(drawMirror);

    // Main loop
    while (DIRECTX.HandleMessages())
    {
        ovrSessionStatus sessionStatus;
        ovr_GetSessionStatus(session, &sessionStatus);
        if (sessionStatus.ShouldQuit)
        {
            // Because the application was asked to quit, do not request a retry
            retryCreate = false;
            break;
        }
        if (sessionStatus.ShouldRecenter)
            ovr_RecenterTrackingOrigin(session);

        if (sessionStatus.IsVisible)
        {
            XMVECTOR forward = XMVector3Rotate(XMVectorSet(0, 0, -0.05f, 0), mainCam->GetRotVec());
            XMVECTOR right   = XMVector3Rotate(XMVectorSet(0.05f, 0, 0, 0),  mainCam->GetRotVec());
            XMVECTOR mainCamPos = mainCam->GetPosVec();
            XMVECTOR mainCamRot = mainCam->GetRotVec();
            if (DIRECTX.Key['W'] || DIRECTX.Key[VK_UP])      mainCamPos = XMVectorAdd(     mainCamPos, forward);
            if (DIRECTX.Key['S'] || DIRECTX.Key[VK_DOWN])    mainCamPos = XMVectorSubtract(mainCamPos, forward);
            if (DIRECTX.Key['D'])                            mainCamPos = XMVectorAdd(     mainCamPos, right);
            if (DIRECTX.Key['A'])                            mainCamPos = XMVectorSubtract(mainCamPos, right);
            static float Yaw = 0;
            if (DIRECTX.Key[VK_LEFT])  mainCamRot = XMQuaternionRotationRollPitchYaw(0, Yaw += 0.02f, 0);
            if (DIRECTX.Key[VK_RIGHT]) mainCamRot = XMQuaternionRotationRollPitchYaw(0, Yaw -= 0.02f, 0);

            mainCam->SetPosVec(mainCamPos);
            mainCam->SetRotVec(mainCamRot);

            // Animate the cube
            static float cubeClock = 0;
            roomScene->Models[0]->Pos = XMFLOAT3(9 * sin(cubeClock), 3, 9 * cos(cubeClock += 0.015f));

            // Get both eye poses simultaneously, with IPD offset already included. 
            ovrPosef    EyeRenderPose[2];
            ovrVector3f HmdToEyeOffset[2] = { eyeRenderDesc[0].HmdToEyeOffset,
                                              eyeRenderDesc[1].HmdToEyeOffset };

            double sensorSampleTime;    // sensorSampleTime is fed into the layer later
            ovr_GetEyePoses(session, frameIndex, ovrTrue, HmdToEyeOffset, EyeRenderPose, &sensorSampleTime);

            // Render Scene to Eye Buffers
            for (int eye = 0; eye < 2; ++eye)
            {
                DIRECTX.SetActiveContext(eye == 0 ? DrawContext_EyeRenderLeft : DrawContext_EyeRenderRight);

                DIRECTX.SetActiveEye(eye);

                CD3DX12_RESOURCE_BARRIER resBar = CD3DX12_RESOURCE_BARRIER::Transition(pEyeRenderTexture[eye]->GetD3DResource(),
                                                                                       D3D12_RESOURCE_STATE_PIXEL_SHADER_RESOURCE,
                                                                                       D3D12_RESOURCE_STATE_RENDER_TARGET);
                DIRECTX.CurrentFrameResources().CommandLists[DIRECTX.ActiveContext]->ResourceBarrier(1, &resBar);

                DIRECTX.SetAndClearRenderTarget(pEyeRenderTexture[eye]->GetRtv(), pEyeRenderTexture[eye]->GetDsv());
                DIRECTX.SetViewport((float)eyeRenderViewport[eye].Pos.x, (float)eyeRenderViewport[eye].Pos.y,
                                    (float)eyeRenderViewport[eye].Size.w, (float)eyeRenderViewport[eye].Size.h);
                                
                //Get the pose information in XM format
                XMVECTOR eyeQuat = XMVectorSet(EyeRenderPose[eye].Orientation.x, EyeRenderPose[eye].Orientation.y,
                                               EyeRenderPose[eye].Orientation.z, EyeRenderPose[eye].Orientation.w);
                XMVECTOR eyePos = XMVectorSet(EyeRenderPose[eye].Position.x, EyeRenderPose[eye].Position.y, EyeRenderPose[eye].Position.z, 0);

                // Get view and projection matrices for the Rift camera
                Camera finalCam(XMVectorAdd(mainCamPos, XMVector3Rotate(eyePos, mainCamRot)), XMQuaternionMultiply(eyeQuat, mainCamRot));
                XMMATRIX view = finalCam.GetViewMatrix();
                ovrMatrix4f p = ovrMatrix4f_Projection(eyeRenderDesc[eye].Fov, 0.2f, 1000.0f, ovrProjection_None);
                XMMATRIX proj = XMMatrixSet(p.M[0][0], p.M[1][0], p.M[2][0], p.M[3][0],
                                            p.M[0][1], p.M[1][1], p.M[2][1], p.M[3][1],
                                            p.M[0][2], p.M[1][2], p.M[2][2], p.M[3][2],
                                            p.M[0][3], p.M[1][3], p.M[2][3], p.M[3][3]);
                XMMATRIX prod = XMMatrixMultiply(view, proj);

                roomScene->Render(&prod, 1, 1, 1, 1, true);

                resBar = CD3DX12_RESOURCE_BARRIER::Transition(pEyeRenderTexture[eye]->GetD3DResource(),
                                                              D3D12_RESOURCE_STATE_RENDER_TARGET,
                                                              D3D12_RESOURCE_STATE_PIXEL_SHADER_RESOURCE);
                DIRECTX.CurrentFrameResources().CommandLists[DIRECTX.ActiveContext]->ResourceBarrier(1, &resBar);

                // Commit rendering to the swap chain
                pEyeRenderTexture[eye]->Commit();

                // kick off eye render command lists before ovr_SubmitFrame()
                DIRECTX.SubmitCommandList(DIRECTX.ActiveContext);
            }

            // Initialize our single full screen Fov layer.
            ovrLayerEyeFov ld = {};
            ld.Header.Type = ovrLayerType_EyeFov;
            ld.Header.Flags = 0;

            for (int eye = 0; eye < 2; ++eye)
            {
                ld.ColorTexture[eye] = pEyeRenderTexture[eye]->TextureChain;
                ld.Viewport[eye] = eyeRenderViewport[eye];
                ld.Fov[eye] = hmdDesc.DefaultEyeFov[eye];
                ld.RenderPose[eye] = EyeRenderPose[eye];
                ld.SensorSampleTime = sensorSampleTime;
            }

            ovrLayerHeader* layers = &ld.Header;
            result = ovr_SubmitFrame(session, frameIndex, nullptr, &layers, 1);
            // exit the rendering loop if submit returns an error; we will retry on ovrError_DisplayLost
            if (!OVR_SUCCESS(result))
                goto Done;
            
            frameIndex++;
        }
        
        if (drawMirror)
        {
            DIRECTX.SetActiveContext(DrawContext_Final);

            DIRECTX.SetViewport(0.0f, 0.0f, (float)hmdDesc.Resolution.w / 2, (float)hmdDesc.Resolution.h / 2);

            // Render mirror
            ID3D12Resource* mirrorTexRes = nullptr;
            ovr_GetMirrorTextureBufferDX(session, mirrorTexture, IID_PPV_ARGS(&mirrorTexRes));

            //DIRECTX.SetAndClearRenderTarget(DIRECTX.CurrentFrameResources().SwapChainRtvHandle, nullptr, 1.0f, 0.5f, 0.0f, 1.0f);

            CD3DX12_RESOURCE_BARRIER preMirrorBlitBar[] =
            {
                CD3DX12_RESOURCE_BARRIER::Transition(DIRECTX.CurrentFrameResources().SwapChainBuffer, D3D12_RESOURCE_STATE_RENDER_TARGET, D3D12_RESOURCE_STATE_COPY_DEST),
                CD3DX12_RESOURCE_BARRIER::Transition(mirrorTexRes, D3D12_RESOURCE_STATE_RENDER_TARGET, D3D12_RESOURCE_STATE_COPY_SOURCE)
            };

            // Indicate that the back buffer will now be copied into
            DIRECTX.CurrentFrameResources().CommandLists[DIRECTX.ActiveContext]->ResourceBarrier(ARRAYSIZE(preMirrorBlitBar), preMirrorBlitBar);

            DIRECTX.CurrentFrameResources().CommandLists[DIRECTX.ActiveContext]->CopyResource(DIRECTX.CurrentFrameResources().SwapChainBuffer, mirrorTexRes);

            CD3DX12_RESOURCE_BARRIER resBar = CD3DX12_RESOURCE_BARRIER::Transition(mirrorTexRes,
                                                                                   D3D12_RESOURCE_STATE_COPY_SOURCE,
                                                                                   D3D12_RESOURCE_STATE_RENDER_TARGET);
            DIRECTX.CurrentFrameResources().CommandLists[DIRECTX.ActiveContext]->ResourceBarrier(1, &resBar);
        }

        DIRECTX.SubmitCommandListAndPresent(drawMirror);
    }

    // Release resources
Done:
    delete mainCam;
    delete roomScene;
    if (mirrorTexture)
        ovr_DestroyMirrorTexture(session, mirrorTexture);

    for (int eye = 0; eye < 2; ++eye)
    {
        delete pEyeRenderTexture[eye];
    }
    DIRECTX.ReleaseDevice();
    ovr_Destroy(session);

    // Retry on ovrError_DisplayLost
    return retryCreate || (result == ovrError_DisplayLost);
}
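
Examples 23 and 24 both follow the same per-frame contract of the 1.x SDK: fetch predicted eye poses, render each eye into its texture swap chain and commit it, then submit an ovrLayerEyeFov describing what was rendered. A condensed sketch of that flow, with rendering and error handling elided; the parameters are assumed to come from init-time code like the above:

#include <OVR_CAPI.h>

void renderOneFrame(ovrSession session, long long frameIndex,
                    ovrTextureSwapChain chain[2],
                    const ovrEyeRenderDesc eyeRenderDesc[2],
                    const ovrRecti viewport[2],
                    const ovrFovPort fov[2])
{
    // Poses for both eyes, with the per-eye IPD offset already applied.
    ovrVector3f hmdToEyeOffset[2] = { eyeRenderDesc[0].HmdToEyeOffset,
                                      eyeRenderDesc[1].HmdToEyeOffset };
    ovrPosef eyePoses[2];
    double sensorSampleTime = 0;
    ovr_GetEyePoses(session, frameIndex, ovrTrue, hmdToEyeOffset,
                    eyePoses, &sensorSampleTime);

    for (int eye = 0; eye < 2; ++eye)
    {
        // ... bind the chain's current buffer and render eyePoses[eye] ...
        ovr_CommitTextureSwapChain(session, chain[eye]);
    }

    // Describe what was rendered so the compositor can distort and timewarp it.
    ovrLayerEyeFov layer = {};
    layer.Header.Type = ovrLayerType_EyeFov;
    for (int eye = 0; eye < 2; ++eye)
    {
        layer.ColorTexture[eye] = chain[eye];
        layer.Viewport[eye] = viewport[eye];
        layer.Fov[eye] = fov[eye];
        layer.RenderPose[eye] = eyePoses[eye];
        layer.SensorSampleTime = sensorSampleTime;
    }
    ovrLayerHeader* layers = &layer.Header;
    ovr_SubmitFrame(session, frameIndex, nullptr, &layers, 1);
}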
Example #24
// Display to an HMD with OVR SDK backend.
void displayHMD()
{
    ovrSessionStatus sessionStatus;
    ovr_GetSessionStatus(g_session, &sessionStatus);

    if (sessionStatus.HmdPresent == false)
    {
        displayMonitor();
        return;
    }

    const ovrHmdDesc& hmdDesc = m_Hmd;
    double sensorSampleTime; // sensorSampleTime is fed into the layer later
    if (g_hmdVisible)
    {
        // Call ovr_GetRenderDesc each frame to get the ovrEyeRenderDesc, as the returned values (e.g. HmdToEyeOffset) may change at runtime.
        ovrEyeRenderDesc eyeRenderDesc[2];
        eyeRenderDesc[0] = ovr_GetRenderDesc(g_session, ovrEye_Left, hmdDesc.DefaultEyeFov[0]);
        eyeRenderDesc[1] = ovr_GetRenderDesc(g_session, ovrEye_Right, hmdDesc.DefaultEyeFov[1]);

        // Get eye poses, feeding in correct IPD offset
        ovrVector3f HmdToEyeOffset[2] = {
            eyeRenderDesc[0].HmdToEyeOffset,
            eyeRenderDesc[1].HmdToEyeOffset };
#if 0
        // Get both eye poses simultaneously, with IPD offset already included.
        double displayMidpointSeconds = ovr_GetPredictedDisplayTime(g_session, 0);
        ovrTrackingState hmdState = ovr_GetTrackingState(g_session, displayMidpointSeconds, ovrTrue);
        ovr_CalcEyePoses(hmdState.HeadPose.ThePose, HmdToEyeOffset, m_eyePoses);
#else
        ovr_GetEyePoses(g_session, g_frameIndex, ovrTrue, HmdToEyeOffset, m_eyePoses, &sensorSampleTime);
#endif
        storeHmdPose(m_eyePoses[0]);

        for (int eye = 0; eye < 2; ++eye)
        {
            const FBO& swapfbo = m_swapFBO[eye];
            const ovrTextureSwapChain& chain = g_textureSwapChain[eye];

            int curIndex;
            ovr_GetTextureSwapChainCurrentIndex(g_session, chain, &curIndex);
            GLuint curTexId;
            ovr_GetTextureSwapChainBufferGL(g_session, chain, curIndex, &curTexId);

            glBindFramebuffer(GL_FRAMEBUFFER, swapfbo.id);
            glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, curTexId, 0);

            glViewport(0, 0, swapfbo.w, swapfbo.h);
            glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
            glEnable(GL_FRAMEBUFFER_SRGB);

            {
                glClearColor(0.3f, 0.3f, 0.3f, 0.f);
                glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

                const ovrSizei& downSize = ovr_GetFovTextureSize(g_session, ovrEyeType(eye), hmdDesc.DefaultEyeFov[eye], m_fboScale);
                ovrRecti vp = { 0, 0, downSize.w, downSize.h };
                const int texh = swapfbo.h;
                vp.Pos.y = (texh - vp.Size.h) / 2;
                glViewport(vp.Pos.x, vp.Pos.y, vp.Size.w, vp.Size.h);

                // Cinemascope - letterbox bars scissoring off pixels above and below vp center
                const float hc = .5f * m_cinemaScope;
                const int scisPx = static_cast<int>(hc * static_cast<float>(vp.Size.h));
                ovrRecti sp = vp;
                sp.Pos.y += scisPx;
                sp.Size.h -= 2 * scisPx;
                glScissor(sp.Pos.x, sp.Pos.y, sp.Size.w, sp.Size.h);
                glEnable(GL_SCISSOR_TEST);
                glEnable(GL_DEPTH_TEST);

                // Render the scene for the current eye
                const ovrPosef& eyePose = m_eyePoses[eye];
                const glm::mat4 mview =
                    makeWorldToChassisMatrix() *
                    makeMatrixFromPose(eyePose, m_headSize);
                const ovrMatrix4f ovrproj = ovrMatrix4f_Projection(hmdDesc.DefaultEyeFov[eye], 0.2f, 1000.0f, ovrProjection_None);
                const glm::mat4 proj = makeGlmMatrixFromOvrMatrix(ovrproj);
                g_pScene->RenderForOneEye(glm::value_ptr(glm::inverse(mview)), glm::value_ptr(proj));

                const ovrTextureSwapChain& chain = g_textureSwapChain[eye];
                const ovrResult commitres = ovr_CommitTextureSwapChain(g_session, chain);
                if (!OVR_SUCCESS(commitres))
                {
                    LOG_ERROR("ovr_CommitTextureSwapChain returned %d", commitres);
                    return;
                }
            }
            glDisable(GL_SCISSOR_TEST);

            // Grab a copy of the left eye's undistorted render output for presentation
            // to the desktop window instead of the barrel distorted mirror texture.
            // This blit, while cheap, could cost some framerate to the HMD.
            // An over-the-shoulder view is another option, at a greater performance cost.
            if (0)
            {
                if (eye == ovrEyeType::ovrEye_Left)
                {
                    BlitLeftEyeRenderToUndistortedMirrorTexture();
                }
            }

            glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, 0, 0);
            glBindFramebuffer(GL_FRAMEBUFFER, 0);
        }
    }

    std::vector<const ovrLayerHeader*> layerHeaders;
    {
        // Do distortion rendering, Present and flush/sync
        ovrLayerEyeFov ld;
        ld.Header.Type = ovrLayerType_EyeFov;
        ld.Header.Flags = ovrLayerFlag_TextureOriginAtBottomLeft; // Because OpenGL.

        for (int eye = 0; eye < 2; ++eye)
        {
            const FBO& swapfbo = m_swapFBO[eye];
            const ovrTextureSwapChain& chain = g_textureSwapChain[eye];

            ld.ColorTexture[eye] = chain;

            const ovrSizei& downSize = ovr_GetFovTextureSize(g_session, ovrEyeType(eye), hmdDesc.DefaultEyeFov[eye], m_fboScale);
            ovrRecti vp = { 0, 0, downSize.w, downSize.h };
            const int texh = swapfbo.h;
            vp.Pos.y = (texh - vp.Size.h) / 2;

            ld.Viewport[eye] = vp;
            ld.Fov[eye] = hmdDesc.DefaultEyeFov[eye];
            ld.RenderPose[eye] = m_eyePoses[eye];
            ld.SensorSampleTime = sensorSampleTime;
        }
        layerHeaders.push_back(&ld.Header);

        // Optional second layer: the in-world tweakbar UI quad
        if (g_tweakbarQuad.m_showQuadInWorld)
        {
            // Update the quad's pose from the gaze ray and render the UI into
            // its swap chain before filling out the layer struct, so the pose
            // copied below is current for this frame.
            g_tweakbarQuad.SetHmdEyeRay(m_eyePoses[ovrEyeType::ovrEye_Left]);
            g_tweakbarQuad.DrawToQuad();

            ql.Header.Type = ovrLayerType_Quad;
            ql.Header.Flags = ovrLayerFlag_TextureOriginAtBottomLeft; // Because OpenGL.

            ql.ColorTexture = g_tweakbarQuad.m_swapChain;
            ovrRecti vp;
            vp.Pos.x = 0;
            vp.Pos.y = 0;
            vp.Size.w = 600; ///@todo
            vp.Size.h = 600; ///@todo
            ql.Viewport = vp;
            ql.QuadPoseCenter = g_tweakbarQuad.m_QuadPoseCenter;
            ql.QuadSize = { 1.f, 1.f }; ///@todo Pass in

            layerHeaders.push_back(&ql.Header);
        }
    }
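    // The runtime composites layers in list order, so the quad UI layer is
    // drawn on top of the eye-fov layer.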

#if 0
    ovrViewScaleDesc viewScaleDesc;
    viewScaleDesc.HmdToEyeOffset[0] = m_eyeOffsets[0];
    viewScaleDesc.HmdToEyeOffset[1] = m_eyeOffsets[1];
    viewScaleDesc.HmdSpaceToWorldScaleInMeters = 1.f;
#endif

    // Passing nullptr for the ovrViewScaleDesc makes the SDK use defaults
    // derived from the current render descs (see the disabled block above).
    const ovrResult result = ovr_SubmitFrame(g_session, g_frameIndex, nullptr, &layerHeaders[0], static_cast<unsigned int>(layerHeaders.size()));
    if (result == ovrSuccess)
    {
        g_hmdVisible = true;
    }
    else if (result == ovrSuccess_NotVisible)
    {
        g_hmdVisible = false;
        ///@todo Enter a lower-power, polling "no focus/HMD not worn" mode
    }
    else if (result == ovrError_DisplayLost)
    {
        LOG_INFO("ovr_SubmitFrame returned ovrError_DisplayLost");
        g_hmdVisible = false;
        ///@todo Tear down textures and session and re-create
    }
    else
    {
        LOG_ERROR("ovr_SubmitFrame returned error %d", result);
        //g_hmdVisible = false;
    }

    // Handle OVR session events
    ovr_GetSessionStatus(g_session, &sessionStatus);
    if (sessionStatus.ShouldQuit)
    {
        glfwSetWindowShouldClose(g_pMirrorWindow, 1);
    }
    if (sessionStatus.ShouldRecenter)
    {
        ovr_RecenterTrackingOrigin(g_session);
    }

    // Blit mirror texture to monitor window
    if (g_hmdVisible)
    {
        glViewport(0, 0, g_mirrorWindowSz.x, g_mirrorWindowSz.y);
        const FBO& srcFBO = m_mirrorFBO;
        glBindFramebuffer(GL_READ_FRAMEBUFFER, srcFBO.id);
        glBlitFramebuffer(
            0, srcFBO.h, srcFBO.w, 0,
            0, 0, g_mirrorWindowSz.x, g_mirrorWindowSz.y,
            GL_COLOR_BUFFER_BIT, GL_NEAREST);
        glBindFramebuffer(GL_READ_FRAMEBUFFER, 0);
    }
    else
    {
        displayMonitor();
    }
    ++g_frameIndex;

#ifdef USE_ANTTWEAKBAR
    if (g_tweakbarQuad.m_showQuadInWorld)
    {
        TwDraw();
    }
#endif
}
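A note on the ovrError_DisplayLost ///@todo above: the documented recovery in the 1.x SDK is to release all swap chains, destroy the session, and re-create both. A minimal sketch, assuming hypothetical destroyGraphicsResources()/createGraphicsResources() helpers that own the swap chains and mirror texture:

// Sketch only: recovery path for ovrError_DisplayLost. The helper names are
// placeholders for whatever owns the swap chains and mirror FBO.
void recoverFromDisplayLost()
{
    destroyGraphicsResources();     // hypothetical: release swap chains + mirror FBO
    ovr_Destroy(g_session);
    g_session = nullptr;

    ovrGraphicsLuid luid;
    if (OVR_SUCCESS(ovr_Create(&g_session, &luid)))
    {
        createGraphicsResources();  // hypothetical: rebuild swap chains for the new session
        g_hmdVisible = true;
    }
    // On failure, keep polling ovr_GetSessionStatus and retry next frame.
}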
Example No. 25
0
void OculusWorldDemoApp::CalculateHmdValues()
{
    // Initialize eye rendering information for ovrHmd_ConfigureRendering.
    // The viewport sizes are re-computed in case RenderTargetSize changed due to HW limitations.
    ovrFovPort eyeFov[2];
    eyeFov[0] = HmdDesc.DefaultEyeFov[0];
    eyeFov[1] = HmdDesc.DefaultEyeFov[1];

    // Clamp Fov based on our dynamically adjustable FovSideTanMax.
    // Most apps should use the default, but reducing Fov does reduce rendering cost.
    eyeFov[0] = FovPort::Min(eyeFov[0], FovPort(FovSideTanMax));
    eyeFov[1] = FovPort::Min(eyeFov[1], FovPort(FovSideTanMax));


    if (ForceZeroIpd)
    {
        // ForceZeroIpd does three things:
        //  1) Sets FOV to maximum symmetrical FOV based on both eyes
        //  2) Sets eye ViewAdjust values to 0.0 (effective IPD == 0)
        //  3) Uses only the Left texture for rendering.
        
        eyeFov[0] = FovPort::Max(eyeFov[0], eyeFov[1]);
        eyeFov[1] = eyeFov[0];

        Sizei recommendedTexSize = ovrHmd_GetFovTextureSize(Hmd, ovrEye_Left,
                                                            eyeFov[0], DesiredPixelDensity);

        Sizei textureSize = EnsureRendertargetAtLeastThisBig(Rendertarget_Left,  recommendedTexSize);

        EyeRenderSize[0] = Sizei::Min(textureSize, recommendedTexSize);
        EyeRenderSize[1] = EyeRenderSize[0];

        // Store texture pointers that will be passed for rendering.
        EyeTexture[0]                       = RenderTargets[Rendertarget_Left].Tex;
        EyeTexture[0].Header.TextureSize    = textureSize;
        EyeTexture[0].Header.RenderViewport = Recti(EyeRenderSize[0]);
        // Right eye is the same.
        EyeTexture[1] = EyeTexture[0];
    }

    else
    {
        // Configure Stereo settings. Default pixel density is 1.0f.
        Sizei recommendedTex0Size = ovrHmd_GetFovTextureSize(Hmd, ovrEye_Left,  eyeFov[0], DesiredPixelDensity);
        Sizei recommendedTex1Size = ovrHmd_GetFovTextureSize(Hmd, ovrEye_Right, eyeFov[1], DesiredPixelDensity);

        if (RendertargetIsSharedByBothEyes)
        {
            Sizei  rtSize(recommendedTex0Size.w + recommendedTex1Size.w,
                          Alg::Max(recommendedTex0Size.h, recommendedTex1Size.h));

            // Use returned size as the actual RT size may be different due to HW limits.
            rtSize = EnsureRendertargetAtLeastThisBig(Rendertarget_BothEyes, rtSize);

            // Don't draw more than the recommended size; this also ensures that resolution reported
            // in the overlay HUD size is updated correctly for FOV/pixel density changes.
            EyeRenderSize[0] = Sizei::Min(Sizei(rtSize.w/2, rtSize.h), recommendedTex0Size);
            EyeRenderSize[1] = Sizei::Min(Sizei(rtSize.w/2, rtSize.h), recommendedTex1Size);

            // Store texture pointers that will be passed for rendering.
            // Same texture is used, but with different viewports.
            EyeTexture[0]                       = RenderTargets[Rendertarget_BothEyes].Tex;
            EyeTexture[0].Header.TextureSize    = rtSize;
            EyeTexture[0].Header.RenderViewport = Recti(Vector2i(0), EyeRenderSize[0]);
            EyeTexture[1]                       = RenderTargets[Rendertarget_BothEyes].Tex;
            EyeTexture[1].Header.TextureSize    = rtSize;
            EyeTexture[1].Header.RenderViewport = Recti(Vector2i((rtSize.w+1)/2, 0), EyeRenderSize[1]);
        }

        else
        {
            Sizei tex0Size = EnsureRendertargetAtLeastThisBig(Rendertarget_Left,  recommendedTex0Size);
            Sizei tex1Size = EnsureRendertargetAtLeastThisBig(Rendertarget_Right, recommendedTex1Size);

            EyeRenderSize[0] = Sizei::Min(tex0Size, recommendedTex0Size);
            EyeRenderSize[1] = Sizei::Min(tex1Size, recommendedTex1Size);

            // Store texture pointers and viewports that will be passed for rendering.
            EyeTexture[0]                       = RenderTargets[Rendertarget_Left].Tex;
            EyeTexture[0].Header.TextureSize    = tex0Size;
            EyeTexture[0].Header.RenderViewport = Recti(EyeRenderSize[0]);
            EyeTexture[1]                       = RenderTargets[Rendertarget_Right].Tex;
            EyeTexture[1].Header.TextureSize    = tex1Size;
            EyeTexture[1].Header.RenderViewport = Recti(EyeRenderSize[1]);
        }
    }

    // Hmd caps.
    unsigned hmdCaps = (VsyncEnabled ? 0 : ovrHmdCap_NoVSync) |
                       ovrHmdCap_LatencyTest;
    if (IsLowPersistence)
        hmdCaps |= ovrHmdCap_LowPersistence;
    if (DynamicPrediction)
        hmdCaps |= ovrHmdCap_DynamicPrediction;

    ovrHmd_SetEnabledCaps(Hmd, hmdCaps);


    ovrRenderAPIConfig config         = pRender->Get_ovrRenderAPIConfig();
    unsigned           distortionCaps = ovrDistortionCap_Chromatic;
    if (TimewarpEnabled)
        distortionCaps |= ovrDistortionCap_TimeWarp;

    if (!ovrHmd_ConfigureRendering( Hmd, &config, distortionCaps,
                                    eyeFov, EyeRenderDesc ))
    {
        // Fail exit? TBD
        return;
    }

    if (ForceZeroIpd)
    {
        // Remove IPD adjust
        EyeRenderDesc[0].ViewAdjust = Vector3f(0);
        EyeRenderDesc[1].ViewAdjust = Vector3f(0);
    }

    // ovrHmdCap_LatencyTest - enables internal latency feedback
    unsigned sensorCaps = ovrSensorCap_Orientation|ovrSensorCap_YawCorrection;
    if (PositionTrackingEnabled)
        sensorCaps |= ovrSensorCap_Position;
      
    if (StartSensorCaps != sensorCaps)
    {
        ovrHmd_StartSensor(Hmd, sensorCaps, 0);
        StartSensorCaps = sensorCaps;
    }    

    // Calculate projections
    Projection[0] = ovrMatrix4f_Projection(EyeRenderDesc[0].Fov,  0.01f, 10000.0f, true);
    Projection[1] = ovrMatrix4f_Projection(EyeRenderDesc[1].Fov,  0.01f, 10000.0f, true);

    float    orthoDistance = 0.8f; // 2D is 0.8 meter from camera
    Vector2f orthoScale0   = Vector2f(1.0f) / Vector2f(EyeRenderDesc[0].PixelsPerTanAngleAtCenter);
    Vector2f orthoScale1   = Vector2f(1.0f) / Vector2f(EyeRenderDesc[1].PixelsPerTanAngleAtCenter);
    
    OrthoProjection[0] = ovrMatrix4f_OrthoSubProjection(Projection[0], orthoScale0, orthoDistance,
                                                        EyeRenderDesc[0].ViewAdjust.x);
    OrthoProjection[1] = ovrMatrix4f_OrthoSubProjection(Projection[1], orthoScale1, orthoDistance,
                                                        EyeRenderDesc[1].ViewAdjust.x);
}
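The ForceZeroIpd branch above widens both eyes into one shared, symmetric frustum. The same idea expressed against the plain C API types, as a sketch only (ovrFovPort holds the SDK's half-angle tangents; the helper name makeSymmetricFov is ours):

#include <algorithm> // std::max

// Build one FOV wide enough for both eyes and symmetric about the view axis,
// so a single projection (and render target) can serve both eyes.
ovrFovPort makeSymmetricFov(const ovrFovPort& left, const ovrFovPort& right)
{
    ovrFovPort fov;
    fov.UpTan    = std::max(left.UpTan,    right.UpTan);
    fov.DownTan  = std::max(left.DownTan,  right.DownTan);
    fov.LeftTan  = std::max(left.LeftTan,  right.LeftTan);
    fov.RightTan = std::max(left.RightTan, right.RightTan);
    // Mirror horizontally: both sides take the wider tangent.
    fov.LeftTan = fov.RightTan = std::max(fov.LeftTan, fov.RightTan);
    return fov;
}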
Example No. 26
0
int
setup_rift(struct weston_compositor *compositor)
{
  struct oculus_rift *rift = compositor->rift;

  rift->enabled = 1;

  rift->screen_z = -5.0;
  rift->screen_scale = 1.0;

  weston_compositor_add_key_binding(compositor, KEY_5, MODIFIER_SUPER, 
      toggle_sbs, compositor);
  weston_compositor_add_key_binding(compositor, KEY_6, MODIFIER_SUPER, 
      toggle_rotate, compositor);
  weston_compositor_add_key_binding(compositor, KEY_7, MODIFIER_SUPER, 
      move_in, compositor);
  weston_compositor_add_key_binding(compositor, KEY_8, MODIFIER_SUPER, 
      move_out, compositor);
  weston_compositor_add_key_binding(compositor, KEY_9, MODIFIER_SUPER, 
      scale_up, compositor);
  weston_compositor_add_key_binding(compositor, KEY_0, MODIFIER_SUPER, 
      scale_down, compositor);

  /*// use this at some point in the future to detect and grab the rift display
  struct weston_output *output;
  wl_list_for_each(output, &compositor->output_list, link)
  {
    weston_log("Output (%i): %s\n\t%ix%i\n", output->id, output->name,
        output->width, output->height);
  }*/

  rift->distortion_shader = calloc(1, sizeof *(rift->distortion_shader));
  struct distortion_shader_ *d = rift->distortion_shader;
  d->program = CreateProgram(distortion_vertex_shader, distortion_fragment_shader);
  d->EyeToSourceUVScale = glGetUniformLocation(d->program, "EyeToSourceUVScale");
  d->EyeToSourceUVOffset = glGetUniformLocation(d->program, "EyeToSourceUVOffset");
  d->RightEye = glGetUniformLocation(d->program, "RightEye");
  d->angle = glGetUniformLocation(d->program, "angle");
  d->Position = glGetAttribLocation(d->program, "Position");
  d->TexCoord0 = glGetAttribLocation(d->program, "TexCoord0");
  d->TexCoordR = glGetAttribLocation(d->program, "TexCoordR");
  d->TexCoordG = glGetAttribLocation(d->program, "TexCoordG");
  d->TexCoordB = glGetAttribLocation(d->program, "TexCoordB");
  d->eyeTexture = glGetUniformLocation(d->program, "Texture0"); /* sampler: uniform, not attribute */

  rift->eye_shader = calloc(1, sizeof *(rift->eye_shader));
  struct eye_shader_ *e = rift->eye_shader;
  e->program = CreateProgram(eye_vertex_shader, eye_fragment_shader);
  e->Position = glGetAttribLocation(e->program, "Position");
  e->TexCoord0 = glGetAttribLocation(e->program, "TexCoord0");
  e->Projection = glGetUniformLocation(e->program, "Projection");
  e->ModelView = glGetUniformLocation(e->program, "ModelView");
  e->virtualScreenTexture = glGetUniformLocation(e->program, "Texture0"); /* sampler: uniform, not attribute */

  rift->scene = calloc(1, sizeof *(rift->scene));
  glGenBuffers(1, &rift->scene->vertexBuffer);
  glBindBuffer(GL_ARRAY_BUFFER, rift->scene->vertexBuffer);
  static const GLfloat rectangle[] = 
    {-1.0f, -1.0f, -0.5f, 
      1.0f, -1.0f, -0.5f, 
     -1.0f, 1.0f, -0.5f,
      1.0f, -1.0f, -0.5f, 
      1.0f, 1.0f, -0.5f,
     -1.0f, 1.0f, -0.5f};
  glBufferData(GL_ARRAY_BUFFER, sizeof(rectangle), rectangle, GL_STATIC_DRAW);

  glGenBuffers(2, &rift->scene->SBSuvsBuffer[0]);
  glGenBuffers(1, &rift->scene->uvsBuffer);
  static const GLfloat uvs[3][12] = 
   {{ 0.0, 0.0,
      0.5, 0.0,
      0.0, 1.0,
      0.5, 0.0,
      0.5, 1.0,
      0.0, 1.0},
   {  0.5, 0.0,
      1.0, 0.0,
      0.5, 1.0,
      1.0, 0.0,
      1.0, 1.0,
      0.5, 1.0},
   {  0.0, 0.0,
      1.0, 0.0,
      0.0, 1.0,
      1.0, 0.0,
      1.0, 1.0,
      0.0, 1.0}};
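  /* uvs[0] and uvs[1] address the left/right halves of the virtual screen
     texture for side-by-side (SBS) content; uvs[2] spans the whole texture
     for mono content. */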
  glBindBuffer(GL_ARRAY_BUFFER, rift->scene->SBSuvsBuffer[0]);
  glBufferData(GL_ARRAY_BUFFER, sizeof(uvs[0]), uvs[0], GL_STATIC_DRAW);

  glBindBuffer(GL_ARRAY_BUFFER, rift->scene->SBSuvsBuffer[1]);
  glBufferData(GL_ARRAY_BUFFER, sizeof(uvs[1]), uvs[1], GL_STATIC_DRAW);

  glBindBuffer(GL_ARRAY_BUFFER, rift->scene->uvsBuffer);
  glBufferData(GL_ARRAY_BUFFER, sizeof(uvs[2]), uvs[2], GL_STATIC_DRAW);

  rift->width = 1920;
  rift->height = 1080;

  glGenTextures(1, &rift->fbTexture);
  glBindTexture(GL_TEXTURE_2D, rift->fbTexture);
  glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, rift->width, rift->height, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0);
  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

  glGenFramebuffers(1, &rift->redirectedFramebuffer);
  glBindFramebuffer(GL_FRAMEBUFFER, rift->redirectedFramebuffer);
  glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, rift->fbTexture, 0); show_error();
  GLenum fbStatus = glCheckFramebufferStatus(GL_FRAMEBUFFER);
  if(fbStatus != GL_FRAMEBUFFER_COMPLETE)
  {
    switch(fbStatus)
    {
      case GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT: weston_log("incomplete attachment\n"); break;
      case GL_FRAMEBUFFER_INCOMPLETE_DIMENSIONS: weston_log("incomplete dimensions\n"); break;
      case GL_FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT: weston_log("incomplete missing attachment\n"); break;
      case GL_FRAMEBUFFER_UNSUPPORTED: weston_log("unsupported\n"); break;
    }

    weston_log("framebuffer not working\n");
    show_error();
    exit(1);
  }
  glClear(GL_COLOR_BUFFER_BIT);

  /*EGLint pbufferAttributes[] = {
     EGL_WIDTH,           rift->width,
     EGL_HEIGHT,          rift->height,
     EGL_TEXTURE_FORMAT,  EGL_TEXTURE_RGB,
     EGL_TEXTURE_TARGET,  EGL_TEXTURE_2D,
     EGL_NONE
  };

  rift->pbuffer = eglCreatePbufferSurface(
      rift->egl_display, rift->egl_config, 
      pbufferAttributes);

  glGenTextures(1, &(rift->texture));
  glBindTexture(GL_TEXTURE_2D, rift->texture);
  //glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, rift->width, rift->height, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0);
  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

  eglMakeCurrent(rift->egl_display, rift->pbuffer, rift->pbuffer, rift->egl_context);
  eglBindTexImage(rift->egl_display, rift->pbuffer, EGL_BACK_BUFFER);
  eglMakeCurrent(rift->egl_display, rift->orig_surface, rift->orig_surface, rift->egl_context);*/

  ovr_Initialize(0);
  rift->hmd = ovrHmd_Create(0);
  if(rift->hmd == NULL)
  {
    rift->hmd = ovrHmd_CreateDebug(ovrHmd_DK2);
  }
  ovrHmd_ConfigureTracking(rift->hmd, ovrTrackingCap_Orientation | 
      ovrTrackingCap_Position | ovrTrackingCap_MagYawCorrection, 0);
  ovrHmd_ResetFrameTiming(rift->hmd, 0);

  int eye;
  for(eye = 0; eye < 2; eye++)
  {
    ovrFovPort fov = rift->hmd->DefaultEyeFov[eye];
    ovrEyeRenderDesc renderDesc = ovrHmd_GetRenderDesc(rift->hmd, eye, fov);
    struct EyeArg *eyeArg = &rift->eyeArgs[eye];

    eyeArg->projection = ovrMatrix4f_Projection(fov, 0.1, 100000, true);
    /*int j, k;
    for(k=0; k<4; k++)
    {
      for(j=0; j<4; j++)
      {
        printf("%f\t", eyeArg->projection.M[k][j]);
      }
      printf("\n");
    }*/
    rift->hmdToEyeOffsets[eye] = renderDesc.HmdToEyeViewOffset;
    ovrRecti texRect;
    texRect.Size = ovrHmd_GetFovTextureSize(rift->hmd, eye, rift->hmd->DefaultEyeFov[eye],
        1.0f);
    texRect.Pos.x = texRect.Pos.y = 0;
    eyeArg->textureWidth = texRect.Size.w;
    eyeArg->textureHeight = texRect.Size.h;

    glGenTextures(1, &eyeArg->texture);
    glBindTexture(GL_TEXTURE_2D, eyeArg->texture);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, eyeArg->textureWidth, eyeArg->textureHeight, 0, GL_RGBA, GL_UNSIGNED_BYTE, NULL);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

    glGenFramebuffers(1, &eyeArg->framebuffer); show_error();
    glBindFramebuffer(GL_FRAMEBUFFER, eyeArg->framebuffer); show_error();
    glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, eyeArg->texture, 0); show_error();
    GLenum fbStatus = glCheckFramebufferStatus(GL_FRAMEBUFFER);
    if(fbStatus != GL_FRAMEBUFFER_COMPLETE)
    {
      switch(fbStatus)
      {
        case GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT: weston_log("incomplete attachment\n"); break;
        case GL_FRAMEBUFFER_INCOMPLETE_DIMENSIONS: weston_log("incomplete dimensions\n"); break;
        case GL_FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT: weston_log("incomplete missing attachment\n"); break;
        case GL_FRAMEBUFFER_UNSUPPORTED: weston_log("unsupported\n"); break;
      }

      weston_log("framebuffer not working\n");
      show_error();
      exit(1);
    }
    if(eye)
    {
      glClearColor(1.0, 0.0, 0.0, 1.0); show_error();
    }
    else
    {
      glClearColor(0.0, 1.0, 0.0, 1.0); show_error();
    }
    glClear(GL_COLOR_BUFFER_BIT); show_error();

    /*EGLint eyePbufferAttributes[] = {
       EGL_WIDTH,           texRect.Size.w,
       EGL_HEIGHT,          texRect.Size.h,
       EGL_TEXTURE_FORMAT,  EGL_TEXTURE_RGB,
       EGL_TEXTURE_TARGET,  EGL_TEXTURE_2D,
       EGL_NONE
    };

    eyeArg.surface = eglCreatePbufferSurface(
        rift->egl_display, rift->egl_config, 
        eyePbufferAttributes);*/

    ovrVector2f scaleAndOffset[2];
    ovrHmd_GetRenderScaleAndOffset(fov, texRect.Size, texRect, scaleAndOffset);
    eyeArg->scale = scaleAndOffset[0];
    eyeArg->offset = scaleAndOffset[1];

    ovrHmd_CreateDistortionMesh(rift->hmd, eye, fov, 0, &eyeArg->mesh);

    glGenBuffers(1, &eyeArg->indexBuffer);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, eyeArg->indexBuffer);
    glBufferData(GL_ELEMENT_ARRAY_BUFFER, eyeArg->mesh.IndexCount * sizeof(unsigned short), eyeArg->mesh.pIndexData, GL_STATIC_DRAW);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);

    float vertices_buffer[eyeArg->mesh.VertexCount*2];
    float uvs_buffer[3][eyeArg->mesh.VertexCount*2];
    uint i;
    for(i=0; i<eyeArg->mesh.VertexCount; i++)
    {
      ovrDistortionVertex vertex = eyeArg->mesh.pVertexData[i];
      vertices_buffer[i*2] = vertex.ScreenPosNDC.x;
      vertices_buffer[(i*2)+1] = vertex.ScreenPosNDC.y;
      uvs_buffer[0][i*2] = vertex.TanEyeAnglesR.x;
      uvs_buffer[0][(i*2)+1] = vertex.TanEyeAnglesR.y;
      uvs_buffer[1][i*2] = vertex.TanEyeAnglesG.x;
      uvs_buffer[1][(i*2)+1] = vertex.TanEyeAnglesG.y;
      uvs_buffer[2][i*2] = vertex.TanEyeAnglesB.x;
      uvs_buffer[2][(i*2)+1] = vertex.TanEyeAnglesB.y;
    }

    glGenBuffers(1, &eyeArg->vertexBuffer);
    glBindBuffer(GL_ARRAY_BUFFER, eyeArg->vertexBuffer);
    glBufferData(GL_ARRAY_BUFFER, eyeArg->mesh.VertexCount * sizeof(GLfloat) * 2, vertices_buffer, GL_STATIC_DRAW);
    glGenBuffers(3, &eyeArg->uvsBuffer[0]);
    for(i=0; i<3; i++)
    {
      glBindBuffer(GL_ARRAY_BUFFER, eyeArg->uvsBuffer[i]);
      glBufferData(GL_ARRAY_BUFFER, eyeArg->mesh.VertexCount * sizeof(GLfloat) * 2, uvs_buffer[i], GL_STATIC_DRAW);
      glBindBuffer(GL_ARRAY_BUFFER, 0);
    }
  }

  return 0;
}
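setup_rift builds the distortion mesh buffers but the per-eye draw lives elsewhere; below is a hedged sketch of what that draw could look like using only the handles created above (the exact semantics of the RightEye uniform are a guess from its name):

/* Sketch: drawing one eye's distortion mesh with the buffers set up above. */
static void draw_distortion_mesh(struct oculus_rift *rift, int eye)
{
  struct distortion_shader_ *d = rift->distortion_shader;
  struct EyeArg *arg = &rift->eyeArgs[eye];
  GLint uv_attribs[3];
  int i;

  glUseProgram(d->program);
  glUniform2f(d->EyeToSourceUVScale, arg->scale.x, arg->scale.y);
  glUniform2f(d->EyeToSourceUVOffset, arg->offset.x, arg->offset.y);
  glUniform1i(d->RightEye, eye); /* assumed: 0 = left, 1 = right */

  glBindBuffer(GL_ARRAY_BUFFER, arg->vertexBuffer);
  glEnableVertexAttribArray(d->Position);
  glVertexAttribPointer(d->Position, 2, GL_FLOAT, GL_FALSE, 0, 0);

  /* One UV stream per colour channel for chromatic aberration correction. */
  uv_attribs[0] = d->TexCoordR;
  uv_attribs[1] = d->TexCoordG;
  uv_attribs[2] = d->TexCoordB;
  for (i = 0; i < 3; i++)
  {
    glBindBuffer(GL_ARRAY_BUFFER, arg->uvsBuffer[i]);
    glEnableVertexAttribArray(uv_attribs[i]);
    glVertexAttribPointer(uv_attribs[i], 2, GL_FLOAT, GL_FALSE, 0, 0);
  }

  glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, arg->indexBuffer);
  glDrawElements(GL_TRIANGLES, arg->mesh.IndexCount, GL_UNSIGNED_SHORT, 0);
  glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
}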
Example No. 27
0
void OculusRiftNode::render( qint64 pTimeStamp, QUuid pSourcePinId )
{
	Q_UNUSED( pSourcePinId )

#if !defined( OCULUS_PLUGIN_SUPPORTED )
	Q_UNUSED( pTimeStamp )
#else
	fugio::Performance	Perf( mNode, "drawGeometry", pTimeStamp );

	if( !mNode->isInitialised() )
	{
		return;
	}

	if( !mOculusRift->hmd() )
	{
		return;
	}

	// We need to keep a reference to ourselves here as ovr_SubmitFrame can
	// call the main app event loop, which can close the context and delete us!

	QSharedPointer<fugio::NodeControlInterface>	C = mNode->control();

	mOculusRift->drawStart();

	const float		NearPlane = variant( mPinNearPlane ).toFloat();
	const float		FarPlane  = variant( mPinFarPlane ).toFloat();

	//	float Yaw(3.141592f);
	//	Vector3f Pos2(0.0f,1.6f,-5.0f);
	//	Pos2.y = ovrHmd_GetFloat(mHMD, OVR_KEY_EYE_HEIGHT, Pos2.y);

	QMatrix4x4		MatEye = variant( mPinViewMatrix ).value<QMatrix4x4>();
	QVector3D		TrnEye = MatEye.column( 3 ).toVector3D();

	MatEye.setColumn( 3, QVector4D( 0, 0, 0, 1 ) );

	const Vector3f Pos2( TrnEye.x(), TrnEye.y(), TrnEye.z() );

	Matrix4f tempRollPitchYaw;

	memcpy( tempRollPitchYaw.M, MatEye.constData(), sizeof( float ) * 16 );

	const Matrix4f rollPitchYaw = tempRollPitchYaw;

	// Render Scene to Eye Buffers
	for (int eye = 0; eye < 2; eye++)
	{
		mOculusRift->drawEyeStart( eye );

		// Get view and projection matrices
		//Matrix4f rollPitchYaw = Matrix4f( MatEye.transposed().data() );
		Matrix4f finalRollPitchYaw = rollPitchYaw * Matrix4f( mOculusRift->eyeRenderPos( eye ).Orientation);
		Vector3f finalUp = finalRollPitchYaw.Transform(Vector3f(0, 1, 0));
		Vector3f finalForward = finalRollPitchYaw.Transform(Vector3f(0, 0, -1));
		Vector3f shiftedEyePos = Pos2 + rollPitchYaw.Transform( mOculusRift->eyeRenderPos( eye ).Position );

		Matrix4f view = Matrix4f::LookAtRH(shiftedEyePos, shiftedEyePos + finalForward, finalUp);
		Matrix4f proj = ovrMatrix4f_Projection( mOculusRift->defaultEyeFOV( eye ), NearPlane, FarPlane, ovrProjection_None );

		mProjection->setVariant( QMatrix4x4( &proj.M[ 0 ][ 0 ], 4, 4 ).transposed() );

		mView->setVariant( QMatrix4x4( &view.M[ 0 ][ 0 ], 4, 4 ).transposed() );

		fugio::OpenGLStateInterface		*CurrentState = 0;

		for( QSharedPointer<fugio::PinInterface> P : mNode->enumInputPins() )
		{
			if( !P->isConnected() )
			{
				continue;
			}

			if( P->connectedPin().isNull() )
			{
				continue;
			}

			if( P->connectedPin()->control().isNull() )
			{
				continue;
			}

			QObject					*O = P->connectedPin()->control()->qobject();

			if( !O )
			{
				continue;
			}

			if( true )
			{
				fugio::RenderInterface		*Geometry = qobject_cast<fugio::RenderInterface *>( O );

				if( Geometry )
				{
					Geometry->render( pTimeStamp );

					continue;
				}
			}

			if( true )
			{
				fugio::OpenGLStateInterface		*NextState = qobject_cast<fugio::OpenGLStateInterface *>( O );

				if( NextState != 0 )
				{
					if( CurrentState != 0 )
					{
						CurrentState->stateEnd();
					}

					CurrentState = NextState;

					CurrentState->stateBegin();

					continue;
				}
			}
		}

		if( CurrentState != 0 )
		{
			CurrentState->stateEnd();
		}

		mOculusRift->drawEyeEnd( eye );
	}

	mOculusRift->drawEnd();

	pinUpdated( mPinProjection );
	pinUpdated( mPinView );
#endif
}
Example No. 28
0
glm::mat4 getOrthographic() {
    const ovrEyeRenderDesc & erd = getEyeRenderDesc();
    ovrMatrix4f ovrPerspectiveProjection = ovrMatrix4f_Projection(erd.Fov, 0.01f, 100000.0f, true);
    ovrVector2f scale; scale.x = scaleFactor; scale.y = scaleFactor;
    return ovr::toGlm(ovrMatrix4f_OrthoSubProjection(ovrPerspectiveProjection, scale, 100.8f, erd.HmdToEyeViewOffset.x));
}
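ovrMatrix4f_OrthoSubProjection maps a 2D overlay into the same tangent-angle space as the perspective projection, which pins HUD elements at a fixed apparent depth. A sketch combining the pieces from Example No. 25 with the function above (field names follow the 0.4-era ovrEyeRenderDesc; orthoDistance is the apparent HUD distance in meters):

// Sketch: per-eye orthographic overlay projection at 0.8 m apparent depth.
ovrMatrix4f makeHudProjection(const ovrEyeRenderDesc& erd, float nearClip, float farClip)
{
    const ovrMatrix4f persp = ovrMatrix4f_Projection(erd.Fov, nearClip, farClip, true);
    ovrVector2f orthoScale;
    orthoScale.x = 1.0f / erd.PixelsPerTanAngleAtCenter.x; // pixels -> tan-angle units
    orthoScale.y = 1.0f / erd.PixelsPerTanAngleAtCenter.y;
    const float orthoDistance = 0.8f; // HUD plane 0.8 m from the eye
    return ovrMatrix4f_OrthoSubProjection(persp, orthoScale, orthoDistance,
                                          erd.HmdToEyeViewOffset.x);
}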
Example No. 29
0
/**
* Tracker update for the Virtual Cinema Theatre render loop.
***/
void* OculusTracker::Provoke(void* pThis, int eD3D, int eD3DInterface, int eD3DMethod, DWORD dwNumberConnected, int& nProvokerIndex)
{
	// update game timer
	m_cGameTimer.Tick();

	static UINT unFrameSkip = 200;
	if (unFrameSkip > 0)
	{
		unFrameSkip--;
		return nullptr;
	}

	// #define _DEBUG_OTR
#ifdef _DEBUG_OTR
	{ wchar_t buf[128]; wsprintf(buf, L"[OTR] ifc %u mtd %u", eD3DInterface, eD3DMethod); OutputDebugString(buf); }
#endif

	// save ini file ?
	if (m_nIniFrameCount)
	{
		if (m_nIniFrameCount == 1)
			SaveIniSettings();
		m_nIniFrameCount--;
	}

	// main menu update ?
	if (m_sMenu.bOnChanged)
	{
		// set back event bool, set ini file frame count
		m_sMenu.bOnChanged = false;
		m_nIniFrameCount = 300;

		// loop through entries
		for (size_t nIx = 0; nIx < m_sMenu.asEntries.size(); nIx++)
		{
			// entry index changed ?
			if (m_sMenu.asEntries[nIx].bOnChanged)
			{
				m_sMenu.asEntries[nIx].bOnChanged = false;

				// touch entries ?
				if (nIx < 25)
				{
					// set new vk code by string
					m_aaunKeys[1][nIx] = GetVkCodeByString(m_sMenu.asEntries[nIx].astrValueEnumeration[m_sMenu.asEntries[nIx].unValue]);
				}
			}
		}
	}

	if (m_hSession)
	{
#pragma region controller
		// controller indices
		static const uint32_t s_unIndexRemote = 0;
		static const uint32_t s_unIndexTouch = 1;
		static const uint32_t s_unIndexXBox = 2;

		// get all connected input states
		ovrInputState sInputState[3] = {};
		unsigned int unControllersConnected = ovr_GetConnectedControllerTypes(m_hSession);
#pragma region Remote
		if (unControllersConnected & ovrControllerType_Remote)
		{
			ovr_GetInputState(m_hSession, ovrControllerType_Remote, &sInputState[s_unIndexRemote]);

			// handle all remote buttons except Oculus private ones
			if (sInputState[s_unIndexRemote].Buttons & ovrButton_Up)
				m_sMenu.bOnUp = true;
			if (sInputState[s_unIndexRemote].Buttons & ovrButton_Down)
				m_sMenu.bOnDown = true;
			if (sInputState[s_unIndexRemote].Buttons & ovrButton_Left)
				m_sMenu.bOnLeft = true;
			if (sInputState[s_unIndexRemote].Buttons & ovrButton_Right)
				m_sMenu.bOnRight = true;
			if (sInputState[s_unIndexRemote].Buttons & ovrButton_Enter)
				m_sMenu.bOnAccept = true;
			if (sInputState[s_unIndexRemote].Buttons & ovrButton_Back)
				m_sMenu.bOnBack = true;
		}
#pragma endregion
#pragma region touch
		if (unControllersConnected & ovrControllerType_Touch)
		{
			// get input state
			ovr_GetInputState(m_hSession, ovrControllerType_Touch, &sInputState[s_unIndexTouch]);

			// loop through controller buttons
			for (UINT unButtonIx = 0; unButtonIx < unButtonNo; unButtonIx++)
			{
				// cast keyboard event
				if (sInputState[s_unIndexTouch].Buttons & aunButtonIds[unButtonIx])
				{
					if (!m_aabKeys[s_unIndexTouch][unButtonIx])
						MapButtonDown(s_unIndexTouch, unButtonIx);
				}
				else
				if (m_aabKeys[s_unIndexTouch][unButtonIx])
					MapButtonUp(s_unIndexTouch, unButtonIx);
			}
		}
#pragma endregion
		if (unControllersConnected & ovrControllerType_XBox)
			ovr_GetInputState(m_hSession, ovrControllerType_XBox, &sInputState[s_unIndexXBox]);



#pragma endregion
#pragma region hmd
		/*// Start the sensor which informs of the Rift's pose and motion   .... obsolete for SDK 1.3.x ??
		ovr_ConfigureTracking(m_hSession, ovrTrackingCap_Orientation | ovrTrackingCap_MagYawCorrection |
		ovrTrackingCap_Position, 0);*/

		// get the current tracking state
		ovrTrackingState sTrackingState = ovr_GetTrackingState(m_hSession, ovr_GetTimeInSeconds(), false);

		if (TRUE)//(sTrackingState.StatusFlags & (ovrStatus_OrientationTracked | ovrStatus_PositionTracked))
		{
			// get pose
			ovrPoseStatef sPoseState = sTrackingState.HeadPose;
			m_sPose = sPoseState.ThePose;
			m_sOrientation.x = m_sPose.Orientation.x;
			m_sOrientation.y = m_sPose.Orientation.y;
			m_sOrientation.z = m_sPose.Orientation.z;
			m_sOrientation.w = m_sPose.Orientation.w;

			// backup old euler angles and velocity
			float fEulerOld[3];
			float fEulerVelocityOld[3];
			memcpy(&fEulerOld[0], &m_fEuler[0], sizeof(float)* 3);
			memcpy(&fEulerVelocityOld[0], &m_fEulerVelocity[0], sizeof(float)* 3);

			// predicted euler angles ? for Oculus, due to ATW, we do not predict the euler angles
			if (FALSE)
			{
				// get angles
				m_sOrientation.GetEulerAngles<Axis::Axis_Y, Axis::Axis_X, Axis::Axis_Z, RotateDirection::Rotate_CW, HandedSystem::Handed_R >(&m_fEuler[1], &m_fEuler[0], &m_fEuler[2]);

				// quick fix here...
				m_fEuler[1] *= -1.0f;
				m_fEuler[0] *= -1.0f;
				m_fEuler[2] *= -1.0f;

				// get euler velocity + acceleration
				float fEulerAcceleration[3];
				for (UINT unI = 0; unI < 3; unI++)
				{
					// get the velocity
					m_fEulerVelocity[unI] = (m_fEuler[unI] - fEulerOld[unI]) / (float)m_cGameTimer.DeltaTime();

					// get the acceleration
					fEulerAcceleration[unI] = (m_fEulerVelocity[unI] - fEulerVelocityOld[unI]) / (float)m_cGameTimer.DeltaTime();
				}

				// get predicted euler
				for (UINT unI = 0; unI < 3; unI++)
				{
					// compute predicted euler
					m_fEulerPredicted[unI] = (0.5f * fEulerAcceleration[unI] * ((float)m_cGameTimer.DeltaTime() * (float)m_cGameTimer.DeltaTime())) + (m_fEulerVelocity[unI] * (float)m_cGameTimer.DeltaTime()) + m_fEuler[unI];
				}
			}
			else
			{
				// get angles
				m_sOrientation.GetEulerAngles<Axis::Axis_Y, Axis::Axis_X, Axis::Axis_Z, RotateDirection::Rotate_CW, HandedSystem::Handed_R >(&m_fEulerPredicted[1], &m_fEulerPredicted[0], &m_fEulerPredicted[2]);

				// quick fix here...
				m_fEulerPredicted[1] *= -1.0f;
				m_fEulerPredicted[0] *= -1.0f;
				m_fEulerPredicted[2] *= -1.0f;
			}

			// set the drawing update to true
			m_bControlUpdate = true;

			// set position
			m_afPosition[0] = (float)-m_sPose.Position.x - m_afPositionOrigin[0];
			m_afPosition[1] = (float)-m_sPose.Position.y - m_afPositionOrigin[1];
			m_afPosition[2] = (float)m_sPose.Position.z + m_afPositionOrigin[2];

			// get eye render pose and other fields
			ovrEyeRenderDesc asEyeRenderDesc[2];
			asEyeRenderDesc[0] = ovr_GetRenderDesc(m_hSession, ovrEye_Left, m_sHMDDesc.DefaultEyeFov[0]);
			asEyeRenderDesc[1] = ovr_GetRenderDesc(m_hSession, ovrEye_Right, m_sHMDDesc.DefaultEyeFov[1]);
			ovrPosef asHmdToEyePose[2] = { asEyeRenderDesc[0].HmdToEyePose,asEyeRenderDesc[1].HmdToEyePose };
			//ovrVector3f      asHmdToEyeViewOffset[2] = { asEyeRenderDesc[0].HmdToEyePose, asEyeRenderDesc[1].HmdToEyePose };
			ovrPosef         asEyeRenderPose[2];
			static long long s_frameIndex = 0;
			static double s_sensorSampleTime = 0.0;    // sensorSampleTime is fed into the layer later
			ovr_GetEyePoses(m_hSession, s_frameIndex, ovrTrue, asHmdToEyePose, asEyeRenderPose, &s_sensorSampleTime);
			// ovr_CalcEyePoses(sTrackingState.HeadPose.ThePose, asHmdToEyePose, asEyeRenderPose);

			// create rotation matrix from euler angles
			D3DXMATRIX sRotation;
			D3DXMATRIX sPitch, sYaw, sRoll;
			D3DXMatrixRotationX(&sPitch, m_fEulerPredicted[0]);
			D3DXMatrixRotationY(&sYaw, m_fEulerPredicted[1]);
			D3DXMatrixRotationZ(&sRoll, -m_fEulerPredicted[2]);
			sRotation = sYaw * sPitch * sRoll;

			// create per eye view matrix from rotation and position
			D3DXMATRIX sView[2];
			for (UINT unEye = 0; unEye < 2; unEye++)
			{
				D3DXMATRIX sTranslation;
				D3DXMatrixTranslation(&sTranslation, (float)-asEyeRenderPose[unEye].Position.x - m_afPositionOrigin[0], (float)-asEyeRenderPose[unEye].Position.y - m_afPositionOrigin[1], (float)asEyeRenderPose[unEye].Position.z + m_afPositionOrigin[2]);
				sView[unEye] = sTranslation * sRotation;
			}

			// create head pose view matrix
			D3DXMATRIX sTranslation;
			D3DXMatrixTranslation(&sTranslation, (float)-sTrackingState.HeadPose.ThePose.Position.x - m_afPositionOrigin[0], (float)-sTrackingState.HeadPose.ThePose.Position.y - m_afPositionOrigin[1], (float)sTrackingState.HeadPose.ThePose.Position.z + m_afPositionOrigin[2]);
			m_sView = sTranslation * sRotation;

			// create inverse view matrix
			D3DXMATRIX sVInv = {};
			D3DXMatrixInverse(&sVInv, nullptr, &m_sView);

			// get projection matrices left/right
			D3DXMATRIX asToEye[2];
			D3DXMATRIX asProjection[2];
			for (UINT unEye = 0; unEye < 2; unEye++)
			{
				// get ovr projection
				ovrMatrix4f sProj = ovrMatrix4f_Projection(m_sHMDDesc.DefaultEyeFov[unEye], 0.01f, 30.0f, ovrProjection_LeftHanded);

				// create dx projection
				asProjection[unEye] = D3DXMATRIX(&sProj.M[0][0]);
				D3DXMatrixTranspose(&asProjection[unEye], &asProjection[unEye]);

				// create eventual projection using inverse matrix of the head pose view matrix
				m_asProjection[unEye] = sVInv * sView[unEye] * asProjection[unEye];
			}
		}
#pragma endregion
	}
	else
	{
		// Initialize LibOVR, and the Rift... then create hmd handle
		ovrResult result = ovr_Initialize(nullptr);
		if (!OVR_SUCCESS(result))
		{
			OutputDebugString(L"[OVR] Failed to initialize libOVR.");
			return nullptr;
		}

		result = ovr_Create(&m_hSession, &m_sLuid);
		if (!OVR_SUCCESS(result))
		{
			OutputDebugString(L"[OVR] Failed to retreive HMD handle.");
			return nullptr;
		}
		else
			OutputDebugString(L"[OVR] HMD handle initialized !");

		if (m_hSession)
		{
			// get the description and set pointers
			m_sHMDDesc = ovr_GetHmdDesc(m_hSession);

			// Configure Stereo settings.
			ovrSizei sRecommenedTex0Size = ovr_GetFovTextureSize(m_hSession, ovrEye_Left,
				m_sHMDDesc.DefaultEyeFov[0], 1.0f);
			ovrSizei sRecommenedTex1Size = ovr_GetFovTextureSize(m_hSession, ovrEye_Right,
				m_sHMDDesc.DefaultEyeFov[1], 1.0f);

			ovrSizei sTextureSize;
			sTextureSize.w = max(sRecommenedTex0Size.w, sRecommenedTex1Size.w);
			sTextureSize.h = max(sRecommenedTex0Size.h, sRecommenedTex1Size.h);
			m_unRenderTextureWidth = (UINT)sTextureSize.w;
			m_unRenderTextureHeight = (UINT)sTextureSize.h;

			// get view offset
			ovrEyeRenderDesc asEyeRenderDesc[2];
			asEyeRenderDesc[0] = ovr_GetRenderDesc(m_hSession, ovrEye_Left, m_sHMDDesc.DefaultEyeFov[0]);
			asEyeRenderDesc[1] = ovr_GetRenderDesc(m_hSession, ovrEye_Right, m_sHMDDesc.DefaultEyeFov[1]);
			ovrVector3f asViewOffset[2] = { asEyeRenderDesc[0].HmdToEyePose.Position, asEyeRenderDesc[1].HmdToEyePose.Position };

			// get projection matrices left/right
			D3DXMATRIX asToEye[2];
			D3DXMATRIX asProjection[2];
			for (UINT unEye = 0; unEye < 2; unEye++)
			{
				// get ovr projection
				ovrMatrix4f sProj = ovrMatrix4f_Projection(m_sHMDDesc.DefaultEyeFov[unEye], 0.01f, 30.0f, ovrProjection_LeftHanded);

				// create dx projection
				asProjection[unEye] = D3DXMATRIX(&sProj.M[0][0]);
				D3DXMatrixTranspose(&asProjection[unEye], &asProjection[unEye]);

				// create view offset translation matrix
				D3DXMatrixTranslation(&asToEye[unEye], -asViewOffset[unEye].x, -asViewOffset[unEye].y, -asViewOffset[unEye].z);

				// create eventual projection
				m_asProjection[unEye] = asToEye[unEye] * asProjection[unEye];
			}
		}
	}

	return nullptr;
}
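The disabled branch above extrapolates each Euler angle one frame ahead with a constant-acceleration model, theta' = theta + omega*dt + 0.5*alpha*dt^2, using finite differences for velocity and acceleration. A self-contained sketch of that step:

// Constant-acceleration extrapolation for one angle, mirroring the disabled
// prediction branch above. dt is the frame delta time in seconds.
float predictAngle(float theta, float thetaPrev, float omegaPrev, float dt)
{
	const float omega = (theta - thetaPrev) / dt; // finite-difference velocity
	const float alpha = (omega - omegaPrev) / dt; // finite-difference acceleration
	return theta + omega * dt + 0.5f * alpha * dt * dt;
}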
Example No. 30
0
OculusDevice::OculusDevice(float nearClip, float farClip, bool useTimewarp) : m_hmdDevice(0),
	m_nearClip(nearClip), m_farClip(farClip),
	m_useTimeWarp(useTimewarp),
	m_position(osg::Vec3(0.0f, 0.0f, 0.0f)),
	m_orientation(osg::Quat(0.0f, 0.0f, 0.0f, 1.0f))
{
	ovr_Initialize();
	
	// Enumerate HMD devices
	int numberOfDevices = ovrHmd_Detect();
	osg::notify(osg::DEBUG_INFO) << "Number of connected devices: " << numberOfDevices << std::endl;

	// Get first available HMD
	m_hmdDevice = ovrHmd_Create(0);
	
	// If no HMD is found try an emulated device
	if (!m_hmdDevice) {
		osg::notify(osg::WARN) << "Warning: No device could be found. Creating emulated device " << std::endl;
		m_hmdDevice = ovrHmd_CreateDebug(ovrHmd_DK1);
		ovrHmd_ResetFrameTiming(m_hmdDevice, 0);
	}

	if (m_hmdDevice) {
		// Print out some information about the HMD
		osg::notify(osg::ALWAYS) << "Product:         " << m_hmdDevice->ProductName << std::endl;
		osg::notify(osg::ALWAYS) << "Manufacturer:    " << m_hmdDevice->Manufacturer << std::endl;
		osg::notify(osg::ALWAYS) << "VendorId:        " << m_hmdDevice->VendorId << std::endl;
		osg::notify(osg::ALWAYS) << "ProductId:       " << m_hmdDevice->ProductId << std::endl;
		osg::notify(osg::ALWAYS) << "SerialNumber:    " << m_hmdDevice->SerialNumber << std::endl;
		osg::notify(osg::ALWAYS) << "FirmwareVersion: " << m_hmdDevice->FirmwareMajor << "."  << m_hmdDevice->FirmwareMinor << std::endl;

		// Get more details about the HMD.
		m_resolution = m_hmdDevice->Resolution;
		
		// Compute recommended render texture size
		float pixelsPerDisplayPixel = 1.0f; // Decrease this value to scale down the render texture on lower-performance hardware; values above 1.0 are unnecessary.

		ovrSizei recommendedLeftTextureSize = ovrHmd_GetFovTextureSize(m_hmdDevice, ovrEye_Left, m_hmdDevice->DefaultEyeFov[0], pixelsPerDisplayPixel);
		ovrSizei recommendedRightTextureSize = ovrHmd_GetFovTextureSize(m_hmdDevice, ovrEye_Right, m_hmdDevice->DefaultEyeFov[1], pixelsPerDisplayPixel);

		// Compute size of render target
		m_renderTargetSize.w = recommendedLeftTextureSize.w + recommendedRightTextureSize.w;
		m_renderTargetSize.h = osg::maximum(recommendedLeftTextureSize.h, recommendedRightTextureSize.h);

		// Initialize ovrEyeRenderDesc struct.
		m_eyeRenderDesc[0] = ovrHmd_GetRenderDesc(m_hmdDevice, ovrEye_Left, m_hmdDevice->DefaultEyeFov[0]);
		m_eyeRenderDesc[1] = ovrHmd_GetRenderDesc(m_hmdDevice, ovrEye_Right, m_hmdDevice->DefaultEyeFov[1]);
		
		ovrVector3f leftEyeAdjust = m_eyeRenderDesc[0].ViewAdjust;
		m_leftEyeAdjust.set(leftEyeAdjust.x, leftEyeAdjust.y, leftEyeAdjust.z);
		ovrVector3f rightEyeAdjust = m_eyeRenderDesc[1].ViewAdjust;
		m_rightEyeAdjust.set(rightEyeAdjust.x, rightEyeAdjust.y, rightEyeAdjust.z);

		bool isRightHanded = true;
		
		ovrMatrix4f leftEyeProjectionMatrix = ovrMatrix4f_Projection(m_eyeRenderDesc[0].Fov, m_nearClip, m_farClip, isRightHanded);
		// Transpose matrix
		m_leftEyeProjectionMatrix.set(leftEyeProjectionMatrix.M[0][0], leftEyeProjectionMatrix.M[1][0], leftEyeProjectionMatrix.M[2][0], leftEyeProjectionMatrix.M[3][0],
			leftEyeProjectionMatrix.M[0][1], leftEyeProjectionMatrix.M[1][1], leftEyeProjectionMatrix.M[2][1], leftEyeProjectionMatrix.M[3][1],
			leftEyeProjectionMatrix.M[0][2], leftEyeProjectionMatrix.M[1][2], leftEyeProjectionMatrix.M[2][2], leftEyeProjectionMatrix.M[3][2],
			leftEyeProjectionMatrix.M[0][3], leftEyeProjectionMatrix.M[1][3], leftEyeProjectionMatrix.M[2][3], leftEyeProjectionMatrix.M[3][3]);

		ovrMatrix4f rightEyeProjectionMatrix = ovrMatrix4f_Projection(m_eyeRenderDesc[1].Fov, m_nearClip, m_farClip, isRightHanded);
		// Transpose matrix
		m_rightEyeProjectionMatrix.set(rightEyeProjectionMatrix.M[0][0], rightEyeProjectionMatrix.M[1][0], rightEyeProjectionMatrix.M[2][0], rightEyeProjectionMatrix.M[3][0],
			rightEyeProjectionMatrix.M[0][1], rightEyeProjectionMatrix.M[1][1], rightEyeProjectionMatrix.M[2][1], rightEyeProjectionMatrix.M[3][1],
			rightEyeProjectionMatrix.M[0][2], rightEyeProjectionMatrix.M[1][2], rightEyeProjectionMatrix.M[2][2], rightEyeProjectionMatrix.M[3][2],
			rightEyeProjectionMatrix.M[0][3], rightEyeProjectionMatrix.M[1][3], rightEyeProjectionMatrix.M[2][3], rightEyeProjectionMatrix.M[3][3]);

		// Start the sensor which provides the Rift’s pose and motion.
		ovrHmd_ConfigureTracking(m_hmdDevice, ovrTrackingCap_Orientation |
			ovrTrackingCap_MagYawCorrection |
			ovrTrackingCap_Position, 0);

		beginFrameTiming();
	}
}
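The two element-by-element transposes above are identical except for the eye; a helper of our own making (not part of the original class) condenses them. osg::Matrixf exposes operator()(row, col) and ovrMatrix4f stores M[row][col], so swapping the indices while copying performs the transpose:

// Sketch: copy an ovrMatrix4f into an osg::Matrixf, transposing on the way,
// equivalent to the hand-written set(...) calls above.
static osg::Matrixf toOsgMatrix(const ovrMatrix4f& m)
{
	osg::Matrixf out;
	for (int row = 0; row < 4; ++row)
		for (int col = 0; col < 4; ++col)
			out(row, col) = m.M[col][row];
	return out;
}

With it, the body above reduces to m_leftEyeProjectionMatrix = toOsgMatrix(leftEyeProjectionMatrix); and likewise for the right eye.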