/// From the OVR SDK.
void OculusAppSkeleton::AssembleViewMatrix()
{
    // Rotate and position the m_oculusView camera, using yaw-pitch-roll in body-frame coordinates.
    OVR::Matrix4f rollPitchYaw = GetRollPitchYaw();
    OVR::Vector3f up      = rollPitchYaw.Transform(UpVector);
    OVR::Vector3f forward = rollPitchYaw.Transform(ForwardVector);

    // Minimal head modeling.
    float headBaseToEyeHeight     = 0.15f;  // Vertical height of eye from base of head
    float headBaseToEyeProtrusion = 0.09f;  // Distance forward of eye from base of head

    OVR::Vector3f eyeCenterInHeadFrame(0.0f, headBaseToEyeHeight, -headBaseToEyeProtrusion);
    OVR::Vector3f shiftedEyePos = EyePos + rollPitchYaw.Transform(eyeCenterInHeadFrame);
    shiftedEyePos.y -= eyeCenterInHeadFrame.y; // Bring the head back down to original height

    m_oculusView = OVR::Matrix4f::LookAtRH(shiftedEyePos, shiftedEyePos + forward, up); 

    // This is what the transformation would be without head modeling.
    // m_oculusView = Matrix4f::LookAtRH(EyePos, EyePos + forward, up);


    /// Set up a third-person (or otherwise) view for the control window
    {
        OVR::Vector3f txFollowDisp = rollPitchYaw.Transform(FollowCamDisplacement);
        FollowCamPos = EyePos + txFollowDisp;
        m_controlView = OVR::Matrix4f::LookAtRH(FollowCamPos, EyePos, up);
    }
}
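GetRollPitchYaw() is called here and again in Example #18, but never defined on this page. A minimal sketch, assuming it composes the EyeYaw/EyePitch/EyeRoll members that Example #16 accumulates (the body is a guess modeled on the classic OVR sample code):

OVR::Matrix4f OculusAppSkeleton::GetRollPitchYaw() const
{
    // Yaw first, then pitch, then roll, in body-frame coordinates.
    return OVR::Matrix4f::RotationY(EyeYaw)
         * OVR::Matrix4f::RotationX(EyePitch)
         * OVR::Matrix4f::RotationZ(EyeRoll);
}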
Example #2
void RiftAppSkeleton::_drawSceneMono() const
{
    _resetGLState();
    glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    const int w = m_Cfg.OGL.Header.RTSize.w;
    const int h = m_Cfg.OGL.Header.RTSize.h;

    const glm::vec3 EyePos(m_chassisPos.x, m_chassisPos.y, m_chassisPos.z);
    const glm::vec3 LookVec(0.0f, 0.0f, -1.0f);
    const glm::vec3 up(0.0f, 1.0f, 0.0f);

    ovrPosef eyePose;
    eyePose.Orientation = OVR::Quatf();
    eyePose.Position = OVR::Vector3f();
    const OVR::Matrix4f view = _MakeModelviewMatrix(
        eyePose,
        OVR::Vector3f(0.0f),
        m_chassisYaw,
        m_chassisPos);

    const glm::mat4 persp = glm::perspective(
        90.0f,
        static_cast<float>(w)/static_cast<float>(h),
        0.004f,
        500.0f);

    ovrRecti rvp = {0,0,w,h};
    _DrawScenes(&view.Transposed().M[0][0], glm::value_ptr(persp), rvp);
}
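Note that the 90.0f passed to glm::perspective above is a field of view in degrees; builds that define GLM_FORCE_RADIANS (the default behavior since GLM 0.9.6) expect radians instead. An equivalent call under that convention would be:

    const float aspect = static_cast<float>(w) / static_cast<float>(h);
    const glm::mat4 persp = glm::perspective(glm::radians(90.0f), aspect, 0.004f, 500.0f);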
Example #3
void OVRScene::RenderForOneEye(const float* pMview, const float* pPersp) const
{
    printf("Rendering in OVRScene!\n");
    if (m_bDraw == false)
        return;
    if (pMview == NULL) // compare the pointers against NULL, not false
        return;
    if (pPersp == NULL)
        return;

    const glm::mat4 modelview = glm::make_mat4(pMview);
    const glm::mat4 projection = glm::make_mat4(pPersp);

    // Assemble modelview matrix to lock camera in with real world geometry:
    // We still have to use the assembled HMD stereo modelview matrices from OVRSDK05AppSkeleton,
    // but we undo the effects of chassis yaw and position so the frustum follows the viewer.
    if (m_pHmd != NULL)
    {
        const ovrTrackingState ts = ovrHmd_GetTrackingState(m_pHmd, ovr_GetTimeInSeconds());
        const ovrPosef& cp = ts.CameraPose;

        OVR::Matrix4f camMtx = OVR::Matrix4f();
        camMtx *= OVR::Matrix4f::Translation(cp.Position)
            * OVR::Matrix4f(OVR::Quatf(cp.Orientation));

        glm::mat4 ogmat = glm::make_mat4(&camMtx.Transposed().M[0][0]);

        DrawScene(modelview * ogmat, projection);
    }
}
Example #4
OVR_PUBLIC_FUNCTION(ovrTrackerPose) ovr_GetTrackerPose(ovrSession session, unsigned int trackerPoseIndex)
{
	ovrTrackerPose pose = { 0 };

	// Get the index for this tracker.
	vr::TrackedDeviceIndex_t trackers[vr::k_unMaxTrackedDeviceCount];
	g_VRSystem->GetSortedTrackedDeviceIndicesOfClass(vr::TrackedDeviceClass_TrackingReference, trackers, vr::k_unMaxTrackedDeviceCount);
	vr::TrackedDeviceIndex_t index = trackers[trackerPoseIndex];

	// Set the flags
	pose.TrackerFlags = 0;
	if (session->poses[index].bDeviceIsConnected)
		pose.TrackerFlags |= ovrTracker_Connected;
	if (session->poses[index].bPoseIsValid)
		pose.TrackerFlags |= ovrTracker_PoseTracked;

	// Convert the pose
	OVR::Matrix4f matrix;
	if (session->poses[index].bPoseIsValid)
		matrix = REV_HmdMatrixToOVRMatrix(session->poses[index].mDeviceToAbsoluteTracking);
	OVR::Quatf quat = OVR::Quatf(matrix);
	pose.Pose.Orientation = quat;
	pose.Pose.Position = matrix.GetTranslation();

	// Level the pose
	float yaw;
	quat.GetYawPitchRoll(&yaw, nullptr, nullptr);
	pose.LeveledPose.Orientation = OVR::Quatf(OVR::Axis_Y, yaw);
	pose.LeveledPose.Position = matrix.GetTranslation();

	return pose;
}
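ovr_GetTrackerPose assumes trackerPoseIndex is in range for the sorted device list. A caller-side sketch, assuming this runtime also implements the standard ovr_GetTrackerCount entry point:

	unsigned int count = ovr_GetTrackerCount(session);
	for (unsigned int i = 0; i < count; ++i)
	{
		const ovrTrackerPose tp = ovr_GetTrackerPose(session, i);
		// use tp.TrackerFlags, tp.Pose and tp.LeveledPose here
	}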
Example #5
OVR::Matrix4f vx_ovr_namespace_::OVRHMDHandleWithDevice::getViewMatrix(ovrEyeType eye, float pos_x, float pos_y, float pos_z, float yaw) const
{
	auto height = ovr_GetFloat(session_, OVR_KEY_EYE_HEIGHT, 1.8f);

	OVR::Matrix4f rollPitchYaw = OVR::Matrix4f::RotationY(yaw);
	OVR::Matrix4f finalRollPitchYaw = rollPitchYaw * OVR::Matrix4f(eyeRenderPosef_[eye].Orientation);
	OVR::Vector3f finalUp = finalRollPitchYaw.Transform(OVR::Vector3f(0.0, 1.0, 0.0));
	OVR::Vector3f finalForward = finalRollPitchYaw.Transform(OVR::Vector3f(0.0, 0.0, -1.0));
	OVR::Vector3f shiftedEyePos = OVR::Vector3f(pos_x, pos_y + height, pos_z) + rollPitchYaw.Transform(eyeRenderPosef_[eye].Position);

	return OVR::Matrix4f::LookAtRH(shiftedEyePos, shiftedEyePos + finalForward, finalUp);
}
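A usage sketch pairing this view matrix with the SDK projection; eyeRenderDesc_ and the clip-plane values are assumptions, not members shown above:

	const OVR::Matrix4f view = getViewMatrix(eye, pos_x, pos_y, pos_z, yaw);
	const OVR::Matrix4f proj = ovrMatrix4f_Projection(eyeRenderDesc_[eye].Fov, 0.2f, 1000.0f, ovrProjection_None);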
Example #6
glm::mat4 CameraOvr::getOrientation(OVR::Quatf orientationQuat, const OVR::Util::Render::StereoEyeParams& eyeParams) {

    orientationQuat.GetEulerAngles<OVR::Axis_Y, OVR::Axis_X, OVR::Axis_Z>(&_hmdRy, &_hmdRx, &_hmdRz);

    OVR::Matrix4f orientation = OVR::Matrix4f::RotationY(_hmdRy+_ry)
                                * OVR::Matrix4f::RotationX(_hmdRx+_rx)
                                * OVR::Matrix4f::RotationZ(_hmdRz+_rz);

    OVR::Matrix4f view = orientation.Inverted() * eyeParams.ViewAdjust;

    return ovrToGlmMat4(view);
}
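ovrToGlmMat4 is not shown on this page. A minimal sketch: OVR::Matrix4f is row-major and glm::mat4 is column-major, so a transpose during conversion suffices (glm::make_mat4 comes from <glm/gtc/type_ptr.hpp>):

glm::mat4 ovrToGlmMat4(const OVR::Matrix4f& view) {
    // make_mat4 reads the 16 floats column-major, so transpose the row-major OVR data.
    return glm::transpose(glm::make_mat4(&view.M[0][0]));
}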
Example #7
// Store HMD position and direction for gaze tracking in timestep.
// OVR SDK requires head pose be queried between ovrHmd_BeginFrameTiming and ovrHmd_EndFrameTiming.
void RiftAppSkeleton::_StoreHmdPose(const ovrPosef& eyePose)
{
    m_hmdRo.x = eyePose.Position.x + m_chassisPos.x;
    m_hmdRo.y = eyePose.Position.y + m_chassisPos.y;
    m_hmdRo.z = eyePose.Position.z + m_chassisPos.z;

    const OVR::Matrix4f rotmtx = OVR::Matrix4f::RotationY(-m_chassisYaw) // Not sure why negative...
        * OVR::Matrix4f(eyePose.Orientation);
    const OVR::Vector4f rotvec = rotmtx.Transform(OVR::Vector4f(0.0f, 0.0f, -1.0f, 0.0f));
    m_hmdRd.x = rotvec.x;
    m_hmdRd.y = rotvec.y;
    m_hmdRd.z = rotvec.z;
}
Example #8
void RiftAppSkeleton::RenderThumbnails()
{
    std::vector<Pane*>& panes = m_paneScene.m_panes;
    for (std::vector<Pane*>::iterator it = panes.begin();
        it != panes.end();
        ++it)
    {
        // dynamic_cast so the NULL check below actually filters out non-ShaderPanes
        ShaderPane* pP = dynamic_cast<ShaderPane*>(*it);
        if (pP == NULL)
            continue;
        ShaderToy* pSt = pP->m_pShadertoy;

        // Render a view of the shader to the FBO
        // We must keep the previously bound FBO and restore
        GLint bound_fbo = 0;
        glGetIntegerv(GL_FRAMEBUFFER_BINDING, &bound_fbo);
        bindFBO(pP->m_paneRenderBuffer);

        //pP->DrawToFBO();
        {
            const glm::vec3 hp = pSt->GetHeadPos();
            const glm::vec3 LookVec(0.0f, 0.0f, -1.0f);
            const glm::vec3 up(0.0f, 1.0f, 0.0f);

            ovrPosef eyePose;
            eyePose.Orientation = OVR::Quatf();
            eyePose.Position = OVR::Vector3f();
            const OVR::Matrix4f view = _MakeModelviewMatrix(
                eyePose,
                OVR::Vector3f(0.0f),
                static_cast<float>(M_PI),
                OVR::Vector3f(hp.x, hp.y, hp.z));

            const glm::mat4 persp = glm::perspective(
                90.0f,
                static_cast<float>(pP->m_paneRenderBuffer.w) / static_cast<float>(pP->m_paneRenderBuffer.h),
                0.004f,
                500.0f);

            const bool wasDrawing = m_shaderToyScene.m_bDraw;
            m_shaderToyScene.m_bDraw = true;
            m_shaderToyScene.SetShaderToy(pSt);
            m_shaderToyScene.RenderForOneEye(&view.Transposed().M[0][0], glm::value_ptr(persp));
            m_shaderToyScene.m_bDraw = wasDrawing;
            m_shaderToyScene.SetShaderToy(NULL);
        }

        unbindFBO();
        glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, bound_fbo);
    }
}
Example #9
void Entity_DrawChildren( const OVR::Matrix4f &view, const SxTransform& xform, SRef first )
{
    SRef  			ref;
    SEntity 		*entity;
    SxTransform		entityXform;
    SxTransform		childXform;
    OVR::Matrix4f 	m;

    for ( ref = first; ref != S_NULL_REF; ref = entity->parentLink.next )
    {
        entity = Registry_GetEntity( ref );
        assert( entity );

        if ( entity->visibility <= 0.0f )
            continue;

        OrientationToTransform( entity->orientation, &entityXform );
        ConcatenateTransforms( xform, entityXform, &childXform );

        // if ( strstr( entity->id, "vnc" ) )
        // {
        // 	S_Log( "entityXform %s:", entity->id );
        // 	S_Log( "xAxis: %f %f %f", entityXform.axes.x.x, entityXform.axes.x.y, entityXform.axes.x.z );
        // 	S_Log( "yAxis: %f %f %f", entityXform.axes.y.x, entityXform.axes.y.y, entityXform.axes.y.z );
        // 	S_Log( "zAxis: %f %f %f", entityXform.axes.z.x, entityXform.axes.z.y, entityXform.axes.z.z );
        // 	S_Log( "origin: %f %f %f", entityXform.origin.x, entityXform.origin.y, entityXform.origin.z );
        // 	S_Log( "scale: %f %f %f", entityXform.scale.x, entityXform.scale.y, entityXform.scale.z );
        // 	S_Log( "childXform %s:", entity->id );
        // 	S_Log( "xAxis: %f %f %f", childXform.axes.x.x, childXform.axes.x.y, childXform.axes.x.z );
        // 	S_Log( "yAxis: %f %f %f", childXform.axes.y.x, childXform.axes.y.y, childXform.axes.y.z );
        // 	S_Log( "zAxis: %f %f %f", childXform.axes.z.x, childXform.axes.z.y, childXform.axes.z.z );
        // 	S_Log( "origin: %f %f %f", childXform.origin.x, childXform.origin.y, childXform.origin.z );
        // 	S_Log( "scale: %f %f %f", childXform.scale.x, childXform.scale.y, childXform.scale.z );
        // }

        m = OVR::Matrix4f(
                childXform.axes.x.x * childXform.scale.x, childXform.axes.x.y * childXform.scale.x, childXform.axes.x.z * childXform.scale.x, 0.0f,
                childXform.axes.y.x * childXform.scale.y, childXform.axes.y.y * childXform.scale.y, childXform.axes.y.z * childXform.scale.y, 0.0f,
                childXform.axes.z.x * childXform.scale.z, childXform.axes.z.y * childXform.scale.z, childXform.axes.z.z * childXform.scale.z, 0.0f,
                childXform.origin.x, childXform.origin.y, childXform.origin.z, 1.0f );

        Entity_DrawEntity( entity, view * m.Transposed() );

        if ( entity->firstChild != S_NULL_REF )
        {
            // S_Log( "%s has children", entity->id );
            Entity_DrawChildren( view, childXform, entity->firstChild );
        }
    }
}
Example #10
OVR_PUBLIC_FUNCTION(ovrEyeRenderDesc) ovr_GetRenderDesc(ovrSession session, ovrEyeType eyeType, ovrFovPort fov)
{
	ovrEyeRenderDesc desc;
	desc.Eye = eyeType;
	desc.Fov = fov;

	OVR::Matrix4f HmdToEyeMatrix = REV_HmdMatrixToOVRMatrix(g_VRSystem->GetEyeToHeadTransform((vr::EVREye)eyeType));
	float WidthTan = fov.LeftTan + fov.RightTan;
	float HeightTan = fov.UpTan + fov.DownTan;
	ovrSizei size = ovr_GetFovTextureSize(session, eyeType, fov, 1.0);

	desc.DistortedViewport = OVR::Recti(eyeType == ovrEye_Right ? size.w : 0, 0, size.w, size.h);
	desc.PixelsPerTanAngleAtCenter = OVR::Vector2f(size.w / WidthTan, size.h / HeightTan);
	desc.HmdToEyeOffset = HmdToEyeMatrix.GetTranslation();

	return desc;
}
Example #11
void RiftAppSkeleton::_initPresentFbo()
{
    m_presentFbo.bindVAO();

    const float verts[] = {
        -1, -1,
        1, -1,
        1, 1,
        -1, 1
    };
    const float texs[] = {
        0, 0,
        1, 0,
        1, 1,
        0, 1,
    };

    GLuint vertVbo = 0;
    glGenBuffers(1, &vertVbo);
    m_presentFbo.AddVbo("vPosition", vertVbo);
    glBindBuffer(GL_ARRAY_BUFFER, vertVbo);
    glBufferData(GL_ARRAY_BUFFER, 4*2*sizeof(GLfloat), verts, GL_STATIC_DRAW);
    glVertexAttribPointer(m_presentFbo.GetAttrLoc("vPosition"), 2, GL_FLOAT, GL_FALSE, 0, NULL);

    GLuint texVbo = 0;
    glGenBuffers(1, &texVbo);
    m_presentFbo.AddVbo("vTex", texVbo);
    glBindBuffer(GL_ARRAY_BUFFER, texVbo);
    glBufferData(GL_ARRAY_BUFFER, 4*2*sizeof(GLfloat), texs, GL_STATIC_DRAW);
    glVertexAttribPointer(m_presentFbo.GetAttrLoc("vTex"), 2, GL_FLOAT, GL_FALSE, 0, NULL);

    glEnableVertexAttribArray(m_presentFbo.GetAttrLoc("vPosition"));
    glEnableVertexAttribArray(m_presentFbo.GetAttrLoc("vTex"));

    glUseProgram(m_presentFbo.prog());
    {
        OVR::Matrix4f id = OVR::Matrix4f::Identity();
        glUniformMatrix4fv(m_presentFbo.GetUniLoc("mvmtx"), 1, false, &id.Transposed().M[0][0]);
        glUniformMatrix4fv(m_presentFbo.GetUniLoc("prmtx"), 1, false, &id.Transposed().M[0][0]);
    }
    glUseProgram(0);

    glBindVertexArray(0);
}
Example #12
OVR_PUBLIC_FUNCTION(ovrTrackingState) ovr_GetTrackingState(ovrSession session, double absTime, ovrBool latencyMarker)
{
	ovrTrackingState state = { 0 };

	if (!session)
		return state;

	// Timestamp applied to the converted poses below
	float time = (float)ovr_GetTimeInSeconds();

	// Get the absolute tracking poses
	vr::TrackedDevicePose_t* poses = session->poses;

	// Convert the head pose
	state.HeadPose = REV_TrackedDevicePoseToOVRPose(poses[vr::k_unTrackedDeviceIndex_Hmd], time);
	state.StatusFlags = REV_TrackedDevicePoseToOVRStatusFlags(poses[vr::k_unTrackedDeviceIndex_Hmd]);

	// Convert the hand poses
	vr::TrackedDeviceIndex_t hands[] = { g_VRSystem->GetTrackedDeviceIndexForControllerRole(vr::TrackedControllerRole_LeftHand),
		g_VRSystem->GetTrackedDeviceIndexForControllerRole(vr::TrackedControllerRole_RightHand) };
	for (int i = 0; i < ovrHand_Count; i++)
	{
		vr::TrackedDeviceIndex_t deviceIndex = hands[i];
		if (deviceIndex == vr::k_unTrackedDeviceIndexInvalid)
		{
			state.HandPoses[i].ThePose = OVR::Posef::Identity();
			continue;
		}

		state.HandPoses[i] = REV_TrackedDevicePoseToOVRPose(poses[deviceIndex], time);
		state.HandStatusFlags[i] = REV_TrackedDevicePoseToOVRStatusFlags(poses[deviceIndex]);
	}

	OVR::Matrix4f origin = REV_HmdMatrixToOVRMatrix(g_VRSystem->GetSeatedZeroPoseToStandingAbsoluteTrackingPose());

	// The calibrated origin should be the location of the seated origin relative to the absolute tracking space.
	// It currently describes the location of the absolute origin relative to the seated origin, so we have to invert it.
	origin.Invert();

	state.CalibratedOrigin.Orientation = OVR::Quatf(origin);
	state.CalibratedOrigin.Position = origin.GetTranslation();

	return state;
}
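REV_HmdMatrixToOVRMatrix and REV_HmdVectorToOVRVector are used throughout these examples but never shown. Plausible sketches, assuming OpenVR's row-major 3x4 HmdMatrix34_t simply expands to a 4x4 OVR::Matrix4f (both APIs are right-handed, y-up, -z-forward, so no axis flip should be needed; requires <cstring> for memcpy):

OVR::Matrix4f REV_HmdMatrixToOVRMatrix(vr::HmdMatrix34_t m)
{
	OVR::Matrix4f r;
	memcpy(r.M[0], m.m[0], 4 * sizeof(float)); // copy the three rotation+translation rows
	memcpy(r.M[1], m.m[1], 4 * sizeof(float));
	memcpy(r.M[2], m.m[2], 4 * sizeof(float));
	r.M[3][0] = 0.0f; r.M[3][1] = 0.0f; r.M[3][2] = 0.0f; r.M[3][3] = 1.0f; // homogeneous row
	return r;
}

OVR::Vector3f REV_HmdVectorToOVRVector(vr::HmdVector3_t v)
{
	return OVR::Vector3f(v.v[0], v.v[1], v.v[2]);
}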
Example #13
/// Scale the parallax translation and head pose motion vector by the head size
/// dictated by the shader. Thanks to the elegant design decision of putting the
/// head's default position at the origin, this is simple.
OVR::Matrix4f _MakeModelviewMatrix(
    ovrPosef eyePose,
    ovrVector3f viewAdjust,
    float chassisYaw,
    ovrVector3f chassisPos,
    float headScale=1.0f)
{
    const OVR::Matrix4f eyePoseMatrix =
        OVR::Matrix4f::Translation(OVR::Vector3f(eyePose.Position) * headScale)
        * OVR::Matrix4f(OVR::Quatf(eyePose.Orientation));

    const OVR::Matrix4f view =
        OVR::Matrix4f::Translation(OVR::Vector3f(viewAdjust) * headScale)
        * eyePoseMatrix.Inverted()
        * OVR::Matrix4f::RotationY(chassisYaw)
        * OVR::Matrix4f::Translation(-OVR::Vector3f(chassisPos));

    return view;
}
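Every example on this page uploads OVR matrices via .Transposed(): OVR::Matrix4f stores its elements row-major, while glUniformMatrix4fv with transpose = GL_FALSE expects column-major data. A sketch of the two equivalent upload forms (loc and view are assumed names):

glUniformMatrix4fv(loc, 1, GL_FALSE, &view.Transposed().M[0][0]); // pre-transpose on the CPU
glUniformMatrix4fv(loc, 1, GL_TRUE,  &view.M[0][0]);              // let GL transpose

OpenGL ES 2.0 requires the transpose argument to be GL_FALSE, which may be why these examples transpose on the CPU.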
Example #14
// This function calculates the transformation Matrix, needed for the Oculus Rift display
glm::mat4 RetinaManager::CalcTransMatrix(ovrEyeType Eye) {
	glm::mat4 projMat;
	glm::mat4 modelViewMat;
	// Get the projection matrix from the device (the model-view remains identity)
	OVR::Matrix4f projectionMatrix = ovrMatrix4f_Projection(this->eyeRenderDesc[Eye].Fov, 0.3f, 1000.0f, true);

	// Convert the matrices into OpenGl form
	memcpy(glm::value_ptr(projMat), &(projectionMatrix.Transposed().M[0][0]), sizeof(projectionMatrix));
	modelViewMat = glm::mat4(1.0); //Identity matrix for model-view
	// Adjust IPD and the distance from FOV
	glm::mat4 translateIPD = glm::translate(glm::mat4(1.0),
			glm::vec3(this->eyeRenderDesc[Eye].ViewAdjust.x, this->eyeRenderDesc[Eye].ViewAdjust.y,
					this->eyeRenderDesc[Eye].ViewAdjust.z));

	glm::mat4 translateBack = glm::translate(glm::mat4(1.0),
			glm::vec3(0, 0, this->paramManager.getTranslateBackOffset()));

	// Calculate and return the transformed matrix
	return projMat * modelViewMat * translateBack * translateIPD;
}
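The raw memcpy of a transposed temporary works, but the conversion idiom used elsewhere on this page (e.g. Example #3) expresses the same thing more directly (a sketch; the result is identical):

	projMat = glm::transpose(glm::make_mat4(&projectionMatrix.M[0][0]));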
Example #15
ovrPoseStatef REV_TrackedDevicePoseToOVRPose(vr::TrackedDevicePose_t pose, double time)
{
	ovrPoseStatef result = { 0 };
	result.ThePose = OVR::Posef::Identity();

	OVR::Matrix4f matrix;
	if (pose.bPoseIsValid)
		matrix = REV_HmdMatrixToOVRMatrix(pose.mDeviceToAbsoluteTracking);
	else
		return result;

	result.ThePose.Orientation = OVR::Quatf(matrix);
	result.ThePose.Position = matrix.GetTranslation();
	result.AngularVelocity = REV_HmdVectorToOVRVector(pose.vAngularVelocity);
	result.LinearVelocity = REV_HmdVectorToOVRVector(pose.vVelocity);
	// TODO: Calculate acceleration.
	result.AngularAcceleration = ovrVector3f();
	result.LinearAcceleration = ovrVector3f();
	result.TimeInSeconds = time;

	return result;
}
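REV_TrackedDevicePoseToOVRStatusFlags, used alongside this function in Example #12, is also not shown. A plausible sketch; the exact flag mapping is an assumption:

unsigned int REV_TrackedDevicePoseToOVRStatusFlags(vr::TrackedDevicePose_t pose)
{
	unsigned int result = 0;
	if (pose.bPoseIsValid) // assume a valid pose implies both orientation and position tracking
		result |= ovrStatus_OrientationTracked | ovrStatus_PositionTracked;
	return result;
}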
Example #16
/// Handle input's influence on orientation variables.
void OculusAppSkeleton::AccumulateInputs(float dt)
{
    // Handle Sensor motion.
    // We extract Yaw, Pitch, Roll instead of directly using the orientation
    // to allow "additional" yaw manipulation with mouse/controller.
    if (m_ok.SensorActive())
    {
        OVR::Quatf    hmdOrient = m_ok.GetOrientation();
        float    yaw = 0.0f;

        hmdOrient.GetEulerAngles<OVR::Axis_Y, OVR::Axis_X, OVR::Axis_Z>(&yaw, &EyePitch, &EyeRoll);

        EyeYaw += (yaw - LastSensorYaw);
        LastSensorYaw = yaw;
    }


    // Gamepad rotation.
    EyeYaw -= GamepadRotate.x * dt;

    if (!m_ok.SensorActive())
    {
        // Allow gamepad to look up/down, but only if there is no Rift sensor.
        EyePitch -= GamepadRotate.y * dt;
        EyePitch -= MouseRotate.y * dt;
        EyeYaw   -= MouseRotate.x * dt;

        const float maxPitch = ((3.1415f/2)*0.98f);
        if (EyePitch > maxPitch)
            EyePitch = maxPitch;
        if (EyePitch < -maxPitch)
            EyePitch = -maxPitch;
    }

    if (GamepadMove.LengthSq() > 0)
    {
        OVR::Matrix4f yawRotate = OVR::Matrix4f::RotationY(EyeYaw);
        OVR::Vector3f orientationVector = yawRotate.Transform(GamepadMove);
        orientationVector *= MoveSpeed * dt;
        EyePos += orientationVector;
    }

    if (MouseMove.LengthSq() > 0)
    {
        OVR::Matrix4f yawRotate = OVR::Matrix4f::RotationY(EyeYaw);
        OVR::Vector3f orientationVector = yawRotate.Transform(MouseMove);
        orientationVector *= MoveSpeed * dt;
        EyePos += orientationVector;
    }

    if (KeyboardMove.LengthSq() > 0)
    {
        OVR::Matrix4f yawRotate = OVR::Matrix4f::RotationY(EyeYaw);
        OVR::Vector3f orientationVector = yawRotate.Transform(KeyboardMove);
        orientationVector *= MoveSpeed * dt;
        EyePos += orientationVector;
    }
}
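The three movement blocks above are identical except for their input vector; a small helper (a sketch, not part of the original class) would collapse them:

static void ApplyYawedMove(const OVR::Vector3f& move, float yaw, float speed, float dt, OVR::Vector3f& eyePos)
{
    if (move.LengthSq() > 0)
    {
        // Rotate the stick/mouse/keyboard vector into the yawed body frame, then scale by speed.
        const OVR::Matrix4f yawRotate = OVR::Matrix4f::RotationY(yaw);
        eyePos += yawRotate.Transform(move) * (speed * dt);
    }
}

// Usage: ApplyYawedMove(GamepadMove, EyeYaw, MoveSpeed, dt, EyePos); likewise for MouseMove and KeyboardMove.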
Example #17
///@todo Even though this function shares most of its code with client rendering,
/// which appears to work fine, the stereo pair cannot be converged. It appears that
/// the projection matrices for each eye are too far apart? Could be modelview...
void RiftAppSkeleton::display_stereo_undistorted() //const
{
    ovrHmd hmd = m_Hmd;
    if (hmd == NULL)
        return;

    //ovrFrameTiming hmdFrameTiming =
    ovrHmd_BeginFrameTiming(hmd, 0);

    bindFBO(m_renderBuffer, m_fboScale);

    glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    for (int eyeIndex = 0; eyeIndex < ovrEye_Count; eyeIndex++)
    {
        ovrEyeType eye = hmd->EyeRenderOrder[eyeIndex];
        ovrPosef eyePose = ovrHmd_GetEyePose(hmd, eye);

        const ovrGLTexture& otex = l_EyeTexture[eye];
        const ovrRecti& rvp = otex.OGL.Header.RenderViewport;
        const ovrRecti rsc = {
            static_cast<int>(m_fboScale * rvp.Pos.x),
            static_cast<int>(m_fboScale * rvp.Pos.y),
            static_cast<int>(m_fboScale * rvp.Size.w),
            static_cast<int>(m_fboScale * rvp.Size.h)
        };
        glViewport(rsc.Pos.x, rsc.Pos.y, rsc.Size.w, rsc.Size.h);

        OVR::Quatf orientation = OVR::Quatf(eyePose.Orientation);
        OVR::Matrix4f proj = ovrMatrix4f_Projection(
            m_EyeRenderDesc[eye].Fov,
            0.01f, 10000.0f, true);

        //m_EyeRenderDesc[eye].DistortedViewport;
        OVR::Vector3f EyePos = m_chassisPos;
        OVR::Matrix4f view = OVR::Matrix4f(orientation.Inverted())
            * OVR::Matrix4f::RotationY(m_chassisYaw)
            * OVR::Matrix4f::Translation(-EyePos);
        OVR::Matrix4f eyeview = OVR::Matrix4f::Translation(m_EyeRenderDesc[eye].ViewAdjust) * view;

        _resetGLState();

        _DrawScenes(&eyeview.Transposed().M[0][0], &proj.Transposed().M[0][0], rvp);
    }
    unbindFBO();

    glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    glDisable(GL_DEPTH_TEST);
    glDisable(GL_CULL_FACE);

    // Present FBO to screen
    const GLuint prog = m_presentFbo.prog();
    glUseProgram(prog);
    m_presentFbo.bindVAO();
    {
        glActiveTexture(GL_TEXTURE0);
        glBindTexture(GL_TEXTURE_2D, m_renderBuffer.tex);
        glUniform1i(m_presentFbo.GetUniLoc("fboTex"), 0);

        // This is the only uniform that changes per-frame
        glUniform1f(m_presentFbo.GetUniLoc("fboScale"), m_fboScale);

        glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
    }
    glBindVertexArray(0);
    glUseProgram(0);

    ovrHmd_EndFrameTiming(hmd);
}
Example #18
/// Set up view matrices, then draw scene
void OculusAppSkeleton::display(bool useOculus) const
{
    /// This may save us some frame rate
    if (!useOculus && !m_displaySceneInControl)
    {
        glClearColor(0,0,0,0);
        glClear(GL_COLOR_BUFFER_BIT);
        return;
    }

    glEnable(GL_DEPTH_TEST);

    m_ok.BindRenderBuffer();
    {
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

        const int fboWidth = m_ok.GetRenderBufferWidth();
        const int fboHeight = m_ok.GetRenderBufferHeight();
        const int halfWidth = fboWidth/2;
        if (useOculus)
        {
            const OVR::HMDInfo& hmd = m_ok.GetHMD();
            // Compute Aspect Ratio. Stereo mode cuts width in half.
            float aspectRatio = float(hmd.HResolution * 0.5f) / float(hmd.VResolution);

            // Compute Vertical FOV based on distance.
            float halfScreenDistance = (hmd.VScreenSize / 2);
            float yfov = 2.0f * atan(halfScreenDistance/hmd.EyeToScreenDistance);

            // Post-projection viewport coordinates range from (-1.0, 1.0), with the
            // center of the left viewport falling at (1/4) of horizontal screen size.
            // We need to shift this projection center to match with the lens center.
            // We compute this shift in physical units (meters) to correct
            // for different screen sizes and then rescale to viewport coordinates.
            float viewCenterValue = hmd.HScreenSize * 0.25f;
            float eyeProjectionShift = viewCenterValue - hmd.LensSeparationDistance * 0.5f;
            float projectionCenterOffset = 4.0f * eyeProjectionShift / hmd.HScreenSize;

            // Projection matrix for the "center eye", which the left/right matrices are based on.
            OVR::Matrix4f projCenter = OVR::Matrix4f::PerspectiveRH(yfov, aspectRatio, 0.3f, 1000.0f);
            OVR::Matrix4f projLeft   = OVR::Matrix4f::Translation(projectionCenterOffset, 0, 0) * projCenter;
            OVR::Matrix4f projRight  = OVR::Matrix4f::Translation(-projectionCenterOffset, 0, 0) * projCenter;
        
            // m_oculusView transformation translation in world units.
            float halfIPD = hmd.InterpupillaryDistance * 0.5f;
            OVR::Matrix4f viewLeft = OVR::Matrix4f::Translation(halfIPD, 0, 0) * m_oculusView;
            OVR::Matrix4f viewRight= OVR::Matrix4f::Translation(-halfIPD, 0, 0) * m_oculusView;

            glViewport(0        ,0,(GLsizei)halfWidth, (GLsizei)fboHeight);
            glScissor (0        ,0,(GLsizei)halfWidth, (GLsizei)fboHeight);
            m_scene.RenderForOneEye(viewLeft, projLeft);

            glViewport(halfWidth,0,(GLsizei)halfWidth, (GLsizei)fboHeight);
            glScissor (halfWidth,0,(GLsizei)halfWidth, (GLsizei)fboHeight);
            m_scene.RenderForOneEye(viewRight, projRight);
        }
        else
        {
            /// Set up our 3D transformation matrices
            /// Remember DX and OpenGL use transposed conventions. And doesn't DX use left-handed coords?
            OVR::Matrix4f mview = m_controlView;
            OVR::Matrix4f persp = OVR::Matrix4f::PerspectiveRH(
                m_viewAngleDeg * M_PI / 180.0f,
                (float)m_windowWidth/(float)m_windowHeight,
                0.004f,
                500.0f);

            glViewport(0,0,(GLsizei)fboWidth, (GLsizei)fboHeight);
            m_scene.RenderForOneEye(mview, persp);


            /// Render avatar of Oculus user
            //if (UseFollowCam)
            const GLuint prog = m_avatarProg;
            glUseProgram(prog);
            {
                OVR::Matrix4f rollPitchYaw = GetRollPitchYaw();
                OVR::Matrix4f eyetx = mview
                    * OVR::Matrix4f::Translation(EyePos.x, EyePos.y, EyePos.z)
                    * rollPitchYaw;

                glUniformMatrix4fv(getUniLoc(prog, "mvmtx"), 1, false, &eyetx.Transposed().M[0][0]);
                glUniformMatrix4fv(getUniLoc(prog, "prmtx"), 1, false, &persp.Transposed().M[0][0]);

                glLineWidth(4.0f);
                DrawOrigin2();
                const float aspect = (float)GetOculusWidth() / (float)GetOculusHeight();
                DrawViewFrustum(aspect);
                glLineWidth(1.0f);
            }
        }
    }
    m_ok.UnBindRenderBuffer();

    glDisable(GL_LIGHTING);
    glDisable(GL_DEPTH_TEST);

    const OVRkill::PostProcessType post = useOculus ? OVRkill::PostProcess_Distortion : OVRkill::PostProcess_None;
    m_ok.PresentFbo(post);
}
Example #19
int main(int argc, char **argv)
{
    // initialize everything
    if (SDL_Init(SDL_INIT_VIDEO | SDL_INIT_EVENTS) < 0)
    {
        return 1;
    }

    if (!g_oculusVR.InitVR())
    {
        SDL_Quit();
        return 1;
    }

    ovrSizei hmdResolution = g_oculusVR.GetResolution();
    ovrSizei windowSize = { hmdResolution.w / 2, hmdResolution.h / 2 };

    g_renderContext.Init("Oculus Rift IR Camera Bounds Renderer", 100, 100, windowSize.w, windowSize.h);
    SDL_ShowCursor(SDL_DISABLE);

    if (glewInit() != GLEW_OK)
    {
        g_renderContext.Destroy();
        g_oculusVR.DestroyVR();
        SDL_Quit();
        return 1;
    }

    if (!g_oculusVR.InitVRBuffers(windowSize.w, windowSize.h))
    {
        g_renderContext.Destroy();
        g_oculusVR.DestroyVR();
        SDL_Quit();
        return 1;
    }

    ShaderManager::GetInstance()->LoadShaders();
    g_application.OnStart();

    while (g_application.Running())
    {
        // handle key presses
        processEvents();

        // Set the clear color before clearing so the first frame uses it too.
        glClearColor(0.2f, 0.2f, 0.6f, 0.0f);
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

        g_oculusVR.OnRenderStart();

        for (int eyeIndex = 0; eyeIndex < ovrEye_Count; eyeIndex++)
        {
            OVR::Matrix4f MVPMatrix = g_oculusVR.OnEyeRender(eyeIndex);

            // update MVP in both shaders
            const ShaderProgram &shader = ShaderManager::GetInstance()->UseShaderProgram(ShaderManager::BasicShader);
            glUniformMatrix4fv(shader.uniforms[ModelViewProjectionMatrix], 1, GL_FALSE, &MVPMatrix.Transposed().M[0][0]);
            const ShaderProgram &shader2 = ShaderManager::GetInstance()->UseShaderProgram(ShaderManager::OVRFrustumShader);
            glUniformMatrix4fv(shader2.uniforms[ModelViewProjectionMatrix], 1, GL_FALSE, &MVPMatrix.Transposed().M[0][0]);

            g_application.OnRender();     
            g_oculusVR.RenderTrackerFrustum();
            g_oculusVR.OnEyeRenderFinish(eyeIndex);
        }

        g_oculusVR.SubmitFrame();
        g_oculusVR.BlitMirror();
        SDL_GL_SwapWindow(g_renderContext.window);
    }

    g_renderContext.Destroy();
    g_oculusVR.DestroyVR();

    SDL_Quit(); 

    return 0;
}
Example #20
void RiftAppSkeleton::display_sdk() //const
{
    ovrHmd hmd = m_Hmd;
    if (hmd == NULL)
        return;

    //const ovrFrameTiming hmdFrameTiming =
    ovrHmd_BeginFrame(m_Hmd, 0);

    bindFBO(m_renderBuffer);

    glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    // For passing to EndFrame once rendering is done
    ovrPosef renderPose[2];
    ovrTexture eyeTexture[2];

    for (int eyeIndex=0; eyeIndex<ovrEye_Count; eyeIndex++)
    {
        const ovrEyeType eye = hmd->EyeRenderOrder[eyeIndex];
        const ovrPosef eyePose = ovrHmd_GetEyePose(m_Hmd, eye);
        m_eyeOri = eyePose.Orientation; // cache this for movement direction
        _StoreHmdPose(eyePose);

        const ovrGLTexture& otex = l_EyeTexture[eye];
        const ovrRecti& rvp = otex.OGL.Header.RenderViewport;
        glViewport(
            rvp.Pos.x,
            rvp.Pos.y,
            rvp.Size.w,
            rvp.Size.h
            );

        const OVR::Matrix4f proj = ovrMatrix4f_Projection(
            m_EyeRenderDesc[eye].Fov,
            0.01f, 10000.0f, true);

        const OVR::Matrix4f view = _MakeModelviewMatrix(
            eyePose,
            -OVR::Vector3f(m_EyeRenderDesc[eye].ViewAdjust), // not sure why negative...
            m_chassisYaw,
            m_chassisPos);

        const OVR::Matrix4f scaledView = _MakeModelviewMatrix(
            eyePose,
            -OVR::Vector3f(m_EyeRenderDesc[eye].ViewAdjust), // not sure why negative...
            m_chassisYaw,
            m_chassisPos,
            m_headSize);

        _resetGLState();

        _DrawScenes(&view.Transposed().M[0][0], &proj.Transposed().M[0][0], rvp, &scaledView.Transposed().M[0][0]);

        renderPose[eyeIndex] = eyePose;
        eyeTexture[eyeIndex] = l_EyeTexture[eye].Texture;
    }
    unbindFBO();

    ovrHmd_EndFrame(m_Hmd, renderPose, eyeTexture);

    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
    glBindBuffer(GL_ARRAY_BUFFER, 0);
    glUseProgram(0);
}
Example #21
void OVRScene::timestep(double /*absTime*/, double dt)
{
    (void)dt;
    if (m_pHmd == NULL)
        return;

    const ovrTrackingState ts = ovrHmd_GetTrackingState(m_pHmd, ovr_GetTimeInSeconds());
    const ovrVector3f& hp = ts.HeadPose.ThePose.Position;
    glm::vec4 headPt(hp.x, hp.y, hp.z, 1.0f);

    // Get camera pose as a matrix
    const ovrPosef& cp = ts.CameraPose;
    OVR::Matrix4f camMtx = OVR::Matrix4f();
    camMtx *= OVR::Matrix4f::Translation(cp.Position)
        * OVR::Matrix4f(OVR::Quatf(cp.Orientation));

    const glm::mat4 gcamMtx = glm::make_mat4(&camMtx.Inverted().Transposed().M[0][0]);
    headPt = gcamMtx * headPt;
    m_tanFromCameraCenterline.x = fabs(headPt.x / headPt.z);
    m_tanFromCameraCenterline.y = fabs(headPt.y / headPt.z);

#if 0
    std::vector<glm::vec3> txFrustumPts = m_frustumVerts;
    for (std::vector<glm::vec3>::const_iterator it = txFrustumPts.begin();
        it != txFrustumPts.end();
        ++it)
    {
        glm::vec3 pt = *it;
        glm::vec4 pt4(pt, 1.0f);
        pt4 = gcamMtx * pt4;
        pt.x = pt4.x;
        pt.y = pt4.y;
        pt.z = pt4.z;
    }

    // Calculate minimum distance to frustum
    std::vector<glm::ivec3> planeIndices;
    planeIndices.push_back(glm::ivec3(2, 3, 4));
    planeIndices.push_back(glm::ivec3(8, 7, 6));
    planeIndices.push_back(glm::ivec3(2, 3, 7));
    planeIndices.push_back(glm::ivec3(3, 4, 8));
    planeIndices.push_back(glm::ivec3(4, 5, 9));
    planeIndices.push_back(glm::ivec3(5, 2, 6));

    float minDist = 999.0f;
    for (std::vector<glm::ivec3>::const_iterator it = planeIndices.begin();
        it != planeIndices.end();
        ++it)
    {
        const glm::ivec3& idxs = *it;
        // Assume this point has already been transformed
        // If our indices are out of bounds, we're hosed
        const glm::vec3& p1 = txFrustumPts[idxs.x];
        const glm::vec3& p2 = txFrustumPts[idxs.y];
        const glm::vec3& p3 = txFrustumPts[idxs.z];
        const glm::vec3 v1 = p1 - p2;
        const glm::vec3 v2 = p3 - p2;
        const glm::vec3 norm = glm::normalize(glm::cross(v1, v2));
        const glm::vec3 ptDist = headPt - p2;
        const float dist = fabs(glm::dot(norm, ptDist)); // shouldn't need fabs if ordering is correct
        minDist = std::min(minDist, dist);
    }
    m_distanceToFrustum = minDist;
#endif
}
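In the tangent computation above, headPt has already been transformed into camera space, so |x/z| and |y/z| are the tangents of the head's horizontal and vertical angular offsets from the camera's optical axis. As a worked example, a head 0.5 m to the right of and 2.0 m in front of the camera gives m_tanFromCameraCenterline.x = 0.5/2.0 = 0.25, i.e. roughly 14 degrees off-axis.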
Example #22
void RiftAppSkeleton::timestep(float dt)
{
    for (std::vector<IScene*>::iterator it = m_scenes.begin();
        it != m_scenes.end();
        ++it)
    {
        IScene* pScene = *it;
        if (pScene != NULL)
        {
            pScene->timestep(dt);
        }
    }

    glm::vec3 hydraMove = glm::vec3(0.0f, 0.0f, 0.0f);
#ifdef USE_SIXENSE
    const sixenseAllControllerData& state = m_fm.GetCurrentState();
    for (int i = 0; i<2; ++i)
    {
        const sixenseControllerData& cd = state.controllers[i];
        const float moveScale = pow(10.0f, cd.trigger);
        hydraMove.x += cd.joystick_x * moveScale;

        const FlyingMouse::Hand h = static_cast<FlyingMouse::Hand>(i);
        if (m_fm.IsPressed(h, SIXENSE_BUTTON_JOYSTICK)) ///@note left hand does not work
            hydraMove.y += cd.joystick_y * moveScale;
        else
            hydraMove.z -= cd.joystick_y * moveScale;
    }

    if (m_fm.WasJustPressed(FlyingMouse::Right, SIXENSE_BUTTON_START))
    {
        ToggleShaderWorld();
    }

    // Adjust cinemascope feel with left trigger
    // Mouse wheel will still work if the Hydra is not present or not pressed (0.0 trigger value).
    const float trigger = m_fm.GetTriggerValue(FlyingMouse::Left); // [0,1]
    if (trigger > 0.0f)
    {
        const float deadzone = 0.1f;
        const float topval = 0.95f;
        const float trigScaled = (trigger - deadzone) / (1.0f - deadzone);
        m_cinemaScopeFactor = std::max(0.0f, topval * trigScaled);
    }
#endif

    const glm::vec3 move_dt = m_headSize * (m_keyboardMove + m_joystickMove + m_mouseMove + hydraMove) * dt;
    ovrVector3f kbm;
    kbm.x = move_dt.x;
    kbm.y = move_dt.y;
    kbm.z = move_dt.z;

    // Move in the direction the viewer is facing.
    const OVR::Matrix4f rotmtx = 
          OVR::Matrix4f::RotationY(-m_chassisYaw)
        * OVR::Matrix4f(m_eyeOri);
    const OVR::Vector3f kbmVec = rotmtx.Transform(OVR::Vector3f(kbm));

    m_chassisPos.x += kbmVec.x;
    m_chassisPos.y += kbmVec.y;
    m_chassisPos.z += kbmVec.z;

    m_chassisYaw += (m_keyboardYaw + m_joystickYaw + m_mouseDeltaYaw) * dt;

    m_fm.updateHydraData();
    m_hyif.updateHydraData(m_fm, 1.0f);
}
Example #23
void Entity_DrawEntity( SEntity *entity, const OVR::Matrix4f &view )
{
    STexture 	*texture;
    SGeometry	*geometry;
    uint 		geometryIndex;
    uint 		textureIndex;
    GLuint 		texId;
    float 		uScale;
    float 		vScale;
    GLuint 		vertexArrayObject;
    int 		triCount;
    int 		indexOffset;
    int 		batchTriCount;
    int 		triCountLeft;

    Prof_Start( PROF_DRAW_ENTITY );

    assert( entity );

    OVR::GL_CheckErrors( "before Entity_DrawEntity" );

    geometry = Registry_GetGeometry( entity->geometryRef );
    assert( geometry );

    geometryIndex = geometry->drawIndex % BUFFER_COUNT;

    vertexArrayObject = geometry->vertexArrayObjects[geometryIndex];
    if ( !vertexArrayObject )
    {
        Prof_Stop( PROF_DRAW_ENTITY );
        return;
    }

    glUseProgram( s_ent.shader.program );

    glUniformMatrix4fv( s_ent.shader.uMvp, 1, GL_FALSE, view.Transposed().M[0] );

    glBindVertexArrayOES_( vertexArrayObject );

    glActiveTexture( GL_TEXTURE0 );

    if ( entity->textureRef != S_NULL_REF )
    {
        texture = Registry_GetTexture( entity->textureRef );
        assert( texture );

        textureIndex = texture->drawIndex % BUFFER_COUNT;
        texId = texture->texId[textureIndex];

        glBindTexture( GL_TEXTURE_2D, texId );

        if ( texId )
        {
            assert( texture->texWidth[textureIndex] );
            assert( texture->texHeight[textureIndex] );

            uScale = (float)texture->width / texture->texWidth[textureIndex];
            vScale = (float)texture->height / texture->texHeight[textureIndex];

            glUniform4f( s_ent.shader.uColor, uScale, vScale, 1.0f, 1.0f );
        }
        else
        {
            glUniform4f( s_ent.shader.uColor, 1.0f, 1.0f, 1.0f, 1.0f );
        }

        if ( texture->format == SxTextureFormat_R8G8B8A8 ||
                texture->format == SxTextureFormat_R8G8B8A8_SRGB )
        {
            glEnable( GL_BLEND );
            glBlendFunc( GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA );
        }
    }
    else
    {
        glBindTexture( GL_TEXTURE_2D, 0 );
        glUniform4f( s_ent.shader.uColor, 1.0f, 1.0f, 1.0f, 1.0f );
        glDisable( GL_BLEND );
    }

    indexOffset = 0;
    triCount = geometry->indexCounts[geometryIndex] / 3;

    triCountLeft = triCount;

    while ( triCountLeft )
    {
#if USE_SPLIT_DRAW
        batchTriCount = S_Min( triCountLeft, S_Max( 1, triCount / 10 ) );
#else // #if USE_SPLIT_DRAW
        batchTriCount = triCount;
#endif // #else // #if USE_SPLIT_DRAW

        glDrawElements( GL_TRIANGLES, batchTriCount * 3, GL_UNSIGNED_SHORT, (void *)indexOffset );

        indexOffset += batchTriCount * sizeof( ushort ) * 3;
        triCountLeft -= batchTriCount;
    }

    glBindVertexArrayOES_( 0 );

    glBindTexture( GL_TEXTURE_2D, 0 );

    glDisable( GL_BLEND );

    OVR::GL_CheckErrors( "after Entity_DrawEntity" );

    Prof_Stop( PROF_DRAW_ENTITY );
}
Example #24
void Render()
{
	ovrFrameTiming frameTiming = ovrHmd_BeginFrameTiming(HMD, 0);

	// Update the box's rotation value
	rotationBoxValue += 2.0f*frameTiming.DeltaSeconds;

	// Specify the eye position used when driving the view from the keyboard, etc.
	static OVR::Vector3f EyePos;
	EyePos.x = 0.0f, EyePos.y = 0.0f, EyePos.z = 0.0f;

	// Used when manipulating yaw with mouse rotation, etc.
	static float eyeYaw = 0;

	// Acquire the pose from the sensor
	ovrPosef movePose = ovrHmd_GetSensorState(HMD, frameTiming.ScanoutMidpointSeconds).Predicted.Pose;
	static ovrPosef eyeRenderPose[2];

	// Calculation for taking the player's height into account
	//EyePos.y = ovrHmd_GetFloat(HMD, OVR_KEY_EYE_HEIGHT, EyePos.y);

	// Only TriangleList is used this time.
	g_pImmediateContext->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST);

	// Set the render target
	g_pImmediateContext->OMSetRenderTargets(1, &g_pRenderTargetViewOculus, g_pDepthStencilViewOculus);

	// Clear the screen and the depth buffer
	float ClearColor[4] = { 0.0f, 0.125f, 0.3f, 1.0f }; // in R,G,B,A order
	g_pImmediateContext->ClearRenderTargetView(g_pRenderTargetViewOculus, ClearColor);
	g_pImmediateContext->ClearDepthStencilView(g_pDepthStencilViewOculus, D3D11_CLEAR_DEPTH, 1.0f, 0);

	// Render the scene for each eye.
	for (int eyeIndex = 0; eyeIndex < ovrEye_Count; eyeIndex++)
	{
		ConstantBuffer cb;
		ovrEyeType eye = HMDDesc.EyeRenderOrder[eyeIndex];
		eyeRenderPose[eye] = ovrHmd_GetEyePose(HMD, eye);

		// Compute the view matrix.
		OVR::Matrix4f rotation = OVR::Matrix4f::RotationY(eyeYaw);											// Apply a rotation computed in advance (e.g. from the mouse)
		OVR::Matrix4f resultRotation = rotation * OVR::Matrix4f(eyeRenderPose[eye].Orientation) *			// Compute the eye's orientation (rotation)
										OVR::Matrix4f(1, 0, 0, 0, 0, -1, 0, 0, 0, 0, -1, 0, 0, 0, 0, 1);	// Flip the directions to match the axes
		OVR::Vector3f resultUp = resultRotation.Transform(OVR::Vector3f(0, 1, 0));							// Compute the up vector
		OVR::Vector3f forward = resultRotation.Transform(OVR::Vector3f(0, 0, -1));							// Compute the forward vector
		OVR::Vector3f resultEyePos = EyePos + rotation.Transform(eyeRenderPose[eye].Position);				// Compute the final eye position
		OVR::Vector3f resultEyeAt = EyePos + rotation.Transform(eyeRenderPose[eye].Position) + forward;		// Compute the final look-at point

		// Compute the view matrix with xnamath from the values computed above.
		XMVECTOR Eye = XMVectorSet(resultEyePos.x, resultEyePos.y, resultEyePos.z, 0.0f);		// camera position
		XMVECTOR At = XMVectorSet(resultEyeAt.x, resultEyeAt.y, resultEyeAt.z, 0.0f);			// camera look-at target
		XMVECTOR Up = XMVectorSet(resultUp.x, resultUp.y, resultUp.z, 0.0f);					// camera up vector
		g_View = XMMatrixLookAtLH(Eye, At,Up) * XMMatrixTranslation(EyeRenderDesc[eye].ViewAdjust.x, EyeRenderDesc[eye].ViewAdjust.y, EyeRenderDesc[eye].ViewAdjust.z);

		// Compute the projection matrix from EyeRenderDesc.
		// It stores the tangents of the FOV above/below/left/right of the eye center, so use the dedicated libovr function.
		// OVR::Matrix4f is row/column-swapped relative to xnamath, so transpose it here.
		OVR::Matrix4f proj = OVR::CreateProjection(false, EyeRenderDesc[eye].Fov, 0.01f, 100.0f);
		proj.Transpose();
		memcpy_s(&g_Projection, 64, &proj, 64);		

		// Set the viewport (one eye at a time)
		D3D11_VIEWPORT vp;
		vp.TopLeftX = EyeRenderViewport[eye].Pos.x;
		vp.TopLeftY = EyeRenderViewport[eye].Pos.y;
		vp.Width = EyeRenderViewport[eye].Size.w;
		vp.Height = EyeRenderViewport[eye].Size.h;
		vp.MinDepth = 0.0f;
		vp.MaxDepth = 1.0f;
		g_pImmediateContext->RSSetViewports(1, &vp);

		// Set up the matrices to be sent to the constant buffer.
		// They become transposed when passed to the shader, so transpose them here.
		cb.mView = XMMatrixTranspose(g_View);
		cb.mProjection = XMMatrixTranspose(g_Projection);

		// Render the scene
		Scene(cb);
	}


	// Distort the scene that was rendered to the render target and draw it to the back buffer.
	DistortionMeshRender(3, HMD, frameTiming.TimewarpPointSeconds,eyeRenderPose);

	g_pSwapChain->Present(0, 0);
	//pRender->WaitUntilGpuIdle();  // queries are not implemented this time
	ovrHmd_EndFrameTiming(HMD);

}
Example #25
void RiftAppSkeleton::display_client() //const
{
    ovrHmd hmd = m_Hmd;
    if (hmd == NULL)
        return;

    //ovrFrameTiming hmdFrameTiming =
    ovrHmd_BeginFrameTiming(hmd, 0);

    bindFBO(m_renderBuffer, m_fboScale);

    glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    for (int eyeIndex = 0; eyeIndex < ovrEye_Count; eyeIndex++)
    {
        const ovrEyeType eye = hmd->EyeRenderOrder[eyeIndex];
        const ovrPosef eyePose = ovrHmd_GetEyePose(hmd, eye);
        m_eyeOri = eyePose.Orientation; // cache this for movement direction
        _StoreHmdPose(eyePose);

        const ovrGLTexture& otex = l_EyeTexture[eye];
        const ovrRecti& rvp = otex.OGL.Header.RenderViewport;
        const ovrRecti rsc = {
            static_cast<int>(m_fboScale * rvp.Pos.x),
            static_cast<int>(m_fboScale * rvp.Pos.y),
            static_cast<int>(m_fboScale * rvp.Size.w),
            static_cast<int>(m_fboScale * rvp.Size.h)
        };
        glViewport(rsc.Pos.x, rsc.Pos.y, rsc.Size.w, rsc.Size.h);

        const OVR::Matrix4f proj = ovrMatrix4f_Projection(
            m_EyeRenderDesc[eye].Fov,
            0.01f, 10000.0f, true);

        ///@todo Should we be using this variable?
        //m_EyeRenderDesc[eye].DistortedViewport;

        const OVR::Matrix4f view = _MakeModelviewMatrix(
            eyePose,
            m_EyeRenderDesc[eye].ViewAdjust,
            m_chassisYaw,
            m_chassisPos);

        const OVR::Matrix4f scaledView = _MakeModelviewMatrix(
            eyePose,
            m_EyeRenderDesc[eye].ViewAdjust,
            m_chassisYaw,
            m_chassisPos,
            m_headSize);

        _resetGLState();

        _DrawScenes(&view.Transposed().M[0][0], &proj.Transposed().M[0][0], rsc, &scaledView.Transposed().M[0][0]);
    }
    unbindFBO();


    // Set full viewport...?
    const int w = m_Cfg.OGL.Header.RTSize.w;
    const int h = m_Cfg.OGL.Header.RTSize.h;
    glViewport(0, 0, w, h);

    glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    glDisable(GL_DEPTH_TEST);
    glDisable(GL_CULL_FACE);

    // Now draw the distortion mesh...
    for(int eyeNum = 0; eyeNum < 2; eyeNum++)
    {
        const ShaderWithVariables& eyeShader = eyeNum == 0 ?
            m_presentDistMeshL :
            m_presentDistMeshR;
        const GLuint prog = eyeShader.prog();
        glUseProgram(prog);
        //glBindVertexArray(eyeShader.m_vao);
        {
            const ovrDistortionMesh& mesh = m_DistMeshes[eyeNum];
            glBindBuffer(GL_ARRAY_BUFFER, 0);

            const int a_pos =  glGetAttribLocation(prog, "vPosition");
            glVertexAttribPointer(a_pos, 4, GL_FLOAT, GL_FALSE, sizeof(ovrDistortionVertex), &mesh.pVertexData[0].ScreenPosNDC.x);
            glEnableVertexAttribArray(a_pos);

            const int a_texR =  glGetAttribLocation(prog, "vTexR");
            if (a_texR > -1)
            {
                glVertexAttribPointer(a_texR, 2, GL_FLOAT, GL_FALSE, sizeof(ovrDistortionVertex), &mesh.pVertexData[0].TanEyeAnglesR);
                glEnableVertexAttribArray(a_texR);
            }

            const int a_texG =  glGetAttribLocation(prog, "vTexG");
            if (a_texG > -1)
            {
                glVertexAttribPointer(a_texG, 2, GL_FLOAT, GL_FALSE, sizeof(ovrDistortionVertex), &mesh.pVertexData[0].TanEyeAnglesG);
                glEnableVertexAttribArray(a_texG);
            }

            const int a_texB =  glGetAttribLocation(prog, "vTexB");
            if (a_texB > -1)
            {
                glVertexAttribPointer(a_texB, 2, GL_FLOAT, GL_FALSE, sizeof(ovrDistortionVertex), &mesh.pVertexData[0].TanEyeAnglesB);
                glEnableVertexAttribArray(a_texB);
            }

            ovrVector2f uvoff =
                m_uvScaleOffsetOut[2*eyeNum + 1];
                //DistortionData.UVScaleOffset[eyeNum][0];
            ovrVector2f uvscale =
                m_uvScaleOffsetOut[2*eyeNum + 0];
                //DistortionData.UVScaleOffset[eyeNum][1];

            glUniform2f(eyeShader.GetUniLoc("EyeToSourceUVOffset"), uvoff.x, uvoff.y);
            glUniform2f(eyeShader.GetUniLoc("EyeToSourceUVScale"), uvscale.x, uvscale.y);


#if 0
            // Setup shader constants
            DistortionData.Shaders->SetUniform2f(
                "EyeToSourceUVScale",
                DistortionData.UVScaleOffset[eyeNum][0].x,
                DistortionData.UVScaleOffset[eyeNum][0].y);
            DistortionData.Shaders->SetUniform2f(
                "EyeToSourceUVOffset",
                DistortionData.UVScaleOffset[eyeNum][1].x,
                DistortionData.UVScaleOffset[eyeNum][1].y);

            if (distortionCaps & ovrDistortionCap_TimeWarp)
            { // TIMEWARP - Additional shader constants required
                ovrMatrix4f timeWarpMatrices[2];
                ovrHmd_GetEyeTimewarpMatrices(HMD, (ovrEyeType)eyeNum, eyeRenderPoses[eyeNum], timeWarpMatrices);
                //WARNING!!! These matrices are transposed in SetUniform4x4f, before being used by the shader.
                DistortionData.Shaders->SetUniform4x4f("EyeRotationStart", Matrix4f(timeWarpMatrices[0]));
                DistortionData.Shaders->SetUniform4x4f("EyeRotationEnd", Matrix4f(timeWarpMatrices[1]));
            }

            // Perform distortion
            pRender->Render(
                &distortionShaderFill,
                DistortionData.MeshVBs[eyeNum],
                DistortionData.MeshIBs[eyeNum]);
#endif

            glActiveTexture(GL_TEXTURE0);
            glBindTexture(GL_TEXTURE_2D, m_renderBuffer.tex);
            glUniform1i(eyeShader.GetUniLoc("fboTex"), 0);

            // This is the only uniform that changes per-frame
            glUniform1f(eyeShader.GetUniLoc("fboScale"), m_fboScale);


            glDrawElements(
                GL_TRIANGLES,
                mesh.IndexCount,
                GL_UNSIGNED_SHORT,
                &mesh.pIndexData[0]);
        }
        glBindVertexArray(0);
        glUseProgram(0);
    }

    ovrHmd_EndFrameTiming(hmd);
}