Example #1
void CineCameraClass::Pan(float degree)
{
	//Log the call to the debug console
	const wchar_t* outstring = L"CineCameraClass::Pan\n";
	WriteConsole(GetStdHandle(STD_OUTPUT_HANDLE), outstring, (DWORD)wcslen(outstring), NULL, NULL);

	/*TO DO
	Pan the camera left by rotating CW about the camera's up direction vector.

	Create a rotation quaternion that represents a rotation about the
	camera's up vector.
	A quaternion is created by supplying an axis and an angle, and it
	represents a rotation of that angle about the axis.
	See how this is done for the tilt operation.
	*/

	rotation.x += degree*(XM_PIDIV2/100*CAMERA_PAN_SPEED);
	//Pan the camera right by rotating CW about the camera's up direction vector
	double horizontalMagnitude = sqrt(direction.x * direction.x + direction.z * direction.z);
	float angle = (float)atan(direction.y / horizontalMagnitude);

	XMVECTOR sideWaysVector = XMVector3Normalize(XMVector3Cross(XMLoadFloat3(&upDirection), XMLoadFloat3(&direction) ));
	XMVECTOR tiltRotationQuaternion = XMQuaternionRotationAxis(sideWaysVector, angle);
	XMStoreFloat3(&direction, XMVector3Rotate( XMLoadFloat3(&direction), tiltRotationQuaternion));
	XMStoreFloat3(&upDirection, XMVector3Rotate( XMLoadFloat3(&upDirection), tiltRotationQuaternion));

	XMVECTOR panRotationQuaternion = XMQuaternionRotationAxis(XMLoadFloat3(&upDirection), degree*(XM_PIDIV2/100*CAMERA_PAN_SPEED));
	XMStoreFloat3(&direction, XMVector3Rotate( XMLoadFloat3(&direction), panRotationQuaternion));
	
	sideWaysVector = XMVector3Normalize(XMVector3Cross(XMLoadFloat3(&upDirection), XMLoadFloat3(&direction) ));
	tiltRotationQuaternion = XMQuaternionRotationAxis(sideWaysVector, -angle);
	XMStoreFloat3(&direction, XMVector3Rotate( XMLoadFloat3(&direction), tiltRotationQuaternion));
	XMStoreFloat3(&upDirection, XMVector3Rotate( XMLoadFloat3(&upDirection), tiltRotationQuaternion));
}
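
The TO DO comment above describes the heart of the operation: build a quaternion from the camera's up axis and the pan angle, then rotate the view direction with it. A minimal sketch of just that step, assuming the same direction/upDirection members; the helper name is hypothetical:

void CineCameraClass::PanAboutUp(float radians)
{
	//Rotate the view direction about the camera's up vector by the given angle
	XMVECTOR panRotationQuaternion = XMQuaternionRotationAxis(XMLoadFloat3(&upDirection), radians);
	XMStoreFloat3(&direction, XMVector3Rotate(XMLoadFloat3(&direction), panRotationQuaternion));
}
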
Example #2
    void MainLoop()
    {
	    Layer[0] = new VRLayer(Session);

	    while (HandleMessages())
	    {
			//Check that we're visible before applying velocity changes; otherwise these
			//updates run many times while the app isn't visible, and the sheer number of
			//iterations would leave us miles away from our start point.
			ovrSessionStatus sessionStatus;
			ovr_GetSessionStatus(Session, &sessionStatus);
			if (sessionStatus.IsVisible)
			{
				// Take out manual yaw rotation (leaving button move for now)
				ActionFromInput(1, false);
				ovrTrackingState trackingState = Layer[0]->GetEyePoses();

				// Set various control methods into camera
				MainCam->Pos = XMVectorAdd(MainCam->Pos, FindVelocityFromTilt(this, Layer[0], &trackingState));

				MainCam->Pos = XMVectorSet(XMVectorGetX(MainCam->Pos),
					GetAccelJumpPosY(this, &trackingState),
					XMVectorGetZ(MainCam->Pos), 0);

				MainCam->Rot = GetAutoYawRotation(Layer[0]);

				// If tap side of Rift, then fire a bullet
				bool singleTap = WasItTapped(trackingState.HeadPose.LinearAcceleration);

				static XMVECTOR bulletPos = XMVectorZero();
				static XMVECTOR bulletVel = XMVectorZero();
				if (singleTap)
				{
					XMVECTOR eye0 = ConvertToXM(Layer[0]->EyeRenderPose[0].Position);
					XMVECTOR eye1 = ConvertToXM(Layer[0]->EyeRenderPose[1].Position);
					XMVECTOR midEyePos = XMVectorScale(XMVectorAdd(eye0, eye1), 0.5f);

					XMVECTOR totalRot = XMQuaternionMultiply(ConvertToXM(Layer[0]->EyeRenderPose[0].Orientation), MainCam->Rot);
					XMVECTOR posOfOrigin = XMVectorAdd(MainCam->Pos, XMVector3Rotate(midEyePos, MainCam->Rot));

					XMVECTOR unitDirOfMainCamera = XMVector3Rotate(XMVectorSet(0, 0, -1, 0), totalRot);

					bulletPos = XMVectorAdd(posOfOrigin, XMVectorScale(unitDirOfMainCamera, 2.0f));
					bulletVel = XMVectorScale(unitDirOfMainCamera, 0.3f);
				}

				// Move missile on, and set its position
				bulletPos = XMVectorAdd(bulletPos, bulletVel);
				XMStoreFloat3(&RoomScene->Models[1]->Pos, bulletPos);

				for (int eye = 0; eye < 2; ++eye)
				{
					Layer[0]->RenderSceneToEyeBuffer(MainCam, RoomScene, eye);
				}

				Layer[0]->PrepareLayerHeader();
				DistortAndPresent(1);
			}
	    }
    }
Example #3
void Graphics::exe_cam_curr(uint32_t const _i_zad) {
	XMStoreFloat4x4(&cam.mtx_view, XMMatrixLookAtLH(
		XMLoadFloat3(&cam.pos),
		XMVector3Rotate(XMVectorSet(0.0f, 0.0f, 1.0f, 0.0f), XMLoadFloat4(&cam.quat)) + XMLoadFloat3(&cam.pos),
		XMVector3Rotate(XMVectorSet(0.0f, 1.0f, 0.0f, 0.0f), XMLoadFloat4(&cam.quat))
	));
	XMStoreFloat4x4(&cam.mtx_proj, XMMatrixPerspectiveFovLH(
		cam.angle * 3.14f/180, float(szerRend)/wysRend, cam.near_z, cam.far_z
	));
	task.erase(_i_zad);
}
Example #4
void GuardianSystemDemo::Render()
{
    // Get current eye pose for rendering
    double eyePoseTime = 0;
    ovrPosef eyePose[ovrEye_Count] = {};
    ovr_GetEyePoses(mSession, mFrameIndex, ovrTrue, mHmdToEyeOffset, eyePose, &eyePoseTime);

    // Render each eye
    for (int i = 0; i < ovrEye_Count; ++i) {
        int renderTargetIndex = 0;
        ovr_GetTextureSwapChainCurrentIndex(mSession, mTextureChain[i], &renderTargetIndex);
        ID3D11RenderTargetView* renderTargetView = mEyeRenderTargets[i][renderTargetIndex];
        ID3D11DepthStencilView* depthTargetView = mEyeDepthTarget[i];

        // Clear and set render/depth target and viewport
        DIRECTX.SetAndClearRenderTarget(renderTargetView, depthTargetView, 0.2f, 0.2f, 0.2f, 1.0f);
        DIRECTX.SetViewport((float)mEyeRenderViewport[i].Pos.x, (float)mEyeRenderViewport[i].Pos.y, 
            (float)mEyeRenderViewport[i].Size.w, (float)mEyeRenderViewport[i].Size.h);

        // Eye
        XMVECTOR eyeRot = XMVectorSet(eyePose[i].Orientation.x, eyePose[i].Orientation.y, 
            eyePose[i].Orientation.z, eyePose[i].Orientation.w);
        XMVECTOR eyePos = XMVectorSet(eyePose[i].Position.x, eyePose[i].Position.y, eyePose[i].Position.z, 0);
        XMVECTOR eyeForward = XMVector3Rotate(XMVectorSet(0, 0, -1, 0), eyeRot);

        // Matrices
        XMMATRIX viewMat = XMMatrixLookAtRH(eyePos, XMVectorAdd(eyePos, eyeForward), 
            XMVector3Rotate(XMVectorSet(0.0f, 1.0f, 0.0f, 0.0f), eyeRot));
        ovrMatrix4f proj = ovrMatrix4f_Projection(mEyeRenderLayer.Fov[i], 0.001f, 1000.0f, ovrProjection_None);
        XMMATRIX projMat = XMMatrixTranspose(XMMATRIX(&proj.M[0][0]));
        XMMATRIX viewProjMat = XMMatrixMultiply(viewMat, projMat);

        // Render and commit to swap chain
        mDynamicScene.Render(&viewProjMat, 1.0f, 1.0f, 1.0f, 1.0f, true);
        ovr_CommitTextureSwapChain(mSession, mTextureChain[i]);

        // Update eye layer
        mEyeRenderLayer.ColorTexture[i] = mTextureChain[i];
        mEyeRenderLayer.RenderPose[i] = eyePose[i];
        mEyeRenderLayer.SensorSampleTime = eyePoseTime;
    }

    // Submit frames
    ovrLayerHeader* layers = &mEyeRenderLayer.Header;
    ovrResult result = ovr_SubmitFrame(mSession, mFrameIndex++, nullptr, &layers, 1);
    if (!OVR_SUCCESS(result)) {
        printf("ovr_SubmitFrame failed\n"); exit(-1);
    }
}
Example #5
XMFLOAT3 Transform::GetForwardVector()
{
	XMVECTOR forward = XMVector3Rotate(XMVectorSet(0.0f, 0.0f, 1.0f, 0.0f), XMQuaternionRotationRollPitchYawFromVector(XMLoadFloat3(&rotation)));
	XMFLOAT3 realForward;
	XMStoreFloat3(&realForward, forward);
	return realForward;
}
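
Under the same assumption that rotation stores pitch/yaw/roll Euler angles in an XMFLOAT3, the right (or up) vector falls out of the identical pattern by rotating a different basis axis; a sketch with a hypothetical GetRightVector member:

XMFLOAT3 Transform::GetRightVector()
{
	//Rotate the local +X axis by the same Euler-angle quaternion used for the forward vector
	XMVECTOR right = XMVector3Rotate(XMVectorSet(1.0f, 0.0f, 0.0f, 0.0f), XMQuaternionRotationRollPitchYawFromVector(XMLoadFloat3(&rotation)));
	XMFLOAT3 realRight;
	XMStoreFloat3(&realRight, right);
	return realRight;
}
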
Example #6
void Transform::MoveRelative(float addX, float addY, float addZ)
{
	isDirty = true;
	XMVECTOR dir = XMVector3Rotate(XMVectorSet(addX, addY, addZ, 0.0f),
		XMQuaternionRotationRollPitchYawFromVector(XMLoadFloat3(&rotation)));
	XMStoreFloat3(&position, XMVectorAdd(dir, XMLoadFloat3(&position)));
}
Example #7
void Renderer::Cull( XNA::Frustum* frustum, Transform* pTransform )
{
	if (m_Mesh)
	{
		XNA::OrientedBox objectBox;
		for( int i=0; i<m_Mesh->GetNumberOfSubmeshes(); i++ )
		{
			Submesh* submesh = m_Mesh->GetSubmesh( i );

			XMVECTOR extents = XMLoadFloat3( &submesh->GetGeometryChunk()->GetAABB()->Extents );
			XMVECTOR scale = XMLoadFloat3( &pTransform->GetScale().intoXMFLOAT3() );
			XMVECTOR offset = XMLoadFloat3( &submesh->GetGeometryChunk()->GetAABB()->Center )*scale;
			XMVECTOR position = XMVector3Rotate( offset, XMLoadFloat4(&pTransform->GetOrientation().intoXMFLOAT4()) );
			position += XMLoadFloat3( &pTransform->GetPosition().intoXMFLOAT3() );
			XMStoreFloat3( &objectBox.Center, position );
			XMStoreFloat3( &objectBox.Extents, extents*scale );
			objectBox.Orientation = pTransform->GetOrientation().intoXMFLOAT4();
	
			m_SubmeshRenderData[i].bVisible = ( XNA::IntersectOrientedBoxFrustum( &objectBox, frustum ) > 0 );
		}
	}
}
Example #8
void Renderer::CullLight( SpotLight* light, Transform* pTransform )
{
	if (m_Mesh)
	{
		XNA::OrientedBox objectBox;
		XNA::Frustum lightFrustum = light->GetFrustum();
		for( int i=0; i<m_Mesh->GetNumberOfSubmeshes(); i++ )
		{
			if (m_SubmeshRenderData[i].bVisible)
			{
				Submesh* submesh = m_Mesh->GetSubmesh( i );

				XMVECTOR extents = XMLoadFloat3( &submesh->GetGeometryChunk()->GetAABB()->Extents );
				XMVECTOR scale = XMLoadFloat3( &pTransform->GetScale().intoXMFLOAT3() );
				XMVECTOR offset = XMLoadFloat3( &submesh->GetGeometryChunk()->GetAABB()->Center )*scale;
				XMVECTOR position = XMVector3Rotate( offset, XMLoadFloat4(&pTransform->GetOrientation().intoXMFLOAT4()) );
				position += XMLoadFloat3( &pTransform->GetPosition().intoXMFLOAT3() );
				XMStoreFloat3( &objectBox.Center, position );
				XMStoreFloat3( &objectBox.Extents, extents*scale );
				objectBox.Orientation = pTransform->GetOrientation().intoXMFLOAT4();

				if( XNA::IntersectOrientedBoxFrustum( &objectBox, &lightFrustum ) > 0 )
					m_SubmeshRenderData[i].AffectingSpotLights.push_back( light );
			}
		}
	}
}
Example #9
void Camera::rotate(float amt)
{
	XMVECTOR fwd = XMLoadFloat3(&forward);
	XMVECTOR rotation = XMQuaternionRotationRollPitchYaw(0, amt, 0);  
	fwd = XMVector3Rotate(fwd, rotation);
	XMStoreFloat3(&forward, fwd);
}
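
A camera rotated this way is typically consumed by building a view matrix from the stored position and forward members; a minimal sketch assuming a fixed world up of +Y (the accessor name is hypothetical):

XMMATRIX Camera::GetViewMatrix()
{
	//Look from position along forward, keeping +Y as the up reference
	return XMMatrixLookToLH(XMLoadFloat3(&position),
		XMLoadFloat3(&forward),
		XMVectorSet(0.0f, 1.0f, 0.0f, 0.0f));
}
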
Example #10
bool MyListener::frameStarted(float timeSinceLastFrame)
{
	
	mKeyboard->capture();
	mMouse->capture();
	cameraMan->update(timeSinceLastFrame);
	debugWindow->updateFPS(timeSinceLastFrame);

	elapsedTime+=timeSinceLastFrame*0.3f;
	Light* l = mSceneMgr->mShadingMgr->directionalLight;

	
	//XMStoreFloat3(&baseDir,XMVector2Normalize(XMLoadFloat3(&baseDir)));

	//l->direction = baseDir;//XMFLOAT3(baseDir.x*cos(elapsedTime)-baseDir.z*sin(elapsedTime),baseDir.y,baseDir.x*cos(elapsedTime)+baseDir.z*sin(elapsedTime));
	//XMStoreFloat3(&l->direction,XMVector2Normalize(XMLoadFloat3(&l->direction)));
	XMStoreFloat3(&l->direction,XMVector3Rotate(XMLoadFloat3(&baseDir),XMLoadFloat4(&lightQuat)));

	mSceneMgr->getGuiManager()->update(timeSinceLastFrame/1000.0f);

	mPhysicsMgr->fetchResults(true);
	mPhysicsMgr->synchronizeEntities();
	mPhysicsMgr->startSimulating(timeSinceLastFrame);

	mShadowMapping->renderShadowMaps();

	//if(count%2==0)
	voxelScene->voxelizeScene(XMFLOAT3(30,30,30),XMFLOAT3(0,0,0));

	mShadowMapping->renderCaustics();

	voxelScene->endFrame(XMFLOAT3(30,30,30),XMFLOAT3(0,0,0));

	count++;

	//voxelScene->unifyVoxels();
	mSceneMgr->setCurrentCamera(mSceneMgr->getCamera("main"));
	mSceneMgr->mShadingMgr->updatePerFrameConstants(timeSinceLastFrame,mSceneMgr->getCamera(),mSceneMgr->getCamera("sun"));
	
	mRS->setBackbufferAsRenderTarget();
	mRS->clearViews();
	//mSceneMgr->renderSceneWithMaterial(mSceneMgr->getMaterial("depthWrite"));
	//mSceneMgr->renderScene();

	pp->render();


	mSceneMgr->getGuiManager()->render();

	mRS->swapChain_->Present(0,0);

	if(xp) xr+=timeSinceLastFrame; else if(xm) xr-=timeSinceLastFrame;
	if(zp) zr+=timeSinceLastFrame; else if(zm) zr-=timeSinceLastFrame;

	XMStoreFloat4(&lightQuat,XMQuaternionRotationRollPitchYaw(xr,0,zr));

	return continue_rendering;
}
Example #11
//-----------------------------------------------------------------------------
// Transform an axis aligned box by an angle preserving transform.
//-----------------------------------------------------------------------------
VOID TransformAxisAlignedBoxCustom(XNA::AxisAlignedBox* pOut, const XNA::AxisAlignedBox* pIn, FXMVECTOR Scale, FXMVECTOR Rotation, FXMVECTOR Translation )
{
    XMASSERT( pOut );
    XMASSERT( pIn );

    static XMVECTOR Offset[8] =
    {
        { -1.0f, -1.0f, -1.0f, 0.0f },
        { -1.0f, -1.0f,  1.0f, 0.0f },
        { -1.0f,  1.0f, -1.0f, 0.0f },
        { -1.0f,  1.0f,  1.0f, 0.0f },
        {  1.0f, -1.0f, -1.0f, 0.0f },
        {  1.0f, -1.0f,  1.0f, 0.0f },
        {  1.0f,  1.0f, -1.0f, 0.0f },
        {  1.0f,  1.0f,  1.0f, 0.0f }
    };

    // Load center and extents.
    XMVECTOR Center = XMLoadFloat3( &pIn->Center );
    XMVECTOR Extents = XMLoadFloat3( &pIn->Extents );

    XMVECTOR VectorScale = Scale;//XMVectorReplicate( Scale );

    // Compute and transform the corners and find new min/max bounds.
    XMVECTOR Corner = Center + Extents * Offset[0];
    Corner = XMVector3Rotate( Corner * VectorScale, Rotation ) + Translation;

    XMVECTOR Min, Max;
    Min = Max = Corner;

    for( INT i = 1; i < 8; i++ )
    {
        Corner = Center + Extents * Offset[i];
        Corner = XMVector3Rotate( Corner * VectorScale, Rotation ) + Translation;

        Min = XMVectorMin( Min, Corner );
        Max = XMVectorMax( Max, Corner );
    }

    // Store center and extents.
    XMStoreFloat3( &pOut->Center, ( Min + Max ) * 0.5f );
    XMStoreFloat3( &pOut->Extents, ( Max - Min ) * 0.5f );

    return;
}
Example #12
void SphereWalker::orientationToPos(XMFLOAT4 qorientation, XMFLOAT3 &npos)
{
	float r = SphereWorld::instance()->getRadius();
	XMVECTOR xorientation = XMLoadFloat4(&qorientation);
	xorientation = XMQuaternionNormalize(xorientation);
	XMVECTOR qvec= XMVectorSet(0,0,r,0);
	qvec = XMVector3Rotate(qvec, xorientation);
	XMStoreFloat3(&npos, qvec);
}
Example #13
void Graphics::exe_cam_curr_pos(uint32_t const _i_zad) {
	XMVECTOR _v = XMVectorSet(cam.v.x, 0.0f, cam.v.z, 0.0f);
	XMVECTOR _dl_v = XMVector3LengthEst(_v);
	if(XMVectorGetX(_dl_v) != 0.0f) {
		_v = XMVector3Rotate(_v, XMLoadFloat4(&cam.quat));
		_v = XMVectorSetY(_v, 0.0f);
		XMVECTOR _v_modul = XMVectorAbs(_v);
		_v = _v / (XMVectorGetX(_v_modul) + XMVectorGetZ(_v_modul)) * _dl_v;
	}
	_v = XMVectorSetY(_v, cam.v.y);
	XMStoreFloat3(&cam.pos, XMLoadFloat3(&cam.pos) + _v);
	task.erase(_i_zad);
}
Example #14
void CineCameraClass::Tilt(float degree)
{

	double horizontalMagnitude = sqrt(direction.x * direction.x + direction.z * direction.z);
	float angle = (float)atan(direction.y / horizontalMagnitude);
	if ((angle - degree*(XM_PIDIV2/100*CAMERA_TILT_SPEED) < XM_PIDIV2) &&
		(angle - degree*(XM_PIDIV2/100*CAMERA_TILT_SPEED) > -XM_PIDIV2))
	{
		rotation.y += degree*(XM_PIDIV2/100*CAMERA_TILT_SPEED);
		//Log the call to the debug console
		const wchar_t* outstring = L"CineCameraClass::Tilt\n";
		WriteConsole(GetStdHandle(STD_OUTPUT_HANDLE), outstring, (DWORD)wcslen(outstring), NULL, NULL);

		//Tilt the camera upward by rotating about the sideways direction vector
	
		XMVECTOR sideWaysVector = XMVector3Normalize(XMVector3Cross(XMLoadFloat3(&upDirection), XMLoadFloat3(&direction) ));
		XMVECTOR tiltRotationQuaternion = XMQuaternionRotationAxis(sideWaysVector, degree*(XM_PIDIV2/100*CAMERA_TILT_SPEED));

		XMStoreFloat3(&direction, XMVector3Rotate( XMLoadFloat3(&direction), tiltRotationQuaternion));
		XMStoreFloat3(&upDirection, XMVector3Rotate( XMLoadFloat3(&upDirection), tiltRotationQuaternion));
	}
}
Example #15
    void MainLoop()
    {
	    Layer[0] = new VRLayer(Session);

	    while (HandleMessages())
	    {
		    ActionFromInput();
		    Layer[0]->GetEyePoses();


			// Read the remote state 
			ovrInputState inputState;
			ovr_GetInputState(Session, ovrControllerType_Remote, &inputState);
			unsigned int result = ovr_GetConnectedControllerTypes(Session);
			bool isRemoteConnected = (result & ovrControllerType_Remote) ? true : false;

			// Some auxiliary controls we're going to read from the remote. 
			XMVECTOR forward = XMVector3Rotate(XMVectorSet(0, 0, -0.05f, 0), MainCam->Rot);
			XMVECTOR right = XMVector3Rotate(XMVectorSet(0.05f, 0, 0, 0), MainCam->Rot);
			if (inputState.Buttons & ovrButton_Up)	  MainCam->Pos = XMVectorAdd(MainCam->Pos, forward);
			if (inputState.Buttons & ovrButton_Down)  MainCam->Pos = XMVectorSubtract(MainCam->Pos, forward);
			if (inputState.Buttons & ovrButton_Left)  MainCam->Pos = XMVectorSubtract(MainCam->Pos, right);
			if (inputState.Buttons & ovrButton_Right)  MainCam->Pos = XMVectorAdd(MainCam->Pos, right);


			for (int eye = 0; eye < 2; ++eye)
		    {
				//Tint the world: green if the controller is attached, otherwise red
				if (isRemoteConnected)
					Layer[0]->RenderSceneToEyeBuffer(MainCam, RoomScene, eye, 0, 0, 1,/**/ 1, 0.5f, 1, 0.5f /*green*/);
				else 
					Layer[0]->RenderSceneToEyeBuffer(MainCam, RoomScene, eye, 0, 0, 1,/**/ 1, 1, 0, 0 /*red*/);
			}

		    Layer[0]->PrepareLayerHeader();
		    DistortAndPresent(1);
	    }
    }
Example #16
	bool Camera::Update()
	{
		bool transformed = Transform.Moved;

		if (Transform.Moved)
		{
			// Get position and rotation.
			XMVECTOR rotation = Transform.GetRotation();
			XMVECTOR position = Transform.GetPosition();

			// Calculate local space directions.
			XMVECTOR forward = XMVector3Rotate(Vector3::Forward, rotation);
			XMVECTOR up = XMVector3Rotate(Vector3::Up, rotation);

			//update view matrix
			_view = XMMatrixLookToLH(position, forward, up);
			_vp = XMMatrixMultiply(_view, _projection);

			Transform.Moved = false;
		}

		return transformed;
	}
Example #17
	void Transform::Translate(Vector a_Translation, Space a_RelativeTo)
	{
		switch (a_RelativeTo)
		{
		case Space::World:
			m_Position += a_Translation;
			break;
		case Space::Local:
			XMVECTOR translationVector(a_Translation.AsXMVECTOR());
			XMVECTOR rotationVector(m_Rotation.AsXMVECTOR());
			m_Position += Vector(XMVector3Rotate(translationVector, rotationVector));
			break;
		}
	}
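
Usage sketch for the two spaces, assuming a transform variable and a Vector (x, y, z) constructor (both hypothetical here): Space::World adds the offset as-is, while Space::Local first rotates it by m_Rotation via XMVector3Rotate, so the object moves along its own axes.

	transform.Translate(Vector(0.0f, 0.0f, 1.0f), Space::World);	//one unit along world +Z
	transform.Translate(Vector(0.0f, 0.0f, 1.0f), Space::Local);	//one unit along the transform's own forward axis
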
Example #18
void CineCameraClass::RollLeft()
{
	//Log the call to the debug console
	const wchar_t* outstring = L"CineCameraClass::RollLeft\n";
	WriteConsole(GetStdHandle(STD_OUTPUT_HANDLE), outstring, (DWORD)wcslen(outstring), NULL, NULL);

	//create the quaternion that rotates about the direction vector of the camera
	XMVECTOR rollRotationQuaternion = XMQuaternionRotationAxis(XMLoadFloat3(&direction), XM_PIDIV4/100*CAMERA_ROLL_SPEED);

	//Rotate the camera's up vector with the roll quaternion.
	//The direction vector is the roll axis, so it stays unchanged.

	XMStoreFloat3(&upDirection, XMVector3Rotate( XMLoadFloat3(&upDirection), rollRotationQuaternion));

}
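
A mirrored RollRight would differ only in the sign of the roll angle; a sketch under the same member assumptions (whether the class actually exposes RollRight is not shown here):

void CineCameraClass::RollRight()
{
	//Same axis (the view direction), opposite rotation sense
	XMVECTOR rollRotationQuaternion = XMQuaternionRotationAxis(XMLoadFloat3(&direction), -XM_PIDIV4/100*CAMERA_ROLL_SPEED);
	XMStoreFloat3(&upDirection, XMVector3Rotate(XMLoadFloat3(&upDirection), rollRotationQuaternion));
}
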
Example #19
// return true to retry later (e.g. after display lost)
static bool MainLoop(bool retryCreate)
{
    // Initialize these to nullptr here to handle device lost failures cleanly
    ovrMirrorTexture mirrorTexture = nullptr;
    OculusTexture  * pEyeRenderTexture = nullptr;
    DepthBuffer    * pEyeDepthBuffer = nullptr;
    Scene          * roomScene = nullptr; 
    Camera         * mainCam = nullptr;
    ovrMirrorTextureDesc desc = {};

    bool isVisible          = true;
    long long frameIndex    = 0;
    bool useInstancing      = false;
    const int repeatDrawing = 1;

    ovrSession session;
    ovrGraphicsLuid luid;
    ovrResult result = ovr_Create(&session, &luid);
    if (!OVR_SUCCESS(result))
        return retryCreate;

    ovrHmdDesc hmdDesc = ovr_GetHmdDesc(session);

    // Setup Device and Graphics
    // Note: the mirror window can be any size, for this sample we use 1/2 the HMD resolution
    if (!DIRECTX.InitDevice(hmdDesc.Resolution.w / 2, hmdDesc.Resolution.h / 2, reinterpret_cast<LUID*>(&luid)))
        goto Done;

    ovrRecti eyeRenderViewport[2];

    // Make a single eye texture
    {
        ovrSizei eyeTexSizeL = ovr_GetFovTextureSize(session, ovrEye_Left, hmdDesc.DefaultEyeFov[0], 1.0f);
        ovrSizei eyeTexSizeR = ovr_GetFovTextureSize(session, ovrEye_Right, hmdDesc.DefaultEyeFov[1], 1.0f);
        ovrSizei textureSize;
        textureSize.w = eyeTexSizeL.w + eyeTexSizeR.w;
        textureSize.h = max(eyeTexSizeL.h, eyeTexSizeR.h);

        pEyeRenderTexture = new OculusTexture();
        if (!pEyeRenderTexture->Init(session, textureSize.w, textureSize.h))
        {
            if (retryCreate) goto Done;
            VALIDATE(OVR_SUCCESS(result), "Failed to create eye texture.");
        }

        pEyeDepthBuffer = new DepthBuffer(DIRECTX.Device, textureSize.w, textureSize.h);

        // set viewports
        eyeRenderViewport[0].Pos.x = 0;
        eyeRenderViewport[0].Pos.y = 0;
        eyeRenderViewport[0].Size = eyeTexSizeL;

        eyeRenderViewport[1].Pos.x = eyeTexSizeL.w;
        eyeRenderViewport[1].Pos.y = 0;
        eyeRenderViewport[1].Size = eyeTexSizeR;
    }

    if (!pEyeRenderTexture->TextureChain)
    {
        if (retryCreate) goto Done;
        VALIDATE(false, "Failed to create texture.");
    }

    // Create a mirror to see on the monitor.
    desc.Format = OVR_FORMAT_R8G8B8A8_UNORM_SRGB;
    desc.Width = DIRECTX.WinSizeW;
    desc.Height = DIRECTX.WinSizeH;
    result = ovr_CreateMirrorTextureDX(session, DIRECTX.Device, &desc, &mirrorTexture);
    if (!OVR_SUCCESS(result))
    {
        if (retryCreate) goto Done;
        VALIDATE(false, "Failed to create mirror texture.");
    }

    // Create the room model
    roomScene = new Scene(false);

    // Create camera
    mainCam = new Camera(&XMVectorSet(0.0f, 1.6f, 5.0f, 0), &XMQuaternionIdentity());

    // Setup VR components, filling out description
    ovrEyeRenderDesc eyeRenderDesc[2];
    eyeRenderDesc[0] = ovr_GetRenderDesc(session, ovrEye_Left, hmdDesc.DefaultEyeFov[0]);
    eyeRenderDesc[1] = ovr_GetRenderDesc(session, ovrEye_Right, hmdDesc.DefaultEyeFov[1]);

    // Main loop
    while (DIRECTX.HandleMessages())
    {
        XMVECTOR forward = XMVector3Rotate(XMVectorSet(0, 0, -0.05f, 0), mainCam->Rot);
        XMVECTOR right   = XMVector3Rotate(XMVectorSet(0.05f, 0, 0, 0),  mainCam->Rot);
        XMVECTOR up      = XMVector3Rotate(XMVectorSet(0, 0.05f, 0, 0), mainCam->Rot);
        if (DIRECTX.Key['W'] || DIRECTX.Key[VK_UP])	  mainCam->Pos = XMVectorAdd(mainCam->Pos, forward);
        if (DIRECTX.Key['S'] || DIRECTX.Key[VK_DOWN]) mainCam->Pos = XMVectorSubtract(mainCam->Pos, forward);
        if (DIRECTX.Key['D'])                         mainCam->Pos = XMVectorAdd(mainCam->Pos, right);
        if (DIRECTX.Key['A'])                         mainCam->Pos = XMVectorSubtract(mainCam->Pos, right);
        if (DIRECTX.Key['Q'])                         mainCam->Pos = XMVectorAdd(mainCam->Pos, up);
        if (DIRECTX.Key['E'])                         mainCam->Pos = XMVectorSubtract(mainCam->Pos, up);

        static float Yaw = 0;
        if (DIRECTX.Key[VK_LEFT])  mainCam->Rot = XMQuaternionRotationRollPitchYaw(0, Yaw += 0.02f, 0);
        if (DIRECTX.Key[VK_RIGHT]) mainCam->Rot = XMQuaternionRotationRollPitchYaw(0, Yaw -= 0.02f, 0);

        if (DIRECTX.Key['P'])
            ovr_SetInt(session, OVR_PERF_HUD_MODE, int(ovrPerfHud_AppRenderTiming));
        else
            ovr_SetInt(session, OVR_PERF_HUD_MODE, int(ovrPerfHud_Off));

        useInstancing = DIRECTX.Key['I'];

        // Animate the cube
        static float cubeClock = 0;
        roomScene->Models[0]->Pos = XMFLOAT3(9 * sin(cubeClock), 3, 9 * cos(cubeClock += 0.015f));

        // Get both eye poses simultaneously, with IPD offset already included. 
        ovrPosef         EyeRenderPose[2];
        ovrVector3f      HmdToEyeOffset[2] = { eyeRenderDesc[0].HmdToEyeOffset,
                                               eyeRenderDesc[1].HmdToEyeOffset };

        double sensorSampleTime;    // sensorSampleTime is fed into the layer later
        ovr_GetEyePoses(session, frameIndex, ovrTrue, HmdToEyeOffset, EyeRenderPose, &sensorSampleTime);

        // Render scene to eye texture
        if (isVisible)
        {
            DIRECTX.SetAndClearRenderTarget(pEyeRenderTexture->GetRTV(), pEyeDepthBuffer);

            // calculate eye transforms
            XMMATRIX viewProjMatrix[2];
            for (int eye = 0; eye < 2; ++eye)
            {
                //Get the pose information in XM format
                XMVECTOR eyeQuat = XMLoadFloat4((XMFLOAT4 *)&EyeRenderPose[eye].Orientation.x);
                XMVECTOR eyePos = XMVectorSet(EyeRenderPose[eye].Position.x, EyeRenderPose[eye].Position.y, EyeRenderPose[eye].Position.z, 0);

                // Get view and projection matrices for the Rift camera
                XMVECTOR CombinedPos = XMVectorAdd(mainCam->Pos, XMVector3Rotate(eyePos, mainCam->Rot));
                Camera finalCam(&CombinedPos, &(XMQuaternionMultiply(eyeQuat, mainCam->Rot)));
                XMMATRIX view = finalCam.GetViewMatrix();
                ovrMatrix4f p = ovrMatrix4f_Projection(eyeRenderDesc[eye].Fov, 0.1f, 100.0f, ovrProjection_None);
                XMMATRIX proj = XMMatrixSet(p.M[0][0], p.M[1][0], p.M[2][0], p.M[3][0],
                    p.M[0][1], p.M[1][1], p.M[2][1], p.M[3][1],
                    p.M[0][2], p.M[1][2], p.M[2][2], p.M[3][2],
                    p.M[0][3], p.M[1][3], p.M[2][3], p.M[3][3]);

                if (useInstancing)
                {
                    // scale and offset projection matrix to shift image to correct part of texture for each eye
                    XMMATRIX scale = XMMatrixScaling(0.5f, 1.0f, 1.0f);
                    XMMATRIX translate = XMMatrixTranslation((eye==0) ? -0.5f : 0.5f, 0.0f, 0.0f);
                    proj = XMMatrixMultiply(proj, scale);
                    proj = XMMatrixMultiply(proj, translate);
                }

                viewProjMatrix[eye] = XMMatrixMultiply(view, proj);
            }

            if (useInstancing)
            {
                // use instancing for stereo
                DIRECTX.SetViewport(0.0f, 0.0f, (float)eyeRenderViewport[0].Size.w + eyeRenderViewport[1].Size.w, (float)eyeRenderViewport[0].Size.h);

                // render scene
                for (int i = 0; i < repeatDrawing; i++)
                    roomScene->RenderInstanced(&viewProjMatrix[0], 1, 1, 1, 1, true);
            }
            else
            {
                // non-instanced path
                for (int eye = 0; eye < 2; ++eye)
                {
                    // set viewport
                    DIRECTX.SetViewport((float)eyeRenderViewport[eye].Pos.x, (float)eyeRenderViewport[eye].Pos.y,
                        (float)eyeRenderViewport[eye].Size.w, (float)eyeRenderViewport[eye].Size.h);

                    // render scene
                    for (int i = 0; i < repeatDrawing; i++)
                        roomScene->Render(&viewProjMatrix[eye], 1, 1, 1, 1, true);
                }
            }

            // Commit rendering to the swap chain
            pEyeRenderTexture->Commit();
        }

        // Initialize our single full screen Fov layer.
        ovrLayerEyeFov ld = {};
        ld.Header.Type = ovrLayerType_EyeFov;
        ld.Header.Flags = 0;
        ld.SensorSampleTime = sensorSampleTime;

        for (int eye = 0; eye < 2; ++eye)
        {
            ld.ColorTexture[eye] = pEyeRenderTexture->TextureChain;
            ld.Viewport[eye] = eyeRenderViewport[eye];
            ld.Fov[eye] = hmdDesc.DefaultEyeFov[eye];
            ld.RenderPose[eye] = EyeRenderPose[eye];
        }

        ovrLayerHeader* layers = &ld.Header;
        result = ovr_SubmitFrame(session, frameIndex, nullptr, &layers, 1);
        // exit the rendering loop if submit returns an error, will retry on ovrError_DisplayLost
        if (!OVR_SUCCESS(result))
            goto Done;

        isVisible = (result == ovrSuccess);

        // Render mirror
        ID3D11Texture2D* tex = nullptr;
        ovr_GetMirrorTextureBufferDX(session, mirrorTexture, IID_PPV_ARGS(&tex));
        DIRECTX.Context->CopyResource(DIRECTX.BackBuffer, tex);
        tex->Release();
        DIRECTX.SwapChain->Present(0, 0);

        frameIndex++;
    }

    // Release resources
Done:
    delete mainCam;
    delete roomScene;
    if (mirrorTexture) ovr_DestroyMirrorTexture(session, mirrorTexture);
    delete pEyeRenderTexture;
    delete pEyeDepthBuffer;

    DIRECTX.ReleaseDevice();
    ovr_Destroy(session);

    // Retry on ovrError_DisplayLost
    return retryCreate || OVR_SUCCESS(result) || (result == ovrError_DisplayLost);
}
Example #20
// return true to retry later (e.g. after display lost)
static bool MainLoop(bool retryCreate)
{
    // Initialize these to nullptr here to handle device lost failures cleanly
    ovrMirrorTexture            mirrorTexture = nullptr;
    OculusEyeTexture*           pEyeRenderTexture[2] = { nullptr, nullptr };
    Scene*                      roomScene = nullptr; 
    Camera*                     mainCam = nullptr;
    ovrMirrorTextureDesc        mirrorDesc = {};

    ovrSession session;
    ovrGraphicsLuid luid;
    ovrResult result = ovr_Create(&session, &luid);
    if (!OVR_SUCCESS(result))
        return retryCreate;

    ovrHmdDesc hmdDesc = ovr_GetHmdDesc(session);

    // Setup Device and Graphics
    // Note: the mirror window can be any size, for this sample we use 1/2 the HMD resolution
    if (!DIRECTX.InitDevice(hmdDesc.Resolution.w / 2, hmdDesc.Resolution.h / 2, reinterpret_cast<LUID*>(&luid)))
        goto Done;

    // Make the eye render buffers (caution if actual size < requested due to HW limits). 
    ovrRecti eyeRenderViewport[2];

    for (int eye = 0; eye < 2; ++eye)
    {
        ovrSizei idealSize = ovr_GetFovTextureSize(session, (ovrEyeType)eye, hmdDesc.DefaultEyeFov[eye], 1.0f);
        pEyeRenderTexture[eye] = new OculusEyeTexture();
        if (!pEyeRenderTexture[eye]->Init(session, idealSize.w, idealSize.h, true))
        {
            if (retryCreate) goto Done;
            FATALERROR("Failed to create eye texture.");
        }

        eyeRenderViewport[eye].Pos.x = 0;
        eyeRenderViewport[eye].Pos.y = 0;
        eyeRenderViewport[eye].Size = idealSize;
        if (!pEyeRenderTexture[eye]->TextureChain)
        {
            if (retryCreate) goto Done;
            FATALERROR("Failed to create texture.");
        }
    }

    // Create a mirror to see on the monitor.
    mirrorDesc.Format = OVR_FORMAT_R8G8B8A8_UNORM_SRGB;
    mirrorDesc.Width = DIRECTX.WinSizeW;
    mirrorDesc.Height = DIRECTX.WinSizeH;
    result = ovr_CreateMirrorTextureDX(session, DIRECTX.CommandQueue, &mirrorDesc, &mirrorTexture);
    if (!OVR_SUCCESS(result))
    {
        if (retryCreate) goto Done;
        FATALERROR("Failed to create mirror texture.");
    }

    // Create the room model
    roomScene = new Scene(false);

    // Create camera
    mainCam = new Camera(XMVectorSet(0.0f, 1.6f, 5.0f, 0), XMQuaternionIdentity());

    // Setup VR components, filling out description
    ovrEyeRenderDesc eyeRenderDesc[2];
    eyeRenderDesc[0] = ovr_GetRenderDesc(session, ovrEye_Left, hmdDesc.DefaultEyeFov[0]);
    eyeRenderDesc[1] = ovr_GetRenderDesc(session, ovrEye_Right, hmdDesc.DefaultEyeFov[1]);

    long long frameIndex = 0;

    bool drawMirror = true;

    DIRECTX.InitFrame(drawMirror);

    // Main loop
    while (DIRECTX.HandleMessages())
    {
        ovrSessionStatus sessionStatus;
        ovr_GetSessionStatus(session, &sessionStatus);
        if (sessionStatus.ShouldQuit)
        {
            // The application has been asked to quit, so don't request a retry
            retryCreate = false;
            break;
        }
        if (sessionStatus.ShouldRecenter)
            ovr_RecenterTrackingOrigin(session);

        if (sessionStatus.IsVisible)
        {
            XMVECTOR forward = XMVector3Rotate(XMVectorSet(0, 0, -0.05f, 0), mainCam->GetRotVec());
            XMVECTOR right   = XMVector3Rotate(XMVectorSet(0.05f, 0, 0, 0),  mainCam->GetRotVec());
            XMVECTOR mainCamPos = mainCam->GetPosVec();
            XMVECTOR mainCamRot = mainCam->GetRotVec();
            if (DIRECTX.Key['W'] || DIRECTX.Key[VK_UP])      mainCamPos = XMVectorAdd(     mainCamPos, forward);
            if (DIRECTX.Key['S'] || DIRECTX.Key[VK_DOWN])    mainCamPos = XMVectorSubtract(mainCamPos, forward);
            if (DIRECTX.Key['D'])                            mainCamPos = XMVectorAdd(     mainCamPos, right);
            if (DIRECTX.Key['A'])                            mainCamPos = XMVectorSubtract(mainCamPos, right);
            static float Yaw = 0;
            if (DIRECTX.Key[VK_LEFT])  mainCamRot = XMQuaternionRotationRollPitchYaw(0, Yaw += 0.02f, 0);
            if (DIRECTX.Key[VK_RIGHT]) mainCamRot = XMQuaternionRotationRollPitchYaw(0, Yaw -= 0.02f, 0);

            mainCam->SetPosVec(mainCamPos);
            mainCam->SetRotVec(mainCamRot);

            // Animate the cube
            static float cubeClock = 0;
            roomScene->Models[0]->Pos = XMFLOAT3(9 * sin(cubeClock), 3, 9 * cos(cubeClock += 0.015f));

            // Get both eye poses simultaneously, with IPD offset already included. 
            ovrPosef    EyeRenderPose[2];
            ovrVector3f HmdToEyeOffset[2] = { eyeRenderDesc[0].HmdToEyeOffset,
                                              eyeRenderDesc[1].HmdToEyeOffset };

            double sensorSampleTime;    // sensorSampleTime is fed into the layer later
            ovr_GetEyePoses(session, frameIndex, ovrTrue, HmdToEyeOffset, EyeRenderPose, &sensorSampleTime);

            // Render Scene to Eye Buffers
            for (int eye = 0; eye < 2; ++eye)
            {
                DIRECTX.SetActiveContext(eye == 0 ? DrawContext_EyeRenderLeft : DrawContext_EyeRenderRight);

                DIRECTX.SetActiveEye(eye);

                CD3DX12_RESOURCE_BARRIER resBar = CD3DX12_RESOURCE_BARRIER::Transition(pEyeRenderTexture[eye]->GetD3DResource(),
                                                                                       D3D12_RESOURCE_STATE_PIXEL_SHADER_RESOURCE,
                                                                                       D3D12_RESOURCE_STATE_RENDER_TARGET);
                DIRECTX.CurrentFrameResources().CommandLists[DIRECTX.ActiveContext]->ResourceBarrier(1, &resBar);

                DIRECTX.SetAndClearRenderTarget(pEyeRenderTexture[eye]->GetRtv(), pEyeRenderTexture[eye]->GetDsv());
                DIRECTX.SetViewport((float)eyeRenderViewport[eye].Pos.x, (float)eyeRenderViewport[eye].Pos.y,
                                    (float)eyeRenderViewport[eye].Size.w, (float)eyeRenderViewport[eye].Size.h);
                                
                //Get the pose information in XM format
                XMVECTOR eyeQuat = XMVectorSet(EyeRenderPose[eye].Orientation.x, EyeRenderPose[eye].Orientation.y,
                                               EyeRenderPose[eye].Orientation.z, EyeRenderPose[eye].Orientation.w);
                XMVECTOR eyePos = XMVectorSet(EyeRenderPose[eye].Position.x, EyeRenderPose[eye].Position.y, EyeRenderPose[eye].Position.z, 0);

                // Get view and projection matrices for the Rift camera
                Camera finalCam(XMVectorAdd(mainCamPos, XMVector3Rotate(eyePos, mainCamRot)), XMQuaternionMultiply(eyeQuat, mainCamRot));
                XMMATRIX view = finalCam.GetViewMatrix();
                ovrMatrix4f p = ovrMatrix4f_Projection(eyeRenderDesc[eye].Fov, 0.2f, 1000.0f, ovrProjection_None);
                XMMATRIX proj = XMMatrixSet(p.M[0][0], p.M[1][0], p.M[2][0], p.M[3][0],
                                            p.M[0][1], p.M[1][1], p.M[2][1], p.M[3][1],
                                            p.M[0][2], p.M[1][2], p.M[2][2], p.M[3][2],
                                            p.M[0][3], p.M[1][3], p.M[2][3], p.M[3][3]);
                XMMATRIX prod = XMMatrixMultiply(view, proj);

                roomScene->Render(&prod, 1, 1, 1, 1, true);

                resBar = CD3DX12_RESOURCE_BARRIER::Transition(pEyeRenderTexture[eye]->GetD3DResource(),
                                                              D3D12_RESOURCE_STATE_RENDER_TARGET,
                                                              D3D12_RESOURCE_STATE_PIXEL_SHADER_RESOURCE);
                DIRECTX.CurrentFrameResources().CommandLists[DIRECTX.ActiveContext]->ResourceBarrier(1, &resBar);

                // Commit rendering to the swap chain
                pEyeRenderTexture[eye]->Commit();

                // kick off eye render command lists before ovr_SubmitFrame()
                DIRECTX.SubmitCommandList(DIRECTX.ActiveContext);
            }

            // Initialize our single full screen Fov layer.
            ovrLayerEyeFov ld = {};
            ld.Header.Type = ovrLayerType_EyeFov;
            ld.Header.Flags = 0;

            for (int eye = 0; eye < 2; ++eye)
            {
                ld.ColorTexture[eye] = pEyeRenderTexture[eye]->TextureChain;
                ld.Viewport[eye] = eyeRenderViewport[eye];
                ld.Fov[eye] = hmdDesc.DefaultEyeFov[eye];
                ld.RenderPose[eye] = EyeRenderPose[eye];
                ld.SensorSampleTime = sensorSampleTime;
            }

            ovrLayerHeader* layers = &ld.Header;
            result = ovr_SubmitFrame(session, frameIndex, nullptr, &layers, 1);
            // exit the rendering loop if submit returns an error, will retry on ovrError_DisplayLost
            if (!OVR_SUCCESS(result))
                goto Done;
            
            frameIndex++;
        }
        
        if (drawMirror)
        {
            DIRECTX.SetActiveContext(DrawContext_Final);

            DIRECTX.SetViewport(0.0f, 0.0f, (float)hmdDesc.Resolution.w / 2, (float)hmdDesc.Resolution.h / 2);

            // Render mirror
            ID3D12Resource* mirrorTexRes = nullptr;
            ovr_GetMirrorTextureBufferDX(session, mirrorTexture, IID_PPV_ARGS(&mirrorTexRes));

            //DIRECTX.SetAndClearRenderTarget(DIRECTX.CurrentFrameResources().SwapChainRtvHandle, nullptr, 1.0f, 0.5f, 0.0f, 1.0f);

            CD3DX12_RESOURCE_BARRIER preMirrorBlitBar[] =
            {
                CD3DX12_RESOURCE_BARRIER::Transition(DIRECTX.CurrentFrameResources().SwapChainBuffer, D3D12_RESOURCE_STATE_RENDER_TARGET, D3D12_RESOURCE_STATE_COPY_DEST),
                CD3DX12_RESOURCE_BARRIER::Transition(mirrorTexRes, D3D12_RESOURCE_STATE_RENDER_TARGET, D3D12_RESOURCE_STATE_COPY_SOURCE)
            };

            // Indicate that the back buffer will now be copied into
            DIRECTX.CurrentFrameResources().CommandLists[DIRECTX.ActiveContext]->ResourceBarrier(ARRAYSIZE(preMirrorBlitBar), preMirrorBlitBar);

            DIRECTX.CurrentFrameResources().CommandLists[DIRECTX.ActiveContext]->CopyResource(DIRECTX.CurrentFrameResources().SwapChainBuffer, mirrorTexRes);

            CD3DX12_RESOURCE_BARRIER resBar = CD3DX12_RESOURCE_BARRIER::Transition(mirrorTexRes,
                                                                                   D3D12_RESOURCE_STATE_COPY_SOURCE,
                                                                                   D3D12_RESOURCE_STATE_RENDER_TARGET);
            DIRECTX.CurrentFrameResources().CommandLists[DIRECTX.ActiveContext]->ResourceBarrier(1, &resBar);
        }

        DIRECTX.SubmitCommandListAndPresent(drawMirror);
    }

    // Release resources
Done:
    delete mainCam;
    delete roomScene;
    if (mirrorTexture)
        ovr_DestroyMirrorTexture(session, mirrorTexture);

    for (int eye = 0; eye < 2; ++eye)
    {
        delete pEyeRenderTexture[eye];
    }
    DIRECTX.ReleaseDevice();
    ovr_Destroy(session);

    // Retry on ovrError_DisplayLost
    return retryCreate || (result == ovrError_DisplayLost);
}
Example #21
void TransformTool::Update(float dt)
{
	HydraManager* hydra = InputSystem::get()->getHydra();

	mPreviousHydraPositions[0] = mHydraPositions[0];
	mPreviousHydraPositions[1] = mHydraPositions[1];

	mHydraPositions[0] = hydra->getPointerPosition(0);
	mHydraPositions[1] = hydra->getPointerPosition(1);

	XMVECTOR hydraAveragePosition = (mHydraPositions[0] + mHydraPositions[1]) * 0.5f;

	static const int bumperBit = 1 << 7;

	if (mpTargetTransform != NULL && (hydra->getButtons(0) & bumperBit) && (hydra->getButtons(1) & bumperBit)) //Rotation
	{
		XMVECTOR oldOrientationVector = XMVector3Normalize(mPreviousHydraPositions[0] - mPreviousHydraPositions[1]);
		XMVECTOR orientationVector = XMVector3Normalize(mHydraPositions[0] - mHydraPositions[1]);

		if (!XMVector3Equal(oldOrientationVector, orientationVector))
		{
			XMVECTOR rotationAxis = XMVector3Normalize(XMVector3Cross(oldOrientationVector, orientationVector));
			float rotationAmount = XMVectorGetX(XMVector3Dot(oldOrientationVector, orientationVector));

			//Avoid undefined values from acosf
			if (rotationAmount > 1.0) 
			{
				rotationAmount = 1.0;
			}
			else if (rotationAmount < -1.0)
			{
				rotationAmount = -1.0;
			}

			rotationAmount = acosf(rotationAmount);
			
			XMVECTOR rotationQuaternion = XMQuaternionRotationAxis(rotationAxis, rotationAmount);

			//Handle rotations around the grab point
			XMVECTOR newTranslation = mpTargetTransform->getTranslation();
			XMVECTOR translationOffset = newTranslation - hydraAveragePosition;//From point between two controllers to origin of translation
			newTranslation = hydraAveragePosition + XMVector3Rotate(translationOffset, rotationQuaternion); 

			mpTargetTransform->setTranslation(newTranslation);
			mpTargetTransform->rotate(rotationQuaternion);
		}
	}
	else if (mpTargetTransform != NULL && (hydra->getButtons(1) & bumperBit)) //Translation
	{
		XMVECTOR offsetVector =  mHydraPositions[1] - mPreviousHydraPositions[1];

		mpTargetTransform->translate(offsetVector);
	}
	else if (mpTargetTransform != NULL && (hydra->getButtons(0) & bumperBit)) //Scaling
	{
		float oldDistance = XMVectorGetX(XMVector3Length(mPreviousHydraPositions[0] - mPreviousHydraPositions[1]));
		float newDistance = XMVectorGetX(XMVector3Length(mHydraPositions[0] - mHydraPositions[1]));

		float ratio =  newDistance / oldDistance;

		XMVECTOR newTranslation = mpTargetTransform->getTranslation();
		XMVECTOR translationOffset = newTranslation - hydraAveragePosition;//From point between two controllers to origin of translation
		newTranslation = hydraAveragePosition + translationOffset * ratio;

		mpTargetTransform->setTranslation(newTranslation);
		mpTargetTransform->scale(ratio);
	}
}
Example #22
// return true to retry later (e.g. after display lost)
static bool MainLoop(bool retryCreate)
{
    // Initialize these to nullptr here to handle device lost failures cleanly
	ovrTexture     * mirrorTexture = nullptr;
	OculusTexture  * pEyeRenderTexture[2] = { nullptr, nullptr };
	DepthBuffer    * pEyeDepthBuffer[2] = { nullptr, nullptr };
    Scene          * roomScene = nullptr; 
    Camera         * mainCam = nullptr;
	D3D11_TEXTURE2D_DESC td = {};

	ovrHmd HMD;
	ovrGraphicsLuid luid;
	ovrResult result = ovr_Create(&HMD, &luid);
    if (!OVR_SUCCESS(result))
        return retryCreate;

    ovrHmdDesc hmdDesc = ovr_GetHmdDesc(HMD);

	// -------------------------------------------------------------------
	// Add: Make Instance that CL Eye Camera Capture Class
	CLEyeCameraCapture* cam[2] = { NULL };

	// Query for number of connected camera
	int numCams = CLEyeGetCameraCount();
	if (numCams == 0)
	{
		printf_s("No PS3Eye Camera detected\n");
		goto Done;
	}
	printf_s("Found %d cameras\n", numCams);
	if (numCams > 2)
		numCams = 2;	// cam[] only has room for two capture objects

	for (int iCam = 0; iCam < numCams; iCam++)
	{
		char windowName[64];

		// Query unique camera uuid
		GUID guid = CLEyeGetCameraUUID(iCam);
		printf("Camera %d GUID: [%08x-%04x-%04x-%02x%02x-%02x%02x%02x%02x%02x%02x]\n",
			iCam + 1, guid.Data1, guid.Data2, guid.Data3,
			guid.Data4[0], guid.Data4[1], guid.Data4[2],
			guid.Data4[3], guid.Data4[4], guid.Data4[5],
			guid.Data4[6], guid.Data4[7]);
		sprintf_s(windowName, "Camera Window %d", iCam + 1);

		// Create camera capture object
		cam[iCam] = new CLEyeCameraCapture(windowName, guid, CLEYE_COLOR_RAW, CLEYE_VGA, 30);
		cam[iCam]->StartCapture();
	}
	// -------------------------------------------------------------------

	// Setup Device and Graphics
	// Note: the mirror window can be any size, for this sample we use 1/2 the HMD resolution
    if (!DIRECTX.InitDevice(hmdDesc.Resolution.w / 2, hmdDesc.Resolution.h / 2, reinterpret_cast<LUID*>(&luid)))
        goto Done;

	// Make the eye render buffers (caution if actual size < requested due to HW limits). 
	ovrRecti         eyeRenderViewport[2];

	for (int eye = 0; eye < 2; ++eye)
	{
		ovrSizei idealSize = ovr_GetFovTextureSize(HMD, (ovrEyeType)eye, hmdDesc.DefaultEyeFov[eye], 1.0f);
		pEyeRenderTexture[eye] = new OculusTexture();
        if (!pEyeRenderTexture[eye]->Init(HMD, idealSize.w, idealSize.h))
        {
            if (retryCreate) goto Done;
	        VALIDATE(OVR_SUCCESS(result), "Failed to create eye texture.");
        }
		pEyeDepthBuffer[eye] = new DepthBuffer(DIRECTX.Device, idealSize.w, idealSize.h);
		eyeRenderViewport[eye].Pos.x = 0;
		eyeRenderViewport[eye].Pos.y = 0;
		eyeRenderViewport[eye].Size = idealSize;
        if (!pEyeRenderTexture[eye]->TextureSet)
        {
            if (retryCreate) goto Done;
            VALIDATE(false, "Failed to create texture.");
        }
	}

	// Create a mirror to see on the monitor.
	td.ArraySize = 1;
    td.Format = DXGI_FORMAT_R8G8B8A8_UNORM_SRGB;
	td.Width = DIRECTX.WinSizeW;
	td.Height = DIRECTX.WinSizeH;
	td.Usage = D3D11_USAGE_DEFAULT;
	td.SampleDesc.Count = 1;
	td.MipLevels = 1;
    result = ovr_CreateMirrorTextureD3D11(HMD, DIRECTX.Device, &td, 0, &mirrorTexture);
    if (!OVR_SUCCESS(result))
    {
        if (retryCreate) goto Done;
        VALIDATE(false, "Failed to create mirror texture.");
    }

	// Create the room model
    roomScene = new Scene(false);

	// Create camera
    mainCam = new Camera(&XMVectorSet(0.0f, 1.6f, 5.0f, 0), &XMQuaternionIdentity());

	// Setup VR components, filling out description
	ovrEyeRenderDesc eyeRenderDesc[2];
	eyeRenderDesc[0] = ovr_GetRenderDesc(HMD, ovrEye_Left, hmdDesc.DefaultEyeFov[0]);
	eyeRenderDesc[1] = ovr_GetRenderDesc(HMD, ovrEye_Right, hmdDesc.DefaultEyeFov[1]);

    bool isVisible = true;

	DCB portConfig;
	portConfig.BaudRate = 115200;
	portConfig.Parity = EVENPARITY;

	g_seriPort.Start("\\\\.\\COM3", &portConfig);


	// Main loop
	while (DIRECTX.HandleMessages())
	{
		XMVECTOR forward = XMVector3Rotate(XMVectorSet(0, 0, -0.05f, 0), mainCam->Rot);
		XMVECTOR right   = XMVector3Rotate(XMVectorSet(0.05f, 0, 0, 0),  mainCam->Rot);
		if (DIRECTX.Key['W'] || DIRECTX.Key[VK_UP])	  mainCam->Pos = XMVectorAdd(mainCam->Pos, forward);
		if (DIRECTX.Key['S'] || DIRECTX.Key[VK_DOWN]) mainCam->Pos = XMVectorSubtract(mainCam->Pos, forward);
		if (DIRECTX.Key['D'])                         mainCam->Pos = XMVectorAdd(mainCam->Pos, right);
		if (DIRECTX.Key['A'])                         mainCam->Pos = XMVectorSubtract(mainCam->Pos, right);
		static float Yaw = 0;
		if (DIRECTX.Key[VK_LEFT])  mainCam->Rot = XMQuaternionRotationRollPitchYaw(0, Yaw += 0.02f, 0);
		if (DIRECTX.Key[VK_RIGHT]) mainCam->Rot = XMQuaternionRotationRollPitchYaw(0, Yaw -= 0.02f, 0);

		// Animate the cube
		static float cubeClock = 0;
		roomScene->Models[0]->Pos = XMFLOAT3(9 * sin(cubeClock), 3, 9 * cos(cubeClock += 0.015f));

		// Get both eye poses simultaneously, with IPD offset already included. 
		ovrPosef         EyeRenderPose[2];
		ovrVector3f      HmdToEyeViewOffset[2] = { eyeRenderDesc[0].HmdToEyeViewOffset,
			                                       eyeRenderDesc[1].HmdToEyeViewOffset };
        double frameTime = ovr_GetPredictedDisplayTime(HMD, 0);
        // Keeping sensorSampleTime as close to ovr_GetTrackingState as possible - fed into the layer
        double           sensorSampleTime = ovr_GetTimeInSeconds();
		ovrTrackingState hmdState = ovr_GetTrackingState(HMD, frameTime, ovrTrue);
		ovr_CalcEyePoses(hmdState.HeadPose.ThePose, HmdToEyeViewOffset, EyeRenderPose);

		// --------------------------------------------------------------------------
		// Add: Get Head Yaw Roll Pitch
		float hmdPitch = 0.0f;
		float hmdRoll = 0.0f;
		float hmdYaw = 0.0f;

		OVR::Posef HeadPose = hmdState.HeadPose.ThePose;
		HeadPose.Rotation.GetEulerAngles<OVR::Axis_Y, OVR::Axis_X, OVR::Axis_Z>(&hmdYaw, &hmdPitch, &hmdRoll);

		SetPos(2, ServoRoll(hmdYaw));
		SetPos(3, ServoRoll(hmdPitch));

		// --------------------------------------------------------------------------


		// Render Scene to Eye Buffers
        if (isVisible)
        {
            for (int eye = 0; eye < 2; ++eye)
		    {
			    // Increment to use next texture, just before writing
			    pEyeRenderTexture[eye]->AdvanceToNextTexture();

			    // Clear and set up rendertarget
			    int texIndex = pEyeRenderTexture[eye]->TextureSet->CurrentIndex;
			    DIRECTX.SetAndClearRenderTarget(pEyeRenderTexture[eye]->TexRtv[texIndex], pEyeDepthBuffer[eye]);
			    DIRECTX.SetViewport((float)eyeRenderViewport[eye].Pos.x, (float)eyeRenderViewport[eye].Pos.y,
				    (float)eyeRenderViewport[eye].Size.w, (float)eyeRenderViewport[eye].Size.h);

			    //Get the pose information in XM format
			    XMVECTOR eyeQuat = XMVectorSet(EyeRenderPose[eye].Orientation.x, EyeRenderPose[eye].Orientation.y,
				                               EyeRenderPose[eye].Orientation.z, EyeRenderPose[eye].Orientation.w);
			    XMVECTOR eyePos = XMVectorSet(EyeRenderPose[eye].Position.x, EyeRenderPose[eye].Position.y, EyeRenderPose[eye].Position.z, 0);

			    // Get view and projection matrices for the Rift camera
			    XMVECTOR CombinedPos = XMVectorAdd(mainCam->Pos, XMVector3Rotate(eyePos, mainCam->Rot));
			    Camera finalCam(&CombinedPos, &(XMQuaternionMultiply(eyeQuat,mainCam->Rot)));
			    XMMATRIX view = finalCam.GetViewMatrix();
			    ovrMatrix4f p = ovrMatrix4f_Projection(eyeRenderDesc[eye].Fov, 0.2f, 1000.0f, ovrProjection_RightHanded);
			    XMMATRIX proj = XMMatrixSet(p.M[0][0], p.M[1][0], p.M[2][0], p.M[3][0],
				                            p.M[0][1], p.M[1][1], p.M[2][1], p.M[3][1],
				                            p.M[0][2], p.M[1][2], p.M[2][2], p.M[3][2],
				                            p.M[0][3], p.M[1][3], p.M[2][3], p.M[3][3]);
			    XMMATRIX prod = XMMatrixMultiply(view, proj);
			    roomScene->Render(&prod, 1, 1, 1, 1, true);
		    }
        }

		// Initialize our single full screen Fov layer.
        ovrLayerEyeFov ld = {};
		ld.Header.Type = ovrLayerType_EyeFov;
		ld.Header.Flags = 0;

		for (int eye = 0; eye < 2; ++eye)
		{
			ld.ColorTexture[eye] = pEyeRenderTexture[eye]->TextureSet;
			ld.Viewport[eye] = eyeRenderViewport[eye];
			ld.Fov[eye] = hmdDesc.DefaultEyeFov[eye];
			ld.RenderPose[eye] = EyeRenderPose[eye];
            ld.SensorSampleTime = sensorSampleTime;
		}

        ovrLayerHeader* layers = &ld.Header;
        result = ovr_SubmitFrame(HMD, 0, nullptr, &layers, 1);
        // exit the rendering loop if submit returns an error, will retry on ovrError_DisplayLost
        if (!OVR_SUCCESS(result))
            goto Done;

        isVisible = (result == ovrSuccess);

        // Render mirror
        ovrD3D11Texture* tex = (ovrD3D11Texture*)mirrorTexture;
        DIRECTX.Context->CopyResource(DIRECTX.BackBuffer, tex->D3D11.pTexture);
        DIRECTX.SwapChain->Present(0, 0);
	}

	// Release resources
Done:
    delete mainCam;
    delete roomScene;
	if (mirrorTexture) ovr_DestroyMirrorTexture(HMD, mirrorTexture);
    for (int eye = 0; eye < 2; ++eye)
    {
	    delete pEyeRenderTexture[eye];
        delete pEyeDepthBuffer[eye];
    }
	DIRECTX.ReleaseDevice();
	ovr_Destroy(HMD);

	g_seriPort.End();

	for (int iCam = 0; iCam < numCams; iCam++)
	{
		cam[iCam]->StopCapture();
		delete cam[iCam];
	}

    // Retry on ovrError_DisplayLost
    return retryCreate || OVR_SUCCESS(result) || (result == ovrError_DisplayLost);
}
Example #23
void Game::Update()
{
	m_pCube->Update();
	XMVECTOR cameraPos = XMLoadFloat3(&m_pCamera->GetCameraPos());
	/*cameraPos += XMVectorSet(0.0001f,0.0f,0.0f,0.0f);
	XMFLOAT3 newCameraPos;
	XMStoreFloat3(&newCameraPos,cameraPos);*/
	//cameraPos * XMMatrixRotationY(XM_PIDIV2/9.0f);

	//Camera Rotation code
	/*XMFLOAT3 newCameraPos;
	cameraPos = XMVector3Rotate(cameraPos,XMQuaternionRotationMatrix(XMMatrixRotationY(-(XM_PIDIV2/9000.0f))));
	XMStoreFloat3(&newCameraPos,cameraPos);
	m_pCamera->SetCameraPos(newCameraPos);
	m_pCamera->UpdateViewMatrix();*/

	//keyboard input test code
	//bool pressed = ENGINE->GetInputManager()->IsKeyDown(VK_UP);

	//cout << pressed;
	XMFLOAT3 newCameraPos;
	if(ENGINE->GetInputManager()->IsKeyDown(VK_UP))
	{
		cameraPos = XMVector3Rotate(cameraPos,XMQuaternionRotationMatrix(XMMatrixRotationX((XM_PIDIV2/9000.0f))));
	}
	if(ENGINE->GetInputManager()->IsKeyDown(VK_DOWN))
	{
		cameraPos = XMVector3Rotate(cameraPos,XMQuaternionRotationMatrix(XMMatrixRotationX(-(XM_PIDIV2/9000.0f))));
	}
	if(ENGINE->GetInputManager()->IsKeyDown(VK_LEFT))
	{
		cameraPos = XMVector3Rotate(cameraPos,XMQuaternionRotationMatrix(XMMatrixRotationY((XM_PIDIV2/9000.0f))));
	}
	//if(ENGINE->GetInputManager()->IsKeyDown(VK_RIGHT))
	if(ENGINE->GetInputManager()->IsCommandDown(L"RIGHT"))
	{
		cameraPos = XMVector3Rotate(cameraPos,XMQuaternionRotationMatrix(XMMatrixRotationY(-(XM_PIDIV2/9000.0f))));
	}
	if(ENGINE->GetInputManager()->IsKeyDown(0x57))//W
	{
		cameraPos = XMVector3Transform(cameraPos,XMMatrixTranslation(0,0,0.01f));
	}
	if(ENGINE->GetInputManager()->IsKeyDown(0x53))//S
	{
		cameraPos = XMVector3Transform(cameraPos,XMMatrixTranslation(0,0,-0.01f));
	}
	if(ENGINE->GetInputManager()->IsKeyDown(0x41))//A
	{
		cameraPos = XMVector3Transform(cameraPos,XMMatrixTranslation(-0.01f,0,0));
	}
	if(ENGINE->GetInputManager()->IsKeyDown(0x44))//D
	{
		cameraPos = XMVector3Transform(cameraPos,XMMatrixTranslation(0.01f,0,0));
	}
	if(ENGINE->GetInputManager()->IsKeyDown(0x51))//Q
	{
		cameraPos = XMVector3Transform(cameraPos,XMMatrixTranslation(0,-0.01f,0));
	}
	if(ENGINE->GetInputManager()->IsKeyDown(0x45))//E
	{
		cameraPos = XMVector3Transform(cameraPos,XMMatrixTranslation(0,0.01f,0));
	}
	
	XMStoreFloat3(&newCameraPos,cameraPos);
	m_pCamera->SetCameraPos(newCameraPos);
	m_pCamera->UpdateViewMatrix();
}
Example #24
void BasicPlane::Update(float deltaTime)
{
	XMVECTOR forward = XMVector3Rotate(XMVectorSet(0, 0, 1, 0), XMLoadFloat4(&gameObject->transform.rotation));
	XMVECTOR right = XMVector3Rotate(XMVectorSet(1, 0, 0, 0), XMLoadFloat4(&gameObject->transform.rotation));
	XMVECTOR up = XMVector3Rotate(XMVectorSet(0, 1, 0, 0), XMLoadFloat4(&gameObject->transform.rotation));
	Vector3 velocityDirection = particleModel->velocity;
	velocityDirection.Normalize();
	Vector3 velocityForwards = forward * particleModel->velocity.Dot(forward);
	//thrust
	const float maxPlaneThrust = 30 * particleModel->mass;
	const float thrustChangeSpeed = 5 * particleModel->mass;
	static float planeThrust = maxPlaneThrust * 0.0;
	if (GetAsyncKeyState(VK_HOME))
	{
		planeThrust += thrustChangeSpeed * deltaTime;
	}
	if (GetAsyncKeyState(VK_DELETE))
	{
		planeThrust -= thrustChangeSpeed * deltaTime;
	}
	planeThrust = fmax(0.0f, fmin(maxPlaneThrust, planeThrust));
	particleModel->AddForce(forward * planeThrust);
	//lift and drag
	float lift;
	float percentVelocityForwards = particleModel->velocity.Length() < 1e-4 ? 0 : (velocityForwards.Length() / particleModel->velocity.Length());
	lift = 100 * particleModel->velocity.LengthSquared() * (percentVelocityForwards);
	if (planeThrust > 0.25 * maxPlaneThrust)
	{
		planeThrust -= thrustChangeSpeed * 0.5 * deltaTime;
	}
	//float density = 0.5;
	//float area = 10;
	//float liftCoefficient = 0.8f;
	//lift = 0.5 * density * particleModel->velocity.LengthSquared() * area * liftCoefficient;
	particleModel->AddForce(up * lift);
	float drag;
	drag = 300 * particleModel->velocity.LengthSquared() * (percentVelocityForwards * 0.2 + (1.0 - percentVelocityForwards) * 1.0);
	//float dragCoefficient = 0.1f;
	//drag = dragCoefficient * area * density * (particleModel->velocity.LengthSquared() / 2);
	particleModel->AddForce(-velocityDirection * drag);
	//rotate
	const float planeRotateSpeed = XMConvertToRadians(20 * deltaTime);
	XMVECTOR rotate = XMQuaternionIdentity();
	if (GetAsyncKeyState(VK_RIGHT))
	{
		rotate = XMQuaternionMultiply(rotate, XMQuaternionRotationNormal(forward, -planeRotateSpeed));
	}
	else if (GetAsyncKeyState(VK_LEFT))
	{
		rotate = XMQuaternionMultiply(rotate, XMQuaternionRotationNormal(forward, planeRotateSpeed));
	}
	if (GetAsyncKeyState(VK_UP))
	{
		rotate = XMQuaternionMultiply(rotate, XMQuaternionRotationNormal(right, -planeRotateSpeed));
	}
	else if (GetAsyncKeyState(VK_DOWN))
	{
		rotate = XMQuaternionMultiply(rotate, XMQuaternionRotationNormal(right, planeRotateSpeed));
	}
	gameObject->transform.rotation *= rotate;
}
Example #25
XMVECTOR HydraManager::getPointerPosition(int controllerIndex) const
{
	XMVECTOR position = getPosition(controllerIndex);

	return position + XMVector3Rotate(XMVectorSet(0.0f, 0.0f, mPointerDistance, 0.0f), getRotation(controllerIndex)); 
}
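
The same local-to-world pattern, rotate a local-space offset by an orientation quaternion and add the base position, recurs throughout these examples; a standalone sketch with hypothetical names:

XMVECTOR LocalToWorld(FXMVECTOR basePosition, FXMVECTOR orientation, FXMVECTOR localOffset)
{
	//World position = base position + (local offset rotated into world space)
	return XMVectorAdd(basePosition, XMVector3Rotate(localOffset, orientation));
}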