// Brute-force ray trace: tests the segment [start, end] against every
// triangle in the mesh, with no acceleration structure.  Primarily useful
// as a reference / validation path for the kd-tree based Trace().
//
// start, end - segment endpoints in model space.
//
// Returns a traceResult_t where:
//   triangleIndex - first index of the hit triangle, or -1 on a miss.
//   fraction      - parametric hit distance along [start, end] (1.0 on a miss).
//   uv            - interpolated texture coordinates at the hit point.
//   normal        - normalized geometric normal of the hit triangle.
traceResult_t RtTrace::Trace_Exhaustive( const Vector3f & start, const Vector3f & end ) const
{
	traceResult_t result;
	result.triangleIndex = -1;
	result.fraction = 1.0f;
	result.uv = Vector2f( 0.0f );
	result.normal = Vector3f( 0.0f );

	const Vector3f rayDelta = end - start;
	const float rayLengthSqr = rayDelta.LengthSq();

	// A zero-length segment has no direction: RcpSqrt( 0 ) would produce
	// inf/NaN and poison every distance comparison below, so report a
	// clean miss instead.
	if ( rayLengthSqr <= 0.0f )
	{
		return result;
	}

	const float rayLengthRcp = RcpSqrt( rayLengthSqr );
	const float rayLength = rayLengthSqr * rayLengthRcp;	// == sqrt( rayLengthSqr )
	const Vector3f rayStart = start;
	const Vector3f rayDir = rayDelta * rayLengthRcp;

	float bestDistance = rayLength;	// only accept hits within the segment
	Vector2f uv;

	// Test every triangle and keep the nearest hit in front of the start point.
	for ( int i = 0; i < header.numIndices; i += 3 )
	{
		float distance;
		float u;
		float v;

		if ( RtIntersect::RayTriangle( rayStart, rayDir,
								vertices[indices[i + 0]],
								vertices[indices[i + 1]],
								vertices[indices[i + 2]], distance, u, v ) )
		{
			if ( distance >= 0.0f && distance < bestDistance )
			{
				bestDistance = distance;
				result.triangleIndex = i;
				uv.x = u;
				uv.y = v;
			}
		}
	}

	if ( result.triangleIndex != -1 )
	{
		result.fraction = bestDistance * rayLengthRcp;
		// Barycentric interpolation of the vertex texture coordinates.
		result.uv = uvs[indices[result.triangleIndex + 0]] * ( 1.0f - uv.x - uv.y ) +
					uvs[indices[result.triangleIndex + 1]] * uv.x +
					uvs[indices[result.triangleIndex + 2]] * uv.y;
		// Geometric (face) normal from the triangle edge cross product.
		const Vector3f d1 = vertices[indices[result.triangleIndex + 1]] - vertices[indices[result.triangleIndex + 0]];
		const Vector3f d2 = vertices[indices[result.triangleIndex + 2]] - vertices[indices[result.triangleIndex + 0]];
		result.normal = d1.Cross( d2 ).Normalized();
	}

	return result;
}
// Updates the view matrix for this frame from gamepad input and the tracked
// head pose: applies stick-driven yaw/pitch, moves FootPos with a collision
// slide-move, applies either the synthetic head model or real position
// tracking, and rebuilds ViewMatrix.
// NOTE(review): vrFrame is taken by value (const VrFrame) — a const reference
// would avoid the copy, but the declaration in the header would have to
// change in step; confirm before touching the signature.
void OvrSceneView::UpdateViewMatrix(const VrFrame vrFrame )
{
	// Experiments with position tracking:
	// holding A or X (or disallowing position tracking entirely) selects the
	// synthetic head-model offset instead of the tracked head position.
	const bool useHeadModel = !AllowPositionTracking ||
			( ( vrFrame.Input.buttonState & ( BUTTON_A | BUTTON_X ) ) == 0 );

	// Delta time in seconds since last frame.
	const float dt = vrFrame.DeltaSeconds;
	const float yawSpeed = 1.5f;	// radians/sec of yaw at full stick deflection

	Vector3f GamepadMove;

	// Allow up / down movement if there is no floor collision model
	// (right trigger redirects the left stick's vertical axis to height).
	if ( vrFrame.Input.buttonState & BUTTON_RIGHT_TRIGGER )
	{
		FootPos.y -= vrFrame.Input.sticks[0][1] * dt * MoveSpeed;
	}
	else
	{
		GamepadMove.z = vrFrame.Input.sticks[0][1];
	}
	GamepadMove.x = vrFrame.Input.sticks[0][0];

	// Turn based on the look stick
	// Because this can be predicted ahead by async TimeWarp, we apply
	// the yaw from the previous frame's controls, trading a frame of
	// latency on stick controls to avoid a bounce-back.
	YawOffset -= YawVelocity * dt;

	if ( !( vrFrame.OvrStatus & ovrStatus_OrientationTracked ) )
	{
		// No orientation tracking: let the right stick drive pitch as well.
		PitchOffset -= yawSpeed * vrFrame.Input.sticks[1][1] * dt;
		YawVelocity = yawSpeed * vrFrame.Input.sticks[1][0];
	}
	else
	{
		YawVelocity = 0.0f;
	}

	// We extract Yaw, Pitch, Roll instead of directly using the orientation
	// to allow "additional" yaw manipulation with mouse/controller.
	const Quatf quat = vrFrame.PoseState.Pose.Orientation;
	quat.GetEulerAngles<Axis_Y, Axis_X, Axis_Z>( &EyeYaw, &EyePitch, &EyeRoll );
	EyeYaw += YawOffset;

	// If the sensor isn't plugged in, allow right stick up/down
	// to adjust pitch, which can be useful for debugging. Never
	// do this when head tracking
	if ( !( vrFrame.OvrStatus & ovrStatus_OrientationTracked ) )
	{
		EyePitch += PitchOffset;
	}

	// Perform player movement.
	if ( GamepadMove.LengthSq() > 0.0f )
	{
		// Rotate the stick vector into world space around the current yaw.
		const Matrix4f yawRotate = Matrix4f::RotationY( EyeYaw );
		const Vector3f orientationVector = yawRotate.Transform( GamepadMove );

		// Don't let move get too crazy fast
		const float moveDistance = OVR::Alg::Min<float>( MoveSpeed * (float)dt, 1.0f );

		if ( WorldModel.Definition )
		{
			FootPos = SlideMove( FootPos, ViewParms.EyeHeight, orientationVector, moveDistance,
					WorldModel.Definition->Collisions, WorldModel.Definition->GroundCollisions );
		}
		else
		{	// no scene loaded, walk without any collisions
			CollisionModel collisionModel;
			CollisionModel groundCollisionModel;
			FootPos = SlideMove( FootPos, ViewParms.EyeHeight, orientationVector, moveDistance,
					collisionModel, groundCollisionModel );
		}
	}

	// Rotate and position View Camera, using YawPitchRoll in BodyFrame coordinates.
	Matrix4f rollPitchYaw = Matrix4f::RotationY( EyeYaw )
			* Matrix4f::RotationX( EyePitch )
			* Matrix4f::RotationZ( EyeRoll );
	const Vector3f up = rollPitchYaw.Transform( UpVector );
	const Vector3f forward = rollPitchYaw.Transform( ForwardVector );
	const Vector3f right = rollPitchYaw.Transform( RightVector );

	// Have sensorFusion zero the integration when not using it, so the
	// first frame is correct.
	// Latch the last head-model offset on the button edge so toggling over to
	// real position tracking starts from the same eye position (no pop).
	if ( vrFrame.Input.buttonPressed & (BUTTON_A | BUTTON_X) )
	{
		LatchedHeadModelOffset = LastHeadModelOffset;
	}

	// Calculate the shiftedEyePos
	ShiftedEyePos = CenterEyePos();

	Vector3f headModelOffset = HeadModelOffset( EyeRoll, EyePitch, EyeYaw,
			ViewParms.HeadModelDepth, ViewParms.HeadModelHeight );
	if ( useHeadModel )
	{
		ShiftedEyePos += headModelOffset;
	}

	// Fold the IMU-to-eye-center correction into the offset remembered for
	// the next latch.
	headModelOffset += forward * ImuToEyeCenter.z;
	headModelOffset += right * ImuToEyeCenter.x;
	LastHeadModelOffset = headModelOffset;

	if ( !useHeadModel )
	{
		// Use position tracking from the sensor system, which is in absolute
		// coordinates without the YawOffset
		ShiftedEyePos += Matrix4f::RotationY( YawOffset ).Transform( vrFrame.PoseState.Pose.Position );
		ShiftedEyePos -= forward * ImuToEyeCenter.z;
		ShiftedEyePos -= right * ImuToEyeCenter.x;
		ShiftedEyePos += LatchedHeadModelOffset;
	}

	ViewMatrix = Matrix4f::LookAtRH( ShiftedEyePos, ShiftedEyePos + forward, up );
}
// Advances the player position for one frame of keyboard / gamepad movement.
//
// dt                    - frame delta time in seconds.
// collisionModels       - wall collision models, ray-tested at body level to block motion.
// groundCollisionModels - terrain models, ray-tested downward to follow the floor.
// shiftDown             - when true, movement speed is tripled ("run").
//
// Motion is projected onto the horizontal plane before collision testing;
// Pitch and Roll only affect the view, never the movement direction.
void Player::HandleMovement(double dt, std::vector<Ptr<CollisionModel> >* collisionModels,
	std::vector<Ptr<CollisionModel> >* groundCollisionModels, bool shiftDown)
{
	// Handle keyboard movement.
	// This translates BasePos based on the orientation and keys pressed.
	// Note that Pitch and Roll do not affect movement (they only affect view).
	Vector3f controllerMove;
	if(MoveForward || MoveBack || MoveLeft || MoveRight)
	{
		// Keyboard input takes priority over the gamepad stick.
		if (MoveForward)
		{
			controllerMove += ForwardVector;
		}
		else if (MoveBack)
		{
			controllerMove -= ForwardVector;
		}

		if (MoveRight)
		{
			controllerMove += RightVector;
		}
		else if (MoveLeft)
		{
			controllerMove -= RightVector;
		}
	}
	else if (GamepadMove.LengthSq() > 0)
	{
		controllerMove = GamepadMove;
	}
	// Rotate the input vector into world space (body- or head-relative yaw).
	controllerMove = GetOrientation(bMotionRelativeToBody).Rotate(controllerMove);
	controllerMove.y = 0; // Project to the horizontal plane
	if (controllerMove.LengthSq() > 0)
	{
		// Normalize vector so we don't move faster diagonally.
		controllerMove.Normalize();
		// Clamp the per-frame step to 1 unit to limit tunneling on long frames.
		controllerMove *= OVR::Alg::Min<float>(MoveSpeed * (float)dt * (shiftDown ? 3.0f : 1.0f), 1.0f);
	}

	// Compute total move direction vector and move length
	Vector3f orientationVector = controllerMove;
	float moveLength = orientationVector.Length();
	if (moveLength > 0)
		orientationVector.Normalize();

	float checkLengthForward = moveLength;
	Planef collisionPlaneForward;
	bool gotCollision = false;

	for(size_t i = 0; i < collisionModels->size(); ++i)
	{
		// Checks for collisions at model base level, which should prevent us from
		// slipping under walls
		if (collisionModels->at(i)->TestRay(BodyPos, orientationVector, checkLengthForward,
				&collisionPlaneForward))
		{
			gotCollision = true;
			break;
		}
	}

	if (gotCollision)
	{
		// Project orientationVector onto the plane
		Vector3f slideVector = orientationVector - collisionPlaneForward.N
			* (orientationVector.Dot(collisionPlaneForward.N));

		// Make sure we aren't in a corner
		for(size_t j = 0; j < collisionModels->size(); ++j)
		{
			if (collisionModels->at(j)->TestPoint(BodyPos - Vector3f(0.0f, RailHeight, 0.0f) +
					(slideVector * (moveLength))) )
			{
				// Sliding would still end inside geometry; cancel the move.
				moveLength = 0;
				break;
			}
		}
		if (moveLength != 0)
		{
			orientationVector = slideVector;
		}
	}

	// Checks for collisions at foot level, which allows us to follow terrain
	orientationVector *= moveLength;
	BodyPos += orientationVector;

	Planef collisionPlaneDown;
	float adjustedUserEyeHeight = GetFloorDistanceFromTrackingOrigin(ovrTrackingOrigin_EyeLevel);
	// +10 gives the downward probe headroom beyond eye height.
	float finalDistanceDown = adjustedUserEyeHeight + 10.0f;

	// Only apply down if there is collision model (otherwise we get jitter).
	if (groundCollisionModels->size())
	{
		for(size_t i = 0; i < groundCollisionModels->size(); ++i)
		{
			float checkLengthDown = adjustedUserEyeHeight + 10;
			if (groundCollisionModels->at(i)->TestRay(BodyPos, Vector3f(0.0f, -1.0f, 0.0f),
					checkLengthDown, &collisionPlaneDown))
			{
				// NOTE(review): this min only helps if TestRay shortens
				// checkLengthDown to the hit distance (reference out-param) —
				// confirm against the CollisionModel declaration.
				finalDistanceDown = Alg::Min(finalDistanceDown, checkLengthDown);
			}
		}

		// Maintain the minimum camera height
		if (adjustedUserEyeHeight - finalDistanceDown < 1.0f)
		{
			BodyPos.y += adjustedUserEyeHeight - finalDistanceDown;
		}
	}

	SetBodyPos(BodyPos, false);
}
// Traces the segment [start, end] against the mesh using a kd-tree with
// ropes: the ray is first clipped against the tree's root bounds, then the
// traversal walks from leaf to adjacent leaf via rope links (no stack),
// testing the triangles stored in each visited leaf.  Returns the closest
// hit, or triangleIndex == -1 / fraction == 1.0 on a miss.
traceResult_t RtTrace::Trace( const Vector3f & start, const Vector3f & end ) const
{
	traceResult_t result;
	result.triangleIndex = -1;
	result.fraction = 1.0f;
	result.uv = Vector2f( 0.0f );
	result.normal = Vector3f( 0.0f );

	const Vector3f rayDelta = end - start;
	const float rayLengthSqr = rayDelta.LengthSq();
	const float rayLengthRcp = RcpSqrt( rayLengthSqr );
	const float rayLength = rayLengthSqr * rayLengthRcp;	// == sqrt( rayLengthSqr )
	const Vector3f rayDir = rayDelta * rayLengthRcp;

	// Guard each reciprocal direction component against (near) zero so the
	// slab tests below stay finite.
	const float rcpRayDirX = ( fabsf( rayDir.x ) > Math<float>::SmallestNonDenormal ) ? ( 1.0f / rayDir.x ) : Math<float>::HugeNumber;
	const float rcpRayDirY = ( fabsf( rayDir.y ) > Math<float>::SmallestNonDenormal ) ? ( 1.0f / rayDir.y ) : Math<float>::HugeNumber;
	const float rcpRayDirZ = ( fabsf( rayDir.z ) > Math<float>::SmallestNonDenormal ) ? ( 1.0f / rayDir.z ) : Math<float>::HugeNumber;

	// Slab test against the bounding box of the whole tree.
	const float sX = ( header.bounds.GetMins()[0] - start.x ) * rcpRayDirX;
	const float sY = ( header.bounds.GetMins()[1] - start.y ) * rcpRayDirY;
	const float sZ = ( header.bounds.GetMins()[2] - start.z ) * rcpRayDirZ;
	const float tX = ( header.bounds.GetMaxs()[0] - start.x ) * rcpRayDirX;
	const float tY = ( header.bounds.GetMaxs()[1] - start.y ) * rcpRayDirY;
	const float tZ = ( header.bounds.GetMaxs()[2] - start.z ) * rcpRayDirZ;

	const float minX = Alg::Min( sX, tX );
	const float minY = Alg::Min( sY, tY );
	const float minZ = Alg::Min( sZ, tZ );
	const float maxX = Alg::Max( sX, tX );
	const float maxY = Alg::Max( sY, tY );
	const float maxZ = Alg::Max( sZ, tZ );

	const float t0 = Alg::Max( minX, Alg::Max( minY, minZ ) );
	const float t1 = Alg::Min( maxX, Alg::Min( maxY, maxZ ) );

	// Ray misses the root bounds entirely.
	if ( t0 >= t1 )
	{
		return result;
	}

	float entryDistance = Alg::Max( t0, 0.0f );
	// Epsilon on the far clip avoids missing hits exactly on the exit face.
	float bestDistance = Alg::Min( t1 + 0.00001f, rayLength );
	Vector2f uv;

	const kdtree_node_t * currentNode = &nodes[0];

	for ( int i = 0; i < RT_KDTREE_MAX_ITERATIONS; i++ )
	{
		const Vector3f rayEntryPoint = start + rayDir * entryDistance;

		// Step down the tree until a leaf node is found.
		// Node encoding: bit 0 = leaf flag, bits 1-2 = split axis,
		// bits 3+ = child/leaf index.
		while ( ( currentNode->data & 1 ) == 0 )
		{
			// Select the child node based on whether the entry point is left or right of the split plane.
			// If the entry point is directly at the split plane then choose the side based on the ray direction.
			const int nodePlane = ( ( currentNode->data >> 1 ) & 3 );
			int child;
			// NOTE(review): the first test uses +0.00001f rather than
			// -0.00001f, so any point up to epsilon *past* the plane still
			// goes to child 0 and the direction-based tie-break below is
			// only reachable when the difference equals the epsilon exactly.
			// Looks like a symmetric band was intended — confirm against the
			// tree builder before changing.
			if ( rayEntryPoint[nodePlane] - currentNode->dist < 0.00001f )
				child = 0;
			else if ( rayEntryPoint[nodePlane] - currentNode->dist > 0.00001f )
				child = 1;
			else
				child = ( rayDelta[nodePlane] > 0.0f );
			currentNode = &nodes[( currentNode->data >> 3 ) + child];
		}

		// Check for an intersection with a triangle in this leaf.
		const kdtree_leaf_t * currentLeaf = &leafs[( currentNode->data >> 3 )];
		const int * leafTriangles = currentLeaf->triangles;
		int leafTriangleCount = RT_KDTREE_MAX_LEAF_TRIANGLES;
		for ( int j = 0; j < leafTriangleCount; j++ )
		{
			int currentTriangle = leafTriangles[j];
			if ( currentTriangle < 0 )
			{
				// -1 terminates the in-leaf list; any other negative value is
				// a tagged offset into the shared overflow triangle array.
				if ( currentTriangle == -1 )
				{
					break;
				}
				const int offset = ( currentTriangle & 0x7FFFFFFF );
				// Switch iteration over to the overflow list and re-read slot 0.
				leafTriangles = &overflow[offset];
				leafTriangleCount = header.numOverflow - offset;
				j = 0;
				currentTriangle = leafTriangles[0];
			}
			float distance;
			float u;
			float v;
			if ( RtIntersect::RayTriangle( start, rayDir,
					vertices[indices[currentTriangle * 3 + 0]],
					vertices[indices[currentTriangle * 3 + 1]],
					vertices[indices[currentTriangle * 3 + 2]], distance, u, v ) )
			{
				if ( distance >= 0.0f && distance < bestDistance )
				{
					bestDistance = distance;
					result.triangleIndex = currentTriangle * 3;
					uv.x = u;
					uv.y = v;
				}
			}
		}

		// Calculate the distance along the ray where the next leaf is entered.
		const float sX = ( currentLeaf->bounds.GetMins()[0] - start.x ) * rcpRayDirX;
		const float sY = ( currentLeaf->bounds.GetMins()[1] - start.y ) * rcpRayDirY;
		const float sZ = ( currentLeaf->bounds.GetMins()[2] - start.z ) * rcpRayDirZ;
		const float tX = ( currentLeaf->bounds.GetMaxs()[0] - start.x ) * rcpRayDirX;
		const float tY = ( currentLeaf->bounds.GetMaxs()[1] - start.y ) * rcpRayDirY;
		const float tZ = ( currentLeaf->bounds.GetMaxs()[2] - start.z ) * rcpRayDirZ;

		const float maxX = Alg::Max( sX, tX );
		const float maxY = Alg::Max( sY, tY );
		const float maxZ = Alg::Max( sZ, tZ );

		entryDistance = Alg::Min( maxX, Alg::Min( maxY, maxZ ) );

		// Any hit found so far is closer than the next leaf: done.
		if ( entryDistance >= bestDistance )
		{
			break;
		}

		// Calculate the exit plane.
		// Encoding: bits 1+ = axis, bit 0 = max (1) or min (0) face.
		const int exitX = ( 0 << 1 ) | ( ( sX < tX ) ? 1 : 0 );
		const int exitY = ( 1 << 1 ) | ( ( sY < tY ) ? 1 : 0 );
		const int exitZ = ( 2 << 1 ) | ( ( sZ < tZ ) ? 1 : 0 );
		const int exitPlane = ( maxX < maxY ) ? ( maxX < maxZ ? exitX : exitZ ) : ( maxY < maxZ ? exitY : exitZ );

		// Use a rope to enter the adjacent leaf.
		const int exitNodeIndex = currentLeaf->ropes[exitPlane];
		// No rope: the ray left the tree.
		if ( exitNodeIndex == -1 )
		{
			break;
		}
		currentNode = &nodes[exitNodeIndex];
	}

	if ( result.triangleIndex != -1 )
	{
		result.fraction = bestDistance * rayLengthRcp;
		// Barycentric interpolation of the vertex texture coordinates.
		result.uv = uvs[indices[result.triangleIndex + 0]] * ( 1.0f - uv.x - uv.y ) +
					uvs[indices[result.triangleIndex + 1]] * uv.x +
					uvs[indices[result.triangleIndex + 2]] * uv.y;
		// Geometric (face) normal from the triangle edge cross product.
		const Vector3f d1 = vertices[indices[result.triangleIndex + 1]] - vertices[indices[result.triangleIndex + 0]];
		const Vector3f d2 = vertices[indices[result.triangleIndex + 2]] - vertices[indices[result.triangleIndex + 0]];
		result.normal = d1.Cross( d2 ).Normalized();
	}

	return result;
}
// Per-frame update of the scene view: advances view angles from stick input
// and the tracked head pose, moves the player with collision handling,
// updates the center-eye transform, and steps model joint animations.
//
// vrFrame                    - this frame's input, tracking, and timing state.
// headModelParms_            - head model parameters, latched into HeadModelParms.
// supressModelsWithClientId_ - client id whose models are suppressed this frame.
void OvrSceneView::Frame( const VrFrame & vrFrame,
		const ovrHeadModelParms & headModelParms_,
		const long long supressModelsWithClientId_ )
{
	HeadModelParms = headModelParms_;
	SupressModelsWithClientId = supressModelsWithClientId_;
	CurrentTracking = vrFrame.Tracking;

	// Delta time in seconds since last frame.
	const float dt = vrFrame.DeltaSeconds;
	const float angleSpeed = 1.5f;	// radians/sec at full stick deflection

	//
	// Player view angles
	//

	// Turn based on the look stick
	// Because this can be predicted ahead by async TimeWarp, we apply
	// the yaw from the previous frame's controls, trading a frame of
	// latency on stick controls to avoid a bounce-back.
	StickYaw -= YawVelocity * dt;
	YawVelocity = angleSpeed * vrFrame.Input.sticks[1][0];

	// Only if there is no head tracking, allow right stick up/down to adjust pitch,
	// which can be useful for debugging without having to dock the device.
	if ( ( vrFrame.Tracking.Status & VRAPI_TRACKING_STATUS_ORIENTATION_TRACKED ) == 0 )
	{
		StickPitch -= angleSpeed * vrFrame.Input.sticks[1][1] * dt;
	}
	else
	{
		StickPitch = 0.0f;
	}

	// We extract Yaw, Pitch, Roll instead of directly using the orientation
	// to allow "additional" yaw manipulation with mouse/controller and scene offsets.
	const Quatf quat = vrFrame.Tracking.HeadPose.Pose.Orientation;
	quat.GetEulerAngles<Axis_Y, Axis_X, Axis_Z>( &EyeYaw, &EyePitch, &EyeRoll );

	// Yaw is modified by both joystick and application-set scene yaw.
	// Pitch is only modified by joystick when no head tracking sensor is active.
	EyeYaw += StickYaw + SceneYaw;
	EyePitch += StickPitch;

	//
	// Player movement
	//

	// Allow up / down movement if there is no floor collision model or in 'free move' mode.
	const bool upDown = ( WorldModel.Definition == NULL || FreeMove ) &&
			( ( vrFrame.Input.buttonState & BUTTON_RIGHT_TRIGGER ) != 0 );
	// Left stick: x strafes; y is either height (trigger held) or forward/back.
	const Vector3f gamepadMove(
			vrFrame.Input.sticks[0][0],
			upDown ? -vrFrame.Input.sticks[0][1] : 0.0f,
			upDown ? 0.0f : vrFrame.Input.sticks[0][1] );

	// Perform player movement if there is input.
	if ( gamepadMove.LengthSq() > 0.0f )
	{
		// Rotate the stick vector into world space around the current yaw.
		const Matrix4f yawRotate = Matrix4f::RotationY( EyeYaw );
		const Vector3f orientationVector = yawRotate.Transform( gamepadMove );

		// Don't let move get too crazy fast
		const float moveDistance = OVR::Alg::Min<float>( MoveSpeed * (float)dt, 1.0f );

		if ( WorldModel.Definition != NULL && !FreeMove )
		{
			FootPos = SlideMove( FootPos, HeadModelParms.EyeHeight, orientationVector, moveDistance,
					WorldModel.Definition->Collisions, WorldModel.Definition->GroundCollisions );
		}
		else
		{	// no scene loaded, walk without any collisions
			ModelCollision collisionModel;
			ModelCollision groundCollisionModel;
			FootPos = SlideMove( FootPos, HeadModelParms.EyeHeight, orientationVector, moveDistance,
					collisionModel, groundCollisionModel );
		}
	}

	//
	// Center eye transform
	//
	UpdateCenterEye();

	//
	// Model animations
	//
	if ( !Paused )
	{
		for ( int i = 0; i < Models.GetSizeI(); i++ )
		{
			if ( Models[i] != NULL )
			{
				Models[i]->AnimateJoints( static_cast<float>( vrFrame.PredictedDisplayTimeInSeconds ) );
			}
		}
	}

	// External systems can add surfaces to this list before drawing.
	EmitSurfaces.Resize( 0 );
}