void OvrOutOfSpaceMenu::BuildMenu( int memoryInKB ) { const VRMenuFontParms fontParms( true, true, false, false, true, 0.505f, 0.43f, 1.0f ); Array< VRMenuObjectParms const * > parms; int menuId = 9000; // --- // Icon { VRMenuSurfaceParms iconSurfParms( "", "res/raw/out_of_disk_space_warning.png", SURFACE_TEXTURE_DIFFUSE, "", SURFACE_TEXTURE_MAX, "", SURFACE_TEXTURE_MAX ); VRMenuObjectParms iconParms( VRMENU_STATIC, Array< VRMenuComponent* >(), iconSurfParms, "", Posef( Quatf(), Vector3f( 0.0f, CENTER_TO_ICON_Y_OFFSET, 0.0f ) ), Vector3f( 1.0f ), fontParms, VRMenuId_t( ++menuId ), VRMenuObjectFlags_t( VRMENUOBJECT_DONT_HIT_ALL ), VRMenuObjectInitFlags_t( VRMENUOBJECT_INIT_FORCE_POSITION ) ); parms.PushBack( &iconParms ); AddItems( AppPtr->GetVRMenuMgr(), AppPtr->GetDefaultFont(), parms, GetRootHandle(), true ); parms.Clear(); } // --- // Message { String outOfSpaceMsg; VrLocale::GetString( AppPtr->GetVrJni(), AppPtr->GetJavaObject(), "@string/out_of_memory", "To use this app, please free up at least %dKB of storage space on your phone.", outOfSpaceMsg ); char charBuff[10]; sprintf( charBuff, "%d", memoryInKB ); outOfSpaceMsg = VrLocale::GetXliffFormattedString( outOfSpaceMsg, charBuff ); BitmapFont & font = AppPtr->GetDefaultFont(); font.WordWrapText( outOfSpaceMsg, 1.4f ); VRMenuObjectParms titleParms( VRMENU_STATIC, Array< VRMenuComponent* >(), VRMenuSurfaceParms(), (const char*)outOfSpaceMsg, Posef( Quatf(), Vector3f( 0.0f, CENTER_TO_TEXT_Y_OFFSET, 0.0f ) ), Vector3f( 1.0f ), fontParms, VRMenuId_t( ++menuId ), VRMenuObjectFlags_t( VRMENUOBJECT_DONT_HIT_TEXT ), VRMenuObjectInitFlags_t( VRMENUOBJECT_INIT_FORCE_POSITION ) ); parms.PushBack( &titleParms ); AddItems( AppPtr->GetVRMenuMgr(), AppPtr->GetDefaultFont(), parms, GetRootHandle(), true ); parms.Clear(); } this->SetMenuPose( Posef( Quatf(), Vector3f( 0.0f, 0.0f, -3.0f ) ) ); }
// Default-constructs an inactive trackball: identity rotation/drag quaternions,
// left-button activation, and a 100-unit ball centred at (50, 50).
Trackball::Trackball()
{
	_activateButton = ELeftButton;
	_active = false;
	_ballCen = Vec2f(50,50);
	_ballSize = 100;
	const Quatf identity(0.0f, 0.0f, 0.0f, 1.0f);
	_qRot = identity;
	_qDrag = identity;
}
// Resets the current orientation void SensorFusion::Reset() { Lock::Locker lockScope(Handler.GetHandlerLock()); Q = Quatf(); QUncorrected = Quatf(); Stage = 0; RunningTime = 0; MagNumReferences = 0; MagRefIdx = -1; GyroOffset = Vector3f(); }
// Test fixture setup: assigns known vector and quaternion values used by the
// test cases. r1/r2 are presumably the expected results of rotating v1/v2 —
// TODO confirm against the tests that use them.
void setUp()
{
	v1 = Vector3f(1, 0, 0);
	r1 = Vector3f(0, 0, 1);
	v2 = Vector4f(15, 0, 0, 1);
	r2 = Vector4f(0, -15, 0, 1);
	// q5n negates every component of q5; q5c negates only the vector part
	// (a conjugate, given this (scalar, vector) constructor form).
	q5 = Quatf(0.5, Vector3f(1, 2, 3));
	q5n = Quatf(-0.5, Vector3f(-1, -2, -3));
	q5c = Quatf(0.5, Vector3f(-1, -2, -3));
}
// Casts a ray from the centre eye along its forward (-Z) direction and tests
// it against 'target's local-space bounding box.
// axisInWorld: when true, the returned hit points are transformed back into
// world space; otherwise they remain in the target's model space.
// result.intersected additionally requires t0 > 0, so boxes entirely behind
// the eye do not count (first/second are still filled for any overlap).
IntersectRayBoundsResult Scene::IntersectRayBounds(SceneObject *target, bool axisInWorld)
{
	Matrix4f worldToModelM = target->GetMatrixWorld().Inverted();
	Matrix4f invertedCenterViewM = centerViewM.Inverted();
	// Eye position and orientation in world space (inverse of the view matrix).
	Vector3f inWorldCenterViewPos = invertedCenterViewM.GetTranslation();
	Quatf centerViewRot = Quatf(invertedCenterViewM);

	// Ray start and direction expressed in the target's model space.
	const Vector3f rayStart = worldToModelM.Transform(inWorldCenterViewPos);
	const Vector3f rayDir = worldToModelM.Transform(
			centerViewRot.Rotate(Vector3f(0.0f, 0.0f, -1.0f))) - rayStart;

	const BoundingBoxInfo boundingBoxInfo = target->GetRenderData()->GetMesh()->GetBoundingBoxInfo();

	float t0 = 0.0f;
	float t1 = 0.0f;
	bool intersected = Intersect_RayBounds(rayStart, rayDir,
			boundingBoxInfo.mins, boundingBoxInfo.maxs, t0, t1);

	IntersectRayBoundsResult result;
	result.intersected = intersected && t0 > 0;
	if (intersected)
	{
		// Entry (t0) and exit (t1) points along the ray.
		result.first = rayStart + t0 * rayDir;
		result.second = rayStart + t1 * rayDir;
		if (axisInWorld)
		{
			result.first = target->GetMatrixWorld().Transform(result.first);
			result.second = target->GetMatrixWorld().Transform(result.second);
		}
	}
	return result;
}
void Camera::lookAt( const Vec3f &aEyePoint, const Vec3f &target ) { mEyePoint = aEyePoint; mViewDirection = ( target - mEyePoint ).normalized(); mOrientation = Quatf( Matrix44f::alignZAxisWithTarget( -mViewDirection, mWorldUp ) ).normalized(); mModelViewCached = false; }
// Maps a head pose reported by the sensor into the virtual world by applying
// the player's body yaw and body position.
Posef Player::VirtualWorldTransformfromRealPose(const Posef &sensorHeadPose)
{
	const Quatf bodyYawRotation(Vector3f(0,1,0), BodyYaw.Get());
	const Quatf worldRotation = bodyYawRotation * sensorHeadPose.Rotation;
	const Vector3f worldTranslation = BodyPos + bodyYawRotation.Rotate(sensorHeadPose.Translation);
	return Posef(worldRotation, worldTranslation);
}
// Constructs a virtual trackball for a viewport of the given size.
// _centre: centre of rotation in world space; _eye_dist: eye-to-centre distance.
QuatTrackBall::QuatTrackBall(const Vec3f& _centre, float _eye_dist, unsigned _width, unsigned _height):
	centre(_centre), width(_width), height(_height), eye_dist(_eye_dist)
{
	// This size should really be based on the distance from the center of
	// rotation to the point on the object underneath the mouse. That
	// point would then track the mouse as closely as possible. This is a
	// simple example, though, so that is left as an exercise.
	ballsize = 2.0f;
	screen_centre = Vec2i(width/2, height/2);
	// Start with identity rotations and no accumulated translation.
	qrot = Quatf(0.0, 0.0, 0.0, 1.0);
	qinc = Quatf(0.0, 0.0, 0.0, 1.0);
	trans = Vec2f(0.0, 0.0);
}
// Returns the entity's rotation as a quaternion, derived from its rotation
// properties. NOTE(review): the exact float comparisons against -1.0f / -2.0f
// are sentinel angle values parsed verbatim from the map file (presumably the
// Quake "up"/"down" convention) — intentional, do not replace with epsilon tests.
const Quatf Entity::rotation() const {
    const RotationInfo info = rotationInfo();
    switch (info.type) {
        case RTZAngle: {
            // Single yaw angle (degrees) about the Z axis.
            const PropertyValue* angleValue = propertyForKey(info.property);
            if (angleValue == NULL)
                return Quatf(0.0f, Vec3f::PosZ);   // property missing: identity
            float angle = static_cast<float>(std::atof(angleValue->c_str()));
            return Quatf(Math<float>::radians(angle), Vec3f::PosZ);
        }
        case RTZAngleWithUpDown: {
            // Yaw about Z, with sentinel values selecting straight up/down.
            const PropertyValue* angleValue = propertyForKey(info.property);
            if (angleValue == NULL)
                return Quatf(0.0f, Vec3f::PosZ);
            float angle = static_cast<float>(std::atof(angleValue->c_str()));
            if (angle == -1.0f)
                return Quatf(-Math<float>::Pi / 2.0f, Vec3f::PosY);
            if (angle == -2.0f)
                return Quatf(Math<float>::Pi / 2.0f, Vec3f::PosY);
            return Quatf(Math<float>::radians(angle), Vec3f::PosZ);
        }
        case RTEulerAngles: {
            // Euler angles property: yaw about Z and (negated) pitch about Y.
            const PropertyValue* angleValue = propertyForKey(info.property);
            Vec3f angles = angleValue != NULL ? Vec3f(*angleValue) : Vec3f::Null;
            Quatf zRotation(Math<float>::radians( angles.x()), Vec3f::PosZ);
            Quatf yRotation(Math<float>::radians(-angles.y()), Vec3f::PosY);
            return zRotation * yRotation;
        }
        default:
            return Quatf(0.0f, Vec3f::PosZ);
    }
}
// Maps a sensor-space head pose into the virtual world, choosing the body
// position that matches the requested tracking origin (eye level vs floor level).
Posef Player::VirtualWorldTransformfromRealPose(const Posef &sensorHeadPose, ovrTrackingOrigin trackingOrigin)
{
	const Quatf bodyYawRotation(Vector3f(0,1,0), GetApparentBodyYaw().Get());
	Vector3f originPos;
	if (trackingOrigin == ovrTrackingOrigin_EyeLevel)
		originPos = BodyPos;
	else
		originPos = BodyPoseFloorLevel;
	return Posef(bodyYawRotation * sensorHeadPose.Rotation,
			originPos + bodyYawRotation.Rotate(sensorHeadPose.Translation));
}
// Adds this label to 'menu' under 'parent' as a static, surfaceless text item
// at the local origin.
void UILabel::AddToMenu( UIMenu *menu, UIObject *parent )
{
	const Quatf noRotation( Vector3f( 0.0f, 1.0f, 0.0f ), 0.0f );
	const Posef localPose( noRotation, Vector3f( 0.0f, 0.0f, 0.0f ) );

	VRMenuObjectParms itemParms( VRMENU_STATIC, Array< VRMenuComponent* >(),
			VRMenuSurfaceParms(), "", localPose, Vector3f( 1.0f ), FontParms, menu->AllocId(),
			VRMenuObjectFlags_t(), VRMenuObjectInitFlags_t( VRMENUOBJECT_INIT_FORCE_POSITION ) );

	AddToMenuWithParms( menu, parent, itemParms );
}
// Computes the delta rotation from the rendered ("from") view orientation to
// the latest predicted ("to") orientation, used for orientation time warp.
// Invalid (near-zero) quaternions are replaced by the other operand when it is
// valid, otherwise by identity.
// FIX: corrected typos in the runtime error messages ("Srouce" -> "Source",
// "Targer" -> "Target") and cleaned up the malformed comment block.
Quatf Tracker::CalculateTimeWarpMatrix(Quatf inFrom, Quatf inTo)
{
	// A valid unit quaternion has squared length ~1; treat anything much
	// shorter as uninitialized data.
	bool fromValid = inFrom.LengthSq() > 0.95f;
	bool toValid = inTo.LengthSq() > 0.95f;
	if (!fromValid)
	{
		MOJING_ERROR(g_APIlogger, "Invalid Source view rotate");
		if (toValid)
		{
			inFrom = inTo;
		}
		else
		{
			inFrom = Quatf(0.0f, 0.0f, 0.0f, 1.0f); // just force identity
		}
	}
	if (!toValid)
	{
		MOJING_ERROR(g_APIlogger, "Invalid Target view rotate");
		if (fromValid)
		{
			inTo = inFrom;
		}
		else
		{
			inTo = Quatf(0.0f, 0.0f, 0.0f, 1.0f); // just force identity
		}
	}
	// NOTE: writing this as (inFrom * inTo.Inverted()).Inverted() would make
	// the predicted image rotate in the same direction as the motion, which
	// shows up as judder while the head is moving — so the outer Inverted()
	// stays removed. (20170731)
	Quatf qFix = (inFrom * inTo.Inverted());
	return qFix;
}
// Adds this image to 'menu' under 'parent' as a button carrying the supplied
// object flags; the surfaces are assigned separately (e.g. via SetImage).
void UIImage::AddToMenuFlags( UIMenu *menu, UIWidget *parent, VRMenuObjectFlags_t const flags )
{
	const Quatf noRotation( Vector3f( 0.0f, 1.0f, 0.0f ), 0.0f );
	const Posef localPose( noRotation, Vector3f( 0.0f, 0.0f, 0.0f ) );
	const Vector3f unitScale( 1.0f );
	const VRMenuFontParms labelFontParms( true, true, false, false, false, 1.0f );

	VRMenuObjectParms itemParms( VRMENU_BUTTON, Array< VRMenuComponent* >(),
			VRMenuSurfaceParms(), "", localPose, unitScale, labelFontParms, menu->AllocId(),
			flags, VRMenuObjectInitFlags_t( VRMENUOBJECT_INIT_FORCE_POSITION ) );

	AddToMenuWithParms( menu, parent, itemParms );
}
//============================== // OvrDefaultComponent::Frame eMsgStatus OvrDefaultComponent::Frame( App * app, VrFrame const & vrFrame, OvrVRMenuMgr & menuMgr, VRMenuObject * self, VRMenuEvent const & event ) { double t = TimeInSeconds(); if ( StartFadeInTime >= 0.0f && t >= StartFadeInTime ) { HilightFader.StartFadeIn(); StartFadeInTime = -1.0f; } else if ( StartFadeOutTime >= 0.0f && t > StartFadeOutTime ) { HilightFader.StartFadeOut(); StartFadeOutTime = -1.0f; } float const fadeRate = 1.0f / FadeDuration; HilightFader.Update( fadeRate, vrFrame.DeltaSeconds ); float const hilightAlpha = HilightFader.GetFinalAlpha(); Vector3f offset = HilightOffset * hilightAlpha; self->SetHilightPose( Posef( Quatf(), offset ) ); int additiveSurfIndex = self->FindSurfaceWithTextureType( SURFACE_TEXTURE_ADDITIVE, true ); if ( additiveSurfIndex >= 0 ) { Vector4f surfColor = self->GetSurfaceColor( additiveSurfIndex ); surfColor.w = hilightAlpha; self->SetSurfaceColor( additiveSurfIndex, surfColor ); } float const scale = ( ( HilightScale - 1.0f ) * hilightAlpha ) + 1.0f; self->SetHilightScale( scale ); if ( SuppressText ) { self->SetTextColor( Vector4f( 0.0f ) ); } else { Vector4f colorDelta = TextHilightColor - TextNormalColor; Vector4f curColor = TextNormalColor + ( colorDelta * hilightAlpha ); self->SetTextColor( curColor ); } return MSG_STATUS_ALIVE; }
// This is a simple predictive filter based only on extrapolating the smoothed, current angular velocity. // Note that both QP (the predicted future orientation) and Q (the current orientation) are both maintained. Quatf SensorFusion::GetPredictedOrientation() { Lock::Locker lockScope(Handler.GetHandlerLock()); Quatf qP = QUncorrected; if (EnablePrediction) { #if 1 Vector3f angVelF = FAngV.SavitzkyGolaySmooth8(); float angVelFL = angVelF.Length(); if (angVelFL > 0.001f) { Vector3f rotAxisP = angVelF / angVelFL; float halfRotAngleP = angVelFL * PredictionDT * 0.5f; float sinaHRAP = sin(halfRotAngleP); Quatf deltaQP(rotAxisP.x*sinaHRAP, rotAxisP.y*sinaHRAP, rotAxisP.z*sinaHRAP, cos(halfRotAngleP)); qP = QUncorrected * deltaQP; } #else Quatd qpd = Quatd(Q.x,Q.y,Q.z,Q.w); int predictionStages = (int)(PredictionDT / DeltaT); Vector3f aa = FAngV.SavitzkyGolayDerivative12(); Vector3d aad = Vector3d(aa.x,aa.y,aa.z); Vector3f angVelF = FAngV.SavitzkyGolaySmooth8(); Vector3d avkd = Vector3d(angVelF.x,angVelF.y,angVelF.z); for (int i = 0; i < predictionStages; i++) { double angVelLengthd = avkd.Length(); Vector3d rotAxisd = avkd / angVelLengthd; double halfRotAngled = angVelLengthd * DeltaT * 0.5; double sinHRAd = sin(halfRotAngled); Quatd deltaQd = Quatd(rotAxisd.x*sinHRAd, rotAxisd.y*sinHRAd, rotAxisd.z*sinHRAd, cos(halfRotAngled)); qpd = qpd * deltaQd; // Update vel avkd += aad; } qP = Quatf((float)qpd.x,(float)qpd.y,(float)qpd.z,(float)qpd.w); #endif } return qP; }
// Builds the shadow-projection world matrix for the light at 'lightPos'.
// NOTE(review): this constructs an orthonormal basis (mU, mV, mW) aligned with
// the light's view direction and writes it, together with the projected
// light-local translation, directly into worldMatrixShadow — verify the row/
// column layout against the engine's matrix convention before modifying.
void ProjectionCamera::setShadowModel()
{
	// Light position in building-local coordinates.
	Vec3f lightPosLocal = Vec3f( _buildingOffsetX, _buildingOffsetY, 0 ) + lightPos;
	// View direction: from the light toward the origin.
	Vec3f mViewDirection = -( lightPos );
	Quatf mOrientation = Quatf( Matrix44f::alignZAxisWithTarget( -mViewDirection, Vec3f::zAxis() ) ).normalized();
	// Orthonormal basis of the light's view: mW back, mU right, mV up.
	Vec3f mW = -mViewDirection.normalized();
	Vec3f mU = mOrientation * Vec3f::xAxis();
	Vec3f mV = mOrientation * Vec3f::yAxis();
	// Translation terms: light-local position projected onto each basis axis.
	Vec3f d( -lightPosLocal.dot( mU ), -lightPosLocal.dot( mV ), -lightPosLocal.dot( mW ) );

	float *m = worldMatrixShadow.m;
	m[ 0] = mU.x; m[ 4] = mU.y; m[ 8] = mU.z; m[12] = d.x;
	m[ 1] = mV.x; m[ 5] = mV.y; m[ 9] = mV.z; m[13] = d.y;
	m[ 2] = mW.x; m[ 6] = mW.y; m[10] = mW.z; m[14] = d.z;
	m[ 3] = 0.0f; m[ 7] = 0.0f; m[11] = 0.0f; m[15] = 1.0f;
}
// Creates maxValue+1 selectable cells, spaced 'cellSpacing' texels apart along
// the local X axis, and attaches the slider component that manages them.
// maxValue:    highest discrete value (cells run 0..maxValue inclusive).
// startValue:  the initially highlighted value.
// cellSpacing: horizontal spacing between adjacent cells, in texels.
void UIDiscreteSlider::AddCells( unsigned int maxValue, unsigned int startValue, float cellSpacing )
{
	MaxValue = maxValue;
	StartValue = startValue;

	DiscreteSliderComponent = new UIDiscreteSliderComponent( *this, StartValue );
	OVR_ASSERT( DiscreteSliderComponent );
	AddComponent( DiscreteSliderComponent );

	float cellOffset = 0.0f;
	// Convert the texel spacing into local-space units.
	const float pixelCellSpacing = cellSpacing * VRMenuObject::DEFAULT_TEXEL_SCALE;
	VRMenuFontParms fontParms( HORIZONTAL_CENTER, VERTICAL_CENTER, false, false, false, 1.0f );
	Vector3f defaultScale( 1.0f );

	for ( unsigned int cellIndex = 0; cellIndex <= MaxValue; ++cellIndex )
	{
		const Posef pose( Quatf( Vector3f( 0.0f, 1.0f, 0.0f ), 0.0f ),
				Vector3f( cellOffset, 0.f, 0.0f ) );
		cellOffset += pixelCellSpacing;

		VRMenuObjectParms cellParms( VRMENU_BUTTON, Array< VRMenuComponent* >(), VRMenuSurfaceParms(),
				"", pose, defaultScale, fontParms, Menu->AllocId(),
				VRMenuObjectFlags_t(), VRMenuObjectInitFlags_t( VRMENUOBJECT_INIT_FORCE_POSITION ) );

		UICell * cellObject = new UICell( GuiSys );
		cellObject->AddToDiscreteSlider( Menu, this, cellParms );
		cellObject->SetImage( 0, SURFACE_TEXTURE_DIFFUSE, CellOffTexture );
		// Each cell gets a component that reports its index back to the slider.
		UICellComponent * cellComp = new UICellComponent( *DiscreteSliderComponent, cellIndex );

		VRMenuObject * object = cellObject->GetMenuObject();
		OVR_ASSERT( object );
		object->AddComponent( cellComp );

		DiscreteSliderComponent->AddCell( cellObject );
	}

	DiscreteSliderComponent->HighlightCells( StartValue );
}
// Computes the incremental rotation (qinc) taking the previous mouse position
// to 'new_pos', using the classic virtual-trackball mapping: both 2D points
// are lifted onto the trackball sphere and the rotation between the resulting
// vectors becomes the increment.
void QuatTrackBall::calcRotation(const Vec2f& new_pos)
{
	// Check for zero rotation
	if (new_pos == last_pos)
		qinc = Quatf(0.0f, 0.0f, 0.0f, 1.0f);
	else
	{
		// Form two vectors based on input points, find rotation axis
		Vec3f p1 = Vec3f(new_pos[0], new_pos[1], projectToSphere(new_pos));
		Vec3f p2 = Vec3f(last_pos[0], last_pos[1], projectToSphere(last_pos));
		qinc.make_rot(normalize(p1), normalize(p2));
		/*
		Vec3f q = cross(p1, p2); // axis of rotation from p1 and p2
		float L = sqrt(1.0f-dot(q,q) / (dot(p1,p1) * dot(p2,p2)));
		q.normalize(); // q' = axis of rotation
		q *= sqrt((1 - L)/2); // q' = q' * sin(phi)
		qinc.set(q[0],q[1],q[2],sqrt((1 + L)/2));
		*/
	}
}
// Computes the pair of orientation-only time-warp matrices (start and end of
// scanout) that rewarp an eye image rendered at 'renderPose' toward the
// predicted display-time head orientations.
void HMDState::GetTimewarpMatrices(ovrEyeType eyeId, ovrPosef renderPose, ovrMatrix4f twmOut[2])
{
	// Predicted scanout interval for this eye.
	double warpTimes[2];
	getTimewarpStartEnd(eyeId, warpTimes);

	ovrTrackingState stateAtStart = PredictedTrackingState(warpTimes[0]);
	ovrTrackingState stateAtEnd = PredictedTrackingState(warpTimes[1]);

	// Invert the render orientation: we need the view matrix, not the camera matrix.
	Quatf renderOrientationInv = Quatf(renderPose.Orientation);
	renderOrientationInv.Invert();

	Matrix4f warpAtStart, warpAtEnd;
	CalculateOrientationTimewarpMatrix(
		renderOrientationInv, stateAtStart.HeadPose.ThePose.Orientation, warpAtStart);
	CalculateOrientationTimewarpMatrix(
		renderOrientationInv, stateAtEnd.HeadPose.ThePose.Orientation, warpAtEnd);

	twmOut[0] = warpAtStart;
	twmOut[1] = warpAtEnd;
}
// Creates the movie player's gaze-tracked menu; currently it holds only the
// (initially hidden) "reorient screen" prompt shown while repositioning.
void MoviePlayerView::CreateMenu( App * app, OvrVRMenuMgr & menuMgr, BitmapFont const & font )
{
	Menu = VRMenu::Create( "MoviePlayerMenu" );

	Array< VRMenuObjectParms const * > parms;

	// Reorient prompt: 1.8m in front of the viewer, no rotation.
	Posef moveScreenPose( Quatf( Vector3f( 0.0f, 1.0f, 0.0f ), 0.0f ),
			Vector3f( 0.0f, 0.0f, -1.8f ) );

	VRMenuFontParms moveScreenFontParms( true, true, false, false, false, 0.5f );
	// Text-only item: no surface textures.
	VRMenuSurfaceParms moveScreenSurfParms( "",
			NULL, SURFACE_TEXTURE_MAX,
			NULL, SURFACE_TEXTURE_MAX,
			NULL, SURFACE_TEXTURE_MAX );
	VRMenuObjectParms moveScreenParms( VRMENU_BUTTON, Array< VRMenuComponent* >(),
			moveScreenSurfParms, Strings::MoviePlayer_Reorient, moveScreenPose, Vector3f( 1.0f ),
			moveScreenFontParms, ID_MOVE_SCREEN, VRMenuObjectFlags_t(),
			VRMenuObjectInitFlags_t( VRMENUOBJECT_INIT_FORCE_POSITION ) );
	parms.PushBack( &moveScreenParms );

	// The menu follows the gaze; the back key is passed through instead of closing it.
	Menu->InitWithItems(menuMgr, font, 0.0f,
			VRMenuFlags_t( VRMENU_FLAG_TRACK_GAZE ) | VRMENU_FLAG_BACK_KEY_DOESNT_EXIT, parms);
	parms.Clear();

	MoveScreenHandle = Menu->HandleForId( menuMgr, ID_MOVE_SCREEN );
	MoveScreenObj = menuMgr.ToObject( MoveScreenHandle );
	// Hidden until the user actually starts repositioning the screen.
	MoveScreenObj->AddFlags( VRMENUOBJECT_DONT_RENDER );

	// Nudge the prompt text slightly below the item's center.
	Vector3f moveScreenTextPosition = Vector3f( 0.0f, -24 * VRMenuObject::DEFAULT_TEXEL_SCALE, 0.0f );
	MoveScreenObj->SetTextLocalPosition( moveScreenTextPosition );

	// ==============================================================================
	//
	// finalize
	//
	Cinema.app->GetGuiSys().AddMenu( Menu );
}
// Casts the centre-eye gaze ray (forward -Z of the view) against the local
// bounds of 'targetGeometry' placed by 'targetWorldMatrix'.
// axisInWorld: when true, hit points are returned in world space; otherwise
// they stay in the target's model space. result.intersected additionally
// requires t0 > 0, so boxes entirely behind the eye do not count.
IntersectRayBoundsResult IntersectRayBounds(const Matrix4f &centerViewMatrix,
		const Matrix4f &targetWorldMatrix, const GlGeometry &targetGeometry, bool axisInWorld)
{
	// Eye position and orientation in world space (inverse of the view matrix).
	const Matrix4f viewInverse = centerViewMatrix.Inverted();
	const Vector3f eyePosWorld = viewInverse.GetTranslation();
	const Quatf eyeRotWorld = Quatf(viewInverse);
	const Vector3f forwardWorld = eyeRotWorld.Rotate(Vector3f(0.0f, 0.0f, -1.0f));

	// Transform the ray into the target's model space.
	const Matrix4f worldToModel = targetWorldMatrix.Inverted();
	const Vector3f rayStart = worldToModel.Transform(eyePosWorld);
	const Vector3f rayDir = worldToModel.Transform(forwardWorld) - rayStart;

	float t0 = 0.0f;
	float t1 = 0.0f;
	const bool hit = Intersect_RayBounds(rayStart, rayDir,
			targetGeometry.localBounds.GetMins(), targetGeometry.localBounds.GetMaxs(), t0, t1);

	IntersectRayBoundsResult result;
	result.intersected = hit && t0 > 0;
	if (hit)
	{
		// Entry (t0) and exit (t1) points along the ray.
		result.first = rayStart + t0 * rayDir;
		result.second = rayStart + t1 * rayDir;
		if (axisInWorld)
		{
			result.first = targetWorldMatrix.Transform(result.first);
			result.second = targetWorldMatrix.Transform(result.second);
		}
	}
	return result;
}
// Constructs the video menu: an attribution panel positioned 'radius' in front
// of the viewer, with a browser ("home") button above it and a video-restart
// button below it. Both buttons get default hilight behavior, an on-up action,
// and off/on surface toggling.
OvrVideoMenu::OvrVideoMenu( OvrGuiSys & guiSys, OvrMetaData & metaData, float radius )
	: VRMenu( MENU_NAME )
	, MetaData( metaData )
	, LoadingIconHandle( 0 )
	, AttributionHandle( 0 )
	, BrowserButtonHandle( 0 )
	, VideoControlButtonHandle( 0 )
	, Radius( radius )
	, ButtonCoolDown( 0.0f )
	, OpenTime( 0.0 )
{
	// Init with empty root
	Init( guiSys, 0.0f, VRMenuFlags_t() );

	// Create Attribution info view
	Array< VRMenuObjectParms const * > parms;
	Array< VRMenuComponent* > comps;
	VRMenuId_t attributionPanelId( ID_CENTER_ROOT.Get() + 10 );

	comps.PushBack( new OvrVideoMenuRootComponent( *this ) );

	// Panel placed Radius units along -FWD from the root, unrotated.
	Quatf rot( DOWN, 0.0f );
	Vector3f dir( -FWD );
	Posef panelPose( rot, dir * Radius );
	Vector3f panelScale( 1.0f );

	const VRMenuFontParms fontParms( true, true, false, false, true, 0.525f, 0.45f, 1.0f );

	VRMenuObjectParms attrParms( VRMENU_STATIC, comps,
		VRMenuSurfaceParms(), "Attribution Panel", panelPose, panelScale, Posef(), Vector3f( 1.0f ), fontParms, attributionPanelId,
		VRMenuObjectFlags_t(), VRMenuObjectInitFlags_t( VRMENUOBJECT_INIT_FORCE_POSITION ) );

	parms.PushBack( &attrParms );

	AddItems( guiSys, parms, GetRootHandle(), false );
	parms.Clear();
	comps.Clear();

	AttributionHandle = HandleForId( guiSys.GetVRMenuMgr(), attributionPanelId );
	VRMenuObject * attributionObject = guiSys.GetVRMenuMgr().ToObject( AttributionHandle );
	OVR_ASSERT( attributionObject != NULL );

	//Browser button
	float const ICON_HEIGHT = 80.0f * VRMenuObject::DEFAULT_TEXEL_SCALE;
	Array< VRMenuSurfaceParms > surfParms;

	// Positioned two icon heights above the attribution panel.
	Posef browserButtonPose( Quatf(), UP * ICON_HEIGHT * 2.0f );

	comps.PushBack( new OvrDefaultComponent( Vector3f( 0.0f, 0.0f, 0.05f ), 1.05f, 0.25f, 0.0f,
		Vector4f( 1.0f ), Vector4f( 1.0f ) ) );
	comps.PushBack( new OvrButton_OnUp( this, ID_BROWSER_BUTTON ) );
	comps.PushBack( new OvrSurfaceToggleComponent( ) );
	// Off/on surfaces toggled by OvrSurfaceToggleComponent on hilight.
	surfParms.PushBack( VRMenuSurfaceParms ( "browser",
		"apk:///assets/nav_home_off.png", SURFACE_TEXTURE_DIFFUSE,
		NULL, SURFACE_TEXTURE_MAX, NULL, SURFACE_TEXTURE_MAX ) );
	surfParms.PushBack( VRMenuSurfaceParms( "browser",
		"apk:///assets/nav_home_on.png", SURFACE_TEXTURE_DIFFUSE,
		NULL, SURFACE_TEXTURE_MAX, NULL, SURFACE_TEXTURE_MAX ) );
	VRMenuObjectParms browserButtonParms( VRMENU_BUTTON, comps, surfParms, "",
		browserButtonPose, Vector3f( 1.0f ), Posef(), Vector3f( 1.0f ), fontParms, ID_BROWSER_BUTTON,
		VRMenuObjectFlags_t( VRMENUOBJECT_DONT_HIT_TEXT ),
		VRMenuObjectInitFlags_t( VRMENUOBJECT_INIT_FORCE_POSITION ) );
	parms.PushBack( &browserButtonParms );

	AddItems( guiSys, parms, AttributionHandle, false );
	parms.Clear();
	comps.Clear();
	surfParms.Clear();

	BrowserButtonHandle = attributionObject->ChildHandleForId( guiSys.GetVRMenuMgr(), ID_BROWSER_BUTTON );
	VRMenuObject * browserButtonObject = guiSys.GetVRMenuMgr().ToObject( BrowserButtonHandle );
	OVR_ASSERT( browserButtonObject != NULL );
	OVR_UNUSED( browserButtonObject );

	//Video control button
	// Positioned two icon heights below the attribution panel.
	Posef videoButtonPose( Quatf(), DOWN * ICON_HEIGHT * 2.0f );

	comps.PushBack( new OvrDefaultComponent( Vector3f( 0.0f, 0.0f, 0.05f ), 1.05f, 0.25f, 0.0f,
		Vector4f( 1.0f ), Vector4f( 1.0f ) ) );
	comps.PushBack( new OvrButton_OnUp( this, ID_VIDEO_BUTTON ) );
	comps.PushBack( new OvrSurfaceToggleComponent( ) );
	surfParms.PushBack( VRMenuSurfaceParms( "browser",
		"apk:///assets/nav_restart_off.png", SURFACE_TEXTURE_DIFFUSE,
		NULL, SURFACE_TEXTURE_MAX, NULL, SURFACE_TEXTURE_MAX ) );
	surfParms.PushBack( VRMenuSurfaceParms( "browser",
		"apk:///assets/nav_restart_on.png", SURFACE_TEXTURE_DIFFUSE,
		NULL, SURFACE_TEXTURE_MAX, NULL, SURFACE_TEXTURE_MAX ) );
	VRMenuObjectParms controlButtonParms( VRMENU_BUTTON, comps, surfParms, "",
		videoButtonPose, Vector3f( 1.0f ), Posef(), Vector3f( 1.0f ), fontParms, ID_VIDEO_BUTTON,
		VRMenuObjectFlags_t( VRMENUOBJECT_DONT_HIT_TEXT ),
		VRMenuObjectInitFlags_t( VRMENUOBJECT_INIT_FORCE_POSITION ) );
	parms.PushBack( &controlButtonParms );

	AddItems( guiSys, parms, AttributionHandle, false );
	parms.Clear();
	comps.Clear();

	VideoControlButtonHandle = attributionObject->ChildHandleForId( guiSys.GetVRMenuMgr(), ID_VIDEO_BUTTON );
	VRMenuObject * controlButtonObject = guiSys.GetVRMenuMgr().ToObject( VideoControlButtonHandle );
	OVR_ASSERT( controlButtonObject != NULL );
	OVR_UNUSED( controlButtonObject );
}
// Integrates one IMU sample into the orientation estimate Q.
// Pipeline: (1) publish raw/calibrated readings through the class API,
// (2) integrate the gyro rotation into Q, (3) correct pitch/roll drift using
// gravity from the accelerometer, (4) correct yaw drift using the
// magnetometer (only when enabled, ready, and a reference point is set).
void SensorFusion::handleMessage(const MessageBodyFrame& msg)
{
	if (msg.Type != Message_BodyFrame)
		return;

	// Put the sensor readings into convenient local variables
	Vector3f angVel = msg.RotationRate;
	Vector3f rawAccel = msg.Acceleration;
	Vector3f mag = msg.MagneticField;

	// Set variables accessible through the class API
	DeltaT = msg.TimeDelta;
	AngV = msg.RotationRate;
	AngV.y *= YawMult;  // Warning: If YawMult != 1, then AngV is not true angular velocity
	A = rawAccel;

	// Allow external access to uncalibrated magnetometer values
	RawMag = mag;

	// Apply the calibration parameters to raw mag
	// (the offsets live in the fourth column of the calibration matrix).
	if (HasMagCalibration())
	{
		mag.x += MagCalibrationMatrix.M[0][3];
		mag.y += MagCalibrationMatrix.M[1][3];
		mag.z += MagCalibrationMatrix.M[2][3];
	}

	// Provide external access to calibrated mag values
	// (if the mag is not calibrated, then the raw value is returned)
	CalMag = mag;

	float angVelLength = angVel.Length();
	float accLength = rawAccel.Length();

	// Acceleration in the world frame (Q is current HMD orientation)
	Vector3f accWorld = Q.Rotate(rawAccel);

	// Keep track of time
	Stage++;
	float currentTime = Stage * DeltaT;  // Assumes uniform time spacing

	// Insert current sensor data into filter history
	FRawMag.AddElement(RawMag);
	FAccW.AddElement(accWorld);
	FAngV.AddElement(angVel);

	// Update orientation Q based on gyro outputs. This technique is
	// based on direct properties of the angular velocity vector:
	// Its direction is the current rotation axis, and its magnitude
	// is the rotation rate (rad/sec) about that axis. Our sensor
	// sampling rate is so fast that we need not worry about integral
	// approximation error (not yet, anyway).
	if (angVelLength > 0.0f)
	{
		Vector3f rotAxis = angVel / angVelLength;
		float halfRotAngle = angVelLength * DeltaT * 0.5f;
		float sinHRA = sin(halfRotAngle);
		Quatf deltaQ(rotAxis.x*sinHRA, rotAxis.y*sinHRA, rotAxis.z*sinHRA, cos(halfRotAngle));
		Q = Q * deltaQ;
	}

	// The quaternion magnitude may slowly drift due to numerical error,
	// so it is periodically normalized.
	if (Stage % 5000 == 0)
		Q.Normalize();

	// Maintain the uncorrected orientation for later use by predictive filtering
	QUncorrected = Q;

	// Perform tilt correction using the accelerometer data. This enables
	// drift errors in pitch and roll to be corrected. Note that yaw cannot be corrected
	// because the rotation axis is parallel to the gravity vector.
	if (EnableGravity)
	{
		// Correcting for tilt error by using accelerometer data
		const float gravityEpsilon = 0.4f;
		const float angVelEpsilon = 0.1f;  // Relatively slow rotation
		const int tiltPeriod = 50;         // Req'd time steps of stability
		const float maxTiltError = 0.05f;
		const float minTiltError = 0.01f;

		// This condition estimates whether the only measured acceleration is due to gravity
		// (the Rift is not linearly accelerating). It is often wrong, but tends to average
		// out well over time.
		if ((fabs(accLength - 9.81f) < gravityEpsilon) &&
			(angVelLength < angVelEpsilon))
			TiltCondCount++;
		else
			TiltCondCount = 0;

		// After stable measurements have been taken over a sufficiently long period,
		// estimate the amount of tilt error and calculate the tilt axis for later correction.
		if (TiltCondCount >= tiltPeriod)
		{   // Update TiltErrorEstimate
			TiltCondCount = 0;
			// Use an average value to reduce noice (could alternatively use an LPF)
			Vector3f accWMean = FAccW.Mean();
			// Project the acceleration vector into the XZ plane
			Vector3f xzAcc = Vector3f(accWMean.x, 0.0f, accWMean.z);
			// The unit normal of xzAcc will be the rotation axis for tilt correction
			Vector3f tiltAxis = Vector3f(xzAcc.z, 0.0f, -xzAcc.x).Normalized();
			Vector3f yUp = Vector3f(0.0f, 1.0f, 0.0f);
			// This is the amount of rotation
			float tiltAngle = yUp.Angle(accWMean);
			// Record values if the tilt error is intolerable
			if (tiltAngle > maxTiltError)
			{
				TiltErrorAngle = tiltAngle;
				TiltErrorAxis = tiltAxis;
			}
		}

		// This part performs the actual tilt correction as needed
		if (TiltErrorAngle > minTiltError)
		{
			if ((TiltErrorAngle > 0.4f)&&(Stage < 8000))
			{   // Tilt completely to correct orientation
				Q = Quatf(TiltErrorAxis, -TiltErrorAngle) * Q;
				TiltErrorAngle = 0.0f;
			}
			else
			{
				//LogText("Performing tilt correction - Angle: %f Axis: %f %f %f\n",
				//        TiltErrorAngle,TiltErrorAxis.x,TiltErrorAxis.y,TiltErrorAxis.z);
				//float deltaTiltAngle = -Gain*TiltErrorAngle*0.005f;
				// This uses agressive correction steps while your head is moving fast
				float deltaTiltAngle = -Gain*TiltErrorAngle*0.005f*(5.0f*angVelLength+1.0f);
				// Incrementally "untilt" by a small step size
				Q = Quatf(TiltErrorAxis, deltaTiltAngle) * Q;
				TiltErrorAngle += deltaTiltAngle;
			}
		}
	}

	// Yaw drift correction based on magnetometer data. This corrects the part of the drift
	// that the accelerometer cannot handle.
	// This will only work if the magnetometer has been enabled, calibrated, and a reference
	// point has been set.
	const float maxAngVelLength = 3.0f;
	const int magWindow = 5;
	const float yawErrorMax = 0.1f;
	const float yawErrorMin = 0.01f;
	const int yawErrorCountLimit = 50;
	const float yawRotationStep = 0.00002f;

	// Only trust the magnetometer while angular velocity is moderate.
	if (angVelLength < maxAngVelLength)
		MagCondCount++;
	else
		MagCondCount = 0;

	YawCorrectionInProgress = false;
	if (EnableYawCorrection && MagReady && (currentTime > 2.0f) && (MagCondCount >= magWindow) &&
		(Q.Distance(MagRefQ) < MagRefDistance))
	{
		// Use rotational invariance to bring reference mag value into global frame
		Vector3f grefmag = MagRefQ.Rotate(GetCalibratedMagValue(MagRefM));
		// Bring current (averaged) mag reading into global frame
		Vector3f gmag = Q.Rotate(GetCalibratedMagValue(FRawMag.Mean()));
		// Calculate the reference yaw in the global frame
		float gryaw = atan2(grefmag.x,grefmag.z);
		// Calculate the current yaw in the global frame
		float gyaw = atan2(gmag.x,gmag.z);
		//LogText("Yaw error estimate: %f\n",YawErrorAngle);
		// The difference between reference and current yaws is the perceived error
		YawErrorAngle = AngleDifference(gyaw,gryaw);
		// If the perceived error is large, keep count
		if ((fabs(YawErrorAngle) > yawErrorMax) && (!YawCorrectionActivated))
			YawErrorCount++;
		// After enough iterations of high perceived error, start the correction process
		if (YawErrorCount > yawErrorCountLimit)
			YawCorrectionActivated = true;
		// If the perceived error becomes small, turn off the yaw correction
		if ((fabs(YawErrorAngle) < yawErrorMin) && YawCorrectionActivated)
		{
			YawCorrectionActivated = false;
			YawErrorCount = 0;
		}
		// Perform the actual yaw correction, due to previously detected, large yaw error
		if (YawCorrectionActivated)
		{
			YawCorrectionInProgress = true;
			int sign = (YawErrorAngle > 0.0f) ? 1 : -1;
			// Incrementally "unyaw" by a small step size
			Q = Quatf(Vector3f(0.0f,1.0f,0.0f), -yawRotationStep * sign) * Q;
		}
	}
}
// Integrates one IMU sample into the orientation estimate Q using gyro
// integration with proportional-integral corrections: gravity-based tilt
// correction and magnetometer-based yaw correction. Integral terms accumulate
// into GyroOffset; all corrections are expressed in the Simultaneous
// Orthogonal Rotations Angle (SORA) representation so they combine additively.
void SensorFusion::handleMessage(const MessageBodyFrame& msg)
{
	if (msg.Type != Message_BodyFrame || !IsMotionTrackingEnabled())
		return;

	// Put the sensor readings into convenient local variables
	Vector3f gyro = msg.RotationRate;
	Vector3f accel = msg.Acceleration;
	Vector3f mag = msg.MagneticField;

	// Insert current sensor data into filter history
	FRawMag.AddElement(mag);
	FAngV.AddElement(gyro);

	// Apply the calibration parameters to raw mag
	Vector3f calMag = MagCalibrated ? GetCalibratedMagValue(FRawMag.Mean()) : FRawMag.Mean();

	// Set variables accessible through the class API
	DeltaT = msg.TimeDelta;
	AngV = gyro;
	A = accel;
	RawMag = mag;
	CalMag = calMag;

	// Keep track of time
	Stage++;
	RunningTime += DeltaT;

	// Small preprocessing: 'up' is the world up axis expressed in the body frame.
	Quatf Qinv = Q.Inverted();
	Vector3f up = Qinv.Rotate(Vector3f(0, 1, 0));

	Vector3f gyroCorrected = gyro;

	// Apply integral term
	// All the corrections are stored in the Simultaneous Orthogonal Rotations Angle representation,
	// which allows to combine and scale them by just addition and multiplication
	if (EnableGravity || EnableYawCorrection)
		gyroCorrected -= GyroOffset;

	if (EnableGravity)
	{
		const float spikeThreshold = 0.01f;
		const float gravityThreshold = 0.1f;
		float proportionalGain = 5 * Gain; // Gain parameter should be removed in a future release
		float integralGain = 0.0125f;

		Vector3f tiltCorrection = SensorFusion_ComputeCorrection(accel, up);

		if (Stage > 5)
		{
			// Spike detection: suspend correction when the tilt jumps abruptly.
			float tiltAngle = up.Angle(accel);
			TiltAngleFilter.AddElement(tiltAngle);
			if (tiltAngle > TiltAngleFilter.Mean() + spikeThreshold)
				proportionalGain = integralGain = 0;
			// Acceleration detection: don't integrate while linearly accelerating.
			const float gravity = 9.8f;
			if (fabs(accel.Length() / gravity - 1) > gravityThreshold)
				integralGain = 0;
		}
		else // Apply full correction at the startup
		{
			proportionalGain = 1 / DeltaT;
			integralGain = 0;
		}

		gyroCorrected += (tiltCorrection * proportionalGain);
		GyroOffset -= (tiltCorrection * integralGain * DeltaT);
	}

	if (EnableYawCorrection && MagCalibrated && RunningTime > 2.0f)
	{
		const float maxMagRefDist = 0.1f;
		const float maxTiltError = 0.05f;
		float proportionalGain = 0.01f;
		float integralGain = 0.0005f;

		// Update the reference point if needed
		if (MagRefIdx < 0 || calMag.Distance(MagRefsInBodyFrame[MagRefIdx]) > maxMagRefDist)
		{
			// Delete a bad point (swap-remove with the last reference)
			if (MagRefIdx >= 0 && MagRefScore < 0)
			{
				MagNumReferences--;
				MagRefsInBodyFrame[MagRefIdx] = MagRefsInBodyFrame[MagNumReferences];
				MagRefsInWorldFrame[MagRefIdx] = MagRefsInWorldFrame[MagNumReferences];
			}
			// Find a new one: the closest existing reference within maxMagRefDist
			MagRefIdx = -1;
			MagRefScore = 1000;
			float bestDist = maxMagRefDist;
			for (int i = 0; i < MagNumReferences; i++)
			{
				float dist = calMag.Distance(MagRefsInBodyFrame[i]);
				if (bestDist > dist)
				{
					bestDist = dist;
					MagRefIdx = i;
				}
			}
			// Create one if needed
			if (MagRefIdx < 0 && MagNumReferences < MagMaxReferences)
			{
				MagRefIdx = MagNumReferences;
				MagRefsInBodyFrame[MagRefIdx] = calMag;
				MagRefsInWorldFrame[MagRefIdx] = Q.Rotate(calMag).Normalized();
				MagNumReferences++;
			}
		}

		if (MagRefIdx >= 0)
		{
			Vector3f magEstimated = Qinv.Rotate(MagRefsInWorldFrame[MagRefIdx]);
			Vector3f magMeasured = calMag.Normalized();

			// Correction is computed in the horizontal plane (in the world frame)
			Vector3f yawCorrection = SensorFusion_ComputeCorrection(magMeasured.ProjectToPlane(up),
					magEstimated.ProjectToPlane(up));

			if (fabs(up.Dot(magEstimated - magMeasured)) < maxTiltError)
			{
				MagRefScore += 2;
			}
			else // If the vertical angle is wrong, decrease the score
			{
				MagRefScore -= 1;
				proportionalGain = integralGain = 0;
			}
			gyroCorrected += (yawCorrection * proportionalGain);
			GyroOffset -= (yawCorrection * integralGain * DeltaT);
		}
	}

	// Update the orientation quaternion based on the corrected angular velocity vector
	Q = Q * Quatf(gyroCorrected, gyroCorrected.Length() * DeltaT);

	// The quaternion magnitude may slowly drift due to numerical error,
	// so it is periodically normalized.
	if (Stage % 500 == 0)
		Q.Normalize();
}
void OvrSliderComponent::GetVerticalSliderParms( VRMenu & menu, VRMenuId_t const parentId, VRMenuId_t const rootId, Posef const & rootLocalPose, VRMenuId_t const scrubberId, VRMenuId_t const bubbleId, float const sliderFrac, Vector3f const & localSlideDelta, float const minValue, float const maxValue, float const sensitivityScale, Array< VRMenuObjectParms const* > & parms ) { Vector3f const fwd( 0.0f, 0.0f, -1.0f ); Vector3f const right( 1.0f, 0.0f, 0.0f ); Vector3f const up( 0.0f, 1.0f, 0.0f ); // would be nice to determine these sizes from the images, but we do not load // images until much later, meaning we'd need to do the positioning after creation / init. float const SLIDER_BUBBLE_WIDTH = 59.0f * VRMenuObject::DEFAULT_TEXEL_SCALE; float const SLIDER_BUBBLE_CENTER = 33.0f * VRMenuObject::DEFAULT_TEXEL_SCALE; float const SLIDER_TRACK_WIDTH = 9.0f * VRMenuObject::DEFAULT_TEXEL_SCALE; float const TRACK_OFFSET = 35.0f * VRMenuObject::DEFAULT_TEXEL_SCALE; float const vertical = true; // add parms for the root object that holds all the slider components { Array< VRMenuComponent* > comps; comps.PushBack( new OvrSliderComponent( menu, sliderFrac, localSlideDelta, minValue, maxValue, sensitivityScale, rootId, scrubberId, VRMenuId_t(), bubbleId ) ); Array< VRMenuSurfaceParms > surfParms; char const * text = "slider_root"; Vector3f scale( 1.0f ); Posef pose( rootLocalPose ); Posef textPose( Quatf(), Vector3f( 0.0f ) ); Vector3f textScale( 1.0f ); VRMenuFontParms fontParms; VRMenuObjectFlags_t objectFlags; VRMenuObjectInitFlags_t initFlags( VRMENUOBJECT_INIT_FORCE_POSITION ); VRMenuObjectParms * itemParms = new VRMenuObjectParms( VRMENU_CONTAINER, comps, surfParms, text, pose, scale, textPose, textScale, fontParms, rootId, objectFlags, initFlags ); itemParms->ParentId = parentId; parms.PushBack( itemParms ); } // add parms for the base image that underlays the whole slider { Array< VRMenuComponent* > comps; Array< VRMenuSurfaceParms > surfParms; VRMenuSurfaceParms 
baseParms( "base", GetSliderImage( SLIDER_IMAGE_BASE, vertical ), SURFACE_TEXTURE_DIFFUSE, NULL, SURFACE_TEXTURE_MAX, NULL, SURFACE_TEXTURE_MAX ); surfParms.PushBack( baseParms ); char const * text = "base"; Vector3f scale( 1.0f ); Posef pose( Quatf(), Vector3f() + fwd * 0.1f ); Posef textPose( Quatf(), Vector3f( 0.0f ) ); Vector3f textScale( 1.0f ); VRMenuFontParms fontParms; VRMenuObjectFlags_t objectFlags( VRMENUOBJECT_DONT_RENDER_TEXT ); VRMenuObjectInitFlags_t initFlags( VRMENUOBJECT_INIT_FORCE_POSITION ); VRMenuObjectParms * itemParms = new VRMenuObjectParms( VRMENU_BUTTON, comps, surfParms, text, pose, scale, textPose, textScale, fontParms, VRMenuId_t(), objectFlags, initFlags ); itemParms->ParentId = rootId; parms.PushBack( itemParms ); } // add parms for the track image { Array< VRMenuComponent* > comps; Array< VRMenuSurfaceParms > surfParms; VRMenuSurfaceParms baseParms( "track", GetSliderImage( SLIDER_IMAGE_TRACK, vertical ), SURFACE_TEXTURE_DIFFUSE, NULL, SURFACE_TEXTURE_MAX, NULL, SURFACE_TEXTURE_MAX ); surfParms.PushBack( baseParms ); char const * text = "track"; Vector3f scale( 1.0f ); Posef pose( Quatf(), up * TRACK_OFFSET + fwd * 0.09f ); Posef textPose( Quatf(), Vector3f( 0.0f ) ); Vector3f textScale( 1.0f ); VRMenuFontParms fontParms; VRMenuObjectFlags_t objectFlags( VRMENUOBJECT_DONT_RENDER_TEXT ); VRMenuObjectInitFlags_t initFlags( VRMENUOBJECT_INIT_FORCE_POSITION ); VRMenuObjectParms * itemParms = new VRMenuObjectParms( VRMENU_BUTTON, comps, surfParms, text, pose, scale, textPose, textScale, fontParms, VRMenuId_t(), objectFlags, initFlags ); itemParms->ParentId = rootId; parms.PushBack( itemParms ); } // add parms for the filled track image { Array< VRMenuComponent* > comps; Array< VRMenuSurfaceParms > surfParms; VRMenuSurfaceParms baseParms( "track_full", GetSliderImage( SLIDER_IMAGE_TRACK_FULL, vertical ), SURFACE_TEXTURE_DIFFUSE, NULL, SURFACE_TEXTURE_MAX, NULL, SURFACE_TEXTURE_MAX ); surfParms.PushBack( baseParms ); char const * text = 
"track_full"; Vector3f scale( 1.0f ); Posef pose( Quatf(), up * TRACK_OFFSET + fwd * 0.08f ); Posef textPose( Quatf(), Vector3f( 0.0f ) ); Vector3f textScale( 1.0f ); VRMenuFontParms fontParms; VRMenuObjectFlags_t objectFlags( VRMENUOBJECT_DONT_RENDER_TEXT ); VRMenuObjectInitFlags_t initFlags( VRMENUOBJECT_INIT_FORCE_POSITION ); VRMenuObjectParms * itemParms = new VRMenuObjectParms( VRMENU_BUTTON, comps, surfParms, text, pose, scale, textPose, textScale, fontParms, VRMenuId_t(), objectFlags, initFlags ); itemParms->ParentId = rootId; parms.PushBack( itemParms ); } // add parms for the scrubber { Array< VRMenuComponent* > comps; Array< VRMenuSurfaceParms > surfParms; VRMenuSurfaceParms baseParms( "scrubber", GetSliderImage( SLIDER_IMAGE_SCRUBBER, vertical ), SURFACE_TEXTURE_DIFFUSE, NULL, SURFACE_TEXTURE_MAX, NULL, SURFACE_TEXTURE_MAX ); surfParms.PushBack( baseParms ); char const * text = "scrubber"; Vector3f scale( 1.0f ); Posef pose( Quatf(), up * TRACK_OFFSET + fwd * 0.07f ); Posef textPose( Quatf(), Vector3f( 0.0f ) ); Vector3f textScale( 1.0f ); VRMenuFontParms fontParms; VRMenuObjectFlags_t objectFlags( VRMENUOBJECT_DONT_RENDER_TEXT ); VRMenuObjectInitFlags_t initFlags( VRMENUOBJECT_INIT_FORCE_POSITION ); VRMenuObjectParms * itemParms = new VRMenuObjectParms( VRMENU_BUTTON, comps, surfParms, text, pose, scale, textPose, textScale, fontParms, scrubberId, objectFlags, initFlags ); itemParms->ParentId = rootId; parms.PushBack( itemParms ); } // add parms for the bubble { Array< VRMenuComponent* > comps; Array< VRMenuSurfaceParms > surfParms; VRMenuSurfaceParms baseParms( "bubble", GetSliderImage( SLIDER_IMAGE_BUBBLE, vertical ), SURFACE_TEXTURE_DIFFUSE, NULL, SURFACE_TEXTURE_MAX, NULL, SURFACE_TEXTURE_MAX ); surfParms.PushBack( baseParms ); char const * text = NULL; Vector3f scale( 1.0f ); Posef pose( Quatf(), right * ( SLIDER_TRACK_WIDTH + SLIDER_BUBBLE_CENTER ) + fwd * 0.06f ); const float bubbleTextScale = 0.66f; const float bubbleTextCenterOffset = 
SLIDER_BUBBLE_CENTER - ( SLIDER_BUBBLE_WIDTH * 0.5f ); const Vector3f textPosition = right * bubbleTextCenterOffset; Posef textPose( Quatf(), textPosition ); Vector3f textScale( 1.0f ); VRMenuFontParms fontParms( true, true, false, false, true, bubbleTextScale ); VRMenuObjectFlags_t objectFlags; VRMenuObjectInitFlags_t initFlags( VRMENUOBJECT_INIT_FORCE_POSITION ); VRMenuObjectParms * itemParms = new VRMenuObjectParms( VRMENU_BUTTON, comps, surfParms, text, pose, scale, textPose, textScale, fontParms, bubbleId, objectFlags, initFlags ); itemParms->ParentId = scrubberId; parms.PushBack( itemParms ); } }
JNIEXPORT jobject JNICALL Java_com_eje_1c_meganekko_Scene_getViewOrientation(JNIEnv * jni, jobject obj, jlong jscene) { Scene* scene = reinterpret_cast<Scene*>(jscene); Quatf orientation = Quatf(scene->GetCenterViewMatrix().InvertedHomogeneousTransform()); return ToJava(jni, orientation); }
//==============================
// BitmapFontSurfaceLocal::DrawText3D
// Generates a quad (4 vertices) per glyph for 'text' at world position 'pos',
// oriented by 'normal'/'up' (or screen-aligned when parms.Billboard is set),
// and appends the result as a new vertex block to VertexBlocks for later
// submission.  Handles multi-line text via '\n' and optional horizontal /
// vertical centering.  Returns without drawing for NULL or empty text.
void BitmapFontSurfaceLocal::DrawText3D( BitmapFont const & font, fontParms_t const & parms,
		Vector3f const & pos, Vector3f const & normal, Vector3f const & up,
		float scale, Vector4f const & color, char const * text )
{
	if ( text == NULL || text[0] == '\0' )
	{
		return; // nothing to do here, move along
	}
	// TODO: multiple line support -- we would need to calculate the horizontal width
	// for each string ending in \n
	size_t len;
	float width;
	float height;
	float ascent;
	float descent;
	int const MAX_LINES = 128;
	float lineWidths[MAX_LINES];
	int numLines;
	// measure the whole string up front: glyph count, bounds, and per-line widths
	AsLocal( font ).CalcTextMetrics( text, len, width, height, ascent, descent, lineWidths, MAX_LINES, numLines );
	// LOG( "BitmapFontSurfaceLocal::DrawText3D( \"%s\" %s %s ) : width = %.2f, height = %.2f, numLines = %i, fh = %.2f",
	// text, parms.CenterVert ? "cv" : "", parms.CenterHoriz ? "ch" : "",
	// width, height, numLines, AsLocal( font ).GetFontInfo().FontHeight );
	if ( len == 0 )
	{
		return;
	}
	DROID_ASSERT( normal.IsNormalized(), "BitmapFont" );
	DROID_ASSERT( up.IsNormalized(), "BitmapFont" );
	const FontInfoType & fontInfo = AsLocal( font ).GetFontInfo();
	float imageWidth = (float)AsLocal( font ).GetImageWidth();
	float const xScale = AsLocal( font ).GetFontInfo().ScaleFactor * scale;
	float const yScale = AsLocal( font ).GetFontInfo().ScaleFactor * scale;
	// allocate a vertex block: one quad (4 verts) per glyph
	size_t numVerts = 4 * len;
	VertexBlockType vb( font, numVerts, pos, Quatf(), parms.Billboard, parms.TrackRoll );
	// basis vectors for laying out glyphs; billboarded text uses the fixed
	// screen-space axes instead of the caller-supplied orientation
	Vector3f const right = up.Cross( normal );
	Vector3f const r = ( parms.Billboard ) ? Vector3f( 1.0f, 0.0f, 0.0f ) : right;
	Vector3f const u = ( parms.Billboard ) ? Vector3f( 0.0f, 1.0f, 0.0f ) : up;
	Vector3f curPos( 0.0f );
	if ( parms.CenterVert )
	{
		float const vofs = ( height * 0.5f ) - ascent;
		curPos += u * ( vofs * scale );
	}
	Vector3f basePos = curPos;    // pen position at the start of the current line
	if ( parms.CenterHoriz )
	{
		curPos -= r * ( lineWidths[0] * 0.5f * scale );
	}
	Vector3f lineInc = u * ( fontInfo.FontHeight * yScale );
	float const distanceScale = imageWidth / FontInfoType::DEFAULT_SCALE_FACTOR;
	// per-vertex parameters for the SDF font shader, packed into 4 bytes
	const uint8_t fontParms[4] =
	{
		(uint8_t)( OVR::Alg::Clamp( parms.AlphaCenter + fontInfo.CenterOffset, 0.0f, 1.0f ) * 255 ),
		(uint8_t)( OVR::Alg::Clamp( parms.ColorCenter + fontInfo.CenterOffset, 0.0f, 1.0f ) * 255 ),
		(uint8_t)( OVR::Alg::Clamp( distanceScale, 1.0f, 255.0f ) ),
		0
	};
	int iColor = ColorToABGR( color );
	int curLine = 0;
	fontVertex_t * v = vb.Verts;
	char const * p = text;
	size_t i = 0;
	// decode UTF-8 one code point at a time; i indexes the glyph's quad
	uint32_t charCode = UTF8Util::DecodeNextChar( &p );
	for ( ; charCode != '\0'; i++, charCode = UTF8Util::DecodeNextChar( &p ) )
	{
		OVR_ASSERT( i < len );
		if ( charCode == '\n' && curLine < numLines && curLine < MAX_LINES )
		{
			// move to next line
			curLine++;
			basePos -= lineInc;
			curPos = basePos;
			if ( parms.CenterHoriz )
			{
				curPos -= r * ( lineWidths[curLine] * 0.5f * scale );
			}
		}
		FontGlyphType const & g = AsLocal( font ).GlyphForCharCode( charCode );
		// texture coordinates of the glyph's rectangle in the font atlas
		float s0 = g.X;
		float t0 = g.Y;
		float s1 = ( g.X + g.Width );
		float t1 = ( g.Y + g.Height );
		float bearingX = g.BearingX * xScale;
		float bearingY = g.BearingY * yScale ;
		float rw = ( g.Width + g.BearingX ) * xScale;
		float rh = ( g.Height - g.BearingY ) * yScale;
		// NOTE(review): the UInt32 stores below type-pun the 4-byte rgba /
		// fontParms vertex fields to write them in one move; assumes 4-byte
		// alignment of those fields in fontVertex_t.
		// lower left
		v[i * 4 + 0].xyz = curPos + ( r * bearingX ) - ( u * rh );
		v[i * 4 + 0].s = s0;
		v[i * 4 + 0].t = t1;
		*(UInt32*)(&v[i * 4 + 0].rgba[0]) = iColor;
		*(UInt32*)(&v[i * 4 + 0].fontParms[0]) = *(UInt32*)(&fontParms[0]);
		// upper left
		v[i * 4 + 1].xyz = curPos + ( r * bearingX ) + ( u * bearingY );
		v[i * 4 + 1].s = s0;
		v[i * 4 + 1].t = t0;
		*(UInt32*)(&v[i * 4 + 1].rgba[0]) = iColor;
		*(UInt32*)(&v[i * 4 + 1].fontParms[0]) = *(UInt32*)(&fontParms[0]);
		// upper right
		v[i * 4 + 2].xyz = curPos + ( r * rw ) + ( u * bearingY );
		v[i * 4 + 2].s = s1;
		v[i * 4 + 2].t = t0;
		*(UInt32*)(&v[i * 4 + 2].rgba[0]) = iColor;
		*(UInt32*)(&v[i * 4 + 2].fontParms[0]) = *(UInt32*)(&fontParms[0]);
		// lower right
		v[i * 4 + 3].xyz = curPos + ( r * rw ) - ( u * rh );
		v[i * 4 + 3].s = s1;
		v[i * 4 + 3].t = t1;
		*(UInt32*)(&v[i * 4 + 3].rgba[0]) = iColor;
		*(UInt32*)(&v[i * 4 + 3].fontParms[0]) = *(UInt32*)(&fontParms[0]);
		// advance to start of next char
		curPos += r * ( g.AdvanceX * xScale );
	}
	// add the new vertex block to the array of vertex blocks
	VertexBlocks.PushBack( vb );
}
Quatf Player::GetOrientation(bool baseOnly) { Quatf baseQ = Quatf(Vector3f(0,1,0), GetApparentBodyYaw().Get()); return baseOnly ? baseQ : baseQ * HeadPose.Rotation; }
Vector3f Player::GetHeadPosition(ovrTrackingOrigin trackingOrigin) { return GetBodyPos(trackingOrigin) + Quatf(Vector3f(0, 1, 0), GetApparentBodyYaw().Get()).Rotate(HeadPose.Translation); }
// Applies a trackball rotation to the manipulated transform based on cursor
// movement since the last frame.  The rotation is computed in view space around
// the object's bounding-sphere center projected to the screen, optionally
// constrained to a locked axis, then transformed back into model space and
// concatenated onto the current transform.  Returns true when a rotation was
// applied, false when the cursor did not move or preconditions failed.
bool TrackballTransformManipulator::rotate()
{
	if ( ( getCurrentX() != getLastX() ) || ( getCurrentY() != getLastY() ) )
	{
		DP_ASSERT( getViewState()->getCamera().isPtrTo<FrustumCamera>() );
		TransformSharedPtr transform = m_transformPath->getTail().staticCast<Transform>();
		FrustumCameraSharedPtr const& camera = getViewState()->getCamera().staticCast<FrustumCamera>();
		if ( camera && transform )
		{
			unsigned int rtWidth = getRenderTarget()->getWidth();
			unsigned int rtHeight = getRenderTarget()->getHeight();
			Vec2f camWinSize = camera->getWindowSize();
			// guard against a degenerate render target or camera window
			if ( ( 0 < rtHeight ) && ( 0 < rtWidth ) && ( FLT_EPSILON < fabs( camWinSize[0] ) ) && ( FLT_EPSILON < fabs( camWinSize[1] ) ) )
			{
				// get all the matrices needed here
				Mat44f m2w, w2m, w2v, v2w, v2s, m2v;
				m_transformPath->getModelToWorldMatrix( m2w, w2m ); // model->world and world->model
				w2v = camera->getWorldToViewMatrix(); // world->view
				v2w = camera->getViewToWorldMatrix(); // view->world
				v2s = camera->getProjection(); // view->screen (normalized)
				m2v = m2w * w2v;
				const Sphere3f& bs = transform->getBoundingSphere();
				// center of the object in view coordinates
				Vec4f centerV = Vec4f( bs.getCenter(), 1.0f ) * m2v;
				DP_ASSERT( fabs( centerV[3] - 1.0f ) < FLT_EPSILON );
				// center of the object in normalized screen coordinates
				Vec4f centerNS = centerV * v2s;
				DP_ASSERT( centerNS[3] != 0.0f );
				centerNS /= centerNS[3];
				// center of the object in screen space
				Vec2f centerS( rtWidth * ( 1 + centerNS[0] ) / 2, rtHeight * ( 1 - centerNS[1] ) / 2 );
				// move the input points relative to the center
				// move the input points absolutely
				//Vec2f last( m_orbitCursor );
				Vec2f last( getLastCursorPosition() );
				Vec2f p0( last[0] - centerS[0], centerS[1] - last[1] );
				Vec2f p1( getCurrentX() - centerS[0], centerS[1] - getCurrentY() );
				DP_ASSERT( p0[0] != p1[0] || p0[1] != p1[1] );
				// get the scaling (from model to view)
				Vec3f scaling, translation;
				Quatf orientation, scaleOrientation;
				decompose( m2v, translation, orientation, scaling, scaleOrientation );
				float maxScale = std::max( scaling[0], std::max( scaling[1], scaling[2] ) );
				DP_ASSERT( FLT_EPSILON < fabs( maxScale ) );
				// determine the radius in screen space (in the centers depth)
				Vec2f centerWindowSize = - centerV[2] / getViewState()->getTargetDistance() * camWinSize;
				float radius = bs.getRadius() * maxScale * rtWidth / centerWindowSize[0];
				// with p0, p1, and the radius determine the axis and angle of rotation via the Trackball utility
				// => axis is in view space then
				Vec3f axis;
				float angle;
				m_trackball.setSize( radius );
				m_trackball.apply( p0, p1, axis, angle );
				// axis locking: when a lock is active, replace the trackball
				// axis with a fixed axis chosen by the drag direction; a zero
				// delta along the locked direction means no rotation this frame
				float dx = p1[0]-p0[0];
				float dy = p1[1]-p0[1];
				checkLockAxis(dx, dy);
				if ( m_activeLockAxis[static_cast<size_t>(Axis::X)] )
				{
					if ( dx < 0 )
						axis = Vec3f(0.f, -1.f, 0.f);
					else if ( dx > 0)
						axis = Vec3f(0.f, 1.f, 0.f);
					else
						return false;
				}
				else if ( m_activeLockAxis[static_cast<size_t>(Axis::Y)] )
				{
					if ( dy < 0 )
						axis = Vec3f(1.f, 0.f, 0.f);
					else if ( dy > 0)
						axis = Vec3f(-1.f, 0.f, 0.f);
					else
						return false;
				}
				// transform axis back into model space
				axis = Vec3f( Vec4f( axis, 0.0f ) * v2w * w2m );
				axis.normalize();
				// create the rotation around the center (in model space)
				Trafo trafo;
				trafo.setCenter( bs.getCenter() );
				trafo.setOrientation( Quatf( axis, angle ) );
				// concatenate this rotation with the current transformation
				trafo.setMatrix( transform->getTrafo().getMatrix() * trafo.getMatrix() );
				// concatenate this rotation with the original transformation
				//trafo.setMatrix( m_matrix * trafo.getMatrix() );
				// set the current transform
				transform->setTrafo( trafo );
				return true;
			}
		}
	}
	return false;
}