void FrameTimeManager::GetTimewarpMatrices(ovrHmd hmd, ovrEyeType eyeId,
                                           ovrPosef renderPose, ovrMatrix4f twmOut[2])
{
    // Computes the two timewarp delta matrices for one eye: the rotation from
    // the orientation the frame was rendered with to the predicted head
    // orientation at the start (twmOut[0]) and end (twmOut[1]) of scanout.
    if (!hmd)
    {
        return;
    }

    // Predicted display times for the beginning and end of this eye's scanout.
    double predictedTimes[2] = { 0.0, 0.0 };
    GetTimewarpPredictions(eyeId, predictedTimes);

    //HMDState* p = (HMDState*)hmd;
    ovrTrackingState stateAtScanoutStart = ovrHmd_GetTrackingState(hmd, predictedTimes[0]);
    ovrTrackingState stateAtScanoutEnd   = ovrHmd_GetTrackingState(hmd, predictedTimes[1]);

    // Latch the IMU-time baseline the first time through.
    if (TimewarpIMUTimeSeconds == 0.0)
    {
        // TODO: Figure out why this is not as accurate as ovr_GetTimeInSeconds()
        //TimewarpIMUTimeSeconds = stateAtScanoutStart.RawSensorData.TimeInSeconds;
        TimewarpIMUTimeSeconds = ovr_GetTimeInSeconds();
    }

    // Delta rotation = inverse(render orientation) * predicted orientation.
    Quatf predictedStartQuat = stateAtScanoutStart.HeadPose.ThePose.Orientation;
    Quatf predictedEndQuat   = stateAtScanoutEnd.HeadPose.ThePose.Orientation;
    Quatf renderQuatInv      = renderPose.Orientation; //EyeRenderPoses[eyeId].Orientation;
    renderQuatInv.Invert();

    Matrix4f warpStartMat(renderQuatInv * predictedStartQuat);
    Matrix4f warpEndMat  (renderQuatInv * predictedEndQuat);

    // The real-world orientations have:                                 X=right, Y=up,   Z=backwards.
    // The vectors inside the mesh are in NDC to keep the shader simple: X=right, Y=down, Z=forwards.
    // So we need to perform a similarity transform on this delta matrix.
    // The verbose code would look like this:
    /*
    Matrix4f matBasisChange;
    matBasisChange.SetIdentity();
    matBasisChange.M[0][0] =  1.0f;
    matBasisChange.M[1][1] = -1.0f;
    matBasisChange.M[2][2] = -1.0f;
    Matrix4f matBasisChangeInv = matBasisChange.Inverted();
    matRenderFromNow = matBasisChangeInv * matRenderFromNow * matBasisChange;
    */
    // ...but all of the above is a constant transform, and flipping the signs
    // of the Y&Z rows and then the Y&Z columns cancels everything except four
    // entries:
    // +++                        +--                     +--
    // +++ -> flip Y&Z columns -> +-- -> flip Y&Z rows -> -++
    // +++                        +--                     -++
    warpStartMat.M[0][1] = -warpStartMat.M[0][1];
    warpStartMat.M[0][2] = -warpStartMat.M[0][2];
    warpStartMat.M[1][0] = -warpStartMat.M[1][0];
    warpStartMat.M[2][0] = -warpStartMat.M[2][0];

    warpEndMat.M[0][1] = -warpEndMat.M[0][1];
    warpEndMat.M[0][2] = -warpEndMat.M[0][2];
    warpEndMat.M[1][0] = -warpEndMat.M[1][0];
    warpEndMat.M[2][0] = -warpEndMat.M[2][0];

    twmOut[0] = warpStartMat;
    twmOut[1] = warpEndMat;
}
// Esempio n. 2 (Example 2) — scraped-snippet separator, commented out so the file parses.
// 0
//----------------------------------------------------------------------
// Renders the position tracker (sensor) and its tracking volume for one eye:
// head/cone/line models are placed at the live tracker pose, the stand is
// dropped to floor height (or a fixed stalk height at eye-level origin), and
// everything is tinted by the mid-eye point's distance to the tracking
// boundary (transparent white -> yellow -> red ramp; solid red when position
// tracking is lost).  Drawn twice: once depth-tested (bright) and once with
// depth testing off (dim outline visible through world geometry).
void Tracker::Draw(ovrSession Session, RenderDevice* pRender, Player ThePlayer, ovrTrackingOrigin TrackingOriginType,
	               bool Sitting, float ExtraSittingAltitude, Matrix4f * /*ViewFromWorld*/, int eye, ovrPosef * EyeRenderPose)
{
    OVR_UNUSED2(ExtraSittingAltitude, Sitting);

	// Don't render if not ready
	if (!TrackerHeadModel) return;

	// Initial rendering setup
	pRender->SetDepthMode(true, true);
	pRender->SetCullMode(OVR::Render::D3D11::RenderDevice::Cull_Off);

	// Draw in local frame of reference, so get view matrix
	Quatf eyeRot = EyeRenderPose[eye].Orientation;
	Vector3f up = eyeRot.Rotate(UpVector);
	Vector3f forward = eyeRot.Rotate(ForwardVector);
	Vector3f viewPos = EyeRenderPose[eye].Position;
	Matrix4f localViewMat = Matrix4f::LookAtRH(viewPos, viewPos + forward, up);

	// Get some useful values about the situation
	// NOTE(review): headWorldPos is computed but never read below — dead local?
	Vector3f          headWorldPos  = ThePlayer.GetHeadPosition(TrackingOriginType);
	ovrTrackerPose    trackerPose   = ovr_GetTrackerPose(Session, 0);
	Vector3f          centreEyePos  = ((Vector3f)(EyeRenderPose[0].Position) + (Vector3f)(EyeRenderPose[1].Position))*0.5f;
	double            ftiming       = ovr_GetPredictedDisplayTime(Session, 0);
	ovrTrackingState  trackingState = ovr_GetTrackingState(Session, ftiming, ovrTrue);
	bool              tracked       = trackingState.StatusFlags & ovrStatus_PositionTracked ? true : false;

	// Find altitude of stand.
    // If we are at floor level, display the tracker stand on the physical floor.
    // If are using eye level coordinate system, just render the standard height of the stalk.
    float altitudeOfFloorInLocalSpace;
    if (TrackingOriginType == ovrTrackingOrigin_FloorLevel)
        altitudeOfFloorInLocalSpace = 0.01f;
    else
        altitudeOfFloorInLocalSpace = trackerPose.Pose.Position.y - 0.22f;  //0.18f;

	Vector3f localStandPos = Vector3f(trackerPose.Pose.Position.x, altitudeOfFloorInLocalSpace,
                                      trackerPose.Pose.Position.z);

	// Set position of tracker models according to pose.
	TrackerHeadModel->SetPosition(trackerPose.Pose.Position);
	TrackerHeadModel->SetOrientation(trackerPose.Pose.Orientation);
	
    // We scale the stalk so that it has correct physical height.
    // (0.0135f is the vertical offset of the stalk's attachment point.)
    Matrix4f stalkScale = Matrix4f::Scaling(1.0f, trackerPose.Pose.Position.y - altitudeOfFloorInLocalSpace - 0.0135f, 1.0f);
    TrackerStalkModel->SetMatrix(Matrix4f::Translation(Vector3f(trackerPose.Pose.Position) - Vector3f(0,0.0135f,0)) * stalkScale *
                                 Matrix4f(TrackerStalkModel->GetOrientation()));
	
    TrackerStandModel->SetPosition(localStandPos);
	TrackerConeModel->SetPosition(trackerPose.Pose.Position);
	TrackerConeModel->SetOrientation(trackerPose.Pose.Orientation);
	TrackerLinesModel->SetPosition(trackerPose.Pose.Position);
	TrackerLinesModel->SetOrientation(trackerPose.Pose.Orientation);


    if (trackerLinesAlwaysVisible)
        pRender->SetDepthMode(false, true);

	// Set rendering tint proportional to proximity, and red if not tracked. 
	float dist = DistToBoundary(centreEyePos, trackerPose.Pose, true);    
	 //OVR_DEBUG_LOG(("Dist = %0.3f\n", dist));
    
    // This defines a color ramp at specified distances from the edge.
    // Display starting at 0.4 - 0.2 meter [alpha 0->1]
    // Turn to yellow after [0.2]
    float       distThreshods[4]   = { 0.0f, 0.1f, 0.2f, 0.35f };
    Vector4f    thresholdColors[4] = {
        Vector4f(1.0f, 0.3f, 0.0f, 1.0f),   // Yellow-red
        Vector4f(1.0f, 1.0f, 0.0f, 0.8f),   // Yellow
        Vector4f(1.0f, 1.0f, 1.0f, 0.6f),   // White
        Vector4f(1.0f, 1.0f, 1.0f, 0.0f)    // White-transparent
    };

    // Assign tint based on the lookup table
    Vector4f globalTint = Vector4f(1, 1, 1, 0);

    int distSearch = 0;
    if (dist <= 0.0f)
        dist = 0.001f;
    // Find the ramp segment containing dist and linearly interpolate between
    // its two endpoint colors.  Beyond the last threshold the tint stays
    // fully transparent.
    for (; distSearch < sizeof(distThreshods) / sizeof(distThreshods[0]) - 1; distSearch++)
    {
        if (dist < distThreshods[distSearch+1])
        {
            float startT = distThreshods[distSearch];
            float endT   = distThreshods[distSearch+1];
            float factor = (dist - startT) / (endT - startT);

            globalTint = thresholdColors[distSearch] * (1.0f - factor) +
                         thresholdColors[distSearch + 1] * factor;
            break;
        }
    }
    
    if (!tracked)
        globalTint = Vector4f(1, 0, 0, 1);
    
    // NOTE(review): the tint is uploaded here, *before* minimumAlphaOfTracker
    // is applied below — so the minimum alpha only affects the visibility
    // checks and the second (dim) pass, never the bright pass's tint.
    // Looks like these two statements may be in the wrong order — confirm.
    pRender->SetGlobalTint(globalTint);

    if (minimumAlphaOfTracker > globalTint.w)
        globalTint.w = minimumAlphaOfTracker;

    // We try to draw twice here: Once with Z clipping to give a bright image,
    // and once with Z testing off to give a dim outline for those cases.

    // Solid background
    if (globalTint.w > 0.01)
    {
        pRender->SetDepthMode(true, true);

        // Draw the tracker representation
        LOCAL_RenderModelWithAlpha(pRender, TrackerStandModel, localViewMat);
        LOCAL_RenderModelWithAlpha(pRender, TrackerStalkModel, localViewMat);
        LOCAL_RenderModelWithAlpha(pRender, TrackerHeadModel, localViewMat);
        LOCAL_RenderModelWithAlpha(pRender, TrackerLinesModel, localViewMat);
        if (drawWalls)
            LOCAL_RenderModelWithAlpha(pRender, TrackerConeModel, localViewMat);
    }

    // Dim pass: clamp alpha way down and draw again without depth testing so
    // a faint outline shows through occluding geometry.
    if (globalTint.w > 0.01f)
        globalTint.w = 0.01f;    
    pRender->SetGlobalTint(globalTint);
    pRender->SetDepthMode(false, true);
    LOCAL_RenderModelWithAlpha(pRender, TrackerStandModel, localViewMat);
    LOCAL_RenderModelWithAlpha(pRender, TrackerStalkModel, localViewMat);
    LOCAL_RenderModelWithAlpha(pRender, TrackerHeadModel, localViewMat);
    LOCAL_RenderModelWithAlpha(pRender, TrackerLinesModel, localViewMat);
    if (drawWalls)
        LOCAL_RenderModelWithAlpha(pRender, TrackerConeModel, localViewMat);

	// Revert to rendering defaults
	pRender->SetGlobalTint(Vector4f(1, 1, 1, 1));
	pRender->SetCullMode(RenderDevice::Cull_Back);
	pRender->SetDepthMode(true, true);
}
// Esempio n. 3 (Example 3) — scraped-snippet separator, commented out so the file parses.
// 0
// Maps a tracked OpenNI skeleton onto the puppet: hip/knee heights become leg
// servo positions, hand positions (expressed in a body-aligned frame whose
// normal faces the z-axis) become arm servo positions, and the resulting
// values are periodically streamed to the Arduino as a comma-separated line.
void Puppeteer::update(SKELETON::SKELETON& skeleton)
{
    // Everything below depends on the shoulders; bail out when either one is
    // not tracked with any confidence.
    if ( skeleton.joints[XN_SKEL_LEFT_SHOULDER].confidence == 0 || skeleton.joints[XN_SKEL_RIGHT_SHOULDER].confidence == 0 ) {
        return;
    }
    // ----------------------------legs

    Vec3f &lHip = skeleton.joints[XN_SKEL_LEFT_HIP].position;
    Vec3f &rHip = skeleton.joints[XN_SKEL_RIGHT_HIP].position;
    Vec3f &lKnee = skeleton.joints[XN_SKEL_LEFT_KNEE].position;
    Vec3f &rKnee = skeleton.joints[XN_SKEL_RIGHT_KNEE].position;
    float legLenL = lKnee.distance(lHip);
    float legLenR = rKnee.distance(rHip);  // was misspelled "legLegR"
    // Normalized knee lift: 0 when the knee hangs a full thigh-length below
    // the hip, 1 when raised; scaled to the Arduino's unit range.
    float legPosL = arduinoUnit * math<float>::clamp(1.0f - (lHip.y - lKnee.y + legLenL * .25f) / (legLenL * 1.25f), 0.0f, 1.0f);
    float legPosR = arduinoUnit * math<float>::clamp(1.0f - (rHip.y - rKnee.y + legLenR * .25f) / (legLenR * 1.25f), 0.0f, 1.0f);

    // Reject implausible per-frame jumps relative to the previous value
    // (CHECK_DELTA is defined elsewhere — presumably a rate limiter; confirm).
    legPosL = CHECK_DELTA(legPosL, lastLegPosL);
    legPosR = CHECK_DELTA(legPosR, lastLegPosR);

    lastLegPosL = legPosL;
    // BUG FIX: previously assigned the right leg *length* (legLegR) here,
    // which corrupted the next frame's CHECK_DELTA for the right leg.
    lastLegPosR = legPosR;

    // ----------------------------hands
    shoulderL = skeleton.joints[XN_SKEL_LEFT_SHOULDER].position;
    shoulderR = skeleton.joints[XN_SKEL_RIGHT_SHOULDER].position;

    // calculate length for both left and right arms based on skeleton size
    armLenL = shoulderL.distance(skeleton.joints[XN_SKEL_LEFT_ELBOW].position)
              + skeleton.joints[XN_SKEL_LEFT_ELBOW].position.distance(skeleton.joints[XN_SKEL_LEFT_HAND].position);
    armLenR = shoulderR.distance(skeleton.joints[XN_SKEL_RIGHT_ELBOW].position)
              + skeleton.joints[XN_SKEL_RIGHT_ELBOW].position.distance(skeleton.joints[XN_SKEL_RIGHT_HAND].position);

    // get the 3 axis aligned to the body
    axisHoriz = (skeleton.joints[XN_SKEL_LEFT_SHOULDER].position - skeleton.joints[XN_SKEL_RIGHT_SHOULDER].position).normalized();
    axisVert = (skeleton.joints[XN_SKEL_NECK].position - skeleton.joints[XN_SKEL_TORSO].position).normalized();
    normal = axisHoriz.cross(axisVert).normalized();

    Vec3f v1 = Vec3f(0, 0, -1).normalized();
    Vec3f v2 = normal;
    // align rectangular region to z-axis
    Quatf q = Quatf( v1.cross(v2).normalized(), -acos(v1.dot(v2)) );
    Matrix33<float> m1 = Matrix33<float>::createRotation(q.getAxis(), q.getAngle());
    // normal aligned with z-axis but rectangular region is rotated around the z-axis, we need to undo this rotation
    Vec3f p = m1.transformVec(axisVert);
    float theta = atan2(p.y, p.x);
    Matrix33<float> m2 = Matrix33<float>::createRotation(Vec3f::zAxis(), -theta + M_PI / 2.0f);
    normalizationMatrix = m2 * m1;

    // Hand positions relative to their shoulders, in the normalized frame;
    // each hand is clamped so it cannot cross the body's midline.
    handL = normalizationMatrix.transformVec(skeleton.joints[XN_SKEL_LEFT_HAND].position - shoulderL);
    if (handL.x > 0) handL.x = 0;
    handR = normalizationMatrix.transformVec(skeleton.joints[XN_SKEL_RIGHT_HAND].position - shoulderR);
    if (handR.x < 0) handR.x = 0;
    // ----------------------------send to arduino
    if (cinder::app::App::get()->getElapsedSeconds() - lastUpdateTime >= updateInterval) {
        lastUpdateTime = cinder::app::App::get()->getElapsedSeconds();
        // Normalize each hand coordinate into [0, arduinoUnit] using the arm
        // length as the reach scale.
        Vec3f handPosL = Vec3f(
                             arduinoUnit * math<float>::clamp((handL.x / armLenL) * -1.0f, 0.0f, 1.0f),
                             arduinoUnit * math<float>::clamp((handL.y + armLenL) / (armLenL * 2.0f), 0.0f, 1.0f),
                             arduinoUnit * math<float>::clamp((handL.z / armLenL) * -1.0f, 0.0f, 1.0f)
                         );
        Vec3f handPosR = Vec3f(
                             arduinoUnit * math<float>::clamp((handR.x / armLenR), 0.0f, 1.0f),
                             arduinoUnit * math<float>::clamp((handR.y + armLenR) / (armLenR * 2.0f), 0.0f, 1.0f),
                             arduinoUnit * math<float>::clamp((handR.z / armLenR) * -1.0f, 0.0f, 1.0f)
                         );

        handPosL.x = CHECK_DELTA(handPosL.x, lastHandPosL.x);
        handPosL.y = CHECK_DELTA(handPosL.y, lastHandPosL.y);
        handPosL.z = CHECK_DELTA(handPosL.z, lastHandPosL.z);
        handPosR.x = CHECK_DELTA(handPosR.x, lastHandPosR.x);
        handPosR.y = CHECK_DELTA(handPosR.y, lastHandPosR.y);
        handPosR.z = CHECK_DELTA(handPosR.z, lastHandPosR.z);


        if (Constants::Debug::USE_ARDUINO) {
            if (UserTracker::getInstance()->activeUserId == mUserTracked) {
                if (mUserTracked == 0 && ++mUserNoneFrames > 500) {
                    // reset every 500 frames if there's no active user
                    mUserNoneFrames = 0;
//					arduino->sendMessage("k|");
                } else {
                    // send normal message: "Lx,Ly,Lz,Rx,Ry,Rz,legL,legR|"
                    std::ostringstream message;
                    message << round(handPosL.x) << "," << round(handPosL.y) << "," << round(handPosL.z) << ","
                            << round(handPosR.x) << "," << round(handPosR.y) << "," << round(handPosR.z) << ","
                            << round(legPosL) << ","
                            << round(legPosR) << "|";
                    arduino->sendMessage(message.str());
//					std::cout << message.str() << std::endl;
                }
            } else {
                // Active user changed: re-sync tracking state.
                mUserTracked = UserTracker::getInstance()->activeUserId;
                mUserNoneFrames = 0;
//				arduino->sendMessage("k|");
            }
        }

        lastHandPosL.x = handPosL.x;
        lastHandPosL.y = handPosL.y;
        lastHandPosL.z = handPosL.z;
        lastHandPosR.x = handPosR.x;
        lastHandPosR.y = handPosR.y;
        lastHandPosR.z = handPosR.z;
    }


}
// Esempio n. 4 (Example 4) — scraped-snippet separator, commented out so the file parses.
// 0
//------------------------------------------------------------------------------
//!
//! Blends two skeletal animations 50/50 into a new animation.
//! The result's duration is the average of the two inputs, its sampling rate
//! is the max of the two, and every pose/bone orientation is the nlerp of the
//! corresponding (time-normalized) samples of startAnim and endAnim.
//! Side effect: both input animations are converted to relative form.
//! NOTE(review): assumes both animations drive the same skeleton with the
//! same bone count — eb0/eb1 are indexed by sb0's size; confirm callers
//! guarantee this.
RCP<SkeletalAnimation>
Puppeteer::blend(
   SkeletalAnimation* startAnim,
   SkeletalAnimation* endAnim
)
{
   DBG_BLOCK( os_pup, "Puppeteer::blend(" << startAnim << ", " << endAnim << ")" );
   RCP<SkeletalAnimation> anim = new SkeletalAnimation();
   anim->skeleton( startAnim->skeleton() );

   // Blending operates on parent-relative poses.
   startAnim->makeRelative();
   endAnim->makeRelative();

   // Compute duration, rate and number of poses.
   float duration = (startAnim->duration() + endAnim->duration()) * 0.5f;
   float rate     = CGM::max( startAnim->rate(), endAnim->rate() );
   uint numPoses  = uint(rate*duration) + 1;

   anim->reservePoses( numPoses );
   anim->rate( rate );
   anim->velocity( (startAnim->velocity() + endAnim->velocity())*0.5f );
   anim->offset( (startAnim->offset() + endAnim->offset())*0.5f );

   // Compute poses.
   SkeletalPose* sp0;
   SkeletalPose* sp1;
   SkeletalPose* ep0;
   SkeletalPose* ep1;

   for( uint p = 0; p < numPoses; ++p )
   {
      // BUG FIX: when rate*duration < 1, numPoses == 1 and the original
      // 0/0 division produced NaN, which propagated through every nlerp.
      float t = (numPoses > 1) ? float(p) / float(numPoses-1) : 0.0f;
      float st;
      float et;

      // Bracketing poses and intra-segment fractions at normalized time t.
      startAnim->getPosesClamped( startAnim->duration() * t, sp0, sp1, st );
      endAnim->getPosesClamped( endAnim->duration() * t, ep0, ep1, et );

      const SkeletalPose::BoneContainer& sb0 = sp0->bones();
      const SkeletalPose::BoneContainer& sb1 = sp1->bones();
      const SkeletalPose::BoneContainer& eb0 = ep0->bones();
      const SkeletalPose::BoneContainer& eb1 = ep1->bones();

      // nlerp is used instead of slerp throughout: cheaper, and adequate for
      // the small angular steps between adjacent poses.
      //Reff sref = sp0->referential().slerp( sp1->referential(), st );
      //Reff eref = ep0->referential().slerp( ep1->referential(), et );
      //Reff ref  = sref.slerp( eref, t );
      Reff sref = sp0->referential().nlerp( sp1->referential(), st );
      Reff eref = ep0->referential().nlerp( ep1->referential(), et );
      Reff ref  = sref.nlerp( eref, t );

      SkeletalPose* pose = anim->addPose( ref );
      pose->reserveBones( uint(sb0.size()) );
      for( uint i = 0; i < sb0.size(); ++i )
      {
         //Quatf sorient = sb0[i].slerp( sb1[i], st );
         //Quatf eorient = eb0[i].slerp( eb1[i], et );
         //Quatf orient  = sorient.slerp( eorient, t );
         Quatf sorient = sb0[i].nlerp( sb1[i], st );
         Quatf eorient = eb0[i].nlerp( eb1[i], et );
         Quatf orient  = sorient.nlerp( eorient, t );
         pose->addBone( orient );
      }
   }
   return anim;
}