コード例 #1
0
ファイル: JointState.cpp プロジェクト: BrianPrz/hifi
void JointState::copyState(const JointState& state) {
    // Copy the dynamic pose data from another joint state.
    _rotationInParentFrame = state._rotationInParentFrame;
    _animationPriority = state._animationPriority;
    _transform = state._transform;
    // Re-derive the cached world rotation from the freshly copied transform.
    _rotation = extractRotation(_transform);
    // _fbxJoint is deliberately NOT copied: it is static skeleton definition data.
}
コード例 #2
0
 // Pack an isometry into a 7-vector: [tx, ty, tz, qx, qy, qz, qw].
 Vector7d toVectorQT(const Isometry3d& t){
   Vector7d result;
   result.block<3,1>(0,0) = t.translation();
   Quaterniond rotation(extractRotation(t));
   rotation.normalize();
   result[3] = rotation.x();
   result[4] = rotation.y();
   result[5] = rotation.z();
   result[6] = rotation.w();
   return result;
 }
コード例 #3
0
ファイル: LeapMotionPlugin.cpp プロジェクト: SeijiEmery/hifi
// Translates the latest Leap Motion joint samples into avatar-frame controller
// poses and stores them in _poseStateMap.
// deltaTime             - seconds since the previous update; used for velocity estimates.
// inputCalibrationData  - sensor/avatar/HMD calibration matrices.
// joints / prevJoints   - current and previous-frame joint samples from the device.
void LeapMotionPlugin::InputDevice::update(float deltaTime, const controller::InputCalibrationData& inputCalibrationData,
        const std::vector<LeapMotionPlugin::LeapMotionJoint>& joints,
        const std::vector<LeapMotionPlugin::LeapMotionJoint>& prevJoints) {

    // Combined transform taking sensor-space data into the avatar's frame.
    glm::mat4 controllerToAvatar = glm::inverse(inputCalibrationData.avatarMat) * inputCalibrationData.sensorToWorldMat;
    glm::quat controllerToAvatarRotation = glmExtractRotation(controllerToAvatar);

    glm::vec3 hmdSensorPosition;    // HMD
    glm::quat hmdSensorOrientation; // HMD
    glm::vec3 leapMotionOffset;     // Desktop
    if (_isLeapOnHMD) {
        // Sensor mounted on the headset: joints are reported relative to the HMD.
        hmdSensorPosition = extractTranslation(inputCalibrationData.hmdSensorMat);
        hmdSensorOrientation = extractRotation(inputCalibrationData.hmdSensorMat);
    } else {
        // Desktop "zero" position is some distance above the Leap Motion sensor and half the avatar's shoulder-to-hand length 
        // in front of avatar.
        float halfShouldToHandLength = fabsf(extractTranslation(inputCalibrationData.defaultLeftHand).x
            - extractTranslation(inputCalibrationData.defaultLeftArm).x) / 2.0f;
        leapMotionOffset = glm::vec3(0.0f, _desktopHeightOffset, halfShouldToHandLength);
    }

    for (size_t i = 0; i < joints.size(); i++) {
        int poseIndex = LeapMotionJointIndexToPoseIndex((LeapMotionJointIndex)i);

        // A zero position marks an untracked joint this frame; publish a
        // default (invalid) pose so consumers know the data is absent.
        if (joints[i].position == Vectors::ZERO) {
            _poseStateMap[poseIndex] = controller::Pose();
            continue;
        }

        glm::vec3 pos;
        glm::quat rot;
        if (_isLeapOnHMD) {
            auto jointPosition = joints[i].position;
            const glm::vec3 HMD_EYE_TO_LEAP_OFFSET = glm::vec3(0.0f, 0.0f, -0.09f);  // Eyes to surface of Leap Motion.
            // Remap the HMD-mounted sensor's axes into HMD space, then carry the
            // point into world space via the HMD sensor pose.
            jointPosition = glm::vec3(-jointPosition.x, -jointPosition.z, -jointPosition.y) + HMD_EYE_TO_LEAP_OFFSET;
            jointPosition = hmdSensorPosition + hmdSensorOrientation * jointPosition;
            pos = transformPoint(controllerToAvatar, jointPosition);

            // Same axis remap applied to the orientation (negate x/z/y components).
            glm::quat jointOrientation = joints[i].orientation;
            jointOrientation = glm::quat(jointOrientation.w, -jointOrientation.x, -jointOrientation.z, -jointOrientation.y);
            rot = controllerToAvatarRotation * hmdSensorOrientation * jointOrientation;
        } else {
            // Desktop mode: shift by the calibrated "zero" offset, then rotate into the avatar frame.
            pos = controllerToAvatarRotation * (joints[i].position - leapMotionOffset);
            const glm::quat ZERO_HAND_ORIENTATION = glm::quat(glm::vec3(PI_OVER_TWO, PI, 0.0f));
            rot = controllerToAvatarRotation * joints[i].orientation * ZERO_HAND_ORIENTATION;
        }

        glm::vec3 linearVelocity, angularVelocity;
        if (i < prevJoints.size()) {
            // NOTE(review): the previous position is scaled by METERS_PER_CENTIMETER
            // while pos is not — presumably prevJoints stores centimeters; confirm units.
            linearVelocity = (pos - (prevJoints[i].position * METERS_PER_CENTIMETER)) / deltaTime;  // m/s
            // quat log imaginary part points along the axis of rotation, with length of one half the angle of rotation.
            glm::quat d = glm::log(rot * glm::inverse(prevJoints[i].orientation));
            angularVelocity = glm::vec3(d.x, d.y, d.z) / (0.5f * deltaTime); // radians/s
        }

        _poseStateMap[poseIndex] = controller::Pose(pos, rot, linearVelocity, angularVelocity);
    }
}
コード例 #4
0
ファイル: SkeletonModel.cpp プロジェクト: CryptArc/hifi
// Called within Model::simulate call, below.
void SkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
    const FBXGeometry& geometry = getFBXGeometry();

    Head* head = _owningAvatar->getHead();

    if (_owningAvatar->isMyAvatar()) {
        MyAvatar* myAvatar = static_cast<MyAvatar*>(_owningAvatar);

        Rig::HeadParameters headParams;
        headParams.enableLean = qApp->isHMDMode();
        headParams.leanSideways = head->getFinalLeanSideways();
        headParams.leanForward = head->getFinalLeanForward();
        headParams.torsoTwist = head->getTorsoTwist();

        if (qApp->isHMDMode()) {
            headParams.isInHMD = true;

            // get HMD position from sensor space into world space, and back into rig space
            glm::mat4 worldHMDMat = myAvatar->getSensorToWorldMatrix() * myAvatar->getHMDSensorMatrix();
            glm::mat4 rigToWorld = createMatFromQuatAndPos(getRotation(), getTranslation());
            glm::mat4 worldToRig = glm::inverse(rigToWorld);
            glm::mat4 rigHMDMat = worldToRig * worldHMDMat;

            headParams.rigHeadPosition = extractTranslation(rigHMDMat);
            headParams.rigHeadOrientation = extractRotation(rigHMDMat);
            headParams.worldHeadOrientation = extractRotation(worldHMDMat);
        } else {
            headParams.isInHMD = false;

            // We don't have a valid localHeadPosition.
            headParams.rigHeadOrientation = Quaternions::Y_180 * head->getFinalOrientationInLocalFrame();
            headParams.worldHeadOrientation = head->getFinalOrientationInWorldFrame();
        }

        headParams.leanJointIndex = geometry.leanJointIndex;
        headParams.neckJointIndex = geometry.neckJointIndex;
        headParams.isTalking = head->getTimeWithoutTalking() <= 1.5f;

        _rig->updateFromHeadParameters(headParams, deltaTime);

        Rig::HandParameters handParams;

        auto leftPose = myAvatar->getLeftHandControllerPoseInAvatarFrame();
        if (leftPose.isValid()) {
            handParams.isLeftEnabled = true;
            handParams.leftPosition = Quaternions::Y_180 * leftPose.getTranslation();
            handParams.leftOrientation = Quaternions::Y_180 * leftPose.getRotation();
        } else {
            handParams.isLeftEnabled = false;
        }

        auto rightPose = myAvatar->getRightHandControllerPoseInAvatarFrame();
        if (rightPose.isValid()) {
            handParams.isRightEnabled = true;
            handParams.rightPosition = Quaternions::Y_180 * rightPose.getTranslation();
            handParams.rightOrientation = Quaternions::Y_180 * rightPose.getRotation();
        } else {
            handParams.isRightEnabled = false;
        }

        handParams.bodyCapsuleRadius = myAvatar->getCharacterController()->getCapsuleRadius();
        handParams.bodyCapsuleHalfHeight = myAvatar->getCharacterController()->getCapsuleHalfHeight();
        handParams.bodyCapsuleLocalOffset = myAvatar->getCharacterController()->getCapsuleLocalOffset();

        _rig->updateFromHandParameters(handParams, deltaTime);

        Rig::CharacterControllerState ccState = convertCharacterControllerState(myAvatar->getCharacterController()->getState());

        auto velocity = myAvatar->getLocalVelocity();
        auto position = myAvatar->getLocalPosition();
        auto orientation = myAvatar->getLocalOrientation();
        _rig->computeMotionAnimationState(deltaTime, position, velocity, orientation, ccState);

        // evaluate AnimGraph animation and update jointStates.
        Model::updateRig(deltaTime, parentTransform);

        Rig::EyeParameters eyeParams;
        eyeParams.worldHeadOrientation = headParams.worldHeadOrientation;
        eyeParams.eyeLookAt = head->getLookAtPosition();
        eyeParams.eyeSaccade = head->getSaccade();
        eyeParams.modelRotation = getRotation();
        eyeParams.modelTranslation = getTranslation();
        eyeParams.leftEyeJointIndex = geometry.leftEyeJointIndex;
        eyeParams.rightEyeJointIndex = geometry.rightEyeJointIndex;

        _rig->updateFromEyeParameters(eyeParams);

    } else {

        Model::updateRig(deltaTime, parentTransform);

        // This is a little more work than we really want.
        //
        // Other avatars joint, including their eyes, should already be set just like any other joints
        // from the wire data. But when looking at me, we want the eyes to use the corrected lookAt.
        //
        // Thus this should really only be ... else if (_owningAvatar->getHead()->isLookingAtMe()) {...
        // However, in the !isLookingAtMe case, the eyes aren't rotating the way they should right now.
        // We will revisit that as priorities allow, and particularly after the new rig/animation/joints.

        // If the head is not positioned, updateEyeJoints won't get the math right
        glm::quat headOrientation;
        _rig->getJointRotation(geometry.headJointIndex, headOrientation);
        glm::vec3 eulers = safeEulerAngles(headOrientation);
        head->setBasePitch(glm::degrees(-eulers.x));
        head->setBaseYaw(glm::degrees(eulers.y));
        head->setBaseRoll(glm::degrees(-eulers.z));

        Rig::EyeParameters eyeParams;
        eyeParams.worldHeadOrientation = head->getFinalOrientationInWorldFrame();
        eyeParams.eyeLookAt = head->getCorrectedLookAtPosition();
        eyeParams.eyeSaccade = glm::vec3();
        eyeParams.modelRotation = getRotation();
        eyeParams.modelTranslation = getTranslation();
        eyeParams.leftEyeJointIndex = geometry.leftEyeJointIndex;
        eyeParams.rightEyeJointIndex = geometry.rightEyeJointIndex;

        _rig->updateFromEyeParameters(eyeParams);
     }
}
コード例 #5
0
ファイル: JointState.cpp プロジェクト: BrianPrz/hifi
void JointState::computeTransform(const glm::mat4& parentTransform) {
    // Compose the joint's animated rotation with the FBX pre/post rotations.
    const glm::quat localRotation = _fbxJoint->preRotation * _rotationInParentFrame * _fbxJoint->postRotation;
    // Build the joint-local transform, then chain it onto the parent's transform.
    const glm::mat4 localTransform = _fbxJoint->preTransform * glm::mat4_cast(localRotation) * _fbxJoint->postTransform;
    _transform = parentTransform * glm::translate(_fbxJoint->translation) * localTransform;
    // Cache the world-frame rotation extracted from the final transform.
    _rotation = extractRotation(_transform);
}
コード例 #6
0
 // Helpers that flatten a full transformation into a vector representation.
 // Pack an isometry into a 6-vector: translation first, then the
 // compact-quaternion encoding of the rotation.
 Vector6d toVectorMQT(const Isometry3d& t) {
   Vector6d result;
   result.block<3,1>(0,0) = t.translation();
   result.block<3,1>(3,0) = toCompactQuaternion(extractRotation(t));
   return result;
 }
コード例 #7
0
 // Pack an isometry into a 6-vector: translation first, then Euler angles.
 Vector6d toVectorET(const Isometry3d& t) {
   Vector6d result;
   result.block<3,1>(0,0) = t.translation();
   result.block<3,1>(3,0) = toEuler(extractRotation(t));
   return result;
 }
コード例 #8
0
std::vector<geometry_msgs::Quaternion> classification::PoseDataPoint::getRotations() const
{
	std::vector<geometry_msgs::Quaternion> rotations(15);

	rotations[0] = extractRotation(poseData.head);
	rotations[1] = extractRotation(poseData.neck);
	rotations[2] = extractRotation(poseData.torso);
	rotations[3] = extractRotation(poseData.left_shoulder);
	rotations[4] = extractRotation(poseData.left_elbow);
	rotations[5] = extractRotation(poseData.left_hand);
	rotations[6] = extractRotation(poseData.right_shoulder);
	rotations[7] = extractRotation(poseData.right_elbow);
	rotations[8] = extractRotation(poseData.right_hand);
	rotations[9] = extractRotation(poseData.left_hip);
	rotations[10] = extractRotation(poseData.left_knee);
	rotations[11] = extractRotation(poseData.left_foot);
	rotations[12] = extractRotation(poseData.right_hip);
	rotations[13] = extractRotation(poseData.right_knee);
	rotations[14] = extractRotation(poseData.right_foot);

	return rotations;
}