Example 1
static void reportJoint(int index, JointState joint) { // Handy for debugging
    std::cout << "\n";
    std::cout << index << " " << joint.getName().toUtf8().data() << "\n";
    std::cout << " pos:" << joint.getPosition() << "/" << joint.getPositionInParentFrame() << " from " << joint.getParentIndex() << "\n";
    std::cout << " rot:" << safeEulerAngles(joint.getRotation()) << "/" << safeEulerAngles(joint.getRotationInParentFrame()) << "/" << safeEulerAngles(joint.getRotationInBindFrame()) << "\n";
    std::cout << "\n";
}
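The << overloads on glm types used above come from the codebase's stream helpers; a minimal sketch of what they might look like (the exact formatting is an assumption for illustration):

#include <iostream>
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

// Hypothetical stand-ins for the codebase's stream helpers.
std::ostream& operator<<(std::ostream& s, const glm::vec3& v) {
    return s << "(" << v.x << ", " << v.y << ", " << v.z << ")";
}

std::ostream& operator<<(std::ostream& s, const glm::quat& q) {
    return s << "(" << q.x << ", " << q.y << ", " << q.z << ", " << q.w << ")";
}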
Example 2
void HeadData::setHeadOrientation(const glm::quat& orientation) {
    glm::quat bodyOrientation = _owningAvatar->getOrientation();
    glm::vec3 eulers = glm::degrees(safeEulerAngles(glm::inverse(bodyOrientation) * orientation));
    _basePitch = eulers.x;
    _baseYaw = eulers.y;
    _baseRoll = eulers.z;
}
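For reference, the decomposition above inverts cleanly: convert the stored degrees back to radians and re-apply the body orientation. A sketch, assuming glm's euler-angle quaternion constructor matches safeEulerAngles' convention (the round trip that Example 3 below exercises):

glm::quat reconstructHeadOrientation(const glm::quat& bodyOrientation,
                                     float basePitch, float baseYaw, float baseRoll) {
    // the base angles are stored in degrees, so convert back before re-composing
    glm::vec3 eulers = glm::radians(glm::vec3(basePitch, baseYaw, baseRoll));
    return bodyOrientation * glm::quat(eulers);
}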
Example 3
void GLMHelpersTests::testEulerDecomposition() {
    // quat to euler and back again....

    const glm::quat ROT_X_90 = glm::angleAxis(PI / 2.0f, glm::vec3(1.0f, 0.0f, 0.0f));
    const glm::quat ROT_Y_180 = glm::angleAxis(PI, glm::vec3(0.0f, 1.0f, 0.0f));
    const glm::quat ROT_Z_30 = glm::angleAxis(PI / 6.0f, glm::vec3(0.0f, 0.0f, 1.0f));

    const float EPSILON = 0.00001f;

    std::vector<glm::quat> quatVec = {
        glm::quat(),
        ROT_X_90,
        ROT_Y_180,
        ROT_Z_30,
        ROT_X_90 * ROT_Y_180 * ROT_Z_30,
        ROT_X_90 * ROT_Z_30 * ROT_Y_180,
        ROT_Y_180 * ROT_Z_30 * ROT_X_90,
        ROT_Y_180 * ROT_X_90 * ROT_Z_30,
        ROT_Z_30 * ROT_X_90 * ROT_Y_180,
        ROT_Z_30 * ROT_Y_180 * ROT_X_90,
    };

    for (auto& q : quatVec) {
        glm::vec3 euler = safeEulerAngles(q);
        glm::quat r(euler);

        // q and -q represent the same rotation; flip r when the decomposition
        // lands on the opposite hemisphere so the comparison can succeed.
        if (glm::dot(q, r) < 0.0f) {
            r = -r;
        }

        QCOMPARE_WITH_ABS_ERROR(q, r, EPSILON);
    }
}
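The same round-trip check can be written without the Qt harness; a standalone sketch in which the component-wise comparison stands in for QCOMPARE_WITH_ABS_ERROR:

#include <cmath>

bool roundTripsThroughEulers(const glm::quat& q, float epsilon = 0.00001f) {
    glm::quat r(safeEulerAngles(q)); // decompose to eulers and re-compose
    if (glm::dot(q, r) < 0.0f) {
        r = -r; // q and -q encode the same rotation
    }
    return std::fabs(q.x - r.x) < epsilon && std::fabs(q.y - r.y) < epsilon &&
           std::fabs(q.z - r.z) < epsilon && std::fabs(q.w - r.w) < epsilon;
}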
Example 4
static void reportJoint(RigPointer rig, int index) { // Handy for debugging
    std::cout << "\n";
    std::cout << index << " " << rig->getAnimSkeleton()->getJointName(index).toUtf8().data() << "\n";
    glm::vec3 pos;
    rig->getJointPosition(index, pos);
    glm::quat rot;
    rig->getJointRotation(index, rot);
    std::cout << " pos:" << pos << "\n";
    std::cout << " rot:" << safeEulerAngles(rot) << "\n";
    std::cout << "\n";
}
Example 5
void HeadData::setOrientation(const glm::quat& orientation) {
    // rotate body about vertical axis
    glm::quat bodyOrientation = _owningAvatar->getOrientation();
    glm::vec3 newFront = glm::inverse(bodyOrientation) * (orientation * IDENTITY_FRONT);
    bodyOrientation = bodyOrientation * glm::angleAxis(atan2f(-newFront.x, -newFront.z), glm::vec3(0.0f, 1.0f, 0.0f));
    _owningAvatar->setOrientation(bodyOrientation);
    
    // the rest goes to the head
    glm::vec3 eulers = glm::degrees(safeEulerAngles(glm::inverse(bodyOrientation) * orientation));
    _basePitch = eulers.x;
    _baseYaw = eulers.y;
    _baseRoll = eulers.z;
}
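The atan2f expression above isolates the yaw component of the requested orientation, so the body turns about the vertical axis and only pitch/roll (plus any residual yaw) reach the head. The same extraction as its own helper, assuming IDENTITY_FRONT is (0, 0, -1) as elsewhere in the codebase:

// Given a desired facing direction in the body's local frame, return the
// rotation about the vertical axis that yaws the front vector toward it.
glm::quat yawTowards(const glm::vec3& localFront) {
    float yaw = atan2f(-localFront.x, -localFront.z); // zero when localFront == (0, 0, -1)
    return glm::angleAxis(yaw, glm::vec3(0.0f, 1.0f, 0.0f));
}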
Example 6
// Called within Model::simulate call, below.
void SkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
    assert(!_owningAvatar->isMyAvatar());
    const FBXGeometry& geometry = getFBXGeometry();

    Head* head = _owningAvatar->getHead();

    // make sure lookAt is not too close to face (avoid crosseyes)
    glm::vec3 lookAt = head->getCorrectedLookAtPosition();
    glm::vec3 focusOffset = lookAt - _owningAvatar->getHead()->getEyePosition();
    float focusDistance = glm::length(focusOffset);
    const float MIN_LOOK_AT_FOCUS_DISTANCE = 1.0f;
    if (focusDistance < MIN_LOOK_AT_FOCUS_DISTANCE && focusDistance > EPSILON) {
        lookAt = _owningAvatar->getHead()->getEyePosition() + (MIN_LOOK_AT_FOCUS_DISTANCE / focusDistance) * focusOffset;
    }

    // no need to call Model::updateRig() because otherAvatars get their joint state
    // copied directly from AvatarData::_jointData (there are no Rig animations to blend)
    _needsUpdateClusterMatrices = true;

    // This is a little more work than we really want.
    //
    // Other avatars' joints, including their eyes, should already be set just like any other joints
    // from the wire data. But when looking at me, we want the eyes to use the corrected lookAt.
    //
    // Thus this should really only be ... else if (_owningAvatar->getHead()->isLookingAtMe()) {...
    // However, in the !isLookingAtMe case, the eyes aren't rotating the way they should right now.
    // We will revisit that as priorities allow, and particularly after the new rig/animation/joints.

    // If the head is not positioned, updateEyeJoints won't get the math right
    glm::quat headOrientation;
    _rig.getJointRotation(geometry.headJointIndex, headOrientation);
    glm::vec3 eulers = safeEulerAngles(headOrientation);
    head->setBasePitch(glm::degrees(-eulers.x));
    head->setBaseYaw(glm::degrees(eulers.y));
    head->setBaseRoll(glm::degrees(-eulers.z));

    Rig::EyeParameters eyeParams;
    eyeParams.eyeLookAt = lookAt;
    eyeParams.eyeSaccade = glm::vec3(0.0f);
    eyeParams.modelRotation = getRotation();
    eyeParams.modelTranslation = getTranslation();
    eyeParams.leftEyeJointIndex = geometry.leftEyeJointIndex;
    eyeParams.rightEyeJointIndex = geometry.rightEyeJointIndex;

    _rig.updateFromEyeParameters(eyeParams);
}
Example 7
void FaceModel::maybeUpdateNeckRotation(const JointState& parentState, const FBXJoint& joint, JointState& state) {
    Avatar* owningAvatar = static_cast<Avatar*>(_owningHead->_owningAvatar);
    // get the rotation axes in joint space and use them to adjust the rotation
    glm::mat3 axes = glm::mat3_cast(glm::quat()); // identity rotation: the default coordinate axes
    glm::mat3 inverse = glm::mat3(glm::inverse(parentState.getTransform() *
                                               glm::translate(state.getDefaultTranslationInConstrainedFrame()) *
                                               joint.preTransform * glm::mat4_cast(joint.preRotation)));
    glm::vec3 pitchYawRoll = safeEulerAngles(_owningHead->getFinalOrientationInLocalFrame());
    glm::vec3 lean = glm::radians(glm::vec3(_owningHead->getFinalLeanForward(),
                                            _owningHead->getTorsoTwist(),
                                            _owningHead->getFinalLeanSideways()));
    pitchYawRoll -= lean;
    state.setRotationInConstrainedFrame(glm::angleAxis(-pitchYawRoll.z, glm::normalize(inverse * axes[2]))
                                        * glm::angleAxis(pitchYawRoll.y, glm::normalize(inverse * axes[1]))
                                        * glm::angleAxis(-pitchYawRoll.x, glm::normalize(inverse * axes[0]))
                                        * joint.rotation, DEFAULT_PRIORITY);
}
Example 8
void FaceModel::maybeUpdateNeckRotation(const JointState& parentState, const JointState& state, int index) {
    // get the rotation axes in joint space and use them to adjust the rotation
    glm::mat3 axes = glm::mat3_cast(glm::quat()); // identity rotation: the default coordinate axes
    glm::mat3 inverse = glm::mat3(glm::inverse(parentState.getTransform() *
                                               glm::translate(_rig->getJointDefaultTranslationInConstrainedFrame(index)) *
                                               state.getPreTransform() * glm::mat4_cast(state.getPreRotation())));
    glm::vec3 pitchYawRoll = safeEulerAngles(_owningHead->getFinalOrientationInLocalFrame());
    glm::vec3 lean = glm::radians(glm::vec3(_owningHead->getFinalLeanForward(),
                                            _owningHead->getTorsoTwist(),
                                            _owningHead->getFinalLeanSideways()));
    pitchYawRoll -= lean;
    _rig->setJointRotationInConstrainedFrame(index,
                                             glm::angleAxis(-pitchYawRoll.z, glm::normalize(inverse * axes[2]))
                                             * glm::angleAxis(pitchYawRoll.y, glm::normalize(inverse * axes[1]))
                                             * glm::angleAxis(-pitchYawRoll.x, glm::normalize(inverse * axes[0]))
                                             * state.getDefaultRotation(), DEFAULT_PRIORITY);
}
Example 9
void Profile::updateOrientation(const glm::quat& orientation) {
    glm::vec3 eulerAngles = safeEulerAngles(orientation);
    if (_lastOrientation == eulerAngles) {
        return;
    }
    const quint64 DATA_SERVER_ORIENTATION_UPDATE_INTERVAL_USECS = 5 * 1000 * 1000;
    const float DATA_SERVER_ORIENTATION_CHANGE_THRESHOLD_DEGREES = 5.0f;
    
    quint64 now = usecTimestampNow();
    if (now - _lastOrientationSend >= DATA_SERVER_ORIENTATION_UPDATE_INTERVAL_USECS &&
            glm::distance(_lastOrientation, eulerAngles) >= DATA_SERVER_ORIENTATION_CHANGE_THRESHOLD_DEGREES) {
        DataServerClient::putValueForKeyAndUserString(DataServerKey::Orientation, QString(createByteArray(eulerAngles)),
                                                      getUserString());
        
        _lastOrientation = eulerAngles;
        _lastOrientationSend = now;
    }
}
Example 10
void JointState::applyRotationDelta(const glm::quat& delta, bool constrain, float priority) {
    // NOTE: delta is in jointParent-frame
    assert(_fbxJoint != NULL);
    if (priority < _animationPriority) {
        return;
    }
    _animationPriority = priority;
    if (!constrain || (_fbxJoint->rotationMin == glm::vec3(-PI, -PI, -PI) &&
            _fbxJoint->rotationMax == glm::vec3(PI, PI, PI))) {
        // no constraints
        _rotationInParentFrame = _rotationInParentFrame * glm::inverse(_rotation) * delta * _rotation;
        _rotation = delta * _rotation;
        return;
    }
    glm::quat targetRotation = delta * _rotation;
    glm::vec3 eulers = safeEulerAngles(_rotationInParentFrame * glm::inverse(_rotation) * targetRotation);
    glm::quat newRotation = glm::quat(glm::clamp(eulers, _fbxJoint->rotationMin, _fbxJoint->rotationMax));
    _rotation = _rotation * glm::inverse(_rotationInParentFrame) * newRotation;
    _rotationInParentFrame = newRotation;
}
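The constrained branch clamps the rotation per-axis in euler space. A simplified standalone sketch, assuming the delta is already expressed in the parent frame (the original additionally conjugates the delta from model frame into parent frame):

glm::quat applyClampedDelta(const glm::quat& rotationInParentFrame, const glm::quat& delta,
                            const glm::vec3& rotationMin, const glm::vec3& rotationMax) {
    // decompose the target rotation, clamp each axis to its limit, re-compose
    glm::vec3 eulers = safeEulerAngles(delta * rotationInParentFrame);
    return glm::quat(glm::clamp(eulers, rotationMin, rotationMax));
}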
Example 11
void Player::play() {
    computeCurrentFrame();
    if (_currentFrame < 0 || (_currentFrame >= _recording->getFrameNumber() - 2)) { // -2 because of interpolation
        if (_loop) {
            loopRecording();
        } else {
            stopPlaying();
        }
        return;
    }
    
    const RecordingContext* context = &_recording->getContext();
    if (_playFromCurrentPosition) {
        context = &_currentContext;
    }
    const RecordingFrame& currentFrame = _recording->getFrame(_currentFrame);
    const RecordingFrame& nextFrame = _recording->getFrame(_currentFrame + 1);
    
    glm::vec3 translation = glm::mix(currentFrame.getTranslation(),
                                     nextFrame.getTranslation(),
                                     _frameInterpolationFactor);
    _avatar->setPosition(context->position + context->orientation * translation);
    
    glm::quat rotation = safeMix(currentFrame.getRotation(),
                                 nextFrame.getRotation(),
                                 _frameInterpolationFactor);
    _avatar->setOrientation(context->orientation * rotation);
    
    float scale = glm::mix(currentFrame.getScale(),
                           nextFrame.getScale(),
                           _frameInterpolationFactor);
    _avatar->setTargetScale(context->scale * scale);
    
    
    QVector<glm::quat> jointRotations(currentFrame.getJointRotations().size());
    for (int i = 0; i < currentFrame.getJointRotations().size(); ++i) {
        jointRotations[i] = safeMix(currentFrame.getJointRotations()[i],
                                    nextFrame.getJointRotations()[i],
                                    _frameInterpolationFactor);
    }
    _avatar->setJointRotations(jointRotations);
    
    HeadData* head = const_cast<HeadData*>(_avatar->getHeadData());
    if (head) {
        // Make sure fake face tracker connection doesn't get turned off
        _avatar->setForceFaceTrackerConnected(true);
        
        QVector<float> blendCoef(currentFrame.getBlendshapeCoefficients().size());
        for (int i = 0; i < currentFrame.getBlendshapeCoefficients().size(); ++i) {
            blendCoef[i] = glm::mix(currentFrame.getBlendshapeCoefficients()[i],
                                    nextFrame.getBlendshapeCoefficients()[i],
                                    _frameInterpolationFactor);
        }
        head->setBlendshapeCoefficients(blendCoef);
        
        float leanSideways = glm::mix(currentFrame.getLeanSideways(),
                                      nextFrame.getLeanSideways(),
                                      _frameInterpolationFactor);
        head->setLeanSideways(leanSideways);
        
        float leanForward = glm::mix(currentFrame.getLeanForward(),
                                     nextFrame.getLeanForward(),
                                     _frameInterpolationFactor);
        head->setLeanForward(leanForward);
        
        glm::quat headRotation = safeMix(currentFrame.getHeadRotation(),
                                         nextFrame.getHeadRotation(),
                                         _frameInterpolationFactor);
        glm::vec3 eulers = glm::degrees(safeEulerAngles(headRotation));
        head->setFinalPitch(eulers.x);
        head->setFinalYaw(eulers.y);
        head->setFinalRoll(eulers.z);
        
        
        glm::vec3 lookAt = glm::mix(currentFrame.getLookAtPosition(),
                                    nextFrame.getLookAtPosition(),
                                    _frameInterpolationFactor);
        head->setLookAtPosition(context->position + context->orientation * lookAt);
    } else {
        qCDebug(avatars) << "WARNING: Player couldn't find head data.";
    }
    
    _options.position = _avatar->getPosition();
    _options.orientation = _avatar->getOrientation();
    _injector->setOptions(_options);
}
Example 12
// Called within Model::simulate call, below.
void SkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
    const FBXGeometry& geometry = getFBXGeometry();

    Head* head = _owningAvatar->getHead();

    if (_owningAvatar->isMyAvatar()) {
        MyAvatar* myAvatar = static_cast<MyAvatar*>(_owningAvatar);

        Rig::HeadParameters headParams;
        headParams.enableLean = qApp->isHMDMode();
        headParams.leanSideways = head->getFinalLeanSideways();
        headParams.leanForward = head->getFinalLeanForward();
        headParams.torsoTwist = head->getTorsoTwist();

        if (qApp->isHMDMode()) {
            headParams.isInHMD = true;

            // get HMD position from sensor space into world space, and back into rig space
            glm::mat4 worldHMDMat = myAvatar->getSensorToWorldMatrix() * myAvatar->getHMDSensorMatrix();
            glm::mat4 rigToWorld = createMatFromQuatAndPos(getRotation(), getTranslation());
            glm::mat4 worldToRig = glm::inverse(rigToWorld);
            glm::mat4 rigHMDMat = worldToRig * worldHMDMat;

            headParams.rigHeadPosition = extractTranslation(rigHMDMat);
            headParams.rigHeadOrientation = extractRotation(rigHMDMat);
            headParams.worldHeadOrientation = extractRotation(worldHMDMat);
        } else {
            headParams.isInHMD = false;

            // We don't have a valid localHeadPosition.
            headParams.rigHeadOrientation = Quaternions::Y_180 * head->getFinalOrientationInLocalFrame();
            headParams.worldHeadOrientation = head->getFinalOrientationInWorldFrame();
        }

        headParams.leanJointIndex = geometry.leanJointIndex;
        headParams.neckJointIndex = geometry.neckJointIndex;
        headParams.isTalking = head->getTimeWithoutTalking() <= 1.5f;

        _rig->updateFromHeadParameters(headParams, deltaTime);

        Rig::HandParameters handParams;

        auto leftPose = myAvatar->getLeftHandControllerPoseInAvatarFrame();
        if (leftPose.isValid()) {
            handParams.isLeftEnabled = true;
            handParams.leftPosition = Quaternions::Y_180 * leftPose.getTranslation();
            handParams.leftOrientation = Quaternions::Y_180 * leftPose.getRotation();
        } else {
            handParams.isLeftEnabled = false;
        }

        auto rightPose = myAvatar->getRightHandControllerPoseInAvatarFrame();
        if (rightPose.isValid()) {
            handParams.isRightEnabled = true;
            handParams.rightPosition = Quaternions::Y_180 * rightPose.getTranslation();
            handParams.rightOrientation = Quaternions::Y_180 * rightPose.getRotation();
        } else {
            handParams.isRightEnabled = false;
        }

        handParams.bodyCapsuleRadius = myAvatar->getCharacterController()->getCapsuleRadius();
        handParams.bodyCapsuleHalfHeight = myAvatar->getCharacterController()->getCapsuleHalfHeight();
        handParams.bodyCapsuleLocalOffset = myAvatar->getCharacterController()->getCapsuleLocalOffset();

        _rig->updateFromHandParameters(handParams, deltaTime);

        Rig::CharacterControllerState ccState = convertCharacterControllerState(myAvatar->getCharacterController()->getState());

        auto velocity = myAvatar->getLocalVelocity();
        auto position = myAvatar->getLocalPosition();
        auto orientation = myAvatar->getLocalOrientation();
        _rig->computeMotionAnimationState(deltaTime, position, velocity, orientation, ccState);

        // evaluate AnimGraph animation and update jointStates.
        Model::updateRig(deltaTime, parentTransform);

        Rig::EyeParameters eyeParams;
        eyeParams.worldHeadOrientation = headParams.worldHeadOrientation;
        eyeParams.eyeLookAt = head->getLookAtPosition();
        eyeParams.eyeSaccade = head->getSaccade();
        eyeParams.modelRotation = getRotation();
        eyeParams.modelTranslation = getTranslation();
        eyeParams.leftEyeJointIndex = geometry.leftEyeJointIndex;
        eyeParams.rightEyeJointIndex = geometry.rightEyeJointIndex;

        _rig->updateFromEyeParameters(eyeParams);

    } else {

        Model::updateRig(deltaTime, parentTransform);

        // This is a little more work than we really want.
        //
        // Other avatars' joints, including their eyes, should already be set just like any other joints
        // from the wire data. But when looking at me, we want the eyes to use the corrected lookAt.
        //
        // Thus this should really only be ... else if (_owningAvatar->getHead()->isLookingAtMe()) {...
        // However, in the !isLookingAtMe case, the eyes aren't rotating the way they should right now.
        // We will revisit that as priorities allow, and particularly after the new rig/animation/joints.

        // If the head is not positioned, updateEyeJoints won't get the math right
        glm::quat headOrientation;
        _rig->getJointRotation(geometry.headJointIndex, headOrientation);
        glm::vec3 eulers = safeEulerAngles(headOrientation);
        head->setBasePitch(glm::degrees(-eulers.x));
        head->setBaseYaw(glm::degrees(eulers.y));
        head->setBaseRoll(glm::degrees(-eulers.z));

        Rig::EyeParameters eyeParams;
        eyeParams.worldHeadOrientation = head->getFinalOrientationInWorldFrame();
        eyeParams.eyeLookAt = head->getCorrectedLookAtPosition();
        eyeParams.eyeSaccade = glm::vec3();
        eyeParams.modelRotation = getRotation();
        eyeParams.modelTranslation = getTranslation();
        eyeParams.leftEyeJointIndex = geometry.leftEyeJointIndex;
        eyeParams.rightEyeJointIndex = geometry.rightEyeJointIndex;

        _rig->updateFromEyeParameters(eyeParams);
    }
}
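The HMD branch above hops between sensor, world, and rig spaces with a few matrix helpers. Approximations in plain glm (a sketch; the codebase's own versions may differ):

#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

glm::mat4 createMatFromQuatAndPos(const glm::quat& q, const glm::vec3& p) {
    glm::mat4 m = glm::mat4_cast(q);
    m[3] = glm::vec4(p, 1.0f); // translation goes in the last column
    return m;
}

glm::vec3 extractTranslation(const glm::mat4& m) {
    return glm::vec3(m[3]);
}

glm::quat extractRotation(const glm::mat4& m) {
    return glm::quat_cast(m); // assumes no scale or shear in the matrix
}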
Example 13
void Player::play() {
    computeCurrentFrame();
    if (_currentFrame < 0 || (_currentFrame >= _recording->getFrameNumber() - 2)) { // -2 because of interpolation
        if (_loop) {
            loopRecording();
        } else {
            stopPlaying();
        }
        return;
    }
    
    const RecordingContext* context = &_recording->getContext();
    if (_playFromCurrentPosition) {
        context = &_currentContext;
    }
    const RecordingFrame& currentFrame = _recording->getFrame(_currentFrame);
    const RecordingFrame& nextFrame = _recording->getFrame(_currentFrame + 1);
    
    glm::vec3 translation = glm::mix(currentFrame.getTranslation(),
                                     nextFrame.getTranslation(),
                                     _frameInterpolationFactor);
    _avatar->setPosition(context->position + context->orientation * translation);
    
    glm::quat rotation = safeMix(currentFrame.getRotation(),
                                 nextFrame.getRotation(),
                                 _frameInterpolationFactor);
    _avatar->setOrientation(context->orientation * rotation);
    
    float scale = glm::mix(currentFrame.getScale(),
                           nextFrame.getScale(),
                           _frameInterpolationFactor);
    _avatar->setTargetScale(context->scale * scale);

    // Joint array playback
    // FIXME: This is still using a deprecated path to assign the joint orientations, since setting the full
    //        RawJointData array doesn't work for Avatar. We need to fix this with the animation team.
    const auto& prevJointArray = currentFrame.getJointArray();
    const auto& nextJointArray = nextFrame.getJointArray();
    QVector<JointData> jointArray(prevJointArray.size());
    QVector<glm::quat> jointRotations(prevJointArray.size()); // FIXME: remove once the setRawJointData is fixed
    QVector<glm::vec3> jointTranslations(prevJointArray.size()); // FIXME: remove once the setRawJointData is fixed

    for (int i = 0; i < jointArray.size(); i++) {
        const auto& prevJoint = prevJointArray[i];
        const auto& nextJoint = nextJointArray[i];
        auto& joint = jointArray[i];

        // Rotation
        joint.rotationSet = prevJoint.rotationSet || nextJoint.rotationSet;
        if (joint.rotationSet) {
            joint.rotation = safeMix(prevJoint.rotation, nextJoint.rotation, _frameInterpolationFactor);
            jointRotations[i] = joint.rotation; // FIXME: remove once the setRawJointData is fixed
        }

        joint.translationSet = prevJoint.translationSet || nextJoint.translationSet;
        if (joint.translationSet) {
            joint.translation = glm::mix(prevJoint.translation, nextJoint.translation, _frameInterpolationFactor);
            jointTranslations[i] = joint.translation; // FIXME: remove once the setRawJointData is fixed
        }
    }

    // _avatar->setRawJointData(jointArray); // FIXME: enable once setRawJointData is fixed
    _avatar->setJointRotations(jointRotations); // FIXME: remove once setRawJointData is fixed
    // _avatar->setJointTranslations(jointTranslations); // FIXME: remove once setRawJointData is fixed

    HeadData* head = const_cast<HeadData*>(_avatar->getHeadData());
    if (head) {
        // Make sure fake face tracker connection doesn't get turned off
        _avatar->setForceFaceTrackerConnected(true);
        
        QVector<float> blendCoef(currentFrame.getBlendshapeCoefficients().size());
        for (int i = 0; i < currentFrame.getBlendshapeCoefficients().size(); ++i) {
            blendCoef[i] = glm::mix(currentFrame.getBlendshapeCoefficients()[i],
                                    nextFrame.getBlendshapeCoefficients()[i],
                                    _frameInterpolationFactor);
        }
        head->setBlendshapeCoefficients(blendCoef);
        
        float leanSideways = glm::mix(currentFrame.getLeanSideways(),
                                      nextFrame.getLeanSideways(),
                                      _frameInterpolationFactor);
        head->setLeanSideways(leanSideways);
        
        float leanForward = glm::mix(currentFrame.getLeanForward(),
                                     nextFrame.getLeanForward(),
                                     _frameInterpolationFactor);
        head->setLeanForward(leanForward);
        
        glm::quat headRotation = safeMix(currentFrame.getHeadRotation(),
                                         nextFrame.getHeadRotation(),
                                         _frameInterpolationFactor);
        glm::vec3 eulers = glm::degrees(safeEulerAngles(headRotation));
        head->setFinalPitch(eulers.x);
        head->setFinalYaw(eulers.y);
        head->setFinalRoll(eulers.z);
        
        
        glm::vec3 lookAt = glm::mix(currentFrame.getLookAtPosition(),
                                    nextFrame.getLookAtPosition(),
                                    _frameInterpolationFactor);
        head->setLookAtPosition(context->position + context->orientation * lookAt);
    } else {
        qCDebug(avatars) << "WARNING: Player couldn't find head data.";
    }
    
    _options.position = _avatar->getPosition();
    _options.orientation = _avatar->getOrientation();
    _injector->setOptions(_options);
}
Example 14
void SerialInterface::readData(float deltaTime) {
#ifndef _WIN32
    
    int initialSamples = totalSamples;
    
    if (USING_INVENSENSE_MPU9150) { 

        // ask the Invensense for raw accelerometer data
        short accelData[3];
        if (mpu_get_accel_reg(accelData, 0)) {
            close(_serialDescriptor);
            qDebug("Disconnected SerialUSB.\n");
            _active = false;
            return; // disconnected
        }
        
        const float LSB_TO_METERS_PER_SECOND2 = 1.f / 16384.f * GRAVITY_EARTH;
                                                                //  From MPU-9150 register map, with setting on
                                                                //  highest resolution = +/- 2G
        
        _lastAcceleration = glm::vec3(-accelData[2], -accelData[1], -accelData[0]) * LSB_TO_METERS_PER_SECOND2;
          
        short gyroData[3];
        mpu_get_gyro_reg(gyroData, 0);
        
        //  Convert the integer rates to floats
        const float LSB_TO_DEGREES_PER_SECOND = 1.f / 16.4f;     //  From MPU-9150 register map, 2000 deg/sec.
        glm::vec3 rotationRates;
        rotationRates[0] = ((float) -gyroData[2]) * LSB_TO_DEGREES_PER_SECOND;
        rotationRates[1] = ((float) -gyroData[1]) * LSB_TO_DEGREES_PER_SECOND;
        rotationRates[2] = ((float) -gyroData[0]) * LSB_TO_DEGREES_PER_SECOND;
      
        short compassData[3];
        mpu_get_compass_reg(compassData, 0);
      
        // Convert integer values to floats, update extents
        _lastCompass = glm::vec3(compassData[2], -compassData[0], -compassData[1]);
        
        // update and subtract the long term average
        _averageRotationRates = (1.f - 1.f/(float)LONG_TERM_RATE_SAMPLES) * _averageRotationRates +
                1.f/(float)LONG_TERM_RATE_SAMPLES * rotationRates;
        rotationRates -= _averageRotationRates;

        // compute the angular acceleration
        glm::vec3 angularAcceleration = (deltaTime < EPSILON) ? glm::vec3() : (rotationRates - _lastRotationRates) / deltaTime;
        _lastRotationRates = rotationRates;
        
        //  Update raw rotation estimates
        glm::quat estimatedRotation = glm::quat(glm::radians(_estimatedRotation)) *
            glm::quat(glm::radians(deltaTime * _lastRotationRates));
        
        //  Update acceleration estimate: first, subtract gravity as rotated into current frame
        _estimatedAcceleration = (totalSamples < GRAVITY_SAMPLES) ? glm::vec3() :
            _lastAcceleration - glm::inverse(estimatedRotation) * _gravity;
        
        // update and subtract the long term average
        _averageAcceleration = (1.f - 1.f/(float)LONG_TERM_RATE_SAMPLES) * _averageAcceleration +
                1.f/(float)LONG_TERM_RATE_SAMPLES * _estimatedAcceleration;
        _estimatedAcceleration -= _averageAcceleration;
        
        //  Consider updating our angular velocity/acceleration to linear acceleration mapping
        if (glm::length(_estimatedAcceleration) > EPSILON &&
                (glm::length(_lastRotationRates) > EPSILON || glm::length(angularAcceleration) > EPSILON)) {
            // compute predicted linear acceleration, find error between actual and predicted
            glm::vec3 predictedAcceleration = _angularVelocityToLinearAccel * _lastRotationRates +
                _angularAccelToLinearAccel * angularAcceleration;
            glm::vec3 error = _estimatedAcceleration - predictedAcceleration;
            
            // the "error" is actually what we want: the linear acceleration minus rotational influences
            _estimatedAcceleration = error;
            
            // adjust according to error in each dimension, in proportion to input magnitudes
            for (int i = 0; i < 3; i++) {
                if (fabsf(error[i]) < EPSILON) {
                    continue;
                }
                const float LEARNING_RATE = 0.001f;
                float rateSum = fabsf(_lastRotationRates.x) + fabsf(_lastRotationRates.y) + fabsf(_lastRotationRates.z);
                if (rateSum > EPSILON) {
                    for (int j = 0; j < 3; j++) {
                        float proportion = LEARNING_RATE * fabsf(_lastRotationRates[j]) / rateSum;
                        if (proportion > EPSILON) {
                            _angularVelocityToLinearAccel[j][i] += error[i] * proportion / _lastRotationRates[j];
                        }
                    }
                }
                float accelSum = fabsf(angularAcceleration.x) + fabsf(angularAcceleration.y) + fabsf(angularAcceleration.z);
                if (accelSum > EPSILON) {
                    for (int j = 0; j < 3; j++) {
                        float proportion = LEARNING_RATE * fabsf(angularAcceleration[j]) / accelSum;
                        if (proportion > EPSILON) {
                            _angularAccelToLinearAccel[j][i] += error[i] * proportion / angularAcceleration[j];
                        }
                    }                
                }
            }
        }
        
        // rotate estimated acceleration into global rotation frame
        _estimatedAcceleration = estimatedRotation * _estimatedAcceleration;
        
        //  Update estimated position and velocity
        float const DECAY_VELOCITY = 0.975f;
        float const DECAY_POSITION = 0.975f;
        _estimatedVelocity += deltaTime * _estimatedAcceleration;
        _estimatedPosition += deltaTime * _estimatedVelocity;
        _estimatedVelocity *= DECAY_VELOCITY;
        
        //  Attempt to fuse gyro position with webcam position
        Webcam* webcam = Application::getInstance()->getWebcam();
        if (webcam->isActive()) {
            const float WEBCAM_POSITION_FUSION = 0.5f;
            _estimatedPosition = glm::mix(_estimatedPosition, webcam->getEstimatedPosition(), WEBCAM_POSITION_FUSION);
               
        } else {
            _estimatedPosition *= DECAY_POSITION;
        }
            
        //  Accumulate a set of initial baseline readings for setting gravity
        if (totalSamples == 0) {
            _gravity = _lastAcceleration;
        } 
        else {
            if (totalSamples < GRAVITY_SAMPLES) {
                _gravity = glm::mix(_gravity, _lastAcceleration, 1.0f / GRAVITY_SAMPLES);
                
                //  North samples start later, because the initial compass readings are screwy
                int northSample = totalSamples - (GRAVITY_SAMPLES - NORTH_SAMPLES);
                if (northSample == 0) {
                    _north = _lastCompass;
                    
                } else if (northSample > 0) {
                    _north = glm::mix(_north, _lastCompass, 1.0f / NORTH_SAMPLES);
                }
            } else {
                //  Use gravity reading to do sensor fusion on the pitch and roll estimation
                estimatedRotation = safeMix(estimatedRotation,
                    rotationBetween(estimatedRotation * _lastAcceleration, _gravity) * estimatedRotation,
                    1.0f / ACCELERATION_SENSOR_FUSION_SAMPLES);
                
                //  Update the compass extents
                _compassMinima = glm::min(_compassMinima, _lastCompass);
                _compassMaxima = glm::max(_compassMaxima, _lastCompass);
        
                //  Same deal with the compass heading
                estimatedRotation = safeMix(estimatedRotation,
                    rotationBetween(estimatedRotation * recenterCompass(_lastCompass),
                        recenterCompass(_north)) * estimatedRotation,
                    1.0f / COMPASS_SENSOR_FUSION_SAMPLES);
            }
        }
        
        _estimatedRotation = safeEulerAngles(estimatedRotation); 
        
        totalSamples++;
    } 
    
    if (initialSamples == totalSamples) {        
        timeval now;
        gettimeofday(&now, NULL);
        
        if (diffclock(&lastGoodRead, &now) > NO_READ_MAXIMUM_MSECS) {
            qDebug("No data - Shutting down SerialInterface.\n");
            resetSerial();
        }
    } else {
        gettimeofday(&lastGoodRead, NULL);
    }
#endif
}
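At its core this loop is a complementary filter: integrate the gyro rates every frame, then pull the estimate a small step toward the gravity (and, in this version, compass) reference. A minimal sketch of the gravity step, using glm in place of the codebase's rotationBetween/safeMix helpers:

#include <glm/gtx/quaternion.hpp> // glm::rotation, glm::slerp

glm::quat fuseWithGravity(const glm::quat& estimate, const glm::vec3& measuredAccel,
                          const glm::vec3& gravity, float blend) {
    // rotation that re-aligns the measured acceleration (taken into the reference
    // frame by the current estimate) with the stored gravity direction
    glm::quat correction = glm::rotation(glm::normalize(estimate * measuredAccel),
                                         glm::normalize(gravity));
    return glm::slerp(estimate, correction * estimate, blend); // blend ~ 1/N samples
}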
Example 15
void SerialInterface::readData(float deltaTime) {
#ifdef __APPLE__
    
    int initialSamples = totalSamples;
    
    if (USING_INVENSENSE_MPU9150) { 
        unsigned char sensorBuffer[36];
        
        // ask the Invensense for a block of raw sensor data (accel + gyro)
        write(_serialDescriptor, "RD683B0E\n", 9);
        read(_serialDescriptor, sensorBuffer, 36);
        
        int accelXRate, accelYRate, accelZRate;
        
        convertHexToInt(sensorBuffer + 6, accelZRate);
        convertHexToInt(sensorBuffer + 10, accelYRate);
        convertHexToInt(sensorBuffer + 14, accelXRate);
        
        const float LSB_TO_METERS_PER_SECOND2 = 1.f / 16384.f * GRAVITY_EARTH;
                                                                //  From MPU-9150 register map, with setting on
                                                                //  highest resolution = +/- 2G
        
        _lastAcceleration = glm::vec3(-accelXRate, -accelYRate, -accelZRate) * LSB_TO_METERS_PER_SECOND2;
                
        
        int rollRate, yawRate, pitchRate;
        
        convertHexToInt(sensorBuffer + 22, rollRate);
        convertHexToInt(sensorBuffer + 26, yawRate);
        convertHexToInt(sensorBuffer + 30, pitchRate);
        
        //  Convert the integer rates to floats
        const float LSB_TO_DEGREES_PER_SECOND = 1.f / 16.4f;     //  From MPU-9150 register map, 2000 deg/sec.
        glm::vec3 rotationRates;
        rotationRates[0] = ((float) -pitchRate) * LSB_TO_DEGREES_PER_SECOND;
        rotationRates[1] = ((float) -yawRate) * LSB_TO_DEGREES_PER_SECOND;
        rotationRates[2] = ((float) -rollRate) * LSB_TO_DEGREES_PER_SECOND;

        // update and subtract the long term average
        _averageRotationRates = (1.f - 1.f/(float)LONG_TERM_RATE_SAMPLES) * _averageRotationRates +
                1.f/(float)LONG_TERM_RATE_SAMPLES * rotationRates;
        rotationRates -= _averageRotationRates;

        // compute the angular acceleration
        glm::vec3 angularAcceleration = (deltaTime < EPSILON) ? glm::vec3() : (rotationRates - _lastRotationRates) / deltaTime;
        _lastRotationRates = rotationRates;
        
        //  Update raw rotation estimates
        glm::quat estimatedRotation = glm::quat(glm::radians(_estimatedRotation)) *
            glm::quat(glm::radians(deltaTime * _lastRotationRates));
        
        //  Update acceleration estimate: first, subtract gravity as rotated into current frame
        _estimatedAcceleration = (totalSamples < GRAVITY_SAMPLES) ? glm::vec3() :
            _lastAcceleration - glm::inverse(estimatedRotation) * _gravity;
        
        // update and subtract the long term average
        _averageAcceleration = (1.f - 1.f/(float)LONG_TERM_RATE_SAMPLES) * _averageAcceleration +
                1.f/(float)LONG_TERM_RATE_SAMPLES * _estimatedAcceleration;
        _estimatedAcceleration -= _averageAcceleration;
        
        //  Consider updating our angular velocity/acceleration to linear acceleration mapping
        if (glm::length(_estimatedAcceleration) > EPSILON &&
                (glm::length(_lastRotationRates) > EPSILON || glm::length(angularAcceleration) > EPSILON)) {
            // compute predicted linear acceleration, find error between actual and predicted
            glm::vec3 predictedAcceleration = _angularVelocityToLinearAccel * _lastRotationRates +
                _angularAccelToLinearAccel * angularAcceleration;
            glm::vec3 error = _estimatedAcceleration - predictedAcceleration;
            
            // the "error" is actually what we want: the linear acceleration minus rotational influences
            _estimatedAcceleration = error;
            
            // adjust according to error in each dimension, in proportion to input magnitudes
            for (int i = 0; i < 3; i++) {
                if (fabsf(error[i]) < EPSILON) {
                    continue;
                }
                const float LEARNING_RATE = 0.001f;
                float rateSum = fabsf(_lastRotationRates.x) + fabsf(_lastRotationRates.y) + fabsf(_lastRotationRates.z);
                if (rateSum > EPSILON) {
                    for (int j = 0; j < 3; j++) {
                        float proportion = LEARNING_RATE * fabsf(_lastRotationRates[j]) / rateSum;
                        if (proportion > EPSILON) {
                            _angularVelocityToLinearAccel[j][i] += error[i] * proportion / _lastRotationRates[j];
                        }
                    }
                }
                float accelSum = fabsf(angularAcceleration.x) + fabsf(angularAcceleration.y) + fabsf(angularAcceleration.z);
                if (accelSum > EPSILON) {
                    for (int j = 0; j < 3; j++) {
                        float proportion = LEARNING_RATE * fabsf(angularAcceleration[j]) / accelSum;
                        if (proportion > EPSILON) {
                            _angularAccelToLinearAccel[j][i] += error[i] * proportion / angularAcceleration[j];
                        }
                    }                
                }
            }
        }
        
        // rotate estimated acceleration into global rotation frame
        _estimatedAcceleration = estimatedRotation * _estimatedAcceleration;
        
        //  Update estimated position and velocity
        float const DECAY_VELOCITY = 0.975f;
        float const DECAY_POSITION = 0.975f;
        _estimatedVelocity += deltaTime * _estimatedAcceleration;
        _estimatedPosition += deltaTime * _estimatedVelocity;
        _estimatedVelocity *= DECAY_VELOCITY;
        
        //  Attempt to fuse gyro position with webcam position
        Webcam* webcam = Application::getInstance()->getWebcam();
        if (webcam->isActive()) {
            const float WEBCAM_POSITION_FUSION = 0.5f;
            _estimatedPosition = glm::mix(_estimatedPosition, webcam->getEstimatedPosition(), WEBCAM_POSITION_FUSION);
               
        } else {
            _estimatedPosition *= DECAY_POSITION;
        }
            
        //  Accumulate a set of initial baseline readings for setting gravity
        if (totalSamples == 0) {
            _gravity = _lastAcceleration;
        } 
        else {
            if (totalSamples < GRAVITY_SAMPLES) {
                _gravity = (1.f - 1.f/(float)GRAVITY_SAMPLES) * _gravity +
                1.f/(float)GRAVITY_SAMPLES * _lastAcceleration;
            } else {
                //  Use gravity reading to do sensor fusion on the pitch and roll estimation
                estimatedRotation = safeMix(estimatedRotation,
                    rotationBetween(estimatedRotation * _lastAcceleration, _gravity) * estimatedRotation,
                    1.0f / SENSOR_FUSION_SAMPLES);
                
                //  Without a compass heading, always decay estimated Yaw slightly
                const float YAW_DECAY = 0.999f;
                glm::vec3 forward = estimatedRotation * glm::vec3(0.0f, 0.0f, -1.0f);
                estimatedRotation = safeMix(glm::angleAxis(glm::degrees(atan2f(forward.x, -forward.z)),
                    glm::vec3(0.0f, 1.0f, 0.0f)) * estimatedRotation, estimatedRotation, YAW_DECAY);
            }
        }
        
        _estimatedRotation = safeEulerAngles(estimatedRotation); 
        
        totalSamples++;
    } 
    
    if (initialSamples == totalSamples) {        
        timeval now;
        gettimeofday(&now, NULL);
        
        if (diffclock(&lastGoodRead, &now) > NO_READ_MAXIMUM_MSECS) {
            printLog("No data - Shutting down SerialInterface.\n");
            resetSerial();
        }
    } else {
        gettimeofday(&lastGoodRead, NULL);
    }
#endif
}