Example #1
void PrioVR::update(float deltaTime) {
#ifdef HAVE_PRIOVR
    if (!_skeletalDevice) {
        return;
    }
    PerformanceTimer perfTimer("PrioVR");
    unsigned int timestamp;
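    // bulk-read the latest rotation of every joint from the skeletal device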
    yei_getLastStreamDataAll(_skeletalDevice, (char*)_jointRotations.data(),
        _jointRotations.size() * sizeof(glm::quat), &timestamp);

    // convert to our expected coordinate system, average with last rotations to smooth
    for (int i = 0; i < _jointRotations.size(); i++) {
        _jointRotations[i].y *= -1.0f;
        _jointRotations[i].z *= -1.0f;
        
        glm::quat lastRotation = _lastJointRotations.at(i);
        _lastJointRotations[i] = _jointRotations.at(i);
        _jointRotations[i] = safeMix(lastRotation, _jointRotations.at(i), 0.5f);
    }
    
    // convert the joysticks into palm data
    setPalm(deltaTime, LEFT_HAND_INDEX);
    setPalm(deltaTime, RIGHT_HAND_INDEX);
#endif
}
Example #2
void JointState::restoreRotation(float fraction, float priority) {
    assert(_fbxJoint != NULL);
    if (priority == _animationPriority || _animationPriority == 0.0f) {
        _rotationInParentFrame = safeMix(_rotationInParentFrame, _fbxJoint->rotation, fraction);
        _animationPriority = 0.0f;
    }
}
Example #3
// use iterative forces to keep the camera at the desired position and angle
void Camera::updateFollowMode(float deltaTime) {  
    if (_linearModeShift < 1.0f) {
        _linearModeShift += deltaTime / _modeShiftPeriod;
        if (_needsToInitialize || _linearModeShift > 1.0f) {
            _linearModeShift = 1.0f;
            _modeShift = 1.0f;
            _upShift   = _newUpShift;
            _distance  = _newDistance;
            _tightness = _newTightness;
        } else {
            _modeShift = ONE_HALF - ONE_HALF * cosf(_linearModeShift * PI);
            _upShift   = _previousUpShift   * (1.0f - _modeShift) + _newUpShift   * _modeShift;
            _distance  = _previousDistance  * (1.0f - _modeShift) + _newDistance  * _modeShift;
            _tightness = _previousTightness * (1.0f - _modeShift) + _newTightness * _modeShift;
        }
    }

    // derive t from tightness
    float t = _tightness * _modeShift * deltaTime;	
    if (t > 1.0f) {
        t = 1.0f;
    }
    
    // handle keepLookingAt
    if (_isKeepLookingAt) {
        lookAt(_lookingAt);
    }
    
    // Update position and rotation, setting directly if tightness is 0.0
    if (_needsToInitialize || (_tightness == 0.0f)) {
        _rotation = _targetRotation;
        _idealPosition = _targetPosition + _scale * (_rotation * glm::vec3(0.0f, _upShift, _distance));
        _position = _idealPosition;
        _needsToInitialize = false;
    } else {
        // pull rotation towards ideal
        _rotation = safeMix(_rotation, _targetRotation, t);
        _idealPosition = _targetPosition + _scale * (_rotation * glm::vec3(0.0f, _upShift, _distance));
        _position += (_idealPosition - _position) * t;
    }
}
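The raised-cosine ramp in the mode-shift branch above maps the linear parameter onto an S-curve, so camera transitions start and end with zero velocity. Isolated as a minimal sketch (easeInOut is an illustrative name; the constants mirror ONE_HALF and PI above):

#include <cmath>

// 0 at t = 0, 1 at t = 1, with zero slope at both ends -- the same shaping
// the camera applies to _modeShift while _linearModeShift ramps linearly.
float easeInOut(float t) {
    return 0.5f - 0.5f * std::cos(t * 3.14159265f);
}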
Example #4
void Player::play() {
    computeCurrentFrame();
    if (_currentFrame < 0 || (_currentFrame >= _recording->getFrameNumber() - 2)) { // -2 because of interpolation
        if (_loop) {
            loopRecording();
        } else {
            stopPlaying();
        }
        return;
    }
    
    const RecordingContext* context = &_recording->getContext();
    if (_playFromCurrentPosition) {
        context = &_currentContext;
    }
    const RecordingFrame& currentFrame = _recording->getFrame(_currentFrame);
    const RecordingFrame& nextFrame = _recording->getFrame(_currentFrame + 1);
    
    glm::vec3 translation = glm::mix(currentFrame.getTranslation(),
                                     nextFrame.getTranslation(),
                                     _frameInterpolationFactor);
    _avatar->setPosition(context->position + context->orientation * translation);
    
    glm::quat rotation = safeMix(currentFrame.getRotation(),
                                 nextFrame.getRotation(),
                                 _frameInterpolationFactor);
    _avatar->setOrientation(context->orientation * rotation);
    
    float scale = glm::mix(currentFrame.getScale(),
                           nextFrame.getScale(),
                           _frameInterpolationFactor);
    _avatar->setTargetScale(context->scale * scale);
    
    
    QVector<glm::quat> jointRotations(currentFrame.getJointRotations().size());
    for (int i = 0; i < currentFrame.getJointRotations().size(); ++i) {
        jointRotations[i] = safeMix(currentFrame.getJointRotations()[i],
                                    nextFrame.getJointRotations()[i],
                                    _frameInterpolationFactor);
    }
    _avatar->setJointRotations(jointRotations);
    
    HeadData* head = const_cast<HeadData*>(_avatar->getHeadData());
    if (head) {
        // Make sure fake face tracker connection doesn't get turned off
        _avatar->setForceFaceTrackerConnected(true);
        
        QVector<float> blendCoef(currentFrame.getBlendshapeCoefficients().size());
        for (int i = 0; i < currentFrame.getBlendshapeCoefficients().size(); ++i) {
            blendCoef[i] = glm::mix(currentFrame.getBlendshapeCoefficients()[i],
                                    nextFrame.getBlendshapeCoefficients()[i],
                                    _frameInterpolationFactor);
        }
        head->setBlendshapeCoefficients(blendCoef);
        
        float leanSideways = glm::mix(currentFrame.getLeanSideways(),
                                      nextFrame.getLeanSideways(),
                                      _frameInterpolationFactor);
        head->setLeanSideways(leanSideways);
        
        float leanForward = glm::mix(currentFrame.getLeanForward(),
                                     nextFrame.getLeanForward(),
                                     _frameInterpolationFactor);
        head->setLeanForward(leanForward);
        
        glm::quat headRotation = safeMix(currentFrame.getHeadRotation(),
                                         nextFrame.getHeadRotation(),
                                         _frameInterpolationFactor);
        glm::vec3 eulers = glm::degrees(safeEulerAngles(headRotation));
        head->setFinalPitch(eulers.x);
        head->setFinalYaw(eulers.y);
        head->setFinalRoll(eulers.z);
        
        
        glm::vec3 lookAt = glm::mix(currentFrame.getLookAtPosition(),
                                    nextFrame.getLookAtPosition(),
                                    _frameInterpolationFactor);
        head->setLookAtPosition(context->position + context->orientation * lookAt);
    } else {
        qCDebug(avatars) << "WARNING: Player couldn't find head data.";
    }
    
    _options.position = _avatar->getPosition();
    _options.orientation = _avatar->getOrientation();
    _injector->setOptions(_options);
}
Example #5
void DdeFaceTracker::decodePacket(const QByteArray& buffer) {
    _lastReceiveTimestamp = usecTimestampNow();

    if (buffer.size() > MIN_PACKET_SIZE) {
        bool isFiltering = Menu::getInstance()->isOptionChecked(MenuOption::VelocityFilter);

        Packet packet;
        int bytesToCopy = glm::min((int)sizeof(packet), buffer.size());
        memset(&packet.name, '\n', MAX_NAME_SIZE + 1);
        memcpy(&packet, buffer.data(), bytesToCopy);
        
        glm::vec3 translation;
        memcpy(&translation, packet.translation, sizeof(packet.translation));
        glm::quat rotation;
        memcpy(&rotation, &packet.rotation, sizeof(packet.rotation));
        if (_reset || (_lastMessageReceived == 0)) {
            memcpy(&_referenceTranslation, &translation, sizeof(glm::vec3));
            memcpy(&_referenceRotation, &rotation, sizeof(glm::quat));
            _reset = false;
        }

        // Compute relative translation
        float LEAN_DAMPING_FACTOR = 75.0f;
        translation -= _referenceTranslation;
        translation /= LEAN_DAMPING_FACTOR;
        translation.x *= -1;
        if (isFiltering) {
            glm::vec3 linearVelocity = (translation - _lastHeadTranslation) / _averageMessageTime;
            const float LINEAR_VELOCITY_FILTER_STRENGTH = 0.3f;
            float velocityFilter = glm::clamp(1.0f - glm::length(linearVelocity) *
                LINEAR_VELOCITY_FILTER_STRENGTH, 0.0f, 1.0f);
            _filteredHeadTranslation = velocityFilter * _filteredHeadTranslation + (1.0f - velocityFilter) * translation;
            _lastHeadTranslation = translation;
            _headTranslation = _filteredHeadTranslation;
        } else {
            _headTranslation = translation;
        }

        // Compute relative rotation
        rotation = glm::inverse(_referenceRotation) * rotation;
        if (isFiltering) {
            glm::quat r = rotation * glm::inverse(_headRotation);
            float theta = 2 * acos(r.w);
            glm::vec3 angularVelocity;
            if (theta > EPSILON) {
                float rMag = glm::length(glm::vec3(r.x, r.y, r.z));
                angularVelocity = theta / _averageMessageTime * glm::vec3(r.x, r.y, r.z) / rMag;
            } else {
                angularVelocity = glm::vec3(0, 0, 0);
            }
            const float ANGULAR_VELOCITY_FILTER_STRENGTH = 0.3f;
            _headRotation = safeMix(_headRotation, rotation, glm::clamp(glm::length(angularVelocity) *
                ANGULAR_VELOCITY_FILTER_STRENGTH, 0.0f, 1.0f));
        } else {
            _headRotation = rotation;
        }

        // Translate DDE coefficients to Faceshift compatible coefficients
        for (int i = 0; i < NUM_EXPRESSIONS; i++) {
            _coefficients[DDE_TO_FACESHIFT_MAPPING[i]] = packet.expressions[i];
        }

        // Calibration
        if (_isCalibrating) {
            addCalibrationDatum();
        }
        for (int i = 0; i < NUM_FACESHIFT_BLENDSHAPES; i++) {
            _coefficients[i] -= _coefficientAverages[i];
        }

        // Use BrowsU_C to control both brows' up and down
        float browUp = _coefficients[_browUpCenterIndex];
        if (isFiltering) {
            const float BROW_VELOCITY_FILTER_STRENGTH = 0.5f;
            float velocity = fabs(browUp - _lastBrowUp) / _averageMessageTime;
            float velocityFilter = glm::clamp(velocity * BROW_VELOCITY_FILTER_STRENGTH, 0.0f, 1.0f);
            _filteredBrowUp = velocityFilter * browUp + (1.0f - velocityFilter) * _filteredBrowUp;
            _lastBrowUp = browUp;
            browUp = _filteredBrowUp;
            _coefficients[_browUpCenterIndex] = browUp;
        }
        _coefficients[_browUpLeftIndex] = browUp;
        _coefficients[_browUpRightIndex] = browUp;
        _coefficients[_browDownLeftIndex] = -browUp;
        _coefficients[_browDownRightIndex] = -browUp;

        // Offset jaw open coefficient
        static const float JAW_OPEN_THRESHOLD = 0.1f;
        _coefficients[_jawOpenIndex] = _coefficients[_jawOpenIndex] - JAW_OPEN_THRESHOLD;

        // Offset smile coefficients
        static const float SMILE_THRESHOLD = 0.5f;
        _coefficients[_mouthSmileLeftIndex] = _coefficients[_mouthSmileLeftIndex] - SMILE_THRESHOLD;
        _coefficients[_mouthSmileRightIndex] = _coefficients[_mouthSmileRightIndex] - SMILE_THRESHOLD;

        // Velocity filter EyeBlink values
        const float DDE_EYEBLINK_SCALE = 3.0f;
        float eyeBlinks[] = { DDE_EYEBLINK_SCALE * _coefficients[_leftBlinkIndex], DDE_EYEBLINK_SCALE * _coefficients[_rightBlinkIndex] };
        if (isFiltering) {
            const float BLINK_VELOCITY_FILTER_STRENGTH = 0.3f;
            for (int i = 0; i < 2; i++) {
                float velocity = fabs(eyeBlinks[i] - _lastEyeBlinks[i]) / _averageMessageTime;
                float velocityFilter = glm::clamp(velocity * BLINK_VELOCITY_FILTER_STRENGTH, 0.0f, 1.0f);
                _filteredEyeBlinks[i] = velocityFilter * eyeBlinks[i] + (1.0f - velocityFilter) * _filteredEyeBlinks[i];
                _lastEyeBlinks[i] = eyeBlinks[i];
            }
        }

        // Finesse EyeBlink values
        float eyeCoefficients[2];
        if (Menu::getInstance()->isOptionChecked(MenuOption::BinaryEyelidControl)) {
            if (_eyeStates[0] == EYE_UNCONTROLLED) {
                _eyeStates[0] = EYE_OPEN;
                _eyeStates[1] = EYE_OPEN;
            }

            for (int i = 0; i < 2; i++) {
                // Scale EyeBlink values so that they can be used to control both EyeBlink and EyeOpen
                // -ve values control EyeOpen; +ve values control EyeBlink
                static const float EYE_CONTROL_THRESHOLD = 0.5f;  // Resting eye value
                eyeCoefficients[i] = (_filteredEyeBlinks[i] - EYE_CONTROL_THRESHOLD) / (1.0f - EYE_CONTROL_THRESHOLD);

                // Change to closing or opening states
                const float EYE_CONTROL_HYSTERESIS = 0.25f;
                float eyeClosingThreshold = getEyeClosingThreshold();
                float eyeOpeningThreshold = eyeClosingThreshold - EYE_CONTROL_HYSTERESIS;
                if ((_eyeStates[i] == EYE_OPEN || _eyeStates[i] == EYE_OPENING) && eyeCoefficients[i] > eyeClosingThreshold) {
                    _eyeStates[i] = EYE_CLOSING;
                } else if ((_eyeStates[i] == EYE_CLOSED || _eyeStates[i] == EYE_CLOSING)
                    && eyeCoefficients[i] < eyeOpeningThreshold) {
                    _eyeStates[i] = EYE_OPENING;
                }

                const float EYELID_MOVEMENT_RATE = 10.0f;  // units/second
                const float EYE_OPEN_SCALE = 0.2f;
                if (_eyeStates[i] == EYE_CLOSING) {
                    // Close eyelid until it's fully closed
                    float closingValue = _lastEyeCoefficients[i] + EYELID_MOVEMENT_RATE * _averageMessageTime;
                    if (closingValue >= 1.0f) {
                        _eyeStates[i] = EYE_CLOSED;
                        eyeCoefficients[i] = 1.0f;
                    } else {
                        eyeCoefficients[i] = closingValue;
                    }
                } else if (_eyeStates[i] == EYE_OPENING) {
                    // Open eyelid until it meets the current adjusted value
                    float openingValue = _lastEyeCoefficients[i] - EYELID_MOVEMENT_RATE * _averageMessageTime;
                    if (openingValue < eyeCoefficients[i] * EYE_OPEN_SCALE) {
                        _eyeStates[i] = EYE_OPEN;
                        eyeCoefficients[i] = eyeCoefficients[i] * EYE_OPEN_SCALE;
                    } else {
                        eyeCoefficients[i] = openingValue;
                    }
                } else  if (_eyeStates[i] == EYE_OPEN) {
                    // Reduce eyelid movement
                    eyeCoefficients[i] = eyeCoefficients[i] * EYE_OPEN_SCALE;
                } else if (_eyeStates[i] == EYE_CLOSED) {
                    // Keep eyelid fully closed
                    eyeCoefficients[i] = 1.0f;
                }
            }

            if (_eyeStates[0] == EYE_OPEN && _eyeStates[1] == EYE_OPEN) {
                // Couple eyelids
                eyeCoefficients[0] = eyeCoefficients[1] = (eyeCoefficients[0] + eyeCoefficients[1]) / 2.0f;
            }

            _lastEyeCoefficients[0] = eyeCoefficients[0];
            _lastEyeCoefficients[1] = eyeCoefficients[1];
        } else {
            _eyeStates[0] = EYE_UNCONTROLLED;
            _eyeStates[1] = EYE_UNCONTROLLED;

            eyeCoefficients[0] = _filteredEyeBlinks[0];
            eyeCoefficients[1] = _filteredEyeBlinks[1];
        }

        // Use EyeBlink values to control both EyeBlink and EyeOpen
        if (eyeCoefficients[0] > 0) {
            _coefficients[_leftBlinkIndex] = eyeCoefficients[0];
            _coefficients[_leftEyeOpenIndex] = 0.0f;
        } else {
            _coefficients[_leftBlinkIndex] = 0.0f;
            _coefficients[_leftEyeOpenIndex] = -eyeCoefficients[0];
        }
        if (eyeCoefficients[1] > 0) {
            _coefficients[_rightBlinkIndex] = eyeCoefficients[1];
            _coefficients[_rightEyeOpenIndex] = 0.0f;
        } else {
            _coefficients[_rightBlinkIndex] = 0.0f;
            _coefficients[_rightEyeOpenIndex] = -eyeCoefficients[1];
        }

        // Scale all coefficients
        for (int i = 0; i < NUM_EXPRESSIONS; i++) {
            _blendshapeCoefficients[i]
                = glm::clamp(DDE_COEFFICIENT_SCALES[i] * _coefficients[i], 0.0f, 1.0f);
        }

        // Calculate average frame time
        const float FRAME_AVERAGING_FACTOR = 0.99f;
        quint64 usecsNow = usecTimestampNow();
        if (_lastMessageReceived != 0) {
            _averageMessageTime = FRAME_AVERAGING_FACTOR * _averageMessageTime 
                + (1.0f - FRAME_AVERAGING_FACTOR) * (float)(usecsNow - _lastMessageReceived) / 1000000.0f;
        }
        _lastMessageReceived = usecsNow;

        FaceTracker::countFrame();
        
    } else {
        qCWarning(interfaceapp) << "DDE Face Tracker: Decode error";
    }

    if (_isCalibrating && _calibrationCount > CALIBRATION_SAMPLES) {
        finishCalibration();
    }
}
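The DDE tracker leans on one recurring idiom: a velocity-sensitive low-pass filter that damps jitter when the signal is nearly still but lets fast motion through with almost no latency (the Sixense examples below use the same trick for palm position and rotation). A standalone sketch of one filter step, with illustrative names:

#include <algorithm>
#include <cmath>

// One step of the velocity-sensitive filter: the faster the input changes,
// the more the fresh sample outweighs the smoothed history.
float velocityFilterStep(float sample, float lastSample, float filtered,
                         float dtSeconds, float strength) {
    float velocity = std::fabs(sample - lastSample) / dtSeconds;
    float k = std::clamp(velocity * strength, 0.0f, 1.0f);
    return k * sample + (1.0f - k) * filtered;
}

The head-translation branch above computes its weight as 1 - |v| * strength and swaps the blend terms, but the effect is the same: high velocity favors the fresh sample.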
Example #6
void SixenseManager::update(float deltaTime) {
#ifdef HAVE_SIXENSE
    // if the controllers haven't been moved in a while, disable
    const unsigned int MOVEMENT_DISABLE_SECONDS = 3;
    if (usecTimestampNow() - _lastMovement > (MOVEMENT_DISABLE_SECONDS * USECS_PER_SECOND)) {
        Hand* hand = Application::getInstance()->getAvatar()->getHand();
        for (std::vector<PalmData>::iterator it = hand->getPalms().begin(); it != hand->getPalms().end(); it++) {
            it->setActive(false);
        }
        _lastMovement = usecTimestampNow();
    }

    if (sixenseGetNumActiveControllers() == 0) {
        _hydrasConnected = false;
        return;
    } 

    PerformanceTimer perfTimer("sixense");
    if (!_hydrasConnected) {
        _hydrasConnected = true;
        UserActivityLogger::getInstance().connectedDevice("spatial_controller", "hydra");
    }
    MyAvatar* avatar = Application::getInstance()->getAvatar();
    Hand* hand = avatar->getHand();
    
    int maxControllers = sixenseGetMaxControllers();

    // we only support two controllers
    sixenseControllerData controllers[2];

    int numActiveControllers = 0;
    for (int i = 0; i < maxControllers && numActiveControllers < 2; i++) {
        if (!sixenseIsControllerEnabled(i)) {
            continue;
        }
        sixenseControllerData* data = controllers + numActiveControllers;
        ++numActiveControllers;
        sixenseGetNewestData(i, data);
        
        //  Set palm position and normal based on Hydra position/orientation
        
        // Either find a palm matching the sixense controller, or make a new one
        PalmData* palm;
        bool foundHand = false;
        for (size_t j = 0; j < hand->getNumPalms(); j++) {
            if (hand->getPalms()[j].getSixenseID() == data->controller_index) {
                palm = &(hand->getPalms()[j]);
                foundHand = true;
            }
        }
        if (!foundHand) {
            PalmData newPalm(hand);
            hand->getPalms().push_back(newPalm);
            palm = &(hand->getPalms()[hand->getNumPalms() - 1]);
            palm->setSixenseID(data->controller_index);
            qDebug("Found new Sixense controller, ID %i", data->controller_index);
        }
        
        palm->setActive(true);
        
        //  Read controller buttons and joystick into the hand
        palm->setControllerButtons(data->buttons);
        palm->setTrigger(data->trigger);
        palm->setJoystick(data->joystick_x, data->joystick_y);


        // Emulate the mouse so we can use scripts
        if (Menu::getInstance()->isOptionChecked(MenuOption::SixenseMouseInput)) {
            emulateMouse(palm, numActiveControllers - 1);
        }

        // NOTE: Sixense API returns pos data in millimeters but we IMMEDIATELY convert to meters.
        glm::vec3 position(data->pos[0], data->pos[1], data->pos[2]);
        position *= METERS_PER_MILLIMETER;

        // Transform the measured position into body frame.  
        glm::vec3 neck = _neckBase;
        // Zeroing y component of the "neck" effectively raises the measured position a little bit.
        neck.y = 0.f;
        position = _orbRotation * (position - neck);

        //  Rotation of Palm
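        // Sixense stores the quaternion as (x, y, z, w); glm::quat wants (w, x, y, z),
        // and the x and z components are negated to match our coordinate frame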
        glm::quat rotation(data->rot_quat[3], -data->rot_quat[0], data->rot_quat[1], -data->rot_quat[2]);
        rotation = glm::angleAxis(PI, glm::vec3(0.f, 1.f, 0.f)) * _orbRotation * rotation;
        
        //  Compute current velocity from position change
        glm::vec3 rawVelocity;
        if (deltaTime > 0.f) {
            rawVelocity = (position - palm->getRawPosition()) / deltaTime; 
        } else {
            rawVelocity = glm::vec3(0.0f);
        }
        palm->setRawVelocity(rawVelocity);   //  meters/sec
    
        // adjustment for hydra controllers fit into hands
        float sign = (i == 0) ? -1.0f : 1.0f;
        rotation *= glm::angleAxis(sign * PI/4.0f, glm::vec3(0.0f, 0.0f, 1.0f));

        if (_lowVelocityFilter) {
            //  Use a velocity sensitive filter to damp small motions and preserve large ones with
            //  no latency.
            float velocityFilter = glm::clamp(1.0f - glm::length(rawVelocity), 0.0f, 1.0f);
            position = palm->getRawPosition() * velocityFilter + position * (1.0f - velocityFilter);
            rotation = safeMix(palm->getRawRotation(), rotation, 1.0f - velocityFilter);
            palm->setRawPosition(position);
            palm->setRawRotation(rotation);
        } else {
            palm->setRawPosition(position);
            palm->setRawRotation(rotation);
        }

        // use the velocity to determine whether there's any movement (if the hand isn't new)
        const float MOVEMENT_DISTANCE_THRESHOLD = 0.003f;
        _amountMoved += rawVelocity * deltaTime;
        if (glm::length(_amountMoved) > MOVEMENT_DISTANCE_THRESHOLD && foundHand) {
            _lastMovement = usecTimestampNow();
            _amountMoved = glm::vec3(0.0f);
        }
        
        // Store the one fingertip in the palm structure so we can track velocity
        const float FINGER_LENGTH = 0.3f;   //  meters
        const glm::vec3 FINGER_VECTOR(0.0f, 0.0f, FINGER_LENGTH);
        const glm::vec3 newTipPosition = position + rotation * FINGER_VECTOR;
        glm::vec3 oldTipPosition = palm->getTipRawPosition();
        if (deltaTime > 0.f) {
            palm->setTipVelocity((newTipPosition - oldTipPosition) / deltaTime);
        } else {
            palm->setTipVelocity(glm::vec3(0.f));
        }
        palm->setTipPosition(newTipPosition);
    }

    if (numActiveControllers == 2) {
        updateCalibration(controllers);
    }
#endif  // HAVE_SIXENSE
}
Example #7
void SixenseManager::update(float deltaTime) {
#ifdef HAVE_SIXENSE
    Hand* hand = DependencyManager::get<AvatarManager>()->getMyAvatar()->getHand();
    if (_isInitialized && _isEnabled) {
#ifdef __APPLE__
        SixenseBaseFunction sixenseGetNumActiveControllers =
        (SixenseBaseFunction) _sixenseLibrary->resolve("sixenseGetNumActiveControllers");
#endif
        
        if (sixenseGetNumActiveControllers() == 0) {
            _hydrasConnected = false;
            return;
        }
        
        PerformanceTimer perfTimer("sixense");
        if (!_hydrasConnected) {
            _hydrasConnected = true;
            UserActivityLogger::getInstance().connectedDevice("spatial_controller", "hydra");
        }
        
#ifdef __APPLE__
        SixenseBaseFunction sixenseGetMaxControllers =
        (SixenseBaseFunction) _sixenseLibrary->resolve("sixenseGetMaxControllers");
#endif
        
        int maxControllers = sixenseGetMaxControllers();
        
        // we only support two controllers
        sixenseControllerData controllers[2];
        
#ifdef __APPLE__
        SixenseTakeIntFunction sixenseIsControllerEnabled =
        (SixenseTakeIntFunction) _sixenseLibrary->resolve("sixenseIsControllerEnabled");
        
        SixenseTakeIntAndSixenseControllerData sixenseGetNewestData =
        (SixenseTakeIntAndSixenseControllerData) _sixenseLibrary->resolve("sixenseGetNewestData");
#endif
        int numControllersAtBase = 0;
        int numActiveControllers = 0;
        for (int i = 0; i < maxControllers && numActiveControllers < 2; i++) {
            if (!sixenseIsControllerEnabled(i)) {
                continue;
            }
            sixenseControllerData* data = controllers + numActiveControllers;
            ++numActiveControllers;
            sixenseGetNewestData(i, data);
            
            //  Set palm position and normal based on Hydra position/orientation
            
            // Either find a palm matching the sixense controller, or make a new one
            PalmData* palm;
            bool foundHand = false;
            for (size_t j = 0; j < hand->getNumPalms(); j++) {
                if (hand->getPalms()[j].getSixenseID() == data->controller_index) {
                    palm = &(hand->getPalms()[j]);
                    foundHand = true;
                }
            }
            if (!foundHand) {
                PalmData newPalm(hand);
                hand->getPalms().push_back(newPalm);
                palm = &(hand->getPalms()[hand->getNumPalms() - 1]);
                palm->setSixenseID(data->controller_index);
                qCDebug(interfaceapp, "Found new Sixense controller, ID %i", data->controller_index);
            }
            
            // Disable the hands (and return to default pose) if both controllers are at base station
            if (foundHand) {
                palm->setActive(!_controllersAtBase);
            } else {
                palm->setActive(false); // if this isn't a Sixense ID palm, always make it inactive
            }
            
            
            //  Read controller buttons and joystick into the hand
            palm->setControllerButtons(data->buttons);
            palm->setTrigger(data->trigger);
            palm->setJoystick(data->joystick_x, data->joystick_y);
            
            // Emulate the mouse so we can use scripts
            if (Menu::getInstance()->isOptionChecked(MenuOption::SixenseMouseInput) && !_controllersAtBase) {
                emulateMouse(palm, numActiveControllers - 1);
            }
            
            // NOTE: Sixense API returns pos data in millimeters but we IMMEDIATELY convert to meters.
            glm::vec3 position(data->pos[0], data->pos[1], data->pos[2]);
            position *= METERS_PER_MILLIMETER;
            
            // Check to see if this hand/controller is on the base
            const float CONTROLLER_AT_BASE_DISTANCE = 0.075f;
            if (glm::length(position) < CONTROLLER_AT_BASE_DISTANCE) {
                numControllersAtBase++;
            }
            
            // Transform the measured position into body frame.
            glm::vec3 neck = _neckBase;
            // Zeroing y component of the "neck" effectively raises the measured position a little bit.
            neck.y = 0.0f;
            position = _orbRotation * (position - neck);
            
            //  Rotation of Palm
            glm::quat rotation(data->rot_quat[3], -data->rot_quat[0], data->rot_quat[1], -data->rot_quat[2]);
            rotation = glm::angleAxis(PI, glm::vec3(0.0f, 1.0f, 0.0f)) * _orbRotation * rotation;
            
            //  Compute current velocity from position change
            glm::vec3 rawVelocity;
            if (deltaTime > 0.0f) {
                rawVelocity = (position - palm->getRawPosition()) / deltaTime;
            } else {
                rawVelocity = glm::vec3(0.0f);
            }
            palm->setRawVelocity(rawVelocity);   //  meters/sec
            
            // adjustment for hydra controllers fit into hands
            float sign = (i == 0) ? -1.0f : 1.0f;
            rotation *= glm::angleAxis(sign * PI/4.0f, glm::vec3(0.0f, 0.0f, 1.0f));
            
            //  Angular Velocity of Palm
            glm::quat deltaRotation = rotation * glm::inverse(palm->getRawRotation());
            glm::vec3 angularVelocity(0.0f);
            float rotationAngle = glm::angle(deltaRotation);
            if ((rotationAngle > EPSILON) && (deltaTime > 0.0f)) {
                angularVelocity = glm::normalize(glm::axis(deltaRotation));
                angularVelocity *= (rotationAngle / deltaTime);
                palm->setRawAngularVelocity(angularVelocity);
            } else {
                palm->setRawAngularVelocity(glm::vec3(0.0f));
            }
            
            if (_lowVelocityFilter) {
                //  Use a velocity sensitive filter to damp small motions and preserve large ones with
                //  no latency.
                float velocityFilter = glm::clamp(1.0f - glm::length(rawVelocity), 0.0f, 1.0f);
                position = palm->getRawPosition() * velocityFilter + position * (1.0f - velocityFilter);
                rotation = safeMix(palm->getRawRotation(), rotation, 1.0f - velocityFilter);
                palm->setRawPosition(position);
                palm->setRawRotation(rotation);
            } else {
                palm->setRawPosition(position);
                palm->setRawRotation(rotation);
            }
            
            // Store the one fingertip in the palm structure so we can track velocity
            const float FINGER_LENGTH = 0.3f;   //  meters
            const glm::vec3 FINGER_VECTOR(0.0f, 0.0f, FINGER_LENGTH);
            const glm::vec3 newTipPosition = position + rotation * FINGER_VECTOR;
            glm::vec3 oldTipPosition = palm->getTipRawPosition();
            if (deltaTime > 0.0f) {
                palm->setTipVelocity((newTipPosition - oldTipPosition) / deltaTime);
            } else {
                palm->setTipVelocity(glm::vec3(0.0f));
            }
            palm->setTipPosition(newTipPosition);
        }
        
        if (numActiveControllers == 2) {
            updateCalibration(controllers);
        }
        _controllersAtBase = (numControllersAtBase == 2);
    }
#endif  // HAVE_SIXENSE
}
Example #8
void Player::play() {
    computeCurrentFrame();
    if (_currentFrame < 0 || (_currentFrame >= _recording->getFrameNumber() - 2)) { // -2 because of interpolation
        if (_loop) {
            loopRecording();
        } else {
            stopPlaying();
        }
        return;
    }
    
    const RecordingContext* context = &_recording->getContext();
    if (_playFromCurrentPosition) {
        context = &_currentContext;
    }
    const RecordingFrame& currentFrame = _recording->getFrame(_currentFrame);
    const RecordingFrame& nextFrame = _recording->getFrame(_currentFrame + 1);
    
    glm::vec3 translation = glm::mix(currentFrame.getTranslation(),
                                     nextFrame.getTranslation(),
                                     _frameInterpolationFactor);
    _avatar->setPosition(context->position + context->orientation * translation);
    
    glm::quat rotation = safeMix(currentFrame.getRotation(),
                                 nextFrame.getRotation(),
                                 _frameInterpolationFactor);
    _avatar->setOrientation(context->orientation * rotation);
    
    float scale = glm::mix(currentFrame.getScale(),
                           nextFrame.getScale(),
                           _frameInterpolationFactor);
    _avatar->setTargetScale(context->scale * scale);

    // Joint array playback
    // FIXME: This is still using a deprecated path to assign the joint orientation since setting the full RawJointData array doesn't
    //        work for Avatar. We need to fix this together with the animation team
    const auto& prevJointArray = currentFrame.getJointArray();
    const auto& nextJointArray = nextFrame.getJointArray();
    QVector<JointData> jointArray(prevJointArray.size());
    QVector<glm::quat> jointRotations(prevJointArray.size()); // FIXME: remove once the setRawJointData is fixed
    QVector<glm::vec3> jointTranslations(prevJointArray.size()); // FIXME: remove once the setRawJointData is fixed

    for (int i = 0; i < jointArray.size(); i++) {
        const auto& prevJoint = prevJointArray[i];
        const auto& nextJoint = nextJointArray[i];
        auto& joint = jointArray[i];

        // Rotation
        joint.rotationSet = prevJoint.rotationSet || nextJoint.rotationSet;
        if (joint.rotationSet) {
            joint.rotation = safeMix(prevJoint.rotation, nextJoint.rotation, _frameInterpolationFactor);
            jointRotations[i] = joint.rotation; // FIXME: remove once the setRawJointData is fixed
        }

        joint.translationSet = prevJoint.translationSet || nextJoint.translationSet;
        if (joint.translationSet) {
            joint.translation = glm::mix(prevJoint.translation, nextJoint.translation, _frameInterpolationFactor);
            jointTranslations[i] = joint.translation; // FIXME: remove once the setRawJointData is fixed
        }
    }

    // _avatar->setRawJointData(jointArray); // FIXME: Enable once the setRawJointData is fixed
    _avatar->setJointRotations(jointRotations); // FIXME: remove once the setRawJointData is fixed
    // _avatar->setJointTranslations(jointTranslations); // FIXME: remove once the setRawJointData is fixed

    HeadData* head = const_cast<HeadData*>(_avatar->getHeadData());
    if (head) {
        // Make sure fake face tracker connection doesn't get turned off
        _avatar->setForceFaceTrackerConnected(true);
        
        QVector<float> blendCoef(currentFrame.getBlendshapeCoefficients().size());
        for (int i = 0; i < currentFrame.getBlendshapeCoefficients().size(); ++i) {
            blendCoef[i] = glm::mix(currentFrame.getBlendshapeCoefficients()[i],
                                    nextFrame.getBlendshapeCoefficients()[i],
                                    _frameInterpolationFactor);
        }
        head->setBlendshapeCoefficients(blendCoef);
        
        float leanSideways = glm::mix(currentFrame.getLeanSideways(),
                                      nextFrame.getLeanSideways(),
                                      _frameInterpolationFactor);
        head->setLeanSideways(leanSideways);
        
        float leanForward = glm::mix(currentFrame.getLeanForward(),
                                     nextFrame.getLeanForward(),
                                     _frameInterpolationFactor);
        head->setLeanForward(leanForward);
        
        glm::quat headRotation = safeMix(currentFrame.getHeadRotation(),
                                         nextFrame.getHeadRotation(),
                                         _frameInterpolationFactor);
        glm::vec3 eulers = glm::degrees(safeEulerAngles(headRotation));
        head->setFinalPitch(eulers.x);
        head->setFinalYaw(eulers.y);
        head->setFinalRoll(eulers.z);
        
        
        glm::vec3 lookAt = glm::mix(currentFrame.getLookAtPosition(),
                                    nextFrame.getLookAtPosition(),
                                    _frameInterpolationFactor);
        head->setLookAtPosition(context->position + context->orientation * lookAt);
    } else {
        qCDebug(avatars) << "WARNING: Player couldn't find head data.";
    }
    
    _options.position = _avatar->getPosition();
    _options.orientation = _avatar->getOrientation();
    _injector->setOptions(_options);
}
Example #9
const glm::quat FaceTracker::getHeadRotation() const {
    return safeMix(glm::quat(), _headRotation, getFadeCoefficient());
}
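Example #9 is worth a note: blending from the identity quaternion scales the rotation's angle by the fade coefficient, so the head rotation eases in and out as tracking fades. A sketch of the same idea using plain glm::slerp (safeMix adds hemisphere correction on top of this; fadedHeadRotation is our illustrative name, assuming a radians-based glm build):

#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

// slerp away from the identity scales the rotation angle: a 90-degree yaw
// interpolated at fade = 0.5 becomes a 45-degree yaw about the same axis.
glm::quat fadedHeadRotation(const glm::quat& headRotation, float fade) {
    return glm::slerp(glm::quat(1.0f, 0.0f, 0.0f, 0.0f), headRotation, fade);
}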
Example #10
void SerialInterface::readData(float deltaTime) {
#ifndef _WIN32
    
    int initialSamples = totalSamples;
    
    if (USING_INVENSENSE_MPU9150) { 

        // ask the invensense for raw accelerometer data
        short accelData[3];
        if (mpu_get_accel_reg(accelData, 0)) {
            close(_serialDescriptor);
            qDebug("Disconnected SerialUSB.\n");
            _active = false;
            return; // disconnected
        }
        
        const float LSB_TO_METERS_PER_SECOND2 = 1.f / 16384.f * GRAVITY_EARTH;
                                                                //  From MPU-9150 register map, with setting on
                                                                //  highest resolution = +/- 2G
        
        _lastAcceleration = glm::vec3(-accelData[2], -accelData[1], -accelData[0]) * LSB_TO_METERS_PER_SECOND2;
          
        short gyroData[3];
        mpu_get_gyro_reg(gyroData, 0);
        
        //  Convert the integer rates to floats
        const float LSB_TO_DEGREES_PER_SECOND = 1.f / 16.4f;     //  From MPU-9150 register map, 2000 deg/sec.
        glm::vec3 rotationRates;
        rotationRates[0] = ((float) -gyroData[2]) * LSB_TO_DEGREES_PER_SECOND;
        rotationRates[1] = ((float) -gyroData[1]) * LSB_TO_DEGREES_PER_SECOND;
        rotationRates[2] = ((float) -gyroData[0]) * LSB_TO_DEGREES_PER_SECOND;
      
        short compassData[3];
        mpu_get_compass_reg(compassData, 0);
      
        // Convert integer values to floats, update extents
        _lastCompass = glm::vec3(compassData[2], -compassData[0], -compassData[1]);
        
        // update and subtract the long term average
        _averageRotationRates = (1.f - 1.f/(float)LONG_TERM_RATE_SAMPLES) * _averageRotationRates +
                1.f/(float)LONG_TERM_RATE_SAMPLES * rotationRates;
        rotationRates -= _averageRotationRates;

        // compute the angular acceleration
        glm::vec3 angularAcceleration = (deltaTime < EPSILON) ? glm::vec3() : (rotationRates - _lastRotationRates) / deltaTime;
        _lastRotationRates = rotationRates;
        
        //  Update raw rotation estimates
        glm::quat estimatedRotation = glm::quat(glm::radians(_estimatedRotation)) *
            glm::quat(glm::radians(deltaTime * _lastRotationRates));
        
        //  Update acceleration estimate: first, subtract gravity as rotated into current frame
        _estimatedAcceleration = (totalSamples < GRAVITY_SAMPLES) ? glm::vec3() :
            _lastAcceleration - glm::inverse(estimatedRotation) * _gravity;
        
        // update and subtract the long term average
        _averageAcceleration = (1.f - 1.f/(float)LONG_TERM_RATE_SAMPLES) * _averageAcceleration +
                1.f/(float)LONG_TERM_RATE_SAMPLES * _estimatedAcceleration;
        _estimatedAcceleration -= _averageAcceleration;
        
        //  Consider updating our angular velocity/acceleration to linear acceleration mapping
        if (glm::length(_estimatedAcceleration) > EPSILON &&
                (glm::length(_lastRotationRates) > EPSILON || glm::length(angularAcceleration) > EPSILON)) {
            // compute predicted linear acceleration, find error between actual and predicted
            glm::vec3 predictedAcceleration = _angularVelocityToLinearAccel * _lastRotationRates +
                _angularAccelToLinearAccel * angularAcceleration;
            glm::vec3 error = _estimatedAcceleration - predictedAcceleration;
            
            // the "error" is actually what we want: the linear acceleration minus rotational influences
            _estimatedAcceleration = error;
            
            // adjust according to error in each dimension, in proportion to input magnitudes
            for (int i = 0; i < 3; i++) {
                if (fabsf(error[i]) < EPSILON) {
                    continue;
                }
                const float LEARNING_RATE = 0.001f;
                float rateSum = fabsf(_lastRotationRates.x) + fabsf(_lastRotationRates.y) + fabsf(_lastRotationRates.z);
                if (rateSum > EPSILON) {
                    for (int j = 0; j < 3; j++) {
                        float proportion = LEARNING_RATE * fabsf(_lastRotationRates[j]) / rateSum;
                        if (proportion > EPSILON) {
                            _angularVelocityToLinearAccel[j][i] += error[i] * proportion / _lastRotationRates[j];
                        }
                    }
                }
                float accelSum = fabsf(angularAcceleration.x) + fabsf(angularAcceleration.y) + fabsf(angularAcceleration.z);
                if (accelSum > EPSILON) {
                    for (int j = 0; j < 3; j++) {
                        float proportion = LEARNING_RATE * fabsf(angularAcceleration[j]) / accelSum;
                        if (proportion > EPSILON) {
                            _angularAccelToLinearAccel[j][i] += error[i] * proportion / angularAcceleration[j];
                        }
                    }                
                }
            }
        }
        
        // rotate estimated acceleration into global rotation frame
        _estimatedAcceleration = estimatedRotation * _estimatedAcceleration;
        
        //  Update estimated position and velocity
        float const DECAY_VELOCITY = 0.975f;
        float const DECAY_POSITION = 0.975f;
        _estimatedVelocity += deltaTime * _estimatedAcceleration;
        _estimatedPosition += deltaTime * _estimatedVelocity;
        _estimatedVelocity *= DECAY_VELOCITY;
        
        //  Attempt to fuse gyro position with webcam position
        Webcam* webcam = Application::getInstance()->getWebcam();
        if (webcam->isActive()) {
            const float WEBCAM_POSITION_FUSION = 0.5f;
            _estimatedPosition = glm::mix(_estimatedPosition, webcam->getEstimatedPosition(), WEBCAM_POSITION_FUSION);
               
        } else {
            _estimatedPosition *= DECAY_POSITION;
        }
            
        //  Accumulate a set of initial baseline readings for setting gravity
        if (totalSamples == 0) {
            _gravity = _lastAcceleration;
        } 
        else {
            if (totalSamples < GRAVITY_SAMPLES) {
                _gravity = glm::mix(_gravity, _lastAcceleration, 1.0f / GRAVITY_SAMPLES);
                
                //  North samples start later, because the initial compass readings are screwy
                int northSample = totalSamples - (GRAVITY_SAMPLES - NORTH_SAMPLES);
                if (northSample == 0) {
                    _north = _lastCompass;
                    
                } else if (northSample > 0) {
                    _north = glm::mix(_north, _lastCompass, 1.0f / NORTH_SAMPLES);
                }
            } else {
                //  Use gravity reading to do sensor fusion on the pitch and roll estimation
                estimatedRotation = safeMix(estimatedRotation,
                    rotationBetween(estimatedRotation * _lastAcceleration, _gravity) * estimatedRotation,
                    1.0f / ACCELERATION_SENSOR_FUSION_SAMPLES);
                
                //  Update the compass extents
                _compassMinima = glm::min(_compassMinima, _lastCompass);
                _compassMaxima = glm::max(_compassMaxima, _lastCompass);
        
                //  Same deal with the compass heading
                estimatedRotation = safeMix(estimatedRotation,
                    rotationBetween(estimatedRotation * recenterCompass(_lastCompass),
                        recenterCompass(_north)) * estimatedRotation,
                    1.0f / COMPASS_SENSOR_FUSION_SAMPLES);
            }
        }
        
        _estimatedRotation = safeEulerAngles(estimatedRotation); 
        
        totalSamples++;
    } 
    
    if (initialSamples == totalSamples) {        
        timeval now;
        gettimeofday(&now, NULL);
        
        if (diffclock(&lastGoodRead, &now) > NO_READ_MAXIMUM_MSECS) {
            qDebug("No data - Shutting down SerialInterface.\n");
            resetSerial();
        }
    } else {
        gettimeofday(&lastGoodRead, NULL);
    }
#endif
}
Example #11
void DdeFaceTracker::decodePacket(const QByteArray& buffer) {
    if (buffer.size() > MIN_PACKET_SIZE) {
        bool isFiltering = Menu::getInstance()->isOptionChecked(MenuOption::VelocityFilter);

        Packet packet;
        int bytesToCopy = glm::min((int)sizeof(packet), buffer.size());
        memset(&packet.name, '\n', MAX_NAME_SIZE + 1);
        memcpy(&packet, buffer.data(), bytesToCopy);
        
        glm::vec3 translation;
        memcpy(&translation, packet.translation, sizeof(packet.translation));
        glm::quat rotation;
        memcpy(&rotation, &packet.rotation, sizeof(packet.rotation));
        if (_reset || (_lastReceiveTimestamp == 0)) {
            memcpy(&_referenceTranslation, &translation, sizeof(glm::vec3));
            memcpy(&_referenceRotation, &rotation, sizeof(glm::quat));
            _reset = false;
        }

        // Compute relative translation
        float LEAN_DAMPING_FACTOR = 75.0f;
        translation -= _referenceTranslation;
        translation /= LEAN_DAMPING_FACTOR;
        translation.x *= -1;
        if (isFiltering) {
            glm::vec3 linearVelocity = (translation - _lastHeadTranslation) / _averageMessageTime;
            const float LINEAR_VELOCITY_FILTER_STRENGTH = 0.3f;
            float velocityFilter = glm::clamp(1.0f - glm::length(linearVelocity) *
                LINEAR_VELOCITY_FILTER_STRENGTH, 0.0f, 1.0f);
            _filteredHeadTranslation = velocityFilter * _filteredHeadTranslation + (1.0f - velocityFilter) * translation;
            _lastHeadTranslation = translation;
            _headTranslation = _filteredHeadTranslation;
        } else {
            _headTranslation = translation;
        }

        // Compute relative rotation
        rotation = glm::inverse(_referenceRotation) * rotation;
        if (isFiltering) {
            glm::quat r = rotation * glm::inverse(_headRotation);
            float theta = 2 * acos(r.w);
            glm::vec3 angularVelocity;
            if (theta > EPSILON) {
                float rMag = glm::length(glm::vec3(r.x, r.y, r.z));
                angularVelocity = theta / _averageMessageTime * glm::vec3(r.x, r.y, r.z) / rMag;
            } else {
                angularVelocity = glm::vec3(0, 0, 0);
            }
            const float ANGULAR_VELOCITY_FILTER_STRENGTH = 0.3f;
            _headRotation = safeMix(_headRotation, rotation, glm::clamp(glm::length(angularVelocity) *
                ANGULAR_VELOCITY_FILTER_STRENGTH, 0.0f, 1.0f));
        } else {
            _headRotation = rotation;
        }

        // Translate DDE coefficients to Faceshift compatible coefficients
        for (int i = 0; i < NUM_EXPRESSIONS; i += 1) {
            _coefficients[DDE_TO_FACESHIFT_MAPPING[i]] = packet.expressions[i];
        }

        // Use EyeBlink values to control both EyeBlink and EyeOpen
        static const float RELAXED_EYE_VALUE = 0.1f;
        float leftEye = _coefficients[_leftBlinkIndex];
        float rightEye = _coefficients[_rightBlinkIndex];
        if (isFiltering) {
            const float BLINK_VELOCITY_FILTER_STRENGTH = 0.3f;

            float velocity = fabs(leftEye - _lastLeftEyeBlink) / _averageMessageTime;
            float velocityFilter = glm::clamp(velocity * BLINK_VELOCITY_FILTER_STRENGTH, 0.0f, 1.0f);
            _filteredLeftEyeBlink = velocityFilter * leftEye + (1.0f - velocityFilter) * _filteredLeftEyeBlink;
            _lastLeftEyeBlink = leftEye;
            leftEye = _filteredLeftEyeBlink;

            velocity = fabs(rightEye - _lastRightEyeBlink) / _averageMessageTime;
            velocityFilter = glm::clamp(velocity * BLINK_VELOCITY_FILTER_STRENGTH, 0.0f, 1.0f);
            _filteredRightEyeBlink = velocityFilter * rightEye + (1.0f - velocityFilter) * _filteredRightEyeBlink;
            _lastRightEyeBlink = rightEye;
            rightEye = _filteredRightEyeBlink;
        }
        if (leftEye > RELAXED_EYE_VALUE) {
            _coefficients[_leftBlinkIndex] = leftEye - RELAXED_EYE_VALUE;
            _coefficients[_leftEyeOpenIndex] = 0.0f;
        } else {
            _coefficients[_leftBlinkIndex] = 0.0f;
            _coefficients[_leftEyeOpenIndex] = RELAXED_EYE_VALUE - leftEye;
        }
        if (rightEye > RELAXED_EYE_VALUE) {
            _coefficients[_rightBlinkIndex] = rightEye - RELAXED_EYE_VALUE;
            _coefficients[_rightEyeOpenIndex] = 0.0f;
        } else {
            _coefficients[_rightBlinkIndex] = 0.0f;
            _coefficients[_rightEyeOpenIndex] = RELAXED_EYE_VALUE - rightEye;
        }

        // Use BrowsU_C to control both brows' up and down
        _coefficients[_browDownLeftIndex] = -_coefficients[_browUpCenterIndex];
        _coefficients[_browDownRightIndex] = -_coefficients[_browUpCenterIndex];
        _coefficients[_browUpLeftIndex] = _coefficients[_browUpCenterIndex];
        _coefficients[_browUpRightIndex] = _coefficients[_browUpCenterIndex];

        // Offset jaw open coefficient
        static const float JAW_OPEN_THRESHOLD = 0.16f;
        _coefficients[_jawOpenIndex] = _coefficients[_jawOpenIndex] - JAW_OPEN_THRESHOLD;

        // Offset smile coefficients
        static const float SMILE_THRESHOLD = 0.18f;
        _coefficients[_mouthSmileLeftIndex] = _coefficients[_mouthSmileLeftIndex] - SMILE_THRESHOLD;
        _coefficients[_mouthSmileRightIndex] = _coefficients[_mouthSmileRightIndex] - SMILE_THRESHOLD;


        // Scale all coefficients
        for (int i = 0; i < NUM_EXPRESSIONS; i += 1) {
            _blendshapeCoefficients[i]
                = glm::clamp(DDE_COEFFICIENT_SCALES[i] * _coefficients[i], 0.0f, 1.0f);
        }

        // Calculate average frame time
        const float FRAME_AVERAGING_FACTOR = 0.99f;
        quint64 usecsNow = usecTimestampNow();
        if (_lastMessageReceived != 0) {
            _averageMessageTime = FRAME_AVERAGING_FACTOR * _averageMessageTime 
                + (1.0f - FRAME_AVERAGING_FACTOR) * (float)(usecsNow - _lastMessageReceived) / 1000000.0f;
        }
        _lastMessageReceived = usecsNow;

        FaceTracker::countFrame();
        
    } else {
        qCWarning(interfaceapp) << "DDE Face Tracker: Decode error";
    }
    _lastReceiveTimestamp = usecTimestampNow();
}
Example #12
void SerialInterface::readData(float deltaTime) {
#ifdef __APPLE__
    
    int initialSamples = totalSamples;
    
    if (USING_INVENSENSE_MPU9150) { 
        unsigned char sensorBuffer[36];
        
        // ask the invensense for its raw sensor readings (accelerometer and gyro)
        write(_serialDescriptor, "RD683B0E\n", 9);
        read(_serialDescriptor, sensorBuffer, 36);
        
        int accelXRate, accelYRate, accelZRate;
        
        convertHexToInt(sensorBuffer + 6, accelZRate);
        convertHexToInt(sensorBuffer + 10, accelYRate);
        convertHexToInt(sensorBuffer + 14, accelXRate);
        
        const float LSB_TO_METERS_PER_SECOND2 = 1.f / 16384.f * GRAVITY_EARTH;
                                                                //  From MPU-9150 register map, with setting on
                                                                //  highest resolution = +/- 2G
        
        _lastAcceleration = glm::vec3(-accelXRate, -accelYRate, -accelZRate) * LSB_TO_METERS_PER_SECOND2;
                
        
        int rollRate, yawRate, pitchRate;
        
        convertHexToInt(sensorBuffer + 22, rollRate);
        convertHexToInt(sensorBuffer + 26, yawRate);
        convertHexToInt(sensorBuffer + 30, pitchRate);
        
        //  Convert the integer rates to floats
        const float LSB_TO_DEGREES_PER_SECOND = 1.f / 16.4f;     //  From MPU-9150 register map, 2000 deg/sec.
        glm::vec3 rotationRates;
        rotationRates[0] = ((float) -pitchRate) * LSB_TO_DEGREES_PER_SECOND;
        rotationRates[1] = ((float) -yawRate) * LSB_TO_DEGREES_PER_SECOND;
        rotationRates[2] = ((float) -rollRate) * LSB_TO_DEGREES_PER_SECOND;

        // update and subtract the long term average
        _averageRotationRates = (1.f - 1.f/(float)LONG_TERM_RATE_SAMPLES) * _averageRotationRates +
                1.f/(float)LONG_TERM_RATE_SAMPLES * rotationRates;
        rotationRates -= _averageRotationRates;

        // compute the angular acceleration
        glm::vec3 angularAcceleration = (deltaTime < EPSILON) ? glm::vec3() : (rotationRates - _lastRotationRates) / deltaTime;
        _lastRotationRates = rotationRates;
        
        //  Update raw rotation estimates
        glm::quat estimatedRotation = glm::quat(glm::radians(_estimatedRotation)) *
            glm::quat(glm::radians(deltaTime * _lastRotationRates));
        
        //  Update acceleration estimate: first, subtract gravity as rotated into current frame
        _estimatedAcceleration = (totalSamples < GRAVITY_SAMPLES) ? glm::vec3() :
            _lastAcceleration - glm::inverse(estimatedRotation) * _gravity;
        
        // update and subtract the long term average
        _averageAcceleration = (1.f - 1.f/(float)LONG_TERM_RATE_SAMPLES) * _averageAcceleration +
                1.f/(float)LONG_TERM_RATE_SAMPLES * _estimatedAcceleration;
        _estimatedAcceleration -= _averageAcceleration;
        
        //  Consider updating our angular velocity/acceleration to linear acceleration mapping
        if (glm::length(_estimatedAcceleration) > EPSILON &&
                (glm::length(_lastRotationRates) > EPSILON || glm::length(angularAcceleration) > EPSILON)) {
            // compute predicted linear acceleration, find error between actual and predicted
            glm::vec3 predictedAcceleration = _angularVelocityToLinearAccel * _lastRotationRates +
                _angularAccelToLinearAccel * angularAcceleration;
            glm::vec3 error = _estimatedAcceleration - predictedAcceleration;
            
            // the "error" is actually what we want: the linear acceleration minus rotational influences
            _estimatedAcceleration = error;
            
            // adjust according to error in each dimension, in proportion to input magnitudes
            for (int i = 0; i < 3; i++) {
                if (fabsf(error[i]) < EPSILON) {
                    continue;
                }
                const float LEARNING_RATE = 0.001f;
                float rateSum = fabsf(_lastRotationRates.x) + fabsf(_lastRotationRates.y) + fabsf(_lastRotationRates.z);
                if (rateSum > EPSILON) {
                    for (int j = 0; j < 3; j++) {
                        float proportion = LEARNING_RATE * fabsf(_lastRotationRates[j]) / rateSum;
                        if (proportion > EPSILON) {
                            _angularVelocityToLinearAccel[j][i] += error[i] * proportion / _lastRotationRates[j];
                        }
                    }
                }
                float accelSum = fabsf(angularAcceleration.x) + fabsf(angularAcceleration.y) + fabsf(angularAcceleration.z);
                if (accelSum > EPSILON) {
                    for (int j = 0; j < 3; j++) {
                        float proportion = LEARNING_RATE * fabsf(angularAcceleration[j]) / accelSum;
                        if (proportion > EPSILON) {
                            _angularAccelToLinearAccel[j][i] += error[i] * proportion / angularAcceleration[j];
                        }
                    }                
                }
            }
        }
        
        // rotate estimated acceleration into global rotation frame
        _estimatedAcceleration = estimatedRotation * _estimatedAcceleration;
        
        //  Update estimated position and velocity
        float const DECAY_VELOCITY = 0.975f;
        float const DECAY_POSITION = 0.975f;
        _estimatedVelocity += deltaTime * _estimatedAcceleration;
        _estimatedPosition += deltaTime * _estimatedVelocity;
        _estimatedVelocity *= DECAY_VELOCITY;
        
        //  Attempt to fuse gyro position with webcam position
        Webcam* webcam = Application::getInstance()->getWebcam();
        if (webcam->isActive()) {
            const float WEBCAM_POSITION_FUSION = 0.5f;
            _estimatedPosition = glm::mix(_estimatedPosition, webcam->getEstimatedPosition(), WEBCAM_POSITION_FUSION);
               
        } else {
            _estimatedPosition *= DECAY_POSITION;
        }
            
        //  Accumulate a set of initial baseline readings for setting gravity
        if (totalSamples == 0) {
            _gravity = _lastAcceleration;
        } 
        else {
            if (totalSamples < GRAVITY_SAMPLES) {
                _gravity = (1.f - 1.f/(float)GRAVITY_SAMPLES) * _gravity +
                1.f/(float)GRAVITY_SAMPLES * _lastAcceleration;
            } else {
                //  Use gravity reading to do sensor fusion on the pitch and roll estimation
                estimatedRotation = safeMix(estimatedRotation,
                    rotationBetween(estimatedRotation * _lastAcceleration, _gravity) * estimatedRotation,
                    1.0f / SENSOR_FUSION_SAMPLES);
                
                //  Without a compass heading, always decay estimated Yaw slightly
                const float YAW_DECAY = 0.999f;
                glm::vec3 forward = estimatedRotation * glm::vec3(0.0f, 0.0f, -1.0f);
                estimatedRotation = safeMix(glm::angleAxis(glm::degrees(atan2f(forward.x, -forward.z)),
                    glm::vec3(0.0f, 1.0f, 0.0f)) * estimatedRotation, estimatedRotation, YAW_DECAY);
            }
        }
        
        _estimatedRotation = safeEulerAngles(estimatedRotation); 
        
        totalSamples++;
    } 
    
    if (initialSamples == totalSamples) {        
        timeval now;
        gettimeofday(&now, NULL);
        
        if (diffclock(&lastGoodRead, &now) > NO_READ_MAXIMUM_MSECS) {
            printLog("No data - Shutting down SerialInterface.\n");
            resetSerial();
        }
    } else {
        gettimeofday(&lastGoodRead, NULL);
    }
#endif
}
Example #13
glm::quat Quat::mix(const glm::quat& q1, const glm::quat& q2, float alpha) {
    return safeMix(q1, q2, alpha);
}
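Every example above funnels through safeMix, a quaternion interpolation that first corrects for the double cover (q and -q encode the same rotation) so the blend always takes the short way around, then falls back to a normalized lerp when the inputs are nearly identical. A minimal sketch of such a function, written against glm (safeMixSketch and EPSILON are our names; this is a reconstruction under those assumptions, not the library's verbatim source):

#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>
#include <cmath>

glm::quat safeMixSketch(const glm::quat& q1, const glm::quat& q2, float alpha) {
    const float EPSILON = 1.0e-6f;  // assumed tolerance, not from the source
    glm::quat q2Adjusted = q2;
    // flip q2 onto the same hemisphere as q1 so we interpolate the short way
    if (glm::dot(q1, q2) < 0.0f) {
        q2Adjusted = -q2;
    }
    float cosa = glm::clamp(glm::dot(q1, q2Adjusted), -1.0f, 1.0f);
    float s0, s1;
    if (1.0f - cosa > EPSILON) {
        // standard slerp coefficients
        float angle = std::acos(cosa);
        float sina = std::sin(angle);
        s0 = std::sin((1.0f - alpha) * angle) / sina;
        s1 = std::sin(alpha * angle) / sina;
    } else {
        // nearly identical rotations: lerp instead to avoid dividing by ~0
        s0 = 1.0f - alpha;
        s1 = alpha;
    }
    return glm::normalize(s0 * q1 + s1 * q2Adjusted);
}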