Example #1
//Calculate the click location using one of the Sixense controllers. Scale is not applied
QPoint ApplicationCompositor::getPalmClickLocation(const PalmData *palm) const {
    QPoint rv;
    auto canvasSize = qApp->getCanvasSize();
    if (qApp->isHMDMode()) {
        glm::vec2 polar = getPolarCoordinates(*palm);
        glm::vec2 point = sphericalToScreen(-polar);
        rv.rx() = point.x;
        rv.ry() = point.y;
    } else {
        MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
        glm::dmat4 projection;
        qApp->getProjectionMatrix(&projection);
        glm::quat invOrientation = glm::inverse(myAvatar->getOrientation());
        glm::vec3 eyePos = myAvatar->getDefaultEyePosition();
        glm::vec3 tip = myAvatar->getLaserPointerTipPosition(palm);
        glm::vec3 tipPos = invOrientation * (tip - eyePos);

        glm::vec4 clipSpacePos = glm::vec4(projection * glm::dvec4(tipPos, 1.0));
        glm::vec3 ndcSpacePos;
        if (clipSpacePos.w != 0) {
            ndcSpacePos = glm::vec3(clipSpacePos) / clipSpacePos.w;
        }

        rv.setX(((ndcSpacePos.x + 1.0) / 2.0) * canvasSize.x);
        rv.setY((1.0 - ((ndcSpacePos.y + 1.0) / 2.0)) * canvasSize.y);
    }
    return rv;
}
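
A minimal standalone sketch of the clip-space → NDC → window mapping the non-HMD branch performs (the helper name and parameters here are illustrative, not from the codebase):

#include <glm/glm.hpp>

// Sketch: project a view-space point to window pixels the way the example's
// else branch does. The projection matrix and canvas size are assumed inputs.
glm::vec2 viewToWindow(const glm::vec3& viewPos, const glm::mat4& projection,
                       const glm::vec2& canvasSize) {
    glm::vec4 clip = projection * glm::vec4(viewPos, 1.0f);
    glm::vec3 ndc(0.0f);
    if (clip.w != 0.0f) {
        ndc = glm::vec3(clip) / clip.w; // perspective divide
    }
    // NDC x/y lie in [-1, 1]; map to pixels, flipping y because window
    // coordinates grow downward.
    return glm::vec2((ndc.x + 1.0f) * 0.5f * canvasSize.x,
                     (1.0f - (ndc.y + 1.0f) * 0.5f) * canvasSize.y);
}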
Example #2
void ApplicationOverlay::renderPointersOculus(const glm::vec3& eyePos) {
    glBindTexture(GL_TEXTURE_2D, _crosshairTexture);
    glDisable(GL_DEPTH_TEST);
    glMatrixMode(GL_MODELVIEW);
    
    //Controller Pointers
    MyAvatar* myAvatar = Application::getInstance()->getAvatar();
    for (int i = 0; i < (int)myAvatar->getHand()->getNumPalms(); i++) {

        PalmData& palm = myAvatar->getHand()->getPalms()[i];
        if (palm.isActive()) {
            glm::vec3 tip = myAvatar->getLaserPointerTipPosition(&palm);
            glm::vec3 tipDirection = glm::normalize(glm::inverse(myAvatar->getOrientation()) * (tip - eyePos));
            float pitch = -glm::asin(tipDirection.y);
            float yawSign = glm::sign(-tipDirection.x);
            float yaw = glm::acos(-tipDirection.z) *
                        ((yawSign == 0.0f) ? 1.0f : yawSign);
            glm::quat orientation = glm::quat(glm::vec3(pitch, yaw, 0.0f));
            renderReticle(orientation, _alpha);
        } 
    }

    //Mouse Pointer
    if (_reticleActive[MOUSE]) {
        glm::vec2 projection = screenToSpherical(glm::vec2(_reticlePosition[MOUSE].x(),
                                                           _reticlePosition[MOUSE].y()));
        glm::quat orientation(glm::vec3(-projection.y, projection.x, 0.0f));
        renderReticle(orientation, _alpha);
    }

    glEnable(GL_DEPTH_TEST);
}
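
The reticle orientation above extracts pitch and yaw with acos plus a separate sign factor; a common equivalent is the two-argument arctangent, which is what getPolarCoordinates in Example #10 below uses. A sketch under the same -z-forward, +y-up convention (sign conventions vary between call sites; the helper name is illustrative):

#include <glm/glm.hpp>

// Sketch: yaw and pitch of a normalized direction, -z forward, +y up.
glm::vec2 directionToYawPitch(const glm::vec3& dir) {
    float yaw = glm::atan(dir.x, -dir.z); // two-argument atan handles the sign
    float pitch = glm::asin(dir.y);
    return glm::vec2(yaw, pitch);
}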
Example #3
bool AvatarUpdate::process() {
    PerformanceTimer perfTimer("AvatarUpdate");
    quint64 start = usecTimestampNow();
    quint64 deltaMicroseconds = start - _lastAvatarUpdate;
    _lastAvatarUpdate = start;
    float deltaSeconds = (float) deltaMicroseconds / (float) USECS_PER_SECOND;
    Application::getInstance()->setAvatarSimrateSample(1.0f / deltaSeconds);

    QSharedPointer<AvatarManager> manager = DependencyManager::get<AvatarManager>();
    MyAvatar* myAvatar = manager->getMyAvatar();

    //loop through all the other avatars and simulate them...
    //gets current lookat data, removes missing avatars, etc.
    manager->updateOtherAvatars(deltaSeconds);

    myAvatar->startUpdate();
    Application::getInstance()->updateMyAvatarLookAtPosition();
    // Sample hardware, update view frustum if needed, and send avatar data to mixer/nodes
    manager->updateMyAvatar(deltaSeconds);
    myAvatar->endUpdate();

    if (!isThreaded()) {
        return true;
    }
    int elapsed = (usecTimestampNow() - start);
    int usecToSleep =  _targetInterval - elapsed;
    if (usecToSleep < 0) {
        usecToSleep = 1; // always yield
    }
    usleep(usecToSleep);
    return true;
}
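
The tail of process() is a fixed-rate pacing pattern: measure the elapsed time, sleep out the remainder of the target interval, and always yield at least a microsecond when running behind. A generic sketch of the same pattern with std::chrono (names are illustrative, not from the codebase):

#include <atomic>
#include <chrono>
#include <thread>

// Sketch: pace a worker loop at roughly targetInterval, yielding even when late.
void pacedLoop(std::chrono::microseconds targetInterval, std::atomic<bool>& running) {
    while (running) {
        auto start = std::chrono::steady_clock::now();
        // ... per-frame work goes here ...
        auto elapsed = std::chrono::duration_cast<std::chrono::microseconds>(
            std::chrono::steady_clock::now() - start);
        auto toSleep = targetInterval - elapsed;
        if (toSleep <= std::chrono::microseconds::zero()) {
            toSleep = std::chrono::microseconds(1); // always yield, as above
        }
        std::this_thread::sleep_for(toSleep);
    }
}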
Example #4
void PreferencesDialog::accept() {
    MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
    _lastGoodAvatarURL = myAvatar->getFullAvatarURLFromPreferences();
    _lastGoodAvatarName = myAvatar->getFullAvatarModelName();
    savePreferences();
    close();
    delete _marketplaceWindow;
    _marketplaceWindow = NULL;
}
Example #5
void SkeletonModel::simulate(float deltaTime, bool fullUpdate) {
    setTranslation(_owningAvatar->getSkeletonPosition());
    static const glm::quat refOrientation = glm::angleAxis(PI, glm::vec3(0.0f, 1.0f, 0.0f));
    setRotation(_owningAvatar->getOrientation() * refOrientation);
    setScale(glm::vec3(1.0f, 1.0f, 1.0f) * _owningAvatar->getScale());
    setBlendshapeCoefficients(_owningAvatar->getHead()->getBlendshapeCoefficients());

    Model::simulate(deltaTime, fullUpdate);
    
    if (!isActive() || !_owningAvatar->isMyAvatar()) {
        return; // only simulate for own avatar
    }
    
    MyAvatar* myAvatar = static_cast<MyAvatar*>(_owningAvatar);
    if (myAvatar->isPlaying()) {
        // Don't take inputs if playing back a recording.
        return;
    }

    const FBXGeometry& geometry = _geometry->getFBXGeometry();

    // find the left and rightmost active palms
    int leftPalmIndex, rightPalmIndex;
    Hand* hand = _owningAvatar->getHand();
    hand->getLeftRightPalmIndices(leftPalmIndex, rightPalmIndex);

    const float HAND_RESTORATION_RATE = 0.25f;    
    if (leftPalmIndex == -1 || rightPalmIndex == -1) {
        // palms are not yet set, use mouse
        if (_owningAvatar->getHandState() == HAND_STATE_NULL) {
            restoreRightHandPosition(HAND_RESTORATION_RATE, PALM_PRIORITY);
        } else {
            // transform into model-frame
            glm::vec3 handPosition = glm::inverse(_rotation) * (_owningAvatar->getHandPosition() - _translation);
            applyHandPosition(geometry.rightHandJointIndex, handPosition);
        }
        restoreLeftHandPosition(HAND_RESTORATION_RATE, PALM_PRIORITY);

    } else if (leftPalmIndex == rightPalmIndex) {
        // right hand only
        applyPalmData(geometry.rightHandJointIndex, hand->getPalms()[leftPalmIndex]);
        restoreLeftHandPosition(HAND_RESTORATION_RATE, PALM_PRIORITY);

    } else {
        applyPalmData(geometry.leftHandJointIndex, hand->getPalms()[leftPalmIndex]);
        applyPalmData(geometry.rightHandJointIndex, hand->getPalms()[rightPalmIndex]);
    }

    if (_isFirstPerson) {
        cauterizeHead();
        updateClusterMatrices();
    }

    _boundingShape.setTranslation(_translation + _rotation * _boundingShapeLocalOffset);
    _boundingShape.setRotation(_rotation);
}
Example #6
//Calculate the click location using one of the Sixense controllers. Scale is not applied
QPoint ApplicationOverlay::getPalmClickLocation(const PalmData *palm) const {
    Application* application = Application::getInstance();
    GLCanvas* glWidget = application->getGLWidget();
    MyAvatar* myAvatar = application->getAvatar();

    glm::vec3 tip = myAvatar->getLaserPointerTipPosition(palm);
    glm::vec3 eyePos = myAvatar->getHead()->getEyePosition();
    glm::quat invOrientation = glm::inverse(myAvatar->getOrientation());
    //direction of ray goes towards camera
    glm::vec3 dir = invOrientation * glm::normalize(application->getCamera()->getPosition() - tip);
    glm::vec3 tipPos = invOrientation * (tip - eyePos);

    QPoint rv;

    if (OculusManager::isConnected()) {
        float t;

        //We back the ray up by dir to ensure that it will not start inside the UI.
        glm::vec3 adjustedPos = tipPos - dir;
        //Find intersection of crosshair ray. 
        if (raySphereIntersect(dir, adjustedPos, _oculusUIRadius * myAvatar->getScale(), &t)){
            glm::vec3 collisionPos = adjustedPos + dir * t;
            //Normalize it in case it's not a radius of 1
            collisionPos = glm::normalize(collisionPos);
            //If we hit the back hemisphere, mark it as not a collision
            if (collisionPos.z > 0) {
                rv.setX(INT_MAX);
                rv.setY(INT_MAX);
            } else {

                float u = asin(collisionPos.x) / _textureFov + 0.5f;
                float v = 1.0f - (asin(collisionPos.y) / _textureFov + 0.5f);

                rv.setX(u * glWidget->width());
                rv.setY(v * glWidget->height());
            }
        } else {
            //if they did not click on the overlay, just set the coords to INT_MAX
            rv.setX(INT_MAX);
            rv.setY(INT_MAX);
        }
    } else {
        glm::dmat4 projection;
        application->getProjectionMatrix(&projection);

        glm::vec4 clipSpacePos = glm::vec4(projection * glm::dvec4(tipPos, 1.0));
        glm::vec3 ndcSpacePos;
        if (clipSpacePos.w != 0) {
            ndcSpacePos = glm::vec3(clipSpacePos) / clipSpacePos.w;
        }

        rv.setX(((ndcSpacePos.x + 1.0) / 2.0) * glWidget->width());
        rv.setY((1.0 - ((ndcSpacePos.y + 1.0) / 2.0)) * glWidget->height());
    }
    return rv;
}
Example #7
bool AvatarActionHold::getAvatarRigidBodyLocation(glm::vec3& avatarRigidBodyPosition, glm::quat& avatarRigidBodyRotation) {
    MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
    MyCharacterController* controller = myAvatar ? myAvatar->getCharacterController() : nullptr;
    if (!controller) {
        qDebug() << "AvatarActionHold::getAvatarRigidBodyLocation failed to get character controller";
        return false;
    }
    controller->getRigidBodyLocation(avatarRigidBodyPosition, avatarRigidBodyRotation);
    return true;
}
Example #8
QScriptValue HMDScriptingInterface::getHUDLookAtPosition2D(QScriptContext* context, QScriptEngine* engine) {
    glm::vec3 hudIntersection;
    auto instance = DependencyManager::get<HMDScriptingInterface>();
    if (instance->getHUDLookAtPosition3D(hudIntersection)) {
        MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
        glm::vec3 sphereCenter = myAvatar->getDefaultEyePosition();
        glm::vec3 direction = glm::inverse(myAvatar->getOrientation()) * (hudIntersection - sphereCenter);
        glm::vec2 polar = glm::vec2(glm::atan(direction.x, -direction.z), glm::asin(direction.y)) * -1.0f;
        auto overlayPos = qApp->getApplicationCompositor().sphericalToOverlay(polar);
        return qScriptValueFromValue<glm::vec2>(engine, overlayPos);
    }
    return QScriptValue::NullValue;
}
Example #9
// Renders the overlays either to a texture or to the screen
void ApplicationOverlay::renderOverlay(bool renderToTexture) {
    PerformanceWarning warn(Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings), "ApplicationOverlay::displayOverlay()");

    Application* application = Application::getInstance();

    Overlays& overlays = application->getOverlays();
    QGLWidget* glWidget = application->getGLWidget();
    MyAvatar* myAvatar = application->getAvatar();

    if (renderToTexture) {
        getFramebufferObject()->bind();
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    }
    glEnable(GL_BLEND);
    glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);

    // Render 2D overlay
    glMatrixMode(GL_PROJECTION);
    glPushMatrix();

    glLoadIdentity();
    gluOrtho2D(0, glWidget->width(), glWidget->height(), 0);
    glDisable(GL_DEPTH_TEST);
    glDisable(GL_LIGHTING);

    renderAudioMeter();

    if (Menu::getInstance()->isOptionChecked(MenuOption::HeadMouse)) {
        myAvatar->renderHeadMouse(glWidget->width(), glWidget->height());
    }

    renderStatsAndLogs();

    // give external parties a chance to hook in
    emit application->renderingOverlay();

    overlays.render2D();

    renderPointers();

    glPopMatrix();

    glMatrixMode(GL_MODELVIEW);
    glEnable(GL_DEPTH_TEST);
    glEnable(GL_LIGHTING);
    glBlendFuncSeparate(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA, GL_CONSTANT_ALPHA, GL_ONE);

    if (renderToTexture) {
        getFramebufferObject()->release();
    }
}
Example #10
vec2 getPolarCoordinates(const PalmData& palm) {
    MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
    auto avatarOrientation = myAvatar->getOrientation();
    auto eyePos = myAvatar->getDefaultEyePosition();
    glm::vec3 tip = myAvatar->getLaserPointerTipPosition(&palm);
    // Direction of the tip relative to the eye
    glm::vec3 tipDirection = tip - eyePos;
    // orient into avatar space
    tipDirection = glm::inverse(avatarOrientation) * tipDirection;
    // Normalize for trig functions
    tipDirection = glm::normalize(tipDirection);
    // Convert to polar coordinates
    glm::vec2 polar(glm::atan(tipDirection.x, -tipDirection.z), glm::asin(tipDirection.y));
    return polar;
}
QScriptValue HMDScriptingInterface::getHUDLookAtPosition2D(QScriptContext* context, QScriptEngine* engine) {

    glm::vec3 hudIntersection;

    if ((&HMDScriptingInterface::getInstance())->getHUDLookAtPosition3D(hudIntersection)) {
        MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
        glm::vec3 sphereCenter = myAvatar->getDefaultEyePosition();
        glm::vec3 direction = glm::inverse(myAvatar->getOrientation()) * (hudIntersection - sphereCenter);
        glm::quat rotation = ::rotationBetween(glm::vec3(0.0f, 0.0f, -1.0f), direction);
        glm::vec3 eulers = ::safeEulerAngles(rotation);
        return qScriptValueFromValue<glm::vec2>(engine, Application::getInstance()->getApplicationCompositor()
                                                .sphericalToOverlay(glm::vec2(eulers.y, -eulers.x)));
    }
    return QScriptValue::NullValue;
}
Example #12
//Finds the collision point of a world space ray
bool ApplicationCompositor::calculateRayUICollisionPoint(const glm::vec3& position, const glm::vec3& direction, glm::vec3& result) const {
    MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
    
    glm::quat inverseOrientation = glm::inverse(myAvatar->getOrientation());

    glm::vec3 relativePosition = inverseOrientation * (position - myAvatar->getDefaultEyePosition());
    glm::vec3 relativeDirection = glm::normalize(inverseOrientation * direction);

    float t;
    if (raySphereIntersect(relativeDirection, relativePosition, _oculusUIRadius * myAvatar->getScale(), &t)){
        result = position + direction * t;
        return true;
    }

    return false;
}
Example #13
//Finds the collision point of a world space ray
bool ApplicationOverlay::calculateRayUICollisionPoint(const glm::vec3& position, const glm::vec3& direction, glm::vec3& result) const {
    Application* application = Application::getInstance();
    MyAvatar* myAvatar = application->getAvatar();
    
    glm::quat orientation = myAvatar->getOrientation();

    glm::vec3 relativePosition = orientation * (position - myAvatar->getDefaultEyePosition());
    glm::vec3 relativeDirection = orientation * direction;

    float t;
    if (raySphereIntersect(relativeDirection, relativePosition, _oculusUIRadius * myAvatar->getScale(), &t)){
        result = position + direction * t;
        return true;
    }

    return false;
}
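
Examples #12 and #13 both rely on raySphereIntersect with positions already shifted so the UI sphere sits at the origin; the engine's implementation is not shown in this listing. A minimal sketch of such an origin-centered ray/sphere test, assuming a normalized direction (this is an illustration, not the engine's code):

#include <cmath>
#include <glm/glm.hpp>

// Sketch: intersect a ray (origin, unit dir) with a sphere of the given
// radius centered at the world origin; writes the nearest positive hit distance.
bool raySphereIntersectSketch(const glm::vec3& dir, const glm::vec3& origin,
                              float radius, float* t) {
    float b = glm::dot(origin, dir);
    float c = glm::dot(origin, origin) - radius * radius;
    float disc = b * b - c;
    if (disc < 0.0f) {
        return false; // the ray's line misses the sphere entirely
    }
    float root = sqrtf(disc);
    float tNear = -b - root;
    float tFar = -b + root;
    *t = (tNear > 0.0f) ? tNear : tFar; // prefer the near hit in front
    return *t > 0.0f;
}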
Example #14
glm::quat Head::getCameraOrientation() const {
    // NOTE: Head::getCameraOrientation() is not used for orienting the camera "view" while in Oculus mode, so
    // you may wonder why this code is here. This method is still called in Oculus mode to determine how to
    // change the driving direction, supporting driving toward where your head is looking. Note that in Oculus
    // mode, your actual camera view and where your head is looking are not always the same.
    if (qApp->getAvatarUpdater()->isHMDMode()) {
        MyAvatar* myAvatar = dynamic_cast<MyAvatar*>(_owningAvatar);
        if (myAvatar && myAvatar->getStandingHMDSensorMode()) {
            return glm::quat_cast(myAvatar->getSensorToWorldMatrix()) * myAvatar->getHMDSensorOrientation();
        } else {
            return getOrientation();
        }
    } else {
        Avatar* owningAvatar = static_cast<Avatar*>(_owningAvatar);
        return owningAvatar->getWorldAlignedOrientation() * glm::quat(glm::radians(glm::vec3(_basePitch, 0.0f, 0.0f)));
    }
}
Example #15
void OverlayConductor::setEnabled(bool enabled) {

    if (enabled == _enabled) {
        return;
    }

    Menu::getInstance()->setIsOptionChecked(MenuOption::Overlays, enabled);

    _enabled = enabled; // set the new value

    // if the new state is visible/enabled...
    if (_enabled) {
        // alpha fadeIn the overlay mesh.
        qApp->getApplicationCompositor().fadeIn();

        // enable mouse clicks from script
        qApp->getOverlays().enable();

        // enable QML events
        auto offscreenUi = DependencyManager::get<OffscreenUi>();
        offscreenUi->getRootItem()->setEnabled(true);

        if (_mode == STANDING) {
            // place the overlay at the current hmd position in world space
            MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
            auto camMat = cancelOutRollAndPitch(myAvatar->getSensorToWorldMatrix() * qApp->getHMDSensorPose());
            Transform t;
            t.setTranslation(extractTranslation(camMat));
            t.setRotation(glm::quat_cast(camMat));
            qApp->getApplicationCompositor().setModelTransform(t);
        }
    } else { // otherwise, if the new state is hidden/not enabled
        // alpha fadeOut the overlay mesh.
        qApp->getApplicationCompositor().fadeOut();

        // disable mouse clicks from script
        qApp->getOverlays().disable();

        // disable QML events
        auto offscreenUi = DependencyManager::get<OffscreenUi>();
        offscreenUi->getRootItem()->setEnabled(false);
    }
}
Example #16
void OverlayConductor::update(float dt) {

    updateMode();

    switch (_mode) {
    case SITTING: {
        // when sitting, the overlay is at the origin, facing down the -z axis.
        // the camera is taken directly from the HMD.
        Transform identity;
        qApp->getApplicationCompositor().setModelTransform(identity);
        qApp->getApplicationCompositor().setCameraBaseTransform(identity);
        break;
    }
    case STANDING: {
        // when standing, the overlay is at a reference position, which is set when the overlay is
        // enabled.  The camera is taken directly from the HMD, but in world space.
        // So the sensorToWorldMatrix must be applied.
        MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
        Transform t;
        t.evalFromRawMatrix(myAvatar->getSensorToWorldMatrix());
        qApp->getApplicationCompositor().setCameraBaseTransform(t);

        // detect when head moves out side of sweet spot, or looks away.
        mat4 headMat = myAvatar->getSensorToWorldMatrix() * qApp->getHMDSensorPose();
        vec3 headWorldPos = extractTranslation(headMat);
        vec3 headForward = glm::quat_cast(headMat) * glm::vec3(0.0f, 0.0f, -1.0f);
        Transform modelXform = qApp->getApplicationCompositor().getModelTransform();
        vec3 compositorWorldPos = modelXform.getTranslation();
        vec3 compositorForward = modelXform.getRotation() * glm::vec3(0.0f, 0.0f, -1.0f);
        const float MAX_COMPOSITOR_DISTANCE = 0.6f;
        const float MAX_COMPOSITOR_ANGLE = 110.0f;
        if (_enabled && (glm::distance(headWorldPos, compositorWorldPos) > MAX_COMPOSITOR_DISTANCE ||
                         glm::dot(headForward, compositorForward) < cosf(glm::radians(MAX_COMPOSITOR_ANGLE)))) {
            // fade out the overlay
            setEnabled(false);
        }
        break;
    }
    case FLAT:
        // do nothing
        break;
    }
}
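
The STANDING case gates the overlay with two cheap tests: a distance check and a dot product compared against the cosine of the maximum angle, which avoids calling acos every frame. The idiom in isolation (an illustrative helper, not engine API):

#include <cmath>
#include <glm/glm.hpp>

// Sketch: true when two unit vectors are within maxAngleDegrees of each other.
// cos() decreases monotonically on [0, 180] degrees, so the comparison flips.
bool withinAngle(const glm::vec3& a, const glm::vec3& b, float maxAngleDegrees) {
    return glm::dot(a, b) >= cosf(glm::radians(maxAngleDegrees));
}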
Example #17
 template <> void payloadRender(const Overlay::Pointer& overlay, RenderArgs* args) {
     if (args) {
         if (overlay->getAnchor() == Overlay::MY_AVATAR) {
             auto batch = args->_batch;
             MyAvatar* avatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
             glm::quat myAvatarRotation = avatar->getOrientation();
             glm::vec3 myAvatarPosition = avatar->getPosition();
             float angle = glm::degrees(glm::angle(myAvatarRotation));
             glm::vec3 axis = glm::axis(myAvatarRotation);
             float myAvatarScale = avatar->getScale();
             Transform transform = Transform();
             transform.setTranslation(myAvatarPosition);
             transform.setRotation(glm::angleAxis(angle, axis));
             transform.setScale(myAvatarScale);
             batch->setModelTransform(transform);
             overlay->render(args);
         } else {
             overlay->render(args);
         }
     }
 }
Example #18
    template <> void payloadRender(const Overlay::Pointer& overlay, RenderArgs* args) {
        if (args) {
            if (overlay->getAnchor() == Overlay::MY_AVATAR) {
                glPushMatrix();
                MyAvatar* avatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
                glm::quat myAvatarRotation = avatar->getOrientation();
                glm::vec3 myAvatarPosition = avatar->getPosition();
                float angle = glm::degrees(glm::angle(myAvatarRotation));
                glm::vec3 axis = glm::axis(myAvatarRotation);
                float myAvatarScale = avatar->getScale();

                glTranslatef(myAvatarPosition.x, myAvatarPosition.y, myAvatarPosition.z);
                glRotatef(angle, axis.x, axis.y, axis.z);
                glScalef(myAvatarScale, myAvatarScale, myAvatarScale);
                overlay->render(args);
                glPopMatrix();
            } else {
                overlay->render(args);
            }
        }
    }
Example #19
bool LocationManager::goToDestination(QString destination) {

    QStringList coordinateItems = destination.remove(' ').split(QRegExp("_|,"), QString::SkipEmptyParts);

    const int NUMBER_OF_COORDINATE_ITEMS = 3;
    const int X_ITEM = 0;
    const int Y_ITEM = 1;
    const int Z_ITEM = 2;
    if (coordinateItems.size() == NUMBER_OF_COORDINATE_ITEMS) {

        // replace the last occurrence of '-' with a decimal point (e.g. "1-5" becomes "1.5")
        replaceLastOccurrence('-', '.', coordinateItems[X_ITEM]);
        replaceLastOccurrence('-', '.', coordinateItems[Y_ITEM]);
        replaceLastOccurrence('-', '.', coordinateItems[Z_ITEM]);

        double x = coordinateItems[X_ITEM].toDouble();
        double y = coordinateItems[Y_ITEM].toDouble();
        double z = coordinateItems[Z_ITEM].toDouble();

        glm::vec3 newAvatarPos(x, y, z);

        MyAvatar* myAvatar = Application::getInstance()->getAvatar();
        glm::vec3 avatarPos = myAvatar->getPosition();
        if (newAvatarPos != avatarPos) {
            // send a node kill request, indicating to other clients that they should play the "disappeared" effect
            MyAvatar::sendKillAvatar();

            qDebug("Going To Location: %f, %f, %f...", x, y, z);
            myAvatar->setPosition(newAvatarPos);
            emit myAvatar->transformChanged();
        }

        return true;
    }

    // no coordinates were parsed
    return false;
}
Example #20
void OverlayConductor::updateMode() {

    Mode newMode;
    if (qApp->isHMDMode()) {
        newMode = SITTING;
    } else {
        newMode = FLAT;
    }

    if (newMode != _mode) {
        switch (newMode) {
        case SITTING: {
            // enter the SITTING state
            // place the overlay at origin
            Transform identity;
            qApp->getApplicationCompositor().setModelTransform(identity);
            break;
        }
        case STANDING: {
            // enter the STANDING state
            // place the overlay at the current hmd position in world space
            MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
            auto camMat = cancelOutRollAndPitch(myAvatar->getSensorToWorldMatrix() * qApp->getHMDSensorPose());
            Transform t;
            t.setTranslation(extractTranslation(camMat));
            t.setRotation(glm::quat_cast(camMat));
            qApp->getApplicationCompositor().setModelTransform(t);
            break;
        }

        case FLAT:
            // do nothing
            break;
        }
    }

    _mode = newMode;
}
Example #21
//Renders Sixense laser pointers for UI selection in the Oculus
void OculusManager::renderLaserPointers() {
#ifdef HAVE_LIBOVR
    const float PALM_TIP_ROD_RADIUS = 0.002f;

    MyAvatar* myAvatar = Application::getInstance()->getAvatar();

    //If the Oculus is enabled, we will draw a cyan cursor ray
   
    for (size_t i = 0; i < myAvatar->getHand()->getNumPalms(); ++i) {
        PalmData& palm = myAvatar->getHand()->getPalms()[i];
        if (palm.isActive()) {
            glColor4f(0, 1, 1, 1);
            glm::vec3 tip = getLaserPointerTipPosition(&palm);
            glm::vec3 root = palm.getPosition();

            //Scale the root vector with the avatar scale
            myAvatar->scaleVectorRelativeToPosition(root);

            Avatar::renderJointConnectingCone(root, tip, PALM_TIP_ROD_RADIUS, PALM_TIP_ROD_RADIUS);
        }
    }
#endif
}
Example #22
void LocationManager::goToOrientation(QString orientation) {
    if (orientation.isEmpty()) {
        return;
    }

    QStringList orientationItems = orientation.remove(' ').split(QRegExp("_|,"), QString::SkipEmptyParts);

    const int NUMBER_OF_ORIENTATION_ITEMS = 4;
    const int W_ITEM = 0;
    const int X_ITEM = 1;
    const int Y_ITEM = 2;
    const int Z_ITEM = 3;

    if (orientationItems.size() == NUMBER_OF_ORIENTATION_ITEMS) {

        // replace the last occurrence of '-' with a decimal point
        replaceLastOccurrence('-', '.', orientationItems[W_ITEM]);
        replaceLastOccurrence('-', '.', orientationItems[X_ITEM]);
        replaceLastOccurrence('-', '.', orientationItems[Y_ITEM]);
        replaceLastOccurrence('-', '.', orientationItems[Z_ITEM]);

        double w = orientationItems[W_ITEM].toDouble();
        double x = orientationItems[X_ITEM].toDouble();
        double y = orientationItems[Y_ITEM].toDouble();
        double z = orientationItems[Z_ITEM].toDouble();

        glm::quat newAvatarOrientation(w, x, y, z);

        MyAvatar* myAvatar = Application::getInstance()->getAvatar();
        glm::quat avatarOrientation = myAvatar->getOrientation();
        if (newAvatarOrientation != avatarOrientation) {
            myAvatar->setOrientation(newAvatarOrientation);
            emit myAvatar->transformChanged();
        }
    }
}
Example #23
glm::mat4 HMDScriptingInterface::getWorldHMDMatrix() const {
    MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
    return myAvatar->getSensorToWorldMatrix() * myAvatar->getHMDSensorMatrix();
}
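
Examples #15 and #16 consume a matrix composed this same way, then split it into a translation and a rotation with the engine's extractTranslation and glm::quat_cast. In plain GLM the equivalent decomposition looks roughly like this (assuming the matrix carries no shear or non-uniform scale):

#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

// Sketch: split a rigid transform into position and orientation.
void decomposeRigid(const glm::mat4& m, glm::vec3& position, glm::quat& orientation) {
    position = glm::vec3(m[3]);      // translation lives in the fourth column
    orientation = glm::quat_cast(m); // valid for pure rotation + translation
}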
Example #24
void Head::simulate(float deltaTime, bool isMine, bool billboard) {
    //  Update audio trailing average for rendering facial animations
    const float AUDIO_AVERAGING_SECS = 0.05f;
    const float AUDIO_LONG_TERM_AVERAGING_SECS = 30.0f;
    _averageLoudness = glm::mix(_averageLoudness, _audioLoudness, glm::min(deltaTime / AUDIO_AVERAGING_SECS, 1.0f));

    if (_longTermAverageLoudness == -1.0f) {
        _longTermAverageLoudness = _averageLoudness;
    } else {
        _longTermAverageLoudness = glm::mix(_longTermAverageLoudness, _averageLoudness, glm::min(deltaTime / AUDIO_LONG_TERM_AVERAGING_SECS, 1.0f));
    }

    if (isMine) {
        MyAvatar* myAvatar = static_cast<MyAvatar*>(_owningAvatar);
        
        // Only use face trackers when not playing back a recording.
        if (!myAvatar->isPlaying()) {
            FaceTracker* faceTracker = Application::getInstance()->getActiveFaceTracker();
            _isFaceTrackerConnected = faceTracker != NULL && !faceTracker->isMuted();
            if (_isFaceTrackerConnected) {
                _blendshapeCoefficients = faceTracker->getBlendshapeCoefficients();

                if (typeid(*faceTracker) == typeid(DdeFaceTracker)) {

                    if (Menu::getInstance()->isOptionChecked(MenuOption::UseAudioForMouth)) {
                        calculateMouthShapes();

                        const int JAW_OPEN_BLENDSHAPE = 21;
                        const int MMMM_BLENDSHAPE = 34;
                        const int FUNNEL_BLENDSHAPE = 40;
                        const int SMILE_LEFT_BLENDSHAPE = 28;
                        const int SMILE_RIGHT_BLENDSHAPE = 29;
                        _blendshapeCoefficients[JAW_OPEN_BLENDSHAPE] += _audioJawOpen;
                        _blendshapeCoefficients[SMILE_LEFT_BLENDSHAPE] += _mouth4;
                        _blendshapeCoefficients[SMILE_RIGHT_BLENDSHAPE] += _mouth4;
                        _blendshapeCoefficients[MMMM_BLENDSHAPE] += _mouth2;
                        _blendshapeCoefficients[FUNNEL_BLENDSHAPE] += _mouth3;
                    }

                    applyEyelidOffset(getFinalOrientationInWorldFrame());
                }
            }

            auto eyeTracker = DependencyManager::get<EyeTracker>();
            _isEyeTrackerConnected = eyeTracker->isTracking();
        }

        if (!myAvatar->getStandingHMDSensorMode()) {
            //  Twist the upper body to follow the rotation of the head, but only do this with my avatar,
            //  since everyone else will see the full joint rotations for other people.  
            const float BODY_FOLLOW_HEAD_YAW_RATE = 0.1f;
            const float BODY_FOLLOW_HEAD_FACTOR = 0.66f;
            float currentTwist = getTorsoTwist();
            setTorsoTwist(currentTwist + (getFinalYaw() * BODY_FOLLOW_HEAD_FACTOR - currentTwist) * BODY_FOLLOW_HEAD_YAW_RATE);
        }
    }
   
    if (!(_isFaceTrackerConnected || billboard)) {

        if (!_isEyeTrackerConnected) {
            // Update eye saccades
            const float AVERAGE_MICROSACCADE_INTERVAL = 1.0f;
            const float AVERAGE_SACCADE_INTERVAL = 6.0f;
            const float MICROSACCADE_MAGNITUDE = 0.002f;
            const float SACCADE_MAGNITUDE = 0.04f;
            const float NOMINAL_FRAME_RATE = 60.0f;

            if (randFloat() < deltaTime / AVERAGE_MICROSACCADE_INTERVAL) {
                _saccadeTarget = MICROSACCADE_MAGNITUDE * randVector();
            } else if (randFloat() < deltaTime / AVERAGE_SACCADE_INTERVAL) {
                _saccadeTarget = SACCADE_MAGNITUDE * randVector();
            }
            _saccade += (_saccadeTarget - _saccade) * pow(0.5f, NOMINAL_FRAME_RATE * deltaTime);
        } else {
            _saccade = glm::vec3();
        }

        //  Detect transition from talking to not; force blink after that and a delay
        bool forceBlink = false;
        const float TALKING_LOUDNESS = 100.0f;
        const float BLINK_AFTER_TALKING = 0.25f;
        if ((_averageLoudness - _longTermAverageLoudness) > TALKING_LOUDNESS) {
            _timeWithoutTalking = 0.0f;
        
        } else if (_timeWithoutTalking < BLINK_AFTER_TALKING && (_timeWithoutTalking += deltaTime) >= BLINK_AFTER_TALKING) {
            forceBlink = true;
        }
                                 
        //  Update audio attack data for facial animation (eyebrows and mouth)
        const float AUDIO_ATTACK_AVERAGING_RATE = 0.9f;
        _audioAttack = AUDIO_ATTACK_AVERAGING_RATE * _audioAttack + (1.0f - AUDIO_ATTACK_AVERAGING_RATE) * fabs((_audioLoudness - _longTermAverageLoudness) - _lastLoudness);
        _lastLoudness = (_audioLoudness - _longTermAverageLoudness);
        
        const float BROW_LIFT_THRESHOLD = 100.0f;
        if (_audioAttack > BROW_LIFT_THRESHOLD) {
            _browAudioLift += sqrtf(_audioAttack) * 0.01f;
        }
        _browAudioLift = glm::clamp(_browAudioLift *= 0.7f, 0.0f, 1.0f);
        
        const float BLINK_SPEED = 10.0f;
        const float BLINK_SPEED_VARIABILITY = 1.0f;
        const float BLINK_START_VARIABILITY = 0.25f;
        const float FULLY_OPEN = 0.0f;
        const float FULLY_CLOSED = 1.0f;
        if (_leftEyeBlinkVelocity == 0.0f && _rightEyeBlinkVelocity == 0.0f) {
            // no blinking when brows are raised; blink less with increasing loudness
            const float BASE_BLINK_RATE = 15.0f / 60.0f;
            const float ROOT_LOUDNESS_TO_BLINK_INTERVAL = 0.25f;
            if (forceBlink || (_browAudioLift < EPSILON && shouldDo(glm::max(1.0f, sqrt(fabs(_averageLoudness - _longTermAverageLoudness)) *
                    ROOT_LOUDNESS_TO_BLINK_INTERVAL) / BASE_BLINK_RATE, deltaTime))) {
                _leftEyeBlinkVelocity = BLINK_SPEED + randFloat() * BLINK_SPEED_VARIABILITY;
                _rightEyeBlinkVelocity = BLINK_SPEED + randFloat() * BLINK_SPEED_VARIABILITY;
                if (randFloat() < 0.5f) {
                    _leftEyeBlink = BLINK_START_VARIABILITY;
                } else {
                    _rightEyeBlink = BLINK_START_VARIABILITY;
                }
            }
        } else {
            _leftEyeBlink = glm::clamp(_leftEyeBlink + _leftEyeBlinkVelocity * deltaTime, FULLY_OPEN, FULLY_CLOSED);
            _rightEyeBlink = glm::clamp(_rightEyeBlink + _rightEyeBlinkVelocity * deltaTime, FULLY_OPEN, FULLY_CLOSED);
            
            if (_leftEyeBlink == FULLY_CLOSED) {
                _leftEyeBlinkVelocity = -BLINK_SPEED;
            
            } else if (_leftEyeBlink == FULLY_OPEN) {
                _leftEyeBlinkVelocity = 0.0f;
            }
            if (_rightEyeBlink == FULLY_CLOSED) {
                _rightEyeBlinkVelocity = -BLINK_SPEED;
            
            } else if (_rightEyeBlink == FULLY_OPEN) {
                _rightEyeBlinkVelocity = 0.0f;
            }
        }
        
        // use data to update fake Faceshift blendshape coefficients
        calculateMouthShapes();
        DependencyManager::get<Faceshift>()->updateFakeCoefficients(_leftEyeBlink,
                                                                    _rightEyeBlink,
                                                                    _browAudioLift,
                                                                    _audioJawOpen,
                                                                    _mouth2,
                                                                    _mouth3,
                                                                    _mouth4,
                                                                    _blendshapeCoefficients);

        applyEyelidOffset(getOrientation());

    } else {
        _saccade = glm::vec3();
    }
    if (Menu::getInstance()->isOptionChecked(MenuOption::FixGaze)) { // if debug menu turns off, use no saccade
        _saccade = glm::vec3();
    }
    
    _leftEyePosition = _rightEyePosition = getPosition();
    if (!billboard) {
        _faceModel.simulate(deltaTime);
        if (!_faceModel.getEyePositions(_leftEyePosition, _rightEyePosition)) {
            static_cast<Avatar*>(_owningAvatar)->getSkeletonModel().getEyePositions(_leftEyePosition, _rightEyePosition);
        }
    }
    _eyePosition = calculateAverageEyePosition();
}
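
The loudness averaging at the top of simulate() uses glm::mix with a blend factor of deltaTime over an averaging window, clamped to 1, which keeps the smoothing roughly frame-rate independent. The idiom on its own (illustrative name):

#include <algorithm>

// Sketch: blend current toward target by dt / window, clamped so a long frame
// can never overshoot the target. Equivalent to the glm::mix calls above.
float smoothTowards(float current, float target, float dt, float windowSecs) {
    float alpha = std::min(dt / windowSecs, 1.0f);
    return current + (target - current) * alpha;
}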
Example #25
void SixenseManager::update(float deltaTime) {
#ifdef HAVE_SIXENSE
    // if the controllers haven't been moved in a while, disable
    const unsigned int MOVEMENT_DISABLE_SECONDS = 3;
    if (usecTimestampNow() - _lastMovement > (MOVEMENT_DISABLE_SECONDS * USECS_PER_SECOND)) {
        Hand* hand = Application::getInstance()->getAvatar()->getHand();
        for (std::vector<PalmData>::iterator it = hand->getPalms().begin(); it != hand->getPalms().end(); it++) {
            it->setActive(false);
        }
        _lastMovement = usecTimestampNow();
    }

    if (sixenseGetNumActiveControllers() == 0) {
        _hydrasConnected = false;
        return;
    } 

    PerformanceTimer perfTimer("sixense");
    if (!_hydrasConnected) {
        _hydrasConnected = true;
        UserActivityLogger::getInstance().connectedDevice("spatial_controller", "hydra");
    }
    MyAvatar* avatar = Application::getInstance()->getAvatar();
    Hand* hand = avatar->getHand();
    
    int maxControllers = sixenseGetMaxControllers();

    // we only support two controllers
    sixenseControllerData controllers[2];

    int numActiveControllers = 0;
    for (int i = 0; i < maxControllers && numActiveControllers < 2; i++) {
        if (!sixenseIsControllerEnabled(i)) {
            continue;
        }
        sixenseControllerData* data = controllers + numActiveControllers;
        ++numActiveControllers;
        sixenseGetNewestData(i, data);
        
        //  Set palm position and normal based on Hydra position/orientation
        
        // Either find a palm matching the sixense controller, or make a new one
        PalmData* palm;
        bool foundHand = false;
        for (size_t j = 0; j < hand->getNumPalms(); j++) {
            if (hand->getPalms()[j].getSixenseID() == data->controller_index) {
                palm = &(hand->getPalms()[j]);
                foundHand = true;
            }
        }
        if (!foundHand) {
            PalmData newPalm(hand);
            hand->getPalms().push_back(newPalm);
            palm = &(hand->getPalms()[hand->getNumPalms() - 1]);
            palm->setSixenseID(data->controller_index);
            qDebug("Found new Sixense controller, ID %i", data->controller_index);
        }
        
        palm->setActive(true);
        
        //  Read controller buttons and joystick into the hand
        palm->setControllerButtons(data->buttons);
        palm->setTrigger(data->trigger);
        palm->setJoystick(data->joystick_x, data->joystick_y);


        // Emulate the mouse so we can use scripts
        if (Menu::getInstance()->isOptionChecked(MenuOption::SixenseMouseInput)) {
            emulateMouse(palm, numActiveControllers - 1);
        }

        // NOTE: Sixense API returns pos data in millimeters but we IMMEDIATELY convert to meters.
        glm::vec3 position(data->pos[0], data->pos[1], data->pos[2]);
        position *= METERS_PER_MILLIMETER;

        // Transform the measured position into body frame.  
        glm::vec3 neck = _neckBase;
        // Zeroing y component of the "neck" effectively raises the measured position a little bit.
        neck.y = 0.f;
        position = _orbRotation * (position - neck);

        //  Rotation of Palm
        glm::quat rotation(data->rot_quat[3], -data->rot_quat[0], data->rot_quat[1], -data->rot_quat[2]);
        rotation = glm::angleAxis(PI, glm::vec3(0.f, 1.f, 0.f)) * _orbRotation * rotation;
        
        //  Compute current velocity from position change
        glm::vec3 rawVelocity;
        if (deltaTime > 0.f) {
            rawVelocity = (position - palm->getRawPosition()) / deltaTime; 
        } else {
            rawVelocity = glm::vec3(0.0f);
        }
        palm->setRawVelocity(rawVelocity);   //  meters/sec
    
        // adjustment for how the Hydra controllers fit into the hands
        float sign = (i == 0) ? -1.0f : 1.0f;
        rotation *= glm::angleAxis(sign * PI/4.0f, glm::vec3(0.0f, 0.0f, 1.0f));

        if (_lowVelocityFilter) {
            //  Use a velocity sensitive filter to damp small motions and preserve large ones with
            //  no latency.
            float velocityFilter = glm::clamp(1.0f - glm::length(rawVelocity), 0.0f, 1.0f);
            position = palm->getRawPosition() * velocityFilter + position * (1.0f - velocityFilter);
            rotation = safeMix(palm->getRawRotation(), rotation, 1.0f - velocityFilter);
            palm->setRawPosition(position);
            palm->setRawRotation(rotation);
        } else {
            palm->setRawPosition(position);
            palm->setRawRotation(rotation);
        }

        // use the velocity to determine whether there's any movement (if the hand isn't new)
        const float MOVEMENT_DISTANCE_THRESHOLD = 0.003f;
        _amountMoved += rawVelocity * deltaTime;
        if (glm::length(_amountMoved) > MOVEMENT_DISTANCE_THRESHOLD && foundHand) {
            _lastMovement = usecTimestampNow();
            _amountMoved = glm::vec3(0.0f);
        }
        
        // Store the one fingertip in the palm structure so we can track velocity
        const float FINGER_LENGTH = 0.3f;   //  meters
        const glm::vec3 FINGER_VECTOR(0.0f, 0.0f, FINGER_LENGTH);
        const glm::vec3 newTipPosition = position + rotation * FINGER_VECTOR;
        glm::vec3 oldTipPosition = palm->getTipRawPosition();
        if (deltaTime > 0.f) {
            palm->setTipVelocity((newTipPosition - oldTipPosition) / deltaTime);
        } else {
            palm->setTipVelocity(glm::vec3(0.f));
        }
        palm->setTipPosition(newTipPosition);
    }

    if (numActiveControllers == 2) {
        updateCalibration(controllers);
    }
#endif  // HAVE_SIXENSE
}
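
The low-velocity filter above damps sensor jitter when the hand is nearly still but passes fast motion through unchanged, so large moves pick up no latency. The positional half of that blend in isolation (rotation uses the slerp-style safeMix instead; the helper name is illustrative):

#include <glm/glm.hpp>

// Sketch: velocity-sensitive smoothing. Near-zero speed keeps mostly the old
// sample; speeds of 1 m/s or more pass the new sample straight through.
glm::vec3 velocityFilterSketch(const glm::vec3& oldPos, const glm::vec3& newPos,
                               const glm::vec3& velocity) {
    float keepOld = glm::clamp(1.0f - glm::length(velocity), 0.0f, 1.0f);
    return oldPos * keepOld + newPos * (1.0f - keepOld);
}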
Example #26
static void setPalm(float deltaTime, int index) {
    MyAvatar* avatar = Application::getInstance()->getAvatar();
    Hand* hand = avatar->getHand();
    PalmData* palm;
    bool foundHand = false;
    for (size_t j = 0; j < hand->getNumPalms(); j++) {
        if (hand->getPalms()[j].getSixenseID() == index) {
            palm = &(hand->getPalms()[j]);
            foundHand = true;
        }
    }
    if (!foundHand) {
        PalmData newPalm(hand);
        hand->getPalms().push_back(newPalm);
        palm = &(hand->getPalms()[hand->getNumPalms() - 1]);
        palm->setSixenseID(index);
    }
    
    palm->setActive(true);
    
    // Read controller buttons and joystick into the hand
    const QString PRIO_JOYSTICK_NAME = "PrioVR";
    Joystick* prioJoystick = JoystickScriptingInterface::getInstance().joystickWithName(PRIO_JOYSTICK_NAME);
    if (prioJoystick) {
        const QVector<float> axes = prioJoystick->getAxes();
        const QVector<bool> buttons = prioJoystick->getButtons();
        
        if (axes.size() >= 4 && buttons.size() >= 4) {
            if (index == LEFT_HAND_INDEX) {
                palm->setControllerButtons(buttons[1] ? BUTTON_FWD : 0);
                palm->setTrigger(buttons[0] ? 1.0f : 0.0f);
                palm->setJoystick(axes[0], -axes[1]);
                
            } else {
                palm->setControllerButtons(buttons[3] ? BUTTON_FWD : 0);
                palm->setTrigger(buttons[2] ? 1.0f : 0.0f);
                palm->setJoystick(axes[2], -axes[3]);
            }
        }
    }
    
    // NOTE: this math is done in the world-frame with unnecessary complexity.
    // TODO: transform this to stay in the model-frame.
    glm::vec3 position;
    glm::quat rotation;
    SkeletonModel* skeletonModel = &Application::getInstance()->getAvatar()->getSkeletonModel();
    int jointIndex;
    glm::quat inverseRotation = glm::inverse(Application::getInstance()->getAvatar()->getOrientation());
    if (index == LEFT_HAND_INDEX) {
        jointIndex = skeletonModel->getLeftHandJointIndex();
        skeletonModel->getJointRotationInWorldFrame(jointIndex, rotation);      
        rotation = inverseRotation * rotation * glm::quat(glm::vec3(0.0f, PI_OVER_TWO, 0.0f));
        
    } else {
        jointIndex = skeletonModel->getRightHandJointIndex();
        skeletonModel->getJointRotationInWorldFrame(jointIndex, rotation);
        rotation = inverseRotation * rotation * glm::quat(glm::vec3(0.0f, -PI_OVER_TWO, 0.0f));
    }
    skeletonModel->getJointPositionInWorldFrame(jointIndex, position);
    position = inverseRotation * (position - skeletonModel->getTranslation());
    
    palm->setRawRotation(rotation);
    
    //  Compute current velocity from position change
    glm::vec3 rawVelocity;
    if (deltaTime > 0.0f) {
        rawVelocity = (position - palm->getRawPosition()) / deltaTime; 
    } else {
        rawVelocity = glm::vec3(0.0f);
    }
    palm->setRawVelocity(rawVelocity);
    palm->setRawPosition(position);
    
    // Store the one fingertip in the palm structure so we can track velocity
    const float FINGER_LENGTH = 0.3f;   //  meters
    const glm::vec3 FINGER_VECTOR(0.0f, 0.0f, FINGER_LENGTH);
    const glm::vec3 newTipPosition = position + rotation * FINGER_VECTOR;
    glm::vec3 oldTipPosition = palm->getTipRawPosition();
    if (deltaTime > 0.0f) {
        palm->setTipVelocity((newTipPosition - oldTipPosition) / deltaTime);
    } else {
        palm->setTipVelocity(glm::vec3(0.0f));
    }
    palm->setTipPosition(newTipPosition);
}
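
Both this example and Example #25 estimate tip velocity by finite differences, guarding against a zero deltaTime. Factored out (illustrative helper):

#include <glm/glm.hpp>

// Sketch: finite-difference velocity with the same zero-dt guard as above.
glm::vec3 finiteDifferenceVelocity(const glm::vec3& oldPos, const glm::vec3& newPos,
                                   float deltaTime) {
    return (deltaTime > 0.0f) ? (newPos - oldPos) / deltaTime : glm::vec3(0.0f);
}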
Example #27
// Called within Model::simulate call, below.
void SkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
    const FBXGeometry& geometry = getFBXGeometry();

    Head* head = _owningAvatar->getHead();

    if (_owningAvatar->isMyAvatar()) {
        MyAvatar* myAvatar = static_cast<MyAvatar*>(_owningAvatar);

        Rig::HeadParameters headParams;
        headParams.enableLean = qApp->isHMDMode();
        headParams.leanSideways = head->getFinalLeanSideways();
        headParams.leanForward = head->getFinalLeanForward();
        headParams.torsoTwist = head->getTorsoTwist();

        if (qApp->isHMDMode()) {
            headParams.isInHMD = true;

            // get HMD position from sensor space into world space, and back into rig space
            glm::mat4 worldHMDMat = myAvatar->getSensorToWorldMatrix() * myAvatar->getHMDSensorMatrix();
            glm::mat4 rigToWorld = createMatFromQuatAndPos(getRotation(), getTranslation());
            glm::mat4 worldToRig = glm::inverse(rigToWorld);
            glm::mat4 rigHMDMat = worldToRig * worldHMDMat;

            headParams.rigHeadPosition = extractTranslation(rigHMDMat);
            headParams.rigHeadOrientation = extractRotation(rigHMDMat);
            headParams.worldHeadOrientation = extractRotation(worldHMDMat);
        } else {
            headParams.isInHMD = false;

            // We don't have a valid localHeadPosition.
            headParams.rigHeadOrientation = Quaternions::Y_180 * head->getFinalOrientationInLocalFrame();
            headParams.worldHeadOrientation = head->getFinalOrientationInWorldFrame();
        }

        headParams.leanJointIndex = geometry.leanJointIndex;
        headParams.neckJointIndex = geometry.neckJointIndex;
        headParams.isTalking = head->getTimeWithoutTalking() <= 1.5f;

        _rig->updateFromHeadParameters(headParams, deltaTime);

        Rig::HandParameters handParams;

        auto leftPose = myAvatar->getLeftHandControllerPoseInAvatarFrame();
        if (leftPose.isValid()) {
            handParams.isLeftEnabled = true;
            handParams.leftPosition = Quaternions::Y_180 * leftPose.getTranslation();
            handParams.leftOrientation = Quaternions::Y_180 * leftPose.getRotation();
        } else {
            handParams.isLeftEnabled = false;
        }

        auto rightPose = myAvatar->getRightHandControllerPoseInAvatarFrame();
        if (rightPose.isValid()) {
            handParams.isRightEnabled = true;
            handParams.rightPosition = Quaternions::Y_180 * rightPose.getTranslation();
            handParams.rightOrientation = Quaternions::Y_180 * rightPose.getRotation();
        } else {
            handParams.isRightEnabled = false;
        }

        handParams.bodyCapsuleRadius = myAvatar->getCharacterController()->getCapsuleRadius();
        handParams.bodyCapsuleHalfHeight = myAvatar->getCharacterController()->getCapsuleHalfHeight();
        handParams.bodyCapsuleLocalOffset = myAvatar->getCharacterController()->getCapsuleLocalOffset();

        _rig->updateFromHandParameters(handParams, deltaTime);

        Rig::CharacterControllerState ccState = convertCharacterControllerState(myAvatar->getCharacterController()->getState());

        auto velocity = myAvatar->getLocalVelocity();
        auto position = myAvatar->getLocalPosition();
        auto orientation = myAvatar->getLocalOrientation();
        _rig->computeMotionAnimationState(deltaTime, position, velocity, orientation, ccState);

        // evaluate AnimGraph animation and update jointStates.
        Model::updateRig(deltaTime, parentTransform);

        Rig::EyeParameters eyeParams;
        eyeParams.worldHeadOrientation = headParams.worldHeadOrientation;
        eyeParams.eyeLookAt = head->getLookAtPosition();
        eyeParams.eyeSaccade = head->getSaccade();
        eyeParams.modelRotation = getRotation();
        eyeParams.modelTranslation = getTranslation();
        eyeParams.leftEyeJointIndex = geometry.leftEyeJointIndex;
        eyeParams.rightEyeJointIndex = geometry.rightEyeJointIndex;

        _rig->updateFromEyeParameters(eyeParams);

    } else {

        Model::updateRig(deltaTime, parentTransform);

        // This is a little more work than we really want.
        //
        // Other avatars' joints, including their eyes, should already be set just like any other joints
        // from the wire data. But when looking at me, we want the eyes to use the corrected lookAt.
        //
        // Thus this should really only be ... else if (_owningAvatar->getHead()->isLookingAtMe()) {...
        // However, in the !isLookingAtMe case, the eyes aren't rotating the way they should right now.
        // We will revisit that as priorities allow, and particularly after the new rig/animation/joints.

        // If the head is not positioned, updateEyeJoints won't get the math right
        glm::quat headOrientation;
        _rig->getJointRotation(geometry.headJointIndex, headOrientation);
        glm::vec3 eulers = safeEulerAngles(headOrientation);
        head->setBasePitch(glm::degrees(-eulers.x));
        head->setBaseYaw(glm::degrees(eulers.y));
        head->setBaseRoll(glm::degrees(-eulers.z));

        Rig::EyeParameters eyeParams;
        eyeParams.worldHeadOrientation = head->getFinalOrientationInWorldFrame();
        eyeParams.eyeLookAt = head->getCorrectedLookAtPosition();
        eyeParams.eyeSaccade = glm::vec3();
        eyeParams.modelRotation = getRotation();
        eyeParams.modelTranslation = getTranslation();
        eyeParams.leftEyeJointIndex = geometry.leftEyeJointIndex;
        eyeParams.rightEyeJointIndex = geometry.rightEyeJointIndex;

        _rig->updateFromEyeParameters(eyeParams);
     }
}
Example #28
void PreferencesDialog::savePreferences() {
    
    MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();

    bool shouldDispatchIdentityPacket = false;
    
    QString displayNameStr(ui.displayNameEdit->text());
    if (displayNameStr != _displayNameString) {
        myAvatar->setDisplayName(displayNameStr);
        UserActivityLogger::getInstance().changedDisplayName(displayNameStr);
        shouldDispatchIdentityPacket = true;
    }

    if (shouldDispatchIdentityPacket) {
        myAvatar->sendIdentityPacket();
    }
    
    myAvatar->setCollisionSoundURL(ui.collisionSoundURLEdit->text());

    if (!Menu::getInstance()->isOptionChecked(MenuOption::DisableActivityLogger)
        != ui.sendDataCheckBox->isChecked()) {
        Menu::getInstance()->triggerOption(MenuOption::DisableActivityLogger);
    }

    if (!ui.snapshotLocationEdit->text().isEmpty() && QDir(ui.snapshotLocationEdit->text()).exists()) {
        Snapshot::snapshotsLocation.set(ui.snapshotLocationEdit->text());
    }

    if (!ui.scriptsLocationEdit->text().isEmpty() && QDir(ui.scriptsLocationEdit->text()).exists()) {
        qApp->setScriptsLocation(ui.scriptsLocationEdit->text());
    }

    myAvatar->getHead()->setPupilDilation(ui.pupilDilationSlider->value() / (float)ui.pupilDilationSlider->maximum());
    myAvatar->setLeanScale(ui.leanScaleSpin->value());
    myAvatar->setClampedTargetScale(ui.avatarScaleSpin->value());
    
    DependencyManager::get<AvatarManager>()->getMyAvatar()->setRealWorldFieldOfView(ui.realWorldFieldOfViewSpin->value());
    
    qApp->setFieldOfView(ui.fieldOfViewSpin->value());
    
    auto dde = DependencyManager::get<DdeFaceTracker>();
    dde->setEyeClosingThreshold(ui.ddeEyeClosingThresholdSlider->value() / 
                                (float)ui.ddeEyeClosingThresholdSlider->maximum());

    FaceTracker::setEyeDeflection(ui.faceTrackerEyeDeflectionSider->value() /
                                (float)ui.faceTrackerEyeDeflectionSider->maximum());
    
    auto faceshift = DependencyManager::get<Faceshift>();
    faceshift->setHostname(ui.faceshiftHostnameEdit->text());
    
    qApp->setMaxOctreePacketsPerSecond(ui.maxOctreePPSSpin->value());

    qApp->getApplicationCompositor().setHmdUIAngularSize(ui.oculusUIAngularSizeSpin->value());
    
    SixenseManager& sixense = SixenseManager::getInstance();
    sixense.setReticleMoveSpeed(ui.sixenseReticleMoveSpeedSpin->value());
    sixense.setInvertButtons(ui.invertSixenseButtonsCheckBox->isChecked());

    auto audio = DependencyManager::get<AudioClient>();
    MixedProcessedAudioStream& stream = audio->getReceivedAudioStream();
    
    stream.setDynamicJitterBuffers(ui.dynamicJitterBuffersCheckBox->isChecked());
    stream.setStaticDesiredJitterBufferFrames(ui.staticDesiredJitterBufferFramesSpin->value());
    stream.setMaxFramesOverDesired(ui.maxFramesOverDesiredSpin->value());
    stream.setUseStDevForJitterCalc(ui.useStdevForJitterCalcCheckBox->isChecked());
    stream.setWindowStarveThreshold(ui.windowStarveThresholdSpin->value());
    stream.setWindowSecondsForDesiredCalcOnTooManyStarves(ui.windowSecondsForDesiredCalcOnTooManyStarvesSpin->value());
    stream.setWindowSecondsForDesiredReduction(ui.windowSecondsForDesiredReductionSpin->value());
    stream.setRepetitionWithFade(ui.repetitionWithFadeCheckBox->isChecked());

    QMetaObject::invokeMethod(audio.data(), "setOutputBufferSize", Q_ARG(int, ui.outputBufferSizeSpinner->value()));

    audio->setOutputStarveDetectionEnabled(ui.outputStarveDetectionCheckBox->isChecked());
    audio->setOutputStarveDetectionThreshold(ui.outputStarveDetectionThresholdSpinner->value());
    audio->setOutputStarveDetectionPeriod(ui.outputStarveDetectionPeriodSpinner->value());

    Application::getInstance()->resizeGL();

    // LOD items
    auto lodManager = DependencyManager::get<LODManager>();
    lodManager->setDesktopLODDecreaseFPS(ui.desktopMinimumFPSSpin->value());
    lodManager->setHMDLODDecreaseFPS(ui.hmdMinimumFPSSpin->value());
}
Example #29
void PreferencesDialog::loadPreferences() {
    
    MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
    Menu* menuInstance = Menu::getInstance();

    _displayNameString = myAvatar->getDisplayName();
    ui.displayNameEdit->setText(_displayNameString);

    ui.collisionSoundURLEdit->setText(myAvatar->getCollisionSoundURL());

    ui.sendDataCheckBox->setChecked(!menuInstance->isOptionChecked(MenuOption::DisableActivityLogger));

    ui.snapshotLocationEdit->setText(Snapshot::snapshotsLocation.get());

    ui.scriptsLocationEdit->setText(qApp->getScriptsLocation());

    ui.pupilDilationSlider->setValue(myAvatar->getHead()->getPupilDilation() *
                                     ui.pupilDilationSlider->maximum());
    
    auto dde = DependencyManager::get<DdeFaceTracker>();
    ui.ddeEyeClosingThresholdSlider->setValue(dde->getEyeClosingThreshold() * 
                                              ui.ddeEyeClosingThresholdSlider->maximum());

    ui.faceTrackerEyeDeflectionSider->setValue(FaceTracker::getEyeDeflection() *
                                               ui.faceTrackerEyeDeflectionSider->maximum());
    
    auto faceshift = DependencyManager::get<Faceshift>();
    ui.faceshiftHostnameEdit->setText(faceshift->getHostname());

    auto audio = DependencyManager::get<AudioClient>();
    MixedProcessedAudioStream& stream = audio->getReceivedAudioStream();

    ui.dynamicJitterBuffersCheckBox->setChecked(stream.getDynamicJitterBuffers());
    ui.staticDesiredJitterBufferFramesSpin->setValue(stream.getDesiredJitterBufferFrames());
    ui.maxFramesOverDesiredSpin->setValue(stream.getMaxFramesOverDesired());
    ui.useStdevForJitterCalcCheckBox->setChecked(stream.getUseStDevForJitterCalc());
    ui.windowStarveThresholdSpin->setValue(stream.getWindowStarveThreshold());
    ui.windowSecondsForDesiredCalcOnTooManyStarvesSpin->setValue(
            stream.getWindowSecondsForDesiredCalcOnTooManyStarves());
    ui.windowSecondsForDesiredReductionSpin->setValue(stream.getWindowSecondsForDesiredReduction());
    ui.repetitionWithFadeCheckBox->setChecked(stream.getRepetitionWithFade());

    ui.outputBufferSizeSpinner->setValue(audio->getOutputBufferSize());

    ui.outputStarveDetectionCheckBox->setChecked(audio->getOutputStarveDetectionEnabled());
    ui.outputStarveDetectionThresholdSpinner->setValue(audio->getOutputStarveDetectionThreshold());
    ui.outputStarveDetectionPeriodSpinner->setValue(audio->getOutputStarveDetectionPeriod());

    ui.realWorldFieldOfViewSpin->setValue(DependencyManager::get<AvatarManager>()->getMyAvatar()->getRealWorldFieldOfView());

    ui.fieldOfViewSpin->setValue(qApp->getFieldOfView());
    
    ui.leanScaleSpin->setValue(myAvatar->getLeanScale());
    
    ui.avatarScaleSpin->setValue(myAvatar->getScale());
    
    ui.maxOctreePPSSpin->setValue(qApp->getMaxOctreePacketsPerSecond());

    ui.oculusUIAngularSizeSpin->setValue(qApp->getApplicationCompositor().getHmdUIAngularSize());

    SixenseManager& sixense = SixenseManager::getInstance();
    ui.sixenseReticleMoveSpeedSpin->setValue(sixense.getReticleMoveSpeed());
    ui.invertSixenseButtonsCheckBox->setChecked(sixense.getInvertButtons());

    // LOD items
    auto lodManager = DependencyManager::get<LODManager>();
    ui.desktopMinimumFPSSpin->setValue(lodManager->getDesktopLODDecreaseFPS());
    ui.hmdMinimumFPSSpin->setValue(lodManager->getHMDLODDecreaseFPS());
}
Example #30
void SixenseManager::update(float deltaTime) {
#ifdef HAVE_SIXENSE
    if (sixenseGetNumActiveControllers() == 0) {
        return;
    }
    MyAvatar* avatar = Application::getInstance()->getAvatar();
    Hand* hand = avatar->getHand();
    
    int maxControllers = sixenseGetMaxControllers();

    // we only support two controllers
    sixenseControllerData controllers[2];

    int numActiveControllers = 0;
    for (int i = 0; i < maxControllers && numActiveControllers < 2; i++) {
        if (!sixenseIsControllerEnabled(i)) {
            continue;
        }
        sixenseControllerData* data = controllers + numActiveControllers;
        ++numActiveControllers;
        sixenseGetNewestData(i, data);
        
        //  Set palm position and normal based on Hydra position/orientation
        
        // Either find a palm matching the sixense controller, or make a new one
        PalmData* palm;
        bool foundHand = false;
        for (size_t j = 0; j < hand->getNumPalms(); j++) {
            if (hand->getPalms()[j].getSixenseID() == data->controller_index) {
                palm = &(hand->getPalms()[j]);
                foundHand = true;
            }
        }
        if (!foundHand) {
            PalmData newPalm(hand);
            hand->getPalms().push_back(newPalm);
            palm = &(hand->getPalms()[hand->getNumPalms() - 1]);
            palm->setSixenseID(data->controller_index);
            printf("Found new Sixense controller, ID %i\n", data->controller_index);
        }
        
        palm->setActive(true);
        
        //  Read controller buttons and joystick into the hand
        palm->setControllerButtons(data->buttons);
        palm->setTrigger(data->trigger);
        palm->setJoystick(data->joystick_x, data->joystick_y);

        glm::vec3 position(data->pos[0], data->pos[1], data->pos[2]);
        // Transform the measured position into body frame.  
        glm::vec3 neck = _neckBase;
        // Zeroing y component of the "neck" effectively raises the measured position a little bit.
        neck.y = 0.f;
        position = _orbRotation * (position - neck);

        //  Rotation of Palm
        glm::quat rotation(data->rot_quat[3], -data->rot_quat[0], data->rot_quat[1], -data->rot_quat[2]);
        rotation = glm::angleAxis(PI, glm::vec3(0.f, 1.f, 0.f)) * _orbRotation * rotation;
        const glm::vec3 PALM_VECTOR(0.0f, -1.0f, 0.0f);
        glm::vec3 newNormal = rotation * PALM_VECTOR;
        palm->setRawNormal(newNormal);
        palm->setRawRotation(rotation);
        
        //  Compute current velocity from position change
        glm::vec3 rawVelocity = (position - palm->getRawPosition()) / deltaTime / 1000.f;
        palm->setRawVelocity(rawVelocity);   //  meters/sec
        palm->setRawPosition(position);
        
        // use the velocity to determine whether there's any movement (if the hand isn't new)
        const float MOVEMENT_SPEED_THRESHOLD = 0.05f;
        if (glm::length(rawVelocity) > MOVEMENT_SPEED_THRESHOLD && foundHand) {
            _lastMovement = usecTimestampNow();
        }
        
        // initialize the "finger" based on the direction
        FingerData finger(palm, hand);
        finger.setActive(true);
        finger.setRawRootPosition(position);
        const float FINGER_LENGTH = 300.0f;   //  Millimeters
        const glm::vec3 FINGER_VECTOR(0.0f, 0.0f, FINGER_LENGTH);
        const glm::vec3 newTipPosition = position + rotation * FINGER_VECTOR;
        finger.setRawTipPosition(position + rotation * FINGER_VECTOR);
        
        // Store the one fingertip in the palm structure so we can track velocity
        glm::vec3 oldTipPosition = palm->getTipRawPosition();
        palm->setTipVelocity((newTipPosition - oldTipPosition) / deltaTime / 1000.f);
        palm->setTipPosition(newTipPosition);
        
        // three fingers indicates to the skeleton that we have enough data to determine direction
        palm->getFingers().clear();
        palm->getFingers().push_back(finger);
        palm->getFingers().push_back(finger);
        palm->getFingers().push_back(finger);
    }

    if (numActiveControllers == 2) {
        updateCalibration(controllers);
    }

    // if the controllers haven't been moved in a while, disable
    const unsigned int MOVEMENT_DISABLE_DURATION = 30 * 1000 * 1000;
    if (usecTimestampNow() - _lastMovement > MOVEMENT_DISABLE_DURATION) {
        for (std::vector<PalmData>::iterator it = hand->getPalms().begin(); it != hand->getPalms().end(); it++) {
            it->setActive(false);
        }
    }
#endif  // HAVE_SIXENSE
}