Example #1
void OpenVrDisplayPlugin::beginFrameRender(uint32_t frameIndex) {

    double displayFrequency = _system->GetFloatTrackedDeviceProperty(vr::k_unTrackedDeviceIndex_Hmd, vr::Prop_DisplayFrequency_Float);
    double frameDuration = 1.0 / displayFrequency;
    double vsyncToPhotons = _system->GetFloatTrackedDeviceProperty(vr::k_unTrackedDeviceIndex_Hmd, vr::Prop_SecondsFromVsyncToPhotons_Float);

    FrameInfo frame;
#if THREADED_PRESENT
    // 3 frames of prediction + vsyncToPhotons = 44ms total
    const double NUM_PREDICTION_FRAMES = 3.0;
    frame.predictedDisplayTime = NUM_PREDICTION_FRAMES * frameDuration + vsyncToPhotons;
#else
    frame.predictedDisplayTime = frameDuration + vsyncToPhotons;
#endif

    vr::TrackedDevicePose_t predictedTrackedDevicePose[vr::k_unMaxTrackedDeviceCount];
    _system->GetDeviceToAbsoluteTrackingPose(vr::TrackingUniverseStanding, frame.predictedDisplayTime, predictedTrackedDevicePose, vr::k_unMaxTrackedDeviceCount);

    // copy and process predictedTrackedDevicePoses
    for (uint32_t i = 0; i < vr::k_unMaxTrackedDeviceCount; i++) {
        _trackedDevicePose[i] = predictedTrackedDevicePose[i];
        _trackedDevicePoseMat4[i] = _sensorResetMat * toGlm(_trackedDevicePose[i].mDeviceToAbsoluteTracking);
        _trackedDeviceLinearVelocities[i] = transformVectorFast(_sensorResetMat, toGlm(_trackedDevicePose[i].vVelocity));
        _trackedDeviceAngularVelocities[i] = transformVectorFast(_sensorResetMat, toGlm(_trackedDevicePose[i].vAngularVelocity));
    }
    frame.headPose = _trackedDevicePoseMat4[0];
    _currentRenderFrameInfo.set(frame);

    Lock lock(_mutex);
    _frameInfos[frameIndex] = frame;
}
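The snippet above leans on toGlm overloads for OpenVR's matrix and vector structs that are not shown here. A plausible sketch, assuming the usual hand-written conversion (OpenVR's HmdMatrix34_t is row-major while glm is column-major, so the constructor below transposes):

#include <glm/glm.hpp>
#include <openvr.h>

inline glm::mat4 toGlm(const vr::HmdMatrix34_t& m) {
    // Each group of four arguments fills one glm column; the last column holds the translation.
    return glm::mat4(
        m.m[0][0], m.m[1][0], m.m[2][0], 0.0f,
        m.m[0][1], m.m[1][1], m.m[2][1], 0.0f,
        m.m[0][2], m.m[1][2], m.m[2][2], 0.0f,
        m.m[0][3], m.m[1][3], m.m[2][3], 1.0f);
}

inline glm::vec3 toGlm(const vr::HmdVector3_t& v) {
    return glm::vec3(v.v[0], v.v[1], v.v[2]);
}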
Example #2
glm::mat4 OpenVrDisplayPlugin::getHeadPose(uint32_t frameIndex) const {

    float displayFrequency = _system->GetFloatTrackedDeviceProperty(vr::k_unTrackedDeviceIndex_Hmd, vr::Prop_DisplayFrequency_Float);
    float frameDuration = 1.f / displayFrequency;
    float vsyncToPhotons = _system->GetFloatTrackedDeviceProperty(vr::k_unTrackedDeviceIndex_Hmd, vr::Prop_SecondsFromVsyncToPhotons_Float);

#if THREADED_PRESENT
    // TODO: this seems awfully long (44 ms total), but it produced the best results.
    const float NUM_PREDICTION_FRAMES = 3.0f;
    float predictedSecondsFromNow = NUM_PREDICTION_FRAMES * frameDuration + vsyncToPhotons;
#else
    uint64_t frameCounter;
    float timeSinceLastVsync;
    _system->GetTimeSinceLastVsync(&timeSinceLastVsync, &frameCounter);
    float predictedSecondsFromNow = 3.0f * frameDuration - timeSinceLastVsync + vsyncToPhotons;
#endif

    vr::TrackedDevicePose_t predictedTrackedDevicePose[vr::k_unMaxTrackedDeviceCount];
    _system->GetDeviceToAbsoluteTrackingPose(vr::TrackingUniverseSeated, predictedSecondsFromNow, predictedTrackedDevicePose, vr::k_unMaxTrackedDeviceCount);

    // copy and process predictedTrackedDevicePoses
    for (uint32_t i = 0; i < vr::k_unMaxTrackedDeviceCount; i++) {
        _trackedDevicePose[i] = predictedTrackedDevicePose[i];
        _trackedDevicePoseMat4[i] = _sensorResetMat * toGlm(_trackedDevicePose[i].mDeviceToAbsoluteTracking);
        _trackedDeviceLinearVelocities[i] = transformVectorFast(_sensorResetMat, toGlm(_trackedDevicePose[i].vVelocity));
        _trackedDeviceAngularVelocities[i] = transformVectorFast(_sensorResetMat, toGlm(_trackedDevicePose[i].vAngularVelocity));
    }
    return _trackedDevicePoseMat4[0];
}
Example #3
// eyePose and headPosition are in sensor space.
// the resulting matrix should be in view space.
glm::mat4 CompositorHelper::getReticleTransform(const glm::mat4& eyePose, const glm::vec3& headPosition) const {
    glm::mat4 result;
    if (isHMD()) {
        vec2 spherical = overlayToSpherical(getReticlePosition());
        vec3 overlaySurfacePoint = getPoint(spherical.x, spherical.y);  // overlay space
        vec3 sensorSurfacePoint = _modelTransform.transform(overlaySurfacePoint);  // sensor space
        vec3 d = sensorSurfacePoint - headPosition;
        vec3 reticlePosition;
        if (glm::length(d) >= EPSILON) {
            d = glm::normalize(d);
        } else {
            d = glm::normalize(overlaySurfacePoint);
        }
        reticlePosition = headPosition + (d * getReticleDepth());
        quat reticleOrientation = cancelOutRoll(glm::quat_cast(_currentDisplayPlugin->getHeadPose()));
        vec3 reticleScale = vec3(Cursor::Manager::instance().getScale() * reticleSize * getReticleDepth());
        return glm::inverse(eyePose) * createMatFromScaleQuatAndPos(reticleScale, reticleOrientation, reticlePosition);
    } else {
        static const float CURSOR_PIXEL_SIZE = 32.0f;
        const auto canvasSize = vec2(toGlm(_renderingWidget->size()));
        vec2 mousePosition = toGlm(_renderingWidget->mapFromGlobal(QCursor::pos()));
        mousePosition /= canvasSize;
        mousePosition *= 2.0;
        mousePosition -= 1.0;
        mousePosition.y *= -1.0f;

        vec2 mouseSize = CURSOR_PIXEL_SIZE / canvasSize;
        result = glm::scale(glm::translate(glm::mat4(), vec3(mousePosition, 0.0f)), vec3(mouseSize, 1.0f));
    }
    return result;
}
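The HMD branch assumes a createMatFromScaleQuatAndPos helper. A minimal reconstruction from the name and call site (hypothetical; the project's own version may differ):

#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>
#include <glm/gtc/quaternion.hpp>

inline glm::mat4 createMatFromScaleQuatAndPos(const glm::vec3& scale, const glm::quat& rot, const glm::vec3& pos) {
    // Compose translation * rotation * scale, so scale applies first in local space.
    return glm::translate(glm::mat4(1.0f), pos) * glm::mat4_cast(rot) * glm::scale(glm::mat4(1.0f), scale);
}

Multiplying by glm::inverse(eyePose) afterwards moves the reticle's sensor-space transform into view space, as the header comment requires.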
Example #4
void OpenVrDisplayPlugin::activate() {
    _container->setIsOptionChecked(StandingHMDSensorMode, true);

    if (!_system) {
        _system = acquireOpenVrSystem();
    }
    Q_ASSERT(_system);

    _system->GetRecommendedRenderTargetSize(&_renderTargetSize.x, &_renderTargetSize.y);
    // Recommended render target size is per-eye, so double the X size for 
    // left + right eyes
    _renderTargetSize.x *= 2;

    {
        Lock lock(_poseMutex);
        openvr_for_each_eye([&](vr::Hmd_Eye eye) {
            _eyeOffsets[eye] = toGlm(_system->GetEyeToHeadTransform(eye));
            _eyeProjections[eye] = toGlm(_system->GetProjectionMatrix(eye, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, vr::API_OpenGL));
        });
        // FIXME Calculate the proper combined projection by using GetProjectionRaw values from both eyes
        _cullingProjection = _eyeProjections[0];

    }

    _compositor = vr::VRCompositor();
    Q_ASSERT(_compositor);
    HmdDisplayPlugin::activate();
}
Example #5
controller::Pose ovrControllerRotationToHandRotation(ovrHandType hand, const ovrPoseStatef& handPose,
                                                    const ovrPoseStatef& lastHandPose) {
    static const glm::quat yFlip = glm::angleAxis(PI, Vectors::UNIT_Y);
    static const glm::quat quarterX = glm::angleAxis(PI_OVER_TWO, Vectors::UNIT_X);
    static const glm::quat touchToHand = yFlip * quarterX;
    
    static const glm::quat leftQuarterZ = glm::angleAxis(-PI_OVER_TWO, Vectors::UNIT_Z);
    static const glm::quat rightQuarterZ = glm::angleAxis(PI_OVER_TWO, Vectors::UNIT_Z);

    static const glm::quat leftRotationOffset = glm::inverse(leftQuarterZ) * touchToHand;
    static const glm::quat rightRotationOffset = glm::inverse(rightQuarterZ) * touchToHand;

    static const float CONTROLLER_LENGTH_OFFSET = 0.0762f;  // three inches
    static const glm::vec3 CONTROLLER_OFFSET = glm::vec3(CONTROLLER_LENGTH_OFFSET / 2.0f,
        -CONTROLLER_LENGTH_OFFSET / 2.0f,
        CONTROLLER_LENGTH_OFFSET * 1.5f);
    static const glm::vec3 leftTranslationOffset = glm::vec3(-1.0f, 1.0f, 1.0f) * CONTROLLER_OFFSET;
    static const glm::vec3 rightTranslationOffset = CONTROLLER_OFFSET;

    auto translationOffset = (hand == ovrHand_Left ? leftTranslationOffset : rightTranslationOffset);
    auto rotationOffset = (hand == ovrHand_Left ? leftRotationOffset : rightRotationOffset);

    glm::quat rotation = toGlm(handPose.ThePose.Orientation);

    controller::Pose pose;
    pose.translation = toGlm(lastHandPose.ThePose.Position);
    pose.translation += rotation * translationOffset;
    pose.rotation = rotation * rotationOffset;
    pose.angularVelocity = toGlm(lastHandPose.AngularVelocity);
    pose.velocity = toGlm(lastHandPose.LinearVelocity);
    pose.valid = true;
    return pose;
}
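The Oculus-side toGlm overloads used here are likewise not shown. A hedged sketch, inferred from the call sites (ovrVector3f and ovrQuatf are plain structs with x/y/z and x/y/z/w fields):

inline glm::vec3 toGlm(const ovrVector3f& v) {
    return glm::vec3(v.x, v.y, v.z);
}

inline glm::quat toGlm(const ovrQuatf& q) {
    // glm::quat's constructor takes w first.
    return glm::quat(q.w, q.x, q.y, q.z);
}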
Example #6
//----------------------------------------
void ofNode::createMatrix() {
	localTransformMatrix = glm::translate(glm::mat4(1.0), toGlm(position));
	localTransformMatrix = localTransformMatrix * glm::toMat4((const glm::quat&)orientation);
	localTransformMatrix = glm::scale(localTransformMatrix, toGlm(scale));
	
	updateAxis();
}
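For reference, the composition above is the conventional T * R * S order, which applies scale first in local space. A quick self-contained check of that ordering (standalone sketch, not project code):

#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>
#include <glm/gtx/quaternion.hpp>

int main() {
    glm::mat4 m = glm::translate(glm::mat4(1.0f), glm::vec3(1.0f, 2.0f, 3.0f)); // T
    m = m * glm::toMat4(glm::quat(1.0f, 0.0f, 0.0f, 0.0f));                     // R (identity)
    m = glm::scale(m, glm::vec3(2.0f));                                         // S
    // (1,0,0) scales to (2,0,0), then translates to (3,2,3).
    glm::vec3 p = glm::vec3(m * glm::vec4(1.0f, 0.0f, 0.0f, 1.0f));
    return (p == glm::vec3(3.0f, 2.0f, 3.0f)) ? 0 : 1;
}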
Example #7
void OculusBaseDisplayPlugin::activate() {
    _session = acquireOculusSession();

    _hmdDesc = ovr_GetHmdDesc(_session);

    _ipd = ovr_GetFloat(_session, OVR_KEY_IPD, _ipd);

    glm::uvec2 eyeSizes[2];
    _viewScaleDesc.HmdSpaceToWorldScaleInMeters = 1.0f;

    ovr_for_each_eye([&](ovrEyeType eye) {
        _eyeFovs[eye] = _hmdDesc.DefaultEyeFov[eye];
        ovrEyeRenderDesc& erd = _eyeRenderDescs[eye] = ovr_GetRenderDesc(_session, eye, _eyeFovs[eye]);
        ovrMatrix4f ovrPerspectiveProjection =
            ovrMatrix4f_Projection(erd.Fov, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, ovrProjection_RightHanded);
        _eyeProjections[eye] = toGlm(ovrPerspectiveProjection);
        _eyeOffsets[eye] = glm::translate(mat4(), toGlm(erd.HmdToEyeViewOffset));
        eyeSizes[eye] = toGlm(ovr_GetFovTextureSize(_session, eye, erd.Fov, 1.0f));
        _viewScaleDesc.HmdToEyeViewOffset[eye] = erd.HmdToEyeViewOffset;
    });

    auto combinedFov = _eyeFovs[0];
    combinedFov.LeftTan = combinedFov.RightTan = std::max(combinedFov.LeftTan, combinedFov.RightTan);
    _cullingProjection = toGlm(ovrMatrix4f_Projection(combinedFov, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, ovrProjection_RightHanded));

    _renderTargetSize = uvec2(
        eyeSizes[0].x + eyeSizes[1].x,
        std::max(eyeSizes[0].y, eyeSizes[1].y));

    if (!OVR_SUCCESS(ovr_ConfigureTracking(_session,
        ovrTrackingCap_Orientation | ovrTrackingCap_Position | ovrTrackingCap_MagYawCorrection, 0))) {
        qFatal("Could not attach to sensor device");
    }

    // Parent class relies on our _session initialization, so it must come after that.
    memset(&_sceneLayer, 0, sizeof(ovrLayerEyeFov));
    _sceneLayer.Header.Type = ovrLayerType_EyeFov;
    _sceneLayer.Header.Flags = ovrLayerFlag_TextureOriginAtBottomLeft;
    ovr_for_each_eye([&](ovrEyeType eye) {
        ovrFovPort & fov = _sceneLayer.Fov[eye] = _eyeRenderDescs[eye].Fov;
        ovrSizei & size = _sceneLayer.Viewport[eye].Size = ovr_GetFovTextureSize(_session, eye, fov, 1.0f);
        _sceneLayer.Viewport[eye].Pos = { eye == ovrEye_Left ? 0 : size.w, 0 };
    });

    // This must come after the initialization, so that the values calculated 
    // above are available during the customizeContext call (when not running
    // in threaded present mode)
    HmdDisplayPlugin::activate();
}
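toGlm for ovrMatrix4f is also assumed. Since ovrMatrix4f stores floats row-major and glm::make_mat4 reads column-major, a transpose-based sketch would match the projections used above (an assumption, not the project's verified implementation):

#include <glm/glm.hpp>
#include <glm/gtc/type_ptr.hpp>

inline glm::mat4 toGlm(const ovrMatrix4f& om) {
    // make_mat4 reads the 16 floats as columns; transposing restores the row-major data.
    return glm::transpose(glm::make_mat4(&om.M[0][0]));
}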
Example #8
bool OpenVrDisplayPlugin::internalActivate() {
    Parent::internalActivate();

    _container->setIsOptionChecked(StandingHMDSensorMode, true);

    if (!_system) {
        _system = acquireOpenVrSystem();
    }
    if (!_system) {
        qWarning() << "Failed to initialize OpenVR";
        return false;
    }

    _system->GetRecommendedRenderTargetSize(&_renderTargetSize.x, &_renderTargetSize.y);
    // Recommended render target size is per-eye, so double the X size for 
    // left + right eyes
    _renderTargetSize.x *= 2;

    {
        Lock lock(_poseMutex);
        openvr_for_each_eye([&](vr::Hmd_Eye eye) {
            _eyeOffsets[eye] = toGlm(_system->GetEyeToHeadTransform(eye));
            _eyeProjections[eye] = toGlm(_system->GetProjectionMatrix(eye, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, vr::API_OpenGL));
        });
        // FIXME Calculate the proper combined projection by using GetProjectionRaw values from both eyes
        _cullingProjection = _eyeProjections[0];

    }

    _compositor = vr::VRCompositor();
    Q_ASSERT(_compositor);

    // enable async time warp
    // _compositor->ForceInterleavedReprojectionOn(true);

    // set up default sensor space such that the UI overlay will align with the front of the room.
    auto chaperone = vr::VRChaperone();
    if (chaperone) {
        float const UI_RADIUS = 1.0f;
        float const UI_HEIGHT = 1.6f;
        float const UI_Z_OFFSET = 0.5f;

        float xSize, zSize;
        chaperone->GetPlayAreaSize(&xSize, &zSize);
        glm::vec3 uiPos(0.0f, UI_HEIGHT, UI_RADIUS - (0.5f * zSize) - UI_Z_OFFSET);
        _sensorResetMat = glm::inverse(createMatFromQuatAndPos(glm::quat(), uiPos));
    } else {
        qDebug() << "OpenVR: error could not get chaperone pointer";
    }

    return true;
}
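createMatFromQuatAndPos, used to build _sensorResetMat, is presumably the no-scale sibling of the helper sketched under Example #3. A hypothetical reconstruction:

inline glm::mat4 createMatFromQuatAndPos(const glm::quat& rot, const glm::vec3& pos) {
    // Rotation first, then translation: translate(pos) * mat4_cast(rot).
    return glm::translate(glm::mat4(1.0f), pos) * glm::mat4_cast(rot);
}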
Example #9
void ofMaterial::updateLights(const ofShader & shader,ofGLProgrammableRenderer & renderer) const{
	for(size_t i=0;i<ofLightsData().size();i++){
		string idx = ofToString(i);
		shared_ptr<ofLight::Data> light = ofLightsData()[i].lock();
		if(!light || !light->isEnabled){
			shader.setUniform1f("lights["+idx+"].enabled",0);
			continue;
		}
		auto lightEyePosition = renderer.getCurrentViewMatrix() * light->position;
		shader.setUniform1f("lights["+idx+"].enabled",1);
		shader.setUniform1f("lights["+idx+"].type", light->lightType);
		shader.setUniform4f("lights["+idx+"].position", lightEyePosition);
		shader.setUniform4f("lights["+idx+"].ambient", light->ambientColor);
		shader.setUniform4f("lights["+idx+"].specular", light->specularColor);
		shader.setUniform4f("lights["+idx+"].diffuse", light->diffuseColor);

		if(light->lightType!=OF_LIGHT_DIRECTIONAL){
			shader.setUniform1f("lights["+idx+"].constantAttenuation", light->attenuation_constant);
			shader.setUniform1f("lights["+idx+"].linearAttenuation", light->attenuation_linear);
			shader.setUniform1f("lights["+idx+"].quadraticAttenuation", light->attenuation_quadratic);
		}

		if(light->lightType==OF_LIGHT_SPOT){
			auto direction = toGlm(light->position).xyz() + light->direction;
			auto direction4 = renderer.getCurrentViewMatrix() * glm::vec4(direction,1.0);
			direction = direction4.xyz() / direction4.w;
			direction = direction - lightEyePosition.xyz();
			shader.setUniform3f("lights["+idx+"].spotDirection", glm::normalize(direction));
			shader.setUniform1f("lights["+idx+"].spotExponent", light->exponent);
			shader.setUniform1f("lights["+idx+"].spotCutoff", light->spotCutOff);
			shader.setUniform1f("lights["+idx+"].spotCosCutoff", cos(ofDegToRad(light->spotCutOff)));
		}else if(light->lightType==OF_LIGHT_DIRECTIONAL){
			auto halfVector = glm::normalize(glm::vec4(0.f, 0.f, 1.f, 0.f) + lightEyePosition);
			shader.setUniform3f("lights["+idx+"].halfVector", halfVector.xyz());
		}else if(light->lightType==OF_LIGHT_AREA){
			shader.setUniform1f("lights["+idx+"].width", light->width);
			shader.setUniform1f("lights["+idx+"].height", light->height);
			auto direction = light->position.xyz() + light->direction;
			auto direction4 = renderer.getCurrentViewMatrix() * glm::vec4(direction, 1.0);
			direction = direction4.xyz() / direction4.w;
			direction = direction - lightEyePosition.xyz();
			shader.setUniform3f("lights["+idx+"].spotDirection", glm::normalize(direction));
			auto right = toGlm(light->position).xyz() + light->right;
			auto right4 = renderer.getCurrentViewMatrix() * glm::vec4(right, 1.0);
			right = right4.xyz() / right4.w;
			right = right - lightEyePosition.xyz();
			auto up = glm::cross(toGlm(right), direction);
			shader.setUniform3f("lights["+idx+"].right", glm::normalize(toGlm(right)));
			shader.setUniform3f("lights["+idx+"].up", glm::normalize(up));
		}
	}
}
Example #10
void RenderableTextEntityItem::render(RenderArgs* args) {
    PerformanceTimer perfTimer("RenderableTextEntityItem::render");
    Q_ASSERT(getType() == EntityTypes::Text);
    
    static const float SLIGHTLY_BEHIND = -0.005f;
    glm::vec4 textColor = glm::vec4(toGlm(getTextColorX()), 1.0f);
    glm::vec4 backgroundColor = glm::vec4(toGlm(getBackgroundColorX()), 1.0f);
    glm::vec3 dimensions = getDimensions();
    
    // Render background
    glm::vec3 minCorner = glm::vec3(0.0f, -dimensions.y, SLIGHTLY_BEHIND);
    glm::vec3 maxCorner = glm::vec3(dimensions.x, 0.0f, SLIGHTLY_BEHIND);

    // Batch render calls
    Q_ASSERT(args->_batch);
    gpu::Batch& batch = *args->_batch;

    bool success;
    Transform transformToTopLeft = getTransformToCenter(success);
    if (!success) {
        return;
    }
    if (getFaceCamera()) {
        //rotate about vertical to face the camera
        glm::vec3 dPosition = args->_viewFrustum->getPosition() - getPosition();
        // If x and z are 0, atan(x, z) is undefined, so default to 0 degrees
        float yawRotation = dPosition.x == 0.0f && dPosition.z == 0.0f ? 0.0f : glm::atan(dPosition.x, dPosition.z);
        glm::quat orientation = glm::quat(glm::vec3(0.0f, yawRotation, 0.0f));
        transformToTopLeft.setRotation(orientation);
    }
    transformToTopLeft.postTranslate(glm::vec3(-0.5f, 0.5f, 0.0f)); // Go to the top left
    transformToTopLeft.setScale(1.0f); // Use a scale of one so that the text is not deformed
    
    batch.setModelTransform(transformToTopLeft);
    
    DependencyManager::get<DeferredLightingEffect>()->bindSimpleProgram(batch, false, false, false, true);
    DependencyManager::get<GeometryCache>()->renderQuad(batch, minCorner, maxCorner, backgroundColor);
    
    float scale = _lineHeight / _textRenderer->getFontSize();
    transformToTopLeft.setScale(scale); // Scale to have the correct line height
    batch.setModelTransform(transformToTopLeft);
    
    float leftMargin = 0.1f * _lineHeight, topMargin = 0.1f * _lineHeight;
    glm::vec2 bounds = glm::vec2(dimensions.x - 2.0f * leftMargin,
                                 dimensions.y - 2.0f * topMargin);
    _textRenderer->draw(batch, leftMargin / scale, -topMargin / scale, _text, textColor, bounds / scale);
    
}
Example #11
File: TextOverlay.cpp Project: bwent/hifi
void TextOverlay::setProperties(const QScriptValue& properties) {
    Overlay2D::setProperties(properties);
    _qmlElement->setX(_bounds.left());
    _qmlElement->setY(_bounds.top());
    _qmlElement->setWidth(_bounds.width());
    _qmlElement->setHeight(_bounds.height());
    _qmlElement->settextColor(toQmlColor(vec4(toGlm(_color), _alpha)));
    QScriptValue font = properties.property("font");
    if (font.isObject()) {
        if (font.property("size").isValid()) {
            setFontSize(font.property("size").toInt32());
        }
        QFont font(_qmlElement->fontFamily());
        font.setPixelSize(_qmlElement->fontSize());
        QFontMetrics fm(font);
        _qmlElement->setlineHeight(fm.lineSpacing() * 1.2);
    }

    QScriptValue text = properties.property("text");
    if (text.isValid()) {
        setText(text.toVariant().toString());
    }

    QScriptValue backgroundColor = properties.property("backgroundColor");
    if (backgroundColor.isValid()) {
        QScriptValue red = backgroundColor.property("red");
        QScriptValue green = backgroundColor.property("green");
        QScriptValue blue = backgroundColor.property("blue");
        if (red.isValid() && green.isValid() && blue.isValid()) {
            _backgroundColor.red = red.toVariant().toInt();
            _backgroundColor.green = green.toVariant().toInt();
            _backgroundColor.blue = blue.toVariant().toInt();
        }
    }

    if (properties.property("backgroundAlpha").isValid()) {
        _backgroundAlpha = properties.property("backgroundAlpha").toVariant().toFloat();
    }
    _qmlElement->setbackgroundColor(toQmlColor(vec4(toGlm(_backgroundColor), _backgroundAlpha)));

    if (properties.property("leftMargin").isValid()) {
        setLeftMargin(properties.property("leftMargin").toVariant().toInt());
    }

    if (properties.property("topMargin").isValid()) {
        setTopMargin(properties.property("topMargin").toVariant().toInt());
    }
}
Example #12
glmPolyline toGlm(const ofPolyline &_poly){
    glmPolyline poly;
    for (std::size_t i = 0; i < _poly.size(); i++) {
        poly.add(toGlm(_poly[i]));
    }
    return poly;
}
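A hypothetical usage of this converter (assuming openFrameworks' ofPolyline and the project's glmPolyline type):

ofPolyline src;
src.addVertex(0.0f, 0.0f, 0.0f);
src.addVertex(100.0f, 50.0f, 0.0f);
glmPolyline dst = toGlm(src);  // converts each vertex via the point-level toGlm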
Example #13
void RenderableBoxEntityItem::render(RenderArgs* args) {
    PerformanceTimer perfTimer("RenderableBoxEntityItem::render");
    Q_ASSERT(getType() == EntityTypes::Box);
    Q_ASSERT(args->_batch);

    if (!_procedural) {
        _procedural.reset(new Procedural(this->getUserData()));
        _procedural->_vertexSource = simple_vert;
        _procedural->_fragmentSource = simple_frag;
        _procedural->_state->setCullMode(gpu::State::CULL_NONE);
        _procedural->_state->setDepthTest(true, true, gpu::LESS_EQUAL);
        _procedural->_state->setBlendFunction(false,
            gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::INV_SRC_ALPHA,
            gpu::State::FACTOR_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::ONE);
    }

    gpu::Batch& batch = *args->_batch;
    glm::vec4 cubeColor(toGlm(getXColor()), getLocalRenderAlpha());

    if (_procedural->ready()) {
        batch.setModelTransform(getTransformToCenter()); // we want to include the scale as well
        _procedural->prepare(batch, this->getDimensions());
        auto color = _procedural->getColor(cubeColor);
        batch._glColor4f(color.r, color.g, color.b, color.a);
        DependencyManager::get<GeometryCache>()->renderCube(batch);
    } else {
        DependencyManager::get<DeferredLightingEffect>()->renderSolidCubeInstance(batch, getTransformToCenter(), cubeColor);
    }

    RenderableDebugableEntityItem::render(this, args);
}
Example #14
void ParabolaPointer::editRenderStatePath(const std::string& state, const QVariant& pathProps) {
    auto renderState = std::static_pointer_cast<RenderState>(_renderStates[state]);
    if (renderState) {
        QVariantMap pathMap = pathProps.toMap();
        glm::vec3 color = glm::vec3(RenderState::ParabolaRenderItem::DEFAULT_PARABOLA_COLOR);
        float alpha = RenderState::ParabolaRenderItem::DEFAULT_PARABOLA_COLOR.a;
        float width = RenderState::ParabolaRenderItem::DEFAULT_PARABOLA_WIDTH;
        bool isVisibleInSecondaryCamera = RenderState::ParabolaRenderItem::DEFAULT_PARABOLA_ISVISIBLEINSECONDARYCAMERA;
        bool drawInFront = RenderState::ParabolaRenderItem::DEFAULT_PARABOLA_DRAWINFRONT;
        bool enabled = false;
        if (!pathMap.isEmpty()) {
            enabled = true;
            if (pathMap["color"].isValid()) {
                color = toGlm(u8vec3FromVariant(pathMap["color"]));
            }
            if (pathMap["alpha"].isValid()) {
                alpha = pathMap["alpha"].toFloat();
            }
            if (pathMap["width"].isValid()) {
                width = pathMap["width"].toFloat();
                renderState->setPathWidth(width);
            }
            if (pathMap["isVisibleInSecondaryCamera"].isValid()) {
                isVisibleInSecondaryCamera = pathMap["isVisibleInSecondaryCamera"].toBool();
            }
            if (pathMap["drawInFront"].isValid()) {
                drawInFront = pathMap["drawInFront"].toBool();
            }
        }
        renderState->editParabola(color, alpha, width, isVisibleInSecondaryCamera, drawInFront, enabled);
    }
}
Example #15
void CompositorHelper::setReticlePosition(const glm::vec2& position, bool sendFakeEvent) {
    if (isHMD()) {
        glm::vec2 maxOverlayPosition = _currentDisplayPlugin->getRecommendedUiSize();
        // FIXME don't allow negative mouseExtra
        glm::vec2 mouseExtra = (MOUSE_EXTENTS_PIXELS - maxOverlayPosition) / 2.0f;
        glm::vec2 minMouse = vec2(0) - mouseExtra;
        glm::vec2 maxMouse = maxOverlayPosition + mouseExtra;

        {
            QMutexLocker locker(&_reticleLock);
            _reticlePositionInHMD = glm::clamp(position, minMouse, maxMouse);
        }

        if (sendFakeEvent) {
            sendFakeMouseEvent();
        }
    } else {
        // NOTE: This is some debugging code we will leave in while debugging various reticle movement strategies,
        // remove it after we're done
        const float REASONABLE_CHANGE = 50.0f;
        glm::vec2 oldPos = toGlm(QCursor::pos());
        auto distance = glm::distance(oldPos, position);
        if (distance > REASONABLE_CHANGE) {
            qDebug() << "Contrller::ScriptingInterface ---- UNREASONABLE CHANGE! distance:" <<
                distance << " oldPos:" << oldPos.x << "," << oldPos.y << " newPos:" << position.x << "," << position.y;
        }

        QCursor::setPos(position.x, position.y);
    }
}
Example #16
glm::vec2 CompositorHelper::getReticlePosition() const {
    if (isHMD()) {
        QMutexLocker locker(&_reticleLock);
        return _reticlePositionInHMD;
    }
    return toGlm(_renderingWidget->mapFromGlobal(QCursor::pos()));
}
Example #17
bool OculusBaseDisplayPlugin::beginFrameRender(uint32_t frameIndex) {
    _currentRenderFrameInfo = FrameInfo();
    _currentRenderFrameInfo.sensorSampleTime = ovr_GetTimeInSeconds();
    _currentRenderFrameInfo.predictedDisplayTime = ovr_GetPredictedDisplayTime(_session, frameIndex);
    auto trackingState = ovr_GetTrackingState(_session, _currentRenderFrameInfo.predictedDisplayTime, ovrTrue);
    _currentRenderFrameInfo.renderPose = toGlm(trackingState.HeadPose.ThePose);
    _currentRenderFrameInfo.presentPose = _currentRenderFrameInfo.renderPose;

    std::array<glm::mat4, 2> handPoses;
    // Make controller poses available to the presentation thread
    ovr_for_each_hand([&](ovrHandType hand) {
        // Require both orientation and position tracking; the flags must be combined
        // with bitwise OR (with &, the constant is 0 and the check below never fires).
        static const auto REQUIRED_HAND_STATUS = ovrStatus_OrientationTracked | ovrStatus_PositionTracked;
        if (REQUIRED_HAND_STATUS != (trackingState.HandStatusFlags[hand] & REQUIRED_HAND_STATUS)) {
            return;
        }

        auto correctedPose = ovrControllerPoseToHandPose(hand, trackingState.HandPoses[hand]);
        static const glm::quat HAND_TO_LASER_ROTATION = glm::rotation(Vectors::UNIT_Z, Vectors::UNIT_NEG_Y);
        handPoses[hand] = glm::translate(glm::mat4(), correctedPose.translation) * glm::mat4_cast(correctedPose.rotation * HAND_TO_LASER_ROTATION);
    });

    withRenderThreadLock([&] {
        _uiModelTransform = DependencyManager::get<CompositorHelper>()->getModelTransform();
        _handPoses = handPoses;
        _frameInfos[frameIndex] = _currentRenderFrameInfo;
    });
    return Parent::beginFrameRender(frameIndex);
}
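HAND_TO_LASER_ROTATION relies on glm::rotation from <glm/gtx/quaternion.hpp>, which returns the shortest-arc quaternion mapping one unit vector onto another. A small standalone check of that behavior:

#include <glm/glm.hpp>
#include <glm/gtx/quaternion.hpp>

// Rotating +Z onto -Y; applying the result to +Z should give roughly (0, -1, 0).
glm::quat q = glm::rotation(glm::vec3(0.0f, 0.0f, 1.0f), glm::vec3(0.0f, -1.0f, 0.0f));
glm::vec3 v = q * glm::vec3(0.0f, 0.0f, 1.0f);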
Example #18
void RenderableLightEntityItem::render(RenderArgs* args) {
    PerformanceTimer perfTimer("RenderableLightEntityItem::render");
    assert(getType() == EntityTypes::Light);
    glm::vec3 position = getPosition();
    glm::vec3 dimensions = getDimensions();
    glm::quat rotation = getRotation();
    float largestDiameter = glm::max(dimensions.x, dimensions.y, dimensions.z);

    glm::vec3 color = toGlm(getXColor());

    float intensity = getIntensity();
    float exponent = getExponent();
    float cutoff = glm::radians(getCutoff());

    if (_isSpotlight) {
        DependencyManager::get<DeferredLightingEffect>()->addSpotLight(position, largestDiameter / 2.0f,
            color, intensity, rotation, exponent, cutoff);
    } else {
        DependencyManager::get<DeferredLightingEffect>()->addPointLight(position, largestDiameter / 2.0f,
            color, intensity);
    }
    
#ifdef WANT_DEBUG
    Q_ASSERT(args->_batch);
    gpu::Batch& batch = *args->_batch;
    batch.setModelTransform(getTransformToCenter());
    DependencyManager::get<GeometryCache>()->renderWireSphere(batch, 0.5f, 15, 15, glm::vec4(color, 1.0f));
#endif
}
Example #19
glm::vec2 CompositorHelper::getReticlePosition() const {
    if (isHMD()) {
        QMutexLocker locker(&_reticleLock);
        return _reticlePositionInHMD;
    }
    return toGlm(QCursor::pos());
}
Example #20
glm::vec3 PlayerPhysicsComponent::getVelocity() const {
   btRigidBody *rigidBody = dynamic_cast<btRigidBody*>(getCollisionObject());
   if (!rigidBody) {
      return glm::vec3(0.0f);
   }

   return toGlm(rigidBody->getLinearVelocity());
}
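getVelocity() depends on a Bullet-to-glm conversion. A hedged sketch (btVector3 exposes x()/y()/z() accessors):

#include <glm/glm.hpp>
#include <LinearMath/btVector3.h>

inline glm::vec3 toGlm(const btVector3& v) {
    return glm::vec3(v.x(), v.y(), v.z());
}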
Example #21
bool OculusBaseDisplayPlugin::internalActivate() {
    _session = acquireOculusSession();
    if (!_session) {
        return false;
    }

    _hmdDesc = ovr_GetHmdDesc(_session);

    glm::uvec2 eyeSizes[2];
    _viewScaleDesc.HmdSpaceToWorldScaleInMeters = 1.0f;

    _ipd = 0;
    ovr_for_each_eye([&](ovrEyeType eye) {
        _eyeFovs[eye] = _hmdDesc.DefaultEyeFov[eye];
        ovrEyeRenderDesc& erd = _eyeRenderDescs[eye] = ovr_GetRenderDesc(_session, eye, _eyeFovs[eye]);
        ovrMatrix4f ovrPerspectiveProjection =
            ovrMatrix4f_Projection(erd.Fov, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, ovrProjection_ClipRangeOpenGL);
        _eyeProjections[eye] = toGlm(ovrPerspectiveProjection);
        _eyeOffsets[eye] = glm::translate(mat4(), toGlm(erd.HmdToEyeOffset));
        eyeSizes[eye] = toGlm(ovr_GetFovTextureSize(_session, eye, erd.Fov, 1.0f));
        _viewScaleDesc.HmdToEyeOffset[eye] = erd.HmdToEyeOffset;
        _ipd += glm::abs(glm::length(toGlm(erd.HmdToEyeOffset)));
    });

    auto combinedFov = _eyeFovs[0];
    combinedFov.LeftTan = combinedFov.RightTan = std::max(combinedFov.LeftTan, combinedFov.RightTan);
    _cullingProjection = toGlm(ovrMatrix4f_Projection(combinedFov, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, ovrProjection_ClipRangeOpenGL));

    _renderTargetSize = uvec2(
        eyeSizes[0].x + eyeSizes[1].x,
        std::max(eyeSizes[0].y, eyeSizes[1].y));

    memset(&_sceneLayer, 0, sizeof(ovrLayerEyeFov));
    _sceneLayer.Header.Type = ovrLayerType_EyeFov;
    _sceneLayer.Header.Flags = ovrLayerFlag_TextureOriginAtBottomLeft;
    ovr_for_each_eye([&](ovrEyeType eye) {
        ovrFovPort & fov = _sceneLayer.Fov[eye] = _eyeRenderDescs[eye].Fov;
        ovrSizei & size = _sceneLayer.Viewport[eye].Size = ovr_GetFovTextureSize(_session, eye, fov, 1.0f);
        _sceneLayer.Viewport[eye].Pos = { eye == ovrEye_Left ? 0 : size.w, 0 };
    });

    // This must come after the initialization, so that the values calculated 
    // above are available during the customizeContext call (when not running
    // in threaded present mode)
    return Parent::internalActivate();
}
Example #22
void showMinSpecWarning() {
    auto vrSystem = acquireOpenVrSystem();
    auto vrOverlay = vr::VROverlay();
    if (!vrOverlay) {
        qFatal("Unable to initialize SteamVR overlay manager");
    }

    vr::VROverlayHandle_t minSpecFailedOverlay = 0;
    if (vr::VROverlayError_None != vrOverlay->CreateOverlay(FAILED_MIN_SPEC_OVERLAY_NAME, FAILED_MIN_SPEC_OVERLAY_FRIENDLY_NAME, &minSpecFailedOverlay)) {
        qFatal("Unable to create overlay");
    }

    // Needed here for PathUtils
    QCoreApplication miniApp(__argc, __argv);

    vrSystem->ResetSeatedZeroPose();
    QString imagePath = PathUtils::resourcesPath() + "/images/steam-min-spec-failed.png";
    vrOverlay->SetOverlayFromFile(minSpecFailedOverlay, imagePath.toLocal8Bit().toStdString().c_str());
    vrOverlay->SetHighQualityOverlay(minSpecFailedOverlay);
    vrOverlay->SetOverlayWidthInMeters(minSpecFailedOverlay, 1.4f);
    vrOverlay->SetOverlayInputMethod(minSpecFailedOverlay, vr::VROverlayInputMethod_Mouse);
    vrOverlay->ShowOverlay(minSpecFailedOverlay);

    QTimer* timer = new QTimer(&miniApp);
    timer->setInterval(FAILED_MIN_SPEC_UPDATE_INTERVAL_MS); // Qt::CoarseTimer acceptable, we don't need this to be frame rate accurate
    QObject::connect(timer, &QTimer::timeout, [&] {
        vr::TrackedDevicePose_t vrPoses[vr::k_unMaxTrackedDeviceCount];
        vrSystem->GetDeviceToAbsoluteTrackingPose(vr::TrackingUniverseSeated, 0, vrPoses, vr::k_unMaxTrackedDeviceCount);
        auto headPose = toGlm(vrPoses[vr::k_unTrackedDeviceIndex_Hmd].mDeviceToAbsoluteTracking);
        auto overlayPose = toOpenVr(headPose * glm::translate(glm::mat4(), vec3(0, 0, -1)));
        vrOverlay->SetOverlayTransformAbsolute(minSpecFailedOverlay, vr::TrackingUniverseSeated, &overlayPose);

        vr::VREvent_t event;
        while (vrSystem->PollNextEvent(&event, sizeof(event))) {
            switch (event.eventType) {
                case vr::VREvent_Quit:
                    vrSystem->AcknowledgeQuit_Exiting();
                    QCoreApplication::quit();
                    break;

                case vr::VREvent_ButtonPress:
                    // Quit on any button press except for 'putting on the headset'
                    if (event.data.controller.button != vr::k_EButton_ProximitySensor) {
                        QCoreApplication::quit();
                    }
                    break;

                default:
                    break;
            }
        }

    });
    timer->start();

    QTimer::singleShot(FAILED_MIN_SPEC_AUTO_QUIT_INTERVAL_MS, &miniApp, &QCoreApplication::quit);
    miniApp.exec();
}
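toOpenVr is the inverse of the matrix conversion sketched under Example #1: glm's column-major mat4 back into OpenVR's row-major 3x4. A plausible sketch:

inline vr::HmdMatrix34_t toOpenVr(const glm::mat4& m) {
    vr::HmdMatrix34_t result;
    for (int r = 0; r < 3; ++r) {
        for (int c = 0; c < 4; ++c) {
            result.m[r][c] = m[c][r];  // glm indexes column-first
        }
    }
    return result;
}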
Example #23
glm::mat4 OculusBaseDisplayPlugin::getHeadPose(uint32_t frameIndex) const {
    static uint32_t lastFrameSeen = 0;
    auto displayTime = ovr_GetPredictedDisplayTime(_session, frameIndex);
    auto trackingState = ovr_GetTrackingState(_session, displayTime, frameIndex > lastFrameSeen);
    if (frameIndex > lastFrameSeen) {
        lastFrameSeen = frameIndex;
    }
    return toGlm(trackingState.HeadPose.ThePose);
}
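toGlm for a full ovrPosef plausibly combines the quaternion and vector conversions sketched under Example #5 (an assumption inferred from the call site):

inline glm::mat4 toGlm(const ovrPosef& p) {
    // Translation times rotation: the pose's orientation applied about its position.
    return glm::translate(glm::mat4(1.0f), toGlm(p.Position)) * glm::mat4_cast(toGlm(p.Orientation));
}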
Example #24
void RenderablePolyLineEntityItem::update(const quint64& now) {
    PolyLineUniforms uniforms;
    uniforms.color = toGlm(getXColor());
    memcpy(&_uniformBuffer.edit<PolyLineUniforms>(), &uniforms, sizeof(PolyLineUniforms));
    if (_pointsChanged || _strokeWidthsChanged || _normalsChanged) {
        updateVertices();
        updateGeometry();
    }

}
Example #25
void RenderableShapeEntityItem::render(RenderArgs* args) {
    PerformanceTimer perfTimer("RenderableShapeEntityItem::render");
    //Q_ASSERT(getType() == EntityTypes::Shape);
    Q_ASSERT(args->_batch);
    checkFading();

    if (!_procedural) {
        _procedural.reset(new Procedural(getUserData()));
        _procedural->_vertexSource = simple_vert;
        _procedural->_fragmentSource = simple_frag;
        _procedural->_opaqueState->setCullMode(gpu::State::CULL_NONE);
        _procedural->_opaqueState->setDepthTest(true, true, gpu::LESS_EQUAL);
        PrepareStencil::testMaskDrawShape(*_procedural->_opaqueState);
        _procedural->_opaqueState->setBlendFunction(false,
            gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::INV_SRC_ALPHA,
            gpu::State::FACTOR_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::ONE);
    }

    gpu::Batch& batch = *args->_batch;
    glm::vec4 color(toGlm(getXColor()), getLocalRenderAlpha());
    bool success;
    Transform modelTransform = getTransformToCenter(success);
    if (!success) {
        return;
    }
    if (_shape == entity::Sphere) {
        modelTransform.postScale(SPHERE_ENTITY_SCALE);
    }
    batch.setModelTransform(modelTransform); // use a transform with scale, rotation, registration point and translation
    if (_procedural->ready()) {
        _procedural->prepare(batch, getPosition(), getDimensions(), getOrientation());
        auto outColor = _procedural->getColor(color);
        outColor.a *= _procedural->isFading() ? Interpolate::calculateFadeRatio(_procedural->getFadeStartTime()) : 1.0f;
        batch._glColor4f(outColor.r, outColor.g, outColor.b, outColor.a);
        if (render::ShapeKey(args->_globalShapeKey).isWireframe()) {
            DependencyManager::get<GeometryCache>()->renderWireShape(batch, MAPPING[_shape]);
        } else {
            DependencyManager::get<GeometryCache>()->renderShape(batch, MAPPING[_shape]);
        }
    } else {
        // FIXME, support instanced multi-shape rendering using multidraw indirect
        color.a *= _isFading ? Interpolate::calculateFadeRatio(_fadeStartTime) : 1.0f;
        auto geometryCache = DependencyManager::get<GeometryCache>();
        auto pipeline = color.a < 1.0f ? geometryCache->getTransparentShapePipeline() : geometryCache->getOpaqueShapePipeline();
        
        if (render::ShapeKey(args->_globalShapeKey).isWireframe()) {
            geometryCache->renderWireShapeInstance(args, batch, MAPPING[_shape], color, pipeline);
        } else {
            geometryCache->renderSolidShapeInstance(args, batch, MAPPING[_shape], color, pipeline);
        }
    }

    static const auto triCount = DependencyManager::get<GeometryCache>()->getShapeTriangleCount(MAPPING[_shape]);
    args->_details._trianglesRendered += (int)triCount;
}
Example #26
void Text3DOverlay::render(RenderArgs* args) {
    if (!_renderVisible || !getParentVisible()) {
        return; // do nothing if we're not visible
    }

    Q_ASSERT(args->_batch);
    auto& batch = *args->_batch;

    auto transform = getRenderTransform();
    batch.setModelTransform(transform);

    glm::u8vec3 backgroundColor = getBackgroundColor();
    glm::vec4 quadColor(toGlm(backgroundColor), getBackgroundAlpha());

    glm::vec2 dimensions = getDimensions();
    glm::vec2 halfDimensions = dimensions * 0.5f;

    const float SLIGHTLY_BEHIND = -0.001f;

    glm::vec3 topLeft(-halfDimensions.x, -halfDimensions.y, SLIGHTLY_BEHIND);
    glm::vec3 bottomRight(halfDimensions.x, halfDimensions.y, SLIGHTLY_BEHIND);
    DependencyManager::get<GeometryCache>()->bindSimpleProgram(batch, false, quadColor.a < 1.0f, false, false, false);
    DependencyManager::get<GeometryCache>()->renderQuad(batch, topLeft, bottomRight, quadColor, _geometryId);

    // Same font properties as textSize()
    float maxHeight = (float)_textRenderer->computeExtent("Xy").y * LINE_SCALE_RATIO;

    float scaleFactor = (maxHeight / FIXED_FONT_SCALING_RATIO) * _lineHeight;

    glm::vec2 clipDimensions((dimensions.x - (_leftMargin + _rightMargin)) / scaleFactor,
                             (dimensions.y - (_topMargin + _bottomMargin)) / scaleFactor);

    transform.postTranslate(glm::vec3(-(halfDimensions.x - _leftMargin),
                                      halfDimensions.y - _topMargin, 0.001f));
    transform.setScale(scaleFactor);
    batch.setModelTransform(transform);

    glm::vec4 textColor = { toGlm(_color), getTextAlpha() };

    // FIXME: Factor out textRenderer so that Text3DOverlay overlay parts can be grouped by pipeline for a gpu performance increase.
    _textRenderer->draw(batch, 0, 0, getText(), textColor, glm::vec2(-1.0f), true);
}
Example #27
void OpenVrDisplayPlugin::activate() {
    CONTAINER->setIsOptionChecked(StandingHMDSensorMode, true);

    hmdRefCount++;
    vr::HmdError eError = vr::HmdError_None;
    if (!_hmd) {
        _hmd = vr::VR_Init(&eError);
        Q_ASSERT(eError == vr::HmdError_None);
    }
    Q_ASSERT(_hmd);

    _hmd->GetWindowBounds(&_windowPosition.x, &_windowPosition.y, &_windowSize.x, &_windowSize.y);
    _hmd->GetRecommendedRenderTargetSize(&_renderTargetSize.x, &_renderTargetSize.y);
    // Recommended render target size is per-eye, so double the X size for 
    // left + right eyes
    _renderTargetSize.x *= 2;
    openvr_for_each_eye([&](vr::Hmd_Eye eye) {
        PerEyeData& eyeData = _eyesData[eye];
        _hmd->GetEyeOutputViewport(eye, 
            &eyeData._viewportOrigin.x, &eyeData._viewportOrigin.y, 
            &eyeData._viewportSize.x, &eyeData._viewportSize.y);
        eyeData._projectionMatrix = toGlm(_hmd->GetProjectionMatrix(eye, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, vr::API_OpenGL));
        eyeData._eyeOffset = toGlm(_hmd->GetEyeToHeadTransform(eye));
    });

    _compositor = (vr::IVRCompositor*)vr::VR_GetGenericInterface(vr::IVRCompositor_Version, &eError);
    Q_ASSERT(eError == vr::HmdError_None);
    Q_ASSERT(_compositor);

    _compositor->SetGraphicsDevice(vr::Compositor_DeviceType_OpenGL, NULL);

    uint32_t unSize = _compositor->GetLastError(NULL, 0);
    if (unSize > 1) {
        char* buffer = new char[unSize];
        _compositor->GetLastError(buffer, unSize);
        printf("Compositor - %s\n", buffer);
        delete[] buffer;
    }
    Q_ASSERT(unSize <= 1);
    WindowOpenGLDisplayPlugin::activate();
}
Example #28
void RenderableLineEntityItem::updateGeometry() {
    auto geometryCache = DependencyManager::get<GeometryCache>();
    if (_lineVerticesID == GeometryCache::UNKNOWN_ID) {
        _lineVerticesID = geometryCache->allocateID();
    }
    if (_pointsChanged) {
        glm::vec4 lineColor(toGlm(getXColor()), getLocalRenderAlpha());
        geometryCache->updateVertices(_lineVerticesID, getLinePoints(), lineColor);
        _pointsChanged = false;
    }
}
Example #29
void OpenVrDisplayPlugin::finishFrame() {
//    swapBuffers();
    doneCurrent();
    _compositor->WaitGetPoses(_trackedDevicePose, vr::k_unMaxTrackedDeviceCount);
    for (uint32_t i = 0; i < vr::k_unMaxTrackedDeviceCount; i++) {
        _trackedDevicePoseMat4[i] = _sensorResetMat * toGlm(_trackedDevicePose[i].mDeviceToAbsoluteTracking);
    }
    openvr_for_each_eye([&](vr::Hmd_Eye eye) {
        _eyesData[eye]._pose = _trackedDevicePoseMat4[0];
    });
}
Example #30
void OpenVrDisplayPlugin::activate() {
    _container->setIsOptionChecked(StandingHMDSensorMode, true);

    if (!_hmd) {
        _hmd = acquireOpenVrSystem();
    }
    Q_ASSERT(_hmd);

    _hmd->GetRecommendedRenderTargetSize(&_renderTargetSize.x, &_renderTargetSize.y);
    // Recommended render target size is per-eye, so double the X size for 
    // left + right eyes
    _renderTargetSize.x *= 2;
    openvr_for_each_eye([&](vr::Hmd_Eye eye) {
        PerEyeData& eyeData = _eyesData[eye];
        eyeData._projectionMatrix = toGlm(_hmd->GetProjectionMatrix(eye, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, vr::API_OpenGL));
        eyeData._eyeOffset = toGlm(_hmd->GetEyeToHeadTransform(eye));
    });
    _compositor = vr::VRCompositor();
    Q_ASSERT(_compositor);
    WindowOpenGLDisplayPlugin::activate();
}