void OpenVrDisplayPlugin::activate() {
    // Force the standing (room-scale) sensor mode whenever this plugin is active.
    _container->setIsOptionChecked(StandingHMDSensorMode, true);

    if (!_system) {
        _system = acquireOpenVrSystem();
    }
    Q_ASSERT(_system);
    if (!_system) {
        // Q_ASSERT compiles to a no-op in release builds; bail out explicitly
        // rather than dereferencing a null IVRSystem below.
        return;
    }

    _system->GetRecommendedRenderTargetSize(&_renderTargetSize.x, &_renderTargetSize.y);
    // Recommended render target size is per-eye, so double the X size for
    // left + right eyes (side-by-side layout).
    _renderTargetSize.x *= 2;

    {
        Lock lock(_poseMutex);
        openvr_for_each_eye([&](vr::Hmd_Eye eye) {
            _eyeOffsets[eye] = toGlm(_system->GetEyeToHeadTransform(eye));
            _eyeProjections[eye] = toGlm(_system->GetProjectionMatrix(eye, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, vr::API_OpenGL));
        });
        // FIXME Calculate the proper combined projection by using GetProjectionRaw values from both eyes
        _cullingProjection = _eyeProjections[0];
    }

    _compositor = vr::VRCompositor();
    Q_ASSERT(_compositor);
    if (!_compositor) {
        // Same rationale: never continue activation with a null compositor in release builds.
        return;
    }
    HmdDisplayPlugin::activate();
}
void OpenVrDisplayPlugin::finishFrame() {
//    swapBuffers();
    doneCurrent();
    // Block until the compositor says it is time for the next frame and fetch
    // the latest predicted poses for every tracked device slot.
    _compositor->WaitGetPoses(_trackedDevicePose, vr::k_unMaxTrackedDeviceCount);
    // k_unMaxTrackedDeviceCount is unsigned; use an unsigned index to avoid a
    // signed/unsigned comparison in the loop condition.
    for (uint32_t i = 0; i < vr::k_unMaxTrackedDeviceCount; i++) {
        // Re-base each device pose into the (reset) sensor space.
        _trackedDevicePoseMat4[i] = _sensorResetMat * toGlm(_trackedDevicePose[i].mDeviceToAbsoluteTracking);
    }
    // Device index 0 is the HMD itself; both eyes share the head pose here
    // (per-eye offsets are applied elsewhere).
    openvr_for_each_eye([&](vr::Hmd_Eye eye) {
        _eyesData[eye]._pose = _trackedDevicePoseMat4[0];
    });
}
bool OpenVrDisplayPlugin::internalActivate() {
    Parent::internalActivate();

    // Force the standing (room-scale) sensor mode whenever this plugin is active.
    _container->setIsOptionChecked(StandingHMDSensorMode, true);

    if (!_system) {
        _system = acquireOpenVrSystem();
    }
    if (!_system) {
        qWarning() << "Failed to initialize OpenVR";
        return false;
    }

    _system->GetRecommendedRenderTargetSize(&_renderTargetSize.x, &_renderTargetSize.y);
    // Recommended render target size is per-eye, so double the X size for
    // left + right eyes (side-by-side layout).
    _renderTargetSize.x *= 2;

    {
        Lock lock(_poseMutex);
        openvr_for_each_eye([&](vr::Hmd_Eye eye) {
            _eyeOffsets[eye] = toGlm(_system->GetEyeToHeadTransform(eye));
            _eyeProjections[eye] = toGlm(_system->GetProjectionMatrix(eye, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, vr::API_OpenGL));
        });
        // FIXME Calculate the proper combined projection by using GetProjectionRaw values from both eyes
        _cullingProjection = _eyeProjections[0];
    }

    _compositor = vr::VRCompositor();
    Q_ASSERT(_compositor);
    if (!_compositor) {
        // Q_ASSERT is a no-op in release builds; fail activation explicitly
        // instead of dereferencing a null compositor later on.
        qWarning() << "Failed to acquire OpenVR compositor";
        return false;
    }

    // enable async time warp
    // _compositor->ForceInterleavedReprojectionOn(true);

    // Set up the default sensor space so the UI overlay aligns with the front of the room.
    auto chaperone = vr::VRChaperone();
    if (chaperone) {
        const float UI_RADIUS = 1.0f;
        const float UI_HEIGHT = 1.6f;
        const float UI_Z_OFFSET = 0.5f;  // float literal (was 0.5, a double narrowed to float)

        float xSize, zSize;
        chaperone->GetPlayAreaSize(&xSize, &zSize);
        // Place the UI at eye height, just inside the front edge of the play area.
        glm::vec3 uiPos(0.0f, UI_HEIGHT, UI_RADIUS - (0.5f * zSize) - UI_Z_OFFSET);
        _sensorResetMat = glm::inverse(createMatFromQuatAndPos(glm::quat(), uiPos));
    } else {
        qDebug() << "OpenVR: error could not get chaperone pointer";
    }

    return true;
}
// NOTE(review): this variant targets a very early OpenVR SDK — vr::VR_Init with a
// single HmdError out-param, IVRSystem::GetWindowBounds/GetEyeOutputViewport, and
// IVRCompositor::SetGraphicsDevice/GetLastError were all removed in later SDKs.
void OpenVrDisplayPlugin::activate() {
    CONTAINER->setIsOptionChecked(StandingHMDSensorMode, true);

    // Reference-count the shared HMD handle — presumably decremented on
    // deactivate so VR_Shutdown runs only when the last user releases it; TODO confirm.
    hmdRefCount++;
    vr::HmdError eError = vr::HmdError_None;
    if (!_hmd) {
        _hmd = vr::VR_Init(&eError);
        // NOTE(review): Q_ASSERT is a no-op in release builds, so init failure
        // is not handled there and _hmd is dereferenced below regardless.
        Q_ASSERT(eError == vr::HmdError_None);
    }
    Q_ASSERT(_hmd);

    _hmd->GetWindowBounds(&_windowPosition.x, &_windowPosition.y, &_windowSize.x, &_windowSize.y);
    _hmd->GetRecommendedRenderTargetSize(&_renderTargetSize.x, &_renderTargetSize.y);
    // Recommended render target size is per-eye, so double the X size for 
    // left + right eyes
    _renderTargetSize.x *= 2;
    // Cache per-eye viewport, projection, and head-to-eye offset up front.
    openvr_for_each_eye([&](vr::Hmd_Eye eye) {
        PerEyeData& eyeData = _eyesData[eye];
        _hmd->GetEyeOutputViewport(eye, 
            &eyeData._viewportOrigin.x, &eyeData._viewportOrigin.y, 
            &eyeData._viewportSize.x, &eyeData._viewportSize.y);
        eyeData._projectionMatrix = toGlm(_hmd->GetProjectionMatrix(eye, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, vr::API_OpenGL));
        eyeData._eyeOffset = toGlm(_hmd->GetEyeToHeadTransform(eye));
    });


    _compositor = (vr::IVRCompositor*)vr::VR_GetGenericInterface(vr::IVRCompositor_Version, &eError);
    Q_ASSERT(eError == vr::HmdError_None);
    Q_ASSERT(_compositor);

    // NULL device: the compositor binds to the current OpenGL context.
    _compositor->SetGraphicsDevice(vr::Compositor_DeviceType_OpenGL, NULL);

    // First call with (NULL, 0) returns the required buffer size (including the
    // terminating NUL); > 1 means a non-empty error message is pending.
    uint32_t unSize = _compositor->GetLastError(NULL, 0);
    if (unSize > 1) {
        // TODO(review): replace raw new[]/delete[] with std::vector<char> —
        // printf would leak the buffer if it threw (it can't, but RAII is cheaper).
        char* buffer = new char[unSize];
        _compositor->GetLastError(buffer, unSize);
        printf("Compositor - %s\n", buffer);
        delete[] buffer;
    }
    Q_ASSERT(unSize <= 1);
    WindowOpenGLDisplayPlugin::activate();
}
// ---- Example #5 (alternate implementation, mid-generation OpenVR SDK) ----
void OpenVrDisplayPlugin::activate() {
    // Force the standing (room-scale) sensor mode whenever this plugin is active.
    _container->setIsOptionChecked(StandingHMDSensorMode, true);

    if (!_hmd) {
        _hmd = acquireOpenVrSystem();
    }
    Q_ASSERT(_hmd);
    if (!_hmd) {
        // Q_ASSERT compiles to a no-op in release builds; bail out explicitly
        // rather than dereferencing a null IVRSystem below.
        return;
    }

    _hmd->GetRecommendedRenderTargetSize(&_renderTargetSize.x, &_renderTargetSize.y);
    // Recommended render target size is per-eye, so double the X size for
    // left + right eyes (side-by-side layout).
    _renderTargetSize.x *= 2;
    // Cache the per-eye projection and head-to-eye offset up front.
    openvr_for_each_eye([&](vr::Hmd_Eye eye) {
        PerEyeData& eyeData = _eyesData[eye];
        eyeData._projectionMatrix = toGlm(_hmd->GetProjectionMatrix(eye, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, vr::API_OpenGL));
        eyeData._eyeOffset = toGlm(_hmd->GetEyeToHeadTransform(eye));
    });
    _compositor = vr::VRCompositor();
    Q_ASSERT(_compositor);
    if (!_compositor) {
        // Same rationale: never continue activation with a null compositor in release builds.
        return;
    }
    WindowOpenGLDisplayPlugin::activate();
}
// ---- Example #6 (alternate implementation: internalPresent) ----
void OpenVrDisplayPlugin::internalPresent() {
    // Flip y-axis since GL UV coords are backwards.
    static vr::VRTextureBounds_t leftBounds{ 0, 0, 0.5f, 1 };
    static vr::VRTextureBounds_t rightBounds{ 0.5f, 0, 1, 1 };
    // Both eyes are packed side-by-side into one scene texture; submit each
    // half to the compositor with its own UV bounds.
    vr::Texture_t texture{ (void*)_currentSceneTexture, vr::API_OpenGL, vr::ColorSpace_Auto };
    {
        Lock lock(_mutex);
        _compositor->Submit(vr::Eye_Left, &texture, &leftBounds);
        _compositor->Submit(vr::Eye_Right, &texture, &rightBounds);
    }
    // Ensure the GL commands producing the submitted texture have completed
    // before blocking on the compositor for the next frame's poses.
    glFinish();
    {
        Lock lock(_mutex);
        _compositor->WaitGetPoses(_trackedDevicePose, vr::k_unMaxTrackedDeviceCount, nullptr, 0);
        // k_unMaxTrackedDeviceCount is unsigned; use an unsigned index to
        // avoid a signed/unsigned comparison in the loop condition.
        for (uint32_t i = 0; i < vr::k_unMaxTrackedDeviceCount; i++) {
            // Re-base each device pose into the (reset) sensor space.
            _trackedDevicePoseMat4[i] = _sensorResetMat * toGlm(_trackedDevicePose[i].mDeviceToAbsoluteTracking);
        }
        // Device index 0 is the HMD; both eyes share the head pose here.
        openvr_for_each_eye([&](vr::Hmd_Eye eye) {
            _eyesData[eye]._pose = _trackedDevicePoseMat4[0];
        });
    }

    //WindowOpenGLDisplayPlugin::internalPresent();
}