void OpenVrDisplayPlugin::beginFrameRender(uint32_t frameIndex) {
    // Predict when the frame we are about to render will actually reach the
    // display (photons), then sample the tracked-device poses at that time.
    //
    // NOTE: GetFloatTrackedDeviceProperty returns float, so keep these as
    // float (the previous double locals only widened a float result and were
    // inconsistent with getHeadPose()).
    const float displayFrequency = _system->GetFloatTrackedDeviceProperty(vr::k_unTrackedDeviceIndex_Hmd, vr::Prop_DisplayFrequency_Float);
    const float frameDuration = 1.f / displayFrequency;
    const float vsyncToPhotons = _system->GetFloatTrackedDeviceProperty(vr::k_unTrackedDeviceIndex_Hmd, vr::Prop_SecondsFromVsyncToPhotons_Float);

    FrameInfo frame;
#if THREADED_PRESENT
    // With the threaded present path the frame is shown ~3 refresh intervals
    // after render begins: 3 frames of prediction + vsyncToPhotons = 44ms total.
    const float NUM_PREDICTION_FRAMES = 3.0f;
    frame.predictedDisplayTime = NUM_PREDICTION_FRAMES * frameDuration + vsyncToPhotons;
#else
    frame.predictedDisplayTime = frameDuration + vsyncToPhotons;
#endif

    vr::TrackedDevicePose_t predictedTrackedDevicePose[vr::k_unMaxTrackedDeviceCount];
    _system->GetDeviceToAbsoluteTrackingPose(vr::TrackingUniverseStanding, frame.predictedDisplayTime, predictedTrackedDevicePose, vr::k_unMaxTrackedDeviceCount);

    // Copy the predicted poses into our per-device caches, re-expressing each
    // pose and its velocities in the sensor-reset (calibrated) frame.
    // uint32_t index avoids a signed/unsigned comparison with k_unMaxTrackedDeviceCount.
    for (uint32_t i = 0; i < vr::k_unMaxTrackedDeviceCount; i++) {
        _trackedDevicePose[i] = predictedTrackedDevicePose[i];
        _trackedDevicePoseMat4[i] = _sensorResetMat * toGlm(_trackedDevicePose[i].mDeviceToAbsoluteTracking);
        _trackedDeviceLinearVelocities[i] = transformVectorFast(_sensorResetMat, toGlm(_trackedDevicePose[i].vVelocity));
        _trackedDeviceAngularVelocities[i] = transformVectorFast(_sensorResetMat, toGlm(_trackedDevicePose[i].vAngularVelocity));
    }
    // Device 0 is always the HMD (vr::k_unTrackedDeviceIndex_Hmd).
    frame.headPose = _trackedDevicePoseMat4[0];
    _currentRenderFrameInfo.set(frame);

    // _frameInfos is shared with the present thread; guard the write.
    Lock lock(_mutex);
    _frameInfos[frameIndex] = frame;
}
// ---- Example #2 ----
glm::mat4 OpenVrDisplayPlugin::getHeadPose(uint32_t frameIndex) const {
    // Return the HMD pose predicted for the moment this frame's photons reach
    // the display, re-expressed in the sensor-reset (calibrated) frame.
    // Also refreshes the cached poses/velocities for every tracked device.
    float displayFrequency = _system->GetFloatTrackedDeviceProperty(vr::k_unTrackedDeviceIndex_Hmd, vr::Prop_DisplayFrequency_Float);
    float frameDuration = 1.f / displayFrequency;
    float vsyncToPhotons = _system->GetFloatTrackedDeviceProperty(vr::k_unTrackedDeviceIndex_Hmd, vr::Prop_SecondsFromVsyncToPhotons_Float);

#if THREADED_PRESENT
    // TODO: this seems awfully long, 44ms total, but it produced the best results.
    const float NUM_PREDICTION_FRAMES = 3.0f;
    float predictedSecondsFromNow = NUM_PREDICTION_FRAMES * frameDuration + vsyncToPhotons;
#else
    // GetTimeSinceLastVsync returns false (and may not write its outputs)
    // when no vsync has occurred yet, so zero-initialize both outputs to keep
    // the prediction arithmetic well-defined in that case.
    uint64_t frameCounter = 0;
    float timeSinceLastVsync = 0.0f;
    _system->GetTimeSinceLastVsync(&timeSinceLastVsync, &frameCounter);
    float predictedSecondsFromNow = 3.0f * frameDuration - timeSinceLastVsync + vsyncToPhotons;
#endif

    vr::TrackedDevicePose_t predictedTrackedDevicePose[vr::k_unMaxTrackedDeviceCount];
    _system->GetDeviceToAbsoluteTrackingPose(vr::TrackingUniverseSeated, predictedSecondsFromNow, predictedTrackedDevicePose, vr::k_unMaxTrackedDeviceCount);

    // Copy and re-express the predicted poses in the sensor-reset frame.
    // uint32_t index avoids a signed/unsigned comparison with k_unMaxTrackedDeviceCount.
    // NOTE(review): this const method mutates the pose caches — presumably
    // they are declared mutable; confirm against the class declaration.
    for (uint32_t i = 0; i < vr::k_unMaxTrackedDeviceCount; i++) {
        _trackedDevicePose[i] = predictedTrackedDevicePose[i];
        _trackedDevicePoseMat4[i] = _sensorResetMat * toGlm(_trackedDevicePose[i].mDeviceToAbsoluteTracking);
        _trackedDeviceLinearVelocities[i] = transformVectorFast(_sensorResetMat, toGlm(_trackedDevicePose[i].vVelocity));
        _trackedDeviceAngularVelocities[i] = transformVectorFast(_sensorResetMat, toGlm(_trackedDevicePose[i].vAngularVelocity));
    }
    // Device 0 is always the HMD (vr::k_unTrackedDeviceIndex_Hmd).
    return _trackedDevicePoseMat4[0];
}
// ---- Example #3 ----
 Pose Pose::transform(const glm::mat4& mat) const {
     // Re-express this pose in the frame described by `mat`: the translation
     // is transformed as a point, both velocities as vectors, and the
     // rotational parts are composed with the matrix's rotation component.
     const glm::quat frameRot = glmExtractRotation(mat);
     const auto newTranslation = transformPoint(mat, translation);
     const auto newRotation = frameRot * rotation;
     const auto newVelocity = transformVectorFast(mat, velocity);
     const auto newAngularVelocity = frameRot * angularVelocity;
     Pose result(newTranslation, newRotation, newVelocity, newAngularVelocity);
     result.valid = valid;  // validity is unaffected by the transform
     return result;
 }