Example #1
void GVRActivity::onDrawFrame() {
    ovrFrameParms parms = vrapi_DefaultFrameParms(&oculusJavaGlThread_, VRAPI_FRAME_INIT_DEFAULT, vrapi_GetTimeInSeconds(),
            NULL);
    parms.FrameIndex = ++frameIndex;
    parms.MinimumVsyncs = 1;
    parms.PerformanceParms = oculusPerformanceParms_;
    parms.Layers[VRAPI_FRAME_LAYER_TYPE_WORLD].Flags |= VRAPI_FRAME_LAYER_FLAG_CHROMATIC_ABERRATION_CORRECTION;

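    // Predict the head pose for the time this frame is expected to reach the display,
    // then apply the default head-and-neck model to derive the eye-level pose.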
    const double predictedDisplayTime = vrapi_GetPredictedDisplayTime(oculusMobile_, frameIndex);
    const ovrTracking baseTracking = vrapi_GetPredictedTracking(oculusMobile_, predictedDisplayTime);

    const ovrHeadModelParms headModelParms = vrapi_DefaultHeadModelParms();
    const ovrTracking tracking = vrapi_ApplyHeadModel(&headModelParms, &baseTracking);

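    // Re-sample the prediction at the pose timestamp, keeping the head-model-adjusted position.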
    ovrTracking updatedTracking = vrapi_GetPredictedTracking(oculusMobile_, tracking.HeadPose.TimeInSeconds);
    updatedTracking.HeadPose.Pose.Position = tracking.HeadPose.Pose.Position;

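    // Point the world layer's per-eye textures at the current swap-chain images
    // (a single shared buffer when multiview is enabled) and record the render pose.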
    for ( int eye = 0; eye < VRAPI_FRAME_LAYER_EYE_MAX; eye++ )
    {
        ovrFrameLayerTexture& eyeTexture = parms.Layers[VRAPI_FRAME_LAYER_TYPE_WORLD].Textures[eye];

        eyeTexture.ColorTextureSwapChain = frameBuffer_[use_multiview ? 0 : eye].mColorTextureSwapChain;
        eyeTexture.DepthTextureSwapChain = frameBuffer_[use_multiview ? 0 : eye].mDepthTextureSwapChain;
        eyeTexture.TextureSwapChainIndex = frameBuffer_[use_multiview ? 0 : eye].mTextureSwapChainIndex;
        eyeTexture.TexCoordsFromTanAngles = texCoordsTanAnglesMatrix_;
        eyeTexture.HeadPose = updatedTracking.HeadPose;
    }

    // Feed the predicted head orientation to the camera rig; when the device is
    // not docked in the headset, fall back to the rig's own rotation updates.
    if (nullptr == cameraRig_) {
        // Camera rig not created yet; nothing to update this frame.
    } else if (docked_) {
        const ovrQuatf& orientation = updatedTracking.HeadPose.Pose.Orientation;
        const glm::quat tmp(orientation.w, orientation.x, orientation.y, orientation.z);
        const glm::quat quat = glm::conjugate(glm::inverse(tmp));
        cameraRig_->setRotation(quat);
    } else {
        cameraRig_->updateRotation();
    }

    if (!sensoredSceneUpdated_ && docked_) {
        sensoredSceneUpdated_ = updateSensoredScene();
    }

    // Render the eye images.
    for (int eye = 0; eye < (use_multiview ? 1 : VRAPI_FRAME_LAYER_EYE_MAX); eye++) {

        beginRenderingEye(eye);

        oculusJavaGlThread_.Env->CallVoidMethod(viewManager_, onDrawEyeMethodId, eye);

        endRenderingEye(eye);
    }

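    // Release the framebuffer and hand the completed layer description to the compositor.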
    FrameBufferObject::unbind();
    vrapi_SubmitFrame(oculusMobile_, &parms);
}
Example #2
void GVRActivity::onDrawFrame() {
    ovrFrameParms parms = vrapi_DefaultFrameParms(&oculusJavaGlThread_, VRAPI_FRAME_INIT_DEFAULT, vrapi_GetTimeInSeconds(),
            NULL);
    parms.FrameIndex = ++frameIndex;
    parms.MinimumVsyncs = 1;
    parms.PerformanceParms = oculusPerformanceParms_;
    parms.Layers[VRAPI_FRAME_LAYER_TYPE_WORLD].Flags |= VRAPI_FRAME_LAYER_FLAG_CHROMATIC_ABERRATION_CORRECTION;

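    // Predict the head pose for this frame's display time and apply the default head model.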
    const double predictedDisplayTime = vrapi_GetPredictedDisplayTime(oculusMobile_, frameIndex);
    const ovrTracking baseTracking = vrapi_GetPredictedTracking(oculusMobile_, predictedDisplayTime);

    const ovrHeadModelParms headModelParms = vrapi_DefaultHeadModelParms();
    const ovrTracking tracking = vrapi_ApplyHeadModel(&headModelParms, &baseTracking);

    // Render the eye images.
    for (int eye = 0; eye < VRAPI_FRAME_LAYER_EYE_MAX; eye++) {
        ovrTracking updatedTracking = vrapi_GetPredictedTracking(oculusMobile_, tracking.HeadPose.TimeInSeconds);
        updatedTracking.HeadPose.Pose.Position = tracking.HeadPose.Pose.Position;

        beginRenderingEye(eye);

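        // Once rotation updates are arriving, switch to the sensored scene, then predict
        // the head rotation for this eye before calling back into Java to draw it.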
        if (!sensoredSceneUpdated_ && headRotationProvider_.receivingUpdates()) {
            sensoredSceneUpdated_ = updateSensoredScene();
        }
        headRotationProvider_.predict(*this, parms, (1 == eye ? 4.0f : 3.5f) / 60.0f);
        oculusJavaGlThread_.Env->CallVoidMethod(activityRenderingCallbacks_, onDrawEyeMethodId, eye);

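        // Publish the swap-chain image that was just rendered for this eye.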
        ovrFrameLayerTexture& eyeTexture = parms.Layers[VRAPI_FRAME_LAYER_TYPE_WORLD].Textures[eye];
        eyeTexture.ColorTextureSwapChain = frameBuffer_[eye].mColorTextureSwapChain;
        eyeTexture.DepthTextureSwapChain = frameBuffer_[eye].mDepthTextureSwapChain;
        eyeTexture.TextureSwapChainIndex = frameBuffer_[eye].mTextureSwapChainIndex;

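        // Stamp every layer's texture for this eye with the tan-angle mapping and render pose.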
        for (int layer = 0; layer < VRAPI_FRAME_LAYER_TYPE_MAX; layer++) {
            parms.Layers[layer].Textures[eye].TexCoordsFromTanAngles = texCoordsTanAnglesMatrix_;
            parms.Layers[layer].Textures[eye].HeadPose = updatedTracking.HeadPose;
        }

        endRenderingEye(eye);
    }

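    // Release the framebuffer and submit the frame to the compositor.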
    FrameBufferObject::unbind();
    vrapi_SubmitFrame(oculusMobile_, &parms);
}
Example #3
template <class R> OVR::Matrix4f GVRActivityT<R>::DrawEyeView(const int eye, const float fovDegrees) {
    const OVR::Matrix4f view = GetEyeView(eye, fovDegrees);

    // Repack the row-major Oculus view matrix into the column-major mvp_matrix so it renders correctly with the GVRf renderer.
    mvp_matrix = glm::mat4(view.M[0][0], view.M[1][0], view.M[2][0],
            view.M[3][0], view.M[0][1], view.M[1][1], view.M[2][1],
            view.M[3][1], view.M[0][2], view.M[1][2], view.M[2][2],
            view.M[3][2], view.M[0][3], view.M[1][3], view.M[2][3],
            view.M[3][3]);

    SetMVPMatrix(mvp_matrix);

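    // Switch to the sensor-driven scene once head rotation updates start arriving.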
    if (!sensoredSceneUpdated_ && headRotationProvider_.receivingUpdates()) {
        sensoredSceneUpdated_ = updateSensoredScene();
    }
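    // Predict the head rotation slightly ahead of scan-out for this eye and apply it to the rig.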
    glm::quat headRotation = headRotationProvider_.getPrediction(*this, (1 == eye ? 4.0f : 3.5f) / 60.0f);
    cameraRig_->getHeadTransform()->set_rotation(headRotation);

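    // Let the Java layer draw this eye; after the second eye, run the per-frame post-draw callback.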
    JNIEnv* jni = app->GetVrJni();
    jni->CallVoidMethod(javaObject, drawEyeViewMethodId, eye, fovDegrees);

    if (eye == 1) {
        jni->CallVoidMethod(javaObject, afterDrawEyesMethodId);
    }

    // Rebuild the view-projection matrix from the GVRf camera for the value returned to Oculus.
    glm::mat4 view_matrix = camera->getViewMatrix();
    glm::mat4 projection_matrix = camera->getProjectionMatrix();
    glm::mat4 vp_matrix = projection_matrix * view_matrix;

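    // Repack the column-major GLM matrix into OVR's row-major Matrix4f before returning it.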
    OVR::Matrix4f view2 = OVR::Matrix4f(vp_matrix[0][0], vp_matrix[1][0],
            vp_matrix[2][0], vp_matrix[3][0], vp_matrix[0][1], vp_matrix[1][1],
            vp_matrix[2][1], vp_matrix[3][1], vp_matrix[0][2], vp_matrix[1][2],
            vp_matrix[2][2], vp_matrix[3][2], vp_matrix[0][3], vp_matrix[1][3],
            vp_matrix[2][3], vp_matrix[3][3]);

    return view2;
}
Example #4
void GVRActivity::onDrawFrame(jobject jViewManager) {
    ovrFrameParms parms = vrapi_DefaultFrameParms(&oculusJavaGlThread_, VRAPI_FRAME_INIT_DEFAULT, vrapi_GetTimeInSeconds(),
            NULL);
    parms.FrameIndex = ++frameIndex;
    parms.MinimumVsyncs = 1;
    parms.PerformanceParms = oculusPerformanceParms_;

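    // Predict where the head will be when this frame reaches the display and apply the head model.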
    const double predictedDisplayTime = vrapi_GetPredictedDisplayTime(oculusMobile_, frameIndex);
    const ovrTracking baseTracking = vrapi_GetPredictedTracking(oculusMobile_, predictedDisplayTime);

    const ovrHeadModelParms headModelParms = vrapi_DefaultHeadModelParms();
    const ovrTracking tracking = vrapi_ApplyHeadModel(&headModelParms, &baseTracking);

    ovrTracking updatedTracking = vrapi_GetPredictedTracking(oculusMobile_, tracking.HeadPose.TimeInSeconds);
    updatedTracking.HeadPose.Pose.Position = tracking.HeadPose.Pose.Position;

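    // Describe the swap-chain images and render pose for each eye of the world layer.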
    for ( int eye = 0; eye < VRAPI_FRAME_LAYER_EYE_MAX; eye++ )
    {
        ovrFrameLayerTexture& eyeTexture = parms.Layers[0].Textures[eye];

        eyeTexture.ColorTextureSwapChain = frameBuffer_[use_multiview ? 0 : eye].mColorTextureSwapChain;
        eyeTexture.DepthTextureSwapChain = frameBuffer_[use_multiview ? 0 : eye].mDepthTextureSwapChain;
        eyeTexture.TextureSwapChainIndex = frameBuffer_[use_multiview ? 0 : eye].mTextureSwapChainIndex;
        eyeTexture.TexCoordsFromTanAngles = texCoordsTanAnglesMatrix_;
        eyeTexture.HeadPose = updatedTracking.HeadPose;
    }
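    // Enable chromatic aberration correction and, when the camera rig is frozen, lock the layer to the view.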
    parms.Layers[0].Flags |= VRAPI_FRAME_LAYER_FLAG_CHROMATIC_ABERRATION_CORRECTION;
    if (CameraRig::CameraRigType::FREEZE == cameraRig_->camera_rig_type()) {
        parms.Layers[0].Flags |= VRAPI_FRAME_LAYER_FLAG_FIXED_TO_VIEW;
    }

    // Feed the predicted head orientation into the camera rig's rotation sensor data;
    // when the device is not docked, fall back to the rig's own rotation updates.
    if (nullptr == cameraRig_) {
        // Camera rig not created yet; nothing to update this frame.
    } else if (docked_) {
        const ovrQuatf& orientation = updatedTracking.HeadPose.Pose.Orientation;
        const glm::quat tmp(orientation.w, orientation.x, orientation.y, orientation.z);
        const glm::quat quat = glm::conjugate(glm::inverse(tmp));

        cameraRig_->setRotationSensorData(0, quat.w, quat.x, quat.y, quat.z, 0, 0, 0);
        cameraRig_->updateRotation();
    } else {
        cameraRig_->updateRotation();
    }

    if (!sensoredSceneUpdated_ && docked_) {
        sensoredSceneUpdated_ = updateSensoredScene();
    }

    // Render the eye images.
    for (int eye = 0; eye < (use_multiview ? 1 : VRAPI_FRAME_LAYER_EYE_MAX); eye++) {

        beginRenderingEye(eye);
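        // Resolve the GL texture for the current swap-chain image so it can be handed to the Java draw callback.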
        int textureSwapChainIndex = frameBuffer_[eye].mTextureSwapChainIndex;
        int colorTexture = vrapi_GetTextureSwapChainHandle(frameBuffer_[eye].mColorTextureSwapChain, textureSwapChainIndex);

        oculusJavaGlThread_.Env->CallVoidMethod(jViewManager, onDrawEyeMethodId, eye, colorTexture);

        endRenderingEye(eye);
    }

    FrameBufferObject::unbind();

    // check if the controller is available
    if (gearController != nullptr && gearController->findConnectedGearController()) {
        // collect the controller input if available
        gearController->onFrame(predictedDisplayTime);
    }

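    // Hand the frame off to the asynchronous compositor.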
    vrapi_SubmitFrame(oculusMobile_, &parms);
}