void BaseRenderer::setupCameraPerspective(float pitch, float heading, float fov) { _projectionMatrix = makeProjectionMatrix(fov); _modelViewMatrix = Math::Matrix4(180.0f - heading, pitch, 0.0f, Math::EO_YXZ); Math::Matrix4 proj = _projectionMatrix; Math::Matrix4 model = _modelViewMatrix; proj.transpose(); model.transpose(); _mvpMatrix = proj * model; _frustum.setup(_mvpMatrix); _mvpMatrix.transpose(); }
// Receives cartesian images and calibration matrices
//
// Runs the peripheral stereo pipeline: rectifies the input pair, computes a
// cartesian disparity map, back-projects it to a 3D point cloud, and
// propagates uncertainty through the rectification maps. Results are
// accumulated into `sensor.stereo_data`, which is returned by value.
//
// Parameters:
//   left_image / right_image — input cartesian camera images
//   R1, R2                   — 3x3 rectification rotations (left/right)
//   P1_, P2_                 — 3x4 rectified projection matrices
//   left_to_center           — left-camera-to-center transform (currently
//                              unused: see the commented-out transf_ line)
StereoData PeripheralStereo::computeStereo(const cv::Mat & left_image,
		const cv::Mat & right_image,
		const cv::Mat & R1,
		const cv::Mat & R2,
		const cv::Mat & P1_,
		const cv::Mat & P2_,
		const cv::Mat & left_to_center) {
	// Disparity-to-depth reprojection matrix Q, derived from the right
	// projection matrix with unit scale factors.
	cv::Mat Q=makeProjectionMatrix(P2_,1.0,1.0);
	std::cout << "Q:" << Q << std::endl;

	/* EMULATE HUMAN EYE (START) */
	// Convert cartesian to cortical
	/*sensor.stereo_data.left_cortical_image=sensor.to_cortical(left_image);
	sensor.stereo_data.right_cortical_image=sensor.to_cortical(right_image);

	// Convert cortical to cartesian
	sensor.stereo_data.left_retinal_image=sensor.to_cartesian(sensor.stereo_data.left_cortical_image);
	sensor.stereo_data.right_retinal_image=sensor.to_cartesian(sensor.stereo_data.right_cortical_image);//*/
	/* EMULATE HUMAN EYE (FINISH) */

	// NOTE(review): the retinal images consumed below are assigned only in
	// the commented-out block above — confirm they are filled elsewhere
	// (e.g. inside `sensor`), otherwise rectification runs on stale/empty
	// images. The `left_image`/`right_image` parameters are otherwise unused
	// here beyond the Q computation path.
	cv::Mat rectified_left_image;
	cv::Mat rectified_right_image;
	stereoRectify(sensor.stereo_data.left_retinal_image,
			sensor.stereo_data.right_retinal_image,
			R1,
			R2,
			P1_,
			P2_,
			rectified_left_image,
			rectified_right_image);

	////////////////////////////////////////////////////////////////////////////////
	// Compute Stereo (hacked in the cartesian domain; ideally this should be     //
	// done in the cortical domain)                                               //
	////////////////////////////////////////////////////////////////////////////////
	getCartesianDisparityMap(rectified_left_image,
			rectified_right_image,
			sensor.stereo_data.disparity_image,
			sensor.stereo_data.disparity_values);

	// Embed the 3x3 rectification rotation R1 into a 4x4 homogeneous matrix
	// so it can be composed with (and inverted via transpose for) the
	// point-cloud transform.
	cv::Mat R1_ = cv::Mat::eye(4,4,CV_64F);
	R1.copyTo(R1_(cv::Range(0,3), cv::Range(0,3)));
	// De-rectify only: the left-to-center composition is disabled.
	//cv::Mat transf_=left_to_center*R1_.t();
	cv::Mat transf_=R1_.t();

	// NOTE(review): `disparity32F` is not declared in this function —
	// presumably a class member; verify it is the float conversion of
	// `sensor.stereo_data.disparity_image` before this call.
	get3DpointCloud(disparity32F,sensor.stereo_data.point_cloud_cartesian,transf_,Q);
	sensor.stereo_data.point_cloud_rgb=rectified_left_image;
	//sensor.stereo_data.point_cloud_rgb=sensor.to_cortical(sensor.stereo_data.point_cloud_rgb);

	///////////////////////////
	// Propagate uncertainty //
	///////////////////////////
	std::cout << "propagate uncertainty" << std::endl;

	// H1/H2, trans2 and the rectification maps are not declared here —
	// presumably members set up during calibration; trans2(0,0) looks like
	// the stereo baseline component — TODO confirm.
	sensor.computeUncertainty(sensor.stereo_data.disparity_values,
			H1,
			H2,
			stereo_rectification_map1_left,
			stereo_rectification_map2_left,
			stereo_rectification_map1_right,
			stereo_rectification_map2_right,
			trans2.at<double>(0,0));

	return sensor.stereo_data;
}