// Receives cartesian images and calibration matrices
StereoData PeripheralStereo::computeStereo(const cv::Mat & left_image,
                                           const cv::Mat & right_image,
                                           const cv::Mat & R1,
                                           const cv::Mat & R2,
                                           const cv::Mat & P1_,
                                           const cv::Mat & P2_,
                                           const cv::Mat & left_to_center)
{
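    // Q: 4x4 disparity-to-depth reprojection matrix, used by get3DpointCloud below.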
    cv::Mat Q=makeProjectionMatrix(P2_,1.0,1.0);
    std::cout << "Q:" << Q << std::endl;
    /* EMULATE HUMAN EYE (START) */
    // Convert cartesian to cortical
    /*sensor.stereo_data.left_cortical_image=sensor.to_cortical(left_image);
    sensor.stereo_data.right_cortical_image=sensor.to_cortical(right_image);
    // Convert cortical to cartesian
    sensor.stereo_data.left_retinal_image=sensor.to_cartesian(sensor.stereo_data.left_cortical_image);
    sensor.stereo_data.right_retinal_image=sensor.to_cartesian(sensor.stereo_data.right_cortical_image);//*/

    /* EMULATE HUMAN EYE (FINISH) */

    cv::Mat rectified_left_image;
    cv::Mat rectified_right_image;

    stereoRectify(sensor.stereo_data.left_retinal_image,
                  sensor.stereo_data.right_retinal_image,
                  R1,
                  R2,
                  P1_,
                  P2_,
                  rectified_left_image,
                  rectified_right_image);

    ////////////////////////////////////////////////////////////////////////////////
    // Compute Stereo (hacked in the cartesian domain; ideally this should be done in the cortical domain) //
    ////////////////////////////////////////////////////////////////////////////////

    getCartesianDisparityMap(rectified_left_image,
                             rectified_right_image,
                             sensor.stereo_data.disparity_image,
                             sensor.stereo_data.disparity_values);

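    // Embed the 3x3 rectification rotation R1 into a 4x4 homogeneous transform;
    // transf_ = R1_^T maps points from the rectified frame back to the original left camera frame.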
    cv::Mat R1_ = cv::Mat::eye(4,4,CV_64F);
    R1.copyTo(R1_(cv::Range(0,3), cv::Range(0,3)));
    //cv::Mat transf_=left_to_center*R1_.t();
    cv::Mat transf_=R1_.t();
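    // disparity32F is presumably the float disparity map filled by getCartesianDisparityMap above.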
    get3DpointCloud(disparity32F,sensor.stereo_data.point_cloud_cartesian,transf_,Q);

    sensor.stereo_data.point_cloud_rgb=rectified_left_image;
    //sensor.stereo_data.point_cloud_rgb=sensor.to_cortical(sensor.stereo_data.point_cloud_rgb);

    ///////////////////////////
    // Propagate uncertainty //
    ///////////////////////////

    std::cout << "propagate uncertainty" << std::endl;

    sensor.computeUncertainty(sensor.stereo_data.disparity_values,
                              H1,
                              H2,
                              stereo_rectification_map1_left,
                              stereo_rectification_map2_left,
                              stereo_rectification_map1_right,
                              stereo_rectification_map2_right,
                              trans2.at<double>(0,0));

    return sensor.stereo_data;
}
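For context, computeStereo above wraps the standard rectify / match / reproject stereo pipeline; makeProjectionMatrix, getCartesianDisparityMap and get3DpointCloud are project-specific helpers that are not shown here. The following is a minimal, self-contained sketch of the equivalent plain OpenCV pipeline; the calibration values and file names are illustrative placeholders, not the project's data.

#include <opencv2/opencv.hpp>

int main()
{
    // Illustrative intrinsics/extrinsics -- replace with real calibration data.
    cv::Mat K1 = (cv::Mat_<double>(3, 3) << 700, 0, 320, 0, 700, 240, 0, 0, 1);
    cv::Mat K2 = K1.clone();
    cv::Mat D1 = cv::Mat::zeros(1, 5, CV_64F), D2 = cv::Mat::zeros(1, 5, CV_64F);
    cv::Mat R = cv::Mat::eye(3, 3, CV_64F);                  // rotation from camera 1 to camera 2
    cv::Mat T = (cv::Mat_<double>(3, 1) << -0.12, 0.0, 0.0); // baseline of ~0.12 m

    cv::Mat left  = cv::imread("left.png",  cv::IMREAD_GRAYSCALE);
    cv::Mat right = cv::imread("right.png", cv::IMREAD_GRAYSCALE);
    cv::Size size = left.size();

    // Rectification: R1/R2 rotate each camera, P1/P2 are the rectified projection
    // matrices, Q is the 4x4 disparity-to-depth reprojection matrix.
    cv::Mat R1, R2, P1, P2, Q;
    cv::stereoRectify(K1, D1, K2, D2, size, R, T, R1, R2, P1, P2, Q);

    cv::Mat map1l, map2l, map1r, map2r, rect_left, rect_right;
    cv::initUndistortRectifyMap(K1, D1, R1, P1, size, CV_32FC1, map1l, map2l);
    cv::initUndistortRectifyMap(K2, D2, R2, P2, size, CV_32FC1, map1r, map2r);
    cv::remap(left,  rect_left,  map1l, map2l, cv::INTER_LINEAR);
    cv::remap(right, rect_right, map1r, map2r, cv::INTER_LINEAR);

    // Block matching: StereoSGBM returns fixed-point disparities scaled by 16.
    cv::Ptr<cv::StereoSGBM> sgbm = cv::StereoSGBM::create(0, 64, 9);
    cv::Mat disp16s, disp32f;
    sgbm->compute(rect_left, rect_right, disp16s);
    disp16s.convertTo(disp32f, CV_32F, 1.0 / 16.0);

    // Reproject the float disparity map to a 3D point cloud (one cv::Vec3f per pixel).
    cv::Mat cloud;
    cv::reprojectImageTo3D(disp32f, cloud, Q, true);

    return 0;
}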
Example #2
File: cam.cpp Project: trebb/phoml
//#######################################################################################
// Determine the relative orientation of two cameras
Matrix Cam::TransInRellativeOrientierung( Cam &C_l, Cam &C_r, Cam &C_l_rell ,Cam &C_r_rell)
{
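 // Expresses the right camera's pose relative to the left camera: C_l_rell is the
 // left camera moved to the origin with zero rotation, C_r_rell is the right camera
 // in that frame; the left camera's rotation matrix R1 is returned.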
 //#############
 // Left camera

	
	 // Rotation matrix of the left camera (in object coordinates)
	 Rotation_matrix R1_(Rotation_matrix::math,C_l.get_rotX(),C_l.get_rotY(),C_l.get_rotZ());
	 Matrix R1=R1_.get_Matrix();
	 //R1.MatShow(R1);
 
	 // For the transformation from object coordinates into the coordinate system of camera 1
	 Matrix RotInKooC_l;
	 RotInKooC_l=R1.MatTrans();

 
 //########################
 // Set up the left camera
 C_l_rell = C_l; // copy the calibration values
 
 Point O_l(0.0,0.0,0.0);
 //C_l_rell.m_O = O_l;
 C_l_rell.set_O( O_l );
 
  // Rotation angles
 C_l_rell.m_rotX = 0.0;
 C_l_rell.m_rotY = 0.0;
 C_l_rell.m_rotZ = 0.0;



 //#############
 // Right camera
 
	
     // Rotation matrix of the right camera (in object coordinates)
     Rotation_matrix R2_(Rotation_matrix::math,C_r.get_rotX(),C_r.get_rotY(),C_r.get_rotZ());
	 Matrix R2=R2_.get_Matrix();
	 //R2.MatShow(R2);


 //########################
 // Set up the right camera
 C_r_rell = C_r; // copy the calibration values


     //#######################################################################
	 // Transform the projection center of camera 2 into the coordinate system of camera 1
	 // R^T * (O_r - O_l) = new projection center of camera 2 (in camera 1 coordinates)
	 Matrix O_Rd(3,1,Null);
	 O_Rd(0,0)=C_r.get_O().get_X() - C_l.get_O().get_X();
	 O_Rd(1,0)=C_r.get_O().get_Y() - C_l.get_O().get_Y();
	 O_Rd(2,0)=C_r.get_O().get_Z() - C_l.get_O().get_Z();
 
	 Matrix O_R=RotInKooC_l.MatMult( O_Rd );
	 Point O_r(O_R(0,0),O_R(1,0),O_R(2,0));

 // Set the new projection center of camera 2 in the coordinate system of camera 1
 //C_r_rell.m_O=O_r;
 C_r_rell.set_O(O_r);
 

	 //#######################################################################
     // Compute the rotation angles from Cam_r to Cam_l
	 Matrix Rotw;
	 Rotw=RotInKooC_l.MatMult(R2);
	 //Rotw.MatShow(Rotw);

	   double o,p,k;
	   double r11,r12,r13,r21,r22,r23,r31,r32,r33;

	   r11 =Rotw(0,0); 
	   r21 =Rotw(1,0); 
	   r31 =Rotw(2,0); 
	   r12 =Rotw(0,1); 
	   r22 =Rotw(1,1); 
	   r32 =Rotw(2,1); 
	   r13 =Rotw(0,2); 
	   r23 =Rotw(1,2); 
	   r33 =Rotw(2,2); 
// new
	   double threshold = 9.99999e-14;
       if(fabs(r11)<threshold) r11=0.0;
	   if(fabs(r21)<threshold) r21=0.0;
	   if(fabs(r31)<threshold) r31=0.0;
	   if(fabs(r12)<threshold) r12=0.0;
	   if(fabs(r22)<threshold) r22=0.0;
	   if(fabs(r32)<threshold) r32=0.0;
	   if(fabs(r13)<threshold) r13=0.0;
	   if(fabs(r23)<threshold) r23=0.0;
	   if(fabs(r33)<threshold) r33=0.0;
// END new
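	   // Angle extraction consistent with R = Rx(omega) * Ry(phi) * Rz(kappa), i.e. r13 = sin(phi).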
	   p = asin (r13);
// old
/*	   C = cos(p);

	   if(fabs(p)>0.0005)
	   {
		   trx_ =  r33 / C;
		   try_ = -r23 / C;
		   o    =  atan2(try_, trx_);

		   trx_ =  r11 / C;
		   try_ = -r12 / C;
		   k    =  atan2(try_, trx_);
	   }
	   else
	   {
		   o    =  0.0;
		   trx_ =  r22;
		   try_ =  r21;
		   k    =  atan2(try_, trx_);
	   }
	   */
// END old
//neu
	   if(r23||r33)
	    o = atan2(-r23,r33);
	   else
	    o = atan2(-r22,r32);
	   
	     // Test calculation
	       //double Omega1 = atan2(-r23,r33);
	       //double Omega2 = atan2(-r22,r32);
	   
	   if(r12||r11)
        k = atan2(-r12,r11);
	   else
	    k = atan2(-r22,r21);

	     // Test calculation
	       //double Kappa1 = atan2(-r12,r11);
	       //double Kappa2 = atan2(-r22,r21);

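	   // Gimbal lock: phi = +/-90 deg, omega and kappa are coupled, so fix kappa = 0.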
	   if(fabs(r13-1.0) < threshold || fabs(r13+1.0) < threshold)
	   {
	    o = atan2(r32,r22);
		k = 0.0;
	   }
    //END neu
	   o = clamp(o, -PI, PI);
	   p = clamp(p, -PI, PI);
	   k = clamp(k, -PI, PI);

	   // Test output
	   //Rot R3_(o,p,k);
	   //Matrix R3=R3_.R;
	   //R2.MatShow(R3);

 // Set the new rotation angles for camera 2
 C_r_rell.m_rotX = o;
 C_r_rell.m_rotY = p;
 C_r_rell.m_rotZ = k;


return R1;
}
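The angle extraction in this example (including the gimbal-lock branch) can be reproduced without the project's Matrix and Rotation_matrix classes. The sketch below assumes the omega-phi-kappa convention implied by the formulas above, R = Rx(omega) * Ry(phi) * Rz(kappa); all names are illustrative.

#include <cmath>
#include <cstdio>

// Extract omega/phi/kappa from R = Rx(omega) * Ry(phi) * Rz(kappa).
static void anglesFromRotation(const double R[3][3], double &o, double &p, double &k)
{
    const double eps = 1e-13;
    p = std::asin(R[0][2]);                    // r13 = sin(phi)
    if (std::fabs(std::fabs(R[0][2]) - 1.0) < eps)
    {
        // Gimbal lock (phi = +/-90 deg): omega and kappa are coupled, so fix kappa = 0.
        o = std::atan2(R[2][1], R[1][1]);
        k = 0.0;
    }
    else
    {
        o = std::atan2(-R[1][2], R[2][2]);     // omega from -r23, r33
        k = std::atan2(-R[0][1], R[0][0]);     // kappa from -r12, r11
    }
}

int main()
{
    // Build R = Rx(o) * Ry(p) * Rz(k) for known test angles and check the round trip.
    const double o = 0.1, p = -0.3, k = 0.7;
    const double co = std::cos(o), so = std::sin(o);
    const double cp = std::cos(p), sp = std::sin(p);
    const double ck = std::cos(k), sk = std::sin(k);
    const double R[3][3] = {
        { cp * ck,                 -cp * sk,                  sp      },
        { co * sk + so * sp * ck,   co * ck - so * sp * sk,  -so * cp },
        { so * sk - co * sp * ck,   so * ck + co * sp * sk,   co * cp }
    };

    double o2, p2, k2;
    anglesFromRotation(R, o2, p2, k2);
    std::printf("omega=%f phi=%f kappa=%f\n", o2, p2, k2);   // should print 0.1 -0.3 0.7
    return 0;
}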