//------------------------------------------------------------------------------
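// Builds a look-up table (LUT) that maps every pixel of the desired perspective
// image onto the original fisheye image. Each destination pixel (i,j) is turned
// into a 3D viewing ray M = (i - Nxc, j - Nyc, Nz), with Nz = -width/sf, and the
// ray is projected into the fisheye image with world2cam(). Following the
// OCamCalib convention, the x axis runs along the rows and the y axis along the
// columns; a larger sf gives a wider (more zoomed-out) perspective view.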
void create_perspecive_undistortion_LUT( CvMat *mapx, CvMat *mapy, struct ocam_model *ocam_model, float sf)
{
     int i, j;
     int width = mapx->cols; //New width
     int height = mapx->rows;//New height
     int num = height*width;     // total number of pixels (only used by the commented-out debug code below)
     float *data_mapx = mapx->data.fl;
     float *data_mapy = mapy->data.fl;
     float Nxc = height/2.0;
     float Nyc = width/2.0;
     float Nz  = -width/sf;
     double M[3];
     double m[2];
    
	 //array = new double [num*3];
	 

	for (i=0; i<height; i++){
        for (j=0; j<width; j++)
         {   
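             // Back-project the destination pixel (i,j) onto a 3D ray and project
             // that ray into the fisheye image. world2cam() returns (row, col) in
             // m[0], m[1], hence the swapped indices when filling mapx/mapy.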

             M[0] = (i - Nxc);
             M[1] = (j - Nyc);
             M[2] = Nz;
             world2cam(m, M, ocam_model);
             *( data_mapx + i*width+j ) = (float) m[1];
             *( data_mapy + i*width+j ) = (float) m[0];
			 /*cam2world(M, m, ocam_model);
			 array[3*(924*i+j)] = M[0];
			 array[3*(924*i+j)+1] = M[1];
			 array[3*(924*i+j)+2] = M[2];*/

         }
	}

	//writeFile();
	//delete[] array;
	mapx_persp_left = Mat(mapx, true); // copy the LUT data into the global cv::Mat
	mapy_persp_left = Mat(mapy, true); // copy the LUT data into the global cv::Mat
	//cam2world(M, m, ocam_model);
}
// Example #2
//------------------------------------------------------------------------------
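// Same LUT construction as above; this is the plain version of the function,
// without the debug buffer and without copying the maps into global cv::Mat objects.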
void create_perspecive_undistortion_LUT( CvMat *mapx, CvMat *mapy, struct ocam_model *ocam_model, float sf)
{
     int i, j;
     int width = mapx->cols; //New width
     int height = mapx->rows;//New height
     float *data_mapx = mapx->data.fl;
     float *data_mapy = mapy->data.fl;
     float Nxc = height/2.0;
     float Nyc = width/2.0;
     float Nz  = -width/sf;
     double M[3];
     double m[2];

     for (i=0; i<height; i++)
         for (j=0; j<width; j++)
         {
             M[0] = (i - Nxc);
             M[1] = (j - Nyc);
             M[2] = Nz;
             world2cam(m, M, ocam_model);
             *( data_mapx + i*width+j ) = (float) m[1];
             *( data_mapy + i*width+j ) = (float) m[0];
         }
}
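
// NOTE: main() below uses several variables that are not declared in this
// listing; in the original example they are presumably file-scope globals,
// since the commented-out trackbar callbacks also refer to them. The
// declarations below (and the initial value of sf) are assumptions added so
// the listing is self-contained; the OpenCV headers and using-directives are
// likewise assumed to be included at the top of the original file.
Mat src1, src2;                          // input fisheye images (left / right)
Mat dst_persp1, dst_persp2;              // rectified perspective images
CvMat *mapx_persp1, *mapy_persp1;        // undistortion LUTs for camera 1 (C API)
CvMat *mapx_persp2, *mapy_persp2;        // undistortion LUTs for camera 2 (C API)
Mat mapx_persp_left, mapy_persp_left;    // LUTs copied into cv::Mat for remap()
Mat mapx_persp_right, mapy_persp_right;  // only needed if the right-camera block is enabled
float sf = 4;                            // zoom-out factor for the perspective view (assumed value)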
//---------------------------------------------------------------------------
int main(int argc, char *argv[])
{   

  struct ocam_model o1, o2;   // calibration models for the left and right fisheye cameras
  get_ocam_model(&o1, "calib_results_fisheye1.txt");
  get_ocam_model(&o2, "calib_results_fisheye2.txt");
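  // get_ocam_model() loads a calibration file written by the OCamCalib toolbox:
  // the polynomial pol, its inverse invpol, the distortion centre (xc, yc) and
  // the image size, which are printed below.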
  int i;
  printf("pol =\n");
  for (i=0; i<o1.length_pol; i++)    printf("\t%e\n", o1.pol[i]);
  printf("\n");

  printf("invpol =\n");
  for (i=0; i<o1.length_invpol; i++) printf("\t%e\n", o1.invpol[i]);
  printf("\n");

  printf("\nxc = %f\nyc = %f\n\nwidth = %d\nheight = %d\n", o1.xc, o1.yc, o1.width, o1.height);
                       

  
  /* --------------------------------------------------------------------*/
  /* WORLD2CAM projects 3D point into the image                          */
  /* --------------------------------------------------------------------*/
  double point3D[3] = { 100 , 200 , -300 };       // a sample 3D point
  double point2D[2];                              // the image point in pixel coordinates  
  world2cam(point2D, point3D, &o1); // The behaviour of this function is the same as in MATLAB
  printf("\nworld2cam: pixel coordinates of the 3D point projected onto image 1\n");
  printf("m_row= %2.4f, m_col=%2.4f\n", point2D[0], point2D[1]);



  cam2world(point3D, point2D, &o1); 
  printf("\ncam2world: pixel coordinates back-projected onto the unit sphere of camera 1 (x^2+y^2+z^2=1)\n");
  printf("x= %2.4f, y=%2.4f, z=%2.4f\n", point3D[0], point3D[1], point3D[2]);
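  // NOTE: point3D now holds the unit-norm viewing direction corresponding to
  // point2D, not the original 3D point defined above.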

  /*cam2world(point3D, point2D, &o2); 
  printf("\ncam2world: coordinates back-projected onto the unit sphere2 (x^2+y^2+z^2=1)\n");
  printf("x= %2.4f, y=%2.4f, z=%2.4f\n", point3D[0], point3D[1], point3D[2]);*/

  

  /* --------------------------------------------------------------------*/  
  
  src1 = imread("./img_l.jpg" );
  src2 = imread("./img_r.jpg" );

  dst_persp1   = Mat( src1.size(), CV_8UC3 );   // 8-bit, 3-channel output images
  dst_persp2   = Mat( src2.size(), CV_8UC3 );

  
 
  mapx_persp1 = cvCreateMat(src1.rows, src1.cols, CV_32FC1);
  mapy_persp1 = cvCreateMat(src1.rows, src1.cols, CV_32FC1);
  mapx_persp2 = cvCreateMat(src2.rows, src2.cols, CV_32FC1);
  mapy_persp2 = cvCreateMat(src2.rows, src2.cols, CV_32FC1);
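  // The LUTs have the same size as the source images and type CV_32FC1, one of
  // the map formats accepted by remap() as an (x-map, y-map) pair.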

  //namedWindow("rectified image1", 1);
  //namedWindow("rectified image2", 1);
  //createTrackbar("scaling factor", "rectified image1", &sf, 25, onTrackbar1);
  //createTrackbar("scaling factor", "rectified image2", &sf, 25, onTrackbar2);
  //onTrackbar1(0, 0);
  //onTrackbar2(0, 0);

	create_perspecive_undistortion_LUT( mapx_persp1, mapy_persp1, &o1, sf );
	mapx_persp_left = Mat(mapx_persp1, true); // copy the LUT data into a cv::Mat
	mapy_persp_left = Mat(mapy_persp1, true); // copy the LUT data into a cv::Mat
	//for( int j = 0; j < mapx_persp_left.rows; j++ ){ 
	//	for( int i = 924; i < mapx_persp_left.cols; i++ ){

	//			 mapx_persp_left.at<float>(j,i) = 0 ;
	//			 mapy_persp_left.at<float>(j,i) = 0 ;
	//			
	// }
 // }
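	// Warp the fisheye image through the LUT: every output pixel is sampled from
	// src1 at the coordinates stored in (mapx, mapy), using bilinear interpolation
	// and black for pixels that map outside the source image.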

	remap(src1, dst_persp1, mapx_persp_left, mapy_persp_left, CV_INTER_LINEAR, BORDER_CONSTANT, Scalar(0,0,0) );
	
	cout << "image 1 processed" << endl;



	//create_perspecive_undistortion_LUT( mapx_persp2, mapy_persp2, &o2, sf );
	//mapx_persp_right = Mat(mapx_persp2); // to copy the data
	//mapy_persp_right = Mat(mapy_persp2); // to copy the data
	//remap(src2, dst_persp2, mapx_persp_right, mapy_persp_right, CV_INTER_LINEAR, BORDER_CONSTANT, Scalar(0,0,0) );

   imshow( "rectified image1", dst_persp1 );

   //imshow( "rectified image2", dst_persp2 );



  cvWaitKey();

 

  return 0;
}