Example #1
void SE_average<3>::append(const mrpt::poses::CPose3D &p, const double weight) {
	m_count += weight;
	m_accum_x += weight * p.x();
	m_accum_y += weight * p.y();
	m_accum_z += weight * p.z();
	m_rot_part.append(p.getRotationMatrix(), weight);
}
Example #2
void SE_average<3>::get_average(mrpt::poses::CPose3D &ret_mean) const
{
	ASSERT_ABOVE_(m_count,0);
	ret_mean.x( m_accum_x / m_count );
	ret_mean.y( m_accum_y / m_count );
	ret_mean.z( m_accum_z / m_count );
	const_cast<SO_average<3>*>(&m_rot_part)->enable_exception_on_undeterminate = this->enable_exception_on_undeterminate;
	ret_mean.setRotationMatrix( m_rot_part.get_average() );
}
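Together, append() and get_average() form a simple accumulate-then-query API: translations are averaged arithmetically by weight, rotations through the SO(3) sub-averager. A minimal usage sketch, assuming the SE_average class is available via MRPT's <mrpt/poses/SO_SE_average.h> header:

#include <mrpt/poses/CPose3D.h>
#include <mrpt/poses/SO_SE_average.h>

void se3_average_demo()
{
	mrpt::poses::SE_average<3> avg;
	// Two poses along X, with weights 1 and 3:
	avg.append(mrpt::poses::CPose3D(1.0, 0, 0, 0, 0, 0), 1.0);
	avg.append(mrpt::poses::CPose3D(3.0, 0, 0, 0, 0, 0), 3.0);
	mrpt::poses::CPose3D mean;
	avg.get_average(mean);  // weighted mean: mean.x() == (1*1 + 3*3)/4 == 2.5
}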
Example #3
// Transform all 6 components for a change of reference frame from "A" to
// another frame "B" whose rotation with respect to "A" is given by `rot`.
// The translational part of the pose is ignored.
void TTwist3D::rotate(const mrpt::poses::CPose3D & rot)
{
	const TTwist3D t = *this;
	const mrpt::math::CMatrixDouble33 & R = rot.getRotationMatrix();
	vx=R(0,0)*t.vx+R(0,1)*t.vy+R(0,2)*t.vz;
	vy=R(1,0)*t.vx+R(1,1)*t.vy+R(1,2)*t.vz;
	vz=R(2,0)*t.vx+R(2,1)*t.vy+R(2,2)*t.vz;

	wx=R(0,0)*t.wx+R(0,1)*t.wy+R(0,2)*t.wz;
	wy=R(1,0)*t.wx+R(1,1)*t.wy+R(1,2)*t.wz;
	wz=R(2,0)*t.wx+R(2,1)*t.wy+R(2,2)*t.wz;
}
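For instance, rotating a pure forward velocity by 90 degrees about +Z should move it onto the +Y axis. A small sketch, assuming TTwist3D lives in mrpt::math's lightweight geometry header:

#include <cmath>
#include <mrpt/math/lightweight_geom_data.h>  // TTwist3D (assumed location)
#include <mrpt/poses/CPose3D.h>

void twist_rotate_demo()
{
	mrpt::math::TTwist3D t;
	t.vx = 1.0; t.vy = 0; t.vz = 0;  // 1 m/s along the local +X axis
	t.wx = 0;   t.wy = 0; t.wz = 0;
	// yaw = +90 deg about Z; the (zero) translation of `rot` is ignored:
	t.rotate(mrpt::poses::CPose3D(0, 0, 0, 0.5 * M_PI, 0, 0));
	// Expect t.vx ~= 0 and t.vy ~= 1 afterwards.
}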
Example #4
void run_pc_filter_test(
	const double map2_x_off, const double map2_tim_off,
	const size_t expected_m1_count, const size_t expected_m2_count)
{
	const double pts1[8][3] = {
		{1, 0, 0}, {1.03, 0, 0},	 {1, 1, 0},  {1.01, 1.02, 0},
		{0, 1, 0}, {-0.01, 1.01, 0}, {-1, 0, 0}, {-1.01, 0.02, 0}};
	const mrpt::poses::CPose3D pts1_pose(0, 0, 0, 0, 0, 0);
	const mrpt::system::TTimeStamp pts1_tim = mrpt::system::now();

	const mrpt::poses::CPose3D pts2_pose(0.5, 0, 0, 0, 0, 0);
	const mrpt::system::TTimeStamp pts2_tim =
		mrpt::system::timestampAdd(pts1_tim, 0.2 + map2_tim_off);

	mrpt::maps::CSimplePointsMap map1, map2;
	for (size_t i = 0; i < sizeof(pts1) / sizeof(pts1[0]); i++)
		map1.insertPoint(pts1[i][0], pts1[i][1], pts1[i][2]);
	for (size_t i = 0; i < sizeof(pts1) / sizeof(pts1[0]); i++)
	{
		double x, y, z;
		pts2_pose.inverseComposePoint(
			pts1[i][0], pts1[i][1], pts1[i][2], x, y, z);
		// Optionally introduce 1 outlier:
		if (i == 1) x += map2_x_off;
		map2.insertPoint(x, y, z);
	}

	mrpt::maps::CPointCloudFilterByDistance pc_filter;
	mrpt::maps::CPointCloudFilterByDistance::TExtraFilterParams extra_params;
	std::vector<bool> deletion_mask;
	extra_params.out_deletion_mask = &deletion_mask;

	pc_filter.filter(&map1, pts1_tim, pts1_pose, &extra_params);
	EXPECT_EQ(map1.size(), expected_m1_count);

	pc_filter.filter(&map2, pts2_tim, pts2_pose, &extra_params);
	EXPECT_EQ(map2.size(), expected_m2_count);
}
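In a GoogleTest suite this helper is called once per scenario; a hypothetical invocation (the expected point counts here are illustrative assumptions, not values taken from the real test):

TEST(CPointCloudFilterByDistance, noOutliers)
{
	// No X offset and no extra time offset between the two scans:
	// assume the filter keeps all 8 points of both maps.
	run_pc_filter_test(0.0, 0.0, 8, 8);
}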
Example #5
TPose3D::TPose3D(const mrpt::poses::CPose3D &p):x(p.x()),y(p.y()),z(p.z()),yaw(p.yaw()),pitch(p.pitch()),roll(p.roll())	{}
Example #6
TPoint3D::TPoint3D(const mrpt::poses::CPose3D &p):x(p.x()),y(p.y()),z(p.z())	{}
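Both converting constructors strip a full CPose3D down to its plain-old-data counterpart; a minimal sketch, assuming the lightweight types come from mrpt::math:

#include <mrpt/math/lightweight_geom_data.h>  // TPose3D, TPoint3D (assumed location)
#include <mrpt/poses/CPose3D.h>

void lightweight_conversion_demo()
{
	const mrpt::poses::CPose3D p(1.0, 2.0, 3.0, 0.1, 0.2, 0.3);
	const mrpt::math::TPose3D  pp(p);  // keeps x, y, z, yaw, pitch, roll
	const mrpt::math::TPoint3D pt(p);  // keeps only x, y, z
}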
Example #7
void run_test(const mrpt::poses::CPose3D &incr)
{
	my_srba_t rba;     //  Create an empty RBA problem

	rba.get_time_profiler().disable();

	// --------------------------------------------------------------------------------
	// Set parameters 
	// --------------------------------------------------------------------------------
	rba.setVerbosityLevel( 0 );   // 0: None; 1:Important only; 2:Verbose

	rba.parameters.srba.use_robust_kernel = false;

	// =========== Topology parameters ===========
	rba.parameters.srba.max_tree_depth       = 3;
	rba.parameters.srba.max_optimize_depth   = 3;
	// ===========================================

	// --------------------------------------------------------------------------------
	// Define observations of KF #0:
	// --------------------------------------------------------------------------------
	my_srba_t::new_kf_observations_t  list_obs;
	my_srba_t::new_kf_observation_t   obs_field;

	obs_field.is_fixed = false;   // Landmarks have unknown relative positions (i.e. treat them as unknowns to be estimated)
	obs_field.is_unknown_with_init_val = false; // We don't have any guess on the initial LM position (will invoke the inverse sensor model)

	for (size_t i=0;i<sizeof(dummy_obs)/sizeof(dummy_obs[0]);i++)
	{
		obs_field.obs.feat_id = dummy_obs[i].landmark_id;
		obs_field.obs.obs_data.pt.x = dummy_obs[i].x;
		obs_field.obs.obs_data.pt.y = dummy_obs[i].y;
		obs_field.obs.obs_data.pt.z = dummy_obs[i].z;
		list_obs.push_back( obs_field );
	}


	//  Here happens the main stuff: create Key-frames, build structures, run optimization, etc.
	//  ============================================================================================
	my_srba_t::TNewKeyFrameInfo new_kf_info;
	rba.define_new_keyframe(
		list_obs,      // Input observations for the new KF
		new_kf_info,   // Output info
		true           // Also run local optimization?
		);

	// --------------------------------------------------------------------------------
	// Define observations of next KF:
	// --------------------------------------------------------------------------------
	list_obs.clear();

	for (size_t i=0;i<sizeof(dummy_obs)/sizeof(dummy_obs[0]);i++)
	{
		obs_field.obs.feat_id = dummy_obs[i].landmark_id;
		obs_field.obs.obs_data.pt.x = dummy_obs[i].x;
		obs_field.obs.obs_data.pt.y = dummy_obs[i].y;
		obs_field.obs.obs_data.pt.z = dummy_obs[i].z;

		if (INVERSE_INCR)
			incr.inverseComposePoint(
				obs_field.obs.obs_data.pt.x, obs_field.obs.obs_data.pt.y, obs_field.obs.obs_data.pt.z,
				obs_field.obs.obs_data.pt.x, obs_field.obs.obs_data.pt.y, obs_field.obs.obs_data.pt.z);
		else
			incr.composePoint(obs_field.obs.obs_data.pt, obs_field.obs.obs_data.pt);

		list_obs.push_back( obs_field );
	}
	
	//  Here happens the main stuff: create Key-frames, build structures, run optimization, etc.
	//  ============================================================================================
	rba.define_new_keyframe(
		list_obs,      // Input observations for the new KF
		new_kf_info,   // Output info
		true           // Also run local optimization?
		);

	mrpt::poses::CPose3D estIncr = rba.get_k2k_edges()[0].inv_pose;
	if (INVERSE_INCR)
		estIncr.inverse();

	EXPECT_NEAR( (incr.getHomogeneousMatrixVal()-estIncr.getHomogeneousMatrixVal()).array().abs().sum(),0, 1e-3)
		<< "=> Ground truth: " << incr << " Inverse: " << (INVERSE_INCR ? "YES":"NO") << endl
		<< "=> inv_pose of KF-to-KF edge #0 (relative pose of KF#0 wrt KF#1):\n" << estIncr << endl
		<< "=> Optimization error: " << new_kf_info.optimize_results.total_sqr_error_init << " -> " << new_kf_info.optimize_results.total_sqr_error_final << endl;
}
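A driver would call run_test() with a small ground-truth increment between the two keyframes, e.g. (a hypothetical invocation; INVERSE_INCR and dummy_obs are defined elsewhere in the file):

// 10 cm forward plus a ~1.1 deg yaw increment (angles in radians):
run_test(mrpt::poses::CPose3D(0.1, 0.0, 0.0, 0.02, 0.0, 0.0));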
Example #8
void run_test()
{
	// Declare a typedef "my_srba_t" for easily referring to my RBA problem type:
	typedef RbaEngine<
		kf2kf_poses::SE3,                // Parameterization  KF-to-KF poses
		landmarks::Euclidean3D,          // Parameterization of landmark positions
		observations::Cartesian_3D,       // Type of observations
		typename DATASET::my_srba_options
		>
		my_srba_t;

	my_srba_t rba;     //  Create an empty RBA problem

	rba.get_time_profiler().disable();

	// --------------------------------------------------------------------------------
	// Set parameters
	// --------------------------------------------------------------------------------
	rba.setVerbosityLevel( 0 );   // 0: None; 1:Important only; 2:Verbose

	rba.parameters.srba.use_robust_kernel = true;

	rba.parameters.obs_noise.std_noise_observations = 0.1;

	// =========== Topology parameters ===========
	rba.parameters.srba.edge_creation_policy = mrpt::srba::ecpICRA2013;
	rba.parameters.srba.max_tree_depth       = 3;
	rba.parameters.srba.max_optimize_depth   = 3;
	// ===========================================

	DATASET::setExtraOptions(rba);


	size_t N0,N1;
	mrpt::poses::CPose3DQuat GT0,GT1;

	const basic_euclidean_dataset_entry_t *d0 = DATASET::getData0(N0,GT0);
	const basic_euclidean_dataset_entry_t *d1 = DATASET::getData1(N1,GT1);


	// --------------------------------------------------------------------------------
	// Define observations of KF #0:
	// --------------------------------------------------------------------------------
	typename my_srba_t::new_kf_observations_t  list_obs;
	typename my_srba_t::new_kf_observation_t   obs_field;

	obs_field.is_fixed = false;   // Landmarks have unknown relative positions (i.e. treat them as unknowns to be estimated)
	obs_field.is_unknown_with_init_val = false; // We don't have any guess on the initial LM position (will invoke the inverse sensor model)

	for (size_t i=0;i<N0;i++)
	{
		obs_field.obs.feat_id = d0[i].landmark_id;
		obs_field.obs.obs_data.pt.x = d0[i].x + randomGenerator.drawGaussian1D(0,SENSOR_NOISE_STD);
		obs_field.obs.obs_data.pt.y = d0[i].y + randomGenerator.drawGaussian1D(0,SENSOR_NOISE_STD);
		obs_field.obs.obs_data.pt.z = d0[i].z + randomGenerator.drawGaussian1D(0,SENSOR_NOISE_STD);
		list_obs.push_back( obs_field );
	}


	//  Here happens the main stuff: create Key-frames, build structures, run optimization, etc.
	//  ============================================================================================
	typename my_srba_t::TNewKeyFrameInfo new_kf_info;
	rba.define_new_keyframe(
		list_obs,      // Input observations for the new KF
		new_kf_info,   // Output info
		true           // Also run local optimization?
		);

	// --------------------------------------------------------------------------------
	// Define observations of next KF:
	// --------------------------------------------------------------------------------
	list_obs.clear();
	for (size_t i=0;i<N1;i++)
	{
		obs_field.obs.feat_id = d1[i].landmark_id;
		obs_field.obs.obs_data.pt.x = d1[i].x + randomGenerator.drawGaussian1D(0,SENSOR_NOISE_STD);
		obs_field.obs.obs_data.pt.y = d1[i].y + randomGenerator.drawGaussian1D(0,SENSOR_NOISE_STD);
		obs_field.obs.obs_data.pt.z = d1[i].z + randomGenerator.drawGaussian1D(0,SENSOR_NOISE_STD);
		list_obs.push_back( obs_field );
	}

	rba.define_new_keyframe(
		list_obs,      // Input observations for the new KF
		new_kf_info,   // Output info
		true           // Also run local optimization?
		);

	// Compare to ground truth:
	// Relative pose of KF#1 wrt KF#0:
	const mrpt::poses::CPose3D P = -rba.get_k2k_edges()[0]
#ifdef SRBA_WORKAROUND_MSVC9_DEQUE_BUG
		->
#else
		.
#endif
		inv_pose;

	const mrpt::poses::CPose3D P_GT = GT1-GT0;

	EXPECT_NEAR(0, (P.getAsVectorVal()-P_GT.getAsVectorVal()).array().abs().sum(), 1e-2 )
		<< "P : " << P << endl
		<< "GT: " << P_GT << endl;
}
Example #9
void CViewerContainer::showRobotDirection(const mrpt::poses::CPose3D& pose)
{
	forEachGl([pose](CGlWidget* gl) { gl->setSelected(pose.asTPose()); });
}
Example #10
File: test.cpp  Project: jhuxiang/mrpt
//
// Given a list of chessboard images, the number of corners (nx, ny)
// on the chessboards, and a flag: useUncalibrated for calibrated (0) or
// uncalibrated (1: use cvStereoCalibrate(), 2: compute fundamental
// matrix separately) stereo. Calibrate the cameras and display the
// rectified results along with the computed disparity images.
//
void StereoCalib(
    const char* imageList,
    const char* sOutFile,
    int nx, int ny,
    int useUncalibrated,
    const float squareSize,
    const double alpha,
    const bool flag_fix_aspect_ratio,
    const bool flag_zero_tangent_dist,
    const bool flag_same_focal_len
)
{
    int displayCorners = 1;
    bool isVerticalStereo = false;  // OpenCV can handle left-right
    // or up-down camera arrangements
    const int maxScale = 1;

    FILE* f = fopen(imageList, "rt");

    int i, j, lr, nframes, n = nx*ny, N = 0;
    vector<string> imageNames[2];
    vector<CvPoint3D32f> objectPoints;
    vector<CvPoint2D32f> points[2];
    vector<int> npoints;
    vector<uchar> active[2];
    vector<CvPoint2D32f> temp(n);
    CvSize imageSize = {0,0};
    // ARRAY AND VECTOR STORAGE:
    double M1[3][3], M2[3][3], D1[5], D2[5];
    double R[3][3], T[3], E[3][3], F[3][3];
    CvMat _M1 = cvMat(3, 3, CV_64F, M1 );
    CvMat _M2 = cvMat(3, 3, CV_64F, M2 );
    CvMat _D1 = cvMat(1, 5, CV_64F, D1 );
    CvMat _D2 = cvMat(1, 5, CV_64F, D2 );
    CvMat _R = cvMat(3, 3, CV_64F, R );
    CvMat _T = cvMat(3, 1, CV_64F, T );
    CvMat _E = cvMat(3, 3, CV_64F, E );
    CvMat _F = cvMat(3, 3, CV_64F, F );
    if( displayCorners )
        cvNamedWindow( "corners", 1 );

    // READ IN THE LIST OF CHESSBOARDS:
    if( !f )
    {
        fprintf(stderr, "can not open file %s\n", imageList );
        return;
    }

    std::vector<std::string> lst_image_files;


    printf("Starting main loop\n");
    for(i=0;; i++)
    {
        printf("Iteration %d\n", i);
        char buf[1024];
        int count = 0, result=0;
        lr = i % 2;
        vector<CvPoint2D32f>& pts = points[lr];
        if( !fgets( buf, sizeof(buf)-3, f ))
            break;
        size_t len = strlen(buf);
        while( len > 0 && isspace(buf[len-1]))
            buf[--len] = '\0';
        if( buf[0] == '#')
            continue;
        IplImage* img = cvLoadImage( buf, 0 );
        if( !img )
            break;

        lst_image_files.push_back(string(buf));

        imageSize = cvGetSize(img);
        imageNames[lr].push_back(buf);
        //FIND CHESSBOARDS AND CORNERS THEREIN:
        for( int s = 1; s <= maxScale; s++ )
        {
            IplImage* timg = img;
            if( s > 1 )
            {
                timg = cvCreateImage(cvSize(img->width*s,img->height*s),
                                     img->depth, img->nChannels );
                cvResize( img, timg, CV_INTER_CUBIC );
            }
            result = cvFindChessboardCorners( timg, cvSize(nx, ny),
                                              &temp[0], &count,
                                              CV_CALIB_CB_ADAPTIVE_THRESH |
                                              CV_CALIB_CB_NORMALIZE_IMAGE);
            if( timg != img )
                cvReleaseImage( &timg );
            if( result || s == maxScale )
                for( j = 0; j < count; j++ )
                {
                    temp[j].x /= s;
                    temp[j].y /= s;
                }
            if( result )
                break;
        }
        if( displayCorners )
        {
            printf("%s\n", buf);
            IplImage* cimg = cvCreateImage( imageSize, 8, 3 );
            cvCvtColor( img, cimg, CV_GRAY2BGR );
            cvDrawChessboardCorners( cimg, cvSize(nx, ny), &temp[0],
                                     count, result );
            cvShowImage( "corners", cimg );
            cvReleaseImage( &cimg );
            int c = cvWaitKey(100);
            if( c == 27 || c == 'q' || c == 'Q' ) //Allow ESC to quit
                exit(-1);
        }
        else
            putchar('.');
        N = pts.size();
        pts.resize(N + n, cvPoint2D32f(0,0));
        active[lr].push_back((uchar)result);
        //assert( result != 0 );
        if( result )
        {
            //Calibration will suffer without subpixel interpolation
            cvFindCornerSubPix( img, &temp[0], count,
                                cvSize(11, 11), cvSize(-1,-1),
                                cvTermCriteria(CV_TERMCRIT_ITER+CV_TERMCRIT_EPS,
                                               30, 0.01) );
            copy( temp.begin(), temp.end(), pts.begin() + N );
        }
        cvReleaseImage( &img );
    }
    fclose(f);
    printf("\n");
// HARVEST CHESSBOARD 3D OBJECT POINT LIST:
    nframes = active[0].size();  // Number of good chessboards found
    objectPoints.resize(nframes*n);
    for( i = 0; i < ny; i++ )
        for( j = 0; j < nx; j++ )
            objectPoints[i*nx + j] =
                cvPoint3D32f(i*squareSize, j*squareSize, 0);
    for( i = 1; i < nframes; i++ )
        copy( objectPoints.begin(), objectPoints.begin() + n,
              objectPoints.begin() + i*n );
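    // The two loops above build the board's 3D corner grid once (on the
    // Z=0 plane, spaced by squareSize) and replicate it for every frame,
    // since the physical board geometry never changes.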
    npoints.resize(nframes,n);
    N = nframes*n;
    CvMat _objectPoints = cvMat(1, N, CV_32FC3, &objectPoints[0] );
    CvMat _imagePoints1 = cvMat(1, N, CV_32FC2, &points[0][0] );
    CvMat _imagePoints2 = cvMat(1, N, CV_32FC2, &points[1][0] );
    CvMat _npoints = cvMat(1, npoints.size(), CV_32S, &npoints[0] );
    cvSetIdentity(&_M1);
    cvSetIdentity(&_M2);
    cvZero(&_D1);
    cvZero(&_D2);

    // CALIBRATE THE STEREO CAMERAS
    // ======================================================
    printf("Running stereo calibration ...");
    fflush(stdout);
    cvStereoCalibrate( &_objectPoints, &_imagePoints1,
                       &_imagePoints2, &_npoints,
                       &_M1, &_D1, &_M2, &_D2,
                       imageSize, &_R, &_T, &_E, &_F,
                       cvTermCriteria(CV_TERMCRIT_ITER+
                                      CV_TERMCRIT_EPS, 150, 1e-6),
                       (flag_fix_aspect_ratio ? CV_CALIB_FIX_ASPECT_RATIO:0)
                       +
                       (flag_zero_tangent_dist ? CV_CALIB_ZERO_TANGENT_DIST:0)
                       +
                       (flag_same_focal_len ? CV_CALIB_SAME_FOCAL_LENGTH:0) );
    printf(" done\n");


    // CALIBRATION QUALITY CHECK
    // ======================================================
// because the output fundamental matrix implicitly
// includes all the output information,
// we can check the quality of calibration using the
// epipolar geometry constraint: m2^t*F*m1=0
    vector<CvPoint3D32f> lines[2];
    points[0].resize(N);
    points[1].resize(N);
    _imagePoints1 = cvMat(1, N, CV_32FC2, &points[0][0] );
    _imagePoints2 = cvMat(1, N, CV_32FC2, &points[1][0] );
    lines[0].resize(N);
    lines[1].resize(N);
    CvMat _L1 = cvMat(1, N, CV_32FC3, &lines[0][0]);
    CvMat _L2 = cvMat(1, N, CV_32FC3, &lines[1][0]);
//Always work in undistorted space
    cvUndistortPoints( &_imagePoints1, &_imagePoints1,
                       &_M1, &_D1, 0, &_M1 );
    cvUndistortPoints( &_imagePoints2, &_imagePoints2,
                       &_M2, &_D2, 0, &_M2 );
    cvComputeCorrespondEpilines( &_imagePoints1, 1, &_F, &_L1 );
    cvComputeCorrespondEpilines( &_imagePoints2, 2, &_F, &_L2 );
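    // cvComputeCorrespondEpilines returns normalized lines (a^2 + b^2 = 1),
    // so |a*x + b*y + c| in the loop below is the point-to-epiline distance
    // in pixels; avgErr accumulates this symmetric epipolar error.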
    double avgErr = 0;
    for( i = 0; i < N; i++ )
    {
        double err = fabs(points[0][i].x*lines[1][i].x +
                          points[0][i].y*lines[1][i].y + lines[1][i].z)
                     + fabs(points[1][i].x*lines[0][i].x +
                            points[1][i].y*lines[0][i].y + lines[0][i].z);
        avgErr += err;
    }
    printf( "avg err = %g\n", avgErr/(nframes*n) );



    //COMPUTE RECTIFICATION
    // ========================================================
    CvMat* mx1 = cvCreateMat( imageSize.height,
                              imageSize.width, CV_32F );
    CvMat* my1 = cvCreateMat( imageSize.height,
                              imageSize.width, CV_32F );
    CvMat* mx2 = cvCreateMat( imageSize.height,
                              imageSize.width, CV_32F );
    CvMat* my2 = cvCreateMat( imageSize.height,
                              imageSize.width, CV_32F );
    CvMat* img1r = cvCreateMat( imageSize.height,
                                imageSize.width, CV_8U );
    CvMat* img2r = cvCreateMat( imageSize.height,
                                imageSize.width, CV_8U );
    CvMat* disp = cvCreateMat( imageSize.height,
                               imageSize.width, CV_16S );
    CvMat* vdisp = cvCreateMat( imageSize.height,
                                imageSize.width, CV_8U );
    CvMat* pair;
    double R1[3][3], R2[3][3], P1[3][4], P2[3][4], Q[4][4];
    CvMat _R1 = cvMat(3, 3, CV_64F, R1);
    CvMat _R2 = cvMat(3, 3, CV_64F, R2);
    CvMat _Q = cvMat(4, 4, CV_64F, Q);
// IF BY CALIBRATED (BOUGUET'S METHOD)
    if( useUncalibrated == 0 )
    {
        CvMat _P1 = cvMat(3, 4, CV_64F, P1);
        CvMat _P2 = cvMat(3, 4, CV_64F, P2);
#if MRPT_OPENCV_VERSION_NUM<0x210
        // OpenCV 2.0.X
        cvStereoRectify( &_M1, &_M2, &_D1, &_D2, imageSize,
                         &_R, &_T,
                         &_R1, &_R2, &_P1, &_P2, &_Q,
                         0/*CV_CALIB_ZERO_DISPARITY*/
                       );
#else
        // OpenCV 2.1.X - 2.2.X - 2.3.X
        cvStereoRectify( &_M1, &_M2, &_D1, &_D2, imageSize,
                         &_R, &_T,
                         &_R1, &_R2, &_P1, &_P2, &_Q,
                         0 /* CV_CALIB_ZERO_DISPARITY */,
                         0 /* alpha */
                       );
#endif
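        // After cvStereoRectify, the 4th column of P2 holds the baseline term
        // (f*Tx in row 0 for side-by-side rigs, f*Ty in row 1 for top-bottom
        // ones), so comparing the two magnitudes detects the camera layout: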
        isVerticalStereo = fabs(P2[1][3]) > fabs(P2[0][3]);
//Precompute maps for cvRemap()
        cvInitUndistortRectifyMap(&_M1,&_D1,&_R1,&_P1,mx1,my1);
        cvInitUndistortRectifyMap(&_M2,&_D2,&_R2,&_P2,mx2,my2);
    }
//OR ELSE HARTLEY'S METHOD
    else if( useUncalibrated == 1 || useUncalibrated == 2 )
// use intrinsic parameters of each camera, but
// compute the rectification transformation directly
// from the fundamental matrix
    {
        double H1[3][3], H2[3][3], iM[3][3];
        CvMat _H1 = cvMat(3, 3, CV_64F, H1);
        CvMat _H2 = cvMat(3, 3, CV_64F, H2);
        CvMat _iM = cvMat(3, 3, CV_64F, iM);
//Just to show you could have independently used F
        if( useUncalibrated == 2 )
            cvFindFundamentalMat( &_imagePoints1,
                                  &_imagePoints2, &_F);
        cvStereoRectifyUncalibrated( &_imagePoints1,
                                     &_imagePoints2, &_F,
                                     imageSize,
                                     &_H1, &_H2, 3);
        cvInvert(&_M1, &_iM);
        cvMatMul(&_H1, &_M1, &_R1);
        cvMatMul(&_iM, &_R1, &_R1);
        cvInvert(&_M2, &_iM);
        cvMatMul(&_H2, &_M2, &_R2);
        cvMatMul(&_iM, &_R2, &_R2);
//Precompute maps for cvRemap()
        cvInitUndistortRectifyMap(&_M1,&_D1,&_R1,&_M1,mx1,my1);
        cvInitUndistortRectifyMap(&_M2,&_D2,&_R2,&_M2,mx2,my2);
    }
    else
        assert(0);



    // SAVE CALIBRATION REPORT FILE
    // ==============================================================================
    cout << "Writing report file: " << sOutFile<< endl;
    FILE *f_out = fopen(sOutFile,"wt");

    time_t systime;
    time(&systime);
    struct tm * timeinfo=localtime(&systime);

    fprintf( f_out,
             "# Stereo camera calibration report\n"
             "# Generated by camera-calib-gui - MRPT at %s"
             "# (This file is loadable from rawlog-edit and other MRPT tools)\n"
             "# ---------------------------------------------------------------------\n\n",
             asctime(timeinfo)
           );

    fprintf( f_out,
             "# Left camera calibration parameters:\n"
             "[CAMERA_PARAMS_LEFT]\n"
             "resolution = [%u %u]\n"
             "cx         = %f\n"
             "cy         = %f\n"
             "fx         = %f\n"
             "fy         = %f\n"
             "dist       = [%e %e %e %e %e]    // The order is: [K1 K2 T1 T2 K3]\n\n",
             imageSize.width,imageSize.height,
             cvGet2D(&_M1,0,2).val[0],
             cvGet2D(&_M1,1,2).val[0],
             cvGet2D(&_M1,0,0).val[0],
             cvGet2D(&_M1,1,1).val[0],
             cvGet2D(&_D1,0,0).val[0], cvGet2D(&_D1,0,1).val[0], cvGet2D(&_D1,0,2).val[0],
             cvGet2D(&_D1,0,3).val[0], cvGet2D(&_D1,0,4).val[0] );

    fprintf( f_out,
             "# Right camera calibration parameters:\n"
             "[CAMERA_PARAMS_RIGHT]\n"
             "resolution = [%u %u]\n"
             "cx         = %f\n"
             "cy         = %f\n"
             "fx         = %f\n"
             "fy         = %f\n"
             "dist       = [%e %e %e %e %e]    // The order is: [K1 K2 T1 T2 K3]\n\n",
             imageSize.width,imageSize.height,
             cvGet2D(&_M2,0,2).val[0],
             cvGet2D(&_M2,1,2).val[0],
             cvGet2D(&_M2,0,0).val[0],
             cvGet2D(&_M2,1,1).val[0],
             cvGet2D(&_D2,0,0).val[0], cvGet2D(&_D2,0,1).val[0], cvGet2D(&_D2,0,2).val[0],
             cvGet2D(&_D2,0,3).val[0], cvGet2D(&_D2,0,4).val[0] );


    // Convert RT to MRPT classes:
    mrpt::math::CMatrixFixedNumeric<double,3,3> mROT;
    for (int i=0; i<3; i++)
        for (int j=0; j<3; j++)
            mROT(i,j)=cvGet2D(&_R,i,j).val[0];
    mrpt::math::TPoint3D mT;
    mT.x = cvGet2D(&_T,0,0).val[0];
    mT.y = cvGet2D(&_T,1,0).val[0];
    mT.z = cvGet2D(&_T,2,0).val[0];

    // NOTE: OpenCV seems to return the inverse of what we want, so invert the pose:
    const mrpt::poses::CPose3D     RT_YPR(-mrpt::poses::CPose3D(mROT,mT));
    const mrpt::poses::CPose3DQuat RT_quat(RT_YPR);

    fprintf( f_out,
             "# Relative pose of the right camera wrt to the left camera:\n"
             "[CAMERA_PARAMS_LEFT2RIGHT_POSE]\n"
             "translation_only     = [%e %e %e]\n"
             "rotation_matrix_only = %s\n"
             "pose_yaw_pitch_roll  = %s\n"
             "pose_quaternion      = %s\n\n"
             ,
             RT_YPR.x(),RT_YPR.y(),RT_YPR.z(),
             RT_YPR.getRotationMatrix().inMatlabFormat(13).c_str(),
             RT_YPR.asString().c_str(),
             RT_quat.asString().c_str()
           );


    // Convert RT to MRPT classes:
    mrpt::math::CMatrixFixedNumeric<double,3,3> mR1, mR2;
    mrpt::math::CMatrixFixedNumeric<double,3,4> mP1, mP2;
    mrpt::math::CMatrixFixedNumeric<double,4,4> mQ;
    for (int i=0; i<3; i++)
        for (int j=0; j<3; j++)
        {
            mR1(i,j)=R1[i][j];
            mR2(i,j)=R2[i][j];
        }
    for (int i=0; i<3; i++)
        for (int j=0; j<4; j++)
        {
            mP1(i,j)=P1[i][j];
            mP2(i,j)=P2[i][j];
        }
    for (int i=0; i<4; i++)
        for (int j=0; j<4; j++)
            mQ(i,j)=Q[i][j];

    fprintf( f_out,
             "# Stereo rectify matrices (see http://opencv.willowgarage.com/documentation/camera_calibration_and_3d_reconstruction.html ):\n"
             "# R1, R2: The output 3x3 rectification transforms (rotation matrices) for the first and the second cameras, respectively.\n"
             "# P1, P2: The output 3x4 projection matrices in the new (rectified) coordinate systems.\n"
             "[STEREO_RECTIFY_MATRICES]\n"
             "R1 = %s\n"
             "R2 = %s\n"
             "P1 = %s\n"
             "P2 = %s\n\n"
             "Q  = %s\n\n"
             ,
             mR1.inMatlabFormat(13).c_str(),
             mR2.inMatlabFormat(13).c_str(),
             mP1.inMatlabFormat(13).c_str(),
             mP2.inMatlabFormat(13).c_str(),
             mQ.inMatlabFormat(13).c_str()
           );



    fprintf( f_out,
             "# Info about calibration parameters:\n"
             "[CALIB_METAINFO]\n"
             "number_good_cheesboards = %i\n"
             "average_reprojection_error = %f // pixels\n"
             "cheesboard_nx = %i\n"
             "cheesboard_ny = %i\n"
             "cheesboard_square_size = %f\n"
             "alpha = %f // Parameter for zoom in/out\n"
             "flag_fix_aspect_ratio = %s\n"
             "flag_zero_tangent_dist = %s\n"
             "flag_same_focal_len = %s\n\n"
             ,
             nframes,
             avgErr/(nframes*n),
             nx,ny,
             (double)squareSize,
             alpha,
             flag_fix_aspect_ratio ? "true":"false",
             flag_zero_tangent_dist ? "true":"false",
             flag_same_focal_len ? "true":"false"
           );

    fprintf( f_out,
             "# List of files used in the optimization:\n"
             "[CALIB_FILE_LIST]\n");
    for (unsigned int i=0; i<lst_image_files.size(); i++)
        fprintf( f_out, "img_%04u = %s\n", i, lst_image_files[i].c_str() );

    fprintf( f_out, "\n");

    fclose(f_out);

    // DISPLAY RECTIFICATION
    // ========================================================
    cvNamedWindow( "rectified", 1 );
// RECTIFY THE IMAGES AND FIND DISPARITY MAPS
    if( !isVerticalStereo )
        pair = cvCreateMat( imageSize.height, imageSize.width*2,
                            CV_8UC3 );
    else
        pair = cvCreateMat( imageSize.height*2, imageSize.width,
                            CV_8UC3 );
//Setup for finding stereo correspondences
    CvStereoBMState *BMState = cvCreateStereoBMState();
    assert(BMState != 0);
    BMState->preFilterSize=41;
    BMState->preFilterCap=31;
    BMState->SADWindowSize=41;
    BMState->minDisparity=-64;
    BMState->numberOfDisparities=128;
    BMState->textureThreshold=10;
    BMState->uniquenessRatio=15;
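    // Block-matching setup: 41x41 SAD window, disparity search range
    // [-64, 64), plus texture and uniqueness thresholds to reject weak
    // matches before the per-frame disparity computation below.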
    for( i = 0; i < nframes; i++ )
    {
        IplImage* img1=cvLoadImage(imageNames[0][i].c_str(),0);
        IplImage* img2=cvLoadImage(imageNames[1][i].c_str(),0);
        if( img1 && img2 )
        {
            CvMat part;
            cvRemap( img1, img1r, mx1, my1 );
            cvRemap( img2, img2r, mx2, my2 );

            if( !isVerticalStereo || useUncalibrated != 0 )
            {
                // When the stereo camera is oriented vertically,
                // useUncalibrated==0 does not transpose the
                // image, so the epipolar lines in the rectified
                // images are vertical. Stereo correspondence
                // function does not support such a case.
                cvFindStereoCorrespondenceBM( img1r, img2r, disp,
                                              BMState);
                cvNormalize( disp, vdisp, 0, 256, CV_MINMAX );
                //cvNamedWindow( "disparity" );
                //cvShowImage( "disparity", vdisp );
            }
            if( !isVerticalStereo )
            {
                cvGetCols( pair, &part, 0, imageSize.width );
                cvCvtColor( img1r, &part, CV_GRAY2BGR );
                cvGetCols( pair, &part, imageSize.width,
                           imageSize.width*2 );
                cvCvtColor( img2r, &part, CV_GRAY2BGR );
                for( j = 0; j < imageSize.height; j += 16 )
                    cvLine( pair, cvPoint(0,j),
                            cvPoint(imageSize.width*2,j),
                            CV_RGB(0,255,0));
            }
            else
            {
                cvGetRows( pair, &part, 0, imageSize.height );
                cvCvtColor( img1r, &part, CV_GRAY2BGR );
                cvGetRows( pair, &part, imageSize.height,
                           imageSize.height*2 );
                cvCvtColor( img2r, &part, CV_GRAY2BGR );
                for( j = 0; j < imageSize.width; j += 16 )
                    cvLine( pair, cvPoint(j,0),
                            cvPoint(j,imageSize.height*2),
                            CV_RGB(0,255,0));
            }
            cvShowImage( "rectified", pair );
            if( cvWaitKey() == 27 )
                break;
        }
        cvReleaseImage( &img1 );
        cvReleaseImage( &img2 );
    }
    cvReleaseStereoBMState(&BMState);
    cvReleaseMat( &mx1 );
    cvReleaseMat( &my1 );
    cvReleaseMat( &mx2 );
    cvReleaseMat( &my2 );
    cvReleaseMat( &img1r );
    cvReleaseMat( &img2r );
    cvReleaseMat( &disp );

}