Example #1
	void Calibration::getTransformation(Calibration& dst, Mat& rotation, Mat& translation) {
		if(imagePoints.size() == 0 || dst.imagePoints.size() == 0) {
			ofLog(OF_LOG_ERROR, "getTransformation() requires both Calibration objects to have just been calibrated");
			return;
		}
		if(imagePoints.size() != dst.imagePoints.size() || patternSize != dst.patternSize) {
			ofLog(OF_LOG_ERROR, "getTransformation() requires both Calibration objects to be trained simultaneously on the same board");
			return;
		}
		Mat fundamentalMatrix, essentialMatrix;
		Mat cameraMatrix = distortedIntrinsics.getCameraMatrix();
		Mat dstCameraMatrix = dst.getDistortedIntrinsics().getCameraMatrix();
		// uses CALIB_FIX_INTRINSIC by default
		stereoCalibrate(objectPoints,
										imagePoints, dst.imagePoints,
										cameraMatrix, distCoeffs,
										dstCameraMatrix, dst.distCoeffs,
										distortedIntrinsics.getImageSize(), rotation, translation,
										essentialMatrix, fundamentalMatrix);			
	}		
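A minimal usage sketch for getTransformation() follows. It assumes two ofxCv-style Calibration objects that were calibrated simultaneously on the same board; the object names and the preceding calibration step are illustrative and not part of the example above.

	// Sketch only: camLeft and camRight must have been calibrated beforehand
	// (e.g. via an add()/calibrate() workflow) on the same board pattern.
	Calibration camLeft, camRight;
	// ... add board images to both objects and calibrate them here ...

	Mat rotation, translation;
	camLeft.getTransformation(camRight, rotation, translation);
	// rotation/translation map points from camLeft's frame into camRight's frame.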
void stereoCalibThread::stereoCalibration(const vector<string>& imagelist, int boardWidth, int boardHeight, float squareSize)
{
    Size boardSize;
    boardSize.width=boardWidth;
    boardSize.height=boardHeight;
    if( imagelist.size() % 2 != 0 )
    {
        cout << "Error: the image list contains odd (non-even) number of elements\n";
        return;
    }
    
    const int maxScale = 2;
    // ARRAY AND VECTOR STORAGE:
    
    std::vector<std::vector<Point2f> > imagePoints[2];
    std::vector<std::vector<Point3f> > objectPoints;
    Size imageSize;
    
    int i, j, k, nimages = (int)imagelist.size()/2;
    
    imagePoints[0].resize(nimages);
    imagePoints[1].resize(nimages);
    std::vector<string> goodImageList;
    
    for( i = j = 0; i < nimages; i++ )
    {
        for( k = 0; k < 2; k++ )
        {
            const string& filename = imagelist[i*2+k];
            Mat img = cv::imread(filename, 0);
            if(img.empty())
                break;
            if( imageSize == Size() )
                imageSize = img.size();
            else if( img.size() != imageSize )
            {
                cout << "The image " << filename << " has the size different from the first image size. Skipping the pair\n";
                break;
            }
            bool found = false;
            std::vector<Point2f>& corners = imagePoints[k][j];
            for( int scale = 1; scale <= maxScale; scale++ )
            {
                Mat timg;
                if( scale == 1 )
                    timg = img;
                else
                    resize(img, timg, Size(), scale, scale);

                if(boardType == "CIRCLES_GRID") {
                    found = findCirclesGridDefault(timg, boardSize, corners, CALIB_CB_SYMMETRIC_GRID  | CALIB_CB_CLUSTERING);
                } else if(boardType == "ASYMMETRIC_CIRCLES_GRID") {
                    found = findCirclesGridDefault(timg, boardSize, corners, CALIB_CB_ASYMMETRIC_GRID | CALIB_CB_CLUSTERING);
                } else {
                    found = findChessboardCorners(timg, boardSize, corners,
                                                  CV_CALIB_CB_ADAPTIVE_THRESH | CV_CALIB_CB_NORMALIZE_IMAGE);
                }

                if( found )
                {
                    if( scale > 1 )
                    {
                        Mat cornersMat(corners);
                        cornersMat *= 1./scale;
                    }
                    break;
                }
            }
            if( !found )
                break;
        }
        if( k == 2 )
        {
            goodImageList.push_back(imagelist[i*2]);
            goodImageList.push_back(imagelist[i*2+1]);
            j++;
        }
    }
    fprintf(stdout,"%i pairs have been successfully detected.\n",j);
    nimages = j;
    if( nimages < 2 )
    {
        fprintf(stdout,"Error: too few pairs detected \n");
        return;
    }
    
    imagePoints[0].resize(nimages);
    imagePoints[1].resize(nimages);
    objectPoints.resize(nimages);
    
    for( i = 0; i < nimages; i++ )
    {
        for( j = 0; j < boardSize.height; j++ )
            for( k = 0; k < boardSize.width; k++ )
                objectPoints[i].push_back(Point3f(j*squareSize, k*squareSize, 0));
    }
    
    fprintf(stdout,"Running stereo calibration ...\n");
    
    Mat cameraMatrix[2], distCoeffs[2];
    Mat E, F;
    
    if(this->Kleft.empty() || this->Kright.empty())
    {
        double rms = stereoCalibrate(objectPoints, imagePoints[0], imagePoints[1],
                        this->Kleft, this->DistL,
                        this->Kright, this->DistR,
                        imageSize, this->R, this->T, E, F,
                        TermCriteria(CV_TERMCRIT_ITER+CV_TERMCRIT_EPS, 100, 1e-5),
                        CV_CALIB_FIX_ASPECT_RATIO +
                        CV_CALIB_ZERO_TANGENT_DIST +
                        CV_CALIB_SAME_FOCAL_LENGTH +
                        CV_CALIB_FIX_K3);
        fprintf(stdout,"done with RMS error= %f\n",rms);
    } else
    {
        double rms = stereoCalibrate(objectPoints, imagePoints[0], imagePoints[1],
                this->Kleft, this->DistL,
                this->Kright, this->DistR,
                imageSize, this->R, this->T, E, F,
                TermCriteria(CV_TERMCRIT_ITER+CV_TERMCRIT_EPS, 100, 1e-5),CV_CALIB_FIX_ASPECT_RATIO + CV_CALIB_FIX_INTRINSIC + CV_CALIB_FIX_K3);
        fprintf(stdout,"done with RMS error= %f\n",rms);
    }
// CALIBRATION QUALITY CHECK
    cameraMatrix[0] = this->Kleft;
    cameraMatrix[1] = this->Kright;
    distCoeffs[0]=this->DistL;
    distCoeffs[1]=this->DistR;
    Mat R, T;
    T=this->T;
    R=this->R;
    double err = 0;
    int npoints = 0;
    std::vector<Vec3f> lines[2];
    for( i = 0; i < nimages; i++ )
    {
        int npt = (int)imagePoints[0][i].size();
        Mat imgpt[2];
        for( k = 0; k < 2; k++ )
        {
            imgpt[k] = Mat(imagePoints[k][i]);
            undistortPoints(imgpt[k], imgpt[k], cameraMatrix[k], distCoeffs[k], Mat(), cameraMatrix[k]);
            computeCorrespondEpilines(imgpt[k], k+1, F, lines[k]);
        }
        for( j = 0; j < npt; j++ )
        {
            double errij = fabs(imagePoints[0][i][j].x*lines[1][j][0] +
                                imagePoints[0][i][j].y*lines[1][j][1] + lines[1][j][2]) +
                           fabs(imagePoints[1][i][j].x*lines[0][j][0] +
                                imagePoints[1][i][j].y*lines[0][j][1] + lines[0][j][2]);
            err += errij;
        }
        npoints += npt;
    }
    fprintf(stdout,"average reprojection err = %f\n",err/npoints);
    cout.flush();
}
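The thread above stops after the calibration and the epipolar quality check. Below is a hedged sketch of how the stored results (Kleft, DistL, Kright, DistR, R, T) could be turned into rectification maps with the same OpenCV 2.x API; the helper name and the map variables are illustrative, not part of stereoCalibThread.

// Sketch, not part of stereoCalibThread: build rectification remap tables
// from the calibration results. All names are illustrative.
static void buildRectificationMaps(const Mat& Kleft, const Mat& DistL,
                                   const Mat& Kright, const Mat& DistR,
                                   const Mat& R, const Mat& T, Size imageSize,
                                   Mat mapL[2], Mat mapR[2])
{
    Mat R1, R2, P1, P2, Q;
    stereoRectify(Kleft, DistL, Kright, DistR, imageSize, R, T,
                  R1, R2, P1, P2, Q, CV_CALIB_ZERO_DISPARITY, -1, imageSize);
    initUndistortRectifyMap(Kleft, DistL, R1, P1, imageSize, CV_16SC2, mapL[0], mapL[1]);
    initUndistortRectifyMap(Kright, DistR, R2, P2, imageSize, CV_16SC2, mapR[0], mapR[1]);
    // A live pair is then rectified with remap(src, dst, mapL[0], mapL[1], INTER_LINEAR).
}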
void CameraCalibration::StereoCalibration()
{

	vector<vector<Point2f> > ImagePoints[2];

	vector<vector<Point3f> > ObjectPoints(1);
	for(int i=0; i<BoardSize.height; i++)
		for(int j=0; j<BoardSize.width; j++)
			ObjectPoints.at(0).push_back(Point3f(float( j*SquareSize ),
			float( i*SquareSize ),
			0));

	ObjectPoints.resize(NumFrames, ObjectPoints[0]);

	vector<Mat> RVecs[2], TVecs[2];
	double rms;

	for(int c_idx=0; c_idx<2; c_idx++)
	{
		for(int i=0; i < NumFrames; i++)
		{

			Mat img = imread(data_path+"/"+calib_params[c_idx].ImageList.at(i), CV_LOAD_IMAGE_COLOR);

			vector<Point2f> pointBuf;
			bool found = false;

			found = findChessboardCorners(img, BoardSize, pointBuf,
				CV_CALIB_CB_ADAPTIVE_THRESH |
				CV_CALIB_CB_NORMALIZE_IMAGE);

			if(found)
			{
				Mat viewGray;
				cvtColor(img, viewGray, CV_BGR2GRAY);
				cornerSubPix(viewGray, pointBuf, Size(11, 11), Size(-1, -1),
					TermCriteria(CV_TERMCRIT_EPS + CV_TERMCRIT_ITER, 100, 0.01));

				//drawChessboardCorners(img, BoardSize, Mat(pointBuf), found);
				//namedWindow("Image View", CV_WINDOW_AUTOSIZE);
				//imshow("Image View", img);
				//waitKey();
			}
			else
			{
				cerr << i << "th image cannot be found a pattern." << endl;
				exit(EXIT_FAILURE);
			}

			ImagePoints[c_idx].push_back(pointBuf);
		}

		calib_params[c_idx].DistCoeffs = Mat::zeros(8, 1, CV_64F);
		calib_params[c_idx].CameraMatrix = initCameraMatrix2D(ObjectPoints, ImagePoints[c_idx], ImageSize, 0);
		rms = calibrateCamera(ObjectPoints, 
			ImagePoints[c_idx],
			ImageSize,
			calib_params[c_idx].CameraMatrix,
			calib_params[c_idx].DistCoeffs,
			RVecs[c_idx],
			TVecs[c_idx],
			CV_CALIB_USE_INTRINSIC_GUESS |
			CV_CALIB_FIX_K3 |	
			CV_CALIB_FIX_K4 |
			CV_CALIB_FIX_K5 |
			CV_CALIB_FIX_K6);

		cout << c_idx << " camera re-projection error reported by calibrateCamera: "<< rms << endl;

	}

	rms = stereoCalibrate(ObjectPoints,
						  ImagePoints[0],
						  ImagePoints[1],
						  calib_params[0].CameraMatrix,
						  calib_params[0].DistCoeffs,
						  calib_params[1].CameraMatrix,
						  calib_params[1].DistCoeffs,
						  ImageSize,
						  stereo_params->R,
						  stereo_params->T,
						  stereo_params->E,
						  stereo_params->F,
						  TermCriteria(CV_TERMCRIT_ITER+CV_TERMCRIT_EPS, 100, 1e-5));

	cout << "Stereo re-projection error reported by stereoCalibrate: " << rms << endl;

	cout << "Fundamental Matrix reprojection error: " << FundamentalMatrixQuality(ImagePoints[0], ImagePoints[1], 
		calib_params[0].CameraMatrix, calib_params[1].CameraMatrix, 
		calib_params[0].DistCoeffs, calib_params[1].DistCoeffs, stereo_params->F) << endl;        

    // Transfer matrix from OpenCV Mat to Pangolin matrix
	CvtCameraExtrins(RVecs, TVecs);
	
    Timer PangolinTimer;

	// Stereo rectification
	stereoRectify(calib_params[0].CameraMatrix,
		calib_params[0].DistCoeffs,
		calib_params[1].CameraMatrix,
		calib_params[1].DistCoeffs,
		ImageSize, 
		stereo_params->R, 
		stereo_params->T, 
		rect_params->LeftRot,
		rect_params->RightRot,
		rect_params->LeftProj,
		rect_params->RightProj,
		rect_params->Disp2DepthReProjMat,
		CALIB_ZERO_DISPARITY, // test later
		1, // test later
		ImageSize,
		&rect_params->LeftRoi, 
		&rect_params->RightRoi);

    cout << "\nStereo rectification using calibration spent: " << PangolinTimer.getElapsedTimeInMilliSec() << "ms." << endl;

	rect_params->isVerticalStereo = fabs(rect_params->RightProj.at<double>(1, 3)) > 
										fabs(rect_params->RightProj.at<double>(0, 3));

	// Get the rectification re-map index
	initUndistortRectifyMap(calib_params[0].CameraMatrix, calib_params[0].DistCoeffs, 
		rect_params->LeftRot, rect_params->LeftProj,	
		ImageSize, CV_16SC2, rect_params->LeftRMAP[0], rect_params->LeftRMAP[1]);
	initUndistortRectifyMap(calib_params[1].CameraMatrix, calib_params[1].DistCoeffs, 
		rect_params->RightRot, rect_params->RightProj, 
		ImageSize, CV_16SC2, rect_params->RightRMAP[0], rect_params->RightRMAP[1]);

}
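A short sketch of how the remap tables computed above might be consumed; rect_params comes from CameraCalibration, while the image variables are illustrative.

	// Sketch: rectify one stereo pair with the maps filled in by StereoCalibration().
	Mat leftRect, rightRect;
	remap(leftImage, leftRect, rect_params->LeftRMAP[0], rect_params->LeftRMAP[1], INTER_LINEAR);
	remap(rightImage, rightRect, rect_params->RightRMAP[0], rect_params->RightRMAP[1], INTER_LINEAR);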
/// Calibrates the extrinsic parameters of the setup and saves it to an XML file
/// Press 'r' to retrieve chessboard corners
///      's' to save and exit
///      'c' to exit without saving
/// In: inputCapture1: video feed of camera 1
///     inputCapture2: video feed of camera 2
void CalibrateEnvironment(VideoCapture& inputCapture1, VideoCapture& inputCapture2)
{
    Size boardSize;
    boardSize.width = BOARD_WIDTH;
    boardSize.height = BOARD_HEIGHT;
    
    const string fileName1 = "CameraIntrinsics1.xml";
    const string fileName2 = "CameraIntrinsics2.xml";
    
    cerr << "Attempting to open configuration files" << endl;
    FileStorage fs1(fileName1, FileStorage::READ);
    FileStorage fs2(fileName2, FileStorage::READ);
    
    Mat cameraMatrix1, cameraMatrix2;
    Mat distCoeffs1, distCoeffs2;
    
    fs1["Camera_Matrix"] >> cameraMatrix1;
    fs1["Distortion_Coefficients"] >> distCoeffs1;
    fs2["Camera_Matrix"] >> cameraMatrix2;
    fs2["Distortion_Coefficients"] >> distCoeffs2;
    
    if (cameraMatrix1.data == NULL || distCoeffs1.data == NULL ||
        cameraMatrix2.data == NULL || distCoeffs2.data == NULL)
    {
        cerr << "Could not load camera intrinsics\n" << endl;
    }
    else{
        cerr << "Loaded intrinsics\n" << endl;
        cerr << "Camera Matrix1: " << cameraMatrix1 << endl;
        cerr << "Camera Matrix2: " << cameraMatrix2 << endl;
        
    }
    
    Mat translation;
    Mat image1, image2;
    Mat mapX1, mapX2, mapY1, mapY2;
    inputCapture1.read(image1);
    Size imageSize = image1.size();
    bool rotationCalibrated = false;
    
    while(inputCapture1.isOpened() && inputCapture2.isOpened())
    {
        inputCapture1.read(image1);
        inputCapture2.read(image2);
        
        if (rotationCalibrated)
        {
            Mat t1 = image1.clone();
            Mat t2 = image2.clone();
            remap(t1, image1, mapX1, mapY1, INTER_LINEAR);
            remap(t2, image2, mapX2, mapY2, INTER_LINEAR);
            t1.release();
            t2.release();
        }
        
        char c = waitKey(15);
        if (c == 'c')
        {
            cerr << "Cancelling..." << endl;
            return;
        }
        else if(c == 's' && rotationCalibrated)
        {
            cerr << "Saving..." << endl;
            const string fileName = "EnvironmentCalibration.xml";
            FileStorage fs(fileName, FileStorage::WRITE);
            fs << "Camera_Matrix_1" <<  getOptimalNewCameraMatrix(cameraMatrix1, distCoeffs1, imageSize, 1,imageSize, 0);
            fs << "Camera_Matrix_2" <<  getOptimalNewCameraMatrix(cameraMatrix2, distCoeffs2, imageSize, 1, imageSize, 0);
            fs << "Mapping_X_1" << mapX1;
            fs << "Mapping_Y_1" << mapY1;
            fs << "Mapping_X_2" << mapX2;
            fs << "Mapping_Y_2" << mapY2;
            fs << "Translation" << translation;
            cerr << "Exiting..." << endl;
            destroyAllWindows();
            return;
        }
        else if(c == 's' && !rotationCalibrated)
        {
            cerr << "Exiting..." << endl;
            destroyAllWindows();
            return;
        }
        else if (c == 'r')
        {
            BoardSettings s;
            s.boardSize.width = BOARD_WIDTH;
            s.boardSize.height = BOARD_HEIGHT;
            s.cornerNum = s.boardSize.width * s.boardSize.height;
            s.squareSize = (float)SQUARE_SIZE;
            
            vector<vector<Point2f> > imagePoints1, imagePoints2;
            
            if (RetrieveChessboardCorners(imagePoints1, imagePoints2, s, inputCapture1, inputCapture2, ITERATIONS))
            {
                vector<vector<Point3f> > objectPoints(1);
                CalcBoardCornerPositions(s.boardSize, s.squareSize, objectPoints[0]);
                objectPoints.resize(imagePoints1.size(),objectPoints[0]);
                
                Mat R, T, E, F;
                Mat rmat1, rmat2, rvec;
                
                double rms = stereoCalibrate(objectPoints, imagePoints1, imagePoints2, cameraMatrix1, distCoeffs1, cameraMatrix2, distCoeffs2, imageSize, R, T, E, F,
                                             TermCriteria( CV_TERMCRIT_EPS+CV_TERMCRIT_ITER, 1000, 0.01),
                                             CV_CALIB_FIX_INTRINSIC);
                
                cerr << "Original translation: " << T << endl;
                cerr << "Reprojection error reported by camera: " << rms << endl;
                
                // convert to rotation vector and then remove 90 degree offset
                Rodrigues(R, rvec);
                rvec.at<double>(1,0) -= 1.570796327;
                
                // equal rotation applied to each image...not necessarily needed
                rvec = rvec/2;
                Rodrigues(rvec, rmat1);
                invert(rmat1,rmat2);
                
                initUndistortRectifyMap(cameraMatrix1, distCoeffs1, rmat1,
                                        getOptimalNewCameraMatrix(cameraMatrix1, distCoeffs1, imageSize, 1,imageSize, 0), imageSize, CV_32FC1, mapX1, mapY1);
                initUndistortRectifyMap(cameraMatrix2, distCoeffs2, rmat2,
                                        getOptimalNewCameraMatrix(cameraMatrix2, distCoeffs2, imageSize, 1, imageSize, 0), imageSize, CV_32FC1, mapX2, mapY2);
                
                
                // reproject points in camera 1 since its rotation has been changed
                // need to find the translation between cameras based on the new camera 1 orientation
                for  (int i = 0; i < imagePoints1.size(); i++)
                {
                    Mat pointsMat1 = Mat(imagePoints1[i]);
                    Mat pointsMat2 = Mat(imagePoints2[i]);
                    
                    
                    undistortPoints(pointsMat1, imagePoints1[i], cameraMatrix1, distCoeffs1, rmat1,getOptimalNewCameraMatrix(cameraMatrix1, distCoeffs1, imageSize, 1, imageSize, 0));
                    undistortPoints(pointsMat2, imagePoints2[i], cameraMatrix2, distCoeffs2, rmat2,getOptimalNewCameraMatrix(cameraMatrix2, distCoeffs2, imageSize, 1, imageSize, 0));
                    
                    pointsMat1.release();
                    pointsMat2.release();
                }
                
                Mat temp1, temp2;
                R.release();
                T.release();
                E.release();
                F.release();
                
                // TODO: remove this
                // CalcBoardCornerPositions(s.boardSize, s.squareSize, objectPoints[0]);
                // objectPoints.resize(imagePoints1.size(),objectPoints[0]);
                
                stereoCalibrate(objectPoints, imagePoints1, imagePoints2, cameraMatrix1, distCoeffs1, cameraMatrix2, distCoeffs2, imageSize, R, T, E, F,
                                TermCriteria( CV_TERMCRIT_EPS+CV_TERMCRIT_ITER, 1000, 0.01),
                                CV_CALIB_FIX_INTRINSIC);
                
                // need to alter translation matrix so
                // [0] = distance in X direction (right from perspective of camera 1 is positive)
                // [1] = distance in Y direction (away from camera 1 is positive)
                // [2] = distance in Z direction (up is positive)
                translation = T;
                double temp = -translation.at<double>(0,0);
                translation.at<double>(0,0) = translation.at<double>(2,0);
                translation.at<double>(2,0) = temp;
                
                cerr << "Translation reproj: " << translation << endl;
                Rodrigues(R, rvec);
                cerr << "Reprojected rvec: " << rvec << endl;
                
                imagePoints1.clear();
                imagePoints2.clear();
                
                rvec.release();
                rmat1.release();
                rmat2.release();
                R.release();
                T.release();
                E.release();
                F.release();
                
                rotationCalibrated = true;
            }
        }
        imshow("Image View1", image1);
        imshow("Image View2", image2);
    }
}
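A minimal caller for CalibrateEnvironment(), assuming two cameras at device indices 0 and 1 (the indices and variable names are illustrative):

    // Sketch of a caller: open two cameras and hand them to the interactive loop above.
    VideoCapture cap1(0), cap2(1);
    if (cap1.isOpened() && cap2.isOpened())
        CalibrateEnvironment(cap1, cap2);   // 'r' grabs corners, 's' saves, 'c' cancels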
Example #5
void StereoCalib(const vector<string>& imagelist, Size boardSize, bool useCalibrated=true, bool showRectified=true)
{
    if( imagelist.size() % 2 != 0 )
    {
        cout << "Error: the image list contains odd (non-even) number of elements\n";
        return;
    }
    printf("board size: %d x %d", boardSize.width, boardSize.height);
    bool displayCorners = true;
    const int maxScale = 2;
    const float squareSize = 1.f;  // Set this to your actual square size
    // ARRAY AND VECTOR STORAGE:

    vector<vector<Point2f> > imagePoints[2];
    vector<vector<Point3f> > objectPoints;
    Size imageSize;

    int i, j, k, nimages = (int)imagelist.size()/2;

    imagePoints[0].resize(nimages);
    imagePoints[1].resize(nimages);
    vector<string> goodImageList;

    for( i = j = 0; i < nimages; i++ )
    {
        for( k = 0; k < 2; k++ )
        {
            const string& filename = imagelist[i*2+k];
            Mat img = imread(filename, 0);
            if(img.empty())
                break;
            if( imageSize == Size() )
                imageSize = img.size();
            else if( img.size() != imageSize )
            {
                cout << "The image " << filename << " has the size different from the first image size. Skipping the pair\n";
                break;
            }
            bool found = false;
            vector<Point2f>& corners = imagePoints[k][j];
            for( int scale = 1; scale <= maxScale; scale++ )
            {
                Mat timg;
                if( scale == 1 )
                    timg = img;
                else
                    resize(img, timg, Size(), scale, scale);
                found = findChessboardCorners(timg, boardSize, corners,
                    CV_CALIB_CB_ADAPTIVE_THRESH | CV_CALIB_CB_NORMALIZE_IMAGE);
                if( found )
                {
                    if( scale > 1 )
                    {
                        Mat cornersMat(corners);
                        cornersMat *= 1./scale;
                    }
                    break;
                }
            }
            if( displayCorners )
            {
                cout << filename << endl;
                Mat cimg, cimg1;
                cvtColor(img, cimg, CV_GRAY2BGR);
                drawChessboardCorners(cimg, boardSize, corners, found);
                double sf = 640./MAX(img.rows, img.cols);
                resize(cimg, cimg1, Size(), sf, sf);
                imshow("corners", cimg1);
                char c = (char)waitKey(500);
                if( c == 27 || c == 'q' || c == 'Q' ) //Allow ESC to quit
                    exit(-1);
            }
            else
                putchar('.');
            if( !found )
                break;
            cornerSubPix(img, corners, Size(11,11), Size(-1,-1),
                         TermCriteria(CV_TERMCRIT_ITER+CV_TERMCRIT_EPS,
                                      30, 0.01));
        }
        if( k == 2 )
        {
            goodImageList.push_back(imagelist[i*2]);
            goodImageList.push_back(imagelist[i*2+1]);
            j++;
        }
    }
    cout << j << " pairs have been successfully detected.\n";
    nimages = j;
    if( nimages < 2 )
    {
        cout << "Error: too little pairs to run the calibration\n";
        return;
    }

    imagePoints[0].resize(nimages);
    imagePoints[1].resize(nimages);
    objectPoints.resize(nimages);

    for( i = 0; i < nimages; i++ )
    {
        for( j = 0; j < boardSize.height; j++ )
            for( k = 0; k < boardSize.width; k++ )
                objectPoints[i].push_back(Point3f(j*squareSize, k*squareSize, 0));
    }

    cout << "Running stereo calibration ...\n";

    Mat cameraMatrix[2], distCoeffs[2];
    cameraMatrix[0] = Mat::eye(3, 3, CV_64F);
    cameraMatrix[1] = Mat::eye(3, 3, CV_64F);
    Mat R, T, E, F;

    double rms = stereoCalibrate(objectPoints, imagePoints[0], imagePoints[1],
                    cameraMatrix[0], distCoeffs[0],
                    cameraMatrix[1], distCoeffs[1],
                    imageSize, R, T, E, F,
                    TermCriteria(CV_TERMCRIT_ITER+CV_TERMCRIT_EPS, 100, 1e-5),
                    CV_CALIB_FIX_ASPECT_RATIO +
                    CV_CALIB_ZERO_TANGENT_DIST +
                    //CV_CALIB_SAME_FOCAL_LENGTH +
                    CV_CALIB_RATIONAL_MODEL +
                    CV_CALIB_FIX_K3 + CV_CALIB_FIX_K4 + CV_CALIB_FIX_K5);
    cout << "done with RMS error=" << rms << endl;

// CALIBRATION QUALITY CHECK
// because the output fundamental matrix implicitly
// includes all the output information,
// we can check the quality of calibration using the
// epipolar geometry constraint: m2^t*F*m1=0
    double err = 0;
    int npoints = 0;
    vector<Vec3f> lines[2];
    for( i = 0; i < nimages; i++ )
    {
        int npt = (int)imagePoints[0][i].size();
        Mat imgpt[2];
        for( k = 0; k < 2; k++ )
        {
            imgpt[k] = Mat(imagePoints[k][i]);
            undistortPoints(imgpt[k], imgpt[k], cameraMatrix[k], distCoeffs[k], Mat(), cameraMatrix[k]);
            computeCorrespondEpilines(imgpt[k], k+1, F, lines[k]);
        }
        for( j = 0; j < npt; j++ )
        {
            double errij = fabs(imagePoints[0][i][j].x*lines[1][j][0] +
                                imagePoints[0][i][j].y*lines[1][j][1] + lines[1][j][2]) +
                           fabs(imagePoints[1][i][j].x*lines[0][j][0] +
                                imagePoints[1][i][j].y*lines[0][j][1] + lines[0][j][2]);
            err += errij;
        }
        npoints += npt;
    }
    cout << "average reprojection err = " <<  err/npoints << endl;

    // save intrinsic parameters
    FileStorage fs("calib/intrinsics.yml", CV_STORAGE_WRITE);
    if( fs.isOpened() )
    {
        fs << "M1" << cameraMatrix[0] << "D1" << distCoeffs[0] <<
            "M2" << cameraMatrix[1] << "D2" << distCoeffs[1];
        fs.release();
    }
    else
        cout << "Error: can not save the intrinsic parameters\n";

    Mat R1, R2, P1, P2, Q;
    Rect validRoi[2];

    stereoRectify(cameraMatrix[0], distCoeffs[0],
                  cameraMatrix[1], distCoeffs[1],
                  imageSize, R, T, R1, R2, P1, P2, Q,
                  CALIB_ZERO_DISPARITY, 1, imageSize, &validRoi[0], &validRoi[1]);

    fs.open("calib/extrinsics.yml", CV_STORAGE_WRITE);
    if( fs.isOpened() )
    {
        fs << "R" << R << "T" << T << "R1" << R1 << "R2" << R2 << "P1" << P1 << "P2" << P2 << "Q" << Q;
        fs.release();
    }
    else
        cout << "Error: can not save the intrinsic parameters\n";

    // OpenCV can handle left-right
    // or up-down camera arrangements
    bool isVerticalStereo = fabs(P2.at<double>(1, 3)) > fabs(P2.at<double>(0, 3));

// COMPUTE AND DISPLAY RECTIFICATION
    if( !showRectified )
        return;

    Mat rmap[2][2];
// IF BY CALIBRATED (BOUGUET'S METHOD)
    if( useCalibrated )
    {
        // we already computed everything
    }
// OR ELSE HARTLEY'S METHOD
    else
 // use intrinsic parameters of each camera, but
 // compute the rectification transformation directly
 // from the fundamental matrix
    {
        vector<Point2f> allimgpt[2];
        for( k = 0; k < 2; k++ )
        {
            for( i = 0; i < nimages; i++ )
                std::copy(imagePoints[k][i].begin(), imagePoints[k][i].end(), back_inserter(allimgpt[k]));
        }
        F = findFundamentalMat(Mat(allimgpt[0]), Mat(allimgpt[1]), FM_8POINT, 0, 0);
        Mat H1, H2;
        stereoRectifyUncalibrated(Mat(allimgpt[0]), Mat(allimgpt[1]), F, imageSize, H1, H2, 3);

        R1 = cameraMatrix[0].inv()*H1*cameraMatrix[0];
        R2 = cameraMatrix[1].inv()*H2*cameraMatrix[1];
        P1 = cameraMatrix[0];
        P2 = cameraMatrix[1];
    }

    //Precompute maps for cv::remap()
    initUndistortRectifyMap(cameraMatrix[0], distCoeffs[0], R1, P1, imageSize, CV_16SC2, rmap[0][0], rmap[0][1]);
    initUndistortRectifyMap(cameraMatrix[1], distCoeffs[1], R2, P2, imageSize, CV_16SC2, rmap[1][0], rmap[1][1]);

    Mat canvas;
    double sf;
    int w, h;
    if( !isVerticalStereo )
    {
        sf = 600./MAX(imageSize.width, imageSize.height);
        w = cvRound(imageSize.width*sf);
        h = cvRound(imageSize.height*sf);
        canvas.create(h, w*2, CV_8UC3);
    }
    else
    {
        sf = 300./MAX(imageSize.width, imageSize.height);
        w = cvRound(imageSize.width*sf);
        h = cvRound(imageSize.height*sf);
        canvas.create(h*2, w, CV_8UC3);
    }

    for( i = 0; i < nimages; i++ )
    {
        for( k = 0; k < 2; k++ )
        {
            Mat img = imread(goodImageList[i*2+k], 0), rimg, cimg;
            remap(img, rimg, rmap[k][0], rmap[k][1], CV_INTER_LINEAR);
            cvtColor(rimg, cimg, CV_GRAY2BGR);
            Mat canvasPart = !isVerticalStereo ? canvas(Rect(w*k, 0, w, h)) : canvas(Rect(0, h*k, w, h));
            resize(cimg, canvasPart, canvasPart.size(), 0, 0, CV_INTER_AREA);
            if( useCalibrated )
            {
                Rect vroi(cvRound(validRoi[k].x*sf), cvRound(validRoi[k].y*sf),
                          cvRound(validRoi[k].width*sf), cvRound(validRoi[k].height*sf));
                rectangle(canvasPart, vroi, Scalar(0,0,255), 3, 8);
            }
        }

        if( !isVerticalStereo )
            for( j = 0; j < canvas.rows; j += 16 )
                line(canvas, Point(0, j), Point(canvas.cols, j), Scalar(0, 255, 0), 1, 8);
        else
            for( j = 0; j < canvas.cols; j += 16 )
                line(canvas, Point(j, 0), Point(j, canvas.rows), Scalar(0, 255, 0), 1, 8);
        imshow("rectified", canvas);
        char c = (char)waitKey();
        if( c == 27 || c == 'q' || c == 'Q' )
            break;
    }
}
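The files written by StereoCalib() can be reloaded at runtime to rebuild the rectification maps without recalibrating. A hedged sketch follows; imageSize is assumed to be the same image size used during calibration, and the variable names are illustrative.

    // Sketch: reload calib/intrinsics.yml and calib/extrinsics.yml written above.
    FileStorage fi("calib/intrinsics.yml", FileStorage::READ);
    FileStorage fe("calib/extrinsics.yml", FileStorage::READ);
    Mat M1, D1, M2, D2, R1, R2, P1, P2;
    fi["M1"] >> M1; fi["D1"] >> D1; fi["M2"] >> M2; fi["D2"] >> D2;
    fe["R1"] >> R1; fe["R2"] >> R2; fe["P1"] >> P1; fe["P2"] >> P2;

    Mat rmap[2][2];
    initUndistortRectifyMap(M1, D1, R1, P1, imageSize, CV_16SC2, rmap[0][0], rmap[0][1]);
    initUndistortRectifyMap(M2, D2, R2, P2, imageSize, CV_16SC2, rmap[1][0], rmap[1][1]);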
int main(int argc, char *argv[]) {
    gflags::ParseCommandLineFlags(&argc, &argv, true);

    cv::vector<cv::Mat> lefts, rights;
    const cv::Size patternSize(9, 6);
    cv::vector<cv::vector<cv::Point3f>> worldPoints;
    cv::vector<cv::vector<cv::vector<cv::Point2f>>> imagePoints(2);

    for (size_t i = 0; i < 2; i++)
        imagePoints[i].resize(FLAGS_size);

    loadImages(lefts, rights, FLAGS_size);
    FLAGS_size = findChessboards(lefts, rights, imagePoints, patternSize, FLAGS_size);
    std::cout << "number of correct files = " << FLAGS_size << std::endl;
    setWorldPoints(worldPoints, patternSize, 0.024, FLAGS_size);

    std::cout << "calibrate stereo cameras" << std::endl;
    cv::vector<cv::Mat> cameraMatrix(2);
    cv::vector<cv::Mat> distCoeffs(2);
    cameraMatrix[0] = cv::Mat::eye(3, 3, CV_64FC1);
    cameraMatrix[1] = cv::Mat::eye(3, 3, CV_64FC1);
    distCoeffs[0] = cv::Mat(8, 1, CV_64FC1);
    distCoeffs[1] = cv::Mat(8, 1, CV_64FC1);
    cv::Mat R, T, E, F;

    double rms = stereoCalibrate(
            worldPoints, imagePoints[0], imagePoints[1], cameraMatrix[0],
            distCoeffs[0], cameraMatrix[1], distCoeffs[1], lefts[0].size(),
            R, T, E, F, cv::TermCriteria(CV_TERMCRIT_ITER + CV_TERMCRIT_EPS, 100, 1e-5),
            CV_CALIB_FIX_ASPECT_RATIO +
            CV_CALIB_ZERO_TANGENT_DIST +
            CV_CALIB_SAME_FOCAL_LENGTH +
            CV_CALIB_RATIONAL_MODEL +
            CV_CALIB_FIX_K3 + CV_CALIB_FIX_K4 + CV_CALIB_FIX_K5);
    std::cout << "done with RMS error = " << rms << std::endl;

    double err = 0;
    int npoints = 0;
    for (int i = 0; i < FLAGS_size; i++) {
        int size = (int) imagePoints[0][i].size();
        cv::vector<cv::Vec3f> lines[2];
        cv::Mat imgpt[2];
        for (int k = 0; k < 2; k++) {
            imgpt[k] = cv::Mat(imagePoints[k][i]);
            cv::undistortPoints(imgpt[k], imgpt[k], cameraMatrix[k], distCoeffs[k],
                                cv::Mat(), cameraMatrix[k]);
            cv::computeCorrespondEpilines(imgpt[k], k + 1, F, lines[k]);
        }

        for (int j = 0; j < size; j++) {
            double errij =
                    std::fabs(imagePoints[0][i][j].x * lines[1][j][0] +
                              imagePoints[0][i][j].y * lines[1][j][1] +
                              lines[1][j][2]) +
                    std::fabs(imagePoints[1][i][j].x * lines[0][j][0] +
                              imagePoints[1][i][j].y * lines[0][j][1] +
                              lines[0][j][2]);
            err += errij;
        }
        npoints += size;
    }
    std::cout << "average reprojection error = " << err / npoints << std::endl;

    cv::Mat R1, R2, P1, P2, Q;
    cv::Rect validROI[2];
    stereoRectify(cameraMatrix[0], distCoeffs[0], cameraMatrix[1],
                  distCoeffs[1], lefts[0].size(), R, T, R1, R2, P1, P2, Q,
                  cv::CALIB_ZERO_DISPARITY, 1, lefts[0].size(),
                  &validROI[0], &validROI[1]);

    {
        cv::FileStorage fs(FLAGS_intrinsics.c_str(), cv::FileStorage::WRITE);
        if (fs.isOpened()) {
            fs << "M1" << cameraMatrix[0] << "D1" << distCoeffs[0]
                << "M2" << cameraMatrix[1] << "D2" << distCoeffs[1];
            fs.release();
        }
    }

    cv::Mat rmap[2][2];
    cv::initUndistortRectifyMap(cameraMatrix[0], distCoeffs[0], R1, P1,
                                lefts[0].size(),
                                CV_16SC2,
                                rmap[0][0], rmap[0][1]);
    cv::initUndistortRectifyMap(cameraMatrix[1], distCoeffs[1], R2, P2,
                                lefts[0].size(),
                                CV_16SC2,
                                rmap[1][0], rmap[1][1]);

    {
        cv::FileStorage fs(FLAGS_extrinsics.c_str(), cv::FileStorage::WRITE);
        if (fs.isOpened()) {
            fs << "R" << R << "T" << T << "R1" << R1 << "R2" << R2
               << "P1" << P1 << "P2" << P2 << "Q" << Q
               << "V1" << validROI[0] << "V2" << validROI[1];
            fs.release();
        }
    }

    cv::Mat canvas;
    double sf;
    int w, h;

    sf = 600. / MAX(lefts[0].size().width, lefts[0].size().height);
    w = cvRound(lefts[0].size().width * sf);
    h = cvRound(lefts[0].size().height * sf);
    canvas.create(h, w * 2, CV_8UC3);

    cv::namedWindow("Rectified", CV_WINDOW_AUTOSIZE | CV_WINDOW_FREERATIO);

    for (int i = 0; i < FLAGS_size; i++) {
        for (int k = 0; k < 2; k++) {
            cv::Mat img = (k == 0 ? lefts[i] : rights[i]).clone(), rimg, cimg;
            cv::remap(img, rimg, rmap[k][0], rmap[k][1], CV_INTER_LINEAR);
            cv::cvtColor(rimg, cimg, CV_GRAY2BGR);
            cv::Mat canvasPart = canvas(cv::Rect(w * k, 0, w, h));
            cv::resize(cimg, canvasPart, canvasPart.size(), 0, 0, CV_INTER_AREA);

            cv::Rect vroi(cvRound(validROI[k].x * sf),
                          cvRound(validROI[k].y * sf),
                          cvRound(validROI[k].width * sf),
                          cvRound(validROI[k].height * sf));
            cv::rectangle(canvasPart, vroi, cv::Scalar(0, 0, 255), 3, 8);
        }

        for (int j = 0; j < canvas.rows; j += 16)
            cv::line(canvas, cv::Point(0, j), cv::Point(canvas.cols, j),
                     cv::Scalar(0, 255, 0), 1, 8);

        cv::imshow("Rectified", canvas);

        if (cv::waitKey(0) == 'q')
            break;
    }

    cv::destroyAllWindows();
    return 0;
}
void stereo_vision::calibrating_get_corners(cv::Mat left,cv::Mat right)
{
  cv::cvtColor(left,left_gray,CV_RGB2GRAY);
  cv::cvtColor(right,right_gray,CV_RGB2GRAY);

  left_found= cv::findChessboardCorners(left_gray, settings.boardSize, pointBuf_left, CV_CALIB_CB_ADAPTIVE_THRESH | CV_CALIB_CB_FAST_CHECK | CV_CALIB_CB_NORMALIZE_IMAGE);
  right_found= cv::findChessboardCorners(right_gray, settings.boardSize, pointBuf_right, CV_CALIB_CB_ADAPTIVE_THRESH | CV_CALIB_CB_FAST_CHECK | CV_CALIB_CB_NORMALIZE_IMAGE);
  
  cv::drawChessboardCorners(left,settings.boardSize, pointBuf_left, left_found);
  cv::drawChessboardCorners(right,settings.boardSize, pointBuf_right, right_found);
    
  if((left_found)&&(right_found))
  {
    qDebug("Both found");
    cv::cornerSubPix(left_gray, pointBuf_left, cv::Size(11,11), cv::Size(-1,-1),cv::TermCriteria(CV_TERMCRIT_EPS + CV_TERMCRIT_ITER, 30, 0.1));
    cv::cornerSubPix(right_gray, pointBuf_right, cv::Size(11,11), cv::Size(-1,-1),cv::TermCriteria(CV_TERMCRIT_EPS + CV_TERMCRIT_ITER, 30, 0.1));
//     std::vector<std::vector<cv::Point3f> > imagePoints1(frames);
    imagePoints1.push_back(pointBuf_left);
    imagePoints2.push_back(pointBuf_right);
    
    frame_n++;
    
    emit calibration_progress((frame_n*100/frames));
    
    if(frame_n >= frames)
    {
     qDebug("Finished getting images, now starting calculations");
     emit(calibration_running(0));
    objectPoints.resize(frames);
    calcBoardCornerPositions(settings.boardSize, settings.squareSize, objectPoints);
    
    imageSize = left.size();
    qDebug("%d %d", imageSize.height, imageSize.width);
    cameraMatrix_left = cv::Mat::eye(3, 3, CV_64F);
    cameraMatrix_right = cv::Mat::eye(3, 3, CV_64F);
    imagePoints1.resize(frames);
    imagePoints2.resize(frames);
    double rms = stereoCalibrate(objectPoints,imagePoints1,imagePoints2, cameraMatrix_left, distCoeffs_left,
                    cameraMatrix_right, distCoeffs_right, imageSize, R, T, E, F, cv::TermCriteria(CV_TERMCRIT_ITER+CV_TERMCRIT_EPS, 100, 1e-5),CV_CALIB_FIX_ASPECT_RATIO +
                    CV_CALIB_ZERO_TANGENT_DIST +
                    CV_CALIB_SAME_FOCAL_LENGTH +
                    CV_CALIB_RATIONAL_MODEL +
                    CV_CALIB_FIX_K3 + CV_CALIB_FIX_K4 + CV_CALIB_FIX_K5);
    qDebug("Kalibravimo rezultatas %f",rms);
    cv::stereoRectify(cameraMatrix_left, distCoeffs_left, cameraMatrix_right, distCoeffs_right, imageSize, R, T, R1, R2, P1, P2, Q, CV_CALIB_ZERO_DISPARITY, 0, imageSize, &roi_left, &roi_right );
    bm.state->roi1 = roi_left;
    bm.state->roi2 = roi_right;
    cv::initUndistortRectifyMap(cameraMatrix_left, distCoeffs_left, R1, P1, imageSize, CV_16SC2, rmap_left[0], rmap_left[1]);
    cv::initUndistortRectifyMap(cameraMatrix_right, distCoeffs_right, R2, P2, imageSize, CV_16SC2, rmap_right[0], rmap_right[1]);
/*    
    double err = 0;
    int npoints = 0;
    
    std::vector<cv::Vec3f> lines1;
    std::vector<cv::Vec3f> lines2;
    
    for( int i = 0; i < frames; i++ )
    {
        int npt = (int)imagePoints1[i].size();
        cv::Mat imgpt1,imgpt2;
            
	imgpt1 = cv::Mat(imagePoints1[i]);
        cv::undistortPoints(imgpt1, imgpt1, cameraMatrix_left, distCoeffs_left, cv::Mat(), cameraMatrix_left);
        cv::computeCorrespondEpilines(imgpt1, 1, F, lines1);
	
        imgpt2 = cv::Mat(imagePoints2[i]);
        cv::undistortPoints(imgpt2, imgpt2, cameraMatrix_right, distCoeffs_right, cv::Mat(), cameraMatrix_right);
        cv::computeCorrespondEpilines(imgpt2, 2, F, lines2);
        for( int j = 0; j < npt; j++ )
        {
            double errij = fabs(imagePoints1[i][j].x*lines2[j][0] +
                                imagePoints1[i][j].y*lines2[j][1] + lines2[j][2]) +
                           fabs(imagePoints2[i][j].x*lines1[j][0] +
                                imagePoints2[i][j].y*lines1[j][1] + lines1[j][2]);
            err += errij;
        }
        npoints += npt;
    }
    
    qDebug("Reprojection error: %d", err/npoints); << "average reprojection err = " <<  err/npoints << endl;
    }
*/
      mode = CALIBRATED;
    }
  }
}
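Once calibrating_get_corners() has filled rmap_left/rmap_right and configured bm, the per-frame processing might look like the hedged sketch below (using the OpenCV 2.4-style StereoBM call that matches the bm.state usage above; the local variable names are illustrative).

  // Sketch: rectify a live grayscale pair and compute a disparity map with StereoBM.
  cv::Mat left_rect, right_rect, disparity;
  cv::remap(left_gray, left_rect, rmap_left[0], rmap_left[1], cv::INTER_LINEAR);
  cv::remap(right_gray, right_rect, rmap_right[0], rmap_right[1], cv::INTER_LINEAR);
  bm(left_rect, right_rect, disparity, CV_16S);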