// Verifies fisheye intrinsic calibration on the 34-image
// "calib-3_stereo_from_JY" dataset: calibrates from the stored
// corner/object correspondences and compares the recovered camera matrix
// and distortion coefficients against the fixture's ground truth
// (this->K, this->D) to 1e-10.
TEST_F(fisheyeTest, Calibration)
{
    const int n_images = 34;
    std::vector<std::vector<cv::Point2d> > imagePoints(n_images);
    std::vector<std::vector<cv::Point3d> > objectPoints(n_images);
    const std::string folder = combine(datasets_repository_path, "calib-3_stereo_from_JY");

    // Detected chessboard corners (left camera), one "image_%d" node per view.
    cv::FileStorage fs_left(combine(folder, "left.xml"), cv::FileStorage::READ);
    CV_Assert(fs_left.isOpened());
    for(int i = 0; i < n_images; ++i)
        fs_left[cv::format("image_%d", i)] >> imagePoints[i];
    fs_left.release();

    // Matching 3D board points.
    cv::FileStorage fs_object(combine(folder, "object.xml"), cv::FileStorage::READ);
    CV_Assert(fs_object.isOpened());
    for(int i = 0; i < n_images; ++i)
        fs_object[cv::format("image_%d", i)] >> objectPoints[i];
    fs_object.release();

    int flag = 0;
    flag |= cv::fisheye::CALIB_RECOMPUTE_EXTRINSIC;
    flag |= cv::fisheye::CALIB_CHECK_COND;
    flag |= cv::fisheye::CALIB_FIX_SKEW;

    cv::Matx33d theK;
    cv::Vec4d theD;
    // Named criteria type instead of the magic constant 3
    // (COUNT + EPS == 1 + 2 == 3, so behavior is unchanged).
    cv::fisheye::calibrate(objectPoints, imagePoints, imageSize, theK, theD,
                           cv::noArray(), cv::noArray(), flag,
                           cv::TermCriteria(cv::TermCriteria::COUNT + cv::TermCriteria::EPS, 20, 1e-6));

    EXPECT_MAT_NEAR(theK, this->K, 1e-10);
    EXPECT_MAT_NEAR(theD, this->D, 1e-10);
}
// Exercises the internal homography machinery used by fisheye calibration:
// one view's points are normalized, a planar pose (theR, theT) is derived
// from the SVD of the object-point covariance, a homography is fitted, and
// the reprojection residual's standard deviation is compared to a stored
// reference value.
TEST_F(fisheyeTest, Homography)
{
    const int n_images = 1;
    std::vector<std::vector<cv::Point2d> > imagePoints(n_images);
    std::vector<std::vector<cv::Point3d> > objectPoints(n_images);
    const std::string folder =combine(datasets_repository_path, "calib-3_stereo_from_JY");

    // Load detected corners for the first image only.
    cv::FileStorage fs_left(combine(folder, "left.xml"), cv::FileStorage::READ);
    CV_Assert(fs_left.isOpened());
    for(int i = 0; i < n_images; ++i)
        fs_left[cv::format("image_%d", i )] >> imagePoints[i];
    fs_left.release();

    // Load the matching 3D board points.
    cv::FileStorage fs_object(combine(folder, "object.xml"), cv::FileStorage::READ);
    CV_Assert(fs_object.isOpened());
    for(int i = 0; i < n_images; ++i)
        fs_object[cv::format("image_%d", i )] >> objectPoints[i];
    fs_object.release();

    // Initial-guess intrinsics: focal length max(w,h)/pi for both axes,
    // principal point at the image center.
    cv::internal::IntrinsicParams param;
    param.Init(cv::Vec2d(cv::max(imageSize.width, imageSize.height) / CV_PI, cv::max(imageSize.width, imageSize.height) / CV_PI),
               cv::Vec2d(imageSize.width / 2.0 - 0.5, imageSize.height / 2.0 - 0.5));

    cv::Mat _imagePoints (imagePoints[0]);
    cv::Mat _objectPoints(objectPoints[0]);

    // Switch to normalized image coordinates, points stored one per column.
    cv::Mat imagePointsNormalized = NormalizePixels(_imagePoints, param).reshape(1).t();
    _objectPoints = _objectPoints.reshape(1).t();
    cv::Mat objectPointsMean, covObjectPoints;
    int Np = imagePointsNormalized.cols;
    cv::calcCovarMatrix(_objectPoints, covObjectPoints, objectPointsMean, cv::COVAR_NORMAL | cv::COVAR_COLS);
    cv::SVD svd(covObjectPoints);

    // svd.vt orients the board plane; if its third column's first two rows are
    // (numerically) zero the plane is already z-aligned, so use the identity.
    cv::Mat theR(svd.vt);
    if (cv::norm(theR(cv::Rect(2, 0, 1, 2))) < 1e-6)
        theR = cv::Mat::eye(3,3, CV_64FC1);
    if (cv::determinant(theR) < 0)
        theR = -theR;  // keep a proper (right-handed) rotation
    cv::Mat theT = -theR * objectPointsMean;
    cv::Mat X_new = theR * _objectPoints + theT * cv::Mat::ones(1, Np, CV_64FC1);

    // Fit the homography between normalized image points and the rotated
    // planar object points, then reproject through it.
    cv::Mat H = cv::internal::ComputeHomography(imagePointsNormalized, X_new.rowRange(0, 2));
    cv::Mat M = cv::Mat::ones(3, X_new.cols, CV_64FC1);
    X_new.rowRange(0, 2).copyTo(M.rowRange(0, 2));
    cv::Mat mrep = H * M;
    cv::divide(mrep, cv::Mat::ones(3,1, CV_64FC1) * mrep.row(2).clone(), mrep);  // de-homogenize
    cv::Mat merr = (mrep.rowRange(0, 2) - imagePointsNormalized).t();

    // Bessel-corrected standard deviation of the residuals must match the
    // stored reference to 1e-12.
    cv::Vec2d std_err;
    cv::meanStdDev(merr.reshape(2), cv::noArray(), std_err);
    std_err *= sqrt((double)merr.reshape(2).total() / (merr.reshape(2).total() - 1));
    cv::Vec2d correct_std_err(0.00516740156010384, 0.00644205331553901);
    EXPECT_MAT_NEAR(std_err, correct_std_err, 1e-12);
}
// Returns every tracked landmark position, converted to OpenCV's Point2f.
vector<cv::Point2f> ofxFaceTracker2Landmarks::getCvImagePoints() const {
    const int numParts = shape.num_parts();
    vector<cv::Point2f> points;
    points.reserve(numParts);
    for(int idx = 0; idx < numParts; idx++) {
        points.push_back(ofxCv::toCv(getImagePoint(idx)));
    }
    return points;
}
// Returns every tracked landmark position as an openFrameworks 2D vector.
vector<ofVec2f> ofxFaceTracker2Landmarks::getImagePoints() const {
    const int numParts = shape.num_parts();
    vector<ofVec2f> points;
    points.reserve(numParts);
    for(int idx = 0; idx < numParts; idx++) {
        points.push_back(getImagePoint(idx));
    }
    return points;
}
// Regression test for cv::internal::EstimateUncertainties: calibrates the
// 34-image dataset, then checks the reported focal/center/distortion
// uncertainties, the per-axis pixel standard deviation, and the RMS error
// against stored reference values.
// NOTE: the registered name keeps its historical "Etimate" spelling so that
// existing test filters continue to match.
TEST_F(fisheyeTest, EtimateUncertainties)
{
    const int n_images = 34;
    const std::string folder = combine(datasets_repository_path, "calib-3_stereo_from_JY");

    std::vector<std::vector<cv::Point2d> > imagePoints(n_images);
    std::vector<std::vector<cv::Point3d> > objectPoints(n_images);

    // Per-image detected corners (left camera).
    cv::FileStorage fs_left(combine(folder, "left.xml"), cv::FileStorage::READ);
    CV_Assert(fs_left.isOpened());
    for (int idx = 0; idx < n_images; ++idx)
        fs_left[cv::format("image_%d", idx)] >> imagePoints[idx];
    fs_left.release();

    // Per-image 3D board coordinates.
    cv::FileStorage fs_object(combine(folder, "object.xml"), cv::FileStorage::READ);
    CV_Assert(fs_object.isOpened());
    for (int idx = 0; idx < n_images; ++idx)
        fs_object[cv::format("image_%d", idx)] >> objectPoints[idx];
    fs_object.release();

    const int flag = cv::fisheye::CALIB_RECOMPUTE_EXTRINSIC
                   | cv::fisheye::CALIB_CHECK_COND
                   | cv::fisheye::CALIB_FIX_SKEW;

    cv::Matx33d theK;
    cv::Vec4d theD;
    std::vector<cv::Vec3d> rvec;
    std::vector<cv::Vec3d> tvec;
    cv::fisheye::calibrate(objectPoints, imagePoints, imageSize, theK, theD,
                           rvec, tvec, flag,
                           cv::TermCriteria(3, 20, 1e-6));  // 3 == COUNT + EPS

    cv::internal::IntrinsicParams param, errors;
    cv::Vec2d err_std;
    double thresh_cond = 1e6;
    int check_cond = 1;
    param.Init(cv::Vec2d(theK(0,0), theK(1,1)), cv::Vec2d(theK(0,2), theK(1,2)), theD);
    param.isEstimate = std::vector<uchar>(9, 1);
    param.isEstimate[4] = 0;  // skew (alpha) stays fixed, matching CALIB_FIX_SKEW
    errors.isEstimate = param.isEstimate;

    double rms;
    cv::internal::EstimateUncertainties(objectPoints, imagePoints, param, rvec, tvec,
                                        errors, err_std, thresh_cond, check_cond, rms);

    EXPECT_MAT_NEAR(errors.f, cv::Vec2d(1.29837104202046, 1.31565641071524), 1e-10);
    EXPECT_MAT_NEAR(errors.c, cv::Vec2d(0.890439368129246, 0.816096854937896), 1e-10);
    EXPECT_MAT_NEAR(errors.k, cv::Vec4d(0.00516248605191506, 0.0168181467500934, 0.0213118690274604, 0.00916010877545648), 1e-10);
    EXPECT_MAT_NEAR(err_std, cv::Vec2d(0.187475975266883, 0.185678953263995), 1e-10);
    CV_Assert(fabs(rms - 0.263782587133546) < 1e-10);
    CV_Assert(errors.alpha == 0);
}
// Calibrates a single camera from a set of chessboard images listed in the
// configuration, filling cameraMatrix and distCoeffs (outputs). Shows each
// processed image in a window; terminates the process via exit(-1) if any
// board is not fully detected.
void calibrateProcess(Configure& conf, cv::Mat& cameraMatrix, cv::Mat& distCoeffs) {
    // Read frame geometry, data location and checker layout from the config.
    const cv::Size frameSize(conf.getConfInt("universal.frameWidth"), conf.getConfInt("universal.frameHeight"));
    const cv::string dataPath(conf.getConfString("universal.dataPath"));
    const std::string checkerPrefix (conf.getConfString("calibration.checkerPrefix"));
    const std::string checkerSuffix (conf.getConfString("calibration.checkerSuffix"));
    const int checkerNum = conf.getConfInt("calibration.checkerNum");
    const cv::Size checkerSize(conf.getConfInt("calibration.checkerWidth"), conf.getConfInt("calibration.checkerHeight"));
    cv::vector<cv::Mat> checkerImgs;
    cv::vector<cv::vector<cv::Point3f>> worldPoints(checkerNum);
    cv::vector<cv::vector<cv::Point2f>> imagePoints(checkerNum);
    // Termination criteria for the sub-pixel corner refinement below.
    cv::TermCriteria criteria(CV_TERMCRIT_ITER|CV_TERMCRIT_EPS, 20, 0.001);
    cv::vector<cv::Mat> rotationVectors;
    cv::vector<cv::Mat> translationVectors;

    // Load every checker image ("<prefix><1..N><suffix>") as grayscale and
    // resize it to the configured frame size.
    for(int i=0; i<checkerNum; i++){
        std::stringstream stream;
        stream << checkerPrefix << i+1 << checkerSuffix;
        std::string fileName = stream.str();
        cv::Mat tmp = cv::imread(dataPath + fileName, 0);
        cv::resize(tmp, tmp, frameSize);
        checkerImgs.push_back(tmp);
        std::cout << "load checker image: " << fileName << std::endl;
    }

    cv::namedWindow("Source", CV_WINDOW_AUTOSIZE|CV_WINDOW_FREERATIO);
    // Detect and refine chessboard corners in each image; a single failed
    // detection aborts the whole process.
    for(int i=0; i<checkerNum; i++){
        std::cout << "find corners from image " << i+1;
        if(cv::findChessboardCorners(checkerImgs[i], checkerSize, imagePoints[i])){
            std::cout << " ... all corners found." << std::endl;
            cv::cornerSubPix(checkerImgs[i], imagePoints[i], cv::Size(5, 5), cv::Size(-1, -1), criteria);
            cv::drawChessboardCorners(checkerImgs[i], checkerSize, (cv::Mat)(imagePoints[i]), true);
            cv::imshow("Source", checkerImgs[i]);
            cv::waitKey(200);
        }else{
            std::cout << " ... at least 1 corner not found." << std::endl;
            cv::waitKey(0);
            exit(-1);  // hard abort on detection failure
        }
    }
    cv::destroyWindow("Source");

    // Synthesize the 3D board points on the z=0 plane; the factor 10 is the
    // square size (presumably millimeters — confirm against the board used).
    for(int i=0; i<checkerNum; i++){
        for(int j=0; j<checkerSize.area(); j++){
            worldPoints[i].push_back(cv::Point3f(static_cast<float>(j%checkerSize.width*10), static_cast<float>(j/checkerSize.width*10), 0.0));
        }
    }

    cv::calibrateCamera(worldPoints, imagePoints, frameSize, cameraMatrix, distCoeffs, rotationVectors, translationVectors);
    std::cout << "camera matrix" << std::endl;
    std::cout << cameraMatrix << std::endl;
    std::cout << "dist coeffs" << std::endl;
    std::cout << distCoeffs << std::endl;
}
/**
 * Matches the detected markers against the board configuration and, when
 * camera parameters and a marker size are supplied, estimates the board's
 * pose (written into Bdetected.Rvec / Bdetected.Tvec via the legacy C API).
 * Returns the fraction of the board's markers that were found.
 */
float BoardDetector::detect(const vector<Marker> &detectedMarkers,const BoardConfiguration &BConf, Board &Bdetected, Mat camMatrix,Mat distCoeff,float markerSizeMeters)throw (cv::Exception)
{
    ///find among detected markers these that belong to the board configuration
    Mat detected(BConf._markersId.size(),CV_32SC1); // per board cell: index into detectedMarkers
    detected.setTo(Scalar(-1)); // -1 means not detected
    int nMarkInBoard=0; // total number of board markers detected
    for (unsigned int i=0;i<detectedMarkers.size();i++)
    {
        bool found=false;
        int id=detectedMarkers[i].id;
        // locate this marker id in the board's id grid
        for ( int j=0;j<detected.size().height && ! found;j++)
            for ( int k=0;k<detected.size().width && ! found;k++)
                if ( BConf._markersId.at<int>(j,k)==id)
                {
                    detected.at<int>(j,k)=i;
                    nMarkInBoard++;
                    found=true;
                    Bdetected.push_back(detectedMarkers[i]);
                    if (markerSizeMeters>0)
                        Bdetected.back().ssize=markerSizeMeters;
                }
    }
    Bdetected.conf=BConf;
    if (markerSizeMeters!=-1)
        Bdetected.markerSizeMeters=markerSizeMeters;

    // calculate extrinsics only if camera info and marker size are available
    if (camMatrix.rows!=0 && markerSizeMeters>0 && detectedMarkers.size()>1)
    {
        // build the 3D<->2D correspondences: 4 corners per detected marker
        Mat objPoints(4*nMarkInBoard,3,CV_32FC1);
        Mat imagePoints(4*nMarkInBoard,2,CV_32FC1);
        // inter-marker distance in meters, scaled from the pixel layout
        double markerDistanceMeters= double(BConf._markerDistancePix) * markerSizeMeters / double(BConf._markerSizePix);
        int currIndex=0;
        for ( int y=0;y<detected.size().height;y++)
            for ( int x=0;x<detected.size().width;x++)
            {
                if ( detected.at<int>(y,x)!=-1 )
                {
                    vector<Point2f> points =detectedMarkers[ detected.at<int>(y,x) ];
                    // image points: the marker's 4 detected corners
                    for (int p=0;p<4;p++)
                    {
                        imagePoints.at<float>(currIndex+p,0)=points[p].x;
                        imagePoints.at<float>(currIndex+p,1)=points[p].y;
                    }
                    // translation that moves the reference system to the board center
                    float TX=-( ((detected.size().height-1)*(markerDistanceMeters+markerSizeMeters) +markerSizeMeters) /2) ;
                    float TY=-( ((detected.size().width-1)*(markerDistanceMeters+markerSizeMeters) +markerSizeMeters)/2);
                    // marker corner coordinates in the board reference system (z = 0 plane)
                    float AY=x*(markerDistanceMeters+markerSizeMeters ) +TY;
                    float AX=y*(markerDistanceMeters+markerSizeMeters )+TX;
                    objPoints.at<float>( currIndex,0)= AX;
                    objPoints.at<float>( currIndex,1)= AY;
                    objPoints.at<float>( currIndex,2)= 0;
                    objPoints.at<float>( currIndex+1,0)= AX;
                    objPoints.at<float>( currIndex+1,1)= AY+markerSizeMeters;
                    objPoints.at<float>( currIndex+1,2)= 0;
                    objPoints.at<float>( currIndex+2,0)= AX+markerSizeMeters;
                    objPoints.at<float>( currIndex+2,1)= AY+markerSizeMeters;
                    objPoints.at<float>( currIndex+2,2)= 0;
                    objPoints.at<float>( currIndex+3,0)= AX+markerSizeMeters;
                    objPoints.at<float>( currIndex+3,1)= AY;
                    objPoints.at<float>( currIndex+3,2)= 0;
                    currIndex+=4;
                }
            }

        // Solve the pose with the legacy C API; distortion defaults to zeros
        // when fewer than 4 coefficients are provided.
        CvMat cvCamMatrix=camMatrix;
        CvMat cvDistCoeffs;
        Mat zeros=Mat::zeros(4,1,CV_32FC1);
        if (distCoeff.rows>=4)
            cvDistCoeffs=distCoeff;
        else
            cvDistCoeffs=zeros;
        CvMat cvImgPoints=imagePoints;
        CvMat cvObjPoints=objPoints;
        CvMat cvRvec=Bdetected.Rvec;
        CvMat cvTvec=Bdetected.Tvec;
        cvFindExtrinsicCameraParams2(&cvObjPoints, &cvImgPoints, &cvCamMatrix, &cvDistCoeffs,&cvRvec,&cvTvec);
        //now, rotate 90 deg in X so that Y axis points up
        rotateXAxis(Bdetected.Rvec);
    }
    return double(nMarkInBoard)/double( BConf._markersId.size().width*BConf._markersId.size().height);
}
// Stereo camera calibration pipeline: loads left/right image pairs, finds
// chessboard corners, calibrates the stereo rig, reports the average
// epipolar reprojection error, rectifies, writes intrinsics/extrinsics to
// file storage, and displays the rectified pairs until 'q' is pressed.
int main(int argc, char *argv[]) {
    gflags::ParseCommandLineFlags(&argc, &argv, true);
    cv::vector<cv::Mat> lefts, rights;
    const cv::Size patternSize(9, 6);  // inner chessboard corners
    cv::vector<cv::vector<cv::Point3f>> worldPoints;
    cv::vector<cv::vector<cv::vector<cv::Point2f>>> imagePoints(2);  // [camera][image][corner]
    for (size_t i = 0; i < 2; i++)
        imagePoints[i].resize(FLAGS_size);
    loadImages(lefts, rights, FLAGS_size);
    // FLAGS_size shrinks to the number of pairs with a detected board.
    FLAGS_size = findChessboards(lefts, rights, imagePoints, patternSize, FLAGS_size);
    std::cout << "number of correct files = " << FLAGS_size << std::endl;
    // 0.024 is presumably the square size in meters — confirm against setWorldPoints.
    setWorldPoints(worldPoints, patternSize, 0.024, FLAGS_size);

    std::cout << "calibrate stereo cameras" << std::endl;
    cv::vector<cv::Mat> cameraMatrix(2);
    cv::vector<cv::Mat> distCoeffs(2);
    cameraMatrix[0] = cv::Mat::eye(3, 3, CV_64FC1);
    cameraMatrix[1] = cv::Mat::eye(3, 3, CV_64FC1);
    distCoeffs[0] = cv::Mat(8, 1, CV_64FC1);
    distCoeffs[1] = cv::Mat(8, 1, CV_64FC1);
    cv::Mat R, T, E, F;
    double rms = stereoCalibrate(
        worldPoints, imagePoints[0], imagePoints[1],
        cameraMatrix[0], distCoeffs[0],
        cameraMatrix[1], distCoeffs[1],
        lefts[0].size(), R, T, E, F,
        cv::TermCriteria(CV_TERMCRIT_ITER + CV_TERMCRIT_EPS, 100, 1e-5),
        CV_CALIB_FIX_ASPECT_RATIO + CV_CALIB_ZERO_TANGENT_DIST + CV_CALIB_SAME_FOCAL_LENGTH + CV_CALIB_RATIONAL_MODEL + CV_CALIB_FIX_K3 + CV_CALIB_FIX_K4 + CV_CALIB_FIX_K5);
    std::cout << "done with RMS error = " << rms << std::endl;

    // Quality check: mean distance of each point to the epipolar line
    // computed from its counterpart in the other view.
    double err = 0;
    int npoints = 0;
    for (int i = 0; i < FLAGS_size; i++) {
        int size = (int) imagePoints[0][i].size();
        cv::vector<cv::Vec3f> lines[2];
        cv::Mat imgpt[2];
        for (int k = 0; k < 2; k++) {
            imgpt[k] = cv::Mat(imagePoints[k][i]);
            cv::undistortPoints(imgpt[k], imgpt[k], cameraMatrix[k], distCoeffs[k], cv::Mat(), cameraMatrix[k]);
            cv::computeCorrespondEpilines(imgpt[k], k + 1, F, lines[k]);
        }
        for (int j = 0; j < size; j++) {
            double errij = std::fabs(imagePoints[0][i][j].x * lines[1][j][0] + imagePoints[0][i][j].y * lines[1][j][1] + lines[1][j][2]) +
                           std::fabs(imagePoints[1][i][j].x * lines[0][j][0] + imagePoints[1][i][j].y * lines[0][j][1] + lines[0][j][2]);
            err += errij;
        }
        npoints += size;
    }
    std::cout << "average reprojection error = " << err / npoints << std::endl;

    // Rectification transforms and valid pixel regions.
    cv::Mat R1, R2, P1, P2, Q;
    cv::Rect validROI[2];
    stereoRectify(cameraMatrix[0], distCoeffs[0], cameraMatrix[1], distCoeffs[1], lefts[0].size(), R, T, R1, R2, P1, P2, Q, cv::CALIB_ZERO_DISPARITY, 1, lefts[0].size(), &validROI[0], &validROI[1]);

    // Persist intrinsics.
    {
        cv::FileStorage fs(FLAGS_intrinsics.c_str(), cv::FileStorage::WRITE);
        if (fs.isOpened()) {
            fs << "M1" << cameraMatrix[0] << "D1" << distCoeffs[0] << "M2" << cameraMatrix[1] << "D2" << distCoeffs[1];
            fs.release();
        }
    }

    // Precompute the per-camera undistort+rectify remap tables.
    cv::Mat rmap[2][2];
    cv::initUndistortRectifyMap(cameraMatrix[0], distCoeffs[0], R1, P1, lefts[0].size(), CV_16SC2, rmap[0][0], rmap[0][1]);
    cv::initUndistortRectifyMap(cameraMatrix[1], distCoeffs[1], R2, P2, lefts[0].size(), CV_16SC2, rmap[1][0], rmap[1][1]);

    // Persist extrinsics and rectification results.
    {
        cv::FileStorage fs(FLAGS_extrinsics.c_str(), cv::FileStorage::WRITE);
        if (fs.isOpened()) {
            fs << "R" << R << "T" << T << "R1" << R1 << "R2" << R2 << "P1" << P1 << "P2" << P2 << "Q" << Q << "V1" << validROI[0] << "V2" << validROI[1];
            fs.release();
        }
    }

    // Build a side-by-side canvas (scaled so the longer image edge is 600 px)
    // and show each rectified pair with the valid ROI and horizontal guides.
    cv::Mat canvas;
    double sf;
    int w, h;
    sf = 600. / MAX(lefts[0].size().width, lefts[0].size().height);
    w = cvRound(lefts[0].size().width * sf);
    h = cvRound(lefts[0].size().height * sf);
    canvas.create(h, w * 2, CV_8UC3);
    cv::namedWindow("Rectified", CV_WINDOW_AUTOSIZE | CV_WINDOW_FREERATIO);
    for (int i = 0; i < FLAGS_size; i++) {
        for (int k = 0; k < 2; k++) {
            if (k == 0) {
                // left camera -> left half of the canvas
                cv::Mat img = lefts[i].clone(), rimg, cimg;
                cv::remap(img, rimg, rmap[k][0], rmap[k][1], CV_INTER_LINEAR);
                cv::cvtColor(rimg, cimg, CV_GRAY2BGR);
                cv::Mat canvasPart = canvas(cv::Rect(w * k, 0, w, h));
                cv::resize(cimg, canvasPart, canvasPart.size(), 0, 0, CV_INTER_AREA);
                cv::Rect vroi(cvRound(validROI[k].x * sf), cvRound(validROI[k].y * sf), cvRound(validROI[k].width * sf), cvRound(validROI[k].height * sf));
                cv::rectangle(canvasPart, vroi, cv::Scalar(0, 0, 255), 3, 8);
            } else {
                // right camera -> right half of the canvas
                cv::Mat img = rights[i].clone(), rimg, cimg;
                cv::remap(img, rimg, rmap[k][0], rmap[k][1], CV_INTER_LINEAR);
                cvtColor(rimg, cimg, CV_GRAY2BGR);
                cv::Mat canvasPart = canvas(cv::Rect(w * k, 0, w, h));
                cv::resize(cimg, canvasPart, canvasPart.size(), 0, 0, CV_INTER_AREA);
                cv::Rect vroi(cvRound(validROI[k].x * sf), cvRound(validROI[k].y * sf), cvRound(validROI[k].width * sf), cvRound(validROI[k].height * sf));
                cv::rectangle(canvasPart, vroi, cv::Scalar(0, 0, 255), 3, 8);
            }
        }
        // horizontal lines: on a good rectification, matching features lie on them
        for (int j = 0; j < canvas.rows; j += 16)
            cv::line(canvas, cv::Point(0, j), cv::Point(canvas.cols, j), cv::Scalar(0, 255, 0), 1, 8);
        cv::imshow("Rectified", canvas);
        if (cv::waitKey(0) == 'q')
            break;
    }
    cv::destroyAllWindows();
    return 0;
}