// Regression test: a full fisheye intrinsic calibration on the
// "calib-3_stereo_from_JY" dataset must reproduce the fixture's reference
// camera matrix K and distortion vector D to within 1e-10.
TEST_F(fisheyeTest, Calibration)
{
    const int n_images = 34;

    std::vector<std::vector<cv::Point2d> > imagePoints(n_images);
    std::vector<std::vector<cv::Point3d> > objectPoints(n_images);

    const std::string folder = combine(datasets_repository_path, "calib-3_stereo_from_JY");

    // Load the pre-detected 2D corners for each of the 34 views.
    cv::FileStorage fs_left(combine(folder, "left.xml"), cv::FileStorage::READ);
    CV_Assert(fs_left.isOpened());
    for(int i = 0; i < n_images; ++i)
        fs_left[cv::format("image_%d", i )] >> imagePoints[i];
    fs_left.release();

    // Load the matching 3D board coordinates for each view.
    cv::FileStorage fs_object(combine(folder, "object.xml"), cv::FileStorage::READ);
    CV_Assert(fs_object.isOpened());
    for(int i = 0; i < n_images; ++i)
        fs_object[cv::format("image_%d", i )] >> objectPoints[i];
    fs_object.release();

    int flag = 0;
    flag |= cv::fisheye::CALIB_RECOMPUTE_EXTRINSIC;
    flag |= cv::fisheye::CALIB_CHECK_COND;
    flag |= cv::fisheye::CALIB_FIX_SKEW;

    cv::Matx33d theK;
    cv::Vec4d theD;

    // Extrinsics are not requested (noArray); TermCriteria type 3 == COUNT+EPS.
    cv::fisheye::calibrate(objectPoints, imagePoints, imageSize, theK, theD,
                           cv::noArray(), cv::noArray(), flag, cv::TermCriteria(3, 20, 1e-6));

    // Compare against the reference intrinsics stored on the test fixture.
    EXPECT_MAT_NEAR(theK, this->K, 1e-10);
    EXPECT_MAT_NEAR(theD, this->D, 1e-10);
}
// Regression test for the homography-estimation step used to seed fisheye
// calibration: normalizes one view's pixels, fits a plane-to-image homography,
// and checks the sample std-dev of the reprojection residuals against stored
// reference values.
TEST_F(fisheyeTest, Homography)
{
    const int n_images = 1;

    std::vector<std::vector<cv::Point2d> > imagePoints(n_images);
    std::vector<std::vector<cv::Point3d> > objectPoints(n_images);

    const std::string folder = combine(datasets_repository_path, "calib-3_stereo_from_JY");

    cv::FileStorage fs_left(combine(folder, "left.xml"), cv::FileStorage::READ);
    CV_Assert(fs_left.isOpened());
    for(int i = 0; i < n_images; ++i)
        fs_left[cv::format("image_%d", i )] >> imagePoints[i];
    fs_left.release();

    cv::FileStorage fs_object(combine(folder, "object.xml"), cv::FileStorage::READ);
    CV_Assert(fs_object.isOpened());
    for(int i = 0; i < n_images; ++i)
        fs_object[cv::format("image_%d", i )] >> objectPoints[i];
    fs_object.release();

    // Seed intrinsics: focal length from a 180-degree FOV guess (max dim / pi),
    // principal point at the image center.
    cv::internal::IntrinsicParams param;
    param.Init(cv::Vec2d(cv::max(imageSize.width, imageSize.height) / CV_PI,
                         cv::max(imageSize.width, imageSize.height) / CV_PI),
               cv::Vec2d(imageSize.width  / 2.0 - 0.5,
                         imageSize.height / 2.0 - 0.5));

    cv::Mat _imagePoints (imagePoints[0]);
    cv::Mat _objectPoints(objectPoints[0]);

    // Work with 2xN / 3xN matrices (points as columns).
    cv::Mat imagePointsNormalized = NormalizePixels(_imagePoints, param).reshape(1).t();
    _objectPoints = _objectPoints.reshape(1).t();
    cv::Mat objectPointsMean, covObjectPoints;

    int Np = imagePointsNormalized.cols;
    // Principal axes of the board points give an initial rotation estimate.
    cv::calcCovarMatrix(_objectPoints, covObjectPoints, objectPointsMean, cv::COVAR_NORMAL | cv::COVAR_COLS);
    cv::SVD svd(covObjectPoints);
    cv::Mat theR(svd.vt);

    // Degenerate (planar-aligned) case falls back to identity; keep the
    // rotation proper (det > 0).
    if (cv::norm(theR(cv::Rect(2, 0, 1, 2))) < 1e-6)
        theR = cv::Mat::eye(3,3, CV_64FC1);
    if (cv::determinant(theR) < 0)
        theR = -theR;

    // Express board points in the estimated board frame.
    cv::Mat theT = -theR * objectPointsMean;
    cv::Mat X_new = theR * _objectPoints + theT * cv::Mat::ones(1, Np, CV_64FC1);
    cv::Mat H = cv::internal::ComputeHomography(imagePointsNormalized, X_new.rowRange(0, 2));

    // Reproject through H (homogeneous divide) and compute the residuals.
    cv::Mat M = cv::Mat::ones(3, X_new.cols, CV_64FC1);
    X_new.rowRange(0, 2).copyTo(M.rowRange(0, 2));

    cv::Mat mrep = H * M;
    cv::divide(mrep, cv::Mat::ones(3,1, CV_64FC1) * mrep.row(2).clone(), mrep);
    cv::Mat merr = (mrep.rowRange(0, 2) - imagePointsNormalized).t();

    cv::Vec2d std_err;
    cv::meanStdDev(merr.reshape(2), cv::noArray(), std_err);
    // Convert the population std-dev reported by meanStdDev to the sample
    // (N-1) estimate.
    std_err *= sqrt((double)merr.reshape(2).total() / (merr.reshape(2).total() - 1));

    cv::Vec2d correct_std_err(0.00516740156010384, 0.00644205331553901);
    EXPECT_MAT_NEAR(std_err, correct_std_err, 1e-12);
}
bool CameraCalibration::runCalibration( Settings& s, Size& imageSize, Mat& cameraMatrix, Mat& distCoeffs, vector<vector<Point2f> > imagePoints, vector<Mat>& rvecs, vector<Mat>& tvecs, vector<float>& reprojErrs, double& totalAvgErr ) { cameraMatrix = Mat::eye( 3, 3, CV_64F ); if ( s.flag & CALIB_FIX_ASPECT_RATIO ) { cameraMatrix.at<double>( 0, 0 ) = 1.0; } distCoeffs = Mat::zeros( 8, 1, CV_64F ); vector<vector<Point3f> > objectPoints( 1 ); calcBoardCornerPositions( s.boardSize, s.squareSize, objectPoints[0], s.calibrationPattern ); objectPoints.resize( imagePoints.size(), objectPoints[0] ); // Find intrinsic and extrinsic camera parameters double rms = calibrateCamera( objectPoints, imagePoints, imageSize, cameraMatrix, distCoeffs, rvecs, tvecs, s.flag | CALIB_FIX_K4 | CALIB_FIX_K5 ); cout << "Re-projection error reported by calibrateCamera: " << rms << endl; bool ok = checkRange( cameraMatrix ) && checkRange( distCoeffs ); totalAvgErr = computeReprojectionErrors( objectPoints, imagePoints, rvecs, tvecs, cameraMatrix, distCoeffs, reprojErrs ); return( ok ); }
static int runCalibration( vector<vector<Point2f> > imagePoints, Size imageSize, Size boardSize, Pattern patternType, float squareSize, float aspectRatio, int flags, Mat& cameraMatrix, Mat& distCoeffs, vector<Mat>& rvecs, vector<Mat>& tvecs, vector<float>& reprojErrs, double& totalAvgErr) { cameraMatrix = Mat::eye(3, 3, CV_64F); if( flags & CV_CALIB_FIX_ASPECT_RATIO ) cameraMatrix.at<double>(0,0) = aspectRatio; distCoeffs = Mat::zeros(8, 1, CV_64F); vector<vector<Point3f> > objectPoints(1); calcChessboardCorners(boardSize, squareSize, objectPoints[0], patternType); objectPoints.resize(imagePoints.size(),objectPoints[0]); double rms = calibrateCamera(objectPoints, imagePoints, imageSize, cameraMatrix, distCoeffs, rvecs, tvecs, flags|CV_CALIB_FIX_K4|CV_CALIB_FIX_K5); ///*|CV_CALIB_FIX_K3*/|CV_CALIB_FIX_K4|CV_CALIB_FIX_K5); printf("RMS error reported by calibrateCamera: %g\n", rms); bool ok = checkRange(cameraMatrix) && checkRange(distCoeffs); if(ok == false) return -1; totalAvgErr = computeReprojectionErrors(objectPoints, imagePoints, rvecs, tvecs, cameraMatrix, distCoeffs, reprojErrs); return 0; }
void stereoCalibThread::monoCalibration(const vector<string>& imageList, int boardWidth, int boardHeight, Mat &K, Mat &Dist) { vector<vector<Point2f> > imagePoints; Size boardSize, imageSize; boardSize.width=boardWidth; boardSize.height=boardHeight; int flags=0; int i; float squareSize = 1.f, aspectRatio = 1.f; Mat view, viewGray; for(i = 0; i<(int)imageList.size();i++) { view = cv::imread(imageList[i], 1); imageSize = view.size(); vector<Point2f> pointbuf; cvtColor(view, viewGray, CV_BGR2GRAY); bool found = false; if(boardType == "CIRCLES_GRID") { found = findCirclesGridDefault(view, boardSize, pointbuf, CALIB_CB_SYMMETRIC_GRID | CALIB_CB_CLUSTERING); } else if(boardType == "ASYMMETRIC_CIRCLES_GRID") { found = findCirclesGridDefault(view, boardSize, pointbuf, CALIB_CB_ASYMMETRIC_GRID | CALIB_CB_CLUSTERING); } else { found = findChessboardCorners( view, boardSize, pointbuf, CV_CALIB_CB_ADAPTIVE_THRESH | CV_CALIB_CB_FAST_CHECK | CV_CALIB_CB_NORMALIZE_IMAGE); } if(found) { drawChessboardCorners( view, boardSize, Mat(pointbuf), found ); imagePoints.push_back(pointbuf); } } std::vector<Mat> rvecs, tvecs; std::vector<float> reprojErrs; double totalAvgErr = 0; K = Mat::eye(3, 3, CV_64F); if( flags & CV_CALIB_FIX_ASPECT_RATIO ) K.at<double>(0,0) = aspectRatio; Dist = Mat::zeros(4, 1, CV_64F); std::vector<std::vector<Point3f> > objectPoints(1); calcChessboardCorners(boardSize, squareSize, objectPoints[0]); objectPoints.resize(imagePoints.size(),objectPoints[0]); double rms = calibrateCamera(objectPoints, imagePoints, imageSize, K, Dist, rvecs, tvecs,CV_CALIB_FIX_K3); printf("RMS error reported by calibrateCamera: %g\n", rms); cout.flush(); }
// Regression test for cv::internal::EstimateUncertainties: calibrates on the
// JY dataset, then checks the estimated parameter uncertainties, the residual
// std-dev and the RMS against stored reference values.
// NOTE(review): the test name is missing an 's' ("EtimateUncertainties");
// renaming would change the registered gtest id, so it is left as-is here.
TEST_F(fisheyeTest, EtimateUncertainties)
{
    const int n_images = 34;

    std::vector<std::vector<cv::Point2d> > imagePoints(n_images);
    std::vector<std::vector<cv::Point3d> > objectPoints(n_images);

    const std::string folder = combine(datasets_repository_path, "calib-3_stereo_from_JY");

    cv::FileStorage fs_left(combine(folder, "left.xml"), cv::FileStorage::READ);
    CV_Assert(fs_left.isOpened());
    for(int i = 0; i < n_images; ++i)
        fs_left[cv::format("image_%d", i )] >> imagePoints[i];
    fs_left.release();

    cv::FileStorage fs_object(combine(folder, "object.xml"), cv::FileStorage::READ);
    CV_Assert(fs_object.isOpened());
    for(int i = 0; i < n_images; ++i)
        fs_object[cv::format("image_%d", i )] >> objectPoints[i];
    fs_object.release();

    int flag = 0;
    flag |= cv::fisheye::CALIB_RECOMPUTE_EXTRINSIC;
    flag |= cv::fisheye::CALIB_CHECK_COND;
    flag |= cv::fisheye::CALIB_FIX_SKEW;

    cv::Matx33d theK;
    cv::Vec4d theD;
    std::vector<cv::Vec3d> rvec;
    std::vector<cv::Vec3d> tvec;

    // Full calibration first; its outputs seed the uncertainty estimation.
    cv::fisheye::calibrate(objectPoints, imagePoints, imageSize, theK, theD,
                           rvec, tvec, flag, cv::TermCriteria(3, 20, 1e-6));

    cv::internal::IntrinsicParams param, errors;
    cv::Vec2d err_std;
    double thresh_cond = 1e6;
    int check_cond = 1;
    param.Init(cv::Vec2d(theK(0,0), theK(1,1)), cv::Vec2d(theK(0,2), theK(1, 2)), theD);
    param.isEstimate = std::vector<uchar>(9, 1);
    // Parameter index 4 is not estimated (matches CALIB_FIX_SKEW above).
    param.isEstimate[4] = 0;

    errors.isEstimate = param.isEstimate;

    double rms;

    cv::internal::EstimateUncertainties(objectPoints, imagePoints, param, rvec, tvec,
                                        errors, err_std, thresh_cond, check_cond, rms);

    // Reference uncertainties and residual statistics.
    EXPECT_MAT_NEAR(errors.f, cv::Vec2d(1.29837104202046,  1.31565641071524), 1e-10);
    EXPECT_MAT_NEAR(errors.c, cv::Vec2d(0.890439368129246, 0.816096854937896), 1e-10);
    EXPECT_MAT_NEAR(errors.k, cv::Vec4d(0.00516248605191506, 0.0168181467500934, 0.0213118690274604, 0.00916010877545648), 1e-10);
    EXPECT_MAT_NEAR(err_std, cv::Vec2d(0.187475975266883, 0.185678953263995), 1e-10);
    CV_Assert(fabs(rms - 0.263782587133546) < 1e-10);
    CV_Assert(errors.alpha == 0);
}
/* Runs the full calibration (pinhole or fisheye model, depending on
 * calib->useFisheye) for the collected image points.
 * Outputs: cameraMatrix, distCoeffs, per-view rvecs/tvecs, reprojErrs and
 * totalAvgErr. Returns true when the estimated parameters pass checkRange. */
static bool camera_calibrate_calibrate_full(GstCameraCalibrate *calib,
    cv::Size& imageSize, cv::Mat& cameraMatrix, cv::Mat& distCoeffs,
    std::vector<std::vector<cv::Point2f> > imagePoints,
    std::vector<cv::Mat>& rvecs, std::vector<cv::Mat>& tvecs,
    std::vector<float>& reprojErrs, double& totalAvgErr)
{
    cameraMatrix = cv::Mat::eye(3, 3, CV_64F);
    if (calib->flags & cv::CALIB_FIX_ASPECT_RATIO) {
        cameraMatrix.at<double>(0,0) = calib->aspectRatio;
    }
    /* fisheye model has 4 distortion coefficients, the default model up to 8 */
    if (calib->useFisheye) {
        distCoeffs = cv::Mat::zeros(4, 1, CV_64F);
    } else {
        distCoeffs = cv::Mat::zeros(8, 1, CV_64F);
    }

    /* one board geometry, replicated for every captured view */
    std::vector<std::vector<cv::Point3f> > objectPoints(1);
    camera_calibrate_calc_corners (calib->boardSize, calib->squareSize, objectPoints[0], calib->calibrationPattern);
    objectPoints.resize(imagePoints.size(), objectPoints[0]);

    /* Find intrinsic and extrinsic camera parameters */
    double rms;
    if (calib->useFisheye) {
        cv::Mat _rvecs, _tvecs;
        rms = cv::fisheye::calibrate(objectPoints, imagePoints, imageSize,
            cameraMatrix, distCoeffs, _rvecs, _tvecs, calib->flags);
        /* split the stacked extrinsics into one Mat per view
         * (NOTE(review): assumes fisheye::calibrate returns one row per view
         * when given Mat outputs — confirm against the OpenCV version used) */
        rvecs.reserve(_rvecs.rows);
        tvecs.reserve(_tvecs.rows);
        for(int i = 0; i < int(objectPoints.size()); i++){
            rvecs.push_back(_rvecs.row(i));
            tvecs.push_back(_tvecs.row(i));
        }
    } else {
        rms = cv::calibrateCamera(objectPoints, imagePoints, imageSize,
            cameraMatrix, distCoeffs, rvecs, tvecs, calib->flags);
    }

    GST_LOG_OBJECT (calib, "Re-projection error reported by calibrateCamera: %f", rms);

    bool ok = checkRange(cameraMatrix) && checkRange(distCoeffs);

    totalAvgErr = camera_calibrate_calc_reprojection_errors (objectPoints, imagePoints,
        rvecs, tvecs, cameraMatrix, distCoeffs, reprojErrs, calib->useFisheye);

    return ok;
}
bool CameraCalibrationAlgorithm::runCalibration ( VectorOfVectorOf2DPoints imagePoints, cv::Size imageSize, cv::Size boardSize, PatternType patternType, float squareSize, float aspectRatio, int flags, cv::Mat& cameraMatrix, cv::Mat& distCoeffs, VectorOfMat& rvecs, VectorOfMat& tvecs, std::vector<float>& reprojErrs, double& totalAvgErr ) { cameraMatrix = cv::Mat::eye(3, 3, CV_64F); if (flags & cv::CALIB_FIX_ASPECT_RATIO) { cameraMatrix.at<double>(0, 0) = aspectRatio; } distCoeffs = cv::Mat::zeros(8, 1, CV_64F); VectorOfVectorOf3DPoints objectPoints(1); calcChessboardCorners(boardSize, squareSize, objectPoints[0], patternType); objectPoints.resize(imagePoints.size(), objectPoints[0]); //LOG_TRACE_MESSAGE("Object points size:" << objectPoints.size()); //LOG_TRACE_MESSAGE("Image points size :" << imagePoints.size()); //LOG_TRACE_MESSAGE("Image size :" << imageSize); double rms = cv::calibrateCamera(objectPoints, imagePoints, imageSize, cameraMatrix, distCoeffs, rvecs, tvecs, flags | cv::CALIB_FIX_K4 | cv::CALIB_FIX_K5); //LOG_TRACE_MESSAGE("RMS error reported by calibrateCamera: " << rms); bool ok = cv::checkRange(cameraMatrix) && cv::checkRange(distCoeffs); totalAvgErr = computeReprojectionErrors(objectPoints, imagePoints, rvecs, tvecs, cameraMatrix, distCoeffs, reprojErrs); return ok; }
bool runCalibration(const std::vector<std::vector<cv::Point2f> >& imagePoints, cv::Size imageSize, cv::Size boardSize, double squareSize, cv::Mat& cameraMatrix, cv::Mat& distCoeffs) { cameraMatrix = cv::Mat::eye(3, 3, CV_64F); distCoeffs = cv::Mat::zeros(8, 1, CV_64F); std::vector<std::vector<cv::Point3f> > objectPoints(1); calcChessboardCorners(boardSize, squareSize, objectPoints[0]); objectPoints.resize(imagePoints.size(),objectPoints[0]); std::vector<cv::Mat> rvecs, tvecs; double rms = cv::calibrateCamera(objectPoints, imagePoints, imageSize, cameraMatrix, distCoeffs, rvecs, tvecs, CV_CALIB_FIX_K4|CV_CALIB_FIX_K5); bool ok = cv::checkRange(cameraMatrix) && cv::checkRange(distCoeffs); return ok; }
void ofxFeatureFinder::drawDetected() { vector<ofxFeatureFinderObject>::iterator it = detectedObjects.begin(); for(int i=0; i < detectedObjects.size(); i++){ ofxFeatureFinderObject object = detectedObjects.at(i); cv::Mat H = detectedHomographies.at(i); ofSetLineWidth(2); ofSetColor(0, 255, 255); vector<ofPolyline>::iterator outline = object.outlines.begin(); for(; outline != object.outlines.end(); ++outline){ vector<cv::Point2f> objectPoints((*outline).size()); vector<cv::Point2f> scenePoints((*outline).size()); for (int i=0, l=(*outline).size(); i<l; i++) { ofPoint p = (*outline)[i]; objectPoints[i] = cv::Point2f(p.x, p.y); } perspectiveTransform( objectPoints, scenePoints, H); ofPolyline sceneOutlines; for (int i=0, l=(*outline).size(); i<l; i++) { cv::Point2f p = scenePoints[i]; sceneOutlines.addVertex(p.x, p.y); } sceneOutlines.close(); sceneOutlines.draw(); } } }
void QCvCameraCalibration::calibrate() { this->setEnable(false); QString msg; if (imagePoints.size() >= s.minNumSamples) { // compute board geometry vector<vector<cv::Point3f> > objectPoints(1); calcBoardCornerPositions(s.boardSize, s.squareSize, objectPoints[0]); objectPoints.resize(imagePoints.size(),objectPoints[0]); rmsError = calibrateCamera(objectPoints, imagePoints, imgSize, cameraMatrix, distCoeffs, rvecs, tvecs, CV_CALIB_FIX_K4|CV_CALIB_FIX_K5); msg = "updated object points successfully, RMS = "+QString::number(rmsError); s.bCALIBRATED=true; saveCameraParams(); } else { msg = "Insufficient number of samples taken!"; } emit statusUpdate(msg); this->setEnable(true); }
// Regression test: fisheye stereo calibration with CALIB_FIX_INTRINSIC.
// Known intrinsics (K1/D1, K2/D2) are held fixed, so only the relative pose
// (R, T) between the cameras is estimated; the result is compared against
// stored reference values.
TEST_F(fisheyeTest, stereoCalibrateFixIntrinsic)
{
    const int n_images = 34;

    const std::string folder = combine(datasets_repository_path, "calib-3_stereo_from_JY");

    std::vector<std::vector<cv::Point2d> > leftPoints(n_images);
    std::vector<std::vector<cv::Point2d> > rightPoints(n_images);
    std::vector<std::vector<cv::Point3d> > objectPoints(n_images);

    // Pre-detected 2D corners for the left and right cameras, plus the
    // matching 3D board coordinates.
    cv::FileStorage fs_left(combine(folder, "left.xml"), cv::FileStorage::READ);
    CV_Assert(fs_left.isOpened());
    for(int i = 0; i < n_images; ++i)
        fs_left[cv::format("image_%d", i )] >> leftPoints[i];
    fs_left.release();

    cv::FileStorage fs_right(combine(folder, "right.xml"), cv::FileStorage::READ);
    CV_Assert(fs_right.isOpened());
    for(int i = 0; i < n_images; ++i)
        fs_right[cv::format("image_%d", i )] >> rightPoints[i];
    fs_right.release();

    cv::FileStorage fs_object(combine(folder, "object.xml"), cv::FileStorage::READ);
    CV_Assert(fs_object.isOpened());
    for(int i = 0; i < n_images; ++i)
        fs_object[cv::format("image_%d", i )] >> objectPoints[i];
    fs_object.release();

    cv::Matx33d theR;
    cv::Vec3d theT;

    int flag = 0;
    flag |= cv::fisheye::CALIB_RECOMPUTE_EXTRINSIC;
    flag |= cv::fisheye::CALIB_CHECK_COND;
    flag |= cv::fisheye::CALIB_FIX_SKEW;
    flag |= cv::fisheye::CALIB_FIX_INTRINSIC;

    // Reference intrinsics for both cameras (held fixed by the flag above).
    cv::Matx33d K1 (561.195925927249,                0, 621.282400272412,
                                   0, 562.849402029712, 380.555455380889,
                                   0,                0,                1);

    cv::Matx33d K2 (560.395452535348,                0, 678.971652040359,
                                   0,  561.90171021422, 380.401340535339,
                                   0,                0,                1);

    cv::Vec4d D1 (-7.44253716539556e-05, -0.00702662033932424, 0.00737569823650885, -0.00342230256441771);
    cv::Vec4d D2 (-0.0130785435677431, 0.0284434505383497, -0.0360333869900506, 0.0144724062347222);

    // TermCriteria(3, 12, 0) == COUNT+EPS, 12 iterations, eps 0.
    cv::fisheye::stereoCalibrate(objectPoints, leftPoints, rightPoints,
                    K1, D1, K2, D2, imageSize, theR, theT, flag,
                    cv::TermCriteria(3, 12, 0));

    // Reference relative pose.
    cv::Matx33d R_correct(   0.9975587205950972,   0.06953016383322372,  0.006492709911733523,
                           -0.06956823121068059,    0.9975601387249519,  0.005833595226966235,
                          -0.006071257768382089, -0.006271040135405457,    0.9999619062167968);
    cv::Vec3d T_correct(-0.099402724724121, 0.00270812139265413, 0.00129330292472699);

    EXPECT_MAT_NEAR(theR, R_correct, 1e-10);
    EXPECT_MAT_NEAR(theT, T_correct, 1e-10);
}
// Builds splines from voxelized clones of the linked parent's children over
// the configured frame range, then joins them into a single spline object.
// Returns NULL on invalid input or failure.
// NOTE(review): the clones pushed into `children` are freed on the success
// path and at the `Error:` label, but NOT on the early NULL returns taken
// after cloning (gridSize check, SendModelingCommand failure) — potential
// leak; confirm against the C4D ownership rules.
// NOTE(review): no `goto Error` is visible in this function, so the `Error:`
// cleanup block appears unreachable unless a macro elsewhere jumps to it.
SplineObject* Voxelify::GetContour(BaseObject *op, BaseDocument *doc, Real lod, BaseThread *bt){
    BaseContainer *data = op->GetDataInstance();
    BaseObject* parent=(BaseObject*)data->GetLink(CTT_OBJECT_LINK,doc,Obase);
    if (!parent) return NULL;

    // Frame range and stride for picking children.
    LONG startObject = data->GetLong(START_FRAME);
    LONG endObject = data->GetLong(END_FRAME);
    if (startObject >=endObject) return NULL;

    maxSeg = data->GetReal(CTTSPOBJECT_MAXSEG,30.);
    minSeg = data->GetReal(CTTSPOBJECT_MINSEG);

    LONG delta = data->GetLong(OBJECT_SKIP,1);
    delta = delta < 1 ? 1 : delta;

    GeDynamicArray<BaseObject*> children;
    GeDynamicArray<GeDynamicArray<Vector> > splineAtPoint;

    // Clone every child whose index lies in [start, end] and on the stride;
    // iteration walks from the last child backwards.
    BaseObject* chld = NULL;
    LONG trck = 0;
    for (chld=parent->GetDownLast(); chld; chld=chld->GetPred()) {
        if (trck >= startObject && trck<= endObject && trck % delta == 0){
            children.Push((BaseObject*)chld->GetClone(COPYFLAGS_NO_HIERARCHY|COPYFLAGS_NO_ANIMATION|COPYFLAGS_NO_BITS,NULL));
        }
        trck++;
    }

    if (children.GetCount() < 2) {
        return NULL;
    }

    LONG splineInterpolation = data->GetLong(SPLINEOBJECT_INTERPOLATION);
    LONG longestPercent = data->GetLong(TAKE_LONGEST, 1);
    longestPercent = longestPercent > 100 ? 100: longestPercent;

    parentMatrix = parent->GetMl();
    GeDynamicArray<GeDynamicArray<Vector> > objectPoints(children.GetCount());
    StatusSetBar(0);
    StatusSetText("Collecting Points");

    vector<vector<float> > points;
    std::vector<VGrid> grids;

    LONG gridSize = data->GetLong(GRID_SIZE, 1);
    if (!(gridSize > 0)) return NULL;

    // Voxelify each cloned child, producing one VGrid per child.
    for (int k= 0; k < children.GetCount(); k++){
        Matrix ml;
        DoRecursion(op,children[k],objectPoints[k], ml);
        points = objectPointsToPoints(objectPoints[k]);
        GePrint(children[k]->GetName());
        grids.push_back(vox.voxelify(points,gridSize, 12, 1.0));
        // Progress feedback every 5th child; progress spans 10..60%.
        if (k % 5 == 0){
            LONG progress = 10 + (50*k)/children.GetCount();
            StatusSetBar(progress);
            StatusSetText(LongToString(progress)+"%");
            if (bt && bt->TestBreak()){
                //break; //this break seems to be kicking in randomly killing the loop
            }
        }
    }

    StatusSetText("Building Splines");

    SplineObject* parentSpline = ComputeSpline(bt, grids, longestPercent, splineAtPoint);

    // Join all child splines into a single object.
    ModelingCommandData mcd;
    mcd.doc = doc;
    mcd.op = parentSpline;
    if(!SendModelingCommand(MCOMMAND_JOIN, mcd)){
        return NULL;
    }
    SplineObject* ret = ToSpline(mcd.result->GetIndex(0L));
    ret->GetDataInstance()->SetLong(SPLINEOBJECT_INTERPOLATION, splineInterpolation);

    // Free the clones on the success path.
    for (int k=0; k<children.GetCount(); k++){
        if (children[k]){
            BaseObject::Free(children[k]);
        }
    }
    return ret;

Error:
    // Cleanup target (see review note above about reachability).
    for (int i = 0; i < children.GetCount(); i++){
        BaseObject::Free(children[i]);
    }
    return NULL;
}
// Runs mono calibration for each camera (and a stereo extrinsic calibration
// plus rectification when in stereo mode), publishes the results to the UI,
// and enables the applicable rectification options.
void CalibrationDialog::calibrate()
{
    processingData_ = true;
    savedCalibration_ = false;

    QMessageBox mb(QMessageBox::Information,
            tr("Calibrating..."),
            tr("Operation in progress..."));
    mb.show();
    QApplication::processEvents();
    uSleep(100); // hack make sure the text in the QMessageBox is shown...
    QApplication::processEvents();

    // Board corner positions in board coordinates (z = 0 plane).
    std::vector<std::vector<cv::Point3f> > objectPoints(1);
    cv::Size boardSize(ui_->spinBox_boardWidth->value(), ui_->spinBox_boardHeight->value());
    float squareSize = ui_->doubleSpinBox_squareSize->value();
    // compute board corner positions
    for( int i = 0; i < boardSize.height; ++i )
        for( int j = 0; j < boardSize.width; ++j )
            objectPoints[0].push_back(cv::Point3f(float( j*squareSize ), float( i*squareSize ), 0));

    // Mono calibration per camera (one pass unless stereo_ is set).
    for(int id=0; id<(stereo_?2:1); ++id)
    {
        UINFO("Calibrating camera %d (samples=%d)", id, (int)imagePoints_[id].size());

        objectPoints.resize(imagePoints_[id].size(), objectPoints[0]);

        //calibrate
        std::vector<cv::Mat> rvecs, tvecs;
        std::vector<float> reprojErrs;
        cv::Mat K, D;
        K = cv::Mat::eye(3,3,CV_64FC1);
        UINFO("calibrate!");
        //Find intrinsic and extrinsic camera parameters
        double rms = cv::calibrateCamera(objectPoints, imagePoints_[id], imageSize_[id], K, D, rvecs, tvecs);
        UINFO("Re-projection error reported by calibrateCamera: %f", rms);

        // compute reprojection errors
        std::vector<cv::Point2f> imagePoints2;
        int i, totalPoints = 0;
        double totalErr = 0, err;
        reprojErrs.resize(objectPoints.size());
        for( i = 0; i < (int)objectPoints.size(); ++i )
        {
            cv::projectPoints( cv::Mat(objectPoints[i]), rvecs[i], tvecs[i], K, D, imagePoints2);
            err = cv::norm(cv::Mat(imagePoints_[id][i]), cv::Mat(imagePoints2), CV_L2);
            int n = (int)objectPoints[i].size();
            reprojErrs[i] = (float) std::sqrt(err*err/n);
            totalErr += err*err;
            totalPoints += n;
        }
        double totalAvgErr = std::sqrt(totalErr/totalPoints);
        UINFO("avg re projection error = %f", totalAvgErr);

        // Build the 3x4 projection matrix P = [K | 0] (with P(2,3)=1).
        cv::Mat P(3,4,CV_64FC1);
        P.at<double>(2,3) = 1;
        K.copyTo(P.colRange(0,3).rowRange(0,3));

        std::cout << "cameraMatrix = " << K << std::endl;
        std::cout << "distCoeffs = " << D << std::endl;
        std::cout << "width = " << imageSize_[id].width << std::endl;
        std::cout << "height = " << imageSize_[id].height << std::endl;

        models_[id] = CameraModel(cameraName_.toStdString(), imageSize_[id], K, D, cv::Mat::eye(3,3,CV_64FC1), P);

        // Publish the results to the per-camera UI widgets.
        if(id == 0)
        {
            ui_->label_fx->setNum(models_[id].fx());
            ui_->label_fy->setNum(models_[id].fy());
            ui_->label_cx->setNum(models_[id].cx());
            ui_->label_cy->setNum(models_[id].cy());
            ui_->label_error->setNum(totalAvgErr);

            std::stringstream strK, strD, strR, strP;
            strK << models_[id].K();
            strD << models_[id].D();
            strR << models_[id].R();
            strP << models_[id].P();
            ui_->lineEdit_K->setText(strK.str().c_str());
            ui_->lineEdit_D->setText(strD.str().c_str());
            ui_->lineEdit_R->setText(strR.str().c_str());
            ui_->lineEdit_P->setText(strP.str().c_str());
        }
        else
        {
            ui_->label_fx_2->setNum(models_[id].fx());
            ui_->label_fy_2->setNum(models_[id].fy());
            ui_->label_cx_2->setNum(models_[id].cx());
            ui_->label_cy_2->setNum(models_[id].cy());
            ui_->label_error_2->setNum(totalAvgErr);

            std::stringstream strK, strD, strR, strP;
            strK << models_[id].K();
            strD << models_[id].D();
            strR << models_[id].R();
            strP << models_[id].P();
            ui_->lineEdit_K_2->setText(strK.str().c_str());
            ui_->lineEdit_D_2->setText(strD.str().c_str());
            ui_->lineEdit_R_2->setText(strR.str().c_str());
            ui_->lineEdit_P_2->setText(strP.str().c_str());
        }
    }

    if(stereo_ && models_[0].isValid() && models_[1].isValid())
    {
        UINFO("stereo calibration (samples=%d)...", (int)stereoImagePoints_[0].size());
        cv::Size imageSize = imageSize_[0].width > imageSize_[1].width?imageSize_[0]:imageSize_[1];
        cv::Mat R, T, E, F;

        // NOTE(review): these declarations shadow the mono-pass objectPoints/
        // boardSize/squareSize above; intentional but easy to misread.
        std::vector<std::vector<cv::Point3f> > objectPoints(1);
        cv::Size boardSize(ui_->spinBox_boardWidth->value(), ui_->spinBox_boardHeight->value());
        float squareSize = ui_->doubleSpinBox_squareSize->value();
        // compute board corner positions
        for( int i = 0; i < boardSize.height; ++i )
            for( int j = 0; j < boardSize.width; ++j )
                objectPoints[0].push_back(cv::Point3f(float( j*squareSize ), float( i*squareSize ), 0));
        objectPoints.resize(stereoImagePoints_[0].size(), objectPoints[0]);

        // calibrate extrinsic
        // (OpenCV 2.x and 3.x swapped the flags/criteria argument order)
#if CV_MAJOR_VERSION < 3
        double rms = cv::stereoCalibrate(
                objectPoints,
                stereoImagePoints_[0],
                stereoImagePoints_[1],
                models_[0].K(), models_[0].D(),
                models_[1].K(), models_[1].D(),
                imageSize, R, T, E, F,
                cv::TermCriteria(cv::TermCriteria::COUNT+cv::TermCriteria::EPS, 100, 1e-5),
                cv::CALIB_FIX_INTRINSIC);
#else
        double rms = cv::stereoCalibrate(
                objectPoints,
                stereoImagePoints_[0],
                stereoImagePoints_[1],
                models_[0].K(), models_[0].D(),
                models_[1].K(), models_[1].D(),
                imageSize, R, T, E, F,
                cv::CALIB_FIX_INTRINSIC,
                cv::TermCriteria(cv::TermCriteria::COUNT+cv::TermCriteria::EPS, 100, 1e-5));
#endif
        UINFO("stereo calibration... done with RMS error=%f", rms);

        // Average epipolar error: distance of each point to the epipolar line
        // induced by its correspondence in the other image.
        double err = 0;
        int npoints = 0;
        std::vector<cv::Vec3f> lines[2];
        UINFO("Computing avg re-projection error...");
        for(unsigned int i = 0; i < stereoImagePoints_[0].size(); i++ )
        {
            int npt = (int)stereoImagePoints_[0][i].size();
            cv::Mat imgpt[2];
            for(int k = 0; k < 2; k++ )
            {
                imgpt[k] = cv::Mat(stereoImagePoints_[k][i]);
                cv::undistortPoints(imgpt[k], imgpt[k], models_[k].K(), models_[k].D(), cv::Mat(), models_[k].K());
                computeCorrespondEpilines(imgpt[k], k+1, F, lines[k]);
            }
            for(int j = 0; j < npt; j++ )
            {
                double errij = fabs(stereoImagePoints_[0][i][j].x*lines[1][j][0] +
                        stereoImagePoints_[0][i][j].y*lines[1][j][1] + lines[1][j][2]) +
                                fabs(stereoImagePoints_[1][i][j].x*lines[0][j][0] +
                        stereoImagePoints_[1][i][j].y*lines[0][j][1] + lines[0][j][2]);
                err += errij;
            }
            npoints += npt;
        }
        double totalAvgErr = err/(double)npoints;
        UINFO("stereo avg re projection error = %f", totalAvgErr);

        // Rectification transforms for both cameras.
        cv::Mat R1, R2, P1, P2, Q;
        cv::Rect validRoi[2];

        cv::stereoRectify(models_[0].K(), models_[0].D(),
                        models_[1].K(), models_[1].D(),
                        imageSize, R, T, R1, R2, P1, P2, Q,
                        cv::CALIB_ZERO_DISPARITY, 0, imageSize, &validRoi[0], &validRoi[1]);

        UINFO("Valid ROI1 = %d,%d,%d,%d  ROI2 = %d,%d,%d,%d newImageSize=%d/%d",
                validRoi[0].x, validRoi[0].y, validRoi[0].width, validRoi[0].height,
                validRoi[1].x, validRoi[1].y, validRoi[1].width, validRoi[1].height,
                imageSize.width, imageSize.height);

        if(imageSize_[0].width == imageSize_[1].width)
        {
            //Stereo, keep new extrinsic projection matrix
            stereoModel_ = StereoCameraModel(
                    cameraName_.toStdString(),
                    imageSize_[0], models_[0].K(), models_[0].D(), R1, P1,
                    imageSize_[1], models_[1].K(), models_[1].D(), R2, P2,
                    R, T, E, F);
        }
        else
        {
            //Kinect
            stereoModel_ = StereoCameraModel(
                    cameraName_.toStdString(),
                    imageSize_[0], models_[0].K(), models_[0].D(), cv::Mat::eye(3,3,CV_64FC1), models_[0].P(),
                    imageSize_[1], models_[1].K(), models_[1].D(), cv::Mat::eye(3,3,CV_64FC1), models_[1].P(),
                    R, T, E, F);
        }

        std::stringstream strR1, strP1, strR2, strP2;
        strR1 << stereoModel_.left().R();
        strP1 << stereoModel_.left().P();
        strR2 << stereoModel_.right().R();
        strP2 << stereoModel_.right().P();
        ui_->lineEdit_R->setText(strR1.str().c_str());
        ui_->lineEdit_P->setText(strP1.str().c_str());
        ui_->lineEdit_R_2->setText(strR2.str().c_str());
        ui_->lineEdit_P_2->setText(strP2.str().c_str());

        ui_->label_baseline->setNum(stereoModel_.baseline());
        //ui_->label_error_stereo->setNum(totalAvgErr);
    }

    // Enable the rectification options that the computed models support.
    if(stereo_ && stereoModel_.left().isValid() && stereoModel_.right().isValid()&&
            (!ui_->label_baseline->isVisible() || stereoModel_.baseline() > 0.0))
    {
        ui_->radioButton_rectified->setEnabled(true);
        ui_->radioButton_stereoRectified->setEnabled(true);
        ui_->radioButton_stereoRectified->setChecked(true);
        ui_->pushButton_save->setEnabled(true);
    }
    else if(models_[0].isValid())
    {
        ui_->radioButton_rectified->setEnabled(true);
        ui_->radioButton_rectified->setChecked(true);
        ui_->pushButton_save->setEnabled(!stereo_);
    }

    UINFO("End calibration");
    processingData_ = false;
}
/// Calibrates the extrinsic parameters of the two-camera setup and saves the
/// result to an XML file ("EnvironmentCalibration.xml").
/// Interactive keys while the capture loop runs:
///   'r' - retrieve chessboard corners and run the stereo calibration
///   's' - save (if calibrated) and exit
///   'c' - exit without saving
/// In: inputCapture1: video feed of camera 1
///     inputCapture2: video feed of camera 2
/// Intrinsics are loaded from "CameraIntrinsics1.xml"/"CameraIntrinsics2.xml".
void CalibrateEnvironment(VideoCapture& inputCapture1, VideoCapture& inputCapture2)
{
    Size boardSize;
    boardSize.width = BOARD_WIDTH;
    boardSize.height = BOARD_HEIGHT;

    const string fileName1 = "CameraIntrinsics1.xml";
    const string fileName2 = "CameraIntrinsics2.xml";
    cerr << "Attempting to open configuration files" << endl;
    FileStorage fs1(fileName1, FileStorage::READ);
    FileStorage fs2(fileName2, FileStorage::READ);

    Mat cameraMatrix1, cameraMatrix2;
    Mat distCoeffs1, distCoeffs2;

    fs1["Camera_Matrix"] >> cameraMatrix1;
    fs1["Distortion_Coefficients"] >> distCoeffs1;
    fs2["Camera_Matrix"] >> cameraMatrix2;
    fs2["Distortion_Coefficients"] >> distCoeffs2;

    // NOTE(review): on a failed load this only logs — the function continues
    // with empty matrices and would fail later in stereoCalibrate; confirm
    // whether an early return is wanted here.
    if (cameraMatrix1.data == NULL || distCoeffs1.data == NULL ||
        cameraMatrix2.data == NULL || distCoeffs2.data == NULL)
    {
        cerr << "Could not load camera intrinsics\n" << endl;
    }
    else{
        cerr << "Loaded intrinsics\n" << endl;
        cerr << "Camera Matrix1: " << cameraMatrix1 << endl;
        cerr << "Camera Matrix2: " << cameraMatrix2 << endl;
    }

    Mat translation;
    Mat image1, image2;
    Mat mapX1, mapX2, mapY1, mapY2;
    inputCapture1.read(image1);
    Size imageSize = image1.size();
    bool rotationCalibrated = false;

    while(inputCapture1.isOpened() && inputCapture2.isOpened())
    {
        inputCapture1.read(image1);
        inputCapture2.read(image2);

        // Once calibrated, show the rectified views instead of the raw feeds.
        if (rotationCalibrated)
        {
            Mat t1 = image1.clone();
            Mat t2 = image2.clone();
            remap(t1, image1, mapX1, mapY1, INTER_LINEAR);
            remap(t2, image2, mapX2, mapY2, INTER_LINEAR);
            t1.release();
            t2.release();
        }

        char c = waitKey(15);
        if (c == 'c')
        {
            cerr << "Cancelling..." << endl;
            return;
        }
        else if(c == 's' && rotationCalibrated)
        {
            cerr << "Saving..." << endl;
            const string fileName = "EnvironmentCalibration.xml";
            FileStorage fs(fileName, FileStorage::WRITE);
            fs << "Camera_Matrix_1" << getOptimalNewCameraMatrix(cameraMatrix1, distCoeffs1, imageSize, 1,imageSize, 0);
            fs << "Camera_Matrix_2" << getOptimalNewCameraMatrix(cameraMatrix2, distCoeffs2, imageSize, 1, imageSize, 0);
            fs << "Mapping_X_1" << mapX1;
            fs << "Mapping_Y_1" << mapY1;
            fs << "Mapping_X_2" << mapX2;
            fs << "Mapping_Y_2" << mapY2;
            fs << "Translation" << translation;
            cerr << "Exiting..." << endl;
            destroyAllWindows();
            return;
        }
        else if(c == 's' && !rotationCalibrated)
        {
            cerr << "Exiting..." << endl;
            destroyAllWindows();
            return;
        }
        else if (c == 'r')
        {
            BoardSettings s;
            s.boardSize.width = BOARD_WIDTH;
            s.boardSize.height = BOARD_HEIGHT;
            s.cornerNum = s.boardSize.width * s.boardSize.height;
            s.squareSize = (float)SQUARE_SIZE;

            // NOTE(review): this outer objectPoints is shadowed by the inner
            // vector<vector<Point3f>> declaration below and is never used.
            vector<Point3f> objectPoints;
            vector<vector<Point2f> > imagePoints1, imagePoints2;

            if (RetrieveChessboardCorners(imagePoints1, imagePoints2, s, inputCapture1, inputCapture2, ITERATIONS))
            {
                vector<vector<Point3f> > objectPoints(1);
                CalcBoardCornerPositions(s.boardSize, s.squareSize, objectPoints[0]);
                objectPoints.resize(imagePoints1.size(),objectPoints[0]);

                Mat R, T, E, F;
                Mat rmat1, rmat2, rvec;

                // First pass: estimate R/T with intrinsics held fixed.
                double rms = stereoCalibrate(objectPoints, imagePoints1, imagePoints2,
                        cameraMatrix1, distCoeffs1, cameraMatrix2, distCoeffs2, imageSize, R, T, E, F,
                        TermCriteria( CV_TERMCRIT_EPS+CV_TERMCRIT_ITER, 1000, 0.01),
                        CV_CALIB_FIX_INTRINSIC);

                cerr << "Original translation: " << T << endl;
                cerr << "Reprojection error reported by camera: " << rms << endl;

                // convert to rotation vector and then remove 90 degree offset
                Rodrigues(R, rvec);
                rvec.at<double>(1,0) -= 1.570796327;

                // equal rotation applied to each image...not necessarily needed
                rvec = rvec/2;
                Rodrigues(rvec, rmat1);
                invert(rmat1,rmat2);

                initUndistortRectifyMap(cameraMatrix1, distCoeffs1, rmat1,
                        getOptimalNewCameraMatrix(cameraMatrix1, distCoeffs1, imageSize, 1,imageSize, 0),
                        imageSize, CV_32FC1, mapX1, mapY1);
                initUndistortRectifyMap(cameraMatrix2, distCoeffs2, rmat2,
                        getOptimalNewCameraMatrix(cameraMatrix2, distCoeffs2, imageSize, 1, imageSize, 0),
                        imageSize, CV_32FC1, mapX2, mapY2);

                // reproject points in camera 1 since its rotation has been changed
                // need to find the translation between cameras based on the new camera 1 orientation
                for (int i = 0; i < imagePoints1.size(); i++)
                {
                    Mat pointsMat1 = Mat(imagePoints1[i]);
                    Mat pointsMat2 = Mat(imagePoints2[i]);

                    undistortPoints(pointsMat1, imagePoints1[i], cameraMatrix1, distCoeffs1, rmat1,getOptimalNewCameraMatrix(cameraMatrix1, distCoeffs1, imageSize, 1, imageSize, 0));
                    undistortPoints(pointsMat2, imagePoints2[i], cameraMatrix2, distCoeffs2, rmat2,getOptimalNewCameraMatrix(cameraMatrix2, distCoeffs2, imageSize, 1, imageSize, 0));

                    pointsMat1.release();
                    pointsMat2.release();
                }

                Mat temp1, temp2;
                R.release();
                T.release();
                E.release();
                F.release();

                // TODO: remove this
                // CalcBoardCornerPositions(s.boardSize, s.squareSize, objectPoints[0]);
                // objectPoints.resize(imagePoints1.size(),objectPoints[0]);

                // Second pass on the rectified points to recover the
                // translation in the new camera-1 orientation.
                stereoCalibrate(objectPoints, imagePoints1, imagePoints2,
                        cameraMatrix1, distCoeffs1, cameraMatrix2, distCoeffs2, imageSize, R, T, E, F,
                        TermCriteria( CV_TERMCRIT_EPS+CV_TERMCRIT_ITER, 1000, 0.01),
                        CV_CALIB_FIX_INTRINSIC);

                // need to alter translation matrix so
                // [0] = distance in X direction (right from perspective of camera 1 is positive)
                // [1] = distance in Y direction (away from camera 1 is positive)
                // [2] = distance in Z direction (up is positive)
                translation = T;
                double temp = -translation.at<double>(0,0);
                translation.at<double>(0,0) = translation.at<double>(2,0);
                translation.at<double>(2,0) = temp;

                cerr << "Translation reproj: " << translation << endl;
                Rodrigues(R, rvec);
                cerr << "Reprojected rvec: " << rvec << endl;

                imagePoints1.clear();
                imagePoints2.clear();
                rvec.release();
                rmat1.release();
                rmat2.release();
                R.release();
                T.release();
                E.release();
                F.release();

                rotationCalibrated = true;
            }
        }
        imshow("Image View1", image1);
        imshow("Image View2", image2);
    }
}