// Constructor: wires up ROS topic I/O and loads the fixed camera
// calibration (intrinsics + distortion) used for pose estimation.
//
// Subscribes to /camera/image_color (queue 1000) and publishes
// serwo::ErrorInfo on /error (queue 1).
//
// NOTE(review): the calibration constants below are hard-coded for one
// specific camera; consider loading them from the parameter server or a
// camera_info topic instead — TODO confirm with the calibration owner.
ImageProcessing::ImageProcessing(ros::NodeHandle &nh)
    : nh_(nh)  // copy the handle once in the init list instead of assigning in the body
{
    imageSubscriber = nh_.subscribe("/camera/image_color", 1000,
                                    &ImageProcessing::callback, this);
    grid_info_pub = nh_.advertise<serwo::ErrorInfo>("/error", 1);

    // 3D model points are constant.
    objectPoints = Generate3DPoints();

    // Distortion coefficients, plumb-bob order [k1, k2, p1, p2, k3].
    // Comma-initializer replaces 5 index-by-index at<>() writes, which were
    // easy to get subtly wrong.
    distCoeffs = (cv::Mat_<double>(5, 1) << -0.413271,
                                             0.172960,
                                            -0.004729,
                                            -0.000895,
                                             0.0);

    // Pinhole intrinsics: fx/fy are focal lengths in pixels, (cx, cy) is the
    // principal point.
    cameraMatrix = (cv::Mat_<double>(3, 3) << 1080.054285, 0.0,         655.287407,
                                              0.0,         1072.245647, 524.778361,
                                              0.0,         0.0,         1.0);

    // Output buffers for solvePnP: rotation (Rodrigues vector) and translation.
    rvec = cv::Mat(3, 1, cv::DataType<double>::type);
    tvec = cv::Mat(3, 1, cv::DataType<double>::type);
}
int main( int argc, char* argv[]) { // Read points std::vector<cv::Point2f> imagePoints = Generate2DPoints(); std::vector<cv::Point3f> objectPoints = Generate3DPoints(); std::cout << "There are " << imagePoints.size() << " imagePoints and " << objectPoints.size() << " objectPoints." << std::endl; //camera parameters double fx = 525.0; //focal length x double fy = 525.0; //focal le double cx = 319.5; //optical centre x double cy = 239.5; //optical centre y cv::Mat cameraMatrix(3,3,cv::DataType<double>::type); cameraMatrix.at<double>(0,0)=fx; cameraMatrix.at<double>(1,1)=fy; cameraMatrix.at<double>(2,2)=1; cameraMatrix.at<double>(0,2)=cx; cameraMatrix.at<double>(1,2)=cy; cameraMatrix.at<double>(0,1)=0; cameraMatrix.at<double>(1,0)=0; cameraMatrix.at<double>(2,0)=0; cameraMatrix.at<double>(2,1)=0; std::cout << "Initial cameraMatrix: " << cameraMatrix << std::endl; cv::Mat distCoeffs(4,1,cv::DataType<double>::type); distCoeffs.at<double>(0) = 0; distCoeffs.at<double>(1) = 0; distCoeffs.at<double>(2) = 0; distCoeffs.at<double>(3) = 0; cv::Mat rvec(3,1,cv::DataType<double>::type); cv::Mat tvec(3,1,cv::DataType<double>::type); cv::solvePnP(objectPoints, imagePoints, cameraMatrix, distCoeffs, rvec, tvec); std::cout << "rvec: " << rvec << std::endl; std::cout << "tvec: " << tvec << std::endl; std::vector<cv::Point2f> projectedPoints; cv::projectPoints(objectPoints, rvec, tvec, cameraMatrix, distCoeffs, projectedPoints); for(unsigned int i = 0; i < projectedPoints.size(); ++i) { std::cout << "Image point: " << imagePoints[i] << " Projected to " << projectedPoints[i] << std::endl; } return 0; }