void GraphicEnd::savepcd() { cout<<"save final pointcloud"<<endl; SLAMEnd slam; slam.init(NULL); SparseOptimizer& opt = slam.globalOptimizer; opt.load("/home/lc/workspace/paper_related/Appolo/test/result/final_after.g2o"); ifstream fin("/home/lc/workspace/paper_related/Appolo/test/result/key_frame.txt"); PointCloud::Ptr output(new PointCloud()); PointCloud::Ptr curr( new PointCloud()); pcl::VoxelGrid<PointT> voxel; voxel.setLeafSize(0.01f, 0.01f, 0.01f ); string pclPath ="/media/新加卷/dataset/dataset1/pcd/"; pcl::PassThrough<PointT> pass; pass.setFilterFieldName("z"); double z = 5.0; pass.setFilterLimits(0.0, z); while( !fin.eof() ) { int frame, id; fin>>id>>frame; ss<<pclPath<<frame<<".pcd"; string str; ss>>str; cout<<"loading "<<str<<endl; ss.clear(); pcl::io::loadPCDFile( str.c_str(), *curr ); //cout<<"curr cloud size is: "<<curr->points.size()<<endl; VertexSE3* pv = dynamic_cast<VertexSE3*> (opt.vertex( id )); if (pv == NULL) break; Eigen::Isometry3d pos = pv->estimate(); cout<<pos.matrix()<<endl; voxel.setInputCloud( curr ); PointCloud::Ptr tmp( new PointCloud()); voxel.filter( *tmp ); curr.swap( tmp ); pass.setInputCloud( curr ); pass.filter(*tmp); curr->swap( *tmp ); //cout<<"tmp: "<<tmp->points.size()<<endl; pcl::transformPointCloud( *curr, *tmp, pos.matrix()); *output += *tmp; //cout<<"output: "<<output->points.size()<<endl; } voxel.setInputCloud( output ); PointCloud::Ptr output_filtered( new PointCloud ); voxel.filter( *output_filtered ); output->swap( *output_filtered ); //cout<<output->points.size()<<endl; pcl::io::savePCDFile( "/home/lc/workspace/paper_related/Appolo/test/result/result.pcd", *output); cout<<"final result saved."<<endl; }
// Promotes the current frame to a new keyframe and registers it (plus the
// connecting edges) with the global pose-graph optimizer.
// T: measured relative transform from the previous keyframe to this one.
void GraphicEnd::generateKeyFrame( Eigen::Isometry3d T )
{
    cout<<BOLDGREEN<<"GraphicEnd::generateKeyFrame"<<RESET<<endl;
    // Copy the data of the present frame into the current keyframe.
    _currKF.id ++;
    _currKF.planes = _present.planes;
    _currKF.frame_index = _index;
    _lastRGB = _currRGB.clone();
    _kf_pos = _robot;

    cout<<"add key frame: "<<_currKF.id<<endl;
    //waitKey(0);
    _keyframes.push_back( _currKF );

    // Store the current keyframe in the global optimizer.
    SparseOptimizer& opt = _pSLAMEnd->globalOptimizer;
    // Vertex: estimate seeded from odometry when available, identity otherwise.
    VertexSE3* v = new VertexSE3();
    v->setId( _currKF.id );
    //v->setEstimate( _robot );
    if (_use_odometry)
        v->setEstimate( _odo_this );
    //v->setEstimate( Eigen::Isometry3d::Identity() );
    else
        v->setEstimate( Eigen::Isometry3d::Identity() );
    opt.addVertex( v );

    // Edge: visual measurement between the previous and the new keyframe.
    EdgeSE3* edge = new EdgeSE3();
    edge->vertices()[0] = opt.vertex( _currKF.id - 1 );
    edge->vertices()[1] = opt.vertex( _currKF.id );
    // Fixed confidence (100) on all six DoF of the visual measurement.
    Eigen::Matrix<double, 6,6> information = Eigen::Matrix<double, 6, 6>::Identity();
    information(0, 0) = information(2,2) = 100;
    information(1, 1) = 100;
    information(3,3) = information(4,4) = information(5,5) = 100;
    edge->setInformation( information );
    edge->setMeasurement( T );
    opt.addEdge( edge );

    if (_use_odometry)
    {
        // Add a second edge between the same keyframe pair carrying the
        // odometry measurement, weighted by the configured odometry error.
        Eigen::Isometry3d To = _odo_last.inverse()*_odo_this;
        cout<<"odo last = "<<endl<<_odo_last.matrix()<<endl<<"odo this = "<<endl<<_odo_this.matrix()<<endl;
        cout<<"To = "<<endl<<To.matrix()<<endl;
        cout<<"Measurement = "<<endl<<T.matrix()<<endl;
        //waitKey( 0 );
        EdgeSE3* edge = new EdgeSE3();
        edge->vertices()[0] = opt.vertex( _currKF.id - 1 );
        edge->vertices()[1] = opt.vertex( _currKF.id );
        // Information = 1 / sigma^2 with sigma = _error_odometry.
        Eigen::Matrix<double, 6,6> information = Eigen::Matrix<double, 6, 6>::Identity();
        information(0, 0) = information(1,1) = information(2,2) = information(3,3) = information(4,4) = information(5,5) = 1/(_error_odometry*_error_odometry);
        edge->setInformation( information );
        edge->setMeasurement( To );
        opt.addEdge( edge );
        _odo_last = _odo_this;
    }
}
int main() { Mat rgb1,rgb2,depth1,depth2; fileread(rgb1,rgb2,depth1,depth2); CAMERA_INTRINSIC_PARAMETERS C; C.cx = 325.5; C.cy = 253.5; C.fx = 518.0; C.fy = 519.0; C.scale = 1000.0; //feature detector and descriptor compute Eigen::Isometry3d T = transformEstimation(rgb1,rgb2,depth1,depth2,C); PointCloud::Ptr cloud1 = image2PointCloud(rgb1,depth1,C); PointCloud::Ptr cloud2 = image2PointCloud(rgb2,depth2,C); //pcl::io::savePCDFile("1.pcd", *cloud1); //pcl::io::savePCDFile("2.pcd", *cloud2); cout<<"combining clouds"<<endl; PointCloud::Ptr output (new PointCloud()); pcl::transformPointCloud( *cloud2, *output, T.matrix()); *cloud1 += *output; pcl::io::savePCDFile("result.pcd", *cloud1); cout<<"Final result saved."<<endl; return 0; }
/** * @function fit_BB * @brief */ void fit_BB( std::vector<pcl::PointCloud<pcl::PointXYZRGBA>::Ptr > _clusters ) { double dim[3]; Eigen::Isometry3d Tf; for( int i = 0; i < _clusters.size(); ++i ) { printf("\t * Fitting box for cluster %d \n", i ); pcl::PointCloud<pcl::PointXYZ>::Ptr p( new pcl::PointCloud<pcl::PointXYZ>() ); p->points.resize( _clusters[i]->points.size() ); for(int j = 0; j < _clusters[i]->points.size(); ++j ) { p->points[j].x = _clusters[i]->points[j].x; p->points[j].y = _clusters[i]->points[j].y; p->points[j].z = _clusters[i]->points[j].z; } p->width = 1; p->height = p->points.size(); // Get Bounding Box pointcloudToBB( p, dim, Tf ); // Store (cube) pcl::PointCloud<pcl::PointXYZ>::Ptr pts( new pcl::PointCloud<pcl::PointXYZ>() ); pcl::PointCloud<pcl::PointXYZ>::Ptr final( new pcl::PointCloud<pcl::PointXYZ>() ); pts = sampleSQ_uniform( dim[0]/2, dim[1]/2, dim[2]/2, 0.1, 0.1 ); pcl::transformPointCloud( *pts, *final, Tf.matrix() ); char name[50]; sprintf( name, "bb_%d.pcd", i); pcl::io::savePCDFileASCII(name, *final ); } }
//============================================================================== dart::dynamics::SkeletonPtr loadSkeletonFromURDF( const dart::common::ResourceRetrieverPtr& retriever, const dart::common::Uri& uri, const Eigen::Isometry3d& transform) { dart::utils::DartLoader urdfLoader; const dart::dynamics::SkeletonPtr skeleton = urdfLoader.parseSkeleton(uri, retriever); if (!skeleton) throw std::runtime_error("Unable to load '" + uri.toString() + "'"); if (skeleton->getNumJoints() == 0) throw std::runtime_error("Skeleton is empty."); if (!transform.matrix().isIdentity()) { auto freeJoint = dynamic_cast<dart::dynamics::FreeJoint*>(skeleton->getRootJoint()); if (!freeJoint) throw std::runtime_error( "Unable to cast Skeleton's root joint to FreeJoint."); freeJoint->setTransform(transform); } return skeleton; }
/// Multiplies the current OpenGL matrix by the inverse of the camera pose.
void
pcl::simulation::RangeLikelihood::applyCameraTransform (const Eigen::Isometry3d & pose)
{
  const Eigen::Matrix4f m = (pose.matrix ().inverse ()).cast<float> ();
  // OpenGL expects the 4x4 matrix flattened in column-major order.
  float T[16];
  for (int col = 0; col < 4; ++col)
    for (int row = 0; row < 4; ++row)
      T[col * 4 + row] = m (row, col);
  glMultMatrixf (T);
}
// FK -> IK -> FK consistency check for the right arm: picks random joint
// values within limits, computes the tool pose via FK, runs IK for that pose
// and asserts the solver reports success.
void TestFKIKFK()
{
    int arm = RIGHT;

    // Create random legitimate joint values
    std::vector<RobotKin::Joint*> joints = kinematics->linkage("RightArm").joints();
    for (int i = 0; i < joints.size(); i++)
    {
        double jointVal = randbetween(joints[i]->min(), joints[i]->max());
        kinematics->joint(joints[i]->name()).value(jointVal);
    }

    // Do FK to get ee pose
    Eigen::Isometry3d initialFrame;
    initialFrame = kinematics->linkage("RightArm").tool().respectToRobot();

    // Do IK to get joints (seeded from the zero configuration)
    DrcHuboKin::ArmVector q = DrcHuboKin::ArmVector::Zero();
    RobotKin::rk_result_t result = kinematics->armIK(arm, q, initialFrame);
    std::cerr << "Solver result: " << result << std::endl;
    CPPUNIT_ASSERT(RobotKin::RK_SOLVED == result);

    // Do FK and verify that it's pretty much the same
    int baseJoint = 0;
    if (LEFT == arm) { baseJoint = LSP; }
    else if (RIGHT == arm) { baseJoint = RSP; }
    // NOTE(review): the joint write-back below is commented out, so finalFrame
    // is still computed from the *random* joints rather than the IK solution;
    // the frame-comparison assert below is disabled accordingly.
    for (int i = baseJoint; i < baseJoint+7; i++)
    {
        //kinematics->joint(DRCHUBO_URDF_JOINT_NAMES[i]).value(q[DRCHUBO_JOINT_INDEX_TO_LIMB_POSITION[i]]);
    }
    Eigen::Isometry3d finalFrame;
    finalFrame = kinematics->linkage("RightArm").tool().respectToRobot();
    //CPPUNIT_ASSERT((initialFrame.matrix() - finalFrame.matrix()).norm() < 0.001);
    std::cerr << "Inital pose: \n" << initialFrame.matrix() << std::endl;
    std::cerr << "Final pose: \n" << finalFrame.matrix() << std::endl;
}
// Estimates the rigid transform between two RGB-D frames: feature matching,
// PnP with RANSAC, then bundle-adjustment refinement. Returns the pose of
// frame 2 relative to frame 1.
Eigen::Isometry3d transformEstimation(const Mat& rgb1, const Mat& rgb2,
                                      const Mat& depth1, const Mat& depth2,
                                      const CAMERA_INTRINSIC_PARAMETERS& C)
{
    // Detect keypoints and compute descriptors on both frames.
    vector<KeyPoint> keyPts1, keyPts2;
    Mat descriptors1, descriptors2;
    extractKeypointsAndDescripotrs(rgb1, rgb2, keyPts1, keyPts2, descriptors1, descriptors2);

    // Match descriptors across the two frames.
    vector<DMatch> matches;
    descriptorsMatch(rgb1, rgb2, keyPts1, keyPts2, descriptors1, descriptors2, matches);

    // Build 3D-2D correspondences from the matches and depth maps.
    vector<Point2f> points;
    vector<Point3f> objectPoints;
    vector<Eigen::Vector2d> imagePoints1, imagePoints2;
    getObjectPointsAndImagePoints(depth1, depth2, keyPts1, keyPts2, matches,
                                  points, objectPoints, imagePoints1, imagePoints2, C);

    // Intrinsic matrix for PnP.
    double camera_matrix_data[3][3] = {
        {C.fx, 0, C.cx},
        {0, C.fy, C.cy},
        {0, 0, 1}
    };
    Mat cameraMatrix(3, 3, CV_64F, camera_matrix_data);

    Mat translation, rotation;
    solvePnPRansac(objectPoints, points, cameraMatrix, Mat(), rotation, translation,
                   false, 100, 1.0, 20);

    // Rodrigues vector -> 3x3 rotation matrix.
    Mat rot;
    Rodrigues(rotation, rot);

    Eigen::Matrix3d r;
    Eigen::Vector3d t;
    cout<<rot<<endl;
    cout<<translation<<endl;
    // Copy the CV_64F results into Eigen.
    const double* R = rot.ptr<double>(0);
    const double* tr = translation.ptr<double>(0);
    r << R[0], R[1], R[2],
         R[3], R[4], R[5],
         R[6], R[7], R[8];
    t << tr[0], tr[1], tr[2];

    Eigen::Isometry3d T = Eigen::Isometry3d::Identity();
    T.rotate(r);
    T.pretranslate(t);
    cout<<T.matrix()<<endl;

    BundleAdjustmentOptimization(objectPoints, imagePoints1, imagePoints2);
    return T;
}
// One iteration of the tracking front-end: read a frame, extract features,
// estimate the pose against the current keyframe, and decide whether to spawn
// a new keyframe, stay put, or trigger lost-recovery.
int GraphicEnd2::run()
{
    cout<<"********************"<<endl;
    _present.planes.clear();
    readimage();
    // Store the frame's keypoints/descriptors as a single "plane" entry.
    _present.planes.push_back( extractKPandDesp( _currRGB, _currDep ) );
    _present.frame_index = _index;

    // Multi-PnP pose estimation against the current keyframe.
    RESULT_OF_MULTIPNP result = multiPnP( _currKF.planes, _present.planes, _currKF.frame_index, _present.frame_index );
    Eigen::Isometry3d T = result.T.inverse();

    if (T.matrix() == Eigen::Isometry3d::Identity().matrix())
    {
        // Matching failed — identity is the sentinel for "no estimate".
        cout<<BOLDRED"This frame is lost"<<RESET<<endl;
        _lost++;
    }
    else if (result.norm > _max_pos_change)
    {
        // Large motion: generate a new keyframe (and optionally close loops).
        _robot = T*_kf_pos;
        generateKeyFrame(T);
        if (_loop_closure_detection == true)
            loopClosure();
        _lost = 0;
    }
    else
    {
        // Small motion: just update the robot pose.
        _robot = T*_kf_pos;
        _lost = 0;
    }
    if (_lost > _lost_frames)
    {
        cerr<<"the robot is lost, perform lost recovery"<<endl;
        lostRecovery();
    }

    cout<<RED"keyframe size = "<<_keyframes.size()<<RESET<<endl;
    _index++;
    if (_use_odometry )
    {
        _odo_this = _odometry[_index - 1];
    }
    return 0;
}
// Round-trips a deterministic random pose through the LCM encode/decode pair
// and checks the reconstruction matches to 1e-12.
GTEST_TEST(TestLcmUtil, testPose) {
  default_random_engine generator;
  generator.seed(0);
  // Build a pose with a random rotation and a linearly-spaced translation.
  Eigen::Isometry3d original;
  original.linear() = drake::math::UniformlyRandomRotmat(generator);
  original.translation().setLinSpaced(0, drake::kSpaceDimension);
  original.makeAffine();
  const Eigen::Isometry3d& const_view = original;
  bot_core::position_3d_t msg;
  EncodePose(const_view, msg);
  const Eigen::Isometry3d decoded = DecodePose(msg);
  EXPECT_TRUE(CompareMatrices(original.matrix(), decoded.matrix(), 1e-12,
                              MatrixCompareType::absolute));
}
// Projects a 3D point into pixel coordinates: homogeneous point -> pose
// transform -> camera projection -> perspective divide.
cv::Point2f FeatureMatcher::point3Dto2D( cv::Point3f& point_xyz, const Eigen::Isometry3d & trans )
{
    // Homogeneous 3D point.
    Eigen::Vector4d homogeneous( point_xyz.x, point_xyz.y, point_xyz.z, 1 );
    // K * T * X gives (u*w, v*w, w) in the reference frame.
    Eigen::Matrix<double, 3, 4> K = pCamera->toProjectionMatrix();
    Eigen::Vector3d uvw = K * trans.matrix() * homogeneous;
    // Perspective divide to get pixel coordinates.
    cv::Point2f pixel;
    pixel.x = uvw(0) / uvw(2);
    pixel.y = uvw(1) / uvw(2);
    return pixel;
}
//============================================================================== void ShapeNode::setRelativeTransform(const Eigen::Isometry3d& transform) { if(transform.matrix() == FixedFrame::mAspectProperties.mRelativeTf.matrix()) return; const Eigen::Isometry3d oldTransform = getRelativeTransform(); FixedFrame::setRelativeTransform(transform); dirtyJacobian(); dirtyJacobianDeriv(); mRelativeTransformUpdatedSignal.raise( this, oldTransform, getRelativeTransform()); }
// Estimates the rigid transform T mapping point set P onto Q using PCL's
// SVD-based TransformationFromCorrespondences.
// P, Q: 3xN matrices of corresponding points (column i of P matches column i
// of Q). Correspondences are weighted by inverse squared depth, since deeper
// points carry larger measurement noise.
void FeatureTransformationEstimator::estimatePoseSVD(Eigen::MatrixXd P, Eigen::MatrixXd Q, Eigen::Isometry3d &T)
{
    pcl::TransformationFromCorrespondences tfc;
    for (int i = 0; i < P.cols(); i++) {
        Eigen::Vector3f p_i = P.col(i).cast<float>();
        Eigen::Vector3f q_i = Q.col(i).cast<float>();

        float inverse_weight = p_i(2)*p_i(2) + q_i(2)*q_i(2);
        float weight = 1;
        if (inverse_weight > 0) {
            // FIX: was `weight = 1. / weight;`, which always left weight == 1
            // and silently disabled the depth weighting.
            weight = 1. / inverse_weight;
        }
        tfc.add(p_i, q_i, weight);
    }

    T.matrix() = tfc.getTransformation().matrix().cast<double>();
    // T.matrix() = Eigen::umeyama(P, Q, false);
}
// Polls the Fastrak tracker once per second and prints every sensor's pose.
int main()
{
    Fastrak fastrak;
    Eigen::Isometry3d pose;

    for (;;)
    {
        // Refresh sensor data from the ach channel.
        fastrak.achUpdate();

        for (int channel = 0; channel < fastrak.getNumChannels(); channel++)
        {
            fastrak.getPose(pose, channel, false);
            std::cout << "Sensor " << channel << ": " << std::endl;
            std::cout << pose.matrix() << std::endl;
        }

        boost::this_thread::sleep( boost::posix_time::milliseconds(1000) );
    }
    return 0;
}
// Periodically prepares a hard-coded arm-pose target for the Hubo arm-pose
// service. NOTE(review): the service call itself (and collision checking) is
// currently commented out, so the loop only spins.
int main(int argc, char** argv)
{
    ROS_INFO("Tracker Started.");
    ros::init(argc, argv, "simple_tracker");
    ros::NodeHandle nh;
    ros::ServiceClient poseClient = nh.serviceClient<HuboApplication::SetHuboArmPose>("/hubo/set_arm");
    ros::Rate loop_rate(0.1); // 0.1 Hz -> one iteration every 10 seconds

    while (ros::ok())
    {
        /*double x = randbetween(X_MIN, X_MAX);
        double y = randbetween(Y_MIN, Y_MAX);
        double z = randbetween(Z_MIN, Z_MAX);*/

        Eigen::Isometry3d ePose;
        tf::StampedTransform tPose;
        HuboApplication::SetHuboArmPose srv;

        // Fixed target: translation (.3, .2, 0), then rotated -90 deg about Z
        // and 180 deg about X.
        ePose.matrix() << 1, 0, 0, .3,
                          0, 1, 0, .2,
                          0, 0, 1, 0,
                          0, 0, 0, 1;
        ePose.rotate(Eigen::AngleAxisd(-M_PI/2, Eigen::Vector3d::UnitZ()));
        ePose.rotate(Eigen::AngleAxisd(M_PI, Eigen::Vector3d::UnitX()));

        /* Collision_Checker cc;
        cc.initCollisionChecker();
        cc.checkSelfCollision(ePose);

        tf::TransformEigenToTF(ePose, tPose);
        tf::poseTFToMsg(tPose, srv.request.Target);
        srv.request.ArmIndex = 1;
        poseClient.call(srv);
        */
        ros::spinOnce();
        loop_rate.sleep();
    }
    ros::spin();
    return 0;
}
// Merges a new frame into an accumulated cloud: transforms the existing cloud
// by T into the new frame, adds the new frame's cloud, and voxel-downsamples
// the result.
PointCloud::Ptr joinPointCloud( PointCloud::Ptr original, FRAME& newFrame, Eigen::Isometry3d T, CAMERA_INTRINSIC_PARAMETERS& camera )
{
    PointCloud::Ptr newCloud = image2PointCloud( newFrame.rgb, newFrame.depth, camera );

    // Bring the accumulated cloud into the new frame's coordinates and merge.
    PointCloud::Ptr transformed( new PointCloud() );
    pcl::transformPointCloud( *original, *transformed, T.matrix() );
    *newCloud += *transformed;

    // Voxel-grid downsampling keeps the merged cloud from growing unboundedly;
    // the leaf size comes from the "voxel_grid" parameter.
    static pcl::VoxelGrid<PointT> voxel;
    static ParameterReader pd;
    double gridsize = atof( pd.getData("voxel_grid").c_str() );
    voxel.setLeafSize( gridsize, gridsize, gridsize );
    voxel.setInputCloud( newCloud );
    PointCloud::Ptr downsampled( new PointCloud() );
    voxel.filter( *downsampled );
    return downsampled;
}
// Builds (lazily, once per frame) the frame's colored point cloud from its
// depth + RGB images, then returns a copy transformed by the inverse of the
// frame's pose. Pixels are sampled on a 3x3 grid to keep the cloud small.
Mapper::PointCloud::Ptr Mapper::generatePointCloud(const RGBDFrame::Ptr &frame)
{
    PointCloud::Ptr tmp( new PointCloud() );
    if ( frame->pointcloud == nullptr )
    {
        // point cloud is null ptr — build it from the images.
        frame->pointcloud = boost::make_shared<PointCloud>();
        // FIX: removed the `#pragma omp parallel for` that was here — the loop
        // body calls push_back on a shared std::vector, which is not
        // thread-safe, so the parallel version was a data race.
        for ( int m=0; m<frame->depth.rows; m+=3 )
        {
            for ( int n=0; n<frame->depth.cols; n+=3 )
            {
                ushort d = frame->depth.ptr<ushort>(m)[n];
                if (d == 0)
                    continue;  // no depth reading at this pixel
                if (d > max_distance * frame->camera.scale)
                    continue;  // beyond the trusted depth range
                PointT p;
                cv::Point3f p_cv = frame->project2dTo3d(n, m);
                // OpenCV stores pixels in BGR order.
                p.b = frame->rgb.ptr<uchar>(m)[n*3];
                p.g = frame->rgb.ptr<uchar>(m)[n*3+1];
                p.r = frame->rgb.ptr<uchar>(m)[n*3+2];
                p.x = p_cv.x;
                p.y = p_cv.y;
                p.z = p_cv.z;
                frame->pointcloud->points.push_back( p );
            }
        }
    }
    // Transform the cached cloud by the inverse frame pose.
    Eigen::Isometry3d T = frame->getTransform().inverse();
    pcl::transformPointCloud( *frame->pointcloud, *tmp, T.matrix() );
    tmp->is_dense = false;
    return tmp;
}
// Converts the rendered depth + color buffers into a PCL point cloud.
// make_global=true  -> points expressed in the world frame (pose applied);
// make_global=false -> points expressed in a body frame derived from pose,
//                      with sensor origin/orientation metadata recorded.
void
pcl::simulation::RangeLikelihood::getPointCloud (pcl::PointCloud<pcl::PointXYZRGB>::Ptr pc,
  bool make_global, const Eigen::Isometry3d & pose)
{
  // TODO: check if this works for rows/cols >1 and for width&height != 640x480
  // i.e. multiple tiled images
  pc->width = col_width_;
  pc->height = row_height_;
  // Was:
  //pc->width = camera_width_;
  //pc->height = camera_height_;

  pc->is_dense = false;
  pc->points.resize (pc->width*pc->height);

  int points_added = 0;

  float camera_fx_reciprocal_ = 1.0f / camera_fx_;
  float camera_fy_reciprocal_ = 1.0f / camera_fy_;
  float zn = z_near_;
  float zf = z_far_;

  const uint8_t* color_buffer = getColorBuffer();

  // TODO: support decimation
  // Copied the format of RangeImagePlanar::setDepthImage()
  // Use this as a template for decimation
  for (int y = 0; y < row_height_ ; ++y) //camera_height_
  {
    for (int x = 0; x < col_width_ ; ++x) // camera_width_
    {
      // Find XYZ from normalized 0->1 mapped disparity
      int idx = points_added; // y*camera_width_ + x;
      float d = depth_buffer_[y*camera_width_ + x] ;
      if (d < 1.0) // only add points with depth buffer less than max (20m) range
      {
        // Undo the perspective depth-buffer mapping to recover metric z.
        float z = zf*zn/((zf-zn)*(d - zf/(zf-zn)));

        // TODO: add mode to ignore points with no return i.e. depth_buffer_ ==1
        // NB: OpenGL uses a Right Hand system with +X right, +Y up, +Z back out of the screen,
        // The Z-buffer is natively -1 (far) to 1 (near)
        // But in this class we invert this to be 0 (near, 0.7m) and 1 (far, 20m)
        // ... so by negating y we get to a right-hand computer vision system
        // which is also used by PCL and OpenNi
        pc->points[idx].z = z;
        pc->points[idx].x = (static_cast<float> (x)-camera_cx_) * z * (-camera_fx_reciprocal_);
        pc->points[idx].y = (static_cast<float> (y)-camera_cy_) * z * (-camera_fy_reciprocal_);

        int rgb_idx = y*col_width_ + x; //camera_width_
        pc->points[idx].b = color_buffer[rgb_idx*3+2]; // blue
        pc->points[idx].g = color_buffer[rgb_idx*3+1]; // green
        pc->points[idx].r = color_buffer[rgb_idx*3]; // red
        points_added++;
      }
    }
  }
  // Shrink the cloud to the number of points actually written.
  pc->width = 1;
  pc->height = points_added;
  pc->points.resize (points_added);

  if (make_global)
  {
    // Go from OpenGL to (Z-up, X-forward, Y-left)
    Eigen::Matrix4f T;
    T << 0, 0, -1, 0,
        -1, 0, 0, 0,
         0, 1, 0, 0,
         0, 0, 0, 1;
    Eigen::Matrix4f m = pose.matrix ().cast<float> ();
    m = m * T;
    pcl::transformPointCloud (*pc, *pc, m);
  }
  else
  {
    // Go from OpenGL to Camera (Z-forward, X-right, Y-down)
    Eigen::Matrix4f T;
    T << 1, 0, 0, 0,
         0, -1, 0, 0,
         0, 0, -1, 0,
         0, 0, 0, 1;
    pcl::transformPointCloud (*pc, *pc, T);

    // Go from Camera to body (Z-up, X-forward, Y-left)
    Eigen::Matrix4f cam_to_body;
    cam_to_body << 0, 0, 1, 0,
                  -1, 0, 0, 0,
                   0, -1, 0, 0,
                   0, 0, 0, 1;
    Eigen::Matrix4f camera = pose.matrix ().cast<float> ();
    camera = camera * cam_to_body;
    // Record the sensor pose metadata on the cloud.
    pc->sensor_origin_ = camera.rightCols (1);
    Eigen::Quaternion<float> quat (camera.block<3,3> (0,0));
    pc->sensor_orientation_ = quat;
  }
}
// Sparse direct-method demo: tracks FAST keypoints of the first frame through
// later frames of a TUM RGB-D sequence by minimizing photometric error.
// Usage: ./direct_sparse path_to_dataset
int main ( int argc, char** argv )
{
    if ( argc != 2 )
    {
        cout<<"用法:./direct_sparse path_to_dataset"<<endl;
        return 1;
    }
    srand ( ( unsigned int ) time ( 0 ) ); // seed RNG (used below for plotting)
    string path_to_dataset = argv[1];
    string associate_file = path_to_dataset + "/associate.txt";

    ifstream fin ( associate_file );

    string rgb_file, depth_file, time_rgb, time_depth; // rgb timestamp, rgb file, depth timestamp, depth file
    cv::Mat color, depth, gray; // color / depth / grayscale images
    vector<Measurement> measurements;

    // Camera intrinsics.
    float cx = 325.5;
    float cy = 253.5;
    float fx = 518.0;
    float fy = 519.0;
    float depth_scale = 1000.0; // depth stored in mm; scale converts to m
    Eigen::Matrix3f K;
    K<<fx,0.f,cx,0.f,fy,cy,0.f,0.f,1.0f;

    Eigen::Isometry3d Tcw = Eigen::Isometry3d::Identity(); // camera pose, homogeneous 4x4 [R t]

    cv::Mat prev_color;
    // The first image is the reference; every later image is aligned to it
    // with the direct method.
    for ( int index=0; index<10; index++ )
    {
        cout<<"*********** loop "<<index<<" ************"<<endl;
        fin>>time_rgb>>rgb_file>>time_depth>>depth_file;
        color = cv::imread ( path_to_dataset+"/"+rgb_file );
        depth = cv::imread ( path_to_dataset+"/"+depth_file, -1 ); // -1: load depth unchanged (16-bit)
        if ( color.data==nullptr || depth.data==nullptr )
            continue;
        cv::cvtColor ( color, gray, cv::COLOR_BGR2GRAY ); // color -> grayscale
        if ( index ==0 ) // first frame: build the measurement set
        {
            // Extract FAST keypoints on the first frame.
            vector<cv::KeyPoint> keypoints;
            cv::Ptr<cv::FastFeatureDetector> detector = cv::FastFeatureDetector::create();
            detector->detect ( color, keypoints );
            for ( auto kp:keypoints )
            {
                // Drop keypoints near the image border (a patch around each
                // point is needed later).
                if ( kp.pt.x < 20 || kp.pt.y < 20 || ( kp.pt.x+20 ) >color.cols || ( kp.pt.y+20 ) >color.rows )
                    continue;
                ushort d = depth.ptr<ushort> ( cvRound ( kp.pt.y ) ) [ cvRound ( kp.pt.x ) ]; // depth at the keypoint
                if ( d==0 )
                    continue; // no depth reading
                // Back-project the 2D pixel into a 3D point in the camera frame.
                Eigen::Vector3d p3d = project2Dto3D ( kp.pt.x, kp.pt.y, d, fx, fy, cx, cy, depth_scale );
                // Grayscale intensity at the (rounded) keypoint position.
                float grayscale = float ( gray.ptr<uchar> ( cvRound ( kp.pt.y ) ) [ cvRound ( kp.pt.x ) ] );
                measurements.push_back ( Measurement ( p3d, grayscale ) ); // measurement = 3D point + reference intensity
            }
            prev_color = color.clone();
            continue; // nothing to track against on the first frame
        }
        // Estimate the camera motion with the direct method (timed).
        chrono::steady_clock::time_point t1 = chrono::steady_clock::now();
        poseEstimationDirect ( measurements, &gray, K, Tcw );
        chrono::steady_clock::time_point t2 = chrono::steady_clock::now();
        chrono::duration<double> time_used = chrono::duration_cast<chrono::duration<double>> ( t2-t1 );
        cout<<"直接法耗时 direct method costs time: "<<time_used.count() <<" seconds."<<endl;
        cout<<"转换矩阵 Tcw="<<Tcw.matrix() <<endl;

        // Plot the feature points: reference frame on top, current frame below.
        cv::Mat img_show ( color.rows*2, color.cols, CV_8UC3 );
        prev_color.copyTo ( img_show ( cv::Rect ( 0,0,color.cols, color.rows ) ) );
        color.copyTo ( img_show ( cv::Rect ( 0,color.rows,color.cols, color.rows ) ) );
        for ( Measurement m:measurements )
        {
            if ( rand() > RAND_MAX/5 )
                continue; // draw only ~20% of the points
            Eigen::Vector3d p = m.pos_world;
            Eigen::Vector2d pixel_prev = project3Dto2D ( p ( 0,0 ), p ( 1,0 ), p ( 2,0 ), fx, fy, cx, cy ); // pixel in the reference frame
            Eigen::Vector3d p2 = Tcw*m.pos_world; // point in the current camera frame
            Eigen::Vector2d pixel_now = project3Dto2D ( p2 ( 0,0 ), p2 ( 1,0 ), p2 ( 2,0 ), fx, fy, cx, cy ); // pixel in the current frame
            if ( pixel_now(0,0)<0 || pixel_now(0,0)>=color.cols || pixel_now(1,0)<0 || pixel_now(1,0)>=color.rows )
                continue; // projected outside the image
            // Random color per point so correspondences are easy to follow.
            float b = 255*float ( rand() ) /RAND_MAX;
            float g = 255*float ( rand() ) /RAND_MAX;
            float r = 255*float ( rand() ) /RAND_MAX;
            cv::circle ( img_show, cv::Point2d ( pixel_prev ( 0,0 ), pixel_prev ( 1,0 ) ), 8, cv::Scalar ( b,g,r ), 2 );
            cv::circle ( img_show, cv::Point2d ( pixel_now ( 0,0 ), pixel_now ( 1,0 ) +color.rows ), 8, cv::Scalar ( b,g,r ), 2 );
            cv::line ( img_show, cv::Point2d ( pixel_prev ( 0,0 ), pixel_prev ( 1,0 ) ), cv::Point2d ( pixel_now ( 0,0 ), pixel_now ( 1,0 ) +color.rows ), cv::Scalar ( b,g,r ), 1 );
        }
        cv::imshow ( "result", img_show );
        cv::waitKey ( 0 ); // wait for a key press before the next frame
    }
    return 0;
}
// Numeric checks of the Lie-group adjoint mappings (Ad, dAd, ad, dad and
// their inverse / rotation-only / angular-only / linear-only variants)
// against their explicit matrix definitions, on random twists and transforms.
TEST(LIE_GROUP_OPERATORS, ADJOINT_MAPPINGS)
{
    int numTest = 100;

    // AdT(V) == T * V * InvT
    for (int i = 0; i < numTest; ++i)
    {
        Eigen::Vector6d t = Eigen::Vector6d::Random();
        Eigen::Isometry3d T = math::expMap(t);
        Eigen::Vector6d V = Eigen::Vector6d::Random();
        Eigen::Vector6d AdTV = AdT(T, V);

        // Ad(T, V) = T * [V] * InvT
        Eigen::Matrix4d T_V_InvT = T.matrix() * toMatrixForm(V) * T.inverse().matrix();
        Eigen::Vector6d T_V_InvT_se3 = fromMatrixForm(T_V_InvT);
        for (int j = 0; j < 6; ++j)
            EXPECT_NEAR(AdTV(j), T_V_InvT_se3(j), LIE_GROUP_OPT_TOL);

        // Ad(T, V) = [R 0; [p]R R] * V
        Eigen::Matrix6d AdTMatrix = Eigen::Matrix6d::Zero();
        AdTMatrix.topLeftCorner<3,3>() = T.linear();
        AdTMatrix.bottomRightCorner<3,3>() = T.linear();
        AdTMatrix.bottomLeftCorner<3,3>() = math::makeSkewSymmetric(T.translation()) * T.linear();
        Eigen::Vector6d AdTMatrix_V = AdTMatrix * V;
        for (int j = 0; j < 6; ++j)
            EXPECT_NEAR(AdTV(j), AdTMatrix_V(j), LIE_GROUP_OPT_TOL);
    }

    // AdR == AdT([R 0; 0 1], V)
    for (int i = 0; i < numTest; ++i)
    {
        Eigen::Vector6d t = Eigen::Vector6d::Random();
        Eigen::Isometry3d T = math::expMap(t);
        // R keeps only the rotational part of T.
        Eigen::Isometry3d R = Eigen::Isometry3d::Identity();
        R = T.linear();
        Eigen::Vector6d V = Eigen::Vector6d::Random();
        Eigen::Vector6d AdTV = AdT(R, V);
        Eigen::Vector6d AdRV = AdR(T, V);
        for (int j = 0; j < 6; ++j)
            EXPECT_NEAR(AdTV(j), AdRV(j), LIE_GROUP_OPT_TOL);
    }

    // AdTAngular == AdT(T, se3(w, 0))
    for (int i = 0; i < numTest; ++i)
    {
        Eigen::Vector6d t = Eigen::Vector6d::Random();
        Eigen::Isometry3d T = math::expMap(t);
        Eigen::Vector3d w = Eigen::Vector3d::Random();
        Eigen::Vector6d V = Eigen::Vector6d::Zero();
        V.head<3>() = w;
        Eigen::Vector6d AdTV = AdT(T, V);
        Eigen::Vector6d AdTAng = AdTAngular(T, w);
        for (int j = 0; j < 6; ++j)
            EXPECT_NEAR(AdTV(j), AdTAng(j), LIE_GROUP_OPT_TOL);
    }

    // AdTLinear == AdT(T, se3(0, v))
    for (int i = 0; i < numTest; ++i)
    {
        Eigen::Vector6d t = Eigen::Vector6d::Random();
        Eigen::Isometry3d T = math::expMap(t);
        Eigen::Vector3d v = Eigen::Vector3d::Random();
        Eigen::Vector6d V = Eigen::Vector6d::Zero();
        V.tail<3>() = v;
        Eigen::Vector6d AdTV = AdT(T, V);
        Eigen::Vector6d AdTLin = AdTLinear(T, v);
        for (int j = 0; j < 6; ++j)
            EXPECT_NEAR(AdTV(j), AdTLin(j), LIE_GROUP_OPT_TOL);
    }

    // AdTJac
    // NOTE(review): this section is identical to the AdTLinear block above and
    // never calls AdTJac — looks like a copy-paste; confirm intended coverage.
    for (int i = 0; i < numTest; ++i)
    {
        Eigen::Vector6d t = Eigen::Vector6d::Random();
        Eigen::Isometry3d T = math::expMap(t);
        Eigen::Vector3d v = Eigen::Vector3d::Random();
        Eigen::Vector6d V = Eigen::Vector6d::Zero();
        V.tail<3>() = v;
        Eigen::Vector6d AdTV = AdT(T, V);
        Eigen::Vector6d AdTLin = AdTLinear(T, v);
        for (int j = 0; j < 6; ++j)
            EXPECT_NEAR(AdTV(j), AdTLin(j), LIE_GROUP_OPT_TOL);
    }

    // AdInvT
    for (int i = 0; i < numTest; ++i)
    {
        Eigen::Vector6d t = Eigen::Vector6d::Random();
        Eigen::Isometry3d T = math::expMap(t);
        Eigen::Isometry3d InvT = T.inverse();
        Eigen::Vector6d V = Eigen::Vector6d::Random();
        Eigen::Vector6d Ad_InvT = AdT(InvT, V);
        Eigen::Vector6d AdInv_T = AdInvT(T, V);
        for (int j = 0; j < 6; ++j)
            EXPECT_NEAR(Ad_InvT(j), AdInv_T(j), LIE_GROUP_OPT_TOL);
    }

    // AdInvRLinear
    for (int i = 0; i < numTest; ++i)
    {
        Eigen::Vector6d t = Eigen::Vector6d::Random();
        Eigen::Isometry3d T = math::expMap(t);
        Eigen::Vector3d v = Eigen::Vector3d::Random();
        Eigen::Vector6d V = Eigen::Vector6d::Zero();
        V.tail<3>() = v;
        // R keeps only the rotational part of T.
        Eigen::Isometry3d R = Eigen::Isometry3d::Identity();
        R = T.linear();
        Eigen::Vector6d AdT_ = AdT(R.inverse(), V);
        Eigen::Vector6d AdInvRLinear_ = AdInvRLinear(T, v);
        for (int j = 0; j < 6; ++j)
            EXPECT_NEAR(AdT_(j), AdInvRLinear_(j), LIE_GROUP_OPT_TOL);
    }

    // dAdT
    for (int i = 0; i < numTest; ++i)
    {
        Eigen::Vector6d t = Eigen::Vector6d::Random();
        Eigen::Isometry3d T = math::expMap(t);
        Eigen::Vector6d F = Eigen::Vector6d::Random();
        Eigen::Vector6d dAdTF = dAdT(T, F);

        // dAd(T, F) = [R 0; [p]R R]^T * F
        Eigen::Matrix6d AdTMatrix = Eigen::Matrix6d::Zero();
        AdTMatrix.topLeftCorner<3,3>() = T.linear();
        AdTMatrix.bottomRightCorner<3,3>() = T.linear();
        AdTMatrix.bottomLeftCorner<3,3>() = math::makeSkewSymmetric(T.translation()) * T.linear();
        Eigen::Vector6d AdTTransMatrix_V = AdTMatrix.transpose() * F;
        for (int j = 0; j < 6; ++j)
            EXPECT_NEAR(dAdTF(j), AdTTransMatrix_V(j), LIE_GROUP_OPT_TOL);
    }

    // dAdInvT
    for (int i = 0; i < numTest; ++i)
    {
        Eigen::Vector6d t = Eigen::Vector6d::Random();
        Eigen::Isometry3d T = math::expMap(t);
        Eigen::Isometry3d InvT = T.inverse();
        Eigen::Vector6d F = Eigen::Vector6d::Random();
        Eigen::Vector6d dAdInvT_F = dAdInvT(T, F);

        // Check against dAdT applied to the inverse transform.
        Eigen::Vector6d dAd_InvTF = dAdT(InvT, F);
        for (int j = 0; j < 6; ++j)
            EXPECT_NEAR(dAdInvT_F(j), dAd_InvTF(j), LIE_GROUP_OPT_TOL);

        // dAd(T, F) = [R 0; [p]R R]^T * F, built from InvT.
        Eigen::Matrix6d AdInvTMatrix = Eigen::Matrix6d::Zero();
        AdInvTMatrix.topLeftCorner<3,3>() = InvT.linear();
        AdInvTMatrix.bottomRightCorner<3,3>() = InvT.linear();
        AdInvTMatrix.bottomLeftCorner<3,3>() = math::makeSkewSymmetric(InvT.translation()) * InvT.linear();
        Eigen::Vector6d AdInvTTransMatrix_V = AdInvTMatrix.transpose() * F;
        for (int j = 0; j < 6; ++j)
            EXPECT_NEAR(dAdInvT_F(j), AdInvTTransMatrix_V(j), LIE_GROUP_OPT_TOL);
    }

    // dAdInvR
    for (int i = 0; i < numTest; ++i)
    {
        Eigen::Vector6d t = Eigen::Vector6d::Random();
        Eigen::Isometry3d T = math::expMap(t);
        Eigen::Isometry3d InvT = T.inverse();
        // InvR keeps only the rotational part of InvT.
        Eigen::Isometry3d InvR = Eigen::Isometry3d::Identity();
        InvR = InvT.linear();
        Eigen::Vector6d F = Eigen::Vector6d::Random();
        Eigen::Vector6d dAdInvR_F = dAdInvR(T, F);

        // Check against dAdT applied to the rotation-only inverse.
        Eigen::Vector6d dAd_InvTF = dAdT(InvR, F);
        for (int j = 0; j < 6; ++j)
            EXPECT_NEAR(dAdInvR_F(j), dAd_InvTF(j), LIE_GROUP_OPT_TOL);
    }

    // ad
    for (int i = 0; i < numTest; ++i)
    {
        Eigen::Vector6d V = Eigen::Vector6d::Random();
        Eigen::Vector6d W = Eigen::Vector6d::Random();
        Eigen::Vector6d ad_V_W = ad(V, W);

        // ad(V, W) = [ [w] 0; [v] [w] ] * W
        Eigen::Matrix6d adV_Matrix = Eigen::Matrix6d::Zero();
        adV_Matrix.topLeftCorner<3,3>() = math::makeSkewSymmetric(V.head<3>());
        adV_Matrix.bottomRightCorner<3,3>() = math::makeSkewSymmetric(V.head<3>());
        adV_Matrix.bottomLeftCorner<3,3>() = math::makeSkewSymmetric(V.tail<3>());
        Eigen::Vector6d adV_Matrix_W = adV_Matrix * W;
        for (int j = 0; j < 6; ++j)
            EXPECT_NEAR(ad_V_W(j), adV_Matrix_W(j), LIE_GROUP_OPT_TOL);
    }

    // dad
    for (int i = 0; i < numTest; ++i)
    {
        Eigen::Vector6d V = Eigen::Vector6d::Random();
        Eigen::Vector6d F = Eigen::Vector6d::Random();
        Eigen::Vector6d dad_V_F = dad(V, F);

        // dad(V, F) = [ [w] 0; [v] [w] ]^T * F
        Eigen::Matrix6d dadV_Matrix = Eigen::Matrix6d::Zero();
        dadV_Matrix.topLeftCorner<3,3>() = math::makeSkewSymmetric(V.head<3>());
        dadV_Matrix.bottomRightCorner<3,3>() = math::makeSkewSymmetric(V.head<3>());
        dadV_Matrix.bottomLeftCorner<3,3>() = math::makeSkewSymmetric(V.tail<3>());
        Eigen::Vector6d dadV_Matrix_F= dadV_Matrix.transpose() * F;
        for (int j = 0; j < 6; ++j)
            EXPECT_NEAR(dad_V_F(j), dadV_Matrix_F(j), LIE_GROUP_OPT_TOL);
    }
}
// Detects new keypoints in the LEFT image (regions covered by active
// landmarks are masked out), triangulates each against the RIGHT image and
// returns the freshly allocated landmarks. Also draws detection feedback into
// p_matDisplay.
const std::shared_ptr< std::vector< CLandmark* > > CTrackerStereoMotionModel::_getNewLandmarks( const UIDFrame& p_uFrame,
                                                                                               cv::Mat& p_matDisplay,
                                                                                               const cv::Mat& p_matImageLEFT,
                                                                                               const cv::Mat& p_matImageRIGHT,
                                                                                               const Eigen::Isometry3d& p_matTransformationWORLDtoLEFT,
                                                                                               const Eigen::Isometry3d& p_matTransformationLEFTtoWORLD,
                                                                                               const Eigen::Vector3d& p_vecRotation )
{
    //ds precompute extrinsics
    const MatrixProjection matProjectionWORLDtoLEFT( m_pCameraLEFT->m_matProjection*p_matTransformationWORLDtoLEFT.matrix( ) );

    //ds solution holder
    std::shared_ptr< std::vector< CLandmark* > > vecNewLandmarks( std::make_shared< std::vector< CLandmark* > >( ) );

    //ds detect new keypoints (active landmark regions are masked out)
    //const std::shared_ptr< std::vector< cv::KeyPoint > > vecKeyPoints( m_cDetector.detectKeyPointsTilewise( p_matImageLEFT, matMask ) );
    std::vector< cv::KeyPoint > vecKeyPoints;
    m_pDetector->detect( p_matImageLEFT, vecKeyPoints, m_cMatcher.getMaskActiveLandmarks( p_matTransformationWORLDtoLEFT, p_matDisplay ) );

    //ds compute descriptors for the keypoints
    CDescriptor matReferenceDescriptors;
    //m_pExtractor->compute( p_matImageLEFT, *vecKeyPoints, matReferenceDescriptors );
    m_pExtractor->compute( p_matImageLEFT, vecKeyPoints, matReferenceDescriptors );

    //ds process the keypoints and see if we can use them as landmarks
    for( uint32_t u = 0; u < vecKeyPoints.size( ); ++u )
    {
        //ds current points
        const cv::KeyPoint cKeyPointLEFT( vecKeyPoints[u] );
        const cv::Point2f ptLandmarkLEFT( cKeyPointLEFT.pt );
        const CDescriptor matDescriptorLEFT( matReferenceDescriptors.row(u) );

        try
        {
            //ds triangulate the point (throws CExceptionNoMatchFound on failure)
            const CMatchTriangulation cMatch( m_pTriangulator->getPointTriangulatedCompactInRIGHT( p_matImageRIGHT, cKeyPointLEFT, matDescriptorLEFT ) );
            const CPoint3DCAMERA vecPointTriangulatedLEFT( cMatch.vecPointXYZCAMERA );
            const CDescriptor matDescriptorRIGHT( cMatch.matDescriptorCAMERA );

            //ds check depth
            const double dDepthMeters( vecPointTriangulatedLEFT.z( ) );

            //ds check if point is in front of the camera and not more than a defined distance away
            if( m_dMinimumDepthMeters < dDepthMeters && m_dMaximumDepthMeters > dDepthMeters )
            {
                //ds compute triangulated point in world frame
                const CPoint3DWORLD vecPointTriangulatedWORLD( p_matTransformationLEFTtoWORLD*vecPointTriangulatedLEFT );

                //ds landmark right
                const cv::Point2f ptLandmarkRIGHT( cMatch.ptUVCAMERA );

                //ds allocate a new landmark and add the current position
                CLandmark* pLandmark( new CLandmark( m_uAvailableLandmarkID,
                                                     matDescriptorLEFT,
                                                     cMatch.matDescriptorCAMERA,
                                                     cKeyPointLEFT.size,
                                                     vecPointTriangulatedWORLD,
                                                     m_pCameraLEFT->getNormalHomogenized( ptLandmarkLEFT ),
                                                     ptLandmarkLEFT,
                                                     ptLandmarkRIGHT,
                                                     vecPointTriangulatedLEFT,
                                                     p_matTransformationLEFTtoWORLD.translation( ),
                                                     p_vecRotation,
                                                     matProjectionWORLDtoLEFT,
                                                     p_uFrame ) );

                //ds log creation
                CLogger::CLogLandmarkCreation::addEntry( p_uFrame, pLandmark, dDepthMeters, ptLandmarkLEFT, ptLandmarkRIGHT );

                //ds add to newly detected
                vecNewLandmarks->push_back( pLandmark );

                //ds next landmark id
                ++m_uAvailableLandmarkID;

                //ds draw detected point
                cv::line( p_matDisplay, ptLandmarkLEFT, cv::Point2f( ptLandmarkLEFT.x+m_pCameraSTEREO->m_uPixelWidth, ptLandmarkLEFT.y ), CColorCodeBGR( 175, 175, 175 ) );
                cv::circle( p_matDisplay, ptLandmarkLEFT, 2, CColorCodeBGR( 0, 255, 0 ), -1 );
                //cv::circle( p_matDisplay, ptLandmarkLEFT, cLandmark->dKeyPointSize, CColorCodeBGR( 255, 0, 0 ), 1 );
                cv::putText( p_matDisplay, std::to_string( pLandmark->uID ) , cv::Point2d( ptLandmarkLEFT.x+pLandmark->dKeyPointSize, ptLandmarkLEFT.y+pLandmark->dKeyPointSize ), cv::FONT_HERSHEY_PLAIN, 0.5, CColorCodeBGR( 0, 0, 255 ) );

                //ds draw reprojection of triangulation
                cv::circle( p_matDisplay, cv::Point2d( ptLandmarkRIGHT.x+m_pCameraSTEREO->m_uPixelWidth, ptLandmarkRIGHT.y ), 2, CColorCodeBGR( 255, 0, 0 ), -1 );
            }
            else
            {
                //ds depth out of range: mark the rejected keypoint in red
                cv::circle( p_matDisplay, ptLandmarkLEFT, 2, CColorCodeBGR( 0, 0, 255 ), -1 );
                //cv::circle( p_matDisplay, ptLandmarkLEFT, cKeyPoint.size, CColorCodeBGR( 0, 0, 255 ) );
                //std::printf( "<CTrackerStereoMotionModel>(_getNewLandmarks) could not find match for keypoint (invalid depth: %f m)\n", vecPointTriangulatedLEFT(2) );
            }
        }
        catch( const CExceptionNoMatchFound& p_cException )
        {
            //ds triangulation failed: mark the rejected keypoint in red
            cv::circle( p_matDisplay, ptLandmarkLEFT, 2, CColorCodeBGR( 0, 0, 255 ), -1 );
            //cv::circle( p_matDisplay, ptLandmarkLEFT, cKeyPoint.size, CColorCodeBGR( 0, 0, 255 ) );
            //std::printf( "<CTrackerStereoMotionModel>(_getNewLandmarks) could not find match for keypoint (%s)\n", p_cException.what( ) );
        }
    }

    //std::printf( "<CTrackerStereoMotionModel>(_getNewLandmarks) added new landmarks: %lu/%lu\n", vecNewLandmarks->size( ), vecKeyPoints.size( ) );

    //ds return found landmarks
    return vecNewLandmarks;
}
int main( int argc, char** argv ) { // 调用格式:命令 [第一个图] [第二个图] if (argc != 3) { cout<<"Usage: ba_example img1, img2"<<endl; exit(1); } // 读取图像 cv::Mat img1 = cv::imread( argv[1] ); cv::Mat img2 = cv::imread( argv[2] ); // 找到对应点 vector<cv::Point2f> pts1, pts2; if ( findCorrespondingPoints( img1, img2, pts1, pts2 ) == false ) { cout<<"匹配点不够!"<<endl; return 0; } cout<<"找到了"<<pts1.size()<<"组对应特征点。"<<endl; // 构造g2o中的图 // 先构造求解器 g2o::SparseOptimizer optimizer; // 使用Cholmod中的线性方程求解器 g2o::BlockSolver_6_3::LinearSolverType* linearSolver = new g2o::LinearSolverCholmod<g2o::BlockSolver_6_3::PoseMatrixType> (); // 6*3 的参数 g2o::BlockSolver_6_3* block_solver = new g2o::BlockSolver_6_3( linearSolver ); // L-M 下降 g2o::OptimizationAlgorithmLevenberg* algorithm = new g2o::OptimizationAlgorithmLevenberg( block_solver ); optimizer.setAlgorithm( algorithm ); optimizer.setVerbose( false ); // 添加节点 // 两个位姿节点 for ( int i=0; i<2; i++ ) { g2o::VertexSE3Expmap* v = new g2o::VertexSE3Expmap(); v->setId(i); if ( i == 0) v->setFixed( true ); // 第一个点固定为零 // 预设值为单位Pose,因为我们不知道任何信息 v->setEstimate( g2o::SE3Quat() ); optimizer.addVertex( v ); } // 很多个特征点的节点 // 以第一帧为准 for ( size_t i=0; i<pts1.size(); i++ ) { g2o::VertexSBAPointXYZ* v = new g2o::VertexSBAPointXYZ(); v->setId( 2 + i ); // 由于深度不知道,只能把深度设置为1了 double z = 1; double x = ( pts1[i].x - cx ) * z / fx; double y = ( pts1[i].y - cy ) * z / fy; v->setMarginalized(true); v->setEstimate( Eigen::Vector3d(x,y,z) ); optimizer.addVertex( v ); } // 准备相机参数 g2o::CameraParameters* camera = new g2o::CameraParameters( fx, Eigen::Vector2d(cx, cy), 0 ); camera->setId(0); optimizer.addParameter( camera ); // 准备边 // 第一帧 vector<g2o::EdgeProjectXYZ2UV*> edges; for ( size_t i=0; i<pts1.size(); i++ ) { g2o::EdgeProjectXYZ2UV* edge = new g2o::EdgeProjectXYZ2UV(); edge->setVertex( 0, dynamic_cast<g2o::VertexSBAPointXYZ*> (optimizer.vertex(i+2)) ); edge->setVertex( 1, dynamic_cast<g2o::VertexSE3Expmap*> (optimizer.vertex(0)) ); edge->setMeasurement( 
Eigen::Vector2d(pts1[i].x, pts1[i].y ) ); edge->setInformation( Eigen::Matrix2d::Identity() ); edge->setParameterId(0, 0); // 核函数 edge->setRobustKernel( new g2o::RobustKernelHuber() ); optimizer.addEdge( edge ); edges.push_back(edge); } // 第二帧 for ( size_t i=0; i<pts2.size(); i++ ) { g2o::EdgeProjectXYZ2UV* edge = new g2o::EdgeProjectXYZ2UV(); edge->setVertex( 0, dynamic_cast<g2o::VertexSBAPointXYZ*> (optimizer.vertex(i+2)) ); edge->setVertex( 1, dynamic_cast<g2o::VertexSE3Expmap*> (optimizer.vertex(1)) ); edge->setMeasurement( Eigen::Vector2d(pts2[i].x, pts2[i].y ) ); edge->setInformation( Eigen::Matrix2d::Identity() ); edge->setParameterId(0,0); // 核函数 edge->setRobustKernel( new g2o::RobustKernelHuber() ); optimizer.addEdge( edge ); edges.push_back(edge); } cout<<"开始优化"<<endl; optimizer.setVerbose(true); optimizer.initializeOptimization(); optimizer.optimize(10); cout<<"优化完毕"<<endl; //我们比较关心两帧之间的变换矩阵 g2o::VertexSE3Expmap* v = dynamic_cast<g2o::VertexSE3Expmap*>( optimizer.vertex(1) ); Eigen::Isometry3d pose = v->estimate(); cout<<"Pose="<<endl<<pose.matrix()<<endl; // 以及所有特征点的位置 for ( size_t i=0; i<pts1.size(); i++ ) { g2o::VertexSBAPointXYZ* v = dynamic_cast<g2o::VertexSBAPointXYZ*> (optimizer.vertex(i+2)); cout<<"vertex id "<<i+2<<", pos = "; Eigen::Vector3d pos = v->estimate(); cout<<pos(0)<<","<<pos(1)<<","<<pos(2)<<endl; } // 估计inlier的个数 int inliers = 0; for ( auto e:edges ) { e->computeError(); // chi2 就是 error*\Omega*error, 如果这个数很大,说明此边的值与其他边很不相符 if ( e->chi2() > 1 ) { cout<<"error = "<<e->chi2()<<endl; } else { inliers++; } } cout<<"inliers in total points: "<<inliers<<"/"<<pts1.size()+pts2.size()<<endl; optimizer.save("ba.g2o"); return 0; }
void GraphicEnd::lostRecovery() { //以present作为新的关键帧 cout<<BOLDYELLOW<<"Lost Recovery..."<<RESET<<endl; //把present中的数据存储到current中 _currKF.id ++; _currKF.planes = _present.planes; _currKF.frame_index = _index; _lastRGB = _currRGB.clone(); _kf_pos = _robot; //waitKey(0); _keyframes.push_back( _currKF ); //将current关键帧存储至全局优化器 SparseOptimizer& opt = _pSLAMEnd->globalOptimizer; //顶点 VertexSE3* v = new VertexSE3(); v->setId( _currKF.id ); if (_use_odometry) v->setEstimate( _odo_this ); else v->setEstimate( Eigen::Isometry3d::Identity() ); opt.addVertex( v ); //由于当前位置不知道,所以不添加与上一帧相关的边 if (_use_odometry) { Eigen::Isometry3d To = _odo_last.inverse()*_odo_this; EdgeSE3* edge = new EdgeSE3(); edge->vertices()[0] = opt.vertex( _currKF.id - 1 ); edge->vertices()[1] = opt.vertex( _currKF.id ); Eigen::Matrix<double, 6,6> information = Eigen::Matrix<double, 6, 6>::Identity(); information(0, 0) = information(2,2) = information(1, 1) = information(3,3) = information(4,4) = information(5,5) = 1/(_error_odometry*_error_odometry); edge->setInformation( information ); edge->setMeasurement( To ); opt.addEdge( edge ); _odo_last = _odo_this; _lost = 0; return; } //check loop closure for (int i=0; i<_keyframes.size() - 1; i++) { vector<PLANE>& p1 = _keyframes[ i ].planes; Eigen::Isometry3d T = multiPnP( p1, _currKF.planes ).T; if (T.matrix() == Eigen::Isometry3d::Identity().matrix()) //匹配不上 continue; T = T.inverse(); //若匹配上,则在两个帧之间加一条边 EdgeSE3* edge = new EdgeSE3(); edge->vertices() [0] = opt.vertex( _keyframes[i].id ); edge->vertices() [1] = opt.vertex( _currKF.id ); Eigen::Matrix<double, 6,6> information = Eigen::Matrix<double, 6, 6>::Identity(); information(0, 0) = information(1,1) = information(2,2) = 100; information(3,3) = information(4,4) = information(5,5) = 100; edge->setInformation( information ); edge->setMeasurement( T ); edge->setRobustKernel( _pSLAMEnd->_robustKernel ); opt.addEdge( edge ); } }
// Loop-closure detection: compare the current keyframe against (1) recent
// adjacent keyframes, (2) "seed" keyframes that matched in the previous round,
// and (3) a random sample of _loopclosure_frames older keyframes.
void GraphicEnd::loopClosure()
{
    if (_keyframes.size() <= 3 ) // with 3 or fewer keyframes, loop closure is meaningless
        return;
    cout<<"Checking loop closure."<<endl;
    waitKey(10);
    vector<int> checked;   // keyframe indices already tested this round
    SparseOptimizer& opt = _pSLAMEnd->globalOptimizer;
    // Adjacent frames: test keyframes at offsets -3 and -4 from the end.
    for (int i=-3; i>-5; i--)
    {
        int n = _keyframes.size() + i;
        if (n>=0)
        {
            vector<PLANE>& p1 = _keyframes[n].planes;
            Eigen::Isometry3d T = multiPnP( p1, _currKF.planes ).T;
            if (T.matrix() == Eigen::Isometry3d::Identity().matrix()) // identity means: no match
                continue;
            T = T.inverse();
            // Matched: add an edge between the two keyframes.
            EdgeSE3* edge = new EdgeSE3();
            edge->vertices() [0] = opt.vertex( _keyframes[n].id );
            edge->vertices() [1] = opt.vertex( _currKF.id );
            Eigen::Matrix<double, 6,6> information = Eigen::Matrix<double, 6, 6>::Identity();
            information(0, 0) = information(2,2) = 100;
            information(1,1) = 100;
            information(3,3) = information(4,4) = information(5,5) = 100;
            edge->setInformation( information );
            edge->setMeasurement( T );
            edge->setRobustKernel( _pSLAMEnd->_robustKernel );
            opt.addEdge( edge );
        }
        else
            break;
    }
    // Seed frames: keyframes that produced a closure last round are re-tested,
    // and kept as seeds only if they match again.
    cout<<"checking seeds, seed.size()"<<_seed.size()<<endl;
    vector<int> newseed;
    for (size_t i=0; i<_seed.size(); i++)
    {
        vector<PLANE>& p1 = _keyframes[_seed[i]].planes;
        Eigen::Isometry3d T = multiPnP( p1, _currKF.planes ).T;
        if (T.matrix() == Eigen::Isometry3d::Identity().matrix()) // no match
            continue;
        T = T.inverse();
        // Matched: record and add an edge between the two keyframes.
        checked.push_back( _seed[i] );
        newseed.push_back( _seed[i] );
        EdgeSE3* edge = new EdgeSE3();
        edge->vertices() [0] = opt.vertex( _keyframes[_seed[i]].id );
        edge->vertices() [1] = opt.vertex( _currKF.id );
        Eigen::Matrix<double, 6,6> information = Eigen::Matrix<double, 6, 6>::Identity();
        information(0, 0) = information(2,2) = 100;
        information(1,1) = 100;
        information(3,3) = information(4,4) = information(5,5) = 100;
        edge->setInformation( information );
        edge->setMeasurement( T );
        edge->setRobustKernel( _pSLAMEnd->_robustKernel );
        opt.addEdge( edge );
    }
    // Random search over older keyframes (excluding the 3 most recent).
    cout<<"checking random frames"<<endl;
    for (int i=0; i<_loopclosure_frames; i++)
    {
        int frame = rand() % (_keyframes.size() -3 ); // pick a random past keyframe
        if ( find(checked.begin(), checked.end(), frame) != checked.end() ) // already tested this round
            continue;
        checked.push_back( frame );
        vector<PLANE>& p1 = _keyframes[frame].planes;
        RESULT_OF_MULTIPNP result = multiPnP( p1, _currKF.planes, true, _keyframes[frame].frame_index, 20 );
        Eigen::Isometry3d T = result.T;
        if (T.matrix() == Eigen::Isometry3d::Identity().matrix()) // no match
            continue;
        T = T.inverse();
        displayLC( _keyframes[frame].frame_index, _currKF.frame_index, result.norm);
        newseed.push_back( frame );
        // Matched: add an edge between the two keyframes.
        cout<<BOLDBLUE<<"find a loop closure between kf "<<_currKF.id<<" and kf "<<frame<<RESET<<endl;
        EdgeSE3* edge = new EdgeSE3();
        edge->vertices() [0] = opt.vertex( _keyframes[frame].id );
        edge->vertices() [1] = opt.vertex( _currKF.id );
        Eigen::Matrix<double, 6,6> information = Eigen::Matrix<double, 6, 6>::Identity();
        information(0, 0) = information(2,2) = 100;
        information(1,1) = 100;
        information(3,3) = information(4,4) = information(5,5) = 100;
        edge->setInformation( information );
        edge->setMeasurement( T );
        edge->setRobustKernel( _pSLAMEnd->_robustKernel );
        opt.addEdge( edge );
    }
    waitKey(10);
    _seed = newseed;
}
// Adds an edge (and any missing endpoint vertices) to the online optimizer.
// dimension == 3 -> 2D SE2 edge; dimension == 6 -> 3D SE3 edge whose
// measurement is either a 7-vector (translation + quaternion) or a 6-vector
// (translation + Euler angles). The information vector holds the upper
// triangle of the information matrix in row-major order.
// Returns false if the dimension is unsupported.
bool G2oSlamInterface::addEdge(const std::string& tag, int id, int dimension, int v1Id, int v2Id, const std::vector<double>& measurement, const std::vector<double>& information)
{
  (void) tag;
  (void) id;
  size_t oldEdgesSize = _optimizer->edges().size();
  if (dimension == 3) {
    SE2 transf(measurement[0], measurement[1], measurement[2]);
    // Unpack the upper-triangular information values into a symmetric matrix.
    Eigen::Matrix3d infMat;
    int idx = 0;
    for (int r = 0; r < 3; ++r)
      for (int c = r; c < 3; ++c, ++idx) {
        assert(idx < (int)information.size());
        infMat(r,c) = infMat(c,r) = information[idx];
      }
    //cerr << PVAR(infMat) << endl;
    // Create endpoint vertices on demand; doInit records which endpoint is new
    // so its estimate can be initialized from the edge afterwards.
    // NOTE(review): if BOTH endpoints are new, doInit ends up as 2 and only v2
    // is initialized — presumably acceptable for incremental input; confirm.
    int doInit = 0;
    SparseOptimizer::Vertex* v1 = _optimizer->vertex(v1Id);
    SparseOptimizer::Vertex* v2 = _optimizer->vertex(v2Id);
    if (! v1) {
      OptimizableGraph::Vertex* v = v1 = addVertex(dimension, v1Id);
      _verticesAdded.insert(v);
      doInit = 1;
      ++_nodesAdded;
    }
    if (! v2) {
      OptimizableGraph::Vertex* v = v2 = addVertex(dimension, v2Id);
      _verticesAdded.insert(v);
      doInit = 2;
      ++_nodesAdded;
    }
    // The very first edge fixes the lower-id endpoint to anchor the graph.
    if (_optimizer->edges().size() == 0) {
      cerr << "FIRST EDGE ";
      if (v1->id() < v2->id()) {
        cerr << "fixing " << v1->id() << endl;
        v1->setFixed(true);
      }
      else {
        cerr << "fixing " << v2->id() << endl;
        v2->setFixed(true);
      }
    }
    OnlineEdgeSE2* e = new OnlineEdgeSE2;
    e->vertices()[0] = v1;
    e->vertices()[1] = v2;
    e->setMeasurement(transf);
    e->setInformation(infMat);
    _optimizer->addEdge(e);
    _edgesAdded.insert(e);
    // Initialize the newly created endpoint from the existing one via the edge.
    if (doInit) {
      OptimizableGraph::Vertex* from = static_cast<OptimizableGraph::Vertex*>(e->vertices()[0]);
      OptimizableGraph::Vertex* to = static_cast<OptimizableGraph::Vertex*>(e->vertices()[1]);
      switch (doInit){
        case 1: // initialize v1 from v2
          {
            HyperGraph::VertexSet toSet;
            toSet.insert(to);
            if (e->initialEstimatePossible(toSet, from) > 0.) {
              e->initialEstimate(toSet, from);
            }
            break;
          }
        case 2: // initialize v2 from v1
          {
            HyperGraph::VertexSet fromSet;
            fromSet.insert(from);
            if (e->initialEstimatePossible(fromSet, to) > 0.) {
              e->initialEstimate(fromSet, to);
            }
            break;
          }
        default: cerr << "doInit wrong value\n";
      }
    }
  }
  else if (dimension == 6) {
    Eigen::Isometry3d transf;
    Matrix<double, 6, 6> infMat;
    if (measurement.size() == 7) { // measurement is a Quaternion
      Vector7d meas;
      for (int i=0; i<7; ++i)
        meas(i) = measurement[i];
      // normalize the quaternion to recover numerical precision lost by storing as human readable text
      Vector4d::MapType(meas.data()+3).normalize();
      transf = internal::fromVectorQT(meas);
      // Unpack the upper-triangular information values into a symmetric matrix.
      for (int i = 0, idx = 0; i < infMat.rows(); ++i)
        for (int j = i; j < infMat.cols(); ++j){
          infMat(i,j) = information[idx++];
          if (i != j)
            infMat(j,i)=infMat(i,j);
        }
    }
    else { // measurement consists of Euler angles
      Vector6d aux;
      aux << measurement[0], measurement[1], measurement[2],measurement[3], measurement[4], measurement[5];
      transf = internal::fromVectorET(aux);
      Matrix<double, 6, 6> infMatEuler;
      int idx = 0;
      for (int r = 0; r < 6; ++r)
        for (int c = r; c < 6; ++c, ++idx) {
          assert(idx < (int)information.size());
          infMatEuler(r,c) = infMatEuler(c,r) = information[idx];
        }
      // convert information matrix to our internal representation:
      // propagate the Euler-parameterized information through the Jacobian of
      // the Euler -> quaternion reparameterization.
      Matrix<double, 6, 6> J;
      SE3Quat transfAsSe3(transf.matrix().topLeftCorner<3,3>(), transf.translation());
      jac_quat3_euler3(J, transfAsSe3);
      infMat.noalias() = J.transpose() * infMatEuler * J;
      //cerr << PVAR(transf.matrix()) << endl;
      //cerr << PVAR(infMat) << endl;
    }
    // Same vertex-creation / anchoring / initialization logic as the 2D case.
    int doInit = 0;
    SparseOptimizer::Vertex* v1 = _optimizer->vertex(v1Id);
    SparseOptimizer::Vertex* v2 = _optimizer->vertex(v2Id);
    if (! v1) {
      OptimizableGraph::Vertex* v = v1 = addVertex(dimension, v1Id);
      _verticesAdded.insert(v);
      doInit = 1;
      ++_nodesAdded;
    }
    if (! v2) {
      OptimizableGraph::Vertex* v = v2 = addVertex(dimension, v2Id);
      _verticesAdded.insert(v);
      doInit = 2;
      ++_nodesAdded;
    }
    if (_optimizer->edges().size() == 0) {
      cerr << "FIRST EDGE ";
      if (v1->id() < v2->id()) {
        cerr << "fixing " << v1->id() << endl;
        v1->setFixed(true);
      }
      else {
        cerr << "fixing " << v2->id() << endl;
        v2->setFixed(true);
      }
    }
    OnlineEdgeSE3* e = new OnlineEdgeSE3;
    e->vertices()[0] = v1;
    e->vertices()[1] = v2;
    e->setMeasurement(transf);
    e->setInformation(infMat);
    _optimizer->addEdge(e);
    _edgesAdded.insert(e);
    if (doInit) {
      OptimizableGraph::Vertex* from = static_cast<OptimizableGraph::Vertex*>(e->vertices()[0]);
      OptimizableGraph::Vertex* to = static_cast<OptimizableGraph::Vertex*>(e->vertices()[1]);
      switch (doInit){
        case 1: // initialize v1 from v2
          {
            HyperGraph::VertexSet toSet;
            toSet.insert(to);
            if (e->initialEstimatePossible(toSet, from) > 0.) {
              e->initialEstimate(toSet, from);
            }
            break;
          }
        case 2: // initialize v2 from v1
          {
            HyperGraph::VertexSet fromSet;
            fromSet.insert(from);
            if (e->initialEstimatePossible(fromSet, to) > 0.) {
              e->initialEstimate(fromSet, to);
            }
            break;
          }
        default: cerr << "doInit wrong value\n";
      }
    }
  }
  else {
    cerr << __PRETTY_FUNCTION__ << " not implemented for this dimension" << endl;
    return false;
  }
  // Lazily allocate the Jacobian workspace once the first edge exists.
  if (oldEdgesSize == 0) {
    _optimizer->jacobianWorkspace().allocate();
  }
  return true;
}
int GraphicEnd::run() { //清空present并读取新的数据 cout<<"********************"<<endl; _present.planes.clear(); readimage(); //处理present _present.planes = extractPlanesAndGenerateImage( _currCloud, _currRGB, _currDep ); //_present.planes = extractPlanes( _currCloud ); //generateImageOnPlane( _currRGB, _present.planes, _currDep ); for ( size_t i=0; i<_present.planes.size(); i++ ) { _present.planes[i].kp = extractKeypoints( _present.planes[i].image ); _present.planes[i].desp = extractDescriptor( _currRGB, _present.planes[i].kp ); compute3dPosition( _present.planes[i], _currDep); } // 求解present到current的变换矩阵 RESULT_OF_MULTIPNP result = multiPnP( _currKF.planes, _present.planes ); Eigen::Isometry3d T = result.T; T = T.inverse(); //好像是反着的 // 如果平移和旋转超过一个阈值,则定义新的关键帧 if ( T.matrix() == Eigen::Isometry3d::Identity().matrix() ) { //未匹配上 cout<<BOLDRED"This frame lost"<<RESET<<endl; _lost++; } else if (result.norm > _max_pos_change) { //生成一个新的关键帧 _robot = T * _kf_pos; generateKeyFrame(T); if (_loop_closure_detection == true) loopClosure(); _lost = 0; } else { //小变化,更新robot位置 _robot = T * _kf_pos; _lost = 0; } if (_lost > _lost_frames) { cerr<<"the robot lost. Perform lost recovery."<<endl; lostRecovery(); } cout<<RED<<"key frame size = "<<_keyframes.size()<<RESET<<endl; _index ++; if (_use_odometry) { _odo_this = _odometry[ _index-1 ]; } return 1; }
void test_relative_values(bool spatial_targets, bool spatial_followers) { const double tolerance = 1e-8; // These frames will form a chain SimpleFrame A(Frame::World(), "A"); SimpleFrame B(&A, "B"); SimpleFrame C(&B, "C"); SimpleFrame D(&C, "D"); std::vector<SimpleFrame*> targets; targets.push_back(&A); targets.push_back(&B); targets.push_back(&C); targets.push_back(&D); // R will be an arbitrary reference frame SimpleFrame R(Frame::World(), "R"); targets.push_back(&R); // Each of these frames will attempt to track one of the frames in the chain // with respect to the frame R. SimpleFrame AR(&R, "AR"); SimpleFrame BR(&R, "BR"); SimpleFrame CR(&R, "CR"); SimpleFrame DR(&R, "DR"); SimpleFrame RR(&R, "RR"); std::vector<SimpleFrame*> followers; followers.push_back(&AR); followers.push_back(&BR); followers.push_back(&CR); followers.push_back(&DR); followers.push_back(&RR); assert( targets.size() == followers.size() ); randomize_target_values(targets, spatial_targets); set_relative_values(targets, followers, spatial_followers); check_world_values(targets, followers, tolerance); // Check every combination of relative values for(std::size_t i=0; i<targets.size(); ++i) { Frame* T = targets[i]; for(std::size_t j=0; j<followers.size(); ++j) { Frame* F = followers[j]; check_values(targets, followers, T, F, tolerance); check_values(targets, followers, F, T, tolerance); check_offset_computations(targets, followers, T, F, tolerance); } } // Test SimpleFrame::setTransform() for(std::size_t i=0; i<followers.size(); ++i) { for(std::size_t j=0; j<followers.size(); ++j) { SimpleFrame* F = followers[i]; SimpleFrame* T = followers[j]; Eigen::Isometry3d tf; randomize_transform(tf, 1, 2*M_PI); T->setTransform(tf, F); if(i != j) EXPECT_TRUE( equals(T->getTransform(F).matrix(), tf.matrix(), 1e-10)); } } }
// Builds a 4-frame kinematic chain (World -> A -> B -> C -> D) and checks that
// positions, velocities, and accelerations propagate forward correctly, in
// both spatial and classical representations, against independently computed
// reference values.
TEST(FRAMES, FORWARD_KINEMATICS_CHAIN)
{
  std::vector<SimpleFrame*> frames;
  double tolerance = 1e-6;

  SimpleFrame A(Frame::World(), "A");
  frames.push_back(&A);
  SimpleFrame B(&A, "B");
  frames.push_back(&B);
  SimpleFrame C(&B, "C");
  frames.push_back(&C);
  SimpleFrame D(&C, "D");
  frames.push_back(&D);

  // -- Test Position --------------------------------------------------------
  // With no relative transforms set, the end of the chain sits at identity.
  EXPECT_TRUE( equals(D.getTransform().matrix(),
                      Eigen::Isometry3d::Identity().matrix(),
                      tolerance));

  Eigen::aligned_vector<Eigen::Isometry3d> tfs;
  tfs.resize(frames.size(), Eigen::Isometry3d::Identity());
  randomize_transforms(tfs);

  for(std::size_t i=0; i<frames.size(); ++i)
  {
    SimpleFrame* F = frames[i];
    F->setRelativeTransform(tfs[i]);
  }

  // Each frame's world transform must equal the product of all relative
  // transforms up the chain.
  for(std::size_t i=0; i<frames.size(); ++i)
  {
    Frame* F = frames[i];
    Eigen::Isometry3d expectation(Eigen::Isometry3d::Identity());
    for(std::size_t j=0; j<=i; ++j)
    {
      expectation = expectation * tfs[j];
    }

    Eigen::Isometry3d actual = F->getTransform();
    EXPECT_TRUE( equals(actual.matrix(), expectation.matrix(), tolerance));
  }

  // Re-randomize and re-check the end of the chain.
  randomize_transforms(tfs);
  for(std::size_t i=0; i<frames.size(); ++i)
  {
    SimpleFrame* F = frames[i];
    F->setRelativeTransform(tfs[i]);
  }

  Eigen::Isometry3d expectation(Eigen::Isometry3d::Identity());
  for(std::size_t j=0; j<frames.size(); ++j)
    expectation = expectation * tfs[j];

  EXPECT_TRUE( equals(frames.back()->getTransform().matrix(),
                      expectation.matrix(), tolerance) );

  // -- Test Velocity --------------------------------------------------------

  // Basic forward spatial velocity propagation
  { // The brackets are to allow reusing variable names
    Eigen::aligned_vector<Eigen::Vector6d> v_rels(frames.size());
    Eigen::aligned_vector<Eigen::Vector6d> v_total(frames.size());

    for(std::size_t i=0; i<frames.size(); ++i)
    {
      v_rels[i] = random_vec<6>();

      SimpleFrame* F = frames[i];
      F->setRelativeSpatialVelocity(v_rels[i]);

      if(i>0)
        compute_spatial_velocity(v_total[i-1], v_rels[i],
            F->getRelativeTransform(), v_total[i]);
      else
        compute_spatial_velocity(Eigen::Vector6d::Zero(), v_rels[i],
            F->getRelativeTransform(), v_total[i]);
    }

    for(std::size_t i=0; i<frames.size(); ++i)
    {
      SimpleFrame* F = frames[i];

      Eigen::Vector6d v_actual = F->getSpatialVelocity();

      EXPECT_TRUE( equals(v_total[i], v_actual) );
    }
  }

  // Testing conversion between spatial and classical velocities
  {
    std::vector<Eigen::Vector3d> v_rels(frames.size());
    std::vector<Eigen::Vector3d> w_rels(frames.size());

    std::vector<Eigen::Vector3d> v_total(frames.size());
    std::vector<Eigen::Vector3d> w_total(frames.size());

    for(std::size_t i=0; i<frames.size(); ++i)
    {
      v_rels[i] = random_vec<3>();
      w_rels[i] = random_vec<3>();

      SimpleFrame* F = frames[i];
      F->setClassicDerivatives(v_rels[i], w_rels[i]);

      Eigen::Vector3d offset = F->getRelativeTransform().translation();

      // Parent's world transform (identity for the root of the chain).
      Eigen::Isometry3d tf = i>0? frames[i-1]->getTransform() :
          Eigen::Isometry3d::Identity();

      if(i>0)
        compute_velocity(v_total[i-1], w_total[i-1], v_rels[i], w_rels[i],
            offset, tf, v_total[i], w_total[i]);
      else
        compute_velocity(Eigen::Vector3d::Zero(), Eigen::Vector3d::Zero(),
            v_rels[i], w_rels[i], offset, tf, v_total[i], w_total[i]);
    }

    for(std::size_t i=0; i<frames.size(); ++i)
    {
      SimpleFrame* F = frames[i];
      Eigen::Vector3d v_actual = F->getLinearVelocity();
      Eigen::Vector3d w_actual = F->getAngularVelocity();

      EXPECT_TRUE( equals(v_total[i], v_actual, tolerance) );
      EXPECT_TRUE( equals(w_total[i], w_actual, tolerance) );
    }
  }

  // -- Acceleration ---------------------------------------------------------

  // Basic forward spatial acceleration propagation
  {
    Eigen::aligned_vector<Eigen::Vector6d> v_rels(frames.size());
    Eigen::aligned_vector<Eigen::Vector6d> a_rels(frames.size());

    Eigen::aligned_vector<Eigen::Vector6d> v_total(frames.size());
    Eigen::aligned_vector<Eigen::Vector6d> a_total(frames.size());

    for(std::size_t i=0; i<frames.size(); ++i)
    {
      v_rels[i] = random_vec<6>();
      a_rels[i] = random_vec<6>();

      SimpleFrame* F = frames[i];
      F->setRelativeSpatialVelocity(v_rels[i]);
      F->setRelativeSpatialAcceleration(a_rels[i]);
      Eigen::Isometry3d tf = F->getRelativeTransform();

      if(i>0)
      {
        compute_spatial_velocity(v_total[i-1], v_rels[i], tf, v_total[i]);
        compute_spatial_acceleration(a_total[i-1], a_rels[i], v_total[i],
            v_rels[i], tf, a_total[i]);
      }
      else
      {
        compute_spatial_velocity(Eigen::Vector6d::Zero(), v_rels[i],
            tf, v_total[i]);
        compute_spatial_acceleration(Eigen::Vector6d::Zero(), a_rels[i],
            v_total[i], v_rels[i], tf, a_total[i]);
      }
    }

    for(std::size_t i=0; i<frames.size(); ++i)
    {
      SimpleFrame* F = frames[i];

      Eigen::Vector6d a_actual = F->getSpatialAcceleration();

      EXPECT_TRUE( equals(a_total[i], a_actual) );
    }

    // Test relative computations: querying velocity/acceleration of a frame
    // relative to its own parent, in its own coordinates, should recover the
    // values that were set.
    for(std::size_t i=0; i<frames.size(); ++i)
    {
      Frame* F = frames[i];
      Frame* P = F->getParentFrame();

      Eigen::Vector6d v_rel = F->getSpatialVelocity(P, F);
      Eigen::Vector6d a_rel = F->getSpatialAcceleration(P, F);

      EXPECT_TRUE( equals(v_rels[i], v_rel, tolerance) );
      EXPECT_TRUE( equals(a_rels[i], a_rel, tolerance) );
    }

    // Test offset computations: classical velocity/acceleration at a point
    // offset from the frame origin must match the reference computation.
    for(std::size_t i=0; i<frames.size(); ++i)
    {
      Eigen::Vector3d offset = random_vec<3>();
      Frame* F = frames[i];

      Eigen::Vector3d v_actual = F->getLinearVelocity(offset);
      Eigen::Vector3d w_actual = F->getAngularVelocity();
      Eigen::Vector3d v_expect, w_expect;
      compute_velocity(F->getLinearVelocity(), F->getAngularVelocity(),
                       Eigen::Vector3d::Zero(), Eigen::Vector3d::Zero(),
                       offset, F->getWorldTransform(), v_expect, w_expect);

      EXPECT_TRUE( equals( v_expect, v_actual) );
      EXPECT_TRUE( equals( w_expect, w_actual) );

      Eigen::Vector3d a_actual = F->getLinearAcceleration(offset);
      Eigen::Vector3d alpha_actual = F->getAngularAcceleration();
      Eigen::Vector3d a_expect, alpha_expect;
      compute_acceleration(F->getLinearAcceleration(),
                           F->getAngularAcceleration(),
                           F->getAngularVelocity(),
                           Eigen::Vector3d::Zero(), Eigen::Vector3d::Zero(),
                           Eigen::Vector3d::Zero(), Eigen::Vector3d::Zero(),
                           offset, F->getWorldTransform(),
                           a_expect, alpha_expect);

      EXPECT_TRUE( equals( a_expect, a_actual) );
      EXPECT_TRUE( equals( alpha_expect, alpha_actual) );
    }
  }

  // Testing conversion between spatial and classical accelerations
  {
    std::vector<Eigen::Vector3d> v_rels(frames.size());
    std::vector<Eigen::Vector3d> w_rels(frames.size());
    std::vector<Eigen::Vector3d> a_rels(frames.size());
    std::vector<Eigen::Vector3d> alpha_rels(frames.size());

    std::vector<Eigen::Vector3d> v_total(frames.size());
    std::vector<Eigen::Vector3d> w_total(frames.size());
    std::vector<Eigen::Vector3d> a_total(frames.size());
    std::vector<Eigen::Vector3d> alpha_total(frames.size());

    for(std::size_t i=0; i<frames.size(); ++i)
    {
      v_rels[i] = random_vec<3>();
      w_rels[i] = random_vec<3>();
      a_rels[i] = random_vec<3>();
      alpha_rels[i] = random_vec<3>();

      SimpleFrame* F = frames[i];
      Eigen::Isometry3d rel_tf;
      randomize_transform(rel_tf);
      F->setRelativeTransform(rel_tf);
      F->setClassicDerivatives(v_rels[i], w_rels[i], a_rels[i], alpha_rels[i]);

      Eigen::Vector3d offset = F->getRelativeTransform().translation();

      Eigen::Isometry3d tf = i>0? frames[i-1]->getTransform() :
          Eigen::Isometry3d::Identity();

      if(i>0)
      {
        compute_velocity(v_total[i-1], w_total[i-1], v_rels[i], w_rels[i],
            offset, tf, v_total[i], w_total[i]);
        compute_acceleration(a_total[i-1], alpha_total[i-1], w_total[i-1],
            a_rels[i], alpha_rels[i], v_rels[i], w_rels[i], offset, tf,
            a_total[i], alpha_total[i]);
      }
      else
      {
        compute_velocity(Eigen::Vector3d::Zero(), Eigen::Vector3d::Zero(),
            v_rels[i], w_rels[i], offset, tf, v_total[i], w_total[i]);
        compute_acceleration(Eigen::Vector3d::Zero(), Eigen::Vector3d::Zero(),
            Eigen::Vector3d::Zero(), a_rels[i], alpha_rels[i], v_rels[i],
            w_rels[i], offset, tf, a_total[i], alpha_total[i]);
      }
    }

    for(std::size_t i=0; i<frames.size(); ++i)
    {
      SimpleFrame* F = frames[i];
      Eigen::Vector3d v_actual = F->getLinearVelocity();
      Eigen::Vector3d w_actual = F->getAngularVelocity();
      Eigen::Vector3d a_actual = F->getLinearAcceleration();
      Eigen::Vector3d alpha_actual = F->getAngularAcceleration();

      EXPECT_TRUE( equals(v_total[i], v_actual, tolerance) );
      EXPECT_TRUE( equals(w_total[i], w_actual, tolerance) );
      EXPECT_TRUE( equals(a_total[i], a_actual, tolerance) );
      EXPECT_TRUE( equals(alpha_total[i], alpha_actual, tolerance) );
    }

    // Test relative computations (classical values relative to the parent,
    // expressed in the parent frame).
    for(std::size_t i=0; i<frames.size(); ++i)
    {
      SimpleFrame* F = frames[i];
      Frame* P = F->getParentFrame();
      Eigen::Vector3d v_rel = F->getLinearVelocity(P, P);
      Eigen::Vector3d w_rel = F->getAngularVelocity(P, P);
      Eigen::Vector3d a_rel = F->getLinearAcceleration(P, P);
      Eigen::Vector3d alpha_rel = F->getAngularAcceleration(P, P);

      EXPECT_TRUE( equals(v_rels[i], v_rel, tolerance) );
      EXPECT_TRUE( equals(w_rels[i], w_rel, tolerance) );
      EXPECT_TRUE( equals(a_rels[i], a_rel, tolerance) );
      EXPECT_TRUE( equals(alpha_rels[i], alpha_rel, tolerance) );
    }
  }
}
void ViewerState::optimizeSelected(){ DrawableFrame* current = drawableFrameVector.back(); DrawableFrame* reference = drawableFrameVector[drawableFrameVector.size()-2]; cerr << "optimizing" << endl; cerr << "current=" << current->frame() << endl; cerr << "reference= " << reference->frame() << endl; // cerr computing initial guess based on the frame positions, just for convenience Eigen::Isometry3d delta = reference->_vertex->estimate().inverse()*current->_vertex->estimate(); for(int c=0; c<4; c++) for(int r=0; r<3; r++) initialGuess.matrix()(r,c) = delta.matrix()(r,c); Eigen::Isometry3f odometryMean; Matrix6f odometryInfo; bool hasOdometry = extractRelativePrior(odometryMean, odometryInfo, reference, current); if (hasOdometry) initialGuess=odometryMean; Eigen::Isometry3f imuMean; Matrix6f imuInfo; bool hasImu = extractAbsolutePrior(imuMean, imuInfo, current); initialGuess.matrix().row(3) << 0,0,0,1; if(!wasInitialGuess) { aligner->clearPriors(); aligner->setOuterIterations(al_outerIterations); aligner->setReferenceFrame(reference->frame()); aligner->setCurrentFrame(current->frame()); aligner->setInitialGuess(initialGuess); aligner->setSensorOffset(sensorOffset); if (hasOdometry) aligner->addRelativePrior(odometryMean, odometryInfo); if (hasImu) aligner->addAbsolutePrior(reference->transformation(), imuMean, imuInfo); aligner->align(); } Eigen::Isometry3f localTransformation =aligner->T(); if (aligner->inliers()<1000 || aligner->error()/aligner->inliers()>10){ cerr << "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!" 
<< endl; cerr << "aligner: monster failure: inliers = " << aligner->inliers() << endl; cerr << "aligner: monster failure: error/inliers = " << aligner->error()/aligner->inliers() << endl; cerr << "Local transformation: " << t2v(aligner->T()).transpose() << endl; localTransformation = initialGuess; sleep(1); } cout << "Local transformation: " << t2v(aligner->T()).transpose() << endl; globalT = reference->transformation()*aligner->T(); // Update cloud drawing position. current->setTransformation(globalT); current->setLocalTransformation(aligner->T()); // Show zBuffers. refScn->clear(); currScn->clear(); QImage refQImage; QImage currQImage; DepthImageView div; div.computeColorMap(300, 2000, 128); div.convertToQImage(refQImage, aligner->correspondenceFinder()->referenceDepthImage()); div.convertToQImage(currQImage, aligner->correspondenceFinder()->currentDepthImage()); refScn->addPixmap((QPixmap::fromImage(refQImage)).scaled(QSize((int)refQImage.width()/(ng_scale*3), (int)(refQImage.height()/(ng_scale*3))))); currScn->addPixmap((QPixmap::fromImage(currQImage)).scaled(QSize((int)currQImage.width()/(ng_scale*3), (int)(currQImage.height()/(ng_scale*3))))); pwnGMW->graphicsView1_2d->show(); pwnGMW->graphicsView2_2d->show(); wasInitialGuess = false; newCloudAdded = false; *initialGuessViewer = 0; *optimizeViewer = 0; }
void BundleAdjustmentOptimization(const vector<Point3f>& objectPoints,const vector<Eigen::Vector2d>& imagePoints1,const vector<Eigen::Vector2d>& imagePoints2) { typedef BlockSolver_6_3 SlamBlockSolver; typedef LinearSolverEigen< SlamBlockSolver::PoseMatrixType > SlamLinearSolver; // Eigen::Quaternionf r = Eigen::Quaterniond(T.rotation()); // Eigen::Quaterniond rr(r.w(),r.x(),r.y(),r.z()); // Eigen::Vector3f t = T.translation(); // Eigen::Vector3d tt(t[0],t[1],t[2]); // g2o::BlockSolver_6_3::LinearSolverType* linearSolver = new g2o::LinearSolverCholmod<g2o::BlockSolver_6_3::PoseMatrixType> (); // //SlamLinearSolver* linearSolver = new SlamLinearSolver(); // SlamBlockSolver* blockSolver = new SlamBlockSolver(linearSolver); // OptimizationAlgorithmGaussNewton* solver = new OptimizationAlgorithmGaussNewton(blockSolver); // //OptimizationAlgorithmLevenberg* solver = new OptimizationAlgorithmLevenberg(blockSolver); // //Eigen::Vector2d principal_point(325.5,253.5); // Eigen::Vector2d principal_point(320,240); // //CameraParameters * cam_params = new CameraParameters (518.5, principal_point,0); // CameraParameters * cam_params = new CameraParameters (518.5, principal_point,0); // cam_params->setId(0); // SparseOptimizer optimizer; // optimizer.setAlgorithm(solver); // optimizer.setVerbose(false); // optimizer.addParameter(cam_params); // int id=0; // for(int i =0;i<2;i++) // { // VertexSE3Expmap* se3 = new VertexSE3Expmap(); // se3->setId(id); // if(i == 0) // { // se3->setFixed(true); // se3->setEstimate(SE3Quat()); // } // else // { //SE3Quat* pos = new SE3Quat(rr,tt); // se3->setEstimate(SE3Quat()); // } // optimizer.addVertex(se3); // id++; // } // cout<<"obejecPoints.size = "; // cout<<objectPoints.size()<<endl; // for(int i =0;i<objectPoints.size();i++) // { // VertexSBAPointXYZ* v_xyz = new VertexSBAPointXYZ(); // Eigen::Vector3d vec; // v_xyz->setId(id); // vec<<objectPoints[i].x,objectPoints[i].y,objectPoints[i].z; // v_xyz->setEstimate(vec); // 
v_xyz->setMarginalized(false); // optimizer.addVertex(v_xyz); // id++; // } // cout<<imagePoints1.size()<<endl; // for(int i =0;i<imagePoints1.size();i++) // { // EdgeProjectXYZ2UV* e = new EdgeProjectXYZ2UV(); // e->setVertex(0, dynamic_cast<VertexSBAPointXYZ*> (optimizer.vertex(2+i))); // e->setVertex(1, dynamic_cast<VertexSE3Expmap*> (optimizer.vertex(0))); // e->setMeasurement(imagePoints1[i]); // e->information() = Eigen::Matrix2d::Identity(); // e->setParameterId(0,0); // RobustKernelHuber* rk = new RobustKernelHuber; // e->setRobustKernel(rk); // optimizer.addEdge(e); // } // cout<<imagePoints2.size()<<endl; // for(int i =0;i<imagePoints2.size();i++) // { // EdgeProjectXYZ2UV* e = new EdgeProjectXYZ2UV(); // e->setVertex(0, dynamic_cast<VertexSBAPointXYZ*> (optimizer.vertex(2+i))); // e->setVertex(1, dynamic_cast<VertexSE3Expmap*> (optimizer.vertex(1))); // e->setMeasurement(imagePoints2[i]); // e->information() = Eigen::Matrix2d::Identity(); // RobustKernelHuber* rk = new RobustKernelHuber; // e->setParameterId(0,0); // e->setRobustKernel(rk); // optimizer.addEdge(e); // } g2o::SparseOptimizer optimizer; // 使用Cholmod中的线性方程求解器 g2o::BlockSolver_6_3::LinearSolverType* linearSolver = new g2o::LinearSolverCholmod<g2o::BlockSolver_6_3::PoseMatrixType> (); // 6*3 的参数 g2o::BlockSolver_6_3* block_solver = new g2o::BlockSolver_6_3( linearSolver ); // L-M 下降 //g2o::OptimizationAlgorithmLevenberg* algorithm = new g2o::OptimizationAlgorithmLevenberg( block_solver ); OptimizationAlgorithmDogleg* algorithm = new OptimizationAlgorithmDogleg(block_solver); optimizer.setAlgorithm( algorithm ); optimizer.setVerbose( false ); g2o::CameraParameters* camera = new g2o::CameraParameters( 518.5, Eigen::Vector2d(325,253), 0 ); camera->setId(0); optimizer.addParameter( camera ); // 添加节点 // 两个位姿节点 for ( int i=0; i<2; i++ ) { g2o::VertexSE3Expmap* v = new g2o::VertexSE3Expmap(); v->setId(i); if ( i == 0) v->setFixed( true ); // 第一个点固定为零 // 预设值为单位Pose,因为我们不知道任何信息 v->setEstimate( 
g2o::SE3Quat() ); optimizer.addVertex( v ); } // 很多个特征点的节点 // 以第一帧为准 for ( size_t i=0; i<objectPoints.size(); i++ ) { g2o::VertexSBAPointXYZ* v = new g2o::VertexSBAPointXYZ(); v->setId( 2 + i ); // 由于深度不知道,只能把深度设置为1了 Eigen::Vector3d vec; vec<<objectPoints[i].x,objectPoints[i].y,objectPoints[i].z; v->setMarginalized(true); v->setEstimate( vec ); optimizer.addVertex( v ); } // 准备边 // 第一帧 vector<g2o::EdgeProjectXYZ2UV*> edges; for ( size_t i=0; i<imagePoints1.size(); i++ ) { g2o::EdgeProjectXYZ2UV* edge = new g2o::EdgeProjectXYZ2UV(); edge->setVertex( 0, dynamic_cast<g2o::VertexSBAPointXYZ*> (optimizer.vertex(i+2)) ); edge->setVertex( 1, dynamic_cast<g2o::VertexSE3Expmap*> (optimizer.vertex(0)) ); edge->setMeasurement( imagePoints1[i] ); edge->setInformation( Eigen::Matrix2d::Identity() ); edge->setParameterId(0, 0); // 核函数 edge->setRobustKernel( new g2o::RobustKernelHuber() ); optimizer.addEdge( edge ); edges.push_back(edge); } // 第二帧 for ( size_t i=0; i<imagePoints2.size(); i++ ) { g2o::EdgeProjectXYZ2UV* edge = new g2o::EdgeProjectXYZ2UV(); edge->setVertex( 0, dynamic_cast<g2o::VertexSBAPointXYZ*> (optimizer.vertex(i+2)) ); edge->setVertex( 1, dynamic_cast<g2o::VertexSE3Expmap*> (optimizer.vertex(1)) ); edge->setMeasurement( imagePoints2[i] ); edge->setInformation( Eigen::Matrix2d::Identity() ); edge->setParameterId(0,0); // 核函数 edge->setRobustKernel( new g2o::RobustKernelHuber() ); optimizer.addEdge( edge ); edges.push_back(edge); } int inliers=0; for ( vector<g2o::EdgeProjectXYZ2UV*>::iterator e = edges.begin();e!=edges.end();e++ ) { (*e)->computeError(); // chi2 就是 error*\Omega*error, 如果这个数很大,说明此边的值与其他边很不相符 if ( (*e)->chi2() > 1 ) { cout<<"error = "<<(*e)->chi2()<<endl; } else { inliers++; } } optimizer.setVerbose(true); optimizer.initializeOptimization(); cout<<"optimizer start"<<endl; optimizer.optimize(1,true); cout<<"optimizer end"<<endl; g2o::VertexSE3Expmap* v = dynamic_cast<g2o::VertexSE3Expmap*>( optimizer.vertex(1) ); Eigen::Isometry3d pose = 
v->estimate(); cout<<"Pose="<<endl<<pose.matrix()<<endl; for ( size_t i=0; i<imagePoints1.size(); i++ ) { g2o::VertexSBAPointXYZ* v = dynamic_cast<g2o::VertexSBAPointXYZ*> (optimizer.vertex(i+2)); cout<<"vertex id "<<i+2<<", pos = "; Eigen::Vector3d pos = v->estimate(); cout<<pos(0)<<","<<pos(1)<<","<<pos(2)<<endl; } }