int vpm(int argc, char **argv) { if( argc < 2 ) throw std::runtime_error("Provide model name as second argument"); std::string obj_name(argv[1]); std::string vision_module_dir( ros::package::getPath("vision_module") ); std::string tree_vps_path( vision_module_dir + "/data/tree_vps.txt"); std::string views_path( vision_module_dir + "/../database/cloud_data/" + obj_name + "_views" ); // Read the tree vps file Eigen::MatrixXd tree_vps; io_utils::file2matrix( tree_vps_path, tree_vps, 3 ); int num_vps = tree_vps.rows(); pcl::PointCloud<pcl::PointXYZ>::Ptr pcd_model_ptr( new pcl::PointCloud<pcl::PointXYZ>); for(int vp = 0; vp < num_vps; ++vp) { // load the view pcl::PointCloud<pcl::PointXYZ>::Ptr view_ptr( new pcl::PointCloud<pcl::PointXYZ>); std::string view_path( views_path + "/" + obj_name + "_" + boost::lexical_cast<std::string>(vp) +".pcd" ); pcl::io::loadPCDFile<pcl::PointXYZ> ( view_path, *view_ptr ); // rotate to world frame Eigen::Vector3d camera_position = tree_vps.row(vp).transpose(); Eigen::Vector4d camera_orientation = misc::target2quat( camera_position, Eigen::Vector3d(0,0,0) ); tf::Transform map_T_snsr; map_T_snsr.setOrigin( tf::Vector3(camera_position.x(), camera_position.y(), camera_position.z()) ); map_T_snsr.setRotation( tf::Quaternion( camera_orientation.x(), camera_orientation.y(), camera_orientation.z(), camera_orientation.w()) ); tf::Transform snsr_T_optical; snsr_T_optical.setOrigin( tf::Vector3( 0.0, 0.0, 0.0 ) ); snsr_T_optical.setRotation( tf::Quaternion( -0.5, 0.5, -0.5, 0.5 ) ); tf::Transform map_T_optical = map_T_snsr * snsr_T_optical; pcl_ros::transformPointCloud( *view_ptr, *view_ptr, map_T_optical); // add to pcd_model *pcd_model_ptr += *view_ptr; } // Save pcl::PCDWriter writer; writer.writeASCII( vision_module_dir + "../database/pcd_models/" + obj_name + "_complete.pcd", *pcd_model_ptr ); return 0; }
// This is just a workbench. Most of the stuff here will go into the Frontend
// class. Replays a rosbag synchronously through the OKVIS estimator:
// IMU and camera messages are fed in timestamp order, blocking on processing.
int main(int argc, char **argv)
{
  ros::init(argc, argv, "okvis_node_synchronous");

  google::InitGoogleLogging(argv[0]);
  FLAGS_stderrthreshold = 0;  // INFO: 0, WARNING: 1, ERROR: 2, FATAL: 3
  FLAGS_colorlogtostderr = 1;

  // Expected arguments: config file, bag file, and optionally a number of
  // leading seconds of the bag to skip.
  if (argc != 3 && argc != 4) {
    LOG(ERROR) << "Usage: ./" << argv[0]
               << " configuration-yaml-file bag-to-read-from [skip-first-seconds]";
    return -1;
  }

  okvis::Duration deltaT(0.0);
  if (argc == 4) {
    deltaT = okvis::Duration(atof(argv[3]));
  }

  // set up the node
  ros::NodeHandle nh("okvis_node");

  // publisher
  okvis::Publisher publisher(nh);

  // read configuration file
  std::string configFilename(argv[1]);
  okvis::RosParametersReader vio_parameters_reader(configFilename);
  okvis::VioParameters parameters;
  vio_parameters_reader.getParameters(parameters);

  okvis::ThreadedKFVio okvis_estimator(parameters);

  // Wire estimator outputs to the ROS publisher.
  okvis_estimator.setFullStateCallback(
      std::bind(&okvis::Publisher::publishFullStateAsCallback, &publisher,
                std::placeholders::_1, std::placeholders::_2,
                std::placeholders::_3, std::placeholders::_4));
  okvis_estimator.setLandmarksCallback(
      std::bind(&okvis::Publisher::publishLandmarksAsCallback, &publisher,
                std::placeholders::_1, std::placeholders::_2,
                std::placeholders::_3));
  okvis_estimator.setStateCallback(
      std::bind(&okvis::Publisher::publishStateAsCallback, &publisher,
                std::placeholders::_1, std::placeholders::_2));
  // Blocking mode: addImage/addImuMeasurement wait for processing, so replay
  // stays synchronous with the estimator.
  okvis_estimator.setBlocking(true);
  publisher.setParameters(parameters);  // pass the specified publishing stuff

  // extract the folder path of the bag, used as the CSV output directory
  std::string bagname(argv[2]);
  size_t pos = bagname.find_last_of("/");
  std::string path;
  if (pos == std::string::npos)
    path = ".";
  else
    path = bagname.substr(0, pos);

  const unsigned int numCameras = parameters.nCameraSystem.numCameras();

  // setup files to be written
  publisher.setCsvFile(path + "/okvis_estimator_output.csv");
  publisher.setLandmarksCsvFile(path + "/okvis_estimator_landmarks.csv");
  okvis_estimator.setImuCsvFile(path + "/imu0_data.csv");
  for (size_t i = 0; i < numCameras; ++i) {
    std::stringstream num;
    num << i;
    okvis_estimator.setTracksCsvFile(i, path + "/cam" + num.str() + "_tracks.csv");
  }

  // open the bag
  rosbag::Bag bag(argv[2], rosbag::bagmode::Read);

  // views on topics. the slash is needs to be correct, it's ridiculous...
  std::string imu_topic("/imu0");
  rosbag::View view_imu(bag, rosbag::TopicQuery(imu_topic));
  if (view_imu.size() == 0) {
    LOG(ERROR) << "no imu topic";
    return -1;
  }
  rosbag::View::iterator view_imu_iterator = view_imu.begin();
  LOG(INFO) << "No. IMU messages: " << view_imu.size();

  // One rosbag view + iterator per camera, plus the timestamp of each
  // camera's first frame (used below to align the starting frames).
  std::vector<std::shared_ptr<rosbag::View> > view_cams_ptr;
  std::vector<rosbag::View::iterator> view_cam_iterators;
  std::vector<okvis::Time> times;
  okvis::Time latest(0);
  for (size_t i = 0; i < numCameras; ++i) {
    std::string camera_topic("/cam" + std::to_string(i) + "/image_raw");
    std::shared_ptr<rosbag::View> view_ptr(
        new rosbag::View(bag, rosbag::TopicQuery(camera_topic)));
    if (view_ptr->size() == 0) {
      LOG(ERROR) << "no camera topic";
      return 1;
    }
    view_cams_ptr.push_back(view_ptr);
    view_cam_iterators.push_back(view_ptr->begin());
    sensor_msgs::ImageConstPtr msg1 =
        view_cam_iterators[i]->instantiate<sensor_msgs::Image>();
    times.push_back(okvis::Time(msg1->header.stamp.sec, msg1->header.stamp.nsec));
    if (times.back() > latest)
      latest = times.back();
    LOG(INFO) << "No. cam " << i << " messages: " << view_cams_ptr.back()->size();
  }

  // Skip the first frame of any camera that starts noticeably (>10 ms)
  // earlier than the latest-starting camera, so all cameras begin roughly
  // synchronized.
  for (size_t i = 0; i < numCameras; ++i) {
    if ((latest - times[i]).toSec() > 0.01)
      view_cam_iterators[i]++;
  }

  int counter = 0;
  okvis::Time start(0.0);
  while (ros::ok()) {
    ros::spinOnce();
    okvis_estimator.display();

    // check if at the end of the IMU stream
    if (view_imu_iterator == view_imu.end()) {
      std::cout << std::endl << "Finished. Press any key to exit." << std::endl << std::flush;
      char k = 0;
      while (k == 0 && ros::ok()) {
        k = cv::waitKey(1);
        ros::spinOnce();
      }
      return 0;
    }
    // check if at the end of any camera stream
    for (size_t i = 0; i < numCameras; ++i) {
      if (view_cam_iterators[i] == view_cams_ptr[i]->end()) {
        std::cout << std::endl << "Finished. Press any key to exit." << std::endl << std::flush;
        char k = 0;
        while (k == 0 && ros::ok()) {
          k = cv::waitKey(1);
          ros::spinOnce();
        }
        return 0;
      }
    }

    // add images
    okvis::Time t;
    for (size_t i = 0; i < numCameras; ++i) {
      sensor_msgs::ImageConstPtr msg1 =
          view_cam_iterators[i]->instantiate<sensor_msgs::Image>();
      // Raw copy of the image payload into an 8-bit single-channel Mat.
      // NOTE(review): assumes a mono8 image with no row padding (step ==
      // width) — verify against the bag's encoding.
      cv::Mat filtered(msg1->height, msg1->width, CV_8UC1);
      memcpy(filtered.data, &msg1->data[0], msg1->height * msg1->width);
      t = okvis::Time(msg1->header.stamp.sec, msg1->header.stamp.nsec);
      if (start == okvis::Time(0.0)) {
        start = t;
      }

      // get all IMU measurements till then: drain the IMU stream up to and
      // including the image timestamp before handing over the frame.
      okvis::Time t_imu = start;
      do {
        sensor_msgs::ImuConstPtr msg =
            view_imu_iterator->instantiate<sensor_msgs::Imu>();
        Eigen::Vector3d gyr(msg->angular_velocity.x, msg->angular_velocity.y,
                            msg->angular_velocity.z);
        Eigen::Vector3d acc(msg->linear_acceleration.x, msg->linear_acceleration.y,
                            msg->linear_acceleration.z);
        t_imu = okvis::Time(msg->header.stamp.sec, msg->header.stamp.nsec);

        // add the IMU measurement for (blocking) processing; measurements in
        // the first deltaT (skip-first-seconds) are discarded.
        if (t_imu - start > deltaT)
          okvis_estimator.addImuMeasurement(t_imu, acc, gyr);

        view_imu_iterator++;
      } while (view_imu_iterator != view_imu.end() && t_imu <= t);

      // add the image to the frontend for (blocking) processing
      if (t - start > deltaT)
        okvis_estimator.addImage(t, i, filtered);

      view_cam_iterators[i]++;
    }
    ++counter;

    // display progress every 20 frame sets, as a percentage of the last
    // camera's message count
    if (counter % 20 == 0) {
      std::cout << "\rProgress: "
                << int(double(counter) / double(view_cams_ptr.back()->size()) * 100)
                << "% ";
    }
  }

  std::cout << std::endl;
  return 0;
}