/*!
 * \brief Main Function
 *
 * \author Scott K Logan
 *
 * Initializes ROS, instantiates the node handle for the driver to use and
 * instantiates the angel_controller class.
 *
 * \param argc Number of command line arguments
 * \param argv 2D character array of command line arguments
 *
 * \returns EXIT_SUCCESS, or an error state
 */
int main( int argc, char *argv[] )
{
	ros::init( argc, argv, "basic_twist_integrator" );

	ros::NodeHandle nh;
	ros::NodeHandle nh_priv( "~" );

	basic_twist_integrator::basic_twist_integrator odom( nh, nh_priv );

	odom.start( );

	ros::spin( );

	// Return normally instead of calling std::exit( ): std::exit skips the
	// destructors of the local objects above (node handles, integrator), so
	// their shutdown/cleanup code would never run.
	return EXIT_SUCCESS;
}
void Data::parse(char sensor, double data[]){ switch (sensor){ case 'O': { Odom odom(data); this->odoms.push_back(&odom); break; } case 'L': { Laser laser(data); this->lasers.push_back(&laser); break; } } }
int main(int argc, char **argv) { const char *optstring = "hc:v"; int c; struct option long_opts[] = { { "help", no_argument, 0, 'h' }, { "camera", required_argument, 0, 'c' }, { "visualize", no_argument, 0, 'v' }, { 0, 0, 0, 0 } }; std::string camera_id; bool visualize = false; while ((c = getopt_long (argc, argv, optstring, long_opts, 0)) >= 0) { switch (c) { case 'c': camera_id = optarg; break; case 'v': visualize = true; break; default: usage(); return 1; }; } if(optind >= argc - 1) { usage(); } std::string datadir = argv[optind]; std::string outdir = argv[optind+1]; if(camera_id.empty()) { err("Missing --camera parameter. Run with -h for options."); return 1; } fovis::CameraIntrinsicsParameters camParams; camParams.width = 640; camParams.height = 480; // RGB camera parameters if(camera_id == "fr1") { camParams.fx = 517.306408; camParams.fy = 516.469215; camParams.cx = 318.643040; camParams.cy = 255.313989; camParams.k1 = 0.262383; camParams.k2 = -0.953104; camParams.p1 = -0.005358; camParams.p2 = 0.002628; camParams.k3 = 1.163314; } else if(camera_id == "fr2") { camParams.fx = 520.908620; camParams.fy = 521.007327; camParams.cx = 325.141442; camParams.cy = 249.701764; camParams.k1 = 0.231222; camParams.k2 = -0.784899; camParams.p1 = -0.003257; camParams.p2 = -0.000105; camParams.k3 = 0.917205; } else if(camera_id == "fr3") { camParams.fx = 537.960322; camParams.fy = 539.597659; camParams.cx = 319.183641; camParams.cy = 247.053820; camParams.k1 = 0.026370; camParams.k2 = -0.100086; camParams.p1 = 0.003138; camParams.p2 = 0.002421; camParams.k3 = 0.000000; } else { err("Unknown camera id [%s]", camera_id.c_str()); return 1; } info("Loading data from: [%s]\n", datadir.c_str()); fovis::Rectification rect(camParams); // If we wanted to play around with the different VO parameters, we could set // them here in the "options" variable. 
fovis::VisualOdometryOptions options = fovis::VisualOdometry::getDefaultOptions(); // setup the visual odometry fovis::VisualOdometry odom(&rect, options); // create the output trajectory file std::string traj_fname = outdir + "/traj.txt"; FILE* traj_fp = fopen(traj_fname.c_str(), "w"); if(!traj_fp) { err("Unable to create %s - %s", traj_fname.c_str(), strerror(errno)); return 1; } std::string gray_pgm_dir = outdir + "/gray"; mkdir(gray_pgm_dir.c_str(), 0755); std::string depth_pgm_dir = outdir + "/depth"; mkdir(depth_pgm_dir.c_str(), 0755); std::string vis_dir = outdir + "/vis"; mkdir(vis_dir.c_str(), 0755); // read the RGB and depth index files std::vector<TimestampFilename> rgb_fnames = read_file_index(datadir + "/rgb.txt"); std::vector<TimestampFilename> depth_fnames = read_file_index(datadir + "/depth.txt"); int depth_fname_index = 0; DrawImage draw_img(camParams.width, camParams.height * 2); for(int rgb_fname_index=0; rgb_fname_index < rgb_fnames.size(); rgb_fname_index++) { int64_t timestamp = rgb_fnames[rgb_fname_index].timestamp; std::string rgb_fname = datadir + "/" + rgb_fnames[rgb_fname_index].filename; long long ts_sec = timestamp / 1000000; long ts_usec = timestamp % 1000000; char ts_str[80]; snprintf(ts_str, 80, "%lld.%06ld", ts_sec, ts_usec); // match the RGB image up with a depth image bool matched_depth_image = false; while(depth_fname_index < depth_fnames.size()) { int64_t depth_ts = depth_fnames[depth_fname_index].timestamp; // declare a depth image match if the depth image timestamp is within // 40ms of the RGB image. 
int64_t dt_usec = depth_ts - timestamp; if(dt_usec > 40000) { dbg(" stop %lld.%06ld (dt %f)", (long long)(depth_ts / 1000000), (long)(depth_ts % 1000000), dt_usec / 1000.); break; } else if(dt_usec < -40000) { dbg(" skip %lld.%06ld (dt %f)", (long long)(depth_ts / 1000000), (long)(depth_ts % 1000000), dt_usec / 1000.); depth_fname_index++; } else { matched_depth_image = true; dbg(" mtch %lld.%06ld (dt %f)", (long long)(depth_ts / 1000000), (long)(depth_ts % 1000000), dt_usec / 1000.); break; } } if(!matched_depth_image) { // didn't find a depth image with a close enough timestamp. Skip this // RGB image. info("# skip %s", ts_str); continue; } std::string depth_fname = datadir + "/" + depth_fnames[depth_fname_index].filename; depth_fname_index++; // read RGB data std::vector<uint8_t> rgb_data; unsigned rgb_width; unsigned rgb_height; std::vector<uint8_t> rgb_png_data; lodepng::load_file(rgb_png_data, rgb_fname.c_str()); if(rgb_png_data.empty()) { err("Failed to load %s", rgb_fname.c_str()); continue; } if(0 != lodepng::decode(rgb_data, rgb_width, rgb_height, rgb_png_data, LCT_RGB, 8)) { err("Error decoding PNG %s", rgb_fname.c_str()); continue; } // convert RGB data to grayscale std::vector<uint8_t> gray_data(rgb_width*rgb_height); const uint8_t* rgb_pixel = &rgb_data[0]; for(int pixel_index=0; pixel_index<rgb_width*rgb_height; pixel_index++) { uint8_t r = rgb_pixel[0]; uint8_t g = rgb_pixel[1]; uint8_t b = rgb_pixel[2]; gray_data[pixel_index] = (r + g + b) / 3; rgb_pixel += 3; } // write gray image out. 
write_pgm(outdir + "/gray/" + ts_str + "-gray.pgm", &gray_data[0], rgb_width, rgb_height, rgb_width); // read depth data std::vector<uint8_t> depth_data_u8; unsigned depth_width; unsigned depth_height; std::vector<uint8_t> depth_png_data; lodepng::load_file(depth_png_data, depth_fname.c_str()); if(depth_png_data.empty()) { err("Failed to load %s", depth_fname.c_str()); continue; } if(0 != lodepng::decode(depth_data_u8, depth_width, depth_height, depth_png_data, LCT_GREY, 16)) { err("Error decoding PNG %s", depth_fname.c_str()); continue; } // convert depth data to a DepthImage object fovis::DepthImage depth_image(camParams, depth_width, depth_height); std::vector<float> depth_data(depth_width * depth_height); for(int i=0; i<depth_width*depth_height; i++) { // lodepng loads 16-bit PNGs into memory with big-endian ordering. // swizzle the bytes to get a uint16_t uint8_t high_byte = depth_data_u8[i*2]; uint8_t low_byte = depth_data_u8[i*2+1]; uint16_t data16 = (high_byte << 8) | low_byte; if(0 == data16) depth_data[i] = NAN; else depth_data[i] = data16 / float(5000); } depth_image.setDepthImage(&depth_data[0]); // debug depth image std::vector<uint8_t> depth_pgm_data(depth_width * depth_height); float depth_pgm_max_depth = 5; for(int i=0; i<depth_width*depth_height; i++) { if(isnan(depth_data[i])) depth_pgm_data[i] = 0; else { float d = depth_data[i]; if(d > depth_pgm_max_depth) d = depth_pgm_max_depth; depth_pgm_data[i] = 255 * d / depth_pgm_max_depth; } } write_pgm(outdir + "/depth/" + ts_str + "-depth.pgm", &depth_pgm_data[0], depth_width, depth_height, depth_width); // process the frame odom.processFrame(&gray_data[0], &depth_image); fovis::MotionEstimateStatusCode status = odom.getMotionEstimateStatus(); switch(status) { case fovis::INSUFFICIENT_INLIERS: printf("# %s - insufficient inliers\n", ts_str); break; case fovis::OPTIMIZATION_FAILURE: printf("# %s - optimization failed\n", ts_str); break; case fovis::REPROJECTION_ERROR: printf("# %s - reprojection error 
too high\n", ts_str); break; case fovis::NO_DATA: printf("# %s - no data\n", ts_str); break; case fovis::SUCCESS: default: break; } // get the integrated pose estimate. Eigen::Isometry3d cam_to_local = odom.getPose(); Eigen::Vector3d t = cam_to_local.translation(); Eigen::Quaterniond q(cam_to_local.rotation()); printf("%lld.%06ld %f %f %f %f %f %f %f\n", ts_sec, ts_usec, t.x(), t.y(), t.z(), q.x(), q.y(), q.z(), q.w()); fprintf(traj_fp, "%lld.%06ld %f %f %f %f %f %f %f\n", ts_sec, ts_usec, t.x(), t.y(), t.z(), q.x(), q.y(), q.z(), q.w()); // visualization if(visualize) { DrawColor status_colors[] = { DrawColor(255, 255, 0), DrawColor(0, 255, 0), DrawColor(255, 0, 0), DrawColor(255, 0, 255), DrawColor(127, 127, 0) }; DrawColor red(255, 0, 0); DrawColor green(0, 255, 0); DrawColor blue(0, 0, 255); memset(&draw_img.data[0], 0, draw_img.data.size()); const fovis::OdometryFrame* ref_frame = odom.getReferenceFrame(); const fovis::OdometryFrame* tgt_frame = odom.getTargetFrame(); const fovis::PyramidLevel* ref_pyr_base = ref_frame->getLevel(0); const uint8_t* ref_img_data = ref_pyr_base->getGrayscaleImage(); int ref_img_stride = ref_pyr_base->getGrayscaleImageStride(); const fovis::PyramidLevel* tgt_pyr_base = tgt_frame->getLevel(0); const uint8_t* tgt_img_data = tgt_pyr_base->getGrayscaleImage(); int tgt_img_stride = tgt_pyr_base->getGrayscaleImageStride(); int tgt_yoff = ref_pyr_base->getHeight(); // draw the reference frame on top draw_gray_img_rgb(ref_img_data, ref_pyr_base->getWidth(), ref_pyr_base->getHeight(), ref_img_stride, 0, 0, &draw_img); // draw the target frame on bottom draw_gray_img_rgb(tgt_img_data, tgt_pyr_base->getWidth(), tgt_pyr_base->getHeight(), tgt_img_stride, 0, tgt_yoff, &draw_img); const fovis::MotionEstimator* mestimator = odom.getMotionEstimator(); int num_levels = ref_frame->getNumLevels(); for(int level_index=0; level_index<num_levels; level_index++) { // draw reference features const fovis::PyramidLevel* ref_level = 
ref_frame->getLevel(level_index); int num_ref_keypoints = ref_level->getNumKeypoints(); for(int ref_kp_ind=0; ref_kp_ind<num_ref_keypoints; ref_kp_ind++) { const fovis::KeypointData* ref_kp = ref_level->getKeypointData(ref_kp_ind); int ref_u = (int)round(ref_kp->base_uv.x()); int ref_v = (int)round(ref_kp->base_uv.y()); draw_box_rgb(ref_u-1, ref_v-1, ref_u+1, ref_v+1, blue, &draw_img); } // draw target features const fovis::PyramidLevel* tgt_level = tgt_frame->getLevel(level_index); int num_tgt_keypoints = tgt_level->getNumKeypoints(); for(int tgt_kp_ind=0; tgt_kp_ind<num_tgt_keypoints; tgt_kp_ind++) { const fovis::KeypointData* tgt_kp = tgt_level->getKeypointData(tgt_kp_ind); int tgt_u = (int)round(tgt_kp->base_uv.x()); int tgt_v = (int)round(tgt_kp->base_uv.y()); draw_box_rgb(tgt_u-1, tgt_v-1 + tgt_yoff, tgt_u+1, tgt_v+1 + tgt_yoff, blue, &draw_img); } } const fovis::FeatureMatch* matches = mestimator->getMatches(); int num_matches = mestimator->getNumMatches(); // draw non-inlier matches for(int match_index=0; match_index<num_matches; match_index++) { const fovis::FeatureMatch& match = matches[match_index]; if(match.inlier) continue; const fovis::KeypointData* ref_keypoint = match.ref_keypoint; const fovis::KeypointData* tgt_keypoint = match.target_keypoint; int ref_u = (int)round(ref_keypoint->base_uv.x()); int ref_v = (int)round(ref_keypoint->base_uv.y()); int tgt_u = (int)round(tgt_keypoint->base_uv.x()); int tgt_v = (int)round(tgt_keypoint->base_uv.y()); draw_line_rgb(ref_u, ref_v, tgt_u, tgt_v + tgt_yoff, red, &draw_img); } // draw inlier matches for(int match_index=0; match_index<num_matches; match_index++) { const fovis::FeatureMatch& match = matches[match_index]; if(!match.inlier) continue; const fovis::KeypointData* ref_keypoint = match.ref_keypoint; const fovis::KeypointData* tgt_keypoint = match.target_keypoint; int ref_u = (int)round(ref_keypoint->base_uv.x()); int ref_v = (int)round(ref_keypoint->base_uv.y()); int tgt_u = 
(int)round(tgt_keypoint->base_uv.x()); int tgt_v = (int)round(tgt_keypoint->base_uv.y()); draw_line_rgb(ref_u, ref_v, tgt_u, tgt_v + tgt_yoff, green, &draw_img); } // draw a couple lines indicating the VO status draw_box_rgb(0, tgt_yoff - 1, draw_img.width, tgt_yoff + 1, status_colors[status], &draw_img); // save visualization std::string vis_fname = vis_dir + "/" + ts_str + "-vis.png"; lodepng::encode(vis_fname, &draw_img.data[0], draw_img.width, draw_img.height, LCT_RGB); } } fclose(traj_fp); return 0; }
/* Main ----------------------------------------------------------------------*/
/*
 * Firmware entry point for an HR-2016 omni-wheel robot base.
 *
 * Sets up interrupts, a millisecond timer, the UHM hardware wrapper
 * (LEDs, motors, buzzer, encoders, mode DIP switches, PIC link) and a
 * rosserial node (subscribes "omni_corrected", publishes "odometry"),
 * then runs the control loop forever at roughly 10 ms per iteration.
 *
 * NOTE(review): the loop reads globals apparently written elsewhere
 * (SAFE, wheel, bwhe, fall, con) — presumably updated by the ROS
 * subscriber callback / interrupts; confirm against the rest of the file.
 */
int main(void){
	/* Variables */
	int i;
	uint8_t mode1=0,mode2=0,mode3=0,mode4=0;          // DIP-switch mode readings (3 bits each)
	uint32_t Enc1=0,Enc2=0,Enc3=0,Enc4=0;             // wheel encoder counters
	float pwmval[4]={0,0,0,0};                        // per-motor PWM duty commands
	bool relay[4]={false,false,false,false};          // latches which motion (fwd/back/left/right) is active
	bool S_R1=false,S_R2=false,S_L1=false,S_L2=false; // previous button states for edge detection
	/* Nvic */
	Nvic::initialise();
	/* delay */
	MillisecondTimer Milli;
	Milli.initialise();
	/* UHM */
	UHM uhm;
	uhm.led1.Off();
	uhm.led2.Off();
	uhm.led3.Off();
	uhm.led4.Off();
	uhm.buzz.pat_01();
	/* ROS */
	ros::NodeHandle nh;
	HR_2016::odometry odometry;
	//ros::Subscriber<HR_2016::controller> cont("ps3con", &ps3_cb);
	ros::Subscriber<HR_2016::omni> omniwheel("omni_corrected", &omni_cb);
	ros::Publisher odom("odometry",&odometry);
	nh.initNode();
	//nh.subscribe(cont);
	nh.subscribe(omniwheel);
	nh.advertise(odom);
	/* Loop */
	while(1){
		if(SAFE){
			// Falling-edge detection per wheel flag: fall.flag[n] pulses true
			// for one iteration when wheel.flag[n] goes true -> false.
			if( wheel.flag[0]==false && (bwhe.flag[0]==true) ){ fall.flag[0]=true; }else{ fall.flag[0]=false; }
			bwhe.flag[0] = wheel.flag[0];
			if( wheel.flag[1]==false && (bwhe.flag[1]==true) ){ fall.flag[1]=true; }else{ fall.flag[1]=false; }
			bwhe.flag[1] = wheel.flag[1];
			if( wheel.flag[2]==false && (bwhe.flag[2]==true) ){ fall.flag[2]=true; }else{ fall.flag[2]=false; }
			bwhe.flag[2] = wheel.flag[2];
			if( wheel.flag[3]==false && (bwhe.flag[3]==true) ){ fall.flag[3]=true; }else{ fall.flag[3]=false; }
			bwhe.flag[3] = wheel.flag[3];
			/* Enc read */
			/* Motor */
			// Scale the commanded rate by 1/0.707 (~sqrt(2)) for all four motors.
			pwmval[0]=wheel.rate/0.707;
			pwmval[1]=wheel.rate/0.707;
			pwmval[2]=wheel.rate/0.707;
			pwmval[3]=wheel.rate/0.707;
			// Try making it behave like an omni-wheel drive (4/17)
			if(mode1 == 1 && mode2 == 1 && mode3 == 1 && mode4 == 1){
				// All mode switches at 1: direct gamepad control of motor pairs.
				if(con.DOWN==1){ uhm.motor2.Backward(); uhm.motor4.Backward(); }else if(con.UP==1){ uhm.motor2.Forward(); uhm.motor4.Forward(); }
				if(con.BATSU==1){ uhm.motor1.Forward(); uhm.motor3.Forward(); }else if(con.SANKAKU==1){ uhm.motor1.Backward(); uhm.motor3.Backward(); }
				if(con.LEFT==1 || con.SHIKAKU==1){ HR_Left(); }else if(con.MARU==1 || con.RIGHT==1){ HR_Right(); }
				// Stop when no directional button is held, else fixed 95% duty.
				if(con.UP==0 && con.SANKAKU==0 && con.DOWN==0 && con.BATSU==0 && con.MARU==0 && con.RIGHT==0 && con.LEFT==0 && con.SHIKAKU==0){ HR_Stop(); }else{ HR_Setpwm(0.95,0.95,0.95,0.95); }
			}
			// Otherwise: one motion at a time, selected by wheel.flag[] and
			// interlocked through relay[] so motions cannot overlap.
			else if((wheel.flag[0]) && !relay[1] && !relay[2] && !relay[3]){
				HR_For();
				uhm.led1.On(); uhm.led2.Off(); uhm.led3.Off(); uhm.led4.Off();
				HR_Setpwm(pwmval[0],pwmval[1],pwmval[2],pwmval[3]);
				relay[0] =true;
			}else if((wheel.flag[1]) && !relay[0] && !relay[2] && !relay[3]){
				HR_Back();
				uhm.led4.On(); uhm.led1.Off(); uhm.led2.Off(); uhm.led3.Off();
				HR_Setpwm(pwmval[0],pwmval[1],pwmval[2],pwmval[3]);
				relay[1] =true;
			}else if((wheel.flag[2]) && !relay[0] && !relay[1] && !relay[3]){
				HR_Left();
				uhm.led2.On(); uhm.led1.Off(); uhm.led3.Off(); uhm.led4.Off();
				HR_Setpwm(pwmval[0],pwmval[1],pwmval[2],pwmval[3]);
				relay[2] =true;
			}else if((wheel.flag[3]) && !relay[0] && !relay[1] && !relay[2]){
				HR_Right();
				uhm.led3.On(); uhm.led1.Off(); uhm.led2.Off(); uhm.led4.Off();
				HR_Setpwm(pwmval[0],pwmval[1],pwmval[2],pwmval[3]);
				relay[3] =true;
			}else if((wheel.flag[4]) && !relay[1] && !relay[2] && !relay[3]){
				HR_For();
				uhm.led1.On(); uhm.led3.On(); uhm.led2.Off(); uhm.led4.Off();
				HR_Setpwm(pwmval[0],pwmval[1],pwmval[2],pwmval[3]);
				relay[0] =true;
			}else if((wheel.flag[5]) && !relay[1] && !relay[2] && !relay[3]){
				HR_For();
				uhm.led1.On(); uhm.led2.On(); uhm.led3.Off(); uhm.led4.Off();
				HR_Setpwm(pwmval[0],pwmval[1],pwmval[2],pwmval[3]);
				relay[0] =true;
			}else if((wheel.flag[6]) && !relay[0] && !relay[2] && !relay[3]){
				HR_Back();
				uhm.led3.On(); uhm.led4.On(); uhm.led2.Off(); uhm.led1.Off();
				HR_Setpwm(pwmval[0],pwmval[1],pwmval[2],pwmval[3]);
				relay[1] =true;
			}else if((wheel.flag[7]) && !relay[0] && !relay[2] && !relay[3]){
				HR_Back();
				uhm.led2.On(); uhm.led4.On(); uhm.led1.Off(); uhm.led3.Off();
				HR_Setpwm(pwmval[0],pwmval[1],pwmval[2],pwmval[3]);
				relay[1] =true;
			}else if((con.MARU) && !relay[0] && !relay[1] && !relay[2] && !relay[3]){
				// Gamepad circle/cross buttons spin in place at fixed 90% duty.
				HR_Rcir();
				uhm.led2.On(); uhm.led4.On(); uhm.led1.Off(); uhm.led3.Off();
				HR_Setpwm(0.9,0.9,0.9,0.9);
			}else if((con.BATSU) && !relay[0] && !relay[1] && !relay[2] && !relay[3]){
				HR_Lcir();
				uhm.led2.On(); uhm.led4.On(); uhm.led1.Off(); uhm.led3.Off();
				HR_Setpwm(0.9,0.9,0.9,0.9);
			}else {
				// No request active: stop, release interlocks, clear LEDs.
				HR_Stop();
				HR_Setpwm(0,0,0,0);
				relay[0]=false; relay[1]=false; relay[2]=false; relay[3]=false;
				uhm.led1.Off(); uhm.led2.Off(); uhm.led3.Off(); uhm.led4.Off();
			}
			if(mode1 == 1 && mode2 == 1 && mode3 == 1 && mode4 == 1){
				// Direct-control mode: no braking pulses.
			}
			else{
				// On a falling edge of a motion flag, issue 5 short pulses in
				// that direction (5 ms drive / 15 ms stop) as an active brake.
				if(fall.flag[0]){
					for(i=0;i<5;i++){ HR_For(); Milli.delay(5); HR_Stop(); Milli.delay(15); }
				}
				if(fall.flag[1]){
					for(i=0;i<5;i++){ HR_Back(); Milli.delay(5); HR_Stop(); Milli.delay(15); }
				}if(fall.flag[2]){
					for(i=0;i<5;i++){ HR_Left(); Milli.delay(5); HR_Stop(); Milli.delay(15); }
				}
				if(fall.flag[3]){
					for(i=0;i<5;i++){ HR_Right(); Milli.delay(5); HR_Stop(); Milli.delay(15); }
				}
			}
			/* Fan */
			// Fan control reuses the old implementation (4/10)
			// Rising-edge detection on the shoulder buttons (S_* hold the
			// previous state) so each press triggers exactly one command.
			if( con.R1 && (S_R1==false) ){ uhm.communication.Fan1_Up(); uhm.led1.Toggle(); }
			S_R1 = con.R1;
			if( con.R2 && (S_R2==false) ){ uhm.communication.Fan1_Down(); uhm.led1.Toggle(); }
			S_R2 = con.R2;
			/* open */
			if( con.L2 && (S_L2==false) ){ uhm.communication.Fan_open(); uhm.led1.Toggle(); }
			S_L2 = con.L2;
			/* open */
			if( con.L1 && (S_L1==false) ){ uhm.communication.sig2PIC(); uhm.led1.Toggle(); }
			S_L1 = con.L1;
			/* pole */
			// Count rising edges of the pole sensor; signal the PIC on the 2nd.
			uhm.pole_state = uhm.communication.Pole_In();
			if( uhm.pole_state && uhm.pole_save ==false){
				uhm.pole_count++;
				if( uhm.pole_count==2 ){ uhm.communication.sig2PIC(); }
			}
			else{
			}
			uhm.pole_save = uhm.pole_state;
			/* mode check */
		}else{
			// Not SAFE: force everything off and clear the interlocks.
			HR_Stop();
			HR_Setpwm(0,0,0,0);
			relay[0]=false; relay[1]=false; relay[2]=false; relay[3]=false;
			uhm.led1.Off(); uhm.led2.Off(); uhm.led3.Off(); uhm.led4.Off();
		}
		/* Encoder */
		// NOTE(review): encoder counters are read but not used below.
		Enc1 = uhm.encoders.getCounter1();
		Enc2 = uhm.encoders.getCounter2();
		Enc3 = uhm.encoders.getCounter3();
		Enc4 = uhm.encoders.getCounter4();
		/* Odometry */
		// Pose fields are always published as zero here.
		odometry.x = 0;
		odometry.y = 0;
		odometry.z = 0;
		odometry.w = 0;
		/* Mode */
		// Read the 3 low bits of each mode DIP switch.
		mode1 = (uhm.mode1.input_pin(true) & 0b00000111);
		mode2 = (uhm.mode2.input_pin(true) & 0b00000111);
		mode3 = (uhm.mode3.input_pin(true) & 0b00000111);
		mode4 = (uhm.mode4.input_pin(true) & 0b00000111);
		odometry.mode1 = mode1;
		odometry.mode2 = mode2;
		odometry.mode3 = mode3;
		odometry.mode4 = mode4;
		/* ros */
		odom.publish(&odometry);
		nh.spinOnce();
		/* delay */
		Milli.delay(10);
	}
}
/**
 * data_player: replays the sensor stream stored in a RTAB-Map database as
 * live ROS topics — RGB/depth or stereo images with camera infos, laser
 * scans (LaserScan or PointCloud2), odometry, TF, GPS fixes, global poses
 * and, with "--clock" on the command line, simulated time on /clock.
 * Playback speed is scaled by the "rate" parameter and can be paused /
 * resumed through the "pause"/"resume" services (handled by pauseCallback /
 * resumeCallback elsewhere in this file).
 *
 * Returns 0 on normal end of data, -1 on bad parameters or an unreadable
 * database.
 */
int main(int argc, char** argv)
{
	ros::init(argc, argv, "data_player");

	//ULogger::setType(ULogger::kTypeConsole);
	//ULogger::setLevel(ULogger::kDebug);
	//ULogger::setEventLevel(ULogger::kWarning);

	// "--clock" makes this node the simulated-time source (like rosbag play --clock).
	bool publishClock = false;
	for(int i=1;i<argc;++i)
	{
		if(strcmp(argv[i], "--clock") == 0)
		{
			publishClock = true;
		}
	}

	ros::NodeHandle nh;
	ros::NodeHandle pnh("~");
	std::string frameId = "base_link";
	std::string odomFrameId = "odom";
	std::string cameraFrameId = "camera_optical_link";
	std::string scanFrameId = "base_laser_link";
	double rate = 1.0f;
	std::string databasePath = "";
	bool publishTf = true;
	int startId = 0;
	bool useDbStamps = true; // NOTE(review): declared but never used below.
	pnh.param("frame_id", frameId, frameId);
	pnh.param("odom_frame_id", odomFrameId, odomFrameId);
	pnh.param("camera_frame_id", cameraFrameId, cameraFrameId);
	pnh.param("scan_frame_id", scanFrameId, scanFrameId);
	pnh.param("rate", rate, rate); // Ratio of the database stamps
	pnh.param("database", databasePath, databasePath);
	pnh.param("publish_tf", publishTf, publishTf);
	pnh.param("start_id", startId, startId);

	// A general 360 lidar with 0.5 deg increment
	// (used only when the database scans carry no angle metadata).
	double scanAngleMin, scanAngleMax, scanAngleIncrement, scanRangeMin, scanRangeMax;
	pnh.param<double>("scan_angle_min", scanAngleMin, -M_PI);
	pnh.param<double>("scan_angle_max", scanAngleMax, M_PI);
	pnh.param<double>("scan_angle_increment", scanAngleIncrement, M_PI / 720.0);
	pnh.param<double>("scan_range_min", scanRangeMin, 0.0);
	pnh.param<double>("scan_range_max", scanRangeMax, 60);

	ROS_INFO("frame_id = %s", frameId.c_str());
	ROS_INFO("odom_frame_id = %s", odomFrameId.c_str());
	ROS_INFO("camera_frame_id = %s", cameraFrameId.c_str());
	ROS_INFO("scan_frame_id = %s", scanFrameId.c_str());
	ROS_INFO("rate = %f", rate);
	ROS_INFO("publish_tf = %s", publishTf?"true":"false");
	ROS_INFO("start_id = %d", startId);
	ROS_INFO("Publish clock (--clock): %s", publishClock?"true":"false");

	if(databasePath.empty())
	{
		ROS_ERROR("Parameter \"database\" must be set (path to a RTAB-Map database).");
		return -1;
	}

	// Expand '~' and make relative paths absolute.
	databasePath = uReplaceChar(databasePath, '~', UDirectory::homeDir());
	if(databasePath.size() && databasePath.at(0) != '/')
	{
		databasePath = UDirectory::currentDir(true) + databasePath;
	}
	ROS_INFO("database = %s", databasePath.c_str());

	// Negative rate: DBReader interprets it as a multiplier on database stamps.
	rtabmap::DBReader reader(databasePath, -rate, false, false, false, startId);
	if(!reader.init())
	{
		ROS_ERROR("Cannot open database \"%s\".", databasePath.c_str());
		return -1;
	}

	ros::ServiceServer pauseSrv = pnh.advertiseService("pause", pauseCallback);
	ros::ServiceServer resumeSrv = pnh.advertiseService("resume", resumeCallback);

	// Publishers are created lazily (advertised only once the corresponding
	// data type is first seen in the database).
	image_transport::ImageTransport it(nh);
	image_transport::Publisher imagePub;
	image_transport::Publisher rgbPub;
	image_transport::Publisher depthPub;
	image_transport::Publisher leftPub;
	image_transport::Publisher rightPub;
	ros::Publisher rgbCamInfoPub;
	ros::Publisher depthCamInfoPub;
	ros::Publisher leftCamInfoPub;
	ros::Publisher rightCamInfoPub;
	ros::Publisher odometryPub;
	ros::Publisher scanPub;
	ros::Publisher scanCloudPub;
	ros::Publisher globalPosePub;
	ros::Publisher gpsFixPub;
	ros::Publisher clockPub;
	tf2_ros::TransformBroadcaster tfBroadcaster;

	if(publishClock)
	{
		clockPub = nh.advertise<rosgraph_msgs::Clock>("/clock", 1);
	}

	UTimer timer;
	rtabmap::CameraInfo cameraInfo;
	rtabmap::SensorData data = reader.takeImage(&cameraInfo);
	rtabmap::OdometryInfo odomInfo;
	odomInfo.reg.covariance = cameraInfo.odomCovariance;
	rtabmap::OdometryEvent odom(data, cameraInfo.odomPose, odomInfo);
	double acquisitionTime = timer.ticks();
	// Loop until the reader returns data with a null id (end of database).
	while(ros::ok() && odom.data().id())
	{
		ROS_INFO("Reading sensor data %d...", odom.data().id());

		ros::Time time(odom.data().stamp());

		if(publishClock)
		{
			rosgraph_msgs::Clock msg;
			msg.clock = time;
			clockPub.publish(msg);
		}

		sensor_msgs::CameraInfo camInfoA; //rgb or left
		sensor_msgs::CameraInfo camInfoB; //depth or right

		// Start from identity K/R and a zero P with unit scale.
		camInfoA.K.assign(0);
		camInfoA.K[0] = camInfoA.K[4] = camInfoA.K[8] = 1;
		camInfoA.R.assign(0);
		camInfoA.R[0] = camInfoA.R[4] = camInfoA.R[8] = 1;
		camInfoA.P.assign(0);
		camInfoA.P[10] = 1;
		camInfoA.header.frame_id = cameraFrameId;
		camInfoA.header.stamp = time;
		camInfoB = camInfoA;

		// type: -1 = plain image, 0 = RGB-D, 1 = stereo.
		int type = -1;
		if(!odom.data().depthRaw().empty() && (odom.data().depthRaw().type() == CV_32FC1 || odom.data().depthRaw().type() == CV_16UC1))
		{
			if(odom.data().cameraModels().size() > 1)
			{
				ROS_WARN("Multi-cameras detected in database but this node cannot send multi-images yet...");
			}
			else
			{
				//depth
				if(odom.data().cameraModels().size())
				{
					camInfoA.D.resize(5,0);
					camInfoA.P[0] = odom.data().cameraModels()[0].fx();
					camInfoA.K[0] = odom.data().cameraModels()[0].fx();
					camInfoA.P[5] = odom.data().cameraModels()[0].fy();
					camInfoA.K[4] = odom.data().cameraModels()[0].fy();
					camInfoA.P[2] = odom.data().cameraModels()[0].cx();
					camInfoA.K[2] = odom.data().cameraModels()[0].cx();
					camInfoA.P[6] = odom.data().cameraModels()[0].cy();
					camInfoA.K[5] = odom.data().cameraModels()[0].cy();
					camInfoB = camInfoA;
				}

				type=0;

				if(rgbPub.getTopic().empty()) rgbPub = it.advertise("rgb/image", 1);
				if(depthPub.getTopic().empty()) depthPub = it.advertise("depth_registered/image", 1);
				if(rgbCamInfoPub.getTopic().empty()) rgbCamInfoPub = nh.advertise<sensor_msgs::CameraInfo>("rgb/camera_info", 1);
				if(depthCamInfoPub.getTopic().empty()) depthCamInfoPub = nh.advertise<sensor_msgs::CameraInfo>("depth_registered/camera_info", 1);
			}
		}
		else if(!odom.data().rightRaw().empty() && odom.data().rightRaw().type() == CV_8U)
		{
			//stereo
			if(odom.data().stereoCameraModel().isValidForProjection())
			{
				camInfoA.D.resize(8,0);
				camInfoA.P[0] = odom.data().stereoCameraModel().left().fx();
				camInfoA.K[0] = odom.data().stereoCameraModel().left().fx();
				camInfoA.P[5] = odom.data().stereoCameraModel().left().fy();
				camInfoA.K[4] = odom.data().stereoCameraModel().left().fy();
				camInfoA.P[2] = odom.data().stereoCameraModel().left().cx();
				camInfoA.K[2] = odom.data().stereoCameraModel().left().cx();
				camInfoA.P[6] = odom.data().stereoCameraModel().left().cy();
				camInfoA.K[5] = odom.data().stereoCameraModel().left().cy();
				camInfoB = camInfoA;
				camInfoB.P[3] = odom.data().stereoCameraModel().right().Tx(); // Right_Tx = -baseline*fx
			}

			type=1;

			if(leftPub.getTopic().empty()) leftPub = it.advertise("left/image", 1);
			if(rightPub.getTopic().empty()) rightPub = it.advertise("right/image", 1);
			if(leftCamInfoPub.getTopic().empty()) leftCamInfoPub = nh.advertise<sensor_msgs::CameraInfo>("left/camera_info", 1);
			if(rightCamInfoPub.getTopic().empty()) rightCamInfoPub = nh.advertise<sensor_msgs::CameraInfo>("right/camera_info", 1);
		}
		else
		{
			if(imagePub.getTopic().empty()) imagePub = it.advertise("image", 1);
		}

		camInfoA.height = odom.data().imageRaw().rows;
		camInfoA.width = odom.data().imageRaw().cols;
		camInfoB.height = odom.data().depthOrRightRaw().rows;
		camInfoB.width = odom.data().depthOrRightRaw().cols;

		// Lazily advertise the scan publisher: LaserScan for 2D scans,
		// PointCloud2 otherwise.
		if(!odom.data().laserScanRaw().isEmpty())
		{
			if(scanPub.getTopic().empty() && odom.data().laserScanRaw().is2d())
			{
				scanPub = nh.advertise<sensor_msgs::LaserScan>("scan", 1);
				if(odom.data().laserScanRaw().angleIncrement() > 0.0f)
				{
					ROS_INFO("Scan will be published.");
				}
				else
				{
					ROS_INFO("Scan will be published with those parameters:");
					ROS_INFO(" scan_angle_min=%f", scanAngleMin);
					ROS_INFO(" scan_angle_max=%f", scanAngleMax);
					ROS_INFO(" scan_angle_increment=%f", scanAngleIncrement);
					ROS_INFO(" scan_range_min=%f", scanRangeMin);
					ROS_INFO(" scan_range_max=%f", scanRangeMax);
				}
			}
			else if(scanCloudPub.getTopic().empty())
			{
				scanCloudPub = nh.advertise<sensor_msgs::PointCloud2>("scan_cloud", 1);
				ROS_INFO("Scan cloud will be published.");
			}
		}

		if(!odom.data().globalPose().isNull() &&
			odom.data().globalPoseCovariance().cols==6 &&
			odom.data().globalPoseCovariance().rows==6)
		{
			if(globalPosePub.getTopic().empty())
			{
				globalPosePub = nh.advertise<geometry_msgs::PoseWithCovarianceStamped>("global_pose", 1);
				ROS_INFO("Global pose will be published.");
			}
		}

		if(odom.data().gps().stamp() > 0.0)
		{
			if(gpsFixPub.getTopic().empty())
			{
				gpsFixPub = nh.advertise<sensor_msgs::NavSatFix>("gps/fix", 1);
				ROS_INFO("GPS will be published.");
			}
		}

		// publish transforms first
		if(publishTf)
		{
			// base -> camera optical frame (from the camera model's local transform)
			rtabmap::Transform localTransform;
			if(odom.data().cameraModels().size() == 1)
			{
				localTransform = odom.data().cameraModels()[0].localTransform();
			}
			else if(odom.data().stereoCameraModel().isValidForProjection())
			{
				localTransform = odom.data().stereoCameraModel().left().localTransform();
			}
			if(!localTransform.isNull())
			{
				geometry_msgs::TransformStamped baseToCamera;
				baseToCamera.child_frame_id = cameraFrameId;
				baseToCamera.header.frame_id = frameId;
				baseToCamera.header.stamp = time;
				rtabmap_ros::transformToGeometryMsg(localTransform, baseToCamera.transform);
				tfBroadcaster.sendTransform(baseToCamera);
			}

			// odom -> base
			if(!odom.pose().isNull())
			{
				geometry_msgs::TransformStamped odomToBase;
				odomToBase.child_frame_id = frameId;
				odomToBase.header.frame_id = odomFrameId;
				odomToBase.header.stamp = time;
				rtabmap_ros::transformToGeometryMsg(odom.pose(), odomToBase.transform);
				tfBroadcaster.sendTransform(odomToBase);
			}

			// base -> laser
			if(!scanPub.getTopic().empty() || !scanCloudPub.getTopic().empty())
			{
				geometry_msgs::TransformStamped baseToLaserScan;
				baseToLaserScan.child_frame_id = scanFrameId;
				baseToLaserScan.header.frame_id = frameId;
				baseToLaserScan.header.stamp = time;
				rtabmap_ros::transformToGeometryMsg(odom.data().laserScanCompressed().localTransform(), baseToLaserScan.transform);
				tfBroadcaster.sendTransform(baseToLaserScan);
			}
		}

		if(!odom.pose().isNull())
		{
			if(odometryPub.getTopic().empty()) odometryPub = nh.advertise<nav_msgs::Odometry>("odom", 1);

			if(odometryPub.getNumSubscribers())
			{
				nav_msgs::Odometry odomMsg;
				odomMsg.child_frame_id = frameId;
				odomMsg.header.frame_id = odomFrameId;
				odomMsg.header.stamp = time;
				rtabmap_ros::transformToPoseMsg(odom.pose(), odomMsg.pose.pose);
				UASSERT(odomMsg.pose.covariance.size() == 36 &&
						odom.covariance().total() == 36 &&
						odom.covariance().type() == CV_64FC1);
				memcpy(odomMsg.pose.covariance.begin(), odom.covariance().data, 36*sizeof(double));
				odometryPub.publish(odomMsg);
			}
		}

		// Publish async topics first (so that they can be caught by rtabmap before the image topics)
		if(globalPosePub.getNumSubscribers() > 0 &&
			!odom.data().globalPose().isNull() &&
			odom.data().globalPoseCovariance().cols==6 &&
			odom.data().globalPoseCovariance().rows==6)
		{
			geometry_msgs::PoseWithCovarianceStamped msg;
			rtabmap_ros::transformToPoseMsg(odom.data().globalPose(), msg.pose.pose);
			memcpy(msg.pose.covariance.data(), odom.data().globalPoseCovariance().data, 36*sizeof(double));
			msg.header.frame_id = frameId;
			msg.header.stamp = time;
			globalPosePub.publish(msg);
		}

		if(odom.data().gps().stamp() > 0.0)
		{
			sensor_msgs::NavSatFix msg;
			msg.longitude = odom.data().gps().longitude();
			msg.latitude = odom.data().gps().latitude();
			msg.altitude = odom.data().gps().altitude();
			msg.position_covariance_type = sensor_msgs::NavSatFix::COVARIANCE_TYPE_DIAGONAL_KNOWN;
			// Diagonal covariance from the scalar GPS error (variance = error^2).
			msg.position_covariance.at(0) = msg.position_covariance.at(4) = msg.position_covariance.at(8)= odom.data().gps().error()* odom.data().gps().error();
			msg.header.frame_id = frameId;
			msg.header.stamp.fromSec(odom.data().gps().stamp());
			gpsFixPub.publish(msg);
		}

		if(type >= 0)
		{
			if(rgbCamInfoPub.getNumSubscribers() && type == 0)
			{
				rgbCamInfoPub.publish(camInfoA);
			}
			if(leftCamInfoPub.getNumSubscribers() && type == 1)
			{
				leftCamInfoPub.publish(camInfoA);
			}
			if(depthCamInfoPub.getNumSubscribers() && type == 0)
			{
				depthCamInfoPub.publish(camInfoB);
			}
			if(rightCamInfoPub.getNumSubscribers() && type == 1)
			{
				rightCamInfoPub.publish(camInfoB);
			}
		}

		// Main (color/left/mono) image.
		if(imagePub.getNumSubscribers() || rgbPub.getNumSubscribers() || leftPub.getNumSubscribers())
		{
			cv_bridge::CvImage img;
			if(odom.data().imageRaw().channels() == 1)
			{
				img.encoding = sensor_msgs::image_encodings::MONO8;
			}
			else
			{
				img.encoding = sensor_msgs::image_encodings::BGR8;
			}
			img.image = odom.data().imageRaw();
			sensor_msgs::ImagePtr imageRosMsg = img.toImageMsg();
			imageRosMsg->header.frame_id = cameraFrameId;
			imageRosMsg->header.stamp = time;

			if(imagePub.getNumSubscribers())
			{
				imagePub.publish(imageRosMsg);
			}
			if(rgbPub.getNumSubscribers() && type == 0)
			{
				rgbPub.publish(imageRosMsg);
			}
			if(leftPub.getNumSubscribers() && type == 1)
			{
				leftPub.publish(imageRosMsg);
				leftCamInfoPub.publish(camInfoA);
			}
		}

		// Registered depth image (RGB-D case).
		if(depthPub.getNumSubscribers() && !odom.data().depthRaw().empty() && type==0)
		{
			cv_bridge::CvImage img;
			if(odom.data().depthRaw().type() == CV_32FC1)
			{
				img.encoding = sensor_msgs::image_encodings::TYPE_32FC1;
			}
			else
			{
				img.encoding = sensor_msgs::image_encodings::TYPE_16UC1;
			}
			img.image = odom.data().depthRaw();
			sensor_msgs::ImagePtr imageRosMsg = img.toImageMsg();
			imageRosMsg->header.frame_id = cameraFrameId;
			imageRosMsg->header.stamp = time;
			depthPub.publish(imageRosMsg);
			depthCamInfoPub.publish(camInfoB);
		}

		// Right image (stereo case).
		if(rightPub.getNumSubscribers() && !odom.data().rightRaw().empty() && type==1)
		{
			cv_bridge::CvImage img;
			img.encoding = sensor_msgs::image_encodings::MONO8;
			img.image = odom.data().rightRaw();
			sensor_msgs::ImagePtr imageRosMsg = img.toImageMsg();
			imageRosMsg->header.frame_id = cameraFrameId;
			imageRosMsg->header.stamp = time;
			rightPub.publish(imageRosMsg);
			rightCamInfoPub.publish(camInfoB);
		}

		if(!odom.data().laserScanRaw().isEmpty())
		{
			if(scanPub.getNumSubscribers() && odom.data().laserScanRaw().is2d())
			{
				//inspired from pointcloud_to_laserscan package
				sensor_msgs::LaserScan msg;
				msg.header.frame_id = scanFrameId;
				msg.header.stamp = time;
				msg.angle_min = scanAngleMin;
				msg.angle_max = scanAngleMax;
				msg.angle_increment = scanAngleIncrement;
				msg.time_increment = 0.0;
				msg.scan_time = 0;
				msg.range_min = scanRangeMin;
				msg.range_max = scanRangeMax;
				// Prefer angle metadata stored with the scan when available.
				if(odom.data().laserScanRaw().angleIncrement() > 0.0f)
				{
					msg.angle_min = odom.data().laserScanRaw().angleMin();
					msg.angle_max = odom.data().laserScanRaw().angleMax();
					msg.angle_increment = odom.data().laserScanRaw().angleIncrement();
					msg.range_min = odom.data().laserScanRaw().rangeMin();
					msg.range_max = odom.data().laserScanRaw().rangeMax();
				}

				uint32_t rangesSize = std::ceil((msg.angle_max - msg.angle_min) / msg.angle_increment);
				msg.ranges.assign(rangesSize, 0.0);

				// Bin each 2D point into the nearest angular slot, keeping the
				// closest range per slot (0 means "empty").
				const cv::Mat & scan = odom.data().laserScanRaw().data();
				for (int i=0; i<scan.cols; ++i)
				{
					const float * ptr = scan.ptr<float>(0,i);
					double range = hypot(ptr[0], ptr[1]);
					if (range >= msg.range_min && range <=msg.range_max)
					{
						double angle = atan2(ptr[1], ptr[0]);
						if (angle >= msg.angle_min && angle <= msg.angle_max)
						{
							int index = (angle - msg.angle_min) / msg.angle_increment;
							if (index>=0 && index<rangesSize && (range < msg.ranges[index] || msg.ranges[index]==0))
							{
								msg.ranges[index] = range;
							}
						}
					}
				}

				scanPub.publish(msg);
			}
			else if(scanCloudPub.getNumSubscribers())
			{
				sensor_msgs::PointCloud2 msg;
				pcl_conversions::moveFromPCL(*rtabmap::util3d::laserScanToPointCloud2(odom.data().laserScanRaw()), msg);
				msg.header.frame_id = scanFrameId;
				msg.header.stamp = time;
				scanCloudPub.publish(msg);
			}
		}

		// User data of the form "GOAL:<id>" triggers rtabmap's set_goal service.
		if(odom.data().userDataRaw().type() == CV_8SC1 &&
		   odom.data().userDataRaw().cols >= 7 && // including null str ending
		   odom.data().userDataRaw().rows == 1 &&
		   memcmp(odom.data().userDataRaw().data, "GOAL:", 5) == 0)
		{
			//GOAL format detected, remove it from the user data and send it as goal event
			std::string goalStr = (const char *)odom.data().userDataRaw().data;
			if(!goalStr.empty())
			{
				std::list<std::string> strs = uSplit(goalStr, ':');
				if(strs.size() == 2)
				{
					int goalId = atoi(strs.rbegin()->c_str());
					if(goalId > 0)
					{
						ROS_WARN("Goal %d detected, calling rtabmap's set_goal service!", goalId);
						rtabmap_ros::SetGoal setGoalSrv;
						setGoalSrv.request.node_id = goalId;
						setGoalSrv.request.node_label = "";
						if(!ros::service::call("set_goal", setGoalSrv))
						{
							ROS_ERROR("Can't call \"set_goal\" service");
						}
					}
				}
			}
		}

		ros::spinOnce();

		// Busy-wait (with callbacks) while paused via the "pause" service.
		while(ros::ok() && paused)
		{
			uSleep(100);
			ros::spinOnce();
		}

		// Fetch the next frame; DBReader paces takeImage() per the rate.
		timer.restart();
		cameraInfo = rtabmap::CameraInfo();
		data = reader.takeImage(&cameraInfo);
		odomInfo.reg.covariance = cameraInfo.odomCovariance;
		odom = rtabmap::OdometryEvent(data, cameraInfo.odomPose, odomInfo);
		acquisitionTime = timer.ticks();
	}

	return 0;
}
void VescToOdom::vescStateCallback(const vesc_msgs::VescStateStamped::ConstPtr& state) { // check that we have a last servo command if we are depending on it for angular velocity if (use_servo_cmd_ && !last_servo_cmd_) return; // convert to engineering units double current_speed = ( state->state.speed - speed_to_erpm_offset_ ) / speed_to_erpm_gain_; double current_steering_angle(0.0), current_angular_velocity(0.0); if (use_servo_cmd_) { current_steering_angle = ( last_servo_cmd_->data - steering_to_servo_offset_ ) / steering_to_servo_gain_; current_angular_velocity = current_speed * tan(current_steering_angle) / wheelbase_; } // use current state as last state if this is our first time here if (!last_state_) last_state_ = state; // calc elapsed time ros::Duration dt = state->header.stamp - last_state_->header.stamp; /** @todo could probably do better propigating odometry, e.g. trapezoidal integration */ // propigate odometry double x_dot = current_speed * cos(yaw_); double y_dot = current_speed * sin(yaw_); x_ += x_dot * dt.toSec(); y_ += y_dot * dt.toSec(); if (use_servo_cmd_) yaw_ += current_angular_velocity * dt.toSec(); // save state for next time last_state_ = state; // publish odometry message nav_msgs::Odometry::Ptr odom(new nav_msgs::Odometry); odom->header.frame_id = odom_frame_; odom->header.stamp = state->header.stamp; odom->child_frame_id = base_frame_; // Position odom->pose.pose.position.x = x_; odom->pose.pose.position.y = y_; odom->pose.pose.orientation.x = 0.0; odom->pose.pose.orientation.y = 0.0; odom->pose.pose.orientation.z = sin(yaw_/2.0); odom->pose.pose.orientation.w = cos(yaw_/2.0); // Position uncertainty /** @todo Think about position uncertainty, perhaps get from parameters? 
*/ odom->pose.covariance[0] = 0.2; ///< x odom->pose.covariance[7] = 0.2; ///< y odom->pose.covariance[35] = 0.4; ///< yaw // Velocity ("in the coordinate frame given by the child_frame_id") odom->twist.twist.linear.x = current_speed; odom->twist.twist.linear.y = 0.0; odom->twist.twist.angular.z = current_angular_velocity; // Velocity uncertainty /** @todo Think about velocity uncertainty */ if (publish_tf_) { geometry_msgs::TransformStamped tf; tf.header.frame_id = odom_frame_; tf.child_frame_id = base_frame_; tf.header.stamp = ros::Time::now(); tf.transform.translation.x = x_; tf.transform.translation.y = y_; tf.transform.translation.z = 0.0; tf.transform.rotation = odom->pose.pose.orientation; if (ros::ok()) { tf_pub_->sendTransform(tf); } } if (ros::ok()) { odom_pub_.publish(odom); } }
/**
 * @brief data_player node entry point.
 *
 * Replays sensor data stored in an RTAB-Map database: frames are read one by
 * one with rtabmap::DBReader and republished as standard ROS topics (RGB-D or
 * stereo images with camera info, laser scans, odometry and TF). Topics are
 * advertised lazily, only once the corresponding data type is actually found
 * in the database. "pause"/"resume" services control playback.
 *
 * @return 0 on normal termination, -1 on bad parameters or unreadable database.
 */
int main(int argc, char** argv)
{
	ros::init(argc, argv, "data_player");

	//ULogger::setType(ULogger::kTypeConsole);
	//ULogger::setLevel(ULogger::kDebug);
	//ULogger::setEventLevel(ULogger::kWarning);

	ros::NodeHandle nh;
	ros::NodeHandle pnh("~");

	// Defaults; each is overridable by the private ROS parameter of the same name.
	std::string frameId = "base_link";
	std::string odomFrameId = "odom";
	std::string cameraFrameId = "camera_optical_link";
	std::string scanFrameId = "base_laser_link";
	double rate = -1.0f;
	std::string databasePath = "";
	bool publishTf = true;
	int startId = 0;

	pnh.param("frame_id", frameId, frameId);
	pnh.param("odom_frame_id", odomFrameId, odomFrameId);
	pnh.param("camera_frame_id", cameraFrameId, cameraFrameId);
	pnh.param("scan_frame_id", scanFrameId, scanFrameId);
	pnh.param("rate", rate, rate); // Set -1 to use database stamps
	pnh.param("database", databasePath, databasePath);
	pnh.param("publish_tf", publishTf, publishTf);
	pnh.param("start_id", startId, startId);

	// based on URG-04LX
	double scanHeight, scanAngleMin, scanAngleMax, scanAngleIncrement, scanTime, scanRangeMin, scanRangeMax;
	pnh.param<double>("scan_height", scanHeight, 0.3);
	pnh.param<double>("scan_angle_min", scanAngleMin, -M_PI / 2.0);
	pnh.param<double>("scan_angle_max", scanAngleMax, M_PI / 2.0);
	pnh.param<double>("scan_angle_increment", scanAngleIncrement, M_PI / 360.0);
	pnh.param<double>("scan_time", scanTime, 1.0 / 10.0);
	pnh.param<double>("scan_range_min", scanRangeMin, 0.02);
	pnh.param<double>("scan_range_max", scanRangeMax, 6.0);

	ROS_INFO("frame_id = %s", frameId.c_str());
	ROS_INFO("odom_frame_id = %s", odomFrameId.c_str());
	ROS_INFO("camera_frame_id = %s", cameraFrameId.c_str());
	ROS_INFO("scan_frame_id = %s", scanFrameId.c_str());
	ROS_INFO("rate = %f", rate);
	ROS_INFO("publish_tf = %s", publishTf?"true":"false");
	ROS_INFO("start_id = %d", startId);

	if(databasePath.empty())
	{
		ROS_ERROR("Parameter \"database\" must be set (path to a RTAB-Map database).");
		return -1;
	}

	// Expand "~" to the home directory and make a relative path absolute.
	databasePath = uReplaceChar(databasePath, '~', UDirectory::homeDir());
	if(databasePath.size() && databasePath.at(0) != '/')
	{
		databasePath = UDirectory::currentDir(true) + databasePath;
	}
	ROS_INFO("database = %s", databasePath.c_str());

	rtabmap::DBReader reader(databasePath, rate, false, false, false, startId);
	if(!reader.init())
	{
		ROS_ERROR("Cannot open database \"%s\".", databasePath.c_str());
		return -1;
	}

	// Playback control; the callbacks toggle the file-level "paused" flag
	// polled at the bottom of the main loop.
	ros::ServiceServer pauseSrv = pnh.advertiseService("pause", pauseCallback);
	ros::ServiceServer resumeSrv = pnh.advertiseService("resume", resumeCallback);

	// All publishers are default-constructed (unadvertised) here and
	// advertised lazily below, once the matching data type shows up.
	image_transport::ImageTransport it(nh);
	image_transport::Publisher imagePub;
	image_transport::Publisher rgbPub;
	image_transport::Publisher depthPub;
	image_transport::Publisher leftPub;
	image_transport::Publisher rightPub;
	ros::Publisher rgbCamInfoPub;
	ros::Publisher depthCamInfoPub;
	ros::Publisher leftCamInfoPub;
	ros::Publisher rightCamInfoPub;
	ros::Publisher odometryPub;
	ros::Publisher scanPub;
	tf2_ros::TransformBroadcaster tfBroadcaster;

	// Prime the loop with the first frame; a zero data id means "no more data".
	UTimer timer;
	rtabmap::CameraInfo info;
	rtabmap::SensorData data = reader.takeImage(&info);
	rtabmap::OdometryEvent odom(data, info.odomPose, info.odomCovariance);
	double acquisitionTime = timer.ticks(); // NOTE(review): written but never read in this function
	while(ros::ok() && odom.data().id())
	{
		ROS_INFO("Reading sensor data %d...", odom.data().id());

		// All messages of this iteration share the current wall-clock stamp.
		ros::Time time = ros::Time::now();

		sensor_msgs::CameraInfo camInfoA; //rgb or left
		sensor_msgs::CameraInfo camInfoB; //depth or right

		// Initialize K/R to identity and P to a projection with unit scale,
		// overwritten below when the database holds real calibration.
		camInfoA.K.assign(0);
		camInfoA.K[0] = camInfoA.K[4] = camInfoA.K[8] = 1;
		camInfoA.R.assign(0);
		camInfoA.R[0] = camInfoA.R[4] = camInfoA.R[8] = 1;
		camInfoA.P.assign(0);
		camInfoA.P[10] = 1;

		camInfoA.header.frame_id = cameraFrameId;
		camInfoA.header.stamp = time;
		camInfoB = camInfoA;

		// type: -1 = plain image only, 0 = RGB-D frame, 1 = stereo pair.
		int type = -1;
		if(!odom.data().depthRaw().empty() &&
		   (odom.data().depthRaw().type() == CV_32FC1 || odom.data().depthRaw().type() == CV_16UC1))
		{
			if(odom.data().cameraModels().size() > 1)
			{
				ROS_WARN("Multi-cameras detected in database but this node cannot send multi-images yet...");
			}
			else
			{
				//depth
				if(odom.data().cameraModels().size())
				{
					camInfoA.D.resize(5,0);

					// Fill both K (3x3) and P (3x4) from the mono camera model:
					// fx -> [0], fy -> K[4]/P[5], cx -> [2], cy -> K[5]/P[6].
					camInfoA.P[0] = odom.data().cameraModels()[0].fx();
					camInfoA.K[0] = odom.data().cameraModels()[0].fx();
					camInfoA.P[5] = odom.data().cameraModels()[0].fy();
					camInfoA.K[4] = odom.data().cameraModels()[0].fy();
					camInfoA.P[2] = odom.data().cameraModels()[0].cx();
					camInfoA.K[2] = odom.data().cameraModels()[0].cx();
					camInfoA.P[6] = odom.data().cameraModels()[0].cy();
					camInfoA.K[5] = odom.data().cameraModels()[0].cy();

					// Depth camera info mirrors the RGB one (registered depth).
					camInfoB = camInfoA;
				}

				type=0;

				// Lazy advertising: an empty topic name means "not advertised yet".
				if(rgbPub.getTopic().empty()) rgbPub = it.advertise("rgb/image", 1);
				if(depthPub.getTopic().empty()) depthPub = it.advertise("depth_registered/image", 1);
				if(rgbCamInfoPub.getTopic().empty()) rgbCamInfoPub = nh.advertise<sensor_msgs::CameraInfo>("rgb/camera_info", 1);
				if(depthCamInfoPub.getTopic().empty()) depthCamInfoPub = nh.advertise<sensor_msgs::CameraInfo>("depth_registered/camera_info", 1);
			}
		}
		else if(!odom.data().rightRaw().empty() && odom.data().rightRaw().type() == CV_8U)
		{
			//stereo
			if(odom.data().stereoCameraModel().isValidForProjection())
			{
				camInfoA.D.resize(8,0);

				camInfoA.P[0] = odom.data().stereoCameraModel().left().fx();
				camInfoA.K[0] = odom.data().stereoCameraModel().left().fx();
				camInfoA.P[5] = odom.data().stereoCameraModel().left().fy();
				camInfoA.K[4] = odom.data().stereoCameraModel().left().fy();
				camInfoA.P[2] = odom.data().stereoCameraModel().left().cx();
				camInfoA.K[2] = odom.data().stereoCameraModel().left().cx();
				camInfoA.P[6] = odom.data().stereoCameraModel().left().cy();
				camInfoA.K[5] = odom.data().stereoCameraModel().left().cy();

				camInfoB = camInfoA;
				camInfoB.P[3] = odom.data().stereoCameraModel().right().Tx(); // Right_Tx = -baseline*fx
			}

			type=1;

			if(leftPub.getTopic().empty()) leftPub = it.advertise("left/image", 1);
			if(rightPub.getTopic().empty()) rightPub = it.advertise("right/image", 1);
			if(leftCamInfoPub.getTopic().empty()) leftCamInfoPub = nh.advertise<sensor_msgs::CameraInfo>("left/camera_info", 1);
			if(rightCamInfoPub.getTopic().empty()) rightCamInfoPub = nh.advertise<sensor_msgs::CameraInfo>("right/camera_info", 1);
		}
		else
		{
			// Neither depth nor stereo: publish the raw image only.
			if(imagePub.getTopic().empty()) imagePub = it.advertise("image", 1);
		}

		// Image sizes come straight from the raw matrices of this frame.
		camInfoA.height = odom.data().imageRaw().rows;
		camInfoA.width = odom.data().imageRaw().cols;
		camInfoB.height = odom.data().depthOrRightRaw().rows;
		camInfoB.width = odom.data().depthOrRightRaw().cols;

		if(!odom.data().laserScanRaw().empty())
		{
			if(scanPub.getTopic().empty()) scanPub = nh.advertise<sensor_msgs::LaserScan>("scan", 1);
		}

		// publish transforms first
		if(publishTf)
		{
			// base -> camera: local transform of the (single) camera model.
			rtabmap::Transform localTransform;
			if(odom.data().cameraModels().size() == 1)
			{
				localTransform = odom.data().cameraModels()[0].localTransform();
			}
			else if(odom.data().stereoCameraModel().isValidForProjection())
			{
				localTransform = odom.data().stereoCameraModel().left().localTransform();
			}
			if(!localTransform.isNull())
			{
				geometry_msgs::TransformStamped baseToCamera;
				baseToCamera.child_frame_id = cameraFrameId;
				baseToCamera.header.frame_id = frameId;
				baseToCamera.header.stamp = time;
				rtabmap_ros::transformToGeometryMsg(localTransform, baseToCamera.transform);
				tfBroadcaster.sendTransform(baseToCamera);
			}

			// odom -> base: the recorded odometry pose, when available.
			if(!odom.pose().isNull())
			{
				geometry_msgs::TransformStamped odomToBase;
				odomToBase.child_frame_id = frameId;
				odomToBase.header.frame_id = odomFrameId;
				odomToBase.header.stamp = time;
				rtabmap_ros::transformToGeometryMsg(odom.pose(), odomToBase.transform);
				tfBroadcaster.sendTransform(odomToBase);
			}

			// base -> laser: fixed transform, raised by scan_height on z only.
			if(!scanPub.getTopic().empty())
			{
				geometry_msgs::TransformStamped baseToLaserScan;
				baseToLaserScan.child_frame_id = scanFrameId;
				baseToLaserScan.header.frame_id = frameId;
				baseToLaserScan.header.stamp = time;
				rtabmap_ros::transformToGeometryMsg(rtabmap::Transform(0,0,scanHeight,0,0,0), baseToLaserScan.transform);
				tfBroadcaster.sendTransform(baseToLaserScan);
			}
		}

		if(!odom.pose().isNull())
		{
			if(odometryPub.getTopic().empty()) odometryPub = nh.advertise<nav_msgs::Odometry>("odom", 1);

			if(odometryPub.getNumSubscribers())
			{
				nav_msgs::Odometry odomMsg;
				odomMsg.child_frame_id = frameId;
				odomMsg.header.frame_id = odomFrameId;
				odomMsg.header.stamp = time;
				rtabmap_ros::transformToPoseMsg(odom.pose(), odomMsg.pose.pose);
				// Covariance is copied raw: a 6x6 CV_64FC1 matrix maps onto the
				// fixed 36-double array of the message.
				UASSERT(odomMsg.pose.covariance.size() == 36 &&
						odom.covariance().total() == 36 &&
						odom.covariance().type() == CV_64FC1);
				memcpy(odomMsg.pose.covariance.begin(), odom.covariance().data, 36*sizeof(double));
				odometryPub.publish(odomMsg);
			}
		}

		// Camera info for whichever mode (RGB-D or stereo) was detected above.
		if(type >= 0)
		{
			if(rgbCamInfoPub.getNumSubscribers() && type == 0)
			{
				rgbCamInfoPub.publish(camInfoA);
			}
			if(leftCamInfoPub.getNumSubscribers() && type == 1)
			{
				leftCamInfoPub.publish(camInfoA);
			}
			if(depthCamInfoPub.getNumSubscribers() && type == 0)
			{
				depthCamInfoPub.publish(camInfoB);
			}
			if(rightCamInfoPub.getNumSubscribers() && type == 1)
			{
				rightCamInfoPub.publish(camInfoB);
			}
		}

		// Main image: published on "image", "rgb/image" or "left/image"
		// depending on the detected frame type.
		if(imagePub.getNumSubscribers() || rgbPub.getNumSubscribers() || leftPub.getNumSubscribers())
		{
			cv_bridge::CvImage img;
			if(odom.data().imageRaw().channels() == 1)
			{
				img.encoding = sensor_msgs::image_encodings::MONO8;
			}
			else
			{
				img.encoding = sensor_msgs::image_encodings::BGR8;
			}
			img.image = odom.data().imageRaw();
			sensor_msgs::ImagePtr imageRosMsg = img.toImageMsg();
			imageRosMsg->header.frame_id = cameraFrameId;
			imageRosMsg->header.stamp = time;

			if(imagePub.getNumSubscribers())
			{
				imagePub.publish(imageRosMsg);
			}
			if(rgbPub.getNumSubscribers() && type == 0)
			{
				rgbPub.publish(imageRosMsg);
			}
			if(leftPub.getNumSubscribers() && type == 1)
			{
				leftPub.publish(imageRosMsg);
				leftCamInfoPub.publish(camInfoA);
			}
		}

		if(depthPub.getNumSubscribers() && !odom.data().depthRaw().empty() && type==0)
		{
			cv_bridge::CvImage img;
			// Encoding follows the stored depth type: float meters or uint16 mm.
			if(odom.data().depthRaw().type() == CV_32FC1)
			{
				img.encoding = sensor_msgs::image_encodings::TYPE_32FC1;
			}
			else
			{
				img.encoding = sensor_msgs::image_encodings::TYPE_16UC1;
			}
			img.image = odom.data().depthRaw();
			sensor_msgs::ImagePtr imageRosMsg = img.toImageMsg();
			imageRosMsg->header.frame_id = cameraFrameId;
			imageRosMsg->header.stamp = time;

			depthPub.publish(imageRosMsg);
			depthCamInfoPub.publish(camInfoB);
		}

		if(rightPub.getNumSubscribers() && !odom.data().rightRaw().empty() && type==1)
		{
			cv_bridge::CvImage img;
			img.encoding = sensor_msgs::image_encodings::MONO8;
			img.image = odom.data().rightRaw();
			sensor_msgs::ImagePtr imageRosMsg = img.toImageMsg();
			imageRosMsg->header.frame_id = cameraFrameId;
			imageRosMsg->header.stamp = time;

			rightPub.publish(imageRosMsg);
			rightCamInfoPub.publish(camInfoB);
		}

		if(scanPub.getNumSubscribers() && !odom.data().laserScanRaw().empty())
		{
			//inspired from pointcloud_to_laserscan package
			sensor_msgs::LaserScan msg;
			msg.header.frame_id = scanFrameId;
			msg.header.stamp = time;
			msg.angle_min = scanAngleMin;
			msg.angle_max = scanAngleMax;
			msg.angle_increment = scanAngleIncrement;
			msg.time_increment = 0.0;
			msg.scan_time = scanTime;
			msg.range_min = scanRangeMin;
			msg.range_max = scanRangeMax;

			// 0.0 is used as the "no return" sentinel in the ranges array.
			uint32_t rangesSize = std::ceil((msg.angle_max - msg.angle_min) / msg.angle_increment);
			msg.ranges.assign(rangesSize, 0.0);

			// Re-project each 2D scan point (x, y) into an angular bin, keeping
			// the closest return per bin.
			const cv::Mat & scan = odom.data().laserScanRaw();
			UASSERT(scan.type() == CV_32FC2 || scan.type() == CV_32FC3);
			UASSERT(scan.rows == 1);
			for (int i=0; i<scan.cols; ++i)
			{
				// NOTE(review): reads a Vec2f even when the mat is CV_32FC3 —
				// presumably only x,y are needed; verify element stride is safe.
				cv::Vec2f pos = scan.at<cv::Vec2f>(i);
				double range = hypot(pos[0], pos[1]);
				if (range >= scanRangeMin && range <=scanRangeMax)
				{
					double angle = atan2(pos[1], pos[0]);
					if (angle >= msg.angle_min && angle <= msg.angle_max)
					{
						int index = (angle - msg.angle_min) / msg.angle_increment;
						if (index>=0 && index<rangesSize && (range < msg.ranges[index] || msg.ranges[index]==0))
						{
							msg.ranges[index] = range;
						}
					}
				}
			}

			scanPub.publish(msg);
		}

		// User data of the form "GOAL:<id>" triggers rtabmap's set_goal service.
		if(odom.data().userDataRaw().type() == CV_8SC1 &&
		   odom.data().userDataRaw().cols >= 7 && // including null str ending
		   odom.data().userDataRaw().rows == 1 &&
		   memcmp(odom.data().userDataRaw().data, "GOAL:", 5) == 0)
		{
			//GOAL format detected, remove it from the user data and send it as goal event
			std::string goalStr = (const char *)odom.data().userDataRaw().data;
			if(!goalStr.empty())
			{
				std::list<std::string> strs = uSplit(goalStr, ':');
				if(strs.size() == 2)
				{
					int goalId = atoi(strs.rbegin()->c_str());
					if(goalId > 0)
					{
						ROS_WARN("Goal %d detected, calling rtabmap's set_goal service!", goalId);
						rtabmap_ros::SetGoal setGoalSrv;
						setGoalSrv.request.node_id = goalId;
						setGoalSrv.request.node_label = "";
						if(!ros::service::call("set_goal", setGoalSrv))
						{
							ROS_ERROR("Can't call \"set_goal\" service");
						}
					}
				}
			}
		}

		ros::spinOnce();

		// Busy-wait (with sleep) while the pause service has been called;
		// spinOnce keeps the resume service responsive.
		while(ros::ok() && paused)
		{
			uSleep(100);
			ros::spinOnce();
		}

		// Fetch the next frame; DBReader paces itself according to "rate"
		// (or the database timestamps when rate < 0).
		timer.restart();
		info = rtabmap::CameraInfo();
		data = reader.takeImage(&info);
		odom = rtabmap::OdometryEvent(data, info.odomPose, info.odomCovariance);
		acquisitionTime = timer.ticks();
	}

	return 0;
}