/**
 * Type-erased entry point for message_to_tf: inspects the runtime data type
 * of a ShapeShifter message and forwards it to the matching typed callback
 * (odometry, pose or IMU).
 *
 * Fix: topic_tools::ShapeShifter::instantiate<T>() returns a null ConstPtr
 * when the serialized buffer cannot be deserialized as T (e.g. the type
 * string matches but the MD5 checksum does not). The original code
 * dereferenced the pointer unconditionally, which is undefined behavior on
 * such a mismatch; each result is now checked before dereferencing.
 *
 * @param input  generic message received by the multiplexing subscriber
 */
void multiCallback(topic_tools::ShapeShifter const &input) {
  if (input.getDataType() == "nav_msgs/Odometry") {
    nav_msgs::Odometry::ConstPtr odom = input.instantiate<nav_msgs::Odometry>();
    if (odom) {
      odomCallback(*odom);
    } else {
      // Null on deserialization/MD5 mismatch; report instead of crashing.
      ROS_ERROR_THROTTLE(1.0, "message_to_tf failed to instantiate a nav_msgs/Odometry message");
    }
    return;
  }
  if (input.getDataType() == "geometry_msgs/PoseStamped") {
    geometry_msgs::PoseStamped::ConstPtr pose = input.instantiate<geometry_msgs::PoseStamped>();
    if (pose) {
      poseCallback(*pose);
    } else {
      ROS_ERROR_THROTTLE(1.0, "message_to_tf failed to instantiate a geometry_msgs/PoseStamped message");
    }
    return;
  }
  if (input.getDataType() == "sensor_msgs/Imu") {
    sensor_msgs::Imu::ConstPtr imu = input.instantiate<sensor_msgs::Imu>();
    if (imu) {
      imuCallback(*imu);
    } else {
      ROS_ERROR_THROTTLE(1.0, "message_to_tf failed to instantiate a sensor_msgs/Imu message");
    }
    return;
  }
  // Unknown type: keep the original (byte-identical) diagnostic for callers/tools that grep logs.
  ROS_ERROR_THROTTLE(1.0, "message_to_tf received a %s message. Supported message types: nav_msgs/Odometry geometry_msgs/PoseStamped sensor_msgs/Imu", input.getDataType().c_str());
}
/**
 * Replays a bagfile, simulating live arrival of its messages.
 *
 * Messages from the RGB, depth, camera-info, point-cloud and odometry topics
 * are buffered in per-topic FIFO queues and only forwarded to the internal
 * message-filter subscribers once a /tf message with a later timestamp has
 * been fed to the transformer, so that TF lookups made by the processing
 * pipeline can succeed. Progress is reported to the GUI via Q_EMIT.
 *
 * NOTE(review): the function body continues beyond this excerpt (the
 * enclosing scope is closed here, but the function's closing brace and any
 * post-bag processing are outside the visible region).
 *
 * @param filename   path of the bagfile to open (passed by value)
 * @param visua_tpc  topic name of the visual (RGB) image stream
 * @param depth_tpc  topic name of the depth image stream
 * @param points_tpc topic name of the point cloud stream
 * @param cinfo_tpc  topic name of the camera info stream
 * @param odom_tpc   topic name of the odometry stream
 * @param tf_tpc     topic name of the tf stream
 */
void OpenNIListener::processBagfile(std::string filename,
                                    const std::string& visua_tpc,
                                    const std::string& depth_tpc,
                                    const std::string& points_tpc,
                                    const std::string& cinfo_tpc,
                                    const std::string& odom_tpc,
                                    const std::string& tf_tpc)
{
  ROS_INFO_NAMED("OpenNIListener", "Loading Bagfile %s", filename.c_str());
  Q_EMIT iamBusy(4, "Loading Bagfile", 0);
  { //bag will be destructed after this block (hopefully frees memory for the optimizer)
    rosbag::Bag input_bag;
    try{
      input_bag.open(filename, rosbag::bagmode::Read);
    // NOTE(review): exception is caught by value; prefer
    // catch(const rosbag::BagIOException&) to avoid slicing/copying.
    } catch(rosbag::BagIOException ex) {
      ROS_FATAL("Opening Bagfile %s failed: %s Quitting!", filename.c_str(), ex.what());
      ros::shutdown();
      return;
    }
    ROS_INFO_NAMED("OpenNIListener", "Opened Bagfile %s", filename.c_str());

    // Restrict the view to the topics we actually consume.
    std::vector<std::string> topics;
    topics = createTopicsVector(visua_tpc, depth_tpc, points_tpc, cinfo_tpc, odom_tpc, tf_tpc);
    rosbag::View view(input_bag, rosbag::TopicQuery(topics));
    Q_EMIT iamBusy(4, "Processing Bagfile", view.size());

    // Simulate sending of the messages in the bagfile:
    // per-topic FIFO buffers, flushed only up to the latest TF timestamp seen.
    std::deque<sensor_msgs::Image::ConstPtr> vis_images;
    std::deque<sensor_msgs::Image::ConstPtr> dep_images;
    std::deque<sensor_msgs::CameraInfo::ConstPtr> cam_infos;
    std::deque<sensor_msgs::PointCloud2::ConstPtr> pointclouds;
    std::deque<nav_msgs::OdometryConstPtr> odometries;
    //ros::Time last_tf=ros::Time(0);
    // TIME_MIN acts as a "no TF seen yet" sentinel (checked below).
    ros::Time last_tf=ros::TIME_MIN;
    // NOTE(review): tf_available is set when a tf message is seen but is
    // never read within this excerpt — possibly used past the visible region.
    bool tf_available = false;
    int counter = 0;
    BOOST_FOREACH(rosbag::MessageInstance const m, view)
    {
      Q_EMIT progress(4, "Processing Bagfile", counter++);
      // Busy-wait (10 µs granularity) while the GUI pause flag is set;
      // bail out immediately if ROS is shutting down.
      do{
        usleep(10);
        if(!ros::ok()) return;
      } while(pause_);
      ROS_INFO_NAMED("OpenNIListener", "Processing %s of type %s with timestamp %f", m.getTopic().c_str(), m.getDataType().c_str(), m.getTime().toSec());

      // Topics are matched with and without a leading slash, since the
      // configured name may be absolute while the recorded one is relative.
      if (m.getTopic() == odom_tpc || ("/" + m.getTopic() == odom_tpc)) {
        // NOTE(review): duplicate of the log statement just above.
        ROS_INFO_NAMED("OpenNIListener", "Processing %s of type %s with timestamp %f", m.getTopic().c_str(), m.getDataType().c_str(), m.getTime().toSec());
        nav_msgs::OdometryConstPtr odommsg = m.instantiate<nav_msgs::Odometry>();
        // instantiate() returns null on a type mismatch; skip such messages.
        if (odommsg) odometries.push_back(odommsg);
      }
      else if (m.getTopic() == visua_tpc || ("/" + m.getTopic() == visua_tpc)) {
        sensor_msgs::Image::ConstPtr rgb_img = m.instantiate<sensor_msgs::Image>();
        if (rgb_img) vis_images.push_back(rgb_img);
        ROS_DEBUG("Found Message of %s", visua_tpc.c_str());
      }
      else if (m.getTopic() == depth_tpc || ("/" + m.getTopic() == depth_tpc)) {
        sensor_msgs::Image::ConstPtr depth_img = m.instantiate<sensor_msgs::Image>();
        //if (depth_img) depth_img_sub_->newMessage(depth_img);
        if (depth_img) dep_images.push_back(depth_img);
        ROS_DEBUG("Found Message of %s", depth_tpc.c_str());
      }
      else if (m.getTopic() == points_tpc || ("/" + m.getTopic() == points_tpc)) {
        sensor_msgs::PointCloud2::ConstPtr pointcloud = m.instantiate<sensor_msgs::PointCloud2>();
        //if (cam_info) cam_info_sub_->newMessage(cam_info);
        if (pointcloud) pointclouds.push_back(pointcloud);
        ROS_DEBUG("Found Message of %s", points_tpc.c_str());
      }
      else if (m.getTopic() == cinfo_tpc || ("/" + m.getTopic() == cinfo_tpc)) {
        sensor_msgs::CameraInfo::ConstPtr cam_info = m.instantiate<sensor_msgs::CameraInfo>();
        //if (cam_info) cam_info_sub_->newMessage(cam_info);
        if (cam_info) cam_infos.push_back(cam_info);
        ROS_DEBUG("Found Message of %s", cinfo_tpc.c_str());
      }
      else if (m.getTopic() == tf_tpc|| ("/" + m.getTopic() == tf_tpc)){
        tf::tfMessage::ConstPtr tf_msg = m.instantiate<tf::tfMessage>();
        if (tf_msg) {
          tf_available = true;
          // TF is fed straight into the transformer (not buffered), so that
          // lookups succeed once the buffered sensor messages are flushed.
          addTFMessageDirectlyToTransformer(tf_msg, tflistener_);
          // Flush threshold = stamp of the first transform in this message,
          // backdated by 0.1 s as a safety margin against interpolation
          // failures at the buffer boundary.
          last_tf = tf_msg->transforms[0].header.stamp;
          last_tf -= ros::Duration(0.1);
        }
      }
      if (last_tf == ros::TIME_MIN){//If not a valid time yet, set to something before first message's stamp
        last_tf = m.getTime();
        last_tf -= ros::Duration(0.1);
      }
      //last_tf = m.getTime();//FIXME: No TF -> no processing

      // Flush every buffer up to (but excluding) the TF threshold, forwarding
      // each message to its message-filter subscriber / callback in stamp order.
      while(!odometries.empty() && odometries.front()->header.stamp < last_tf){
          ROS_INFO_NAMED("OpenNIListener", "Sending Odometry message");
          odomCallback(odometries.front());
          odometries.pop_front();
      }
      while(!vis_images.empty() && vis_images.front()->header.stamp < last_tf){
          ROS_INFO_NAMED("OpenNIListener", "Forwarding buffered visual message from time %12f", vis_images.front()->header.stamp.toSec());
          rgb_img_sub_->newMessage(vis_images.front());
          vis_images.pop_front();
      }
      while(!dep_images.empty() && dep_images.front()->header.stamp < last_tf){
          ROS_INFO_NAMED("OpenNIListener", "Forwarding buffered depth message from time %12f", dep_images.front()->header.stamp.toSec());
          depth_img_sub_->newMessage(dep_images.front());
          dep_images.pop_front();
      }
      while(!cam_infos.empty() && cam_infos.front()->header.stamp < last_tf){
          ROS_INFO_NAMED("OpenNIListener", "Forwarding buffered cam info message from time %12f", cam_infos.front()->header.stamp.toSec());
          cam_info_sub_->newMessage(cam_infos.front());
          cam_infos.pop_front();
      }
      while(!pointclouds.empty() && pointclouds.front()->header.stamp < last_tf){
          pc_sub_->newMessage(pointclouds.front());
          pointclouds.pop_front();
      }
    }
    ROS_WARN_NAMED("eval", "Finished processing of Bagfile");
    input_bag.close();
  }