// Main callback controlling the output rate.
void jointStateCallback(const sensor_msgs::JointStateConstPtr& msg)
{
  const sensor_msgs::JointState* data = msg.get();
  int ps = data->position.size();
  int name = data->name.size();

  // 15 is not fixed (la,ra,torso)
  int joint_state_size;
  if (simulation)
    joint_state_size = 21;
  else
  {
    // FOR BOXY
    // joint_state_size = 15;
    // FOR LASA
    joint_state_size = 7;
  }

  // Check joint state message size
  if (name != joint_state_size)
    return;

  // Collect the right joints by name
  int counter = 0;
  for (unsigned int i = 0; i < ps; ++i)
  {
    sprintf(buf, "right_arm_%d_joint", counter);
    if (!strcmp(buf, data->name[i].c_str()))
    {
      read_jpos[counter++] = data->position[i];
    }
    if (counter == numdof)
    {
      break;
    }
  }

  // If number of joints not as expected!
  if (counter != numdof)
  {
    ROS_WARN_STREAM_THROTTLE(1, "Only found " << counter << " DOF in message. Expected " << numdof);
    isJointOkay = false;
  }
  else
  {
    isJointOkay = true;
    if (isAllOkay)
    {
      // All OK. Compute and send joint velocity
      computeJointImpedance(joint_stiff, joint_damp);
      computeJointVelocity(joint_vel);
      if (!bJointAction)
      {
        sendJointMessage(joint_vel, joint_stiff, joint_damp);
        bJointAction = false;
        bPosCommand = false;
      }
      else
        ROS_WARN("Base Joint Action controlling robot, NOT SENDING JOINT COMMAND");
    }
    else
      ROS_WARN_STREAM_THROTTLE(1, "Not computing Joint Velocities");
  }
}
bool tf2::lookupTransformSafe(const tf2_ros::Buffer& tf_buffer,
                              const std::string& targetFrame,
                              const std::string& sourceFrame,
                              const ros::Time& time,
                              tf2::Transform& sourceToTarget)
{
  bool retVal = true;

  // First try to transform the data at the requested time
  try
  {
    tf2::fromMsg(tf_buffer.lookupTransform(targetFrame, sourceFrame, time).transform, sourceToTarget);
  }
  catch (tf2::TransformException& ex)
  {
    // The issue might be that the transforms that are available are not close
    // enough temporally to be used. In that case, just use the latest available
    // transform and warn the user.
    try
    {
      geometry_msgs::TransformStamped transformStamped =
          tf_buffer.lookupTransform(targetFrame, sourceFrame, ros::Time(0));
      tf2::fromMsg(transformStamped.transform, sourceToTarget);

      ROS_WARN_STREAM_THROTTLE(2.0, "Transform from " << sourceFrame << " to " << targetFrame <<
                               " was unavailable for the time requested (" << time << "). " <<
                               "Using nearest at time " << transformStamped.header.stamp <<
                               " instead (" << transformStamped.header.stamp - time << ").");
    }
    catch (tf2::TransformException& ex)
    {
      ROS_WARN_STREAM_THROTTLE(2.0, "Could not obtain transform from " << sourceFrame << " to " <<
                               targetFrame << ". Error was " << ex.what() << "\n");
      retVal = false;
    }
  }

  // Transforming from a frame id to itself can fail when the tf tree isn't
  // being broadcast (e.g., for some bag files). This is the only failure that
  // would throw an exception, so check for this situation before giving up.
  if (not retVal)
  {
    if (targetFrame == sourceFrame)
    {
      sourceToTarget.setIdentity();
      retVal = true;
    }
  }

  return retVal;
}
void TrackerViewer::timerCallback()
{
  const unsigned threshold = 3 * countAll_;
  if (countImages_ < threshold ||
      countCameraInfo_ < threshold ||
      countTrackingResult_ < threshold ||
      countMovingEdgeSites_ < threshold ||
      countKltPoints_ < threshold)
  {
    boost::format fmt
      ("[visp_tracker] Low number of synchronized tuples received.\n"
       "Images: %d\n"
       "Camera info: %d\n"
       "Tracking result: %d\n"
       "Moving edge sites: %d\n"
       "Synchronized tuples: %d\n"
       "Possible issues:\n"
       "\t* The network is too slow.");
    fmt % countImages_ % countCameraInfo_ % countTrackingResult_
        % countMovingEdgeSites_ % countAll_;
    ROS_WARN_STREAM_THROTTLE(10, fmt.str());
  }
}
void QualisysDriver::run()
{
  prt_packet = port_protocol.GetRTPacket();
  CRTPacket::EPacketType e_type;
  port_protocol.GetCurrentFrame(CRTProtocol::Component6dEuler);

  if (port_protocol.ReceiveRTPacket(e_type, true))
  {
    switch (e_type)
    {
      // Case 1 - sHeader.nType 0 indicates an error
      case CRTPacket::PacketError:
        ROS_ERROR_STREAM_THROTTLE(
            1, "Error when streaming frames: " << port_protocol.GetRTPacket()->GetErrorString());
        break;

      // Case 2 - No more data
      case CRTPacket::PacketNoMoreData:
        ROS_WARN_STREAM_THROTTLE(1, "No more data");
        break;

      // Case 3 - Data received
      case CRTPacket::PacketData:
        handleFrame();
        break;

      default:
        ROS_ERROR_THROTTLE(1, "Unknown CRTPacket case");
        break;
    }
  }

  return;
}
void DiffDrivePoseControllerROS::spinOnce()
{
  if (this->getState())
  {
    ROS_DEBUG_STREAM_THROTTLE(1.0, "Controller spinning. [" << name_ << "]");
    // determine pose difference in polar coordinates
    if (!getPoseDiff())
    {
      ROS_WARN_STREAM_THROTTLE(1.0, "Getting pose difference failed. Skipping control loop. [" << name_ << "]");
      return;
    }
    // determine controller output (v, w) and check if goal is reached
    step();
    // set control output (v, w)
    setControlOutput();
    // Logging
    ROS_DEBUG_STREAM_THROTTLE(1.0, "Current state: [" << name_ << "]");
    ROS_DEBUG_STREAM_THROTTLE(1.0, "r = " << r_ << ", theta = " << theta_ << ", delta = " << delta_
                              << " [" << name_ << "]");
    ROS_DEBUG_STREAM_THROTTLE(1.0, "cur = " << cur_ << ", v = " << v_ << ", w = " << w_
                              << " [" << name_ << "]");
  }
  else
  {
    ROS_DEBUG_STREAM_THROTTLE(3.0, "Controller is disabled. Idling ... [" << name_ << "]");
  }
}
void tf2::lookupTransform(const tf2_ros::Buffer& tf_buffer,
                          const std::string& targetFrame,
                          const std::string& sourceFrame,
                          const ros::Time& time,
                          tf2::Transform& sourceToTarget)
{
  // First try to transform the data at the requested time
  try
  {
    tf2::fromMsg(tf_buffer.lookupTransform(targetFrame, sourceFrame, time).transform, sourceToTarget);
  }
  catch (tf2::TransformException& ex)
  {
    // The issue might be that the transforms that are available are not close
    // enough temporally to be used. In that case, just use the latest available
    // transform and warn the user.
    // If this also fails, then just throw an exception.
    geometry_msgs::TransformStamped transformStamped =
        tf_buffer.lookupTransform(targetFrame, sourceFrame, ros::Time(0));
    tf2::fromMsg(transformStamped.transform, sourceToTarget);

    ROS_WARN_STREAM_THROTTLE(2.0, "Transform from " << sourceFrame << " to " << targetFrame <<
                             " was unavailable for the time requested (" << time << "). " <<
                             "Using nearest at time " << transformStamped.header.stamp <<
                             " instead (" << transformStamped.header.stamp - time << ").");
  }
}
bool DiffDrivePoseControllerROS::getPoseDiff()
{
  // use tf to get information about the goal pose relative to the base
  try
  {
    tf_listener_.lookupTransform(base_frame_name_, goal_frame_name_, ros::Time(0), tf_goal_pose_rel_);
  }
  catch (tf::TransformException const& ex)
  {
    ROS_WARN_STREAM_THROTTLE(1.0, "Couldn't get transform from base to goal pose! [" << name_ << "]");
    ROS_DEBUG_STREAM_THROTTLE(1.0, "tf error: " << ex.what());
    return false;
  }

  // determine distance to goal
  double r = std::sqrt(std::pow(tf_goal_pose_rel_.getOrigin().getX(), 2)
                       + std::pow(tf_goal_pose_rel_.getOrigin().getY(), 2));
  // determine orientation of r relative to the base frame
  double delta = std::atan2(-tf_goal_pose_rel_.getOrigin().getY(), tf_goal_pose_rel_.getOrigin().getX());

  // determine orientation of r relative to the goal frame
  // helper: theta = tf's orientation + delta
  double heading = mtk::wrapAngle(tf::getYaw(tf_goal_pose_rel_.getRotation()));
  double theta = heading + delta;

  setInput(r, delta, theta);

  return true;
}
void gpsCb(const sensor_msgs::NavSatFixConstPtr& msg)
{
  static int count = 1;
  if (msg->status.status != sensor_msgs::NavSatStatus::STATUS_FIX)
  {
    ROS_WARN_STREAM_THROTTLE(1, "No GPS fix");
    return;
  }

  g_lat_ref += msg->latitude;
  g_lon_ref += msg->longitude;
  g_alt_ref += msg->altitude;

  ROS_INFO("Current measurement: %f %f %f", msg->latitude, msg->longitude, msg->altitude);

  if (count == g_its)
  {
    g_lat_ref /= g_its;
    g_lon_ref /= g_its;
    g_alt_ref /= g_its;

    ros::NodeHandle nh;
    nh.setParam("/gps_ref_latitude", g_lat_ref);
    nh.setParam("/gps_ref_longitude", g_lon_ref);
    nh.setParam("/gps_ref_altitude", g_alt_ref);

    ROS_INFO("final reference position: %f %f %f", g_lat_ref, g_lon_ref, g_alt_ref);
    ros::shutdown();
    return;
  }

  count++;
}
void tf2::waitForTransform(const tf2_ros::Buffer& tf_buffer,
                           const std::string& targetFrame,
                           const std::string& sourceFrame,
                           const ros::Time& time,
                           const ros::Duration& timeout,
                           tf2::Transform& sourceToTarget)
{
  // First try to transform the data at the requested time
  try
  {
    tf2::fromMsg(tf_buffer.lookupTransform(targetFrame, sourceFrame, time, timeout).transform, sourceToTarget);
  }
  catch (tf2::LookupException& ex)
  {
    ROS_WARN_STREAM_THROTTLE(2.0, "Frames " << sourceFrame << " or " << targetFrame <<
                             " doesn't currently exist, waiting and retrying..");
    waitUntilCanTransform(tf_buffer, targetFrame, sourceFrame, time, timeout);
    tf2::fromMsg(tf_buffer.lookupTransform(targetFrame, sourceFrame, time).transform, sourceToTarget);
  }
  catch (tf2::ConnectivityException& ex)
  {
    ROS_WARN_STREAM_THROTTLE(2.0, "Frames " << sourceFrame << " or " << targetFrame <<
                             " aren't connected on the tf tree, waiting and retrying..");
    waitUntilCanTransform(tf_buffer, targetFrame, sourceFrame, time, timeout);
    tf2::fromMsg(tf_buffer.lookupTransform(targetFrame, sourceFrame, time).transform, sourceToTarget);
  }
  catch (tf2::TransformException& ex)
  {
    // The issue might be that the transforms that are available are not close
    // enough temporally to be used. In that case, just use the latest available
    // transform and warn the user.
    // If this also fails, then just throw an exception.
    geometry_msgs::TransformStamped transformStamped =
        tf_buffer.lookupTransform(targetFrame, sourceFrame, ros::Time(0));
    tf2::fromMsg(transformStamped.transform, sourceToTarget);

    ROS_WARN_STREAM_THROTTLE(2.0, "Transform from " << sourceFrame << " to " << targetFrame <<
                             " was unavailable for the time requested (" << time << "). " <<
                             "Using nearest at time " << transformStamped.header.stamp <<
                             " instead (" << transformStamped.header.stamp - time << ").");
  }
}
void callbackThreadFunction(SharedMemoryTransport<T>* smt, boost::function<void(T&)> callback)
{
  T msg;
  std::string serialized_data;
  ros::Rate loop_rate(10.0);

  while (ros::ok()) // wait for the field to at least have something
  {
    if (!smt->initialized())
    {
      ROS_WARN("%s: Shared memory transport was shut down while we were waiting for connections. Stopping callback thread!",
               m_nh->getNamespace().c_str());
      return;
    }
    if (!smt->connected())
    {
      smt->connect();
    }
    else if (smt->hasData())
    {
      break;
    }
    ROS_WARN_STREAM_THROTTLE(1.0, m_nh->getNamespace() << ": Trying to connect to field "
                             << smt->getFieldName() << "...");
    loop_rate.sleep();
    //boost::this_thread::interruption_point();
  }

  if (getCurrentMessage(msg))
  {
    callback(msg);
  }

  while (ros::ok())
  {
    try
    {
      if (m_use_polling)
      {
        smt->awaitNewDataPolled(msg);
      }
      else
      {
        smt->awaitNewData(msg);
      }
      callback(msg);
    }
    catch (ros::serialization::StreamOverrunException& ex)
    {
      ROS_ERROR("%s: Deserialization failed for topic %s! The string was:\n%s",
                m_nh->getNamespace().c_str(), m_full_topic_path.c_str(), serialized_data.c_str());
    }
    //boost::this_thread::interruption_point();
  }
}
void gps_callback(const sensor_msgs::NavSatFixConstPtr& msg)
{
  ros::NodeHandle nh;
  nh.getParam("/gps_ref_is_init", gps_ref_is_init);

  if (!gps_ref_is_init)
  {
    if (msg->status.status < sensor_msgs::NavSatStatus::STATUS_FIX)
    {
      ROS_WARN_STREAM_THROTTLE(1, "No GPS fix");
      return;
    }

    g_lat_ref += msg->latitude;
    g_lon_ref += msg->longitude;
    g_alt_ref += msg->altitude;

    ROS_INFO("Current measurement: %f, %f, %f", msg->latitude, msg->longitude, msg->altitude);

    if (g_count == g_its)
    {
      if (g_mode == MODE_AVERAGE)
      {
        g_lat_ref /= g_its;
        g_lon_ref /= g_its;
        g_alt_ref /= g_its;
      }
      else
      {
        g_lat_ref = msg->latitude;
        g_lon_ref = msg->longitude;
        g_alt_ref = msg->altitude;
      }

      nh.setParam("/gps_ref_latitude", g_lat_ref);
      nh.setParam("/gps_ref_longitude", g_lon_ref);
      nh.setParam("/gps_ref_altitude", g_alt_ref);
      nh.setParam("/gps_ref_is_init", true);

      ROS_INFO("Final reference position: %f, %f, %f", g_lat_ref, g_lon_ref, g_alt_ref);
      return;
    }
    else
    {
      ROS_INFO(" Still waiting for %d measurements", g_its - g_count);
    }

    g_count++;
  }
}
void CovarianceVisual::setMessage(const geometry_msgs::PoseWithCovariance& msg)
{
  // Construct pose position and orientation.
  const geometry_msgs::Point& p = msg.pose.position;
  const geometry_msgs::Quaternion& q = msg.pose.orientation;

  Ogre::Vector3 position(p.x, p.y, p.z);
  Ogre::Quaternion orientation(q.w, q.x, q.y, q.z);

  // Set position and orientation for axes scene node.
  if (!position.isNaN())
    axes_->setPosition(position);
  else
    ROS_WARN_STREAM_THROTTLE(1, "position contains NaN: " << position);

  if (!orientation.isNaN())
    axes_->setOrientation(orientation);
  else
    ROS_WARN_STREAM_THROTTLE(1, "orientation contains NaN: " << orientation);

  // check for NaN in covariance
  for (unsigned i = 0; i < 3; ++i)
  {
    if (isnan(msg.covariance[i]))
    {
      ROS_WARN_THROTTLE(1, "covariance contains NaN");
      return;
    }
  }

  // Compute eigen values and vectors for both shapes.
  std::pair<Eigen::Matrix3d, Eigen::Vector3d> positionEigenVectorsAndValues(computeEigenValuesAndVectors(msg, 0));
  std::pair<Eigen::Matrix3d, Eigen::Vector3d> orientationEigenVectorsAndValues(computeEigenValuesAndVectors(msg, 3));

  Ogre::Quaternion positionQuaternion(computeRotation(msg, positionEigenVectorsAndValues));
  Ogre::Quaternion orientationQuaternion(computeRotation(msg, orientationEigenVectorsAndValues));

  positionNode_->setOrientation(positionQuaternion);
  orientationNode_->setOrientation(orientationQuaternion);

  // Compute scaling.
  //Ogre::Vector3 axesScaling(1, 1, 1);
  //axesScaling *= scaleFactor_;

  Ogre::Vector3 positionScaling(std::sqrt(positionEigenVectorsAndValues.second[0]),
                                std::sqrt(positionEigenVectorsAndValues.second[1]),
                                std::sqrt(positionEigenVectorsAndValues.second[2]));
  positionScaling *= scaleFactor_;

  Ogre::Vector3 orientationScaling(std::sqrt(orientationEigenVectorsAndValues.second[0]),
                                   std::sqrt(orientationEigenVectorsAndValues.second[1]),
                                   std::sqrt(orientationEigenVectorsAndValues.second[2]));
  orientationScaling *= scaleFactor_;

  // Set the scaling.
  /*if(!axesScaling.isNaN())
      axes_->setScale(axesScaling);
    else
      ROS_WARN_STREAM("axesScaling contains NaN: " << axesScaling);*/

  if (!positionScaling.isNaN())
    positionNode_->setScale(positionScaling);
  else
    ROS_WARN_STREAM("positionScaling contains NaN: " << positionScaling);

  if (!orientationScaling.isNaN())
    orientationNode_->setScale(orientationScaling);
  else
    ROS_WARN_STREAM("orientationScaling contains NaN: " << orientationScaling);

  // Debugging.
  ROS_INFO_STREAM_THROTTLE
    (1.,
     "Position:\n" << position << "\n"
     << "Positional part 3x3 eigen values:\n" << positionEigenVectorsAndValues.second << "\n"
     << "Positional part 3x3 eigen vectors:\n" << positionEigenVectorsAndValues.first << "\n"
     << "Sphere orientation:\n" << positionQuaternion << "\n"
     << positionQuaternion.getRoll() << " " << positionQuaternion.getPitch() << " "
     << positionQuaternion.getYaw() << "\n"
     << "Sphere scaling:\n" << positionScaling << "\n"
     << "Rotational part 3x3 eigen values:\n" << orientationEigenVectorsAndValues.second << "\n"
     << "Rotational part 3x3 eigen vectors:\n" << orientationEigenVectorsAndValues.first << "\n"
     << "Cone orientation:\n" << orientationQuaternion << "\n"
     << orientationQuaternion.getRoll() << " " << orientationQuaternion.getPitch() << " "
     << orientationQuaternion.getYaw() << "\n"
     << "Cone scaling:\n" << orientationScaling);
}
void QualisysDriver::handlePacketData(CRTPacket* prt_packet)
{
  // Number of rigid bodies
  int body_count = prt_packet->Get6DOFEulerBodyCount();
  // Check the publishers for the rigid bodies
  checkPublishers(body_count);

  // Publish data for each rigid body
  for (int i = 0; i < body_count; ++i)
  {
    float x, y, z, roll, pitch, yaw;
    prt_packet->Get6DOFEulerBody(i, x, y, z, roll, pitch, yaw);

    if (isnan(x) || isnan(y) || isnan(z) ||
        isnan(roll) || isnan(pitch) || isnan(yaw))
    {
      ROS_WARN_STREAM_THROTTLE(3, "Rigid-body " << i + 1 << "/" << body_count << " not detected");
      continue;
    }

    // ROTATION: GLOBAL (FIXED) X Y Z (R P Y)
    //std::stringstream name;
    //name << port_protocol.Get6DOFBodyName(i);
    string subject_name(port_protocol.Get6DOFBodyName(i));

    // Qualisys sometimes flips 180 degrees around the x axis
    if (roll > 90)
      roll -= 180;
    else if (roll < -90)
      roll += 180;

    // Send transform
    tf::StampedTransform stamped_transform = tf::StampedTransform(
        tf::Transform(
            tf::createQuaternionFromRPY(roll * deg2rad, pitch * deg2rad, yaw * deg2rad),
            tf::Vector3(x, y, z) / 1000.),
        ros::Time::now(), "qualisys", subject_name);
    if (publish_tf)
      tf_publisher.sendTransform(stamped_transform);

    // Send Subject msg
    geometry_msgs::TransformStamped geom_stamped_transform;
    tf::transformStampedTFToMsg(stamped_transform, geom_stamped_transform);

    qualisys::Subject subject_msg;
    subject_msg.header = geom_stamped_transform.header;
    subject_msg.name = subject_name;
    subject_msg.position.x = geom_stamped_transform.transform.translation.x;
    subject_msg.position.y = geom_stamped_transform.transform.translation.y;
    subject_msg.position.z = geom_stamped_transform.transform.translation.z;
    subject_msg.orientation = geom_stamped_transform.transform.rotation;
    subject_publishers[subject_name].publish(subject_msg);
  }

  return;
}
void Demo_policies::update(arma::colvec3& velocity,
                           const arma::colvec3& mls_WF,
                           const arma::colvec3& mls_SF,
                           const arma::colvec3& socket_pos_WF,
                           const arma::colvec3& peg_origin_WF,
                           const arma::colvec& Y_c,
                           const std::vector<STATES>& states,
                           Insert_peg& insert_peg,
                           Get_back_on& get_back_on,
                           Force_control& force_control,
                           Peg_sensor_model& peg_sensor_model,
                           const arma::colvec& belief_state_SF,
                           const arma::colvec3& open_loop_x_origin_arma_WF)
{
  if (Y_c(8) == 1)
  {
    // policy_type = POLICY_TYPE::INSERT;
    in_socket = false;
  }
  else
  {
    in_socket = false;
  }

  if (policy_type == POLICY_TYPE::DEMO)
  {
    switch (demo_type)
    {
      case DEMO_TYPE::DEMO_1:
      {
        spolicy_one->update(velocity, mls_WF, mls_SF, socket_pos_WF, peg_origin_WF, Y_c, states,
                            insert_peg, get_back_on, force_control, peg_sensor_model);
        break;
      }
      case DEMO_TYPE::DEMO_2:
      {
        spolicy_two->update(velocity, mls_WF, mls_SF, socket_pos_WF, peg_origin_WF, Y_c, states,
                            insert_peg, get_back_on, force_control, peg_sensor_model);
        break;
      }
      case DEMO_TYPE::DEMO_3:
      {
        spolicy_three->update(velocity, mls_WF, mls_SF, socket_pos_WF, peg_origin_WF, Y_c, states,
                              insert_peg, get_back_on, force_control, peg_sensor_model);
        break;
      }
    }

    if (Planning_stack::has_state(STATES::LOW_UNCERTAINTY, states))
    {
      policy_type = POLICY_TYPE::GREEDY;
      ROS_WARN_STREAM("SWITCHING TO GREEDY POLICY");
    }
  }
  else if (policy_type == POLICY_TYPE::GREEDY)
  {
    ROS_WARN_STREAM_THROTTLE(1.0, " == GREEDY POLICY");
    gmm.update(velocity, belief_state_SF, states);
    if (Planning_stack::has_state(STATES::SOCKET_ENTRY, states))
    {
      policy_type = POLICY_TYPE::INSERT;
      ROS_WARN_STREAM("SWITCHING TO INSERT POLICY");
    }
  }
  else if (policy_type == POLICY_TYPE::INSERT)
  {
    ROS_WARN_STREAM_THROTTLE(1.0, " == INSERT POLICY");
    velocity.zeros();
    target_WF = socket_pos_WF;

    if (dist_yz(target_WF, peg_origin_WF) < 0.001)
    {
      velocity = target_WF - peg_origin_WF;
    }
    else
    {
      velocity(1) = target_WF(1) - peg_origin_WF(1);
      velocity(2) = target_WF(2) - peg_origin_WF(2);
    }

    if (arma::norm(velocity) > 0.02)
    {
      velocity = arma::normalise(velocity);
      velocity = 0.02 * velocity;
    }
  }
  else if (policy_type == POLICY_TYPE::FORWARD)
  {
    ROS_WARN_STREAM_THROTTLE(1.0, "[[POLICY::FORWARD]]");
    forward_policy.update(velocity, peg_origin_WF);
  }

  /// Takes care if we are stuck at an edge
  if (policy_type != POLICY_TYPE::INSERT && policy_type != POLICY_TYPE::FORWARD)
  {
    if (State_machine::has_state(STATES::STUCK_EDGE, states))
    {
      force_control.get_over_edge(velocity, open_loop_x_origin_arma_WF, peg_origin_WF);
    }
    else
    {
      force_control.update_x(velocity);
    }
  }

  force_control.force_safety(velocity, 8);

  if (policy_type != POLICY_TYPE::INSERT && policy_type != POLICY_TYPE::FORWARD)
  {
    velocity = arma::normalise(velocity);
    if (!velocity.is_finite())
    {
      ROS_WARN_THROTTLE(1.0, "arma_velocity is not finite() [Search_policy::get_velocity]!");
      velocity.zeros();
    }
  }
}
void gps_callback(const sensor_msgs::NavSatFixConstPtr& msg)
{
  if (!g_got_imu)
  {
    ROS_WARN_STREAM_THROTTLE(1, "No IMU data yet");
    return;
  }

  if (msg->status.status < sensor_msgs::NavSatStatus::STATUS_FIX)
  {
    ROS_WARN_STREAM_THROTTLE(1, "No GPS fix");
    return;
  }

  if (!g_geodetic_converter.isInitialised())
  {
    ROS_WARN_STREAM_THROTTLE(1, "No GPS reference point set, not publishing");
    return;
  }

  double x, y, z;
  g_geodetic_converter.geodetic2Enu(msg->latitude, msg->longitude, msg->altitude, &x, &y, &z);

  // (NWU -> ENU) for simulation
  if (g_is_sim)
  {
    double aux = x;
    x = y;
    y = -aux;
    //z = z;
  }

  // Fill up pose message
  geometry_msgs::PoseWithCovarianceStampedPtr pose_msg(new geometry_msgs::PoseWithCovarianceStamped);
  pose_msg->header = msg->header;
  pose_msg->header.frame_id = "world";
  pose_msg->pose.pose.position.x = x;
  pose_msg->pose.pose.position.y = y;
  pose_msg->pose.pose.position.z = z;
  pose_msg->pose.pose.orientation = g_latest_imu_msg.orientation;

  // Fill up position message
  geometry_msgs::PointStampedPtr position_msg(new geometry_msgs::PointStamped);
  position_msg->header = pose_msg->header;
  position_msg->header.frame_id = "world";
  position_msg->point = pose_msg->pose.pose.position;

  // If external altitude messages received, include in pose and position messages
  if (g_got_altitude)
  {
    pose_msg->pose.pose.position.z = g_latest_altitude_msg.data;
    position_msg->point.z = g_latest_altitude_msg.data;
  }

  pose_msg->pose.covariance.assign(0);

  // Set default covariances
  pose_msg->pose.covariance[6 * 0 + 0] = g_covariance_position_x;
  pose_msg->pose.covariance[6 * 1 + 1] = g_covariance_position_y;
  pose_msg->pose.covariance[6 * 2 + 2] = g_covariance_position_z;
  pose_msg->pose.covariance[6 * 3 + 3] = g_covariance_orientation_x;
  pose_msg->pose.covariance[6 * 4 + 4] = g_covariance_orientation_y;
  pose_msg->pose.covariance[6 * 5 + 5] = g_covariance_orientation_z;

  // Take covariances from GPS
  if (g_trust_gps)
  {
    if (msg->position_covariance_type == sensor_msgs::NavSatFix::COVARIANCE_TYPE_KNOWN ||
        msg->position_covariance_type == sensor_msgs::NavSatFix::COVARIANCE_TYPE_APPROXIMATED)
    {
      // Fill in completely
      for (int i = 0; i <= 2; i++)
      {
        for (int j = 0; j <= 2; j++)
        {
          pose_msg->pose.covariance[6 * i + j] = msg->position_covariance[3 * i + j];
        }
      }
    }
    else if (msg->position_covariance_type == sensor_msgs::NavSatFix::COVARIANCE_TYPE_DIAGONAL_KNOWN)
    {
      // Only fill in diagonal
      for (int i = 0; i <= 2; i++)
      {
        pose_msg->pose.covariance[6 * i + i] = msg->position_covariance[3 * i + i];
      }
    }
  }

  g_gps_pose_pub.publish(pose_msg);
  g_gps_position_pub.publish(position_msg);

  // Fill up transform message
  geometry_msgs::TransformStampedPtr transform_msg(new geometry_msgs::TransformStamped);
  transform_msg->header = msg->header;
  transform_msg->header.frame_id = "world";
  transform_msg->transform.translation.x = x;
  transform_msg->transform.translation.y = y;
  transform_msg->transform.translation.z = z;
  transform_msg->transform.rotation = g_latest_imu_msg.orientation;

  if (g_got_altitude)
  {
    transform_msg->transform.translation.z = g_latest_altitude_msg.data;
  }

  g_gps_transform_pub.publish(transform_msg);
}
void GazeboRosKobuki::OnUpdate()
{
  /*
   * First process ROS callbacks
   */
  ros::spinOnce();

  /*
   * Update current time and time step
   */
  common::Time time_now = world_->GetSimTime();
  common::Time step_time = time_now - prev_update_time_;
  prev_update_time_ = time_now;

  /*
   * Joint states
   */
  joint_state_.header.stamp.sec = time_now.sec;
  joint_state_.header.stamp.nsec = time_now.nsec;
  joint_state_.position[LEFT] = joints_[LEFT]->GetAngle(0).Radian();
  joint_state_.velocity[LEFT] = joints_[LEFT]->GetVelocity(0);
  joint_state_.position[RIGHT] = joints_[RIGHT]->GetAngle(0).Radian();
  joint_state_.velocity[RIGHT] = joints_[RIGHT]->GetVelocity(0);
  joint_state_pub_.publish(joint_state_);

  /*
   * Odometry
   */
  odom_.header.stamp.sec = time_now.sec;
  odom_.header.stamp.nsec = time_now.nsec;
  odom_.header.frame_id = "odom";
  odom_.child_frame_id = "base_footprint";
  odom_tf_.header = odom_.header;
  odom_tf_.child_frame_id = odom_.child_frame_id;

  // Distance travelled by front wheels
  double d1, d2;
  double dr, da;
  d1 = d2 = 0;
  dr = da = 0;
  //  if (set_joints_[LEFT])
  d1 = step_time.Double() * (wheel_diam_ / 2) * joints_[LEFT]->GetVelocity(0);
  //  if (set_joints_[RIGHT])
  d2 = step_time.Double() * (wheel_diam_ / 2) * joints_[RIGHT]->GetVelocity(0);

  // Can see NaN values here, just zero them out if needed
  if (isnan(d1))
  {
    ROS_WARN_STREAM_THROTTLE(0.1, "Gazebo ROS Kobuki plugin: NaN in d1. Step time: " << step_time.Double()
                             << ", WD: " << wheel_diam_ << ", velocity: " << joints_[LEFT]->GetVelocity(0));
    d1 = 0;
  }
  if (isnan(d2))
  {
    ROS_WARN_STREAM_THROTTLE(0.1, "Gazebo ROS Kobuki plugin: NaN in d2. Step time: " << step_time.Double()
                             << ", WD: " << wheel_diam_ << ", velocity: " << joints_[RIGHT]->GetVelocity(0));
    d2 = 0;
  }

  dr = (d1 + d2) / 2;
  da = (d2 - d1) / wheel_sep_;

  // Compute odometric pose
  odom_pose_[0] += dr * cos( odom_pose_[2] );
  odom_pose_[1] += dr * sin( odom_pose_[2] );
  odom_pose_[2] += da;
  // Compute odometric instantaneous velocity
  odom_vel_[0] = dr / step_time.Double();
  odom_vel_[1] = 0.0;
  odom_vel_[2] = da / step_time.Double();

  odom_.pose.pose.position.x = odom_pose_[0];
  odom_.pose.pose.position.y = odom_pose_[1];
  odom_.pose.pose.position.z = 0;

  btQuaternion qt;
  qt.setEuler(0, 0, odom_pose_[2]);
  odom_.pose.pose.orientation.x = qt.getX();
  odom_.pose.pose.orientation.y = qt.getY();
  odom_.pose.pose.orientation.z = qt.getZ();
  odom_.pose.pose.orientation.w = qt.getW();

  memcpy(&odom_.pose.covariance[0], pose_cov_, sizeof(double) * 36);
  memcpy(&odom_.twist.covariance[0], pose_cov_, sizeof(double) * 36);

  odom_.twist.twist.linear.x = 0;
  odom_.twist.twist.linear.y = 0;
  odom_.twist.twist.linear.z = 0;
  odom_.twist.twist.angular.x = 0;
  odom_.twist.twist.angular.y = 0;
  odom_.twist.twist.angular.z = 0;
  odom_pub_.publish(odom_); // publish odom message

  odom_tf_.transform.translation.x = odom_.pose.pose.position.x;
  odom_tf_.transform.translation.y = odom_.pose.pose.position.y;
  odom_tf_.transform.translation.z = odom_.pose.pose.position.z;
  odom_tf_.transform.rotation = odom_.pose.pose.orientation;
  tf_broadcaster_.sendTransform(odom_tf_);

  /*
   * Propagate velocity commands
   * TODO: Check how to simulate disabled motors, e.g. set MaxForce to zero, but then damping is important!
   */
  if (((time_now - last_cmd_vel_time_).Double() > cmd_vel_timeout_) || !motors_enabled_)
  {
    wheel_speed_cmd_[LEFT] = 0.0;
    wheel_speed_cmd_[RIGHT] = 0.0;
  }
  joints_[LEFT]->SetVelocity(0, wheel_speed_cmd_[LEFT] / (wheel_diam_ / 2.0));
  joints_[RIGHT]->SetVelocity(0, wheel_speed_cmd_[RIGHT] / (wheel_diam_ / 2.0));
  joints_[LEFT]->SetMaxForce(0, torque_);
  joints_[RIGHT]->SetMaxForce(0, torque_);

  /*
   * Cliff sensors
   */
  // check current state
  cliff_event_.sensor = 0;
  cliff_event_.state = kobuki_msgs::CliffEvent::FLOOR;
  if (cliff_sensor_left_->GetRange(0) >= cliff_detection_threshold_)
  {
    cliff_event_.sensor += kobuki_msgs::CliffEvent::LEFT;
    cliff_event_.state = kobuki_msgs::CliffEvent::CLIFF;
    max_floot_dist_ = cliff_sensor_left_->GetRange(0);
  }
  if (cliff_sensor_front_->GetRange(0) >= cliff_detection_threshold_)
  {
    cliff_event_.sensor += kobuki_msgs::CliffEvent::CENTER;
    cliff_event_.state = kobuki_msgs::CliffEvent::CLIFF;
    if (cliff_sensor_front_->GetRange(0) > max_floot_dist_)
    {
      max_floot_dist_ = cliff_sensor_front_->GetRange(0);
    }
  }
  if (cliff_sensor_right_->GetRange(0) >= cliff_detection_threshold_)
  {
    cliff_event_.sensor += kobuki_msgs::CliffEvent::RIGHT;
    cliff_event_.state = kobuki_msgs::CliffEvent::CLIFF;
    if (cliff_sensor_right_->GetRange(0) > max_floot_dist_)
    {
      max_floot_dist_ = cliff_sensor_right_->GetRange(0);
    }
  }

  // Only publish new message, if something has changed
  if ((cliff_event_.state == kobuki_msgs::CliffEvent::CLIFF)
      && (cliff_event_.sensor != cliff_event_old_.sensor))
  {
    // max_floot_dist_ = static_cast<int>(0.995f / ( tan( static_cast<float>( m_pk.psd[i]) / 76123.0f )
    cliff_event_.bottom = (int)(76123.0f * atan2(0.995f, max_floot_dist_)); // convert distance back to an AD reading
    cliff_event_pub_.publish(cliff_event_);
    cliff_event_old_ = cliff_event_;
  }

  /*
   * Bumpers
   */
  msgs::Contacts contacts;
  contacts = bumper_->GetContacts();
  //  for (int i = 0; i < contacts.contact_size(); ++i)
  //  {
  //    std::cout << "Collision between[" << contacts.contact(i).collision1()
  //              << "] and [" << contacts.contact(i).collision2() << "]\n";
  //
  //    for (int j = 0; j < contacts.contact(i).position_size(); ++j)
  //    {
  //      std::cout << j << " Position:"
  //                << contacts.contact(i).position(j).x() << " "
  //                << contacts.contact(i).position(j).y() << " "
  //                << contacts.contact(i).position(j).z() << "\n";
  //      std::cout << "  Normal:"
  //                << contacts.contact(i).normal(j).x() << " "
  //                << contacts.contact(i).normal(j).y() << " "
  //                << contacts.contact(i).normal(j).z() << "\n";
  //      std::cout << "  Depth:" << contacts.contact(i).depth(j) << "\n";
  //    }
  //  }

  // In order to simulate the three bumper sensors, a contact is assigned to one of the bumpers
  // depending on its position. Each sensor covers a range of 60 degrees.
  // +90 ... +30: left bumper
  // +30 ... -30: centre bumper
  // -30 ... -90: right bumper
  bumper_event_.state = 0;
  bumper_event_.bumper = 0;

  // flags used for avoiding multiple triggering of the same bumper due to multiple contacts
  bool bumper_left_pressed = false;
  bool bumper_centre_pressed = false;
  bool bumper_right_pressed = false;

  for (int i = 0; i < contacts.contact_size(); ++i)
  {
    if ((contacts.contact(i).position(0).z() >= 0.015)
        && (contacts.contact(i).position(0).z() <= 0.085)) // only consider contacts at the height of the bumper
    {
      math::Pose current_pose = model_->GetWorldPose();
      double robot_heading = current_pose.rot.GetYaw();
      // using the force normals below, since the contact position is given in world coordinates
      // negate normal, because it points from contact to robot centre
      double global_contact_angle = std::atan2(-contacts.contact(i).normal(0).y(),
                                               -contacts.contact(i).normal(0).x());
      double relative_contact_angle = global_contact_angle - robot_heading;

      std::cout << "vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv" << std::endl;
      std::cout << " Position:"
                << contacts.contact(i).position(0).x() << " "
                << contacts.contact(i).position(0).y() << " "
                << contacts.contact(i).position(0).z() << "\n";
      std::cout << " Normal:"
                << contacts.contact(i).normal(0).x() << " "
                << contacts.contact(i).normal(0).y() << " "
                << contacts.contact(i).normal(0).z() << "\n";
      //      std::cout << "Current robot heading: " << (robot_heading * (180/M_PI)) << std::endl;
      //      std::cout << "Global contact angle: " << (contact_angle * (180/M_PI)) << std::endl;
      //      std::cout << "Robot contact angle: " << (relative_contact_angle * (180/M_PI)) << std::endl;

      if ((relative_contact_angle <= (M_PI/2)) && (relative_contact_angle > (M_PI/6)))
      {
        if (!bumper_left_pressed)
        {
          bumper_left_pressed = true;
          bumper_event_.state = kobuki_msgs::BumperEvent::PRESSED;
          bumper_event_.bumper += kobuki_msgs::BumperEvent::LEFT;
          //          std::cout << "Left bumper pressed." << std::endl;
          //          std::cout << "-----------------------------------------" << std::endl;
        }
      }
      else if ((relative_contact_angle <= (M_PI/6)) && (relative_contact_angle >= (-M_PI/6)))
      {
        if (!bumper_centre_pressed)
        {
          bumper_centre_pressed = true;
          bumper_event_.state = kobuki_msgs::BumperEvent::PRESSED;
          bumper_event_.bumper += kobuki_msgs::BumperEvent::CENTER;
          //          std::cout << "Centre bumper pressed." << std::endl;
          //          std::cout << "-----------------------------------------" << std::endl;
        }
      }
      else if ((relative_contact_angle < (-M_PI/6)) && (relative_contact_angle >= (-M_PI/2)))
      {
        if (!bumper_right_pressed)
        {
          bumper_right_pressed = true;
          bumper_event_.state = kobuki_msgs::BumperEvent::PRESSED;
          bumper_event_.bumper += kobuki_msgs::BumperEvent::RIGHT;
          //          std::cout << "Right bumper pressed." << std::endl;
          //          std::cout << "-----------------------------------------" << std::endl;
        }
      }
    }
  }

  // Only publish new message, if something has changed
  if ((bumper_event_.state != bumper_event_old_.state)
      || (bumper_event_.bumper != bumper_event_old_.bumper))
  {
    bumper_event_pub_.publish(bumper_event_);
    bumper_event_old_ = bumper_event_;
  }
}
/* * Odometry (encoders & IMU) */ void GazeboRosKobuki::updateOdometry(common::Time& step_time) { std::string odom_frame = gazebo_ros_->resolveTF("odom"); std::string base_frame = gazebo_ros_->resolveTF("base_footprint"); odom_.header.stamp = joint_state_.header.stamp; odom_.header.frame_id = odom_frame; odom_.child_frame_id = base_frame; // Distance travelled by main wheels double d1, d2; double dr, da; d1 = d2 = 0; dr = da = 0; d1 = step_time.Double() * (wheel_diam_ / 2) * joints_[LEFT]->GetVelocity(0); d2 = step_time.Double() * (wheel_diam_ / 2) * joints_[RIGHT]->GetVelocity(0); // Can see NaN values here, just zero them out if needed if (isnan(d1)) { ROS_WARN_STREAM_THROTTLE(0.1, "Gazebo ROS Kobuki plugin: NaN in d1. Step time: " << step_time.Double() << ", WD: " << wheel_diam_ << ", velocity: " << joints_[LEFT]->GetVelocity(0)); d1 = 0; } if (isnan(d2)) { ROS_WARN_STREAM_THROTTLE(0.1, "Gazebo ROS Kobuki plugin: NaN in d2. Step time: " << step_time.Double() << ", WD: " << wheel_diam_ << ", velocity: " << joints_[RIGHT]->GetVelocity(0)); d2 = 0; } dr = (d1 + d2) / 2; da = (d2 - d1) / wheel_sep_; // ignored // Just as in the Kobuki driver, the angular velocity is taken directly from the IMU vel_angular_ = imu_->GetAngularVelocity(); // Compute odometric pose odom_pose_[0] += dr * cos( odom_pose_[2] ); odom_pose_[1] += dr * sin( odom_pose_[2] ); odom_pose_[2] += vel_angular_.z * step_time.Double(); // Compute odometric instantaneous velocity odom_vel_[0] = dr / step_time.Double(); odom_vel_[1] = 0.0; odom_vel_[2] = vel_angular_.z; odom_.pose.pose.position.x = odom_pose_[0]; odom_.pose.pose.position.y = odom_pose_[1]; odom_.pose.pose.position.z = 0; tf::Quaternion qt; qt.setEuler(0,0,odom_pose_[2]); odom_.pose.pose.orientation.x = qt.getX(); odom_.pose.pose.orientation.y = qt.getY(); odom_.pose.pose.orientation.z = qt.getZ(); odom_.pose.pose.orientation.w = qt.getW(); odom_.pose.covariance[0] = 0.1; odom_.pose.covariance[7] = 0.1; odom_.pose.covariance[35] = 0.05; odom_.pose.covariance[14] = 1e6; odom_.pose.covariance[21] = 1e6; odom_.pose.covariance[28] = 1e6; odom_.twist.twist.linear.x = odom_vel_[0]; odom_.twist.twist.linear.y = 0; odom_.twist.twist.linear.z = 0; odom_.twist.twist.angular.x = 0; odom_.twist.twist.angular.y = 0; odom_.twist.twist.angular.z = odom_vel_[2]; odom_pub_.publish(odom_); // publish odom message if (publish_tf_) { odom_tf_.header = odom_.header; odom_tf_.child_frame_id = odom_.child_frame_id; odom_tf_.transform.translation.x = odom_.pose.pose.position.x; odom_tf_.transform.translation.y = odom_.pose.pose.position.y; odom_tf_.transform.translation.z = odom_.pose.pose.position.z; odom_tf_.transform.rotation = odom_.pose.pose.orientation; tf_broadcaster_.sendTransform(odom_tf_); } }
// Desired cart_vel used to compute the corresponding IK joint_vel
void RTKRobotArm::getIKJointVelocity(const Eigen::VectorXd& cart_vel, Eigen::VectorXd& joint_vel)
{
  MathLib::Vector cerr;
  if (bOrientCtrl)
  {
    cerr.Resize(6);
  }
  else
  {
    cerr.Resize(3);
  }
  copy(cart_vel, cerr);

  if (joint_vel.size() != numdof)
  {
    joint_vel.resize(numdof);
  }

  mIKSolver.SetTarget(cerr);
  mIKSolver.SetJacobian(mKinematicChain.GetJacobian());

  // Try first with all joint weights equal
  MathLib::Vector wts(numdof);
  wts.One();

  mSensorsGroup.ReadSensors();
  MathLib::Vector curr = mSensorsGroup.GetJointPositions();

  if (bUseNull)
  {
    MathLib::Vector nul(numdof);
    for (int i = 0; i < numdof; ++i)
    {
      nul[i] = null_posture(i) - curr[mapping[i]];
    }
    mIKSolver.SetNullTarget(nul);
  }

  mIKSolver.SetDofsWeights(wts);
  mIKSolver.Solve();
  MathLib::Vector ikout = mIKSolver.GetOutput();
  for (int i = 0; i < numdof; ++i)
  {
    joint_vel[i] = ikout[mapping[i]];
  }

  // Check if any joint is near limit
  bool redo = false;
  for (int i = 0; i < numdof; ++i)
  {
    if ((curr(i) > ulim(i) - DEG2RAD(5) && joint_vel[i] > 0) ||
        (curr(i) < llim(i) + DEG2RAD(5) && joint_vel[i] < 0))
    {
      ROS_WARN_STREAM_THROTTLE(0.5, "Reducing DOF " << i << " IK weight");
      redo = true;
      // Set the problematic joint weights to a small value
      wts(i) = 0.1;
    }
    else
    {
      wts(i) = 1.0;
    }
  }

  // If any joint is in problem, resolve with new joint weights
  if (redo)
  {
    mIKSolver.SetDofsWeights(wts);
    mIKSolver.Solve();
    MathLib::Vector ikout = mIKSolver.GetOutput();
    for (int i = 0; i < numdof; ++i)
    {
      joint_vel[i] = ikout[mapping[i]];
    }
  }
}
int main(int argc, char** argv)
{
  ros::init(argc, argv, "optitrack_tf_broadcaster");
  ros::NodeHandle nh;
  ros::NodeHandle nh_private("~");

  tf::Vector3 tmpV3;
  tf::Quaternion tmpQuat;
  tf::Matrix3x3 btm;
  OptiTrack* tracker;
  std::vector<tf::StampedTransform> robotTf;
  tf::StampedTransform visionTf;
  visionTf.setIdentity();
  visionTf.frame_id_ = "world";
  visionTf.child_frame_id_ = "vision";
  double pos[3];
  double orient[3][3];
  static tf::TransformBroadcaster br;

  std::string localip, objlist;
  bool exists = nh_private.getParam("local_ip", localip);
  if (!exists)
  {
    ROS_FATAL("You have to define local_ip!");
    return -1;
  }

  int publish_frequency = 250;
  nh_private.getParam("publish_frequency", publish_frequency);

  bool bUseThread = false;
  nh_private.getParam("use_thread", bUseThread);

  tracker = new OptiTrack();
  int ret;
  if ((ret = tracker->Init(localip.c_str(), bUseThread)) <= 0)
  {
    if (ret == -1)
      ROS_FATAL("Cannot open socket. Check local ip!");
    else if (ret == -2)
      ROS_FATAL("Cannot receive data. Check windows server!");
    return -1;
  }
  tracker->enableWarnings(false);

  std::string calibfile;
  bool bUseCalibration = false;
  if (nh_private.getParam("calib_file", calibfile))
  {
    std::vector<std::string> all_calib_files;
    explode(calibfile, all_calib_files);
    for (unsigned int f = 0; f < all_calib_files.size(); ++f)
    {
      FILE* file = NULL;
      file = fopen(all_calib_files[f].c_str(), "r");
      robotTf.push_back(newInitialRobotFrame(f));
      if (file == NULL)
      {
        ROS_WARN_STREAM("Calibration file \"" << calibfile << "\" not found for robot number " << f
                        << ". Robot frame will be identity.");
      }
      else
      {
        int dum;
        for (int i = 0; i < 3; i++)
          for (int j = 0; j < 3; j++)
            dum = fscanf(file, "%lf", &(orient[i][j]));
        btm.setValue(orient[0][0], orient[0][1], orient[0][2],
                     orient[1][0], orient[1][1], orient[1][2],
                     orient[2][0], orient[2][1], orient[2][2]);
        for (int j = 0; j < 3; j++)
          dum = fscanf(file, "%lf", &(pos[j]));
        tmpV3.setValue(pos[0], pos[1], pos[2]);

        btm = btm.transpose();
        tmpV3 = btm * tmpV3;
        tmpV3 *= -1.0;
        btm.getRotation(tmpQuat);

        robotTf[f].setOrigin(tmpV3);
        robotTf[f].setRotation(tmpQuat);
        bUseCalibration = true;
        fclose(file);
      }
    }
  }
  if (robotTf.size() == 0)
  {
    ROS_WARN("No calibration file provided! Assuming single robot with identity calibration.");
    robotTf.push_back(newInitialRobotFrame(0));
  }

  std::vector<std::string> obj_names;
  exists &= nh_private.getParam("obj_list", objlist);
  if (!exists)
  {
    // If parameter obj_list does not exist, track all objects available
    obj_names = tracker->GetRBodyNameList();
  }
  else
  {
    // If parameter obj_list exists, retrieve object names from the delimited list string
    explode(objlist, obj_names);
  }

  std::string tmp = "";
  unsigned int i;
  for (i = 0; i < obj_names.size() - 1; i++)
    tmp = tmp + obj_names[i] + " ; ";
  tmp = tmp + obj_names[i];
  unsigned int num_obj = obj_names.size();

  ROS_INFO("The following parameters will be used");
  ROS_INFO_STREAM("publish_frequency = " << publish_frequency << " Hz");
  ROS_INFO_STREAM("local_ip = " << localip);
  ROS_INFO_STREAM("Tracking " << num_obj << " objects ==> " << tmp);
  ROS_INFO_STREAM("use_thread = " << bUseThread);
  ROS_INFO_STREAM("use_calibration = " << bUseCalibration);

  for (i = 0; i < num_obj; i++)
    tracker->enableRBody(obj_names[i].c_str(), true);

  ros::Rate r(publish_frequency);

  std::vector<tf::StampedTransform> transforms;
  if (num_obj)
    transforms.resize(num_obj);
  for (i = 0; i < num_obj; i++)
  {
    transforms[i].child_frame_id_ = obj_names[i];
    transforms[i].frame_id_ = "vision";
    transforms[i].setIdentity();
  }
  for (unsigned int i = 0; i < robotTf.size(); ++i)
  {
    transforms.push_back(robotTf[i]);
  }

  // Only for backward compatibility.
  // The robot frame used to be called /robot
  // In the multi-robot setting the frames are called /robot<i>
  // Here we make a new frame called /robot as a copy of the frame /robot<0>
  tf::StampedTransform rbtframe = robotTf[0];
  rbtframe.child_frame_id_ = "robot";
  transforms.push_back(rbtframe);
  transforms.push_back(visionTf);

  ROS_INFO("TF Broadcaster started");
  while (ros::ok())
  {
    ros::spinOnce();
    if (tracker->Update() < 0)
    {
      ROS_ERROR_STREAM_THROTTLE(1, "Tracker update failed");
      break;
    }

    for (i = 0; i < num_obj; i++)
    {
      if (!tracker->getRBodyPosition(pos, obj_names[i].c_str())
          || !tracker->getRBodyOrientation(orient, obj_names[i].c_str()))
        ROS_WARN_STREAM_THROTTLE(1, "Object " << obj_names[i] << " not detected");
      else
      {
        btm.setValue(orient[0][0], orient[0][1], orient[0][2],
                     orient[1][0], orient[1][1], orient[1][2],
                     orient[2][0], orient[2][1], orient[2][2]);
        tmpV3[0] = pos[0];
        tmpV3[1] = pos[1];
        tmpV3[2] = pos[2];
        btm.getRotation(tmpQuat);
        transforms[i].setOrigin(tmpV3);
        transforms[i].setRotation(tmpQuat);
      }
      transforms[i].stamp_ = ros::Time::now();
    }
    transforms[num_obj].stamp_ = ros::Time::now();
    transforms[num_obj + 1].stamp_ = ros::Time::now();
    br.sendTransform(transforms);
    r.sleep();
  }

  ROS_INFO_STREAM("Stopping node");
  delete tracker;
  nh.shutdown();
  nh_private.shutdown();
  return 0;
}
void ObstaclePointCloud::broadcast()
{
  if (q_obstacles_.size() < 1)
  {
    return;
  }
  const auto sim_obstacles = q_obstacles_.front();

  using Cell = std::pair<float, float>;
  std::vector<Cell> all_cells;
  for (const auto& line : sim_obstacles->obstacles)
  {
    const auto cells = pedsim::LineObstacleToCells(line.start.x, line.start.y, line.end.x, line.end.y);
    std::copy(cells.begin(), cells.end(), std::back_inserter(all_cells));
  }

  constexpr int point_density = 100;
  const int num_points = all_cells.size() * point_density;

  std::default_random_engine generator;

  // \todo - Read params from config file.
  std::uniform_int_distribution<int> color_distribution(1, 255);
  std::uniform_real_distribution<float> height_distribution(0, 1);
  std::uniform_real_distribution<float> width_distribution(-0.5, 0.5);

  sensor_msgs::PointCloud pcd_global;
  pcd_global.header.stamp = ros::Time::now();
  pcd_global.header.frame_id = sim_obstacles->header.frame_id;
  pcd_global.points.resize(num_points);
  pcd_global.channels.resize(1);
  pcd_global.channels[0].name = "intensities";
  pcd_global.channels[0].values.resize(num_points);

  sensor_msgs::PointCloud pcd_local;
  pcd_local.header.stamp = ros::Time::now();
  pcd_local.header.frame_id = robot_odom_.header.frame_id;
  pcd_local.points.resize(num_points);
  pcd_local.channels.resize(1);
  pcd_local.channels[0].name = "intensities";
  pcd_local.channels[0].values.resize(num_points);

  // prepare the transform to robot odom frame.
  tf::StampedTransform robot_transform;
  try
  {
    transform_listener_->lookupTransform(robot_odom_.header.frame_id,
                                         sim_obstacles->header.frame_id,
                                         ros::Time(0), robot_transform);
  }
  catch (tf::TransformException& e)
  {
    ROS_WARN_STREAM_THROTTLE(5.0, "TFP lookup from [" << sim_obstacles->header.frame_id << "] to ["
                             << robot_odom_.header.frame_id << "] failed. Reason: " << e.what());
    return;
  }

  size_t index = 0;
  for (const auto& cell : all_cells)
  {
    const int cell_color = color_distribution(generator);

    for (size_t j = 0; j < point_density; ++j)
    {
      if (fov_->inside(cell.first, cell.second))
      {
        const tf::Vector3 point(cell.first + width_distribution(generator),
                                cell.second + width_distribution(generator), 0.);
        const auto transformed_point = transformPoint(robot_transform, point);

        pcd_local.points[index].x = transformed_point.getOrigin().x();
        pcd_local.points[index].y = transformed_point.getOrigin().y();
        pcd_local.points[index].z = height_distribution(generator);
        pcd_local.channels[0].values[index] = cell_color;

        // Global observations.
        pcd_global.points[index].x = cell.first + width_distribution(generator);
        pcd_global.points[index].y = cell.second + width_distribution(generator);
        pcd_global.points[index].z = height_distribution(generator);
        pcd_global.channels[0].values[index] = cell_color;
      }

      index++;
    }
  }

  if (pcd_local.channels[0].values.size() > 1)
  {
    pub_signals_local_.publish(pcd_local);
  }
  if (pcd_global.channels[0].values.size() > 1)
  {
    pub_signals_global_.publish(pcd_global);
  }

  q_obstacles_.pop();
}
/* * Odometry (encoders & IMU) */ void GazeboRosKobuki::updateOdometry(common::Time& step_time) { std::string odom_frame = gazebo_ros_->resolveTF("odom"); std::string base_frame = gazebo_ros_->resolveTF("base_footprint"); odom_.header.stamp = joint_state_.header.stamp; odom_.header.frame_id = odom_frame; odom_.child_frame_id = base_frame; // Distance travelled by main wheels double d1, d2; double dr, da; d1 = d2 = 0; dr = da = 0; // 多分ここがencoder 直接速度を取得してる // (wheel_diam_ / 2)= 直径/2= 半径 // 弧度法なら (theta/2*pi)*radius d1 = step_time.Double() * (wheel_diam_ / 2) * joints_[LEFT]->GetVelocity(0); d2 = step_time.Double() * (wheel_diam_ / 2) * joints_[RIGHT]->GetVelocity(0); // Can see NaN values here, just zero them out if needed if (isnan(d1)) { ROS_WARN_STREAM_THROTTLE(0.1, "Gazebo ROS Kobuki plugin: NaN in d1. Step time: " << step_time.Double() << ", WD: " << wheel_diam_ << ", velocity: " << joints_[LEFT]->GetVelocity(0)); d1 = 0; } if (isnan(d2)) { ROS_WARN_STREAM_THROTTLE(0.1, "Gazebo ROS Kobuki plugin: NaN in d2. Step time: " << step_time.Double() << ", WD: " << wheel_diam_ << ", velocity: " << joints_[RIGHT]->GetVelocity(0)); d2 = 0; } // 参照 // http://www.mech.tohoku-gakuin.ac.jp/rde/contents/course/robotics/wheelrobot.html // もし旋回ならd1+d2が0になって、並進距離は0になる? dr = (d1 + d2) / 2; // 台車中心の速度 // wheel_sep_は2車輪の距離 da = (d2 - d1) / wheel_sep_; // 台車の旋回速度 // std::cout << "omega[rad/s]: " << da << std::endl; // imuから直接旋回速度を取得してる // Just as in the Kobuki driver, the angular velocity is taken directly from the IMU // vel_angular_ = imu_->GetAngularVelocity(); // Compute odometric pose odom_pose_[0] += dr * cos( odom_pose_[2] ); odom_pose_[1] += dr * sin( odom_pose_[2] ); // original // odom_pose_[2] += vel_angular_.z * step_time.Double(); // 車輪速度から算出した、旋回速度。の積分 odom_pose_[2] += da; // Compute odometric instantaneous velocity odom_vel_[0] = dr / step_time.Double(); odom_vel_[1] = 0.0; // odom_vel_[2] = vel_angular_.z; odom_.pose.pose.position.x = odom_pose_[0]; odom_.pose.pose.position.y = odom_pose_[1]; odom_.pose.pose.position.z = 0; tf::Quaternion qt; qt.setEuler(0,0,odom_pose_[2]); odom_.pose.pose.orientation.x = qt.getX(); odom_.pose.pose.orientation.y = qt.getY(); odom_.pose.pose.orientation.z = qt.getZ(); odom_.pose.pose.orientation.w = qt.getW(); // odom_.pose.covariance[0] = 0; // x // odom_.pose.covariance[7] = 0; // y // odom_.pose.covariance[35] = 0 ; // yaw odom_.pose.covariance[0] = 0.1; // x odom_.pose.covariance[7] = 0.1; // y odom_.pose.covariance[35] = 0.05 ; // yaw // odom_.pose.covariance[35] = 0.05; // yaw odom_.pose.covariance[14] = 1e6; odom_.pose.covariance[21] = 1e6; odom_.pose.covariance[28] = 1e6; odom_.twist.twist.linear.x = odom_vel_[0]; odom_.twist.twist.linear.y = 0; odom_.twist.twist.linear.z = 0; odom_.twist.twist.angular.x = 0; odom_.twist.twist.angular.y = 0; odom_.twist.twist.angular.z = odom_vel_[2]; odom_pub_.publish(odom_); // publish odom message if (publish_tf_) { odom_tf_.header = odom_.header; odom_tf_.child_frame_id = odom_.child_frame_id; odom_tf_.transform.translation.x = odom_.pose.pose.position.x; odom_tf_.transform.translation.y = odom_.pose.pose.position.y; odom_tf_.transform.translation.z = odom_.pose.pose.position.z; odom_tf_.transform.rotation = odom_.pose.pose.orientation; tf_broadcaster_.sendTransform(odom_tf_); } }
// Main callback controlling the output rate
void jointStateCallback(const sensor_msgs::JointStateConstPtr& msg)
{
  const sensor_msgs::JointState* data = msg.get();
  int es = data->effort.size();
  int ps = data->position.size();
  int name = data->name.size();

  // 15 is not fixed (la,ra,torso)
  int joint_state_size;
  if (simulation)
    joint_state_size = 21;
  else
    joint_state_size = 14;

  // Check joint state message size
  if (name != joint_state_size)
    return;

  if (es != ps)
  {
    ROS_WARN_STREAM_THROTTLE(1, "Effort and position sizes unequal! EffortSize: " << es << " PosSize: " << ps);
    return;
  }

  // Collect the right joint angles by name
  int counter = 0;
  for (unsigned int i = 0; i < es; ++i)
  {
    sprintf(buf, "right_arm_%d_joint", counter);
    if (!strcmp(buf, data->name[i].c_str()))
    {
      // DLR gravity is negative!!
      if (negate_torque)
      {
        read_torque[counter] = -data->effort[i];
      }
      else
      {
        read_torque[counter] = data->effort[i];
      }
      read_jpos[counter] = data->position[i];
      ++counter;
    }
    // If cannot find all the joints
    if (counter == numdof)
    {
      break;
    }
  }

  // If found more or less joints than expected!
  if (counter != numdof)
  {
    ROS_WARN_STREAM_THROTTLE(1, "Only found " << counter << " DOF in message. Expected " << numdof);
  }
  else
  {
    // All OK. Compute and send.
    mRobot->setJoints(read_jpos);
    mRobot->getEEPose(ee_pose);

    // Use estimated FT of ee
    if (simulation)
    {
      computeFT(ee_ft);
      sendFT(ee_ft);
    }
    else
    {
      pub_ft.publish(msg_ft);
    }

    // Send estimated ee pose
    sendPose(ee_pose);
  }
}