const float Utility::findAngleFromAToB(const tf::Vector3 a, const tf::Vector3 b) const { std::vector<float> a_vec; a_vec.push_back(a.getX()); a_vec.push_back(a.getY()); std::vector<float> b_vec; b_vec.push_back(b.getX()); b_vec.push_back(b.getY()); return findAngleFromAToB(a_vec, b_vec); }
float getVel(tf::Vector3 & pose1, tf::Vector3 & pose2, const ros::Duration & dur) { float vel; float delta_x = fabs(pose1.getX() - pose2.getX()); float delta_y = fabs(pose1.getY() - pose2.getY()); float dist = sqrt(delta_x*delta_x + delta_y*delta_y); vel = dist / dur.toSec(); return vel; }
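A minimal usage sketch for getVel, assuming only ROS time types and two consecutive planar pose samples (all values here are illustrative):

#include <ros/ros.h>
#include <tf/transform_datatypes.h>

void example_getVel() {
  tf::Vector3 prev(1.0, 2.0, 0.0);        // pose at time t
  tf::Vector3 curr(1.5, 2.5, 0.0);        // pose at time t + 0.1 s
  ros::Duration dt(0.1);
  float v = getVel(prev, curr, dt);       // ~7.07 m/s: sqrt(0.5^2 + 0.5^2) / 0.1
  ROS_INFO("planar speed = %.3f m/s", v);
}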
std::vector<geometry_msgs::Point> MoveBaseDoorAction::getOrientedFootprint(const tf::Vector3 pos, double theta_cost) { double cos_th = cos(theta_cost); double sin_th = sin(theta_cost); std::vector<geometry_msgs::Point> oriented_footprint; for(unsigned int i = 0; i < footprint_.size(); ++i){ geometry_msgs::Point new_pt; new_pt.x = pos.x() + (footprint_[i].x * cos_th - footprint_[i].y * sin_th); new_pt.y = pos.y() + (footprint_[i].x * sin_th + footprint_[i].y * cos_th); oriented_footprint.push_back(new_pt); } return oriented_footprint; }
tf::Vector3 JPCT_Math::rotate(tf::Vector3 vec, tf::Matrix3x3& mat) { float xr = vec.getX() * mat[0][0] + vec.getY() * mat[1][0] + vec.getZ() * mat[2][0]; float yr = vec.getX() * mat[0][1] + vec.getY() * mat[1][1] + vec.getZ() * mat[2][1]; float zr = vec.getX() * mat[0][2] + vec.getY() * mat[1][2] + vec.getZ() * mat[2][2]; vec.setX(xr); vec.setY(yr); vec.setZ(zr); return vec; }
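Note that rotate() multiplies the vector on the left of the matrix (row-vector convention), which in the usual column-vector convention equals mat.transpose() * vec. A small cross-check, assuming rotate() is reachable as a static or free helper exactly as declared above:

#include <cassert>
#include <tf/transform_datatypes.h>

void check_rotate_convention() {
  tf::Matrix3x3 m;
  m.setRPY(0.1, 0.2, 0.3);                      // arbitrary rotation
  tf::Vector3 v(1.0, 2.0, 3.0);
  tf::Vector3 a = JPCT_Math::rotate(v, m);      // row-vector product v * m
  tf::Vector3 b = m.transpose() * v;            // equivalent column-vector form
  assert((a - b).length() < 1e-5);              // float-precision agreement
}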
double getMinimumDistance(tf::Vector3 position, nav_msgs::GridCells grid) { double distance_sq=-1; int size = grid.cells.size(); if (size == 0) return HUGE_VAL; /* empty grid: no known obstacles; avoids returning sqrt(-1) below */ double my_x=position.getX(), my_y=position.getY(); double stop_radius_sq = stop_radius*stop_radius; for (int i=0; i<size; i++) { double d_sq = pow(grid.cells[i].x-my_x,2)+pow(grid.cells[i].y-my_y,2); if (distance_sq==-1 || d_sq<distance_sq) distance_sq=d_sq; if (d_sq<=stop_radius_sq) return 0; } return sqrt(distance_sq); }
double DistanceFromPlane( const tf::Vector3& plane_normal, const tf::Vector3& plane_point, const tf::Vector3& point) { return plane_normal.normalized().dot(point - plane_point); }
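DistanceFromPlane returns a signed distance, positive on the side the normal points toward, and the normal need not be unit length since it is normalized inside. A quick sanity check, assuming the helper above is linked in:

#include <cassert>
#include <cmath>
#include <tf/transform_datatypes.h>

void check_distance_from_plane() {
  tf::Vector3 normal(0, 0, 2);                  // non-unit normal is fine
  tf::Vector3 on_plane(0, 0, 1);                // a point on the plane z = 1
  assert(std::fabs(DistanceFromPlane(normal, on_plane, tf::Vector3(5, -3, 4)) - 3.0) < 1e-9);
  assert(std::fabs(DistanceFromPlane(normal, on_plane, tf::Vector3(0, 0, 0)) + 1.0) < 1e-9);
}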
void CameraTransformOptimization::calculateMarkerError(CalibrationState state, tf::Vector3 markerPoint, float& error) { error = 0; for (int i = 0; i < this->measurePoints.size(); i++) { float currentError = 0; MeasurePoint& current = this->measurePoints[i]; tf::Transform opticalToFixed = current.opticalToFixed(state); tf::Vector3 transformedPoint = opticalToFixed * current.measuredPosition; currentError += pow(markerPoint.x() - transformedPoint.x(), 2); currentError += pow(markerPoint.y() - transformedPoint.y(), 2); currentError += pow(markerPoint.z() - transformedPoint.z(), 2); error += currentError; } }
double DistanceFromLineSegment( const tf::Vector3& line_start, const tf::Vector3& line_end, const tf::Vector3& point) { return point.distance(ProjectToLineSegment(line_start, line_end, point)); }
bool Hand_filter::jumped(const tf::Vector3& p_current,const tf::Vector3& p_previous, const float threashold) const{ if(p_current.distance(p_previous) < threashold){ return false; }else{ return true; } }
void MTMHaptics::convert_bodyForcetoSpatialForce(geometry_msgs::WrenchStamped &body_wrench){ visualize_haptic_force(body_force_pub); rot_quat.setX(cur_mtm_pose.orientation.x); rot_quat.setY(cur_mtm_pose.orientation.y); rot_quat.setZ(cur_mtm_pose.orientation.z); rot_quat.setW(cur_mtm_pose.orientation.w); F7wrt0.setValue(body_wrench.wrench.force.x, body_wrench.wrench.force.y, body_wrench.wrench.force.z); rot_matrix.setRotation(rot_quat); F0wrt7 = rot_matrix.transpose() * F7wrt0; body_wrench.wrench.force.x = F0wrt7.x(); body_wrench.wrench.force.y = F0wrt7.y(); body_wrench.wrench.force.z = F0wrt7.z(); visualize_haptic_force(spatial_force_pub); }
void Jumps::update(tf::Vector3& origin,tf::Quaternion& orientation){ if(bFirst){ origin_buffer.push_back(origin); orientation_buffer.push_back(orientation); if(origin_buffer.size() == origin_buffer.capacity()){ bFirst = false; ROS_INFO("====== jump filter full ======"); } }else{ origin_tmp = origin_buffer[origin_buffer.size()-1]; orientation_tmp = orientation_buffer[orientation_buffer.size()-1]; if(bDebug){ std::cout<< "=== jump debug === " << std::endl; std::cout<< "p : " << origin.x() << "\t" << origin.y() << "\t" << origin.z() << std::endl; std::cout<< "p_tmp: " << origin_tmp.x() << "\t" << origin_tmp.y() << "\t" << origin_tmp.z() << std::endl; std::cout<< "p_dis: " << origin.distance(origin_tmp) << std::endl; std::cout<< "q : " << orientation.x() << "\t" << orientation.y() << "\t" << orientation.z() << "\t" << orientation.w() << std::endl; std::cout<< "q_tmp: " << orientation_tmp.x() << "\t" << orientation_tmp.y() << "\t" << orientation_tmp.z() << "\t" << orientation_tmp.w() << std::endl; std::cout<< "q_dis: " << dist(orientation,orientation_tmp) << std::endl; } /// Position jump if(jumped(origin,origin_tmp,origin_threashold)){ ROS_INFO("position jumped !"); origin = origin_tmp; // exit(0); }else{ origin_buffer.push_back(origin); } /// Orientation jump if(jumped(orientation,orientation_tmp,orientation_threashold)){ ROS_INFO("orientation jumped !"); orientation = orientation_tmp; //exit(0); }else{ orientation_buffer.push_back(orientation); } } }
tf::Matrix3x3 GetTfRotationMatrix(tf::Vector3 xaxis, tf::Vector3 zaxis) { tf::Matrix3x3 m; tf::Vector3 yaxis = zaxis.cross(xaxis); m.setValue(xaxis.getX(), yaxis.getX(), zaxis.getX(), xaxis.getY(), yaxis.getY(), zaxis.getY(), xaxis.getZ(), yaxis.getZ(), zaxis.getZ()); return m; }
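GetTfRotationMatrix places xaxis, yaxis = zaxis x xaxis, and zaxis as the columns of the matrix, so both inputs are expected to be unit length and perpendicular. A short sketch, assuming the helper above:

#include <tf/transform_datatypes.h>

void example_frame_from_axes() {
  tf::Vector3 xaxis(0, 1, 0);                   // the new frame's x axis points along world +Y
  tf::Vector3 zaxis(0, 0, 1);                   // z stays up
  tf::Matrix3x3 m = GetTfRotationMatrix(xaxis, zaxis);
  double roll, pitch, yaw;
  m.getRPY(roll, pitch, yaw);                   // expect roll ~ 0, pitch ~ 0, yaw ~ +pi/2
}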
bool HapticsPSM::has_normal_changed(tf::Vector3 &v1, tf::Vector3 &v2){ tfScalar angle; angle = v1.angle(v2); if(angle > coll_psm.epsilon){ return true; //The new normal has changed } else{ return false; //The new normal has not changed } }
/* Dynamic reconfigure callback */ void BeaconKFNode::configCallback(beacon_localizer::beacon_localizer_paramsConfig &config, uint32_t level) { double platform_angle; int platform_index; _reference_coords[0] = config.reference_x; _reference_coords[1] = config.reference_y; //get platform orientation in degrees, convert to reference system platform_angle = config.platform_orientation; platform_angle = platform_angle*(M_PI/180); platform_angle = platform_angle + atan2(_reference_coords[1], _reference_coords[0]); /* atan2 keeps the reference bearing in the correct quadrant and handles reference_x == 0 */ _platform_orientation = tf::createQuaternionFromYaw(platform_angle); platform_index = config.platform_index; _platform_origin.setX(_platform_x_coords[platform_index]); _platform_origin.setY(_platform_y_coords[platform_index]); }
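The switch to atan2 above matters when the reference point can sit in any quadrant; a minimal, self-contained illustration of the difference (plain atan collapses opposite quadrants):

#include <cmath>

void example_reference_bearing() {
  double x = -1.0, y = 1.0;                                 // reference at (-1, 1)
  double bearing_atan2 = std::atan2(y, x) * 180.0 / M_PI;   //  135 degrees (correct)
  double bearing_atan  = std::atan(y / x) * 180.0 / M_PI;   //  -45 degrees (wrong quadrant)
  (void)bearing_atan2; (void)bearing_atan;
}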
int extractFrame (const pcl::ModelCoefficients& coeffs, const ARPoint& p1, const ARPoint& p2, const ARPoint& p3, const ARPoint& p4, tf::Matrix3x3 &retmat) { // Get plane coeffs and project points onto the plane double a=0, b=0, c=0, d=0; if(getCoeffs(coeffs, &a, &b, &c, &d) < 0) return -1; const tf::Vector3 q1 = project(p1, a, b, c, d); const tf::Vector3 q2 = project(p2, a, b, c, d); const tf::Vector3 q3 = project(p3, a, b, c, d); const tf::Vector3 q4 = project(p4, a, b, c, d); // Make sure points aren't the same so things are well-defined if((q2-q1).length() < 1e-3) return -1; // (inverse) matrix with the given properties const tf::Vector3 v = (q2-q1).normalized(); const tf::Vector3 n(a, b, c); const tf::Vector3 w = -v.cross(n); tf::Matrix3x3 m(v[0], v[1], v[2], w[0], w[1], w[2], n[0], n[1], n[2]); // Possibly flip things based on third point const tf::Vector3 diff = (q4-q3).normalized(); //ROS_INFO_STREAM("w = " << w << " and d = " << diff); if (w.dot(diff)<0) { //ROS_INFO_STREAM("Flipping normal based on p3. Current value: " << m); m[1] = -m[1]; m[2] = -m[2]; //ROS_INFO_STREAM("New value: " << m); } // Invert and return retmat = m.inverse(); //cerr << "Frame is " << retmat << endl; return 0; }
/********** callback for the cmd velocity from the autonomy **********/ void cmd_vel_callback(const geometry_msgs::Twist& msg) { watchdogTimer.stop(); error.setValue(msg.linear.x - body_vel.linear.x, msg.linear.y - body_vel.linear.y, msg.linear.z - body_vel.linear.z); //std::cout << "error x: " << error.getX() << " y: " << error.getY() << " z: " << error.getZ() << std::endl; //std::cout << std::abs(curr_body_vel_time.toSec() - last_body_vel_time.toSec()) << std::endl; error_yaw = msg.angular.z - body_vel.angular.z; //std::cout << "error yaw: " << error_yaw << std::endl; // if some time has passed between the last body velocity time and the current body velocity time then will calculate the (feed forward PD) if (std::abs(curr_body_vel_time.toSec() - last_body_vel_time.toSec()) > 0.00001) { errorDot = (1/(curr_body_vel_time - last_body_vel_time).toSec()) * (error - last_error); //std::cout << "errordot x: " << errorDot.getX() << " y: " << errorDot.getY() << " z: " << errorDot.getZ() << std::endl; errorDot_yaw = (1/(curr_body_vel_time - last_body_vel_time).toSec()) * (error_yaw - last_error_yaw); //std::cout << "error dot yaw " << errorDot_yaw << std::endl; velocity_command.linear.x = cap_vel_auton(kx*msg.linear.x + (kp*error).getX() + (kd*errorDot).getX()); velocity_command.linear.y = cap_vel_auton(ky*msg.linear.y + (kp*error).getY() + (kd*errorDot).getY()); velocity_command.linear.z = cap_vel_auton(kz*msg.linear.z + (kp*error).getZ() + (kd*errorDot).getZ()); velocity_command.angular.z = -1*cap_vel_auton(kyaw*msg.angular.z + kp_yaw*error_yaw + kd_yaw*errorDot_yaw); // z must be switched because bebop driver http://bebop-autonomy.readthedocs.org/en/latest/piloting.html } last_body_vel_time = curr_body_vel_time;// update last time body velocity was recieved last_error = error; last_error_yaw = error_yaw; error_gm.linear.x = error.getX(); error_gm.linear.y = error.getY(); error_gm.linear.z = error.getZ(); error_gm.angular.z = error_yaw; errorDot_gm.linear.x = errorDot.getX(); errorDot_gm.linear.y = errorDot.getY(); errorDot_gm.linear.z = errorDot.getZ(); errorDot_gm.angular.z = kyaw*msg.angular.z + kp_yaw*error_yaw + kd_yaw*errorDot_yaw; error_pub.publish(error_gm); errorDot_pub.publish(errorDot_gm); if (start_autonomous) { recieved_command_from_tracking = true; } watchdogTimer.start(); }
GridRayTrace::GridRayTrace(tf::Vector3 start, tf::Vector3 end, const OccupancyGrid& grid_ref) : _x_store(), _y_store() { //Transform (x,y) into map frame start = grid_ref.toGridFrame(start); end = grid_ref.toGridFrame(end); double x0 = start.getX(), y0 = start.getY(); double x1 = end.getX(), y1 = end.getY(); //ROS_WARN("start: (%f, %f) -> end: (%f, %f)", x0, y0, x1, y1); bresenham ( floor(x0), floor(y0), floor(x1), floor(y1), _x_store, _y_store ); _cur_idx = 0; _max_idx = std::min(_x_store.size(), _y_store.size()); }
bool planning_environment::configureForAttachedBodyMask(planning_models::KinematicState& state, planning_environment::CollisionModels* cm, tf::TransformListener& tf, const std::string& sensor_frame, const ros::Time& sensor_time, tf::Vector3& sensor_pos) { state.setKinematicStateToDefault(); cm->bodiesLock(); const std::map<std::string, std::map<std::string, bodies::BodyVector*> >& link_att_objects = cm->getLinkAttachedObjects(); if(link_att_objects.empty()) { cm->bodiesUnlock(); return false; } planning_environment::updateAttachedObjectBodyPoses(cm, state, tf); sensor_pos.setValue(0.0,0.0,0.0); // compute the origin of the sensor in the frame of the cloud if (!sensor_frame.empty()) { std::string err; try { tf::StampedTransform transf; tf.lookupTransform(cm->getWorldFrameId(), sensor_frame, sensor_time, transf); sensor_pos = transf.getOrigin(); } catch(tf::TransformException& ex) { ROS_ERROR("Unable to lookup transform from %s to %s. Exception: %s", sensor_frame.c_str(), cm->getWorldFrameId().c_str(), ex.what()); sensor_pos.setValue(0, 0, 0); } } cm->bodiesUnlock(); return true; }
void MetalDetectorCallback(const std_msgs::Float32 v) { static int sMarkerID=0; if(v.data>0.95) // Mine detected { //ROS_INFO("Found a mine"); // Check whether a mine at the same position has already been found for(auto it = m_vMinesPositions.begin(); it!= m_vMinesPositions.end(); ++it) { if(squaredDistance2D(it->x(), it->y(), m_WorldSpaceToolPosition.x(), m_WorldSpaceToolPosition.y())<0.04) return; } //ROS_INFO("Publishing marker"); visualization_msgs::Marker m; m.header.stamp = m_MsgHeaderStamp; m.header.frame_id = "/world"; m.ns = "mine"; m.id = sMarkerID++; m_vMinesPositions.push_back(m_WorldSpaceToolPosition); m.type = visualization_msgs::Marker::CYLINDER; m.action = visualization_msgs::Marker::ADD; m.pose.position.x = m_WorldSpaceToolPosition.x(); m.pose.position.y = m_WorldSpaceToolPosition.y(); m.pose.position.z = m_WorldSpaceToolPosition.z(); m.scale.x = 0.4; m.scale.y = 0.4; m.scale.z = 0.4; m.color.a = 0.5; m.color.r = 0.0; m.color.g = 1.0; m.color.b = 0.0; //m.frame_locked = true; // Finally publish the marker m_MineMarkerPublisher.publish(m); } }
tf::Matrix3x3 JPCT_Math::rotateAxis(tf::Vector3 axis, tf::Matrix3x3& original, float angle) { if(angle != lastRot) { lastRot = angle; lastSin = (float)sin((double)angle); lastCos = (float)cos((double)angle); } float c = lastCos; float s = lastSin; float t = 1.0F - c; axis = axis.normalized(); /* restore normalization: the Rodrigues construction below assumes a unit axis */ float x = axis.x(); float y = axis.y(); float z = axis.z(); tf::Matrix3x3 mat; mat.setIdentity(); float sy = s * y; float sx = s * x; float sz = s * z; float txy = t * x * y; float txz = t * x * z; float tyz = t * y * z; mat[0][0] = t * x * x + c; mat[1][0] = txy + sz; mat[2][0] = txz - sy; mat[0][1] = txy - sz; mat[1][1] = t * y * y + c; mat[2][1] = tyz + sx; mat[0][2] = txz + sy; mat[1][2] = tyz - sx; mat[2][2] = t * z * z + c; orthonormalize(mat); original *= mat; return original; }
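The matrix assembled in rotateAxis is the standard axis-angle (Rodrigues) rotation, so for a unit axis it should agree with tf's own axis-angle support; a small cross-check that uses only tf types:

#include <tf/transform_datatypes.h>

void example_axis_angle_rotation() {
  tf::Vector3 axis(0, 0, 1);
  tfScalar angle = 0.7;
  tf::Vector3 v(1, 0, 0);
  tf::Vector3 a = v.rotate(axis, angle);              // tf's built-in axis-angle rotation
  tf::Matrix3x3 r(tf::Quaternion(axis, angle));       // the same rotation as a matrix
  tf::Vector3 b = r * v;                              // a and b should match to float precision
  (void)a; (void)b;
}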
bool control(PID_control::controlserver::Request &req, PID_control::controlserver::Response &ret) { tf::vector3MsgToTF(req.target_error,sp); tf::transformMsgToTF(req.transform,pose); tf::vector3MsgToTF(req.velocity,vel); if (req.reset) { pParam=2,iParam=0,dParam=0,vParam=-2,cumul=0,lastE=0,pAlphaE=0,pBetaE=0,psp2=0,psp1=0,prevYaw=0; ROS_INFO("Controller reset"); } e = (pose*sp).z() - pose.getOrigin().z(); cumul=cumul+e; pv=pParam*e; thrust=5.335+pv+iParam*cumul+dParam*(e-lastE)+vel.z()*vParam; lastE=e; // Horizontal control: vx = pose*tf::Vector3(1,0,0); vy = pose*tf::Vector3(0,1,0); alphaE=(vy.z()-pose.getOrigin().z()); alphaCorr=0.25*alphaE+2.1*(alphaE-pAlphaE); betaE=(vx.z()-pose.getOrigin().z()); betaCorr=-0.25*betaE-2.1*(betaE-pBetaE); pAlphaE=alphaE; pBetaE=betaE; alphaCorr=alphaCorr+sp.y()*0.005+1*(sp.y()-psp2); betaCorr=betaCorr-sp.x()*0.005-1*(sp.x()-psp1); psp2=sp.y(); psp1=sp.x(); pose.getBasis().getRPY(t1, t2, yaw); rotCorr=yaw*0.1+2*(yaw-prevYaw); prevYaw = yaw; // Decide on the motor velocities: a=thrust*((double)1-alphaCorr+betaCorr+rotCorr); b=thrust*((double)1-alphaCorr-betaCorr-rotCorr); c=thrust*((double)1+alphaCorr-betaCorr+rotCorr); d=thrust*((double)1+alphaCorr+betaCorr-rotCorr); ret.a=a; ret.b=b; ret.c=c; ret.d=d; return true; /* the service handler must report a status; the original fell off the end without returning */ }
/** * @brief Creates a sphere RViz marker for publishing * @param color_g The color for the marker, in range of 0 to 1, 0 is pure red, 1 is pure green * @param sphere_size The radius of the sphere * @param pos The position of the marker on the map * @return A marker */ visualization_msgs::Marker prepareSphereMarker(double color_g, double sphere_size, tf::Vector3& pos) { visualization_msgs::Marker marker; marker.header.frame_id = "/map"; marker.header.stamp = ros::Time::now(); marker.ns = "wifi_heatmap"; marker.id = marker_id++; marker.type = visualization_msgs::Marker::SPHERE; // Set the marker action. Options are ADD, DELETE, and new in ROS Indigo: 3 (DELETEALL) marker.action = visualization_msgs::Marker::ADD; // Set the pose of the marker. This is a full 6DOF pose relative to the frame/time specified in the header marker.pose.position.x = pos.x(); marker.pose.position.y = pos.y(); marker.pose.position.z = pos.z(); marker.pose.orientation.x = 0.0; marker.pose.orientation.y = 0.0; marker.pose.orientation.z = 0.0; marker.pose.orientation.w = 1.0; // Set the scale of the marker marker.scale.x = sphere_size; marker.scale.y = sphere_size; marker.scale.z = sphere_size; // Set the color -- be sure to set alpha to something non-zero! marker.color.r = 1.0 - color_g; marker.color.g = color_g; marker.color.b = 0.0f; marker.color.a = 1.0; marker.lifetime = ros::Duration(); return marker; }
int farthestPoint(const tf::Vector3& point, const std::vector<tf::Vector3>& hand_positions) { double dist, max_distant = 0; size_t index = 0; for (size_t i = 0; i < hand_positions.size(); i++) { dist = point.distance(hand_positions[i]); if (dist > max_distant) { max_distant = dist; index = i; } } return index; }
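A small usage sketch for farthestPoint; note that an empty hand_positions vector yields index 0, so callers should check for emptiness first (values below are illustrative):

#include <vector>
#include <tf/transform_datatypes.h>

void example_farthest_point() {
  std::vector<tf::Vector3> hands;
  hands.push_back(tf::Vector3(0, 0, 0));
  hands.push_back(tf::Vector3(2, 0, 0));
  hands.push_back(tf::Vector3(1, 1, 0));
  int idx = farthestPoint(tf::Vector3(0, 0, 0), hands);   // distances 0, 2, sqrt(2) -> idx == 1
  (void)idx;
}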
void Hand_filter::align_quaternion_axis(tf::Quaternion& q_out, float &angle, const tf::Quaternion &q_in, tf::Vector3 target){ tf::Matrix3x3 R; R.setRotation(q_in); tf::Vector3 z_axis(R[0][2],R[1][2],R[2][2]); z_axis = z_axis.normalize(); tf::Vector3 c = z_axis.cross(target); angle = z_axis.dot(target); /* note: this stores the cosine of the angle between the axes, not the angle itself */ tf::Quaternion q_r; q_r.setX(c.x()); q_r.setY(c.y()); q_r.setZ(c.z()); q_r.setW(sqrt(z_axis.length2() * target.length2()) + angle); q_r.normalize(); /* the shortest-arc quaternion is not unit length until normalized */ q_out = q_r*q_in; }
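The q_r built above is the classic shortest-arc rotation between two vectors: cross product as the vector part, sqrt(|a|^2 |b|^2) + a.b as the scalar part, then normalize. The same construction as a standalone helper (a sketch assuming roughly unit inputs that are not anti-parallel):

#include <cmath>
#include <tf/transform_datatypes.h>

// Shortest-arc rotation taking a onto b; degenerate when a and b are anti-parallel.
tf::Quaternion shortestArc(const tf::Vector3& a, const tf::Vector3& b) {
  tf::Vector3 c = a.cross(b);
  tf::Quaternion q(c.x(), c.y(), c.z(), std::sqrt(a.length2() * b.length2()) + a.dot(b));
  q.normalize();                                  // the raw quaternion is not unit length
  return q;
}

void check_shortest_arc() {
  tf::Quaternion q = shortestArc(tf::Vector3(1, 0, 0), tf::Vector3(0, 1, 0));
  tf::Vector3 r = tf::quatRotate(q, tf::Vector3(1, 0, 0));   // expect ~(0, 1, 0)
  (void)r;
}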
tf::Vector3 BasePositionPid::updateControl(const tf::Vector3& commanded_pos, const tf::Vector3& actual_pos, double time_elapsed) { double err_x = actual_pos.x() - commanded_pos.x() ; double err_y = actual_pos.y() - commanded_pos.y() ; double err_w = angles::shortest_angular_distance(commanded_pos.z(), actual_pos.z()) ; tf::Vector3 velocity_cmd ; double odom_cmd_x = pid_x_.updatePid(err_x, time_elapsed) ; // Translation X in the odometric frame double odom_cmd_y = pid_y_.updatePid(err_y, time_elapsed) ; // Translation Y in the odometric frame // Rotate the translation commands so that they're in the base frame (instead of the odom frame) velocity_cmd.setX( odom_cmd_x*cos(actual_pos.z()) + odom_cmd_y*sin(actual_pos.z())) ; velocity_cmd.setY(-odom_cmd_x*sin(actual_pos.z()) + odom_cmd_y*cos(actual_pos.z())) ; velocity_cmd.setZ(pid_w_.updatePid(err_w, time_elapsed)) ; // Rotation command is the same in the Odom and Base frames return velocity_cmd ; }
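The setX/setY lines above are a plain 2-D rotation of the odom-frame command by -yaw. A worked number check with yaw = +pi/2, where an odom +x command must come out as a base-frame -y command (the robot, facing odom +y, moves to its right):

#include <cmath>

void check_odom_to_base_rotation() {
  double yaw = M_PI / 2.0;                               // robot heading in the odom frame
  double odom_cmd_x = 1.0, odom_cmd_y = 0.0;
  double base_cmd_x =  odom_cmd_x * std::cos(yaw) + odom_cmd_y * std::sin(yaw);   // ~0
  double base_cmd_y = -odom_cmd_x * std::sin(yaw) + odom_cmd_y * std::cos(yaw);   // ~-1
  (void)base_cmd_x; (void)base_cmd_y;
}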
Socket_one::Socket_one(std::string name, const tf::Vector3& origin, const tf::Vector3 &rpy,float scale){ //const std::string& name, const geo::fCVec3& C, const geo::fMat33& R, float r geo::fMat33 R; R.eye(); disk_radius = 0.02 * scale; hole_radius = 0.002 * scale; std::array<geo::Disk,3> holes; geo::fCVec3 position; position.zeros(); geo::Disk plate("plate",position,R,disk_radius); position.zeros(); position(0) = -0.01 * scale; holes[0] = geo::Disk("hole_left",position,R,hole_radius); position.zeros(); position(0) = 0.01 * scale; holes[1] = geo::Disk("hole_right",position,R,hole_radius); position.zeros(); position(1) = 0.005 * scale; holes[2] = geo::Disk("hole_up",position,R,hole_radius); wsocket = wobj::WSocket(name,plate,holes); geo::fCVec3 dim = {{0.07,0.07,0.05}}; dim = dim * scale; geo::fCVec3 orientation = {{0,0,0}}; position.zeros(); position(2) = -0.005/2 - (0.05 - 0.005)/2; wbox = wobj::WBox("box_socket_one",dim,position,orientation); // Transformation orientation(0) = rpy.x(); orientation(1) = rpy.y(); orientation(2) = rpy.z(); geo::fCVec3 T = {{origin.x(),origin.y(),origin.z()}}; wsocket.transform(T,orientation); wbox.transform(T,orientation); }
Point calcSquareNormGrad(tf::Vector3 bl,tf::Vector3 br,tf::Vector3 tr,tf::Vector3 tl){ //ROS_INFO("[calcSquareNormGrad]\tentering..."); double dx = ((bl.getZ() - br.getZ()) + (tl.getZ() - tr.getZ()))/2.0; double dy = ((tl.getZ() - bl.getZ()) + (tr.getZ() - br.getZ()))/2.0; double total_dist = sqrt(dx*dx + dy*dy); if(total_dist != 0){ Point p = Point(0.5*dx/total_dist, 0.5*dy/total_dist); return p; } //if we're on a plateau, just move a small distance towards the goal double xd = (g_pos.x - r_pos.x); double yd = (g_pos.y - r_pos.y); if(fabs(xd) > 1.0) xd = xd/fabs(xd); if(fabs(yd) > 1.0) yd = yd/fabs(yd); Point toreturn = Point(xd, yd); return toreturn; }
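The dx/dy terms are averaged left-minus-right and top-minus-bottom height differences, so the returned half-step points downhill. A worked check on a ramp rising toward +x, assuming Point is the project's own 2-D type used above:

#include <tf/transform_datatypes.h>

void check_square_norm_grad() {
  // Corner heights sampled from z = x over a unit cell.
  tf::Vector3 bl(0, 0, 0), br(1, 0, 1), tr(1, 1, 1), tl(0, 1, 0);
  Point step = calcSquareNormGrad(bl, br, tr, tl);
  // dx = ((0-1)+(0-1))/2 = -1, dy = ((0-0)+(1-1))/2 = 0, total_dist = 1,
  // so the expected step is (-0.5, 0): half a cell straight down the slope.
  (void)step;
}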
double getSafeVelocityLimit(tf::Vector3 vel) { //get current position + orientation // by listening to the transform from obstacles->header->frame_id to /base_link tf::StampedTransform transform; try { tf_listener->lookupTransform( obstacles.header.frame_id, "/base_link", ros::Time(0), transform); } catch (tf::TransformException& ex) { ROS_ERROR("Couldn't get current position: %s",ex.what()); return min_spd; } //estimate future position, current_position+cmd_vel*sometimeconst*orientation tf::Vector3 future_pos = transform.getOrigin(); tf::Vector3 v = vel.rotate(tf::Vector3(0,0,1),tf::getYaw(transform.getRotation())); future_pos=future_pos+timestep*v; //get minimum distance of future position if (!has_grid) { ROS_ERROR("Haven't received a grid yet so speed will be fully limited"); return min_spd; } double mindist = getMinimumDistance(future_pos,obstacles); ROS_DEBUG("got minimum distance %f",mindist); //calculate velocity scale based on mindist of futurepos // (min_spd at <= stop_radius, linear interpolation up to max_spd at >= inflation_radius) double speed_limit; if (mindist>inflation_radius) speed_limit=max_spd; else if (mindist<=stop_radius) speed_limit=min_spd; else speed_limit = min(min_spd+(max_spd-min_spd)*(mindist-stop_radius)/(inflation_radius-stop_radius),max_spd); //return it ROS_DEBUG_STREAM(" computed velocity scale: "<<speed_limit); return speed_limit; }
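The final branch is a clamped linear interpolation between min_spd at stop_radius and max_spd at inflation_radius; the same mapping isolated as a standalone helper, with illustrative numbers in the trailing comment:

// Clamped linear interpolation of the allowed speed versus obstacle distance.
double speedLimitForDistance(double dist, double stop_radius, double inflation_radius,
                             double min_spd, double max_spd) {
  if (dist <= stop_radius)      return min_spd;
  if (dist >= inflation_radius) return max_spd;
  double t = (dist - stop_radius) / (inflation_radius - stop_radius);   // 0..1
  return min_spd + (max_spd - min_spd) * t;
}
// e.g. stop_radius = 0.3, inflation_radius = 1.0, min_spd = 0.1, max_spd = 1.0:
// dist = 0.65 -> t = 0.5 -> 0.55 m/s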
bool checkCloud(const sensor_msgs::PointCloud2& cloud_msg, typename pcl::PointCloud<T>::Ptr hand_cloud, //typename pcl::PointCloud<T>::Ptr finger_cloud, const std::string& frame_id, tf::Vector3& hand_position, tf::Vector3& arm_position, tf::Vector3& arm_direction) { typename pcl::PointCloud<T>::Ptr cloud(new pcl::PointCloud<T>); pcl::fromROSMsg(cloud_msg, *cloud); if((cloud->points.size() < g_config.min_cluster_size) || (cloud->points.size() > g_config.max_cluster_size)) return false; pcl::PCA<T> pca; pca.setInputCloud(cloud); Eigen::Vector4f mean = pca.getMean(); if((mean.coeff(0) < g_config.min_x) || (mean.coeff(0) > g_config.max_x)) return false; if((mean.coeff(1) < g_config.min_y) || (mean.coeff(1) > g_config.max_y)) return false; if((mean.coeff(2) < g_config.min_z) || (mean.coeff(2) > g_config.max_z)) return false; Eigen::Vector3f eigen_value = pca.getEigenValues(); double ratio = eigen_value.coeff(0) / eigen_value.coeff(1); if((ratio < g_config.min_eigen_value_ratio) || (ratio > g_config.max_eigen_value_ratio)) return false; T search_point; Eigen::Matrix3f ev = pca.getEigenVectors(); Eigen::Vector3f main_axis(ev.coeff(0, 0), ev.coeff(1, 0), ev.coeff(2, 0)); main_axis.normalize(); arm_direction.setX(main_axis.coeff(0)); arm_direction.setY(main_axis.coeff(1)); arm_direction.setZ(main_axis.coeff(2)); arm_position.setX(mean.coeff(0)); arm_position.setY(mean.coeff(1)); arm_position.setZ(mean.coeff(2)); main_axis = (-main_axis * 0.3) + Eigen::Vector3f(mean.coeff(0), mean.coeff(1), mean.coeff(2)); search_point.x = main_axis.coeff(0); search_point.y = main_axis.coeff(1); search_point.z = main_axis.coeff(2); //find hand pcl::KdTreeFLANN<T> kdtree; kdtree.setInputCloud(cloud); //find the closet point from the serach_point std::vector<int> point_indeices(1); std::vector<float> point_distances(1); if ( kdtree.nearestKSearch (search_point, 1, point_indeices, point_distances) > 0 ) { //update search point search_point = cloud->points[point_indeices[0]]; //show seach point if(g_marker_array_pub.getNumSubscribers() != 0) { pushSimpleMarker(search_point.x, search_point.y, search_point.z, 1.0, 0, 0, 0.02, g_marker_id, g_marker_array, frame_id); } //hand point_indeices.clear(); point_distances.clear(); kdtree.radiusSearch(search_point, g_config.hand_lenght, point_indeices, point_distances); for (size_t i = 0; i < point_indeices.size (); ++i) { hand_cloud->points.push_back(cloud->points[point_indeices[i]]); hand_cloud->points.back().r = 255; hand_cloud->points.back().g = 0; hand_cloud->points.back().b = 0; } Eigen::Vector4f centroid; pcl::compute3DCentroid(*hand_cloud, centroid); if(g_marker_array_pub.getNumSubscribers() != 0) { pushSimpleMarker(centroid.coeff(0), centroid.coeff(1), centroid.coeff(2), 0.0, 1.0, 0, 0.02, g_marker_id, g_marker_array, frame_id); } hand_position.setX(centroid.coeff(0)); hand_position.setY(centroid.coeff(1)); hand_position.setZ(centroid.coeff(2)); #if 0 //fingers search_point.x = centroid.coeff(0); search_point.y = centroid.coeff(1); search_point.z = centroid.coeff(2); std::vector<int> point_indeices_inner; std::vector<float> point_distances_inner; kdtree.radiusSearch(search_point, 0.07, point_indeices_inner, point_distances_inner); std::vector<int> point_indeices_outter; std::vector<float> point_distances_outter; kdtree.radiusSearch(search_point, 0.1, point_indeices_outter, point_distances_outter); //ROS_INFO("before %d %d", point_indeices_inner.size(), point_indeices_outter.size()); std::vector<int>::iterator it; for(size_t i = 0; i < point_indeices_inner.size(); i++) { it 
= std::find(point_indeices_outter.begin(), point_indeices_outter.end(), point_indeices_inner[i]); if(it != point_indeices_outter.end()) { point_indeices_outter.erase(it); } } //ROS_INFO("after %d %d", point_indeices_inner.size(), point_indeices_outter.size()); //ROS_DEBUG_THROTTLE(1.0, "found %lu", point_indeices.size ()); for (size_t i = 0; i < point_indeices_outter.size (); ++i) { finger_cloud->points.push_back(cloud->points[point_indeices_outter[i]]); finger_cloud->points.back().r = 255; finger_cloud->points.back().g = 0; finger_cloud->points.back().b = 0; } #endif } else { return false; } if(g_marker_array_pub.getNumSubscribers() != 0) { pushEigenMarker<T>(pca, g_marker_id, g_marker_array, 0.1, frame_id); } return true; }
bool ArucoMapping::processImage(cv::Mat input_image,cv::Mat output_image) { aruco::MarkerDetector Detector; std::vector<aruco::Marker> temp_markers; //Set visibility flag to false for all markers for(size_t i = 0; i < num_of_markers_; i++) markers_[i].visible = false; // Save previous marker count marker_counter_previous_ = marker_counter_; // Detect markers Detector.detect(input_image,temp_markers,aruco_calib_params_,marker_size_); // If no marker found, print statement if(temp_markers.size() == 0) ROS_DEBUG("No marker found!"); //------------------------------------------------------ // FIRST MARKER DETECTED //------------------------------------------------------ if((temp_markers.size() > 0) && (first_marker_detected_ == false)) { //Set flag first_marker_detected_=true; // Detect lowest marker ID lowest_marker_id_ = temp_markers[0].id; for(size_t i = 0; i < temp_markers.size();i++) { if(temp_markers[i].id < lowest_marker_id_) lowest_marker_id_ = temp_markers[i].id; } ROS_DEBUG_STREAM("The lowest Id marker " << lowest_marker_id_ ); // Identify lowest marker ID with world's origin markers_[0].marker_id = lowest_marker_id_; markers_[0].geometry_msg_to_world.position.x = 0; markers_[0].geometry_msg_to_world.position.y = 0; markers_[0].geometry_msg_to_world.position.z = 0; markers_[0].geometry_msg_to_world.orientation.x = 0; markers_[0].geometry_msg_to_world.orientation.y = 0; markers_[0].geometry_msg_to_world.orientation.z = 0; markers_[0].geometry_msg_to_world.orientation.w = 1; // Relative position and Global position markers_[0].geometry_msg_to_previous.position.x = 0; markers_[0].geometry_msg_to_previous.position.y = 0; markers_[0].geometry_msg_to_previous.position.z = 0; markers_[0].geometry_msg_to_previous.orientation.x = 0; markers_[0].geometry_msg_to_previous.orientation.y = 0; markers_[0].geometry_msg_to_previous.orientation.z = 0; markers_[0].geometry_msg_to_previous.orientation.w = 1; // Transformation Pose to TF tf::Vector3 position; position.setX(0); position.setY(0); position.setZ(0); tf::Quaternion rotation; rotation.setX(0); rotation.setY(0); rotation.setZ(0); rotation.setW(1); markers_[0].tf_to_previous.setOrigin(position); markers_[0].tf_to_previous.setRotation(rotation); // Relative position of first marker equals Global position markers_[0].tf_to_world=markers_[0].tf_to_previous; // Increase count marker_counter_++; // Set sign of visibility of first marker markers_[0].visible=true; ROS_INFO_STREAM("First marker with ID: " << markers_[0].marker_id << " detected"); //First marker does not have any previous marker markers_[0].previous_marker_id = THIS_IS_FIRST_MARKER; } //------------------------------------------------------ // FOR EVERY MARKER DO //------------------------------------------------------ for(size_t i = 0; i < temp_markers.size();i++) { int index; int current_marker_id = temp_markers[i].id; //Draw marker convex, ID, cube and axis temp_markers[i].draw(output_image, cv::Scalar(0,0,255),2); aruco::CvDrawingUtils::draw3dCube(output_image,temp_markers[i], aruco_calib_params_); aruco::CvDrawingUtils::draw3dAxis(output_image,temp_markers[i], aruco_calib_params_); // Existing marker ? bool existing = false; int temp_counter = 0; while((existing == false) && (temp_counter < marker_counter_)) { if(markers_[temp_counter].marker_id == current_marker_id) { index = temp_counter; existing = true; ROS_DEBUG_STREAM("Existing marker with ID: " << current_marker_id << "found"); } temp_counter++; } //New marker ? 
if(existing == false) { index = marker_counter_; markers_[index].marker_id = current_marker_id; existing = true; ROS_DEBUG_STREAM("New marker with ID: " << current_marker_id << " found"); } // Change visibility flag of new marker for(size_t j = 0;j < marker_counter_; j++) { for(size_t k = 0;k < temp_markers.size(); k++) { if(markers_[j].marker_id == temp_markers[k].id) markers_[j].visible = true; } } //------------------------------------------------------ // For existing marker do //------------------------------------------------------ if((index < marker_counter_) && (first_marker_detected_ == true)) { markers_[index].current_camera_tf = arucoMarker2Tf(temp_markers[i]); markers_[index].current_camera_tf = markers_[index].current_camera_tf.inverse(); const tf::Vector3 marker_origin = markers_[index].current_camera_tf.getOrigin(); markers_[index].current_camera_pose.position.x = marker_origin.getX(); markers_[index].current_camera_pose.position.y = marker_origin.getY(); markers_[index].current_camera_pose.position.z = marker_origin.getZ(); const tf::Quaternion marker_quaternion = markers_[index].current_camera_tf.getRotation(); markers_[index].current_camera_pose.orientation.x = marker_quaternion.getX(); markers_[index].current_camera_pose.orientation.y = marker_quaternion.getY(); markers_[index].current_camera_pose.orientation.z = marker_quaternion.getZ(); markers_[index].current_camera_pose.orientation.w = marker_quaternion.getW(); } //------------------------------------------------------ // For new marker do //------------------------------------------------------ if((index == marker_counter_) && (first_marker_detected_ == true)) { markers_[index].current_camera_tf=arucoMarker2Tf(temp_markers[i]); tf::Vector3 marker_origin = markers_[index].current_camera_tf.getOrigin(); markers_[index].current_camera_pose.position.x = marker_origin.getX(); markers_[index].current_camera_pose.position.y = marker_origin.getY(); markers_[index].current_camera_pose.position.z = marker_origin.getZ(); tf::Quaternion marker_quaternion = markers_[index].current_camera_tf.getRotation(); markers_[index].current_camera_pose.orientation.x = marker_quaternion.getX(); markers_[index].current_camera_pose.orientation.y = marker_quaternion.getY(); markers_[index].current_camera_pose.orientation.z = marker_quaternion.getZ(); markers_[index].current_camera_pose.orientation.w = marker_quaternion.getW(); // Naming - TFs std::stringstream camera_tf_id; std::stringstream camera_tf_id_old; std::stringstream marker_tf_id_old; camera_tf_id << "camera_" << index; // Flag to keep info if any_known marker_visible in actual image bool any_known_marker_visible = false; // Array ID of markers, which position of new marker is calculated int last_marker_id; // Testing, if is possible calculate position of a new marker to old known marker for(int k = 0; k < index; k++) { if((markers_[k].visible == true) && (any_known_marker_visible == false)) { if(markers_[k].previous_marker_id != -1) { any_known_marker_visible = true; camera_tf_id_old << "camera_" << k; marker_tf_id_old << "marker_" << k; markers_[index].previous_marker_id = k; last_marker_id = k; } } } // New position can be calculated if(any_known_marker_visible == true) { // Generating TFs for listener for(char k = 0; k < 10; k++) { // TF from old marker and its camera broadcaster_.sendTransform(tf::StampedTransform(markers_[last_marker_id].current_camera_tf,ros::Time::now(), marker_tf_id_old.str(),camera_tf_id_old.str())); // TF from old camera to new camera 
broadcaster_.sendTransform(tf::StampedTransform(markers_[index].current_camera_tf,ros::Time::now(), camera_tf_id_old.str(),camera_tf_id.str())); ros::Duration(BROADCAST_WAIT_INTERVAL).sleep(); } // Calculate TF between two markers listener_->waitForTransform(marker_tf_id_old.str(),camera_tf_id.str(),ros::Time(0), ros::Duration(WAIT_FOR_TRANSFORM_INTERVAL)); try { broadcaster_.sendTransform(tf::StampedTransform(markers_[last_marker_id].current_camera_tf,ros::Time::now(), marker_tf_id_old.str(),camera_tf_id_old.str())); broadcaster_.sendTransform(tf::StampedTransform(markers_[index].current_camera_tf,ros::Time::now(), camera_tf_id_old.str(),camera_tf_id.str())); listener_->lookupTransform(marker_tf_id_old.str(),camera_tf_id.str(),ros::Time(0), markers_[index].tf_to_previous); } catch(tf::TransformException &e) { ROS_ERROR("Not able to lookup transform"); } // Save origin and quaternion of calculated TF marker_origin = markers_[index].tf_to_previous.getOrigin(); marker_quaternion = markers_[index].tf_to_previous.getRotation(); // If plane type selected roll, pitch and Z axis are zero if(space_type_ == "plane") { double roll, pitch, yaw; tf::Matrix3x3(marker_quaternion).getRPY(roll,pitch,yaw); roll = 0; pitch = 0; marker_origin.setZ(0); marker_quaternion.setRPY(pitch,roll,yaw); } markers_[index].tf_to_previous.setRotation(marker_quaternion); markers_[index].tf_to_previous.setOrigin(marker_origin); marker_origin = markers_[index].tf_to_previous.getOrigin(); markers_[index].geometry_msg_to_previous.position.x = marker_origin.getX(); markers_[index].geometry_msg_to_previous.position.y = marker_origin.getY(); markers_[index].geometry_msg_to_previous.position.z = marker_origin.getZ(); marker_quaternion = markers_[index].tf_to_previous.getRotation(); markers_[index].geometry_msg_to_previous.orientation.x = marker_quaternion.getX(); markers_[index].geometry_msg_to_previous.orientation.y = marker_quaternion.getY(); markers_[index].geometry_msg_to_previous.orientation.z = marker_quaternion.getZ(); markers_[index].geometry_msg_to_previous.orientation.w = marker_quaternion.getW(); // increase marker count marker_counter_++; // Invert and position of new marker to compute camera pose above it markers_[index].current_camera_tf = markers_[index].current_camera_tf.inverse(); marker_origin = markers_[index].current_camera_tf.getOrigin(); markers_[index].current_camera_pose.position.x = marker_origin.getX(); markers_[index].current_camera_pose.position.y = marker_origin.getY(); markers_[index].current_camera_pose.position.z = marker_origin.getZ(); marker_quaternion = markers_[index].current_camera_tf.getRotation(); markers_[index].current_camera_pose.orientation.x = marker_quaternion.getX(); markers_[index].current_camera_pose.orientation.y = marker_quaternion.getY(); markers_[index].current_camera_pose.orientation.z = marker_quaternion.getZ(); markers_[index].current_camera_pose.orientation.w = marker_quaternion.getW(); // Publish all TFs and markers publishTfs(false); } } //------------------------------------------------------ // Compute global position of new marker //------------------------------------------------------ if((marker_counter_previous_ < marker_counter_) && (first_marker_detected_ == true)) { // Publish all TF five times for listener for(char k = 0; k < 5; k++) publishTfs(false); std::stringstream marker_tf_name; marker_tf_name << "marker_" << index; listener_->waitForTransform("world",marker_tf_name.str(),ros::Time(0), ros::Duration(WAIT_FOR_TRANSFORM_INTERVAL)); try { 
listener_->lookupTransform("world",marker_tf_name.str(),ros::Time(0), markers_[index].tf_to_world); } catch(tf::TransformException &e) { ROS_ERROR("Not able to lookup transform"); } // Saving TF to Pose const tf::Vector3 marker_origin = markers_[index].tf_to_world.getOrigin(); markers_[index].geometry_msg_to_world.position.x = marker_origin.getX(); markers_[index].geometry_msg_to_world.position.y = marker_origin.getY(); markers_[index].geometry_msg_to_world.position.z = marker_origin.getZ(); tf::Quaternion marker_quaternion=markers_[index].tf_to_world.getRotation(); markers_[index].geometry_msg_to_world.orientation.x = marker_quaternion.getX(); markers_[index].geometry_msg_to_world.orientation.y = marker_quaternion.getY(); markers_[index].geometry_msg_to_world.orientation.z = marker_quaternion.getZ(); markers_[index].geometry_msg_to_world.orientation.w = marker_quaternion.getW(); } } //------------------------------------------------------ // Compute which of visible markers is the closest to the camera //------------------------------------------------------ bool any_markers_visible=false; int num_of_visible_markers=0; if(first_marker_detected_ == true) { double minimal_distance = INIT_MIN_SIZE_VALUE; for(int k = 0; k < num_of_markers_; k++) { double a,b,c,size; // If marker is visible, distance is calculated if(markers_[k].visible==true) { a = markers_[k].current_camera_pose.position.x; b = markers_[k].current_camera_pose.position.y; c = markers_[k].current_camera_pose.position.z; size = std::sqrt((a * a) + (b * b) + (c * c)); if(size < minimal_distance) { minimal_distance = size; closest_camera_index_ = k; } any_markers_visible = true; num_of_visible_markers++; } } } //------------------------------------------------------ // Publish all known markers //------------------------------------------------------ if(first_marker_detected_ == true) publishTfs(true); //------------------------------------------------------ // Compute global camera pose //------------------------------------------------------ if((first_marker_detected_ == true) && (any_markers_visible == true)) { std::stringstream closest_camera_tf_name; closest_camera_tf_name << "camera_" << closest_camera_index_; listener_->waitForTransform("world",closest_camera_tf_name.str(),ros::Time(0), ros::Duration(WAIT_FOR_TRANSFORM_INTERVAL)); try { listener_->lookupTransform("world",closest_camera_tf_name.str(),ros::Time(0), world_position_transform_); } catch(tf::TransformException &ex) { ROS_ERROR("Not able to lookup transform"); } // Saving TF to Pose const tf::Vector3 marker_origin = world_position_transform_.getOrigin(); world_position_geometry_msg_.position.x = marker_origin.getX(); world_position_geometry_msg_.position.y = marker_origin.getY(); world_position_geometry_msg_.position.z = marker_origin.getZ(); tf::Quaternion marker_quaternion = world_position_transform_.getRotation(); world_position_geometry_msg_.orientation.x = marker_quaternion.getX(); world_position_geometry_msg_.orientation.y = marker_quaternion.getY(); world_position_geometry_msg_.orientation.z = marker_quaternion.getZ(); world_position_geometry_msg_.orientation.w = marker_quaternion.getW(); } //------------------------------------------------------ // Publish all known markers //------------------------------------------------------ if(first_marker_detected_ == true) publishTfs(true); //------------------------------------------------------ // Publish custom marker message //------------------------------------------------------ aruco_mapping::ArucoMarker 
marker_msg; if((any_markers_visible == true)) { marker_msg.header.stamp = ros::Time::now(); marker_msg.header.frame_id = "world"; marker_msg.marker_visibile = true; marker_msg.num_of_visible_markers = num_of_visible_markers; marker_msg.global_camera_pose = world_position_geometry_msg_; marker_msg.marker_ids.clear(); marker_msg.global_marker_poses.clear(); for(size_t j = 0; j < marker_counter_; j++) { if(markers_[j].visible == true) { marker_msg.marker_ids.push_back(markers_[j].marker_id); marker_msg.global_marker_poses.push_back(markers_[j].geometry_msg_to_world); } } } else { marker_msg.header.stamp = ros::Time::now(); marker_msg.header.frame_id = "world"; marker_msg.num_of_visible_markers = num_of_visible_markers; marker_msg.marker_visibile = false; marker_msg.marker_ids.clear(); marker_msg.global_marker_poses.clear(); } // Publish custom marker msg marker_msg_pub_.publish(marker_msg); return true; }