void Hand_filter::update(tf::Vector3 p, tf::Quaternion& q){
    if(b_first){
        p_filter_buffer.push_back(p);
        q_filter_buffer.push_back(q);
        if(p_filter_buffer.size() == p_filter_buffer.capacity()){
            b_first = false;
            ROS_INFO("====== hand filter ======");
            // ROS_INFO("buffer full: %d",p_filter_buffer.size());
            ROS_INFO("p: %f %f %f",p.x(),p.y(),p.z());
            ROS_INFO("q: %f %f %f %f",q.x(),q.y(),q.z(),q.w());
            k_position(0) = p.x();
            k_position(1) = p.y();
            k_position(2) = p.z();
            kalman_filter.Init(k_position);
            q_tmp = q;
            p_tmp = p;
        }
    }else{

        /// Orientation filter
        if(jumped(q,q_tmp,q_threashold)){
            ROS_INFO("q jumped !");
            q = q_tmp;
        }
        q_attractor(q,up);
        q = q_tmp.slerp(q,0.1);

        /// Position filter
        if(!jumped(p,p_tmp,p_threashold)){
            k_measurement(0) = p.x();
            k_measurement(1) = p.y();
            k_measurement(2) = p.z();
        }else{
            ROS_INFO("p jumped !");
            k_measurement(0) = p_tmp.x();
            k_measurement(1) = p_tmp.y();
            k_measurement(2) = p_tmp.z();
        }

        kalman_filter.Update(k_measurement,0.001);
        kalman_filter.GetPosition(k_position);
        p.setValue(k_position(0),k_position(1),k_position(2));

        q_tmp = q;
        p_tmp = p;
    }
}
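// Usage sketch (an assumption, not part of the original source): driving Hand_filter::update()
// from a pose callback. The callback, topic type, and the hand_filter instance are hypothetical;
// only Hand_filter::update() above is real. Note that p is passed by value, so the Kalman-filtered
// position stays inside the filter, while the slerp-smoothed orientation is written back through
// the q reference.
void hand_pose_callback(const geometry_msgs::PoseStamped::ConstPtr& msg){
    static Hand_filter hand_filter;   // assumed default-constructible instance
    tf::Vector3 p(msg->pose.position.x, msg->pose.position.y, msg->pose.position.z);
    tf::Quaternion q(msg->pose.orientation.x, msg->pose.orientation.y,
                     msg->pose.orientation.z, msg->pose.orientation.w);
    hand_filter.update(p, q);         // q now holds the jump-rejected, slerp-smoothed orientation
}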
void HapticsPSM::compute_force_in_tip_frame(geometry_msgs::Wrench &wrench){
    rot_mat6wrt0.setRPY(group->getCurrentRPY().at(0),
                        group->getCurrentRPY().at(1),
                        group->getCurrentRPY().at(2));
    tf_vec3.setValue(wrench.force.x, wrench.force.y, wrench.force.z);
    tf_vec3 = rot_mat6wrt0.transpose() * tf_vec3;
    wrench.force.x = tf_vec3.getX();
    wrench.force.y = tf_vec3.getY();
    wrench.force.z = tf_vec3.getZ();
}
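// General-purpose variant (a sketch, assuming only tf types): rotating any base-frame vector into
// a local frame given that frame's orientation. This is the same R^T trick used above, factored
// into a standalone helper; the name rotate_into_frame is ours, not from the source.
tf::Vector3 rotate_into_frame(const tf::Quaternion& frame_orientation, const tf::Vector3& v_in_base){
    tf::Matrix3x3 R(frame_orientation);   // rotation of the local frame w.r.t. the base frame
    return R.transpose() * v_in_base;     // R^T = R^{-1} for rotation matrices
}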
bool planning_environment::configureForAttachedBodyMask(planning_models::KinematicState& state,
                                                         planning_environment::CollisionModels* cm,
                                                         tf::TransformListener& tf,
                                                         const std::string& sensor_frame,
                                                         const ros::Time& sensor_time,
                                                         tf::Vector3& sensor_pos)
{
  state.setKinematicStateToDefault();

  cm->bodiesLock();

  const std::map<std::string, std::map<std::string, bodies::BodyVector*> >& link_att_objects = cm->getLinkAttachedObjects();
  if(link_att_objects.empty()) {
    cm->bodiesUnlock();
    return false;
  }

  planning_environment::updateAttachedObjectBodyPoses(cm, state, tf);

  sensor_pos.setValue(0.0,0.0,0.0);

  // compute the origin of the sensor in the frame of the cloud
  if (!sensor_frame.empty()) {
    std::string err;
    try {
      tf::StampedTransform transf;
      tf.lookupTransform(cm->getWorldFrameId(), sensor_frame, sensor_time, transf);
      sensor_pos = transf.getOrigin();
    } catch(tf::TransformException& ex) {
      ROS_ERROR("Unable to lookup transform from %s to %s. Exception: %s",
                sensor_frame.c_str(), cm->getWorldFrameId().c_str(), ex.what());
      sensor_pos.setValue(0, 0, 0);
    }
  }

  cm->bodiesUnlock();
  return true;
}
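// Caller sketch (an assumption about how this might be used, not from the source): before
// self-filtering a point cloud, resolve the sensor origin in the world frame. The function
// name prepare_attached_body_mask and the state/cm/listener instances are hypothetical.
void prepare_attached_body_mask(planning_models::KinematicState& state,
                                planning_environment::CollisionModels* cm,
                                tf::TransformListener& listener,
                                const sensor_msgs::PointCloud2& cloud){
  tf::Vector3 sensor_pos;
  if(!planning_environment::configureForAttachedBodyMask(state, cm, listener,
                                                          cloud.header.frame_id,
                                                          cloud.header.stamp,
                                                          sensor_pos)){
    ROS_DEBUG("No attached bodies to mask against");
    return;
  }
  ROS_DEBUG("Sensor origin in world frame: %f %f %f",
            sensor_pos.x(), sensor_pos.y(), sensor_pos.z());
}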
void MTMHaptics::convert_bodyForcetoSpatialForce(geometry_msgs::WrenchStamped &body_wrench){
    visualize_haptic_force(body_force_pub);
    rot_quat.setX(cur_mtm_pose.orientation.x);
    rot_quat.setY(cur_mtm_pose.orientation.y);
    rot_quat.setZ(cur_mtm_pose.orientation.z);
    rot_quat.setW(cur_mtm_pose.orientation.w);
    F7wrt0.setValue(body_wrench.wrench.force.x, body_wrench.wrench.force.y, body_wrench.wrench.force.z);
    rot_matrix.setRotation(rot_quat);
    F0wrt7 = rot_matrix.transpose() * F7wrt0;
    body_wrench.wrench.force.x = F0wrt7.x();
    body_wrench.wrench.force.y = F0wrt7.y();
    body_wrench.wrench.force.z = F0wrt7.z();
    visualize_haptic_force(spatial_force_pub);
}
/********** callback for the cmd velocity from the autonomy **********/
void cmd_vel_callback(const geometry_msgs::Twist& msg)
{
    watchdogTimer.stop();

    error.setValue(msg.linear.x - body_vel.linear.x,
                   msg.linear.y - body_vel.linear.y,
                   msg.linear.z - body_vel.linear.z);
    //std::cout << "error x: " << error.getX() << " y: " << error.getY() << " z: " << error.getZ() << std::endl;
    //std::cout << std::abs(curr_body_vel_time.toSec() - last_body_vel_time.toSec()) << std::endl;
    error_yaw = msg.angular.z - body_vel.angular.z;
    //std::cout << "error yaw: " << error_yaw << std::endl;

    // if some time has passed since the last body velocity message, compute the feed-forward PD command
    if (std::abs(curr_body_vel_time.toSec() - last_body_vel_time.toSec()) > 0.00001)
    {
        errorDot = (1/(curr_body_vel_time - last_body_vel_time).toSec()) * (error - last_error);
        //std::cout << "errordot x: " << errorDot.getX() << " y: " << errorDot.getY() << " z: " << errorDot.getZ() << std::endl;
        errorDot_yaw = (1/(curr_body_vel_time - last_body_vel_time).toSec()) * (error_yaw - last_error_yaw);
        //std::cout << "error dot yaw " << errorDot_yaw << std::endl;

        velocity_command.linear.x = cap_vel_auton(kx*msg.linear.x + (kp*error).getX() + (kd*errorDot).getX());
        velocity_command.linear.y = cap_vel_auton(ky*msg.linear.y + (kp*error).getY() + (kd*errorDot).getY());
        velocity_command.linear.z = cap_vel_auton(kz*msg.linear.z + (kp*error).getZ() + (kd*errorDot).getZ());
        // yaw must be negated because of the bebop driver convention
        // http://bebop-autonomy.readthedocs.org/en/latest/piloting.html
        velocity_command.angular.z = -1*cap_vel_auton(kyaw*msg.angular.z + kp_yaw*error_yaw + kd_yaw*errorDot_yaw);
    }

    last_body_vel_time = curr_body_vel_time; // update the last time a body velocity was received
    last_error = error;
    last_error_yaw = error_yaw;

    error_gm.linear.x = error.getX();
    error_gm.linear.y = error.getY();
    error_gm.linear.z = error.getZ();
    error_gm.angular.z = error_yaw;
    errorDot_gm.linear.x = errorDot.getX();
    errorDot_gm.linear.y = errorDot.getY();
    errorDot_gm.linear.z = errorDot.getZ();
    errorDot_gm.angular.z = kyaw*msg.angular.z + kp_yaw*error_yaw + kd_yaw*errorDot_yaw;
    error_pub.publish(error_gm);
    errorDot_pub.publish(errorDot_gm);

    if (start_autonomous)
    {
        recieved_command_from_tracking = true;
    }

    watchdogTimer.start();
}
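// Sketch of the saturation helper referenced above (an assumption: the real cap_vel_auton is not
// shown in this file). It clamps an autonomous velocity command to a symmetric bound; the limit
// max_auton_vel and its value are hypothetical.
double cap_vel_auton(double cmd)
{
    const double max_auton_vel = 0.5;   // assumed symmetric limit on the commanded velocity
    return (cmd > max_auton_vel) ? max_auton_vel
                                 : ((cmd < -max_auton_vel) ? -max_auton_vel : cmd);
}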
void HapticsPSM::get_current_position(tf::Vector3 &v){
    v.setValue(group->getCurrentPose().pose.position.x,
               group->getCurrentPose().pose.position.y,
               group->getCurrentPose().pose.position.z);
}
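// Possible variant (a sketch, not the original implementation; the method name is ours): each
// group->getCurrentPose() call above queries the current robot state, so the three components may
// come from slightly different states. Caching the pose once keeps them consistent and avoids
// repeated lookups.
void HapticsPSM::get_current_position_cached(tf::Vector3 &v){
    geometry_msgs::PoseStamped pose = group->getCurrentPose();   // single state query
    v.setValue(pose.pose.position.x, pose.pose.position.y, pose.pose.position.z);
}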