geometry_msgs::Pose PoseAffineToGeomMsg(const Eigen::Affine3d &e)
{
    geometry_msgs::Pose m;
    m.position.x = e.translation().x();
    m.position.y = e.translation().y();
    m.position.z = e.translation().z();
    // This was a column-major vs row-major matrix faux pas!
#if 0
    MatrixEXd em = e.rotation();
    Eigen::Quaterniond q = EMatrix2Quaterion(em);
#endif
    Eigen::Quaterniond q(e.rotation());
    m.orientation.x = q.x();
    m.orientation.y = q.y();
    m.orientation.z = q.z();
    m.orientation.w = q.w();
#if 0
    if (m.orientation.w < 0) {
        m.orientation.x *= -1;
        m.orientation.y *= -1;
        m.orientation.z *= -1;
        m.orientation.w *= -1;
    }
#endif
    return m;  // was missing: the function is declared to return the converted pose
}
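// For symmetry with PoseAffineToGeomMsg above, a minimal sketch of the reverse conversion
// (geometry_msgs::Pose -> Eigen::Affine3d). This helper is not part of the original source;
// it assumes only Eigen and geometry_msgs, and mirrors how other snippets in this collection
// build an Affine3d from a Pose message.
Eigen::Affine3d GeomMsgToPoseAffine(const geometry_msgs::Pose &m)
{
    return Eigen::Translation3d(m.position.x, m.position.y, m.position.z) *
           Eigen::Quaterniond(m.orientation.w, m.orientation.x, m.orientation.y, m.orientation.z);
}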
bool kinematic_constraints::VisibilityConstraint::equal(const KinematicConstraint &other, double margin) const
{
    if (other.getType() != type_)
        return false;
    const VisibilityConstraint &o = static_cast<const VisibilityConstraint&>(other);

    if (target_frame_id_ == o.target_frame_id_ && sensor_frame_id_ == o.sensor_frame_id_ &&
        cone_sides_ == o.cone_sides_ && sensor_view_direction_ == o.sensor_view_direction_)
    {
        if (fabs(max_view_angle_ - o.max_view_angle_) > margin ||
            fabs(target_radius_ - o.target_radius_) > margin)
            return false;
        Eigen::Affine3d diff = sensor_pose_.inverse() * o.sensor_pose_;
        if (diff.translation().norm() > margin)
            return false;
        if (!diff.rotation().isIdentity(margin))
            return false;
        diff = target_pose_.inverse() * o.target_pose_;
        if (diff.translation().norm() > margin)
            return false;
        if (!diff.rotation().isIdentity(margin))
            return false;
        return true;
    }
    return false;
}
/*!
 * \brief Affine3d2UrdfPose converts an Eigen affine 4x4 matrix representing the pose into a URDF pose.
 * \param pose Eigen Affine3d pose
 * \return URDF pose with position and rotation.
 */
RCS::Pose Affine3d2UrdfPose(const Eigen::Affine3d &pose)
{
    RCS::Pose p;
    p.getOrigin().setX(pose.translation().x());
    p.getOrigin().setY(pose.translation().y());
    p.getOrigin().setZ(pose.translation().z());

    Eigen::Quaterniond q(pose.rotation());
    tf::Quaternion qtf(q.x(), q.y(), q.z(), q.w());
    //std::cout << "Affine3d2UrdfPose Quaternion = \n" << q.x() << ":" << q.y() << ":" << q.z() << ":" << q.w() << std::endl;
    p.setRotation(qtf);
    //std::cout << "After Affine3d2UrdfPose Quaternion = \n" << p.getRotation().x() << ":" << p.getRotation().y() << ":" << p.getRotation().z() << ":" << p.getRotation().w() << std::endl;
#if 0
    MatrixEXd m = pose.rotation();
    Eigen::Quaterniond q = EMatrix2Quaterion(m);
    Eigen::Quaterniond q(pose.rotation());
    p.getRotation().setX(q.x());
    p.getRotation().setY(q.y());
    p.getRotation().setZ(q.z());
    p.getRotation().setW(q.w());
#endif
    return p;
}
void UpperBodyPlanner::pose_moveTo2(const std::string &link_name,
                                    const Eigen::Affine3d &current_pose,
                                    const Eigen::Affine3d &desired_pose,
                                    const int &step_num,
                                    std::vector<geometry_msgs::Pose> &pose_sequence)
{
    //Eigen::Affine3d current_pose = kinematic_state->getGlobalLinkTransform(link_name);
    Eigen::Quaterniond current_q = Rmat2Quaternion(current_pose.rotation());
    Eigen::Quaterniond desired_q = Rmat2Quaternion(desired_pose.rotation());
    Eigen::Quaterniond step_q;

    std::cout << "**********************************" << std::endl;
    std::cout << "end_effector_name is: " << link_name << std::endl;
    std::cout << "Current translation is: " << current_pose.translation().transpose() << std::endl;
    std::cout << "current rotation is: " << std::endl;
    std::cout << current_pose.rotation() << std::endl;
    std::cout << "current quaternion is: " << std::endl;
    std::cout << "w = " << current_q.w() << ", x = " << current_q.x() << ", y = " << current_q.y() << ", z = " << current_q.z() << std::endl;
    std::cout << "Desired translation is: " << desired_pose.translation().transpose() << std::endl;
    std::cout << "Desired rotation is: " << std::endl;
    std::cout << desired_pose.rotation() << std::endl;
    std::cout << "Desired quaternion is: " << std::endl;
    std::cout << "w = " << desired_q.w() << ", x = " << desired_q.x() << ", y = " << desired_q.y() << ", z = " << desired_q.z() << std::endl;
    std::cout << "**********************************" << std::endl;

    Eigen::Vector3d step_translation_movement = (desired_pose.translation() - current_pose.translation()) / step_num;
    for (int i = 0; i < step_num; i++) {
        Eigen::Affine3d step_target;
        step_target.translation() = current_pose.translation() + (i + 1) * step_translation_movement;
        step_q = current_q.slerp((i + 1.0) / step_num, desired_q);
        geometry_msgs::Pose target_pose = Eigen2msgPose(step_target.translation(), step_q);
        pose_sequence.push_back(target_pose);
    }
}
void UpperBodyPlanner::addPose_moveTo(const Eigen::Affine3d& start_pose,
                                      const Eigen::Affine3d& end_pose,
                                      const int& step_num,
                                      std::vector<geometry_msgs::Pose>& pose_sequence)
{
    Eigen::Quaterniond start_q = Rmat2Quaternion(start_pose.rotation());
    Eigen::Quaterniond end_q = Rmat2Quaternion(end_pose.rotation());
    Eigen::Quaterniond step_q;

    std::cout << "**********************************" << std::endl;
    std::cout << "start_pose translation is: " << start_pose.translation().transpose() << std::endl;
    std::cout << "start_pose rotation is: " << std::endl;
    std::cout << start_pose.rotation() << std::endl;
    std::cout << "start_pose quaternion is: " << std::endl;
    std::cout << "w = " << start_q.w() << ", x = " << start_q.x() << ", y = " << start_q.y() << ", z = " << start_q.z() << std::endl;
    std::cout << "end_pose translation is: " << end_pose.translation().transpose() << std::endl;
    std::cout << "end_pose rotation is: " << std::endl;
    std::cout << end_pose.rotation() << std::endl;
    std::cout << "end_pose quaternion is: " << std::endl;
    std::cout << "w = " << end_q.w() << ", x = " << end_q.x() << ", y = " << end_q.y() << ", z = " << end_q.z() << std::endl;
    std::cout << "**********************************" << std::endl;

    Eigen::Vector3d step_translation_movement = (end_pose.translation() - start_pose.translation()) / step_num;
    for (int i = 0; i < step_num; i++) {
        Eigen::Affine3d step_target;
        step_target.translation() = start_pose.translation() + (i + 1) * step_translation_movement;
        step_q = start_q.slerp((i + 1.0) / step_num, end_q);
        geometry_msgs::Pose target_pose = Eigen2msgPose(step_target.translation(), step_q);
        pose_sequence.push_back(target_pose);
    }
}
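// The two interpolation routines above share one pattern: linear interpolation of the
// translation plus quaternion slerp for the rotation. A minimal sketch using Eigen
// directly, assuming Rmat2Quaternion is equivalent to constructing Eigen::Quaterniond
// from the rotation matrix (the function name and parameter t are illustrative):
void slerp_pose_example(const Eigen::Affine3d &start_pose, const Eigen::Affine3d &end_pose, double t)
{
    Eigen::Quaterniond qa(start_pose.rotation());
    Eigen::Quaterniond qb(end_pose.rotation());
    Eigen::Quaterniond q_interp = qa.slerp(t, qb);                     // rotation at parameter t in [0, 1]
    Eigen::Vector3d p_interp = (1.0 - t) * start_pose.translation()
                             + t * end_pose.translation();             // translation at parameter t
    (void)q_interp;
    (void)p_interp;
}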
bool SensorProcessorBase::updateTransformations(const std::string& sensorFrameId, const ros::Time& timeStamp)
{
    try {
        transformListener_.waitForTransform(sensorFrameId, mapFrameId_, timeStamp, ros::Duration(1.0));

        tf::StampedTransform transformTf;
        transformListener_.lookupTransform(mapFrameId_, sensorFrameId, timeStamp, transformTf);
        poseTFToEigen(transformTf, transformationSensorToMap_);

        transformListener_.lookupTransform(robotBaseFrameId_, sensorFrameId, timeStamp, transformTf);  // TODO Why wrong direction?
        Eigen::Affine3d transform;
        poseTFToEigen(transformTf, transform);
        rotationBaseToSensor_.setMatrix(transform.rotation().matrix());
        translationBaseToSensorInBaseFrame_.toImplementation() = transform.translation();

        transformListener_.lookupTransform(mapFrameId_, robotBaseFrameId_, timeStamp, transformTf);  // TODO Why wrong direction?
        poseTFToEigen(transformTf, transform);
        rotationMapToBase_.setMatrix(transform.rotation().matrix());
        translationMapToBaseInMapFrame_.toImplementation() = transform.translation();

        return true;
    } catch (tf::TransformException &ex) {
        ROS_ERROR("%s", ex.what());
        return false;
    }
}
TEST_F(LoadPlanningModelsPr2, InitOK)
{
    ASSERT_TRUE(urdf_ok_);
    ASSERT_EQ(urdf_model_->getName(), "pr2_test");

    robot_model::RobotModelPtr kmodel(new robot_model::RobotModel(urdf_model_, srdf_model_));
    robot_state::RobotState ks(kmodel);
    ks.setToRandomValues();
    ks.setToDefaultValues();

    robot_state::Transforms tf(kmodel->getModelFrame());

    Eigen::Affine3d t1;
    t1.setIdentity();
    t1.translation() = Eigen::Vector3d(10.0, 1.0, 0.0);
    tf.setTransform(t1, "some_frame_1");

    Eigen::Affine3d t2(Eigen::Translation3d(10.0, 1.0, 0.0) * Eigen::AngleAxisd(0.5, Eigen::Vector3d::UnitY()));
    tf.setTransform(t2, "some_frame_2");

    Eigen::Affine3d t3;
    t3.setIdentity();
    t3.translation() = Eigen::Vector3d(0.0, 1.0, -1.0);
    tf.setTransform(t3, "some_frame_3");

    EXPECT_TRUE(tf.isFixedFrame("some_frame_1"));
    EXPECT_FALSE(tf.isFixedFrame("base_footprint"));
    EXPECT_TRUE(tf.isFixedFrame(kmodel->getModelFrame()));

    Eigen::Affine3d x;
    x.setIdentity();
    tf.transformPose(ks, "some_frame_2", x, x);
    EXPECT_TRUE(t2.translation() == x.translation());
    EXPECT_TRUE(t2.rotation() == x.rotation());

    tf.transformPose(ks, kmodel->getModelFrame(), x, x);
    EXPECT_TRUE(t2.translation() == x.translation());
    EXPECT_TRUE(t2.rotation() == x.rotation());

    x.setIdentity();
    tf.transformPose(ks, "r_wrist_roll_link", x, x);
    EXPECT_NEAR(x.translation().x(), 0.585315, 1e-4);
    EXPECT_NEAR(x.translation().y(), -0.188, 1e-4);
    EXPECT_NEAR(x.translation().z(), 1.24001, 1e-4);
}
void planning_models::KinematicModel::RevoluteJointModel::computeJointStateValues(const Eigen::Affine3d& transf,
                                                                                  std::vector<double> &joint_values) const
{
    joint_values.resize(1);
    Eigen::Quaterniond q(transf.rotation());
    q.normalize();
    joint_values[0] = acos(q.w()) * 2.0f;
}
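// Note that acos(q.w()) * 2 only recovers the magnitude of the rotation. Where the sign of
// the joint value matters, something like the following hypothetical helper (not part of
// the original class) can recover a signed angle about a known joint axis:
double signedJointAngle(const Eigen::Affine3d &transf, const Eigen::Vector3d &joint_axis)
{
    Eigen::AngleAxisd aa(transf.rotation());
    // AngleAxis returns a non-negative angle; flip the sign when the recovered
    // axis points opposite to the joint axis.
    return aa.axis().dot(joint_axis) >= 0.0 ? aa.angle() : -aa.angle();
}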
Eigen::Isometry3d toIsometry(const Eigen::Affine3d& pose)
{
    Eigen::Isometry3d p(pose.rotation());
    p.translation() = pose.translation();
    return p;
}
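// Usage sketch for toIsometry (illustrative values): convenient when an API expects
// Eigen::Isometry3d rather than Eigen::Affine3d.
void toIsometry_usage_example()
{
    Eigen::Affine3d pose = Eigen::Translation3d(0.1, 0.0, 0.5) *
                           Eigen::AngleAxisd(M_PI / 4.0, Eigen::Vector3d::UnitZ());
    Eigen::Isometry3d iso = toIsometry(pose);
    (void)iso;
}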
bool kinematic_constraints::PositionConstraint::equal(const KinematicConstraint &other, double margin) const
{
    if (other.getType() != type_)
        return false;
    const PositionConstraint &o = static_cast<const PositionConstraint&>(other);

    if (link_model_ == o.link_model_ && constraint_frame_id_ == o.constraint_frame_id_)
    {
        if ((offset_ - o.offset_).norm() > margin)
            return false;
        if (constraint_region_.size() != o.constraint_region_.size())
            return false;
        for (std::size_t i = 0 ; i < constraint_region_.size() ; ++i)
        {
            Eigen::Affine3d diff = constraint_region_pose_[i].inverse() * o.constraint_region_pose_[i];
            if (diff.translation().norm() > margin)
                return false;
            if (!diff.rotation().isIdentity(margin))
                return false;
            if (fabs(constraint_region_[i]->computeVolume() - o.constraint_region_[i]->computeVolume()) >= margin)
                return false;
        }
        return true;
    }
    return false;
}
bool CartesianTrajectoryAction::checkTolerance(Eigen::Affine3d err, cartesian_trajectory_msgs::CartesianTolerance tol)
{
    if ((tol.position.x > 0.0) && (fabs(err.translation().x()) > tol.position.x)) {
        return false;
    }
    if ((tol.position.y > 0.0) && (fabs(err.translation().y()) > tol.position.y)) {
        return false;
    }
    if ((tol.position.z > 0.0) && (fabs(err.translation().z()) > tol.position.z)) {
        return false;
    }

    Eigen::AngleAxisd ax(err.rotation());
    Eigen::Vector3d rot = ax.axis() * ax.angle();

    if ((tol.rotation.x > 0.0) && (fabs(rot(0)) > tol.rotation.x)) {
        return false;
    }
    if ((tol.rotation.y > 0.0) && (fabs(rot(1)) > tol.rotation.y)) {
        return false;
    }
    if ((tol.rotation.z > 0.0) && (fabs(rot(2)) > tol.rotation.z)) {
        return false;
    }

    return true;
}
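// Usage sketch for checkTolerance: the error transform is the desired pose expressed in the
// frame of the actual pose. The tolerance field layout is taken from the function above; the
// wrapper name, the two poses, and the assumption that checkTolerance is callable from here
// are illustrative.
bool withinCartesianTolerance(CartesianTrajectoryAction &action,
                              const Eigen::Affine3d &actual_pose,
                              const Eigen::Affine3d &desired_pose)
{
    cartesian_trajectory_msgs::CartesianTolerance tol;
    tol.position.x = tol.position.y = tol.position.z = 0.01;   // 1 cm per axis
    tol.rotation.x = tol.rotation.y = tol.rotation.z = 0.05;   // 0.05 rad (~3 degrees) per axis
    Eigen::Affine3d err = actual_pose.inverse() * desired_pose;
    return action.checkTolerance(err, tol);
}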
void MotionPlanningFrame::updateCollisionObjectPose(bool update_marker_position)
{
    QList<QListWidgetItem *> sel = ui_->collision_objects_list->selectedItems();
    if (sel.empty())
        return;
    planning_scene_monitor::LockedPlanningSceneRW ps = planning_display_->getPlanningSceneRW();
    if (ps)
    {
        collision_detection::CollisionWorld::ObjectConstPtr obj = ps->getWorld()->getObject(sel[0]->text().toStdString());
        if (obj && obj->shapes_.size() == 1)
        {
            Eigen::Affine3d p;
            p.translation()[0] = ui_->object_x->value();
            p.translation()[1] = ui_->object_y->value();
            p.translation()[2] = ui_->object_z->value();

            p = Eigen::Translation3d(p.translation()) *
                (Eigen::AngleAxisd(ui_->object_rx->value(), Eigen::Vector3d::UnitX()) *
                 Eigen::AngleAxisd(ui_->object_ry->value(), Eigen::Vector3d::UnitY()) *
                 Eigen::AngleAxisd(ui_->object_rz->value(), Eigen::Vector3d::UnitZ()));

            ps->getWorldNonConst()->moveShapeInObject(obj->id_, obj->shapes_[0], p);
            planning_display_->queueRenderSceneGeometry();

            // Update the interactive marker pose to match the manually introduced one
            if (update_marker_position && scene_marker_)
            {
                Eigen::Quaterniond eq(p.rotation());
                scene_marker_->setPose(Ogre::Vector3(ui_->object_x->value(), ui_->object_y->value(), ui_->object_z->value()),
                                       Ogre::Quaternion(eq.w(), eq.x(), eq.y(), eq.z()), "");
            }
        }
    }
}
inline std::string PrettyPrint(const Eigen::Affine3d& transform_to_print, const bool add_delimiters, const std::string& separator)
{
    UNUSED(add_delimiters);
    UNUSED(separator);
    Eigen::Vector3d vector_to_print = transform_to_print.translation();
    Eigen::Quaterniond quaternion_to_print(transform_to_print.rotation());
    return "Affine3d <x: " + std::to_string(vector_to_print.x())
           + " y: " + std::to_string(vector_to_print.y())
           + " z: " + std::to_string(vector_to_print.z())
           + ">, <x: " + std::to_string(quaternion_to_print.x())
           + " y: " + std::to_string(quaternion_to_print.y())
           + " z: " + std::to_string(quaternion_to_print.z())
           + " w: " + std::to_string(quaternion_to_print.w()) + ">";
}
void SceneCloudView::setViewerPose(const Eigen::Affine3d& viewer_pose)
{
    Eigen::Vector3d pos_vector = viewer_pose * Eigen::Vector3d(0, 0, 0);
    // Look along the camera's +Z axis; with (0, 0, 0) here the focal point would
    // coincide with the camera position and the view would be degenerate.
    Eigen::Vector3d look_at_vector = viewer_pose.rotation() * Eigen::Vector3d(0, 0, 1) + pos_vector;
    Eigen::Vector3d up_vector = viewer_pose.rotation() * Eigen::Vector3d(0, -1, 0);

    pcl17::visualization::Camera cam;
    cam.pos[0] = pos_vector[0];
    cam.pos[1] = pos_vector[1];
    cam.pos[2] = pos_vector[2];
    cam.focal[0] = look_at_vector[0];
    cam.focal[1] = look_at_vector[1];
    cam.focal[2] = look_at_vector[2];
    cam.view[0] = up_vector[0];
    cam.view[1] = up_vector[1];
    cam.view[2] = up_vector[2];

    cloud_viewer_->setCameraPosition(
        (double)pos_vector[0], (double)pos_vector[1], (double)pos_vector[2],
        (double)look_at_vector[0], (double)look_at_vector[1], (double)look_at_vector[2],
        (double)up_vector[0], (double)up_vector[1], (double)up_vector[2]);
    cloud_viewer_->updateCamera();
}
void robot_model::FloatingJointModel::computeJointStateValues(const Eigen::Affine3d& transf,
                                                              std::vector<double> &joint_values) const
{
    joint_values.resize(7);
    joint_values[0] = transf.translation().x();
    joint_values[1] = transf.translation().y();
    joint_values[2] = transf.translation().z();
    Eigen::Quaterniond q(transf.rotation());
    joint_values[3] = q.x();
    joint_values[4] = q.y();
    joint_values[5] = q.z();
    joint_values[6] = q.w();
}
TEST(TfEigen, ConvertAffine3d)
{
    const Eigen::Affine3d v(Eigen::Translation3d(1, 2, 3) * Eigen::AngleAxis<double>(1, Eigen::Vector3d::UnitX()));

    Eigen::Affine3d v1;
    geometry_msgs::Pose p1;
    tf2::convert(v, p1);
    tf2::convert(p1, v1);

    EXPECT_EQ(v.translation(), v1.translation());
    EXPECT_EQ(v.rotation(), v1.rotation());
}
/**
 * @brief Send attitude setpoint to FCU attitude controller
 *
 * @note ENU frame.
 */
void send_attitude_target(const ros::Time &stamp, const Eigen::Affine3d &tr)
{
    /* Thrust + RPY, also bits numbering started from 1 in docs */
    const uint8_t ignore_all_except_q = (1 << 6) | (7 << 0);

    float q[4];
    UAS::quaternion_to_mavlink(
            UAS::transform_orientation_enu_ned(
                UAS::transform_orientation_baselink_aircraft(Eigen::Quaterniond(tr.rotation()))),
            q);

    set_attitude_target(stamp.toNSec() / 1000000,
            ignore_all_except_q,
            q,
            0.0, 0.0, 0.0,
            0.0);
}
void ParticleFilter3D::predict(Eigen::Affine3d Tmotion, double vx, double vy, double vz, double vroll, double vpitch, double vyaw)
{
    Eigen::Vector3d tr = Tmotion.translation();
    Eigen::Vector3d rot = Tmotion.rotation().eulerAngles(0, 1, 2);

    for (unsigned int i = 0; i < pcloud.size(); i++) {
        double x = tr[0] + myrand.normalRandom() * vx;
        double y = tr[1] + myrand.normalRandom() * vy;
        double z = tr[2] + myrand.normalRandom() * vz;
        double roll  = rot[0] + myrand.normalRandom() * vroll;
        double pitch = rot[1] + myrand.normalRandom() * vpitch;
        double yaw   = rot[2] + myrand.normalRandom() * vyaw;

        pcloud[i].T = pcloud[i].T * (xyzrpy2affine(x, y, z, roll, pitch, yaw));
    }
}
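// Presumed shape of the xyzrpy2affine helper used above (sketch only; the real implementation
// lives elsewhere). It mirrors the eulerAngles(0, 1, 2) decomposition used in predict, i.e.
// R = Rx(roll) * Ry(pitch) * Rz(yaw).
Eigen::Affine3d xyzrpy2affine_sketch(double x, double y, double z, double roll, double pitch, double yaw)
{
    return Eigen::Translation3d(x, y, z) *
           Eigen::AngleAxisd(roll,  Eigen::Vector3d::UnitX()) *
           Eigen::AngleAxisd(pitch, Eigen::Vector3d::UnitY()) *
           Eigen::AngleAxisd(yaw,   Eigen::Vector3d::UnitZ());
}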
/**
 * @brief Send attitude setpoint to FCU attitude controller
 *
 * @note ENU frame.
 */
void send_attitude_target(const ros::Time &stamp, const Eigen::Affine3d &tr)
{
    /* Thrust + RPY, also bits numbering started from 1 in docs */
    const uint8_t ignore_all_except_q = (1 << 6) | (7 << 0);

    float q[4];
    // MAVLink expects the quaternion as [w, x, y, z]; Eigen's internal storage
    // (and therefore Eigen::Map) is [x, y, z, w], so fill the array explicitly.
    const Eigen::Quaternionf q_out =
            UAS::transform_frame_enu_ned(Eigen::Quaterniond(tr.rotation())).cast<float>();
    q[0] = q_out.w();
    q[1] = q_out.x();
    q[2] = q_out.y();
    q[3] = q_out.z();

    set_attitude_target(stamp.toNSec() / 1000000,
            ignore_all_except_q,
            q,
            0.0, 0.0, 0.0,
            0.0);
}
void robot_model::PlanarJointModel::computeJointStateValues(const Eigen::Affine3d& transf,
                                                            std::vector<double> &joint_values) const
{
    joint_values.resize(3);
    joint_values[0] = transf.translation().x();
    joint_values[1] = transf.translation().y();

    Eigen::Quaterniond q(transf.rotation());
    // taken from Bullet
    double s_squared = 1.0 - (q.w() * q.w());
    if (s_squared < 10.0 * std::numeric_limits<double>::epsilon())
        joint_values[2] = 0.0;
    else {
        double s = 1.0 / sqrt(s_squared);
        joint_values[2] = (acos(q.w()) * 2.0f) * (q.z() * s);
    }
}
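// For a planar joint the rotation is purely about Z, so the yaw can equivalently be extracted
// with the standard atan2 formula, which avoids the explicit small-angle branch above
// (sketch; the helper name is illustrative).
double yawFromTransform(const Eigen::Affine3d &transf)
{
    Eigen::Quaterniond q(transf.rotation());
    return std::atan2(2.0 * (q.w() * q.z() + q.x() * q.y()),
                      1.0 - 2.0 * (q.y() * q.y() + q.z() * q.z()));
}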
/**
 * @brief Send attitude setpoint and thrust to FCU attitude controller
 */
void send_attitude_quaternion(const ros::Time &stamp, const Eigen::Affine3d &tr, const float thrust)
{
    /**
     * @note RPY, also bits numbering started from 1 in docs
     */
    const uint8_t ignore_all_except_q_and_thrust = (7 << 0);

    auto q = ftf::transform_orientation_enu_ned(
                ftf::transform_orientation_baselink_aircraft(Eigen::Quaterniond(tr.rotation())));

    set_attitude_target(stamp.toNSec() / 1000000,
            ignore_all_except_q_and_thrust,
            q,
            Eigen::Vector3d::Zero(),
            thrust);
}
// Compute the desired orientation for the end effector given the handrail group
void getEndEffectorOrientation(const std::string& handrail_group, std::vector<double>& rpy)
{
    // "Normal" grasp orientation for R2 on a handrail at the origin
    Eigen::Affine3d pose = Eigen::Affine3d::Identity();
    pose *= (Eigen::AngleAxisd(3.141592653, Eigen::Vector3d::UnitX()) *
             Eigen::AngleAxisd(0.0, Eigen::Vector3d::UnitY()) *
             Eigen::AngleAxisd(1.57079, Eigen::Vector3d::UnitZ()));

    Eigen::Affine3d rotation = Eigen::Affine3d::Identity();
    if (handrail_group == "Handrail_starboard") {
        rotation *= (Eigen::AngleAxisd(0.0, Eigen::Vector3d::UnitX()) *
                     Eigen::AngleAxisd(-1.57079, Eigen::Vector3d::UnitY()) *
                     Eigen::AngleAxisd(-1.57079, Eigen::Vector3d::UnitZ()));
    } else if (handrail_group == "Handrail_port") {
        rotation *= (Eigen::AngleAxisd(0.0, Eigen::Vector3d::UnitX()) *
                     Eigen::AngleAxisd(1.57079, Eigen::Vector3d::UnitY()) *
                     Eigen::AngleAxisd(-1.57079, Eigen::Vector3d::UnitZ()));
    } else if (handrail_group == "Handrail_deck") {
        rotation *= (Eigen::AngleAxisd(0.0, Eigen::Vector3d::UnitX()) *
                     Eigen::AngleAxisd(0.0, Eigen::Vector3d::UnitY()) *
                     Eigen::AngleAxisd(-1.57079, Eigen::Vector3d::UnitZ()));
    } else if (handrail_group == "Handrail_overhead") {
        rotation *= (Eigen::AngleAxisd(-3.141592653, Eigen::Vector3d::UnitX()) *
                     Eigen::AngleAxisd(0.0, Eigen::Vector3d::UnitY()) *
                     Eigen::AngleAxisd(-1.57079, Eigen::Vector3d::UnitZ()));
    } else {
        ROS_WARN("Unknown handrail group '%s'", handrail_group.c_str());
    }

    pose = pose * rotation;
    Eigen::Vector3d euler = pose.rotation().eulerAngles(0, 1, 2);
    rpy.resize(3);
    rpy[0] = euler(0);
    rpy[1] = euler(1);
    rpy[2] = euler(2);
}
/**
 * @brief Send vision estimate transform to FCU position controller
 */
void send_vision_estimate(const ros::Time &stamp, const Eigen::Affine3d &tr)
{
    /**
     * @warning Issue #60.
     * This now affects pose callbacks too.
     */
    if (last_transform_stamp == stamp) {
        ROS_DEBUG_THROTTLE_NAMED(10, "vision_pose", "Vision: Same transform as last one, dropped.");
        return;
    }
    last_transform_stamp = stamp;

    auto position = ftf::transform_frame_enu_ned(Eigen::Vector3d(tr.translation()));
    auto rpy = ftf::quaternion_to_rpy(
            ftf::transform_orientation_enu_ned(Eigen::Quaterniond(tr.rotation())));

    vision_position_estimate(stamp.toNSec() / 1000, position, rpy);
}
/**
 * @brief Send setpoint to FCU position controller.
 *
 * @warning Send only XYZ, Yaw. ENU frame.
 */
void send_position_target(const ros::Time &stamp, const Eigen::Affine3d &tr)
{
    /* Documentation starts from bit 1 instead of 0;
     * ignore velocity and accel vectors, yaw rate.
     *
     * Past versions of PX4 had a bug described in #273.
     * If you hit a similar issue, please try updating the firmware first.
     */
    const uint16_t ignore_all_except_xyz_y = (1 << 11) | (7 << 6) | (7 << 3);

    auto p = UAS::transform_frame_enu_ned(Eigen::Vector3d(tr.translation()));
    auto q = UAS::transform_frame_enu_ned(Eigen::Quaterniond(tr.rotation()));

    set_position_target_local_ned(stamp.toNSec() / 1000000,
            MAV_FRAME_LOCAL_NED,
            ignore_all_except_xyz_y,
            p.x(), p.y(), p.z(),
            0.0, 0.0, 0.0,
            0.0, 0.0, 0.0,
            UAS::quaternion_get_yaw(q), 0.0);
}
bool VoNode::initCb()
{
    srv.request.timeA = time_first;
    srv.request.timeB = time_second;
    std::cout << "\ntime_first:\n" << (time_first) << std::endl;
    std::cout << "\ntime_second:\n" << (time_second) << std::endl;

    ROS_INFO("Calling service get_between_pose");
    if (client.call(srv)) {
        ROS_INFO("Got init pose from get_between_pose srv!");
        geometry_msgs::Pose m = srv.response.A2B;
        Eigen::Affine3d e = Eigen::Translation3d(m.position.x, m.position.y, m.position.z) *
                            Eigen::Quaterniond(m.orientation.w, m.orientation.x, m.orientation.y, m.orientation.z);
        Eigen::Matrix3d rot = e.rotation();
        Eigen::Vector3d trans = e.translation();
        std::cout << "Time diff in first and second:\n" << (time_second - time_first) << std::endl;
        std::cout << time_first << std::endl << time_second << std::endl;
        SE3 current_trans = SE3(rot, trans);
        std::cout << "trans:\n" << trans << std::endl;
        std::cout << "rot:\n" << rot << std::endl;
        vo_->InitPose(current_trans);
        our_scale_ = current_trans.matrix().block<3,1>(0,3).norm();
        return true;
    } else {
        ROS_ERROR("Failed to call service get_between_pose");
        return false;
    }
}
bool CaptureStereoSynchronizer::checkNearPose(const geometry_msgs::Pose& new_pose)
{
    Eigen::Affine3d new_affine;
    tf::poseMsgToEigen(new_pose, new_affine);
    for (size_t i = 0; i < poses_.size(); i++) {
        // convert pose into eigen
        Eigen::Affine3d affine;
        tf::poseMsgToEigen(poses_[i], affine);
        // compute difference
        Eigen::Affine3d diff = affine.inverse() * new_affine;
        double positional_difference = diff.translation().norm();
        if (positional_difference < positional_bin_size_) {
            Eigen::AngleAxisd rotational_difference(diff.rotation());
            if (rotational_difference.angle() < rotational_bin_size_) {
                return true;
            }
        }
    }
    return false;
}
bool kinematic_constraints::PositionConstraint::equal(const KinematicConstraint &other, double margin) const
{
    if (other.getType() != type_)
        return false;
    const PositionConstraint &o = static_cast<const PositionConstraint&>(other);

    if (link_model_ == o.link_model_ && constraint_frame_id_ == o.constraint_frame_id_)
    {
        if ((offset_ - o.offset_).norm() > margin)
            return false;

        std::vector<bool> other_region_matches_this(constraint_region_.size(), false);
        for (std::size_t i = 0 ; i < constraint_region_.size() ; ++i)
        {
            bool some_match = false;
            // need to check against all other regions
            for (std::size_t j = 0 ; j < o.constraint_region_.size() ; ++j)
            {
                Eigen::Affine3d diff = constraint_region_pose_[i].inverse() * o.constraint_region_pose_[j];
                if (diff.translation().norm() < margin
                    && diff.rotation().isIdentity(margin)
                    && constraint_region_[i]->getType() == o.constraint_region_[j]->getType()
                    && fabs(constraint_region_[i]->computeVolume() - o.constraint_region_[j]->computeVolume()) < margin)
                {
                    some_match = true;
                    // can't break, as need to do matches the other way as well
                    other_region_matches_this[j] = true;
                }
            }
            if (!some_match)
                return false;
        }
        for (std::size_t i = 0 ; i < o.constraint_region_.size() ; ++i)
            if (!other_region_matches_this[i])
                return false;
        return true;
    }
    return false;
}
/**
 * @function setXform
 */
void KState::setXform(const Eigen::Affine3d& _t)
{
    body_pos = _t.translation();
    body_rot = _t.rotation();
}
Eigen::Affine3d NDTFuserHMT::update(Eigen::Affine3d Tmotion, pcl::PointCloud<pcl::PointXYZ> &cloud) { if(!isInit){ fprintf(stderr,"NDT-FuserHMT: Call Initialize first!!\n"); return Tnow; } Todom = Todom * Tmotion; //we track this only for display purposes! double t0=0,t1=0,t2=0,t3=0,t4=0,t5=0,t6=0; ///Set the cloud to sensor frame with respect to base lslgeneric::transformPointCloudInPlace(sensor_pose, cloud); t0 = getDoubleTime(); ///Create local map lslgeneric::NDTMap ndlocal(new lslgeneric::LazyGrid(resolution)); ndlocal.guessSize(0,0,0,sensor_range,sensor_range,map_size_z); ndlocal.loadPointCloud(cloud,sensor_range); ndlocal.computeNDTCells(CELL_UPDATE_MODE_SAMPLE_VARIANCE); //pass through ndlocal and set all cells with vertically pointing normals to non-gaussian :-O /*SpatialIndex *index = ndlocal.getMyIndex(); typename SpatialIndexctorItr it = index->begin(); while (it != index->end()) { NDTCell *cell = dynamic_cast<NDTCell*> (*it); if(cell!=NULL) { if(cell->hasGaussian_) { if(cell->getClass() == NDTCell::HORIZONTAL) { cell->hasGaussian_ = false; } } } it++; }*/ t1 = getDoubleTime(); Eigen::Affine3d Tinit = Tnow * Tmotion; if(disableRegistration) { Tnow = Tinit; lslgeneric::transformPointCloudInPlace(Tnow, cloud); Eigen::Affine3d spose = Tnow*sensor_pose; map->addPointCloudMeanUpdate(spose.translation(),cloud,localMapSize, 1e5, 25, 2*map_size_z, 0.06); if(visualize) //&&ctr%20==0) { #ifndef NO_NDT_VIZ if(ctr%50==0) { viewer->plotNDTSAccordingToOccupancy(-1,map); //viewer->plotLocalNDTMap(cloud,resolution); } viewer->addTrajectoryPoint(Tnow.translation()(0),Tnow.translation()(1),Tnow.translation()(2)+0.2,0,1,0); viewer->addTrajectoryPoint(Todom.translation()(0),Todom.translation()(1),Todom.translation()(2)+0.2,0.5,0,0.5); viewer->displayTrajectory(); viewer->setCameraPointing(Tnow.translation()(0),Tnow.translation()(1),Tnow.translation()(2)+3); viewer->repaint(); #endif } ctr++; return Tnow; } if(doMultires) { //create two ndt maps with resolution = 3*resolution (or 5?) lslgeneric::NDTMap ndlocalLow(new lslgeneric::LazyGrid(3*resolution)); ndlocalLow.guessSize(0,0,0,sensor_range,sensor_range,map_size_z); ndlocalLow.loadPointCloud(cloud,sensor_range); ndlocalLow.computeNDTCells(CELL_UPDATE_MODE_SAMPLE_VARIANCE); lslgeneric::NDTMap mapLow(new lslgeneric::LazyGrid(3*resolution)); //add distros double cx,cy,cz; if(!map->getCentroid(cx, cy, cz)){ fprintf(stderr,"Centroid NOT Given-abort!\n"); } mapLow.initialize(cx,cy,cz,3*map_size_x,3*map_size_y,map_size_z); std::vector<lslgeneric::NDTCell*> ndts; ndts = map->getAllCells(); //this copies cells? for(int i=0; i<ndts.size(); i++) { NDTCell *cell = ndts[i]; if(cell!=NULL) { if(cell->hasGaussian_) { Eigen::Vector3d m = cell->getMean(); Eigen::Matrix3d cov = cell->getCov(); unsigned int nump = cell->getN(); mapLow.addDistributionToCell(cov, m,nump); } } delete cell; } //do match if(matcher2D.match( mapLow, ndlocalLow,Tinit,true)){ //if success, set Tmotion to result t2 = getDoubleTime(); //std::cout<<"success: new initial guess! 
t= "<<t2-t1<<std::endl; } else { Tinit = Tnow * Tmotion; } } if(be2D) { t2 = getDoubleTime(); if(matcher2D.match( *map, ndlocal,Tinit,true) || fuseIncomplete){ t3 = getDoubleTime(); Eigen::Affine3d diff = (Tnow * Tmotion).inverse() * Tinit; if((diff.translation().norm() > max_translation_norm || diff.rotation().eulerAngles(0,1,2).norm() > max_rotation_norm) && checkConsistency){ fprintf(stderr,"**** NDTFuserHMT -- ALMOST DEFINATELY A REGISTRATION FAILURE *****\n"); Tnow = Tnow * Tmotion; }else{ Tnow = Tinit; lslgeneric::transformPointCloudInPlace(Tnow, cloud); Eigen::Affine3d spose = Tnow*sensor_pose; Eigen::Affine3d diff_fuse = Tlast_fuse.inverse()*Tnow; if(diff_fuse.translation().norm() > translation_fuse_delta || diff_fuse.rotation().eulerAngles(0,1,2).norm() > rotation_fuse_delta) { //std::cout<<"F: "<<spose.translation().transpose()<<" "<<spose.rotation().eulerAngles(0,1,2).transpose()<<std::endl; t4 = getDoubleTime(); //TSV: originally this! //map->addPointCloudMeanUpdate(spose.translation(),cloud,localMapSize, 1e5, 1250, map_size_z/2, 0.06); map->addPointCloudMeanUpdate(spose.translation(),cloud,localMapSize, 1e5, 25, 2*map_size_z, 0.06); t5 = getDoubleTime(); //map->addPointCloud(spose.translation(),cloud, 0.06, 25); //map->computeNDTCells(CELL_UPDATE_MODE_SAMPLE_VARIANCE, 1e5, 255, spose.translation(), 0.1); //t4 = getDoubleTime(); //std::cout<<"match: "<<t3-t2<<" addPointCloud: "<<t5-t4<<" ndlocal "<<t1-t0<<" total: "<<t5-t0<<std::endl; Tlast_fuse = Tnow; if(visualize) //&&ctr%20==0) { #ifndef NO_NDT_VIZ if(ctr%30==0) { viewer->plotNDTSAccordingToOccupancy(-1,map); //viewer->plotLocalNDTMap(cloud,resolution); } viewer->addTrajectoryPoint(Tnow.translation()(0),Tnow.translation()(1),Tnow.translation()(2)+0.2,0,1,0); viewer->addTrajectoryPoint(Todom.translation()(0),Todom.translation()(1),Todom.translation()(2)+0.2,0.5,0,0.5); viewer->displayTrajectory(); viewer->setCameraPointing(Tnow.translation()(0),Tnow.translation()(1),Tnow.translation()(2)+3); viewer->repaint(); //viewer->win3D->process_events(); #endif } ctr++; } } }else{ t3 = getDoubleTime(); Tnow = Tnow * Tmotion; } } else { t2 = getDoubleTime(); if(matcher.match( *map, ndlocal,Tinit,true) || fuseIncomplete){ t3 = getDoubleTime(); Eigen::Affine3d diff = (Tnow * Tmotion).inverse() * Tinit; if((diff.translation().norm() > max_translation_norm || diff.rotation().eulerAngles(0,1,2).norm() > max_rotation_norm) && checkConsistency){ fprintf(stderr,"**** NDTFuserHMT -- ALMOST DEFINATELY A REGISTRATION FAILURE *****\n"); Tnow = Tnow * Tmotion; //save offending map: //map->writeToJFF("map.jff"); //ndlocal.writeToJFF("local.jff"); }else{ Tnow = Tinit; //Tnow = Tnow * Tmotion; lslgeneric::transformPointCloudInPlace(Tnow, cloud); Eigen::Affine3d spose = Tnow*sensor_pose; Eigen::Affine3d diff_fuse = Tlast_fuse.inverse()*Tnow; if(diff_fuse.translation().norm() > translation_fuse_delta || diff_fuse.rotation().eulerAngles(0,1,2).norm() > rotation_fuse_delta) { //std::cout<<"F: "<<spose.translation().transpose()<<" "<<spose.rotation().eulerAngles(0,1,2).transpose()<<std::endl; t4 = getDoubleTime(); map->addPointCloudMeanUpdate(spose.translation(),cloud,localMapSize, 1e5, 1250, map_size_z/2, 0.06); t5 = getDoubleTime(); //map->addPointCloudMeanUpdate(spose.translation(),cloud,localMapSize, 1e5, 25, 2*map_size_z, 0.06); //map->addPointCloud(spose.translation(),cloud, 0.06, 25); //map->computeNDTCells(CELL_UPDATE_MODE_SAMPLE_VARIANCE, 1e5, 255, spose.translation(), 0.1); //t4 = getDoubleTime(); //std::cout<<"match: "<<t3-t2<<" addPointCloud: 
"<<t5-t4<<" ndlocal "<<t1-t0<<" total: "<<t5-t0<<std::endl; Tlast_fuse = Tnow; if(visualize) //&&ctr%20==0) { #ifndef NO_NDT_VIZ if(ctr%2==0) { viewer->plotNDTSAccordingToOccupancy(-1,map); //viewer->plotLocalNDTMap(cloud,resolution); } viewer->addTrajectoryPoint(Tnow.translation()(0),Tnow.translation()(1),Tnow.translation()(2)+0.2,0,1,0); viewer->addTrajectoryPoint(Todom.translation()(0),Todom.translation()(1),Todom.translation()(2)+0.2,0.5,0,0.5); viewer->displayTrajectory(); viewer->setCameraPointing(Tnow.translation()(0),Tnow.translation()(1),Tnow.translation()(2)+3); viewer->repaint(); viewer->win3D->process_events(); #endif } ctr++; } } }else{ t3 = getDoubleTime(); Tnow = Tnow * Tmotion; } } t6 = getDoubleTime(); if(fAddTimes!=NULL) { fprintf(fAddTimes,"%lf %lf %lf\n",t3-t2,t5-t4,t6-t0); fflush(fAddTimes); } return Tnow; }
void CartesianImpedance::updateHook() { RESTRICT_ALLOC; // ToolMass toolsM[K]; Eigen::Affine3d r[K]; // read inputs port_joint_position_.read(joint_position_); if (!joint_position_.allFinite()) { RTT::Logger::In in("CartesianImpedance::updateHook"); error(); RTT::log(RTT::Error) << "joint_position_ contains NaN or inf" << RTT::endlog(); return; } port_joint_velocity_.read(joint_velocity_); if (!joint_velocity_.allFinite()) { RTT::Logger::In in("CartesianImpedance::updateHook"); error(); RTT::log(RTT::Error) << "joint_velocity_ contains NaN or inf" << RTT::endlog(); return; } port_nullspace_torque_command_.read(nullspace_torque_command_); if (!nullspace_torque_command_.allFinite()) { RTT::Logger::In in("CartesianImpedance::updateHook"); error(); RTT::log(RTT::Error) << "nullspace_torque_command_ contains NaN or inf" << RTT::endlog(); return; } for (size_t i = 0; i < K; i++) { geometry_msgs::Pose pos; if (port_cartesian_position_command_[i]->read(pos) == RTT::NewData) { tf::poseMsgToEigen(pos, r_cmd[i]); } if (port_tool_position_command_[i]->read(pos) == RTT::NewData) { tools[i](0) = pos.position.x; tools[i](1) = pos.position.y; tools[i](2) = pos.position.z; tools[i](3) = pos.orientation.w; tools[i](4) = pos.orientation.x; tools[i](5) = pos.orientation.y; tools[i](6) = pos.orientation.z; } cartesian_trajectory_msgs::CartesianImpedance impedance; if (port_cartesian_impedance_command_[i]->read(impedance) == RTT::NewData) { Kc(i * 6 + 0) = impedance.stiffness.force.x; Kc(i * 6 + 1) = impedance.stiffness.force.y; Kc(i * 6 + 2) = impedance.stiffness.force.z; Kc(i * 6 + 3) = impedance.stiffness.torque.x; Kc(i * 6 + 4) = impedance.stiffness.torque.y; Kc(i * 6 + 5) = impedance.stiffness.torque.z; Dxi(i * 6 + 0) = impedance.damping.force.x; Dxi(i * 6 + 1) = impedance.damping.force.y; Dxi(i * 6 + 2) = impedance.damping.force.z; Dxi(i * 6 + 3) = impedance.damping.torque.x; Dxi(i * 6 + 4) = impedance.damping.torque.y; Dxi(i * 6 + 5) = impedance.damping.torque.z; } } port_mass_matrix_inv_.read(M); if (!M.allFinite()) { RTT::Logger::In in("CartesianImpedance::updateHook"); error(); RTT::log(RTT::Error) << "M contains NaN or inf" << RTT::endlog(); return; } // calculate robot data robot_->jacobian(J, joint_position_, &tools[0]); if (!J.allFinite()) { RTT::Logger::In in("CartesianImpedance::updateHook"); error(); RTT::log(RTT::Error) << "J contains NaN or inf" << RTT::endlog(); return; } robot_->fkin(r, joint_position_, &tools[0]); JT = J.transpose(); lu_.compute(M); Mi = lu_.inverse(); if (!Mi.allFinite()) { RTT::Logger::In in("CartesianImpedance::updateHook"); error(); RTT::log(RTT::Error) << "Mi contains NaN or inf" << RTT::endlog(); return; } // calculate stiffness component for (size_t i = 0; i < K; i++) { Eigen::Affine3d tmp; tmp = r[i].inverse() * r_cmd[i]; p(i * 6) = tmp.translation().x(); p(i * 6 + 1) = tmp.translation().y(); p(i * 6 + 2) = tmp.translation().z(); Eigen::Quaternion<double> quat = Eigen::Quaternion<double>( tmp.rotation()); p(i * 6 + 3) = quat.x(); p(i * 6 + 4) = quat.y(); p(i * 6 + 5) = quat.z(); } F.noalias() = (Kc.array() * p.array()).matrix(); joint_torque_command_.noalias() = JT * F; if (!joint_torque_command_.allFinite()) { RTT::Logger::In in("CartesianImpedance::updateHook"); error(); RTT::log(RTT::Error) << "joint_torque_command_ contains NaN or inf" << RTT::endlog(); return; } // calculate damping component #if 1 tmpNK_.noalias() = J * Mi; A.noalias() = tmpNK_ * JT; luKK_.compute(A); A = luKK_.inverse(); tmpKK_ = Kc.asDiagonal(); UNRESTRICT_ALLOC; es_.compute(tmpKK_, A); 
RESTRICT_ALLOC; K0 = es_.eigenvalues(); luKK_.compute(es_.eigenvectors()); Q = luKK_.inverse(); tmpKK_ = Dxi.asDiagonal(); Dc.noalias() = Q.transpose() * tmpKK_; tmpKK_ = K0.cwiseSqrt().asDiagonal(); tmpKK2_.noalias() = Dc * tmpKK_; Dc.noalias() = tmpKK2_ * Q; tmpK_.noalias() = J * joint_velocity_; F.noalias() = Dc * tmpK_; if (!F.allFinite()) { RTT::Logger::In in("CartesianImpedance::updateHook"); error(); RTT::log(RTT::Error) << "F contains NaN or inf" << RTT::endlog(); return; } joint_torque_command_.noalias() -= JT * F; #else UNRESTRICT_ALLOC; tmpNN_.noalias() = JT * Kc.asDiagonal() * J; es_.compute(tmpNN_, M, Eigen::ComputeEigenvectors | Eigen::BAx_lx); K0 = es_.eigenvalues(); Q = es_.eigenvectors(); RESTRICT_ALLOC; tmpNN_ = K0.cwiseSqrt().asDiagonal(); UNRESTRICT_ALLOC; Dc.noalias() = Q * 0.7 * tmpNN_ * Q.adjoint(); RESTRICT_ALLOC; joint_torque_command_.noalias() -= Dc * joint_velocity_; #endif // calculate null-space component tmpNK_.noalias() = J * Mi; tmpKK_.noalias() = tmpNK_ * JT; luKK_.compute(tmpKK_); tmpKK_ = luKK_.inverse(); tmpKN_.noalias() = Mi * JT; Ji.noalias() = tmpKN_ * tmpKK_; P.noalias() = Eigen::MatrixXd::Identity(P.rows(), P.cols()); P.noalias() -= J.transpose() * A * J * Mi; if (!P.allFinite()) { RTT::Logger::In in("CartesianImpedance::updateHook"); error(); RTT::log(RTT::Error) << "P contains NaN or inf" << RTT::endlog(); return; } joint_torque_command_.noalias() += P * nullspace_torque_command_; // write outputs UNRESTRICT_ALLOC; port_joint_torque_command_.write(joint_torque_command_); for (size_t i = 0; i < K; i++) { geometry_msgs::Pose pos; tf::poseEigenToMsg(r[i], pos); port_cartesian_position_[i]->write(pos); } }