void ObjectModelLine::projectPoints(const std::vector<int> &inliers,
        const Eigen::VectorXd &model_coefficients, PointCloud3D* projectedPointCloud) {
    assert(model_coefficients.size() == 6);

    // Obtain the line point and direction
    Eigen::Vector4d line_pt(model_coefficients[0], model_coefficients[1], model_coefficients[2], 0);
    Eigen::Vector4d line_dir(model_coefficients[3], model_coefficients[4], model_coefficients[5], 0);

    // Iterate through the 3d points and calculate the distances from them to the line
    for (size_t i = 0; i < inliers.size(); ++i) {
        Eigen::Vector4d pt((*inputPointCloud->getPointCloud())[inliers[i]].getX(),
                           (*inputPointCloud->getPointCloud())[inliers[i]].getY(),
                           (*inputPointCloud->getPointCloud())[inliers[i]].getZ(),
                           0);
        // double k = (DOT_PROD_3D (points[i], p21) - dotA_B) / dotB_B;
        double k = (pt.dot(line_dir) - line_pt.dot(line_dir)) / line_dir.dot(line_dir);

        // Calculate the projection of the point on the line (pointProj = A + k * B)
        Eigen::Vector4d pp = line_pt + k * line_dir;

        // std::vector<Point3D> *projectedPoints = projectedPointCloud->getPointCloud();
        (*projectedPointCloud->getPointCloud())[i].setX(pp[0]);
        (*projectedPointCloud->getPointCloud())[i].setY(pp[1]);
        (*projectedPointCloud->getPointCloud())[i].setZ(pp[2]);
    }
}
Eigen::Vector3d StereoDisparity::getXyzValues(int u, int v, float disparity) {
    Eigen::Vector4d uvd1((double) u, (double) v, disparity, 1);
    Eigen::Vector4d xyzw = (*_uvd1_to_xyz) * uvd1;
    return xyzw.head<3>() / xyzw.w();
}
inline sm::kinematics::Transformation transformationFromPropertyTree(const sm::ConstPropertyTree & config) {
    Eigen::Vector4d q = sm::eigen::quaternionFromPropertyTree(sm::ConstPropertyTree(config, "q_a_b"));
    q = q / q.norm();
    Eigen::Vector3d t = sm::eigen::vector3FromPropertyTree(sm::ConstPropertyTree(config, "t_a_b_a"));
    return sm::kinematics::Transformation(q, t);
}
void BezierBoundarySolver::_IterateCurvatureReduction(BezierBoundaryProblem* pProblem, Eigen::Vector4d& params) {
    double epsilon = 0.0001;
    //create a jacobian for the parameters by perturbing them
    Eigen::Vector4d Jt; //transpose of the jacobian
    BezierBoundaryProblem origProblem = *pProblem;
    double maxK = _GetMaximumCurvature(pProblem);
    for(int ii = 0; ii < 4 ; ii++){
        Eigen::Vector4d epsilonParams = params;
        epsilonParams[ii] += epsilon;
        _Get5thOrderBezier(pProblem, epsilonParams);
        _Sample5thOrderBezier(pProblem);
        double kPlus = _GetMaximumCurvature(pProblem);

        epsilonParams[ii] -= 2*epsilon;
        _Get5thOrderBezier(pProblem, epsilonParams);
        _Sample5thOrderBezier(pProblem);
        double kMinus = _GetMaximumCurvature(pProblem);

        Jt[ii] = (kPlus-kMinus)/(2*epsilon); //central difference
    }

    //now that we have Jt, we can calculate JtJ
    Eigen::Matrix4d JtJ = Jt*Jt.transpose();
    //Tikhonov regularization
    JtJ += Eigen::Matrix4d::Identity();

    Eigen::Vector4d deltaParams = JtJ.inverse() * Jt*maxK;
    params -= deltaParams;
    _Get5thOrderBezier(pProblem, params);
    _Sample5thOrderBezier(pProblem);
    //double finalMaxK = _GetMaximumCurvature(pProblem);
    //dout("2D Iteration took k from " << maxK << " to " << finalMaxK);
}
static void qprod(const Eigen::Vector4d & q, const Eigen::Vector4d & p, Eigen::Vector4d & prod_result) {
    double a = q(0);
    Eigen::Vector3d v = q.segment(1, 3); //coefficients q
    double x = p(0);
    Eigen::Vector3d u = p.segment(1, 3); //coefficients p

    prod_result << a*x - v.transpose()*u, (a*u + x*v) + v.cross(u);
}
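// Illustrative sanity check (not from the original source): qprod above treats
// the Vector4d as a scalar-first quaternion [w, x, y, z] and implements the
// Hamilton product, so its result should match Eigen::Quaterniond composition,
// whose constructor is also scalar-first. Assumes qprod is visible here.
#include <Eigen/Geometry>
#include <cassert>
#include <cmath>

static void testQprodAgainstEigen() {
    const Eigen::Vector4d q(0.5, 0.5, 0.5, 0.5); // [w, x, y, z]
    const Eigen::Vector4d p(1.0, 0.0, 0.0, 0.0); // identity quaternion

    Eigen::Vector4d result;
    qprod(q, p, result);

    const Eigen::Quaterniond qe(q(0), q(1), q(2), q(3));
    const Eigen::Quaterniond pe(p(0), p(1), p(2), p(3));
    const Eigen::Quaterniond expected = qe * pe;

    assert(std::fabs(result(0) - expected.w()) < 1e-12);
    assert((result.tail<3>() - expected.vec()).norm() < 1e-12);
}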
void ExpMapQuaternion::pseudoLog_(RefVec out, const ConstRefVec& x, const ConstRefVec& y) {
    Eigen::Vector4d tmp;
    toQuat q(tmp.data());
    const toConstQuat xQ(x.data());
    const toConstQuat yQ(y.data());
    q = xQ.inverse()*yQ; //TODO double-check that formula
    logarithm(out, tmp);
}
Eigen::Vector3d PointcloudProc::projectPoint(Eigen::Vector4d& point, CamParams cam) const {
    Eigen::Vector3d keypoint;

    keypoint(0) = (cam.fx*point.x()) / point.z() + cam.cx;
    keypoint(1) = (cam.fy*point.y()) / point.z() + cam.cy;
    keypoint(2) = (cam.fx*(point.x()-cam.tx)/point.z() + cam.cx);

    return keypoint;
}
Eigen::Vector3d intersectRayPlane(const Eigen::Vector3d ray, const Eigen::Vector3d ray_origin, const Eigen::Vector4d plane) {
    // Plane defined as ax + by + cz + d = 0
    // Ray: P = P0 + tV
    // Plane: P.N + d = 0, where P is intersection point
    // t = -(P0.N + d)/(V.N) , P = P0 + tV
    float t = -(ray_origin.dot(plane.head(3)) + plane[3]) / (ray.dot(plane.head(3)));
    Eigen::Vector3d P = ray_origin + t*ray;
    return P;
}
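// Illustrative usage (not from the original source): a ray cast straight down
// from (0, 0, 10) onto the ground plane z = 0, i.e. plane coefficients
// (0, 0, 1, 0), should hit the origin. Callers should also guard against rays
// parallel to the plane, where ray.dot(plane.head(3)) is zero.
static void exampleIntersectRayPlane() {
    const Eigen::Vector3d ray(0.0, 0.0, -1.0);       // unit direction
    const Eigen::Vector3d ray_origin(0.0, 0.0, 10.0);
    const Eigen::Vector4d ground_plane(0.0, 0.0, 1.0, 0.0); // z = 0

    const Eigen::Vector3d hit = intersectRayPlane(ray, ray_origin, ground_plane);
    // hit is approximately (0, 0, 0)
    (void) hit;
}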
void randomUnitQuaternion(Eigen::Vector4d &quat) {
    static boost::random::mt19937 rng(time(NULL));
    static boost::random::normal_distribution<> normal;

    // Sampling an isotropic 4D Gaussian and normalizing yields a quaternion
    // uniformly distributed on the unit sphere; the loop only rejects samples
    // too close to zero to normalize safely.
    do {
        quat(0) = normal(rng);
        quat(1) = normal(rng);
        quat(2) = normal(rng);
        quat(3) = normal(rng);
    } while (quat.norm() < 0.00001);
    quat.normalize();
}
const double CMiniVisionToolbox::getTransformationDelta(const Eigen::Isometry3d& p_matTransformationFrom, const Eigen::Isometry3d& p_matTransformationTo) {
    //ds compute pose change
    const Eigen::Isometry3d matTransformChange(p_matTransformationTo*p_matTransformationFrom.inverse());

    //ds check point
    const Eigen::Vector4d vecSamplePoint(40.2, -1.25, 2.5, 1.0);
    double dNorm(vecSamplePoint.norm());
    const Eigen::Vector4d vecDifference(vecSamplePoint - matTransformChange*vecSamplePoint);
    dNorm = (dNorm + vecDifference.norm())/2;

    //ds return norm
    return (std::fabs(vecDifference(0)) + std::fabs(vecDifference(1)) + std::fabs(vecDifference(2)))/dNorm;
}
bool WHeadPositionCorrection::checkMovementThreshold(const WLEMDHPI::TransformationT& trans) {
    const Eigen::Vector4d diffTrans = m_transExc.data().col(3) - trans.data().col(3);
    if (diffTrans.norm() > m_movementThreshold) {
        m_transExc = trans;
        return true;
    }
    // TODO(pieloth): check rotation, e.g. with 1 or more key points
    // initial: pick 6 key points from dipoles min/max (x,0,0); (0,y,0); (0,0,z)
    // compare max distance
    // keyPoint 3x6
    // (trans * keyPoints).colwise().norm() - (m_transExc * keyPoints).colwise().norm()).maxCoeff(), mind homog. coord.
    return false;
}
mathtools::geometry::euclidian::Line<4> skeleton::model::Perspective::toObj(
        const Eigen::Matrix<double, skeleton::model::meta<skeleton::model::Projective>::stordim, 1> &vec,
        const mathtools::geometry::euclidian::Line<4> &) const {
    Eigen::Vector4d origin;
    origin.block<3,1>(0,0) = m_frame3->getOrigin();
    origin(3) = 0.0;

    Eigen::Vector4d vecdir;
    vecdir.block<3,1>(0,0) = m_frame3->getBasis()->getMatrix()*Eigen::Vector3d(vec(0), vec(1), 1.0);
    vecdir(3) = vec(2);
    vecdir.normalize();

    return mathtools::geometry::euclidian::Line<4>(origin, vecdir);
}
Visualization::Visualization(bot_lcmgl_t* lcmgl, const StereoCalibration* calib) :
    _lcmgl(lcmgl),
    _calibration(calib) {
    // take the Z corresponding to a disparity of 5 px as 'max Z'
    Eigen::Matrix4d uvdtoxyz = calib->getUvdToXyz();
    Eigen::Vector4d xyzw = uvdtoxyz * Eigen::Vector4d(1, 1, 5, 1);
    xyzw /= xyzw.w();
    _max_z = xyzw.z();

    // take the Z corresponding to a disparity of 3/4 of the image width as 'min Z'
    xyzw = uvdtoxyz * Eigen::Vector4d(1, 1, (3*calib->getWidth())/4, 1);
    xyzw /= xyzw.w();
    _min_z = xyzw.z();
}
void Node::motors_cb(const controller::Motors::ConstPtr& msg) {
    //get quad characteristics
    float d = msg->d;
    float kf = msg->kf;
    float kt = msg->kt;

    //get motor speeds (rad/s)
    const Eigen::Vector4d motor_speeds(msg->motor1, msg->motor2, msg->motor3, msg->motor4);

    //calculate forces and moments
    const Eigen::Vector4d motor_thrust = kf * motor_speeds.cwiseProduct(motor_speeds);
    Eigen::Matrix4d convert;
    convert << 1,   1,  1,   1,
               0,   d,  0,  -d,
              -d,   0,  d,   0,
              kt, -kt, kt, -kt;
    const Eigen::Vector4d f_moments = convert * motor_thrust;

    //publish message
    geometry_msgs::Twist out;
    out.linear.z = f_moments[0];
    out.angular.x = f_moments[1];
    out.angular.y = f_moments[2];
    out.angular.z = f_moments[3];
    force_pub_.publish(out);
}
Eigen::Vector2i pcl::visualization::worldToView(const Eigen::Vector4d &world_pt, const Eigen::Matrix4d &view_projection_matrix, int width, int height) {
    // Transform world to clipping coordinates
    Eigen::Vector4d world(view_projection_matrix * world_pt);
    // Normalize w-component
    world /= world.w();
    // X/Y screen space coordinate
    int screen_x = int(floor(double(((world.x() + 1) / 2.0) * width) + 0.5));
    int screen_y = int(floor(double(((world.y() + 1) / 2.0) * height) + 0.5));

    // Calculate -world_pt.y () because the screen Y axis is oriented top->down, i.e. 0 is top-left
    //int winY = (int) floor ( (double) (((1 - world_pt.y ()) / 2.0) * height) + 0.5); // top left

    return (Eigen::Vector2i(screen_x, screen_y));
}
std::unique_ptr<unsigned char[]> getRaster() {
    const unsigned int row_bytes = scene.cam.viewportWidth * 3;
    const unsigned int buf_len = scene.cam.viewportHeight * row_bytes;
    auto buf = std::unique_ptr<unsigned char[]>(new unsigned char[buf_len]);
    const double multFactor = 1 / (1.0 * NUM_AA_SUBPIXELS);
    const Eigen::Vector4d maxrgb(255, 255, 255, 0);

    tbb::parallel_for(
        tbb::blocked_range2d<int, int>(0, scene.cam.viewportHeight, 0, scene.cam.viewportWidth),
        [&](tbb::blocked_range2d<int, int> &r) {
            // Support for AA sampler
            std::vector<Ray, Eigen::aligned_allocator<Ray>> rays(
                NUM_AA_SUBPIXELS,
                Ray(Eigen::Vector4d(0, 0, 0, 1), Eigen::Vector4d(1, 0, 0, 0), ID_AIR));
            RandomAASampler sampler(scene.cam);

            for (int row = r.rows().begin(); row != r.rows().end(); ++row) {
                for (int col = r.cols().begin(); col != r.cols().end(); ++col) {
                    Camera cam(scene.cam);
                    Eigen::Vector4d color = Eigen::Vector4d::Zero();
                    // materials stack for refraction
                    std::array<int, MAX_DEPTH + 1> objStack;

                    if (!aa) {
                        Ray ray = cam.constructRayThroughPixel(col, row);
                        objStack.fill(ID_AIR);
                        color = getColor(ray, 0, 0, objStack);
                    } else {
                        sampler.constructRaysThroughPixel(col, row, rays);
                        for (int i = 0; i < NUM_AA_SUBPIXELS; ++i) {
                            objStack.fill(ID_AIR);
                            color += getColor(rays[i], 0, 0, objStack);
                        }
                        color *= multFactor;
                    }

                    color = 255 * color;
                    color = color.cwiseMin(maxrgb);
                    buf[row * row_bytes + (col * 3)] = color[0];
                    buf[row * row_bytes + (col * 3) + 1] = color[1];
                    buf[row * row_bytes + (col * 3) + 2] = color[2];
                }
            }
        });

    return buf;
}
void ObjectModelCylinder::selectWithinDistance(const Eigen::VectorXd &model_coefficients, double threshold, std::vector<int> &inliers) {
    assert(model_coefficients.size() == 7);

    int nr_p = 0;
    inliers.resize(this->inputPointCloud->getSize());

    Eigen::Vector4d line_pt(model_coefficients[0], model_coefficients[1], model_coefficients[2], 0);
    Eigen::Vector4d line_dir(model_coefficients[3], model_coefficients[4], model_coefficients[5], 0);
    double ptdotdir = line_pt.dot(line_dir);
    double dirdotdir = 1.0 / line_dir.dot(line_dir);

    // Iterate through the 3d points and calculate the distances from them to the cylinder
    for (size_t i = 0; i < this->inputPointCloud->getSize(); ++i) {
        // Approximate the distance from the point to the cylinder as the difference between
        // dist(point, cylinder_axis) and the cylinder radius
        Eigen::Vector4d pt = Eigen::Vector4d((*inputPointCloud->getPointCloud())[i].getX(),
                                             (*inputPointCloud->getPointCloud())[i].getY(),
                                             (*inputPointCloud->getPointCloud())[i].getZ(), 0);
        Eigen::Vector4d n = Eigen::Vector4d(this->normals->getNormals()->data()[i].getX(),
                                            this->normals->getNormals()->data()[i].getY(),
                                            this->normals->getNormals()->data()[i].getZ(), 0);

        double d_euclid = fabs(pointToLineDistance(pt, model_coefficients) - model_coefficients[6]);

        // Calculate the point's projection on the cylinder axis
        double k = (pt.dot(line_dir) - ptdotdir) * dirdotdir;
        Eigen::Vector4d pt_proj = line_pt + k * line_dir;
        Eigen::Vector4d dir = pt - pt_proj;
        dir.normalize();

        // Calculate the angular distance between the point normal and the (dir=pt_proj->pt) vector
        double d_normal = fabs(getAngle3D(n, dir));
        d_normal = fmin(d_normal, M_PI - d_normal);

        if (fabs(this->normalDistanceWeight * d_normal + (1 - this->normalDistanceWeight) * d_euclid) < threshold) {
            // Returns the indices of the points whose distances are smaller than the threshold
            inliers[nr_p] = i;
            nr_p++;
        }
    }
    inliers.resize(nr_p);
}
const CPoint3DCAMERA CMiniVisionToolbox::getPointStereoLinearTriangulationSVDDLT(const cv::Point2d& p_ptPointLEFT,
                                                                                 const cv::Point2d& p_ptPointRIGHT,
                                                                                 const Eigen::Matrix< double, 3, 4 >& p_matProjectionLEFT,
                                                                                 const Eigen::Matrix< double, 3, 4 >& p_matProjectionRIGHT) {
    //ds A matrix for system: A*X=0
    Eigen::Matrix4d matAHomogeneous;

    matAHomogeneous.row(0) = p_ptPointLEFT.x*p_matProjectionLEFT.row(2) - p_matProjectionLEFT.row(0);
    matAHomogeneous.row(1) = p_ptPointLEFT.y*p_matProjectionLEFT.row(2) - p_matProjectionLEFT.row(1);
    matAHomogeneous.row(2) = p_ptPointRIGHT.x*p_matProjectionRIGHT.row(2) - p_matProjectionRIGHT.row(0);
    matAHomogeneous.row(3) = p_ptPointRIGHT.y*p_matProjectionRIGHT.row(2) - p_matProjectionRIGHT.row(1);

    //ds solve the homogeneous system A*x=0 in the least-squares sense, subject to ||x||=1
    const Eigen::JacobiSVD< Eigen::Matrix4d > matSVD(matAHomogeneous, Eigen::ComputeFullU | Eigen::ComputeFullV);

    //ds solution x is the last column of V
    const Eigen::Vector4d vecX(matSVD.matrixV().col(3));

    return vecX.head(3)/vecX(3);
}
// Calculate the fitting error of a plane to certain 3D data
double PlaneFit::fitPlaneError(const Points3D& P, const Eigen::Vector4d& coeff) {
    // Calculate the normalised equation coefficients (to put the resulting error in units of normal distance to the plane)
    Eigen::Vector3d normal = coeff.head<3>();
    double norm = normal.norm();
    double scale = (norm > 0.0 ? norm : coeff.w());
    Eigen::Vector3d abc = normal / scale;
    double d = coeff.w() / scale;

    // Calculate the average distance to the plane of the data points
    double err = 0.0;
    size_t N = P.size();
    for (size_t i = 0; i < N; i++)
        err += fabs(abc.dot(P[i]) + d) / N;

    // Return the calculated error
    return err;
}
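// Illustrative usage (not from the original source), assuming Points3D is a
// std::vector-like container of Eigen::Vector3d and fitPlaneError is callable
// as a static member. Points lying exactly on the plane 0*x + 0*y + 1*z - 2 = 0
// should produce a fitting error of approximately zero.
static void examplePlaneFitError() {
    Points3D P;
    P.push_back(Eigen::Vector3d(0.0, 0.0, 2.0));
    P.push_back(Eigen::Vector3d(1.0, -1.0, 2.0));
    P.push_back(Eigen::Vector3d(-3.0, 5.0, 2.0));

    const Eigen::Vector4d coeff(0.0, 0.0, 1.0, -2.0); // (a, b, c, d)
    const double err = PlaneFit::fitPlaneError(P, coeff); // ~0.0
    (void) err;
}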
TEST(UncertainHomogeneousPointTestSuite, testUav) {
    try {
        using namespace sm::kinematics;
        Eigen::Vector4d p;
        p.setRandom();
        p = p/p.norm();

        Eigen::Matrix3d U;
        U = sm::eigen::randomCovariance<3>();

        UncertainHomogeneousPoint uhp(p, U);

        sm::eigen::assertNear(U, uhp.U_av_form(), 1e-10, SM_SOURCE_FILE_POS,
                              "Checking that I can recover the av/form point uncertainty");
    } catch(std::exception const & e) {
        FAIL() << e.what();
    }
}
namespace motors {

class Node {
 public:
    explicit Node(const ros::NodeHandle& pnh);
    void motors_cb(const controller::Motors::ConstPtr& msg);

 private:
    ros::NodeHandle pnh_;
    ros::Subscriber motors_sub_;
    ros::Publisher force_pub_;
}; //class Node

Node::Node(const ros::NodeHandle& pnh) : pnh_(pnh) {
    motors_sub_ = pnh_.subscribe("/motor_speeds", 1, &Node::motors_cb, this);
    force_pub_ = pnh_.advertise<geometry_msgs::Twist>("/force", 1);
}

void Node::motors_cb(const controller::Motors::ConstPtr& msg) {
    //get quad characteristics
    float d = msg->d;
    float kf = msg->kf;
    float kt = msg->kt;

    //get motor speeds (rad/s)
    const Eigen::Vector4d motor_speeds(msg->motor1, msg->motor2, msg->motor3, msg->motor4);

    //calculate forces and moments
    const Eigen::Vector4d motor_thrust = kf * motor_speeds.cwiseProduct(motor_speeds);
    Eigen::Matrix4d convert;
    convert << 1,   1,  1,   1,
               0,   d,  0,  -d,
              -d,   0,  d,   0,
              kt, -kt, kt, -kt;
    const Eigen::Vector4d f_moments = convert * motor_thrust;

    //publish message
    geometry_msgs::Twist out;
    out.linear.z = f_moments[0];
    out.angular.x = f_moments[1];
    out.angular.y = f_moments[2];
    out.angular.z = f_moments[3];
    force_pub_.publish(out);
}

} //namespace motors
EIGEN_MAKE_ALIGNED_OPERATOR_NEW /* Structures containing "fixed-sized vectorizable" Eigen classes need proper memory alignment */

Ray(const Eigen::Vector4d &orig, const Eigen::Vector4d &direction, const int fromObj = ID_AIR) :
    origin(orig), dir(direction), fromObj(fromObj) {
    BOOST_ASSERT_MSG(orig[3] == 1, "Origin of ray must have 4th coord equal to 1");
    BOOST_ASSERT_MSG(direction[3] == 0, "Direction of ray must have 4th coord equal to 0");
    BOOST_ASSERT_MSG(std::abs(direction.norm() - 1) < EPSILON, "Require unit direction vector!");
}
void ObjectModelCylinder::getDistancesToModel(const Eigen::VectorXd &model_coefficients, std::vector<double> &distances) {
    assert(model_coefficients.size() == 7);

    distances.resize(this->inputPointCloud->getSize());

    Eigen::Vector4d line_pt(model_coefficients[0], model_coefficients[1], model_coefficients[2], 0);
    Eigen::Vector4d line_dir(model_coefficients[3], model_coefficients[4], model_coefficients[5], 0);
    double ptdotdir = line_pt.dot(line_dir);
    double dirdotdir = 1.0 / line_dir.dot(line_dir);

    // Iterate through the 3d points and calculate the distances from them to the cylinder
    for (size_t i = 0; i < this->inputPointCloud->getSize(); ++i) {
        // Approximate the distance from the point to the cylinder as the difference between
        // dist(point, cylinder_axis) and the cylinder radius
        // Todo to be revised
        Eigen::Vector4d pt = Eigen::Vector4d((*inputPointCloud->getPointCloud())[i].getX(),
                                             (*inputPointCloud->getPointCloud())[i].getY(),
                                             (*inputPointCloud->getPointCloud())[i].getZ(), 0);
        Eigen::Vector4d n = Eigen::Vector4d(this->normals->getNormals()->data()[i].getX(),
                                            this->normals->getNormals()->data()[i].getY(),
                                            this->normals->getNormals()->data()[i].getZ(), 0);

        double d_euclid = fabs(pointToLineDistance(pt, model_coefficients) - model_coefficients[6]);

        // Calculate the point's projection on the cylinder axis
        double k = (pt.dot(line_dir) - ptdotdir) * dirdotdir;
        Eigen::Vector4d pt_proj = line_pt + k * line_dir;
        Eigen::Vector4d dir = pt - pt_proj;
        dir.normalize();

        // Calculate the angular distance between the point normal and the (dir=pt_proj->pt) vector
        double d_normal = fabs(getAngle3D(n, dir));
        d_normal = fmin(d_normal, M_PI - d_normal);

        distances[i] = fabs(this->normalDistanceWeight * d_normal + (1 - this->normalDistanceWeight) * d_euclid);
    }
}
/*
 * @brief Convert a quaternion to the corresponding rotation matrix
 * @note Pay attention to the convention used. The function follows the
 *       conversion in "Indirect Kalman Filter for 3D Attitude Estimation:
 *       A Tutorial for Quaternion Algebra", Equation (78).
 *
 *       The input quaternion should be in the form
 *       [q1, q2, q3, q4(scalar)]^T
 */
inline Eigen::Matrix3d quaternionToRotation(const Eigen::Vector4d& q) {
    const Eigen::Vector3d& q_vec = q.block(0, 0, 3, 1);
    const double& q4 = q(3);
    Eigen::Matrix3d R =
        (2*q4*q4-1)*Eigen::Matrix3d::Identity() -
        2*q4*skewSymmetric(q_vec) +
        2*q_vec*q_vec.transpose();
    //TODO: Is it necessary to use the approximation equation
    //      (Equation (87)) when the rotation angle is small?
    return R;
}
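// Illustrative sanity checks (not from the original source): with the convention
// above, the identity quaternion [0, 0, 0, 1]^T must map to the identity matrix,
// and any unit quaternion must map to an orthonormal matrix. Assumes
// quaternionToRotation and skewSymmetric are visible here.
#include <cassert>

static void checkQuaternionToRotation() {
    const Eigen::Vector4d q_identity(0.0, 0.0, 0.0, 1.0);
    assert(quaternionToRotation(q_identity).isApprox(Eigen::Matrix3d::Identity(), 1e-12));

    const Eigen::Vector4d q = Eigen::Vector4d(0.1, -0.2, 0.3, 0.9).normalized();
    const Eigen::Matrix3d R = quaternionToRotation(q);
    assert((R * R.transpose()).isApprox(Eigen::Matrix3d::Identity(), 1e-9));
}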
void cloudCallback(const pcl::PointCloud<pcl::PointXYZ>::ConstPtr & cloud) {
    if ((!cloud_updated) && (goal_completion_time + ros::Duration(2.0) < cloud->header.stamp)) {
        try {
            // update the pose
            listener.waitForTransform("/arm_kinect_frame", "/map", cloud->header.stamp, ros::Duration(5.0));
            listener.lookupTransform("/map", "/arm_kinect_frame", cloud->header.stamp, kinect2map);
            listener.waitForTransform("/arm_kinect_optical_frame", "/map", cloud->header.stamp, ros::Duration(5.0));
            listener.lookupTransform("/map", "/arm_kinect_optical_frame", cloud->header.stamp, optical2map);

            tf::Vector3 position_(kinect2map.getOrigin());
            position.x() = position_.x();
            position.y() = position_.y();
            position.z() = position_.z();

            tf::Quaternion orientation_(kinect2map.getRotation());
            orientation.x() = orientation_.x();
            orientation.y() = orientation_.y();
            orientation.z() = orientation_.z();
            orientation.w() = orientation_.w();

            ROS_INFO_STREAM("position = " << position.transpose());
            ROS_INFO_STREAM("orientation = " << orientation.transpose());

            // update the cloud
            pcl::copyPointCloud(*cloud, *xyz_cld_ptr); // Do I need to copy it?
            //xyz_cld_ptr = cloud;

            cloud_updated = true;
        } catch (const tf::TransformException& ex) {
            ROS_ERROR("%s", ex.what());
        }
    }
}
/*
 * @brief Convert a rotation matrix to a quaternion.
 * @note Pay attention to the convention used. The function follows the
 *       conversion in "Indirect Kalman Filter for 3D Attitude Estimation:
 *       A Tutorial for Quaternion Algebra", Equation (78).
 *
 *       The returned quaternion is in the form
 *       [q1, q2, q3, q4(scalar)]^T
 */
inline Eigen::Vector4d rotationToQuaternion(const Eigen::Matrix3d& R) {
    Eigen::Vector4d score;
    score(0) = R(0, 0);
    score(1) = R(1, 1);
    score(2) = R(2, 2);
    score(3) = R.trace();

    int max_row = 0, max_col = 0;
    score.maxCoeff(&max_row, &max_col);

    Eigen::Vector4d q = Eigen::Vector4d::Zero();
    if (max_row == 0) {
        q(0) = std::sqrt(1+2*R(0, 0)-R.trace()) / 2.0;
        q(1) = (R(0, 1)+R(1, 0)) / (4*q(0));
        q(2) = (R(0, 2)+R(2, 0)) / (4*q(0));
        q(3) = (R(1, 2)-R(2, 1)) / (4*q(0));
    } else if (max_row == 1) {
        q(1) = std::sqrt(1+2*R(1, 1)-R.trace()) / 2.0;
        q(0) = (R(0, 1)+R(1, 0)) / (4*q(1));
        q(2) = (R(1, 2)+R(2, 1)) / (4*q(1));
        q(3) = (R(2, 0)-R(0, 2)) / (4*q(1));
    } else if (max_row == 2) {
        q(2) = std::sqrt(1+2*R(2, 2)-R.trace()) / 2.0;
        q(0) = (R(0, 2)+R(2, 0)) / (4*q(2));
        q(1) = (R(1, 2)+R(2, 1)) / (4*q(2));
        q(3) = (R(0, 1)-R(1, 0)) / (4*q(2));
    } else {
        q(3) = std::sqrt(1+R.trace()) / 2.0;
        q(0) = (R(1, 2)-R(2, 1)) / (4*q(3));
        q(1) = (R(2, 0)-R(0, 2)) / (4*q(3));
        q(2) = (R(0, 1)-R(1, 0)) / (4*q(3));
    }

    if (q(3) < 0) q = -q;
    quaternionNormalize(q);
    return q;
}
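// Illustrative round-trip check (not from the original source), assuming
// rotationToQuaternion, quaternionToRotation and quaternionNormalize live in
// the same header as above: converting a unit quaternion to a matrix and back
// should recover it, with the sign fixed by the q(3) >= 0 rule.
#include <algorithm>
#include <cassert>

static void checkQuaternionRoundTrip() {
    const Eigen::Vector4d q = Eigen::Vector4d(0.3, -0.1, 0.4, 0.85).normalized();
    const Eigen::Matrix3d R = quaternionToRotation(q);
    const Eigen::Vector4d q_back = rotationToQuaternion(R);

    // Allow a sign flip for robustness, although rotationToQuaternion already
    // enforces a non-negative scalar part.
    const double diff = std::min((q - q_back).norm(), (q + q_back).norm());
    assert(diff < 1e-9);
}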
bool FrustumCulling::Contains(const Eigen::Vector3d& point) const {
    Eigen::Vector4d pointHomo = toHomo(point);

    return (pointHomo.dot(leftPlane_) <= 0) &&
           (pointHomo.dot(rightPlane_) <= 0) &&
           (pointHomo.dot(topPlane_) <= 0) &&
           (pointHomo.dot(bottomPlane_) <= 0) &&
           (pointHomo.dot(nearPlane_) <= 0) &&
           // the far plane is not taken into account when filtering points (its computation could be omitted)
           (pointHomo.dot(farPlane_) <= 0);
}
void publishPointMarker(ros::Publisher &vis_pub, const Eigen::Vector4d &p, int ID) {
    visualization_msgs::Marker marker;
    // Set the frame ID and timestamp. See the TF tutorials for information on these.
    marker.header.frame_id = "base_link";
    marker.header.stamp = ros::Time::now();
    marker.ns = "point";
    marker.id = ID;
    // Set the marker type. Initially this is CUBE, and cycles between that and SPHERE, ARROW, and CYLINDER
    marker.type = visualization_msgs::Marker::SPHERE;
    marker.action = visualization_msgs::Marker::ADD;

    marker.pose.position.x = p.x();
    marker.pose.position.y = p.y();
    marker.pose.position.z = p.z();
    marker.pose.orientation.x = 0.0;
    marker.pose.orientation.y = 0.0;
    marker.pose.orientation.z = 0.0;
    marker.pose.orientation.w = 1.0;

    marker.scale.x = 0.05;
    marker.scale.y = 0.05;
    marker.scale.z = 0.05;

    marker.color.a = 1.0;
    marker.color.r = 1.0;
    marker.color.g = 0.0;
    marker.color.b = 0.0;

    marker.lifetime = ros::Duration();
    vis_pub.publish(marker);
}
Eigen::Vector4d Centroid3D::computeCentroid(PointCloud3D *inCloud, const std::vector<int> &indices) {
    Eigen::Vector4d centroid;
    centroid.setZero();
    if (indices.size() == 0) {
        return Eigen::Vector4d(0, 0, 0, 0);
    }

    // For each point in the cloud
    int cp = 0;
    for (size_t i = 0; i < indices.size(); ++i) {
        // Check if the point is invalid
        if (isnan((*inCloud->getPointCloud())[indices[i]].getX()) ||
            isnan((*inCloud->getPointCloud())[indices[i]].getY()) ||
            isnan((*inCloud->getPointCloud())[indices[i]].getZ()))
            continue;

        centroid[0] += (*inCloud->getPointCloud())[indices[i]].getX();
        centroid[1] += (*inCloud->getPointCloud())[indices[i]].getY();
        centroid[2] += (*inCloud->getPointCloud())[indices[i]].getZ();
        cp++;
    }

    // Avoid a division by zero when every indexed point was NaN
    if (cp == 0) {
        return Eigen::Vector4d(0, 0, 0, 0);
    }
    centroid /= cp;
    return centroid;
}
double ObjectModelCylinder::pointToLineDistance(const Eigen::Vector4d &pt, const Eigen::Vector4d &line_pt, const Eigen::Vector4d &line_dir) {
    // Calculate the distance from the point to the line
    // D = ||(P2-P1) x (P1-P0)|| / ||P2-P1|| = norm (cross (p2-p1, p2-p0)) / norm (p2-p1)
    Eigen::Vector4d r, p_t;
    r = line_pt + line_dir;
    p_t = r - pt;
#ifdef EIGEN3
    Eigen::Vector3d c = p_t.head<3>().cross(line_dir.head<3>());
#else
    Eigen::Vector3d c = p_t.start<3>().cross(line_dir.start<3>());
#endif
    return (sqrt(c.dot(c) / line_dir.dot(line_dir)));
}
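// Illustrative check of the distance formula above (not from the original
// source; the instance name "model" is hypothetical and the member is assumed
// to be accessible from here): a point on the axis has distance zero, and a
// point offset perpendicularly by 0.5 has distance 0.5, independent of the
// (non-unit) direction length.
#include <cassert>
#include <cmath>

static void checkPointToLineDistance(ObjectModelCylinder &model) {
    const Eigen::Vector4d line_pt(1.0, 2.0, 3.0, 0.0);
    const Eigen::Vector4d line_dir(0.0, 0.0, 2.0, 0.0); // deliberately not normalized

    const Eigen::Vector4d on_line = line_pt + 5.0 * line_dir;
    const Eigen::Vector4d off_line = line_pt + Eigen::Vector4d(0.5, 0.0, 0.0, 0.0);

    assert(std::fabs(model.pointToLineDistance(on_line, line_pt, line_dir)) < 1e-12);
    assert(std::fabs(model.pointToLineDistance(off_line, line_pt, line_dir) - 0.5) < 1e-12);
}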