void Mass::f(Eigen::Vector4f *pos, Eigen::Vector4f *result)
{
  float stiff = 10.0f;
  Eigen::Vector4f a;
  a << 0, 0, 0, 0;
  Eigen::Vector4f v;
  v << velocity.x(), velocity.y(), velocity.z(), 0;

  // Accumulate spring forces from all linked masses
  for (size_t i = 0; i < links.size(); i++)
  {
    Mass *link = links[i];
    float length = (*pos - link->position).norm();
    // Deviation from the rest length (2.0f / 40 = 0.05)
    float stretch = length - 2.0f / 40;
    Eigen::Vector4f force = stiff * (link->position - (*pos)) * stretch;
    // Clamp the force magnitude to 0.2
    if (force.norm() > .2f)
      force = force * (.2f / force.norm());
    a += force;
  }

  // Gravity acts along z
  a.z() += .2f * GRAVITY * .0001f;

  // Zero out velocity and acceleration on collision
  for (size_t i = 0; i < objects.size(); i++)
  {
    Eigen::Vector4f normal;
    if (objects[i]->intersects(&position, &normal))
    {
      v *= 0;
      a *= 0;
    }
  }

  v += a;
  *result = v;
}
vtkSmartPointer<vtkDataSet>
pcl::visualization::createLine (const Eigen::Vector4f &pt1, const Eigen::Vector4f &pt2)
{
  vtkSmartPointer<vtkLineSource> line = vtkSmartPointer<vtkLineSource>::New ();
  line->SetPoint1 (pt1.x (), pt1.y (), pt1.z ());
  line->SetPoint2 (pt2.x (), pt2.y (), pt2.z ());
  line->Update ();

  return (line->GetOutput ());
}
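// createLine above builds the underlying VTK line geometry; at the user level, lines are
// typically drawn through PCLVisualizer::addLine. The following is a minimal usage sketch
// (not from the original source) with made-up endpoint values and identifiers.
#include <pcl/visualization/pcl_visualizer.h>
#include <pcl/point_types.h>

void draw_line_example ()
{
  pcl::visualization::PCLVisualizer viewer ("line example");
  pcl::PointXYZ p1 (0.0f, 0.0f, 0.0f);
  pcl::PointXYZ p2 (1.0f, 1.0f, 0.5f);
  viewer.addLine (p1, p2, "example_line");
  viewer.spin ();
}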
template <typename PointT> inline void
pcl::computeCovarianceMatrix (const pcl::PointCloud<PointT> &cloud,
                              const std::vector<int> &indices,
                              const Eigen::Vector4f &centroid,
                              Eigen::Matrix3f &covariance_matrix)
{
  // Initialize to 0
  covariance_matrix.setZero ();

  if (indices.empty ())
    return;

  // If the data is dense, we don't need to check for NaN
  if (cloud.is_dense)
  {
    // For each point in the cloud
    for (size_t i = 0; i < indices.size (); ++i)
    {
      Eigen::Vector4f pt = cloud.points[indices[i]].getVector4fMap () - centroid;

      covariance_matrix (1, 1) += pt.y () * pt.y ();
      covariance_matrix (1, 2) += pt.y () * pt.z ();
      covariance_matrix (2, 2) += pt.z () * pt.z ();

      pt *= pt.x ();
      covariance_matrix (0, 0) += pt.x ();
      covariance_matrix (0, 1) += pt.y ();
      covariance_matrix (0, 2) += pt.z ();
    }
  }
  // NaN or Inf values could exist => check for them
  else
  {
    // For each point in the cloud
    for (size_t i = 0; i < indices.size (); ++i)
    {
      // Check if the point is invalid
      if (!pcl_isfinite (cloud.points[indices[i]].x) ||
          !pcl_isfinite (cloud.points[indices[i]].y) ||
          !pcl_isfinite (cloud.points[indices[i]].z))
        continue;

      Eigen::Vector4f pt = cloud.points[indices[i]].getVector4fMap () - centroid;

      covariance_matrix (1, 1) += pt.y () * pt.y ();
      covariance_matrix (1, 2) += pt.y () * pt.z ();
      covariance_matrix (2, 2) += pt.z () * pt.z ();

      pt *= pt.x ();
      covariance_matrix (0, 0) += pt.x ();
      covariance_matrix (0, 1) += pt.y ();
      covariance_matrix (0, 2) += pt.z ();
    }
  }
  covariance_matrix (1, 0) = covariance_matrix (0, 1);
  covariance_matrix (2, 0) = covariance_matrix (0, 2);
  covariance_matrix (2, 1) = covariance_matrix (1, 2);
}
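// A minimal usage sketch (assumed, not part of the original code): compute the centroid of
// a subset of points, pass it to the computeCovarianceMatrix overload above, and extract
// the eigenvector of the smallest eigenvalue, which is commonly used as a surface normal.
#include <pcl/point_cloud.h>
#include <pcl/point_types.h>
#include <pcl/common/centroid.h>
#include <Eigen/Eigenvalues>

void covariance_example (const pcl::PointCloud<pcl::PointXYZ> &cloud,
                         const std::vector<int> &indices)
{
  Eigen::Vector4f centroid;
  pcl::compute3DCentroid (cloud, indices, centroid);

  Eigen::Matrix3f covariance;
  pcl::computeCovarianceMatrix (cloud, indices, centroid, covariance);

  // Eigenvalues are sorted in increasing order: column 0 is the smallest-eigenvalue
  // eigenvector, i.e. the direction of least variance.
  Eigen::SelfAdjointEigenSolver<Eigen::Matrix3f> solver (covariance);
  Eigen::Vector3f normal = solver.eigenvectors ().col (0);
  (void)normal;
}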
template <typename PointNT> void
pcl::GridProjection<PointNT>::getProjectionWithPlaneFit (const Eigen::Vector4f &p,
                                                         std::vector<int> (&pt_union_indices),
                                                         Eigen::Vector4f &projection)
{
  // Compute the plane coefficients
  Eigen::Vector4f model_coefficients;
  /// @remark iterative weighted least squares or sac might give better results
  Eigen::Matrix3f covariance_matrix;
  Eigen::Vector4f xyz_centroid;

  computeMeanAndCovarianceMatrix (*data_, pt_union_indices, covariance_matrix, xyz_centroid);

  // Get the plane normal
  EIGEN_ALIGN16 Eigen::Vector3f::Scalar eigen_value;
  EIGEN_ALIGN16 Eigen::Vector3f eigen_vector;
  pcl::eigen33 (covariance_matrix, eigen_value, eigen_vector);

  // The normalization is not necessary, since the eigenvectors from libeigen are already normalized
  model_coefficients[0] = eigen_vector [0];
  model_coefficients[1] = eigen_vector [1];
  model_coefficients[2] = eigen_vector [2];
  model_coefficients[3] = 0;
  // Hessian form (D = nc . p_plane (centroid here) + p)
  model_coefficients[3] = -1 * model_coefficients.dot (xyz_centroid);

  // Projected point
  Eigen::Vector3f point (p.x (), p.y (), p.z ()); //= Eigen::Vector3f::MapAligned (&output.points[cp].x, 3);
  float distance = point.dot (model_coefficients.head<3> ()) + model_coefficients[3];
  point -= distance * model_coefficients.head<3> ();

  projection = Eigen::Vector4f (point[0], point[1], point[2], 0);
}
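// A small standalone sketch (illustrative, not part of the original code) of the projection
// step used above: for a plane n . x + d = 0 with unit normal n, a point q projects to
// q - (n . q + d) * n.
#include <Eigen/Core>

Eigen::Vector3f project_onto_plane (const Eigen::Vector3f &q,
                                    const Eigen::Vector3f &n,   // assumed unit length
                                    float d)
{
  float signed_distance = q.dot (n) + d;
  return q - signed_distance * n;
}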
void Mass::update(float dt)
{
  if (pinned)
    return;

  Eigen::Vector4f guess;
  guess << position.x(), position.y(), position.z(), 1;
  Eigen::Vector4f prev;
  prev << position.x(), position.y(), position.z(), 1;
  Eigen::Vector4f vguess;
  vguess << velocity.x(), velocity.y(), velocity.z(), 1;

  for (int i = 0; i < 1; i++)
  {
    f(&guess, &vguess);
    guess = guess + 10 * dt * vguess;
  }

  temp = guess;
  // position = position + velocity * dt;
}
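// For reference, a minimal semi-implicit (symplectic) Euler step is sketched below. This is
// the generic textbook form, not the project's integrator; the scaling factor applied to dt
// in Mass::update above is specific to that code.
#include <Eigen/Core>

void semi_implicit_euler_step (Eigen::Vector3f &position,
                               Eigen::Vector3f &velocity,
                               const Eigen::Vector3f &acceleration,
                               float dt)
{
  velocity += acceleration * dt;   // update velocity first
  position += velocity * dt;       // then advance position with the new velocity
}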
Eigen::Vector3f quaternionMutltiply(Eigen::Vector4f quat, Eigen::Vector3f vec)
{
  float num = quat.x() * 2.0f;
  float num2 = quat.y() * 2.0f;
  float num3 = quat.z() * 2.0f;
  float num4 = quat.x() * num;
  float num5 = quat.y() * num2;
  float num6 = quat.z() * num3;
  float num7 = quat.x() * num2;
  float num8 = quat.x() * num3;
  float num9 = quat.y() * num3;
  float num10 = quat.w() * num;
  float num11 = quat.w() * num2;
  float num12 = quat.w() * num3;

  Eigen::Vector3f result;
  result.x() = (1.0f - (num5 + num6)) * vec.x() + (num7 - num12) * vec.y() + (num8 + num11) * vec.z();
  result.y() = (num7 + num12) * vec.x() + (1.0f - (num4 + num6)) * vec.y() + (num9 - num10) * vec.z();
  result.z() = (num8 - num11) * vec.x() + (num9 + num10) * vec.y() + (1.0f - (num4 + num5)) * vec.z();
  return result;
}
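// An equivalent formulation (a sketch for comparison, not part of the original code): with
// the quaternion stored as (x, y, z, w) in the Vector4f above, the same rotation can be
// expressed with Eigen::Quaternionf, whose constructor takes (w, x, y, z).
#include <Eigen/Geometry>

Eigen::Vector3f rotate_with_eigen (const Eigen::Vector4f &quat, const Eigen::Vector3f &vec)
{
  Eigen::Quaternionf q (quat.w (), quat.x (), quat.y (), quat.z ());
  q.normalize ();   // the expanded form above assumes a unit quaternion
  return q * vec;   // quaternion-vector product applies the rotation
}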
/** \brief Constructor. Sets the cloud we search over, and the maximum radius search we will guarantee to be correct
  * \param cloud the incoming point cloud
  * \param tol the maximum radius search we will guarantee to be correct
  */
SplitCloud2::SplitCloud2(pcl::PointCloud<pcl::PointXYZ> &cloud, double tol)
{
  max_tol = tol;
  Eigen::Vector4f vec;
  pcl::compute3DCentroid(cloud, vec);
  xdiv = vec.x();
  ydiv = vec.y();
  zdiv = vec.z();
  fillMInds(cloud);   // fill out divs
  initClouds(cloud);
}

SplitCloud2::~SplitCloud2(void)
{
}

//////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/** \brief fill a list of indices that indicate which hexiquadrant the point is in
  * also, fills out the boundaries of the subdivisions
  * \param cloud the incoming point cloud
  */
void SplitCloud2::fillMInds(pcl::PointCloud<pcl::PointXYZ> &cloud)
{
  minds.resize(8);
  for (size_t i = 0; i < cloud.points.size(); i++)
    minds[getMIndex(cloud.points[i])].push_back(i);

  Eigen::Vector4f vec;
  for (int i = 0; i < 8; i++)
  {
    pcl::compute3DCentroid(cloud, minds[i], vec);
    xdivs[i] = vec.x();
    ydivs[i] = vec.y();
    zdivs[i] = vec.z();
  }
}
void computeCovarianceMatrix (PointCloudRGB cloud, std::vector<int> indices,
                              Eigen::Vector4f centroid, Eigen::Matrix3f &covariance_matrix)
{
  // ROS_INFO("Inside computeCovarianceMatrix() ");
  // Initialize to 0
  covariance_matrix.setZero ();

  if (indices.empty ())
    return;

  // If the data is dense, we don't need to check for NaN
  if (cloud.is_dense)
  {
    // For each point in the cloud
    for (size_t i = 0; i < indices.size (); ++i)
    {
      //Eigen::Vector4f ptx = cloud.points[indices[i]].getVector4fMap ();
      //std::cout << "Index : " << i << " : " << ptx << std::endl;
      Eigen::Vector4f pt = cloud.points[indices[i]].getVector4fMap () - centroid;

      covariance_matrix (0, 0) += pt.x () * pt.x ();
      covariance_matrix (0, 1) += pt.x () * pt.y ();
      covariance_matrix (0, 2) += pt.x () * pt.z ();
      covariance_matrix (1, 1) += pt.y () * pt.y ();
      covariance_matrix (1, 2) += pt.y () * pt.z ();
      covariance_matrix (2, 2) += pt.z () * pt.z ();
    }
  }
  // NaN or Inf values could exist => check for them
  else
  {
    //std::cout << "Cloud is not dense " << std::endl;
    // For each point in the cloud
    for (size_t i = 0; i < indices.size (); ++i)
    {
      // Check if the point is invalid
      if (!pcl_isfinite (cloud.points[indices[i]].x) ||
          !pcl_isfinite (cloud.points[indices[i]].y) ||
          !pcl_isfinite (cloud.points[indices[i]].z))
        continue;

      //Eigen::Vector4f ptx = cloud.points[indices[i]].getVector4fMap ();
      //std::cout << "Index : " << i << " : " << ptx << std::endl;
      Eigen::Vector4f pt = cloud.points[indices[i]].getVector4fMap () - centroid;

      covariance_matrix (0, 0) += pt.x () * pt.x ();
      covariance_matrix (0, 1) += pt.x () * pt.y ();
      covariance_matrix (0, 2) += pt.x () * pt.z ();
      covariance_matrix (1, 1) += pt.y () * pt.y ();
      covariance_matrix (1, 2) += pt.y () * pt.z ();
      covariance_matrix (2, 2) += pt.z () * pt.z ();
    }
  }

  // Fill in the lower triangle from the symmetric upper triangle
  covariance_matrix (1, 0) = covariance_matrix (0, 1);
  covariance_matrix (2, 0) = covariance_matrix (0, 2);
  covariance_matrix (2, 1) = covariance_matrix (1, 2);

  // Normalize by the number of indices (note: this includes any skipped non-finite points)
  covariance_matrix /= indices.size ();
}
void window::process_scanline(int y, const Eigen::Vector4f& pa, const Eigen::Vector4f& pb,
                              const Eigen::Vector4f& pc, const Eigen::Vector4f& pd)
{
    if (y < 0 || y >= height) {
        return;
    }

    float gradient1 = pa.y() != pb.y() ? (y - pa.y()) / (pb.y() - pa.y()) : 1.0f;
    float gradient2 = pc.y() != pd.y() ? (y - pc.y()) / (pd.y() - pc.y()) : 1.0f;

    int sx = static_cast<int>(interpolate(pa.x(), pb.x(), gradient1));
    int ex = static_cast<int>(interpolate(pc.x(), pd.x(), gradient2));

    float z1 = interpolate(pa.z(), pb.z(), gradient1);
    float z2 = interpolate(pc.z(), pd.z(), gradient2);

    for (int x = sx; x < ex; ++x) {
        if (x < 0 || x >= width) {
            continue;
        }

        size_t index = (y * width) + x;
        float z = interpolate(z1, z2, (x - sx) / static_cast<float>(ex - sx));

        if (depth_buf[index] > z) {
            continue;
        }

        depth_buf[index] = z;
        buf[index] = YUMERGBA(r, g, b, a);
    }
}
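// The interpolate() helper is not shown above; a common definition in scanline rasterizers
// is a clamped linear interpolation. The sketch below is an assumption about what the
// missing function looks like, not the project's actual code.
#include <algorithm>

static float interpolate(float start, float end, float gradient)
{
    gradient = std::max(0.0f, std::min(gradient, 1.0f));  // clamp to [0, 1]
    return start + (end - start) * gradient;
}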
// according to http://www.pcl-users.org/Finding-oriented-bounding-box-of-a-cloud-td4024616.html
FitCube PCLTools::fitBox(PointCloud<PointXYZ>::Ptr cloud)
{
    FitCube retCube;
    PCA<PointXYZ> pca;
    PointCloud<PointXYZ> proj;

    pca.setInputCloud(cloud);
    pca.project(*cloud, proj);

    PointXYZ proj_min;
    PointXYZ proj_max;
    getMinMax3D(proj, proj_min, proj_max);

    PointXYZ min;
    PointXYZ max;
    pca.reconstruct(proj_min, min);
    pca.reconstruct(proj_max, max);

    // Rotation of PCA
    Eigen::Matrix3f rot_mat = pca.getEigenVectors();
    // Translation of PCA
    Eigen::Vector3f cl_translation = pca.getMean().head(3);

    Eigen::Matrix3f affine_trans;
    // Reordering of principal components
    affine_trans.col(2) << (rot_mat.col(0).cross(rot_mat.col(1))).normalized();
    affine_trans.col(0) << rot_mat.col(0);
    affine_trans.col(1) << rot_mat.col(1);

    retCube.rotation = Eigen::Quaternionf(affine_trans);
    Eigen::Vector4f t = pca.getMean();
    retCube.translation = Eigen::Vector3f(t.x(), t.y(), t.z());
    retCube.width = fabs(proj_max.x - proj_min.x);
    retCube.height = fabs(proj_max.y - proj_min.y);
    retCube.depth = fabs(proj_max.z - proj_min.z);

    return retCube;
}
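// A possible way to visualize the fitted box (a sketch, assuming only the FitCube fields
// used above): PCLVisualizer::addCube accepts exactly this translation/rotation/extent
// parameterization. The id string is made up for illustration.
#include <pcl/visualization/pcl_visualizer.h>

void show_fit_cube_example(pcl::visualization::PCLVisualizer &viewer, const FitCube &cube)
{
    viewer.addCube(cube.translation, cube.rotation,
                   cube.width, cube.height, cube.depth, "oriented_bbox");
}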
template <typename PointT, typename PointNT, typename PointFeature> void
pcl::NormalBasedSignatureEstimation<PointT, PointNT, PointFeature>::computeFeature (FeatureCloud &output)
{
  // do a few checks before starting the computations
  PointFeature test_feature;
  (void)test_feature;
  if (N_prime_ * M_prime_ != sizeof (test_feature.values) / sizeof (float))
  {
    PCL_ERROR ("NormalBasedSignatureEstimation: not using the proper signature size: %u vs %u\n",
               N_prime_ * M_prime_, sizeof (test_feature.values) / sizeof (float));
    return;
  }

  std::vector<int> k_indices;
  std::vector<float> k_sqr_distances;

  tree_->setInputCloud (input_);
  output.points.resize (indices_->size ());

  for (size_t index_i = 0; index_i < indices_->size (); ++index_i)
  {
    size_t point_i = (*indices_)[index_i];

    Eigen::MatrixXf s_matrix (N_, M_);

    Eigen::Vector4f center_point = input_->points[point_i].getVector4fMap ();

    for (size_t k = 0; k < N_; ++k)
    {
      Eigen::VectorXf s_row (M_);

      for (size_t l = 0; l < M_; ++l)
      {
        Eigen::Vector4f normal = normals_->points[point_i].getNormalVector4fMap ();
        Eigen::Vector4f normal_u = Eigen::Vector4f::Zero ();
        Eigen::Vector4f normal_v = Eigen::Vector4f::Zero ();

        if (fabs (normal.x ()) > 0.0001f)
        {
          normal_u.x () = - normal.y () / normal.x ();
          normal_u.y () = 1.0f;
          normal_u.z () = 0.0f;
          normal_u.normalize ();
        }
        else if (fabs (normal.y ()) > 0.0001f)
        {
          normal_u.x () = 1.0f;
          normal_u.y () = - normal.x () / normal.y ();
          normal_u.z () = 0.0f;
          normal_u.normalize ();
        }
        else
        {
          normal_u.x () = 0.0f;
          normal_u.y () = 1.0f;
          normal_u.z () = - normal.y () / normal.z ();
        }
        normal_v = normal.cross3 (normal_u);

        Eigen::Vector4f zeta_point = 2.0f * static_cast<float> (l + 1) * scale_h_ / static_cast<float> (M_) *
            (cosf (2.0f * static_cast<float> (M_PI) * static_cast<float> ((k + 1) / N_)) * normal_u +
             sinf (2.0f * static_cast<float> (M_PI) * static_cast<float> ((k + 1) / N_)) * normal_v);

        // Compute normal by using the neighbors
        Eigen::Vector4f zeta_point_plus_center = zeta_point + center_point;
        PointT zeta_point_pcl;
        zeta_point_pcl.x = zeta_point_plus_center.x ();
        zeta_point_pcl.y = zeta_point_plus_center.y ();
        zeta_point_pcl.z = zeta_point_plus_center.z ();

        tree_->radiusSearch (zeta_point_pcl, search_radius_, k_indices, k_sqr_distances);

        // Do k nearest search if there are no neighbors nearby
        if (k_indices.size () == 0)
        {
          k_indices.resize (5);
          k_sqr_distances.resize (5);
          tree_->nearestKSearch (zeta_point_pcl, 5, k_indices, k_sqr_distances);
        }

        Eigen::Vector4f average_normal = Eigen::Vector4f::Zero ();
        float average_normalization_factor = 0.0f;

        // Normals weighted by 1/squared_distances
        for (size_t nn_i = 0; nn_i < k_indices.size (); ++nn_i)
        {
          if (k_sqr_distances[nn_i] < 1e-7f)
          {
            average_normal = normals_->points[k_indices[nn_i]].getNormalVector4fMap ();
            average_normalization_factor = 1.0f;
            break;
          }
          average_normal += normals_->points[k_indices[nn_i]].getNormalVector4fMap () / k_sqr_distances[nn_i];
          average_normalization_factor += 1.0f / k_sqr_distances[nn_i];
        }
        average_normal /= average_normalization_factor;

        float s = zeta_point.dot (average_normal) / zeta_point.norm ();
        s_row[l] = s;
      }

      // do DCT on the s_matrix row-wise
      Eigen::VectorXf dct_row (M_);
      for (int m = 0; m < s_row.size (); ++m)
      {
        float Xk = 0.0f;
        for (int n = 0; n < s_row.size (); ++n)
          Xk += static_cast<float> (s_row[n] * cos (M_PI / (static_cast<double> (M_ * n) + 0.5) * static_cast<double> (k)));
        dct_row[m] = Xk;
      }
      s_row = dct_row;
      s_matrix.row (k).matrix () = dct_row;
    }

    // do DFT on the s_matrix column-wise
    Eigen::MatrixXf dft_matrix (N_, M_);
    for (size_t column_i = 0; column_i < M_; ++column_i)
    {
      Eigen::VectorXf dft_col (N_);
      for (size_t k = 0; k < N_; ++k)
      {
        float Xk_real = 0.0f, Xk_imag = 0.0f;
        for (size_t n = 0; n < N_; ++n)
        {
          Xk_real += static_cast<float> (s_matrix (n, column_i) * cos (2.0f * M_PI / static_cast<double> (N_ * k * n)));
          Xk_imag += static_cast<float> (s_matrix (n, column_i) * sin (2.0f * M_PI / static_cast<double> (N_ * k * n)));
        }
        dft_col[k] = sqrtf (Xk_real * Xk_real + Xk_imag * Xk_imag);
      }
      dft_matrix.col (column_i).matrix () = dft_col;
    }

    Eigen::MatrixXf final_matrix = dft_matrix.block (0, 0, N_prime_, M_prime_);

    PointFeature feature_point;
    for (size_t i = 0; i < N_prime_; ++i)
      for (size_t j = 0; j < M_prime_; ++j)
        feature_point.values[i * M_prime_ + j] = final_matrix (i, j);

    output.points[index_i] = feature_point;
  }
}
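// For reference, the textbook DCT-II used for this kind of row transform is
// X_k = sum_{n=0}^{N-1} x_n * cos(pi/N * (n + 0.5) * k). The sketch below is a plain
// reference implementation for comparison only, not the PCL code above.
#include <cmath>
#include <vector>

std::vector<float> dct_ii (const std::vector<float> &x)
{
  const std::size_t N = x.size ();
  std::vector<float> X (N, 0.0f);
  for (std::size_t k = 0; k < N; ++k)
    for (std::size_t n = 0; n < N; ++n)
      X[k] += static_cast<float> (x[n] * std::cos (M_PI / static_cast<double> (N) * (n + 0.5) * k));
  return X;
}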
void VertexProcessor::parameter(vp::Parameter param, const Eigen::Vector4f& v) { parameter(param, v.x(), v.y(), v.z(), v.w()); }
template <typename PointT> inline unsigned
pcl::computeCovarianceMatrix (const pcl::PointCloud<PointT> &cloud,
                              const Eigen::Vector4f &centroid,
                              Eigen::Matrix3f &covariance_matrix)
{
  if (cloud.points.empty ())
    return (0);

  // Initialize to 0
  covariance_matrix.setZero ();

  unsigned point_count;
  // If the data is dense, we don't need to check for NaN
  if (cloud.is_dense)
  {
    point_count = static_cast<unsigned> (cloud.size ());
    // For each point in the cloud
    for (size_t i = 0; i < point_count; ++i)
    {
      Eigen::Vector4f pt = cloud [i].getVector4fMap () - centroid;

      covariance_matrix (1, 1) += pt.y () * pt.y ();
      covariance_matrix (1, 2) += pt.y () * pt.z ();
      covariance_matrix (2, 2) += pt.z () * pt.z ();

      pt *= pt.x ();
      covariance_matrix (0, 0) += pt.x ();
      covariance_matrix (0, 1) += pt.y ();
      covariance_matrix (0, 2) += pt.z ();
    }
  }
  // NaN or Inf values could exist => check for them
  else
  {
    point_count = 0;
    // For each point in the cloud
    for (size_t i = 0; i < cloud.size (); ++i)
    {
      // Check if the point is invalid
      if (!isFinite (cloud [i]))
        continue;

      Eigen::Vector4f pt = cloud [i].getVector4fMap () - centroid;

      covariance_matrix (1, 1) += pt.y () * pt.y ();
      covariance_matrix (1, 2) += pt.y () * pt.z ();
      covariance_matrix (2, 2) += pt.z () * pt.z ();

      pt *= pt.x ();
      covariance_matrix (0, 0) += pt.x ();
      covariance_matrix (0, 1) += pt.y ();
      covariance_matrix (0, 2) += pt.z ();
      ++point_count;
    }
  }
  covariance_matrix (1, 0) = covariance_matrix (0, 1);
  covariance_matrix (2, 0) = covariance_matrix (0, 2);
  covariance_matrix (2, 1) = covariance_matrix (1, 2);

  return (point_count);
}
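// A small usage sketch (assumed, not from the original source): the overload above
// accumulates an unnormalized covariance and returns the number of valid points; dividing
// by that count yields a normalized covariance, similar to what
// pcl::computeCovarianceMatrixNormalized provides.
#include <pcl/point_types.h>
#include <pcl/common/centroid.h>

void normalized_covariance_example (const pcl::PointCloud<pcl::PointXYZ> &cloud)
{
  Eigen::Vector4f centroid;
  pcl::compute3DCentroid (cloud, centroid);

  Eigen::Matrix3f covariance;
  unsigned valid = pcl::computeCovarianceMatrix (cloud, centroid, covariance);
  if (valid > 0)
    covariance /= static_cast<float> (valid);
}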
NavGraphGeneratorThread::ObstacleMap
NavGraphGeneratorThread::map_obstacles(float line_max_dist)
{
  ObstacleMap  obstacles;
  unsigned int obstacle_i = 0;

  std::vector<std::pair<int, int> > free_space_indices;
  map_t *map = load_map(free_space_indices);

  logger->log_info(name(),
                   "Map Obstacles: map size: %ux%u (%zu of %u cells free, %.1f%%)",
                   map->size_x, map->size_y, free_space_indices.size(),
                   map->size_x * map->size_y,
                   (float)free_space_indices.size() / (float)(map->size_x * map->size_y) * 100.);

  size_t occ_cells = map->size_x * map->size_y - free_space_indices.size();

  // convert map to point cloud
  pcl::PointCloud<pcl::PointXYZ>::Ptr map_cloud(new pcl::PointCloud<pcl::PointXYZ>());
  map_cloud->points.resize(occ_cells);
  size_t pi = 0;
  for (int x = 0; x < map->size_x; ++x) {
    for (int y = 0; y < map->size_y; ++y) {
      if (map->cells[MAP_INDEX(map, x, y)].occ_state > 0) {
        // cell is occupied, generate point in cloud
        pcl::PointXYZ p;
        p.x = MAP_WXGX(map, x) + 0.5 * map->scale;
        p.y = MAP_WYGY(map, y) + 0.5 * map->scale;
        p.z = 0.;
        map_cloud->points[pi++] = p;
      }
    }
  }

  logger->log_info(name(), "Map Obstacles: filled %zu/%zu points", pi, occ_cells);

  pcl::PointCloud<pcl::PointXYZ>::Ptr no_line_cloud(new pcl::PointCloud<pcl::PointXYZ>());

  // determine lines
  std::vector<LineInfo> linfos = calc_lines<pcl::PointXYZ>(map_cloud,
                                                           cfg_map_line_segm_min_inliers_,
                                                           cfg_map_line_segm_max_iterations_,
                                                           /* segm distance threshold */ 2 * map->scale,
                                                           /* segm sample max dist */ 2 * map->scale,
                                                           cfg_map_line_cluster_tolerance_,
                                                           cfg_map_line_cluster_quota_,
                                                           cfg_map_line_min_length_,
                                                           -1, -1, -1,
                                                           no_line_cloud);

  logger->log_info(name(),
                   "Map Obstacles: found %zu lines, %zu points remaining",
                   linfos.size(), no_line_cloud->points.size());

  // determine line obstacle points
  for (const LineInfo &line : linfos) {
    const unsigned int num_points   = ceilf(line.length / line_max_dist);
    float              distribution = line.length / num_points;

    obstacles[NavGraph::format_name("Map_%u", ++obstacle_i)] =
      cart_coord_2d_t(line.end_point_1[0], line.end_point_1[1]);
    for (unsigned int i = 1; i <= num_points; ++i) {
      Eigen::Vector3f p = line.end_point_1 + i * distribution * line.line_direction;
      obstacles[NavGraph::format_name("Map_%d", ++obstacle_i)] = cart_coord_2d_t(p[0], p[1]);
    }
  }

  // cluster in remaining points to find more points of interest
  pcl::search::KdTree<pcl::PointXYZ>::Ptr kdtree_cluster(new pcl::search::KdTree<pcl::PointXYZ>());

  std::vector<pcl::PointIndices>                 cluster_indices;
  pcl::EuclideanClusterExtraction<pcl::PointXYZ> ec;
  ec.setClusterTolerance(2 * map->scale);
  ec.setMinClusterSize(1);
  ec.setMaxClusterSize(no_line_cloud->points.size());
  ec.setSearchMethod(kdtree_cluster);
  ec.setInputCloud(no_line_cloud);
  ec.extract(cluster_indices);

  unsigned int i = 0;
  for (auto cluster : cluster_indices) {
    Eigen::Vector4f centroid;
    pcl::compute3DCentroid(*no_line_cloud, cluster.indices, centroid);
    logger->log_info(name(),
                     "Map Obstacles: Cluster %u with %zu points at (%f, %f, %f)",
                     i, cluster.indices.size(), centroid.x(), centroid.y(), centroid.z());
    obstacles[NavGraph::format_name("MapCluster_%u", ++i)] =
      cart_coord_2d_t(centroid.x(), centroid.y());
  }

  map_free(map);

  return obstacles;
}
void glsl_program::set_uniform(unsigned loc, const Eigen::Vector4f& value) const
{
    if (used_program != this)
        throw runtime_error("glsl_program::set_uniform, program not bound!");
    glUniform4f(loc, value.x(), value.y(), value.z(), value.w());
}
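// An equivalent call (a sketch, not from the original source; assumes the same GL headers
// as the code above): Eigen::Vector4f stores its four floats contiguously, so the vector
// form of the GL call can take the data pointer directly.
void set_uniform_vec4_alternative(unsigned loc, const Eigen::Vector4f& value)
{
    glUniform4fv(loc, 1, value.data());
}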