Example #1
0
		::rl::math::Vector
		SchmersalLss300::getDistances() const
		{
			assert(this->isConnected());
			
			::rl::math::Vector distances(this->getDistancesCount());
			
			::rl::math::Real scale = static_cast< ::rl::math::Real>(0.01);
			::std::uint16_t count = Endian::hostWord(this->data[8], this->data[7]);
			::std::uint8_t mask = 0x1F;
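			// Each reading spans two bytes: bit 5 of the high byte flags an invalid
			// measurement, while the low five bits of the high byte plus the low byte
			// form the raw distance value, which is scaled by 0.01.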
			
			for (::std::size_t i = 0; i < count; ++i)
			{
				if (this->data[10 + i * 2] & 32)
				{
					distances(i) = ::std::numeric_limits< ::rl::math::Real>::quiet_NaN();
				}
				else
				{
					distances(i) = Endian::hostWord(this->data[10 + i * 2] & mask, this->data[9 + i * 2]);
					distances(i) *= scale;
				}
			}
			
			return distances;
		}
Example #2
0
void ClassicalScaling::run( PointWiseMapping* mymap ){
   // Retrieve the distances from the dimensionality reduction object
   double half=(-0.5); Matrix<double> distances( half*mymap->modifyDmat() ); 
 
   // Apply the centering transformation
   unsigned n=distances.nrows(); double sum;
   // First HM
   for(unsigned i=0;i<n;++i){
       sum=0; for(unsigned j=0;j<n;++j) sum+=distances(i,j);
       for(unsigned j=0;j<n;++j) distances(i,j) -= sum/n;
   }
   // Now (HM)H
   for(unsigned i=0;i<n;++i){
      sum=0; for(unsigned j=0;j<n;++j) sum+=distances(j,i);
      for(unsigned j=0;j<n;++j) distances(j,i) -= sum/n; 
   }

   // Diagonalize matrix
   std::vector<double> eigval(n); Matrix<double> eigvec(n,n);
   diagMat( distances, eigval, eigvec );

   // Pass final projections to map object
   for(unsigned i=0;i<n;++i){
      for(unsigned j=0;j<mymap->getNumberOfProperties();++j) mymap->setProjectionCoordinate( i, j, sqrt(eigval[n-1-j])*eigvec(n-1-j,i) ); 
   }
}
Example #3
0
void NeighborSearchRules<
    SortPolicy,
    MetricType,
    TreeType>::
UpdateAfterRecursion(TreeType& queryNode, TreeType& /* referenceNode */)
{
  // Find the worst distance that the children found (including any points), and
  // update the bound accordingly.
  double worstDistance = SortPolicy::BestDistance();

  // First look through children nodes.
  for (size_t i = 0; i < queryNode.NumChildren(); ++i)
  {
    if (SortPolicy::IsBetter(worstDistance, queryNode.Child(i).Stat().Bound()))
      worstDistance = queryNode.Child(i).Stat().Bound();
  }

  // Now look through children points.
  for (size_t i = 0; i < queryNode.NumPoints(); ++i)
  {
    if (SortPolicy::IsBetter(worstDistance,
        distances(distances.n_rows - 1, queryNode.Point(i))))
      worstDistance = distances(distances.n_rows - 1, queryNode.Point(i));
  }

  // Take the worst distance from all of these, and update our bound to reflect
  // that.
  queryNode.Stat().Bound() = worstDistance;
}
Example #4
0
/**
 * Returns a vector of distances between the points in R and point y 
 */
vec GreedyMaxMinDesign::dist(mat R, vec y)
{
    vec distances(R.rows());
    
    // For each point x in R
    for (int i=0; i<R.rows(); i++) {
        vec x = R.get_row(i);
        distances(i) = weightedSquareNorm(x-y);
    }
    
    return distances;
}
Example #5
0
void CartesianCoordinatesTest::distanceMatrix()
{
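    // Set four known positions and spot-check entries of the resulting 4x4 distance matrix.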
    chemkit::CartesianCoordinates coordinates(4);
    coordinates.setPosition(0, chemkit::Point3(1, 0, 0));
    coordinates.setPosition(1, chemkit::Point3(2, 0, 0));
    coordinates.setPosition(2, chemkit::Point3(0, 5, 0));
    coordinates.setPosition(3, chemkit::Point3(10, 5, 2));

    chemkit::Matrix distances = coordinates.distanceMatrix();
    QVERIFY(distances.rows() == 4);
    QVERIFY(distances.cols() == 4);
    QCOMPARE(qRound(distances(0, 0)), 0);
    QCOMPARE(qRound(distances(0, 1)), 1);
    QCOMPARE(qRound(distances(1, 0)), 1);
}
Example #6
0
size_t closest_point_index_rayOMP(const pcl::PointCloud<PointT>& cloud,
                                  const Eigen::Vector3f& direction_pre,
                                  const Eigen::Vector3f& line_pt,
                                  double& distance_to_ray) {
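    // Normalize the direction so the perpendicular point-to-line distance formula below is valid.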
    Eigen::Vector3f direction = direction_pre / direction_pre.norm();
    std::vector<double> distances(cloud.points.size(), 10);  // Initialize to value 10
    // Generate a vector of distances
    #pragma omp parallel for
    for (size_t index = 0; index < cloud.points.size(); index++) {
        // Keep the point local to each iteration so the parallel threads do not share state.
        const PointT& point = cloud.points[index];

        Eigen::Vector3f cloud_pt(point.x, point.y, point.z);
        Eigen::Vector3f difference = (line_pt - cloud_pt);
        // https://en.wikipedia.org/wiki/Distance_from_a_point_to_a_line#Vector_formulation
        double distance = (difference - (difference.dot(direction) * direction)).norm();
        distances[index] = distance;
    }

    double min_distance = std::numeric_limits<double>::infinity();
    size_t min_index = 0;
    // Find the index of the minimum distance (TODO: Figure out how to OMP this)
    for (size_t index = 0; index < cloud.points.size(); index++) {
        const double distance = distances[index];
        if (distance < min_distance) {
            min_distance = distance;
            min_index = index;
        }
    }

    distance_to_ray = min_distance;

    return (min_index);
}
Example #7
0
static void kmeans_center_multiple_restarts(
    unsigned nb_restarts, cluster_t nb_center,
    void (*center_init_f)(cluster_t, histogram_c &, dataset_t &, nbgen &),
    histogram_c &center, dataset_t &dataset, nbgen &rng) {
  std::vector<histogram_c> center_c(nb_restarts);
  for (unsigned i = 0; i < nb_restarts; ++i)
    center_init_f(nb_center, center_c[i], dataset, rng);

  unsigned nb_features = dataset[0].histogram.size();
  std::vector<double> cluster_dists(nb_restarts);
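  // Score each restart by the mean pairwise L2 distance between its candidate centers.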
  for (unsigned r = 0; r < nb_restarts; ++r) {
    double sum = 0;
    unsigned count = 0;
    std::vector<double> distances(nb_center, 0);
    for (unsigned i = 0; i < nb_center; ++i) {
      for (unsigned j = 0; j < nb_center; ++j) {
        if (j == i)
          continue;
        double dist = l2_distance(center_c[r][i], center_c[r][j], nb_features);
        distances[i] += dist;
        ++count;
      }
      sum += distances[i];
    }
    cluster_dists[r] = sum / count;
    // printf("restart:%u -> %f\n", r, cluster_dists[r]);
  }
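  // Keep the restart whose centers are spread furthest apart.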
  size_t max_cluster = std::distance(
      cluster_dists.begin(),
      std::max_element(cluster_dists.begin(), cluster_dists.end()));
  // printf("min center index: %zu\n", max_cluster);
  center = center_c[max_cluster];
}
Example #8
0
void
PolycrystalReducedIC::initialSetup()
{
  //Set up domain bounds with mesh tools
  for (unsigned int i = 0; i < LIBMESH_DIM; i++)
  {
    _bottom_left(i) = _mesh.getMinInDimension(i);
    _top_right(i) = _mesh.getMaxInDimension(i);
  }
  _range = _top_right - _bottom_left;

  if (_op_num > _grain_num)
     mooseError("ERROR in PolycrystalReducedIC: Number of order parameters (op_num) can't be larger than the number of grains (grain_num)");

  MooseRandom::seed(_rand_seed);

  //Randomly generate the centers of the individual grains represented by the Voronoi tessellation
  _centerpoints.resize(_grain_num);
  _assigned_op.resize(_grain_num);
  std::vector<Real> distances(_grain_num);

  //Assign actual center point positions
  for (unsigned int grain = 0; grain < _grain_num; grain++)
  {
    for (unsigned int i = 0; i < LIBMESH_DIM; i++)
      _centerpoints[grain](i) = _bottom_left(i) + _range(i) * MooseRandom::rand();
    if (_columnar_3D)
      _centerpoints[grain](2) = _bottom_left(2) + _range(2) * 0.5;
  }

  //Assign grains to specific order parameters in a way that maximizes the distance
  _assigned_op = PolycrystalICTools::assignPointsToVariables(_centerpoints, _op_num, _mesh, _var);
}
Example #9
0
std::vector<float>
WorkerStemFit::compute_distances(std::vector<pcl::ModelCoefficients>  &cylinders)
{

    std::vector<float> distances ( _cloud->points.size () );
    std::fill ( distances.begin (), distances.end (), 0.5 );

    pcl::octree::OctreePointCloudSearch<PointI> octree ( 0.02f );
    octree.setInputCloud ( _cloud );
    octree.addPointsFromInputCloud ();
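    // For each cylinder, update the distance of every nearby point, keeping the smallest
    // absolute point-to-cylinder distance (the 0.5 initialization acts as an upper cap).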
    for (size_t i = 0; i < cylinders.size(); i++) {

        pcl::ModelCoefficients cylinder = cylinders.at(i);
        std::vector<int> indices = indexOfPointsNearCylinder ( octree, cylinder );
        for ( size_t i = 0; i < indices.size (); i++ ) {
            PointI point = _cloud->points[indices[i]];
            simpleTree::Cylinder cyl (cylinder);

            float dist = cyl.distToPoint ( point );
            if ( std::abs ( dist ) < std::abs ( distances[indices[i]] ) ) {
                distances[indices[i]] = dist;
            }
        }
    }
    return distances;
}
Example #10
0
std::pair<std::vector<count>, node> BFS::run_Feist(const Graph& g, node source) const {
    count infDist = std::numeric_limits<count>::max();
    count n = g.numberOfNodes();
    std::vector<count> distances(n, infDist);
    std::queue<node> q;

    distances[source] = 0;
    q.push(source);
    node max_node = source;  // will hold the last node discovered, i.e. a node at maximum BFS distance

    while (! q.empty()) {
        node current = q.front();
        q.pop();
        //std::cout << "current node in BFS: " << current << " ";

        // insert untouched neighbors into queue
        g.forNeighborsOf(current, [&](node neighbor) {
                if (distances[neighbor] == infDist) {
                    q.push(neighbor);
                    distances[neighbor] = distances[current] + 1;
                    max_node = neighbor; 
                }    
            });
    }

    return std::make_pair (distances, max_node);
}
Example #11
0
template <typename PointT> double
pcl::MaximumLikelihoodSampleConsensus<PointT>::computeMedianAbsoluteDeviation (
    const PointCloudConstPtr &cloud, 
    const boost::shared_ptr <std::vector<int> > &indices, 
    double sigma)
{
  std::vector<double> distances (indices->size ());

  Eigen::Vector4f median;
  // median (dist (x - median (x)))
  computeMedian (cloud, indices, median);

  for (size_t i = 0; i < indices->size (); ++i)
  {
    pcl::Vector4fMapConst pt = cloud->points[(*indices)[i]].getVector4fMap ();
    Eigen::Vector4f ptdiff = pt - median;
    ptdiff[3] = 0;
    distances[i] = ptdiff.dot (ptdiff);
  }

  std::sort (distances.begin (), distances.end ());

  double result;
  size_t mid = indices->size () / 2;
  // Do we have a "middle" point or should we "estimate" one ?
  if (indices->size () % 2 == 0)
    result = (sqrt (distances[mid-1]) + sqrt (distances[mid])) / 2;
  else
    result = sqrt (distances[mid]);
  return (sigma * result);
}
Example #12
0
 void KMeans<ScalarType, AssignmentType>::calculateInitGuess(const std::vector<Eigen::Matrix<ScalarType, Eigen::Dynamic, 1> >& data, std::vector<Eigen::Matrix<ScalarType, Eigen::Dynamic, 1> >& initGuess, unsigned int meanCount)
 {
     assert(meanCount > 0u);
     assert(data.size() > 0u);
     //first clear the initGuess vector. might not be empty.
     initGuess.clear();
     
     UniformRNG<double> rng;
     
     unsigned int dataSize = data.size();
     //insert first point.
     initGuess.push_back(data[std::rand() % dataSize]);
     
     Eigen::VectorXf distances(dataSize);
     
     float distance, minDistance;
     int minDistanceElement=0;
     double draw;
     
     //create meanCount guesses. first value has been taken.
     for (unsigned int g=1; g<meanCount; g++)
     {
         //calculate distances to nearest cluster mean guess
         for (unsigned int i=0; i<dataSize; i++)
         {
             distance = minDistance = std::numeric_limits<float>::max();
             for (unsigned int j=0; j<initGuess.size(); j++)
             {
                 // since we don't need the distance, but only the
                 // /smallest/ distance, we can omit the sqrt operation.
                 // it is not proven to be faster this way, so I will have
                 // both instructions here - you can choose. both work.
                 distance = (initGuess[j] - data[i]).norm();
                 //distance = (initGuess[j] - data[i]).array().square().sum();
                 if (distance < minDistance)
                 {
                     minDistance = distance;
                 }
             }
             distances[i] = minDistance;
         }
         //draw random value in [0, maxDistance];
         draw = rng.rand() * distances.maxCoeff();
         //find the element that best fits to the drawn value (distance).
         minDistance = std::numeric_limits<float>::max();
         for (unsigned int i=0; i<dataSize; i++)
         {
             distance = std::fabs(distances[i] - draw);
             if (distance < minDistance)
             {
                 minDistance = distance;
                 minDistanceElement = i;
             }
         }
         //take that element.
         initGuess.push_back(data[minDistanceElement]);
     }
     
     assert(initGuess.size() == meanCount);
 }
Example #13
0
	bool Search(U_INT src, U_INT trg, T& PathRes, U_INT& Cost) {
		typedef boost::graph_traits<GraphT>::vertex_descriptor vertex_descriptor;
		std::vector<vertex_descriptor> predecessors(num_vertices(hGraph));
		if (src>=num_vertices(hGraph) || trg>=num_vertices(hGraph)) return false;
		vertex_descriptor source_vertex = vertex(src, hGraph);
		vertex_descriptor target_vertex = vertex(trg, hGraph);
		std::vector<U_INT> distances(num_vertices(hGraph));
		typedef std::vector<boost::default_color_type> colormap_t;
		colormap_t colors(num_vertices(hGraph));
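		// Run A* from the source; the goal visitor throws found_goal once the target vertex is reached.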
		try {
			boost::astar_search(
			    hGraph, source_vertex,
			    distance_heuristic<GraphT>(hGraph, target_vertex),
			    boost::predecessor_map(&predecessors[0]).
			    weight_map(get(( &xEdge::cost ), hGraph)).
			    distance_map(&distances[0]).
			    color_map(&colors[0]).
			    visitor(astar_goal_visitor<vertex_descriptor>(target_vertex)));
		} catch (found_goal fg) {
			Cost=distances[target_vertex];
			PathRes.clear();
			PathRes.push_front(target_vertex);
			size_t max=num_vertices(hGraph);
			while (target_vertex != source_vertex) {
				if (target_vertex == predecessors[target_vertex])
					return false;
				target_vertex = predecessors[target_vertex];
				PathRes.push_front(target_vertex);
				if (!max--)
					return false;
			}
			return true;
		}
		return false;
	}
Example #14
0
void toNormalizedNEXUS(ifstream & inf, ostream *os)
	{
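	// Register one handler per supported NEXUS block type with a normalizing reader,
	// then execute the token stream so the input is echoed to os in normalized form.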
	NxsTaxaBlock taxa;
	NxsTreesBlock trees(&taxa);
	NxsAssumptionsBlock assumptions(&taxa);
	NxsCharactersBlock character(&taxa, &assumptions);
	NxsDataBlock data(&taxa, &assumptions);
	NxsDistancesBlock distances(&taxa);
	NxsUnalignedBlock unaligned(&taxa, &assumptions);

	NormalizingReader nexus(os);
	nexus.Add(&taxa);
	nexus.Add(&trees);
	nexus.Add(&assumptions);
	nexus.Add(&character);
	nexus.Add(&data);
	nexus.Add(&distances);
	nexus.Add(&unaligned);

	if (os)
		{
		*os << "#NEXUS\n";
		}
	NormalizingToken token(inf, os);
	
	nexus.Execute(token);
	}
Example #15
0
void computeVarianceAndCorrespondences(
		const pcl::PointCloud<pcl::PointXYZ>::ConstPtr & cloudA,
		const pcl::PointCloud<pcl::PointXYZ>::ConstPtr & cloudB,
		double maxCorrespondenceDistance,
		double & variance,
		int & correspondencesOut)
{
	variance = 1;
	correspondencesOut = 0;
	pcl::registration::CorrespondenceEstimation<pcl::PointXYZ, pcl::PointXYZ>::Ptr est;
	est.reset(new pcl::registration::CorrespondenceEstimation<pcl::PointXYZ, pcl::PointXYZ>);
	est->setInputTarget(cloudB);
	est->setInputSource(cloudA);
	pcl::Correspondences correspondences;
	est->determineCorrespondences(correspondences, maxCorrespondenceDistance);
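	// With at least 3 correspondences, estimate the variance from the median squared
	// correspondence distance (scaled by 2.1981, a robust MAD-style estimate); otherwise keep the default of 1.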

	if(correspondences.size()>=3)
	{
		std::vector<double> distances(correspondences.size());
		for(unsigned int i=0; i<correspondences.size(); ++i)
		{
			distances[i] = correspondences[i].distance;
		}

		//variance
		std::sort(distances.begin (), distances.end ());
		double median_error_sqr = distances[distances.size () >> 1];
		variance = (2.1981 * median_error_sqr);
	}
}
Example #16
0
inline
void
cv_dist_vector_LogEucl::train(int tr_scale, int tr_shift )
{
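  // Compute the distance features for this scale/shift, then train the SVM on them.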
  distances(tr_scale, tr_shift);
  svm_train(); 
  
}
Example #17
0
double getlength(int n,point *p)
{
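	// Perimeter of the closed polygon: sum of distances between consecutive points, wrapping around.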
    if(n==1)    return 0;
	double ans=0;
	for(int i=0;i<n;i++)
		ans+=distances(p[i],p[(i+n+1)%n]);
	return ans;
}
Example #18
0
inline
void
cv_dist_vector_GrassBC::train(int tr_scale, int tr_shift )
{
  distances(tr_scale, tr_shift);
  svm_train(); 
  
}
Example #19
0
std::vector<ompl::control::ProductGraph::State*>
ompl::control::ProductGraph::computeLead(
    ProductGraph::State* start,
    const boost::function<double(ProductGraph::State*, ProductGraph::State*)>& edgeWeight)
{
    std::vector<GraphType::vertex_descriptor> parents(boost::num_vertices(graph_));
    std::vector<double> distances(boost::num_vertices(graph_));
    EdgeIter ei, eend;
    //first build up the edge weights
    for (boost::tie(ei,eend) = boost::edges(graph_); ei != eend; ++ei)
    {
        GraphType::vertex_descriptor src = boost::source(*ei, graph_);
        GraphType::vertex_descriptor target = boost::target(*ei, graph_);
        graph_[*ei].cost = edgeWeight(graph_[src], graph_[target]);
    }
    int startIndex = stateToIndex_[start];
    boost::dijkstra_shortest_paths(graph_, boost::vertex(startIndex,graph_),
                                   boost::weight_map(get(&Edge::cost, graph_)).distance_map(
                                       boost::make_iterator_property_map(distances.begin(), get(boost::vertex_index, graph_)
                                                                        )).predecessor_map(
                                       boost::make_iterator_property_map(parents.begin(), get(boost::vertex_index, graph_))
                                   )
                                  );
    //pick state from solutionStates_ such that distance[state] is minimized
    State* bestSoln = *solutionStates_.begin();
    double cost = distances[boost::vertex(stateToIndex_[bestSoln], graph_)];
    for (std::vector<State*>::const_iterator s = solutionStates_.begin()+1; s != solutionStates_.end(); ++s)
    {
        if (distances[boost::vertex(stateToIndex_[*s], graph_)] < cost)
        {
            cost = distances[boost::vertex(stateToIndex_[*s], graph_)];
            bestSoln = *s;
        }
    }
    //build lead from bestSoln parents
    std::stack<State*> leadStack;
    while (!(bestSoln == start))
    {
        leadStack.push(bestSoln);
        bestSoln = graph_[parents[boost::vertex(stateToIndex_[bestSoln], graph_)]];
    }
    leadStack.push(bestSoln);

    std::vector<State*> lead;
    while (!leadStack.empty())
    {
        lead.push_back(leadStack.top());
        leadStack.pop();
        // Truncate the lead as early as possible, i.e. when it hits the desired automaton states
        // \todo: more elegant way to do this?
        if (lead.back()->cosafeState == solutionStates_.front()->cosafeState
                && lead.back()->safeState == solutionStates_.front()->safeState)
            break;
    }
    return lead;
}
Example #20
0
    DBL_MATRIX Alignment::estimateAlpha_(DBL_MATRIX& points, DBL_MATRIX& rt_vec)
    {
        unsigned int n_points = points.rowCount();
        unsigned int rt_size = rt_vec.rowCount();

        if(n_points < 1){
            MSPP_LOG(logERROR) << "Alignment::estimateAlpha_(): number of points must be greater than 0!" << std::endl;
        }
        mspp_precondition(n_points >= 1 , "Alignment::estimateAlpha_(): number of points must be greater than 0!");
 
        if(points.columnCount() != rt_vec.columnCount()){
            MSPP_LOG(logERROR) << "Alignment::estimateAlpha_(): Wrong dimensionality of input - points and rt_vec must have equal dimensions!" << std::endl;
        }
        mspp_precondition(points.columnCount() == rt_vec.columnCount() , "Alignment::estimateAlpha_(): Wrong dimensionality of input - points and rt_vec must have equal dimensions!");

        //vector containing new alpha values
        DBL_MATRIX returnAlpha (n_points,1,0.);

        for(unsigned int i = 0; i < n_points; i++){
            DBL_MATRIX pointMat = rt_vec;
            pointMat.init(0.);

            //fill each line in pointMat with points(i,*)
            for(unsigned int k = 0; k < rt_size; k++){
                for(int l = 0; l < rt_vec.columnCount(); l++){
                    pointMat(k,l) = points(i,l);
                }
            }

            //calculate distances from rt_vec to points(i,*)
            vigra::linalg::Matrix<double> distances = rt_vec - pointMat;
            std::vector<double> sqrdist (rt_size); //needs to be sorted --> vector
            for(unsigned int k = 0; k < rt_size; k++){
                double sum = 0;
                for(int l = 0; l < rt_vec.columnCount(); l++){
                    sum += pow(distances(k,l) , 2);
                }
                sqrdist[k] = sum;
            }

            //sort sqrdist
            std::sort(sqrdist.begin(),sqrdist.end());

            double mean = 0;
            int range = ((20<rt_size)?20:rt_size);
            for(int k = 0; k < range; k++){
                mean += sqrdist[k];
            }
            mean /= range;

            returnAlpha(i,0) =( 1./sqrt(mean) * 10 + 0.1 );
           
        }

        return returnAlpha;
    }
Example #21
0
  void LowessSmoothing::smoothData(const DoubleVector& input_x, const DoubleVector& input_y, DoubleVector& smoothed_output)
  {
    if (input_x.size() != input_y.size())
    {
      throw Exception::InvalidValue(__FILE__, __LINE__, OPENMS_PRETTY_FUNCTION,
          "Sizes of x and y values not equal! Aborting... ", String(input_x.size()));
    }

    // unable to smooth over 2 or fewer data points (we need at least 3)
    if (input_x.size() <= 2)
    {
      smoothed_output = input_y;
      return;
    }

    Size input_size = input_y.size();

    // const Size q = floor( input_size * alpha );
    const Size q = (window_size_ < input_size) ? static_cast<Size>(window_size_) : input_size - 1;

    DoubleVector distances(input_size, 0.0);
    DoubleVector sortedDistances(input_size, 0.0);

    for (Size outer_idx = 0; outer_idx < input_size; ++outer_idx)
    {
      // Compute distances.
      // Size inner_idx = 0;
      for (Size inner_idx = 0; inner_idx < input_size; ++inner_idx)
      {
        distances[inner_idx] = std::fabs(input_x[outer_idx] - input_x[inner_idx]);
        sortedDistances[inner_idx] = distances[inner_idx];
      }

      // Sort distances in order from smallest to largest.
      std::sort(sortedDistances.begin(), sortedDistances.end());

      // Compute weights.
      std::vector<double> weigths(input_size, 0);
      for (Size inner_idx = 0; inner_idx < input_size; ++inner_idx)
      {
        weigths.at(inner_idx) = tricube_(distances[inner_idx], sortedDistances[q]);
      }

      //calculate regression
      Math::QuadraticRegression qr;
      std::vector<double>::const_iterator w_begin = weigths.begin();
      qr.computeRegressionWeighted(input_x.begin(), input_x.end(), input_y.begin(), w_begin);

      //smooth y-values
      double rt = input_x[outer_idx];
      smoothed_output.push_back(qr.eval(rt));
    }

    return;
  }
Example #22
0
int main()
{
#ifndef ONLINE_JUDGE
	freopen("in.txt","r",stdin);
#endif
	int t;
	scanf("%d",&t);
	while(t--)
	{
		scanf("%d",&n);
		int pos=0;
		for(int i=0;i<n;i++)
		{
			scanf("%d %lf %lf",&number[i],&p[i].x,&p[i].y);
			if(p[pos].y>p[i].y)	pos=i;
		}

		printf("%d %d",n,number[pos]);
		memset(vis,0,sizeof(vis));
		vis[pos]=1;

		point last=p[pos];
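		// Starting from the lowest point, repeatedly pick the next point by orientation
		// (cross product relative to the last chosen point), breaking collinear ties by
		// taking the nearer point, which yields a gift-wrapping style ordering of the points.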

		for(int i=1;i<n;i++)
		{
			double dist;

			for(int j=0;j<n;j++)
				if(!vis[j])
				{
					pos=j;
					break;
				}
			dist=distances(last,p[pos]);	// seed dist with the first candidate so the collinear tie-break below is well-defined
			for(int j=pos+1;j<n;j++)
			{
				if(vis[j])continue;


				double tmp=xmult(p[pos],p[j],last);
				double tmpdist=distances(last,p[j]);
				if(tmp<-eps || (zero(tmp) && tmpdist<dist))
				{
					pos=j;
					dist=tmpdist;
				}
			}
			printf(" %d",number[pos]);
			vis[pos]=1;
			last=p[pos];
		}
        printf("\n");
	}

	return 0;
}
Example #23
0
template <typename PointT> void
pcl::UnaryClassifier<PointT>::queryFeatureDistances (std::vector<pcl::PointCloud<pcl::FPFHSignature33>::Ptr> &trained_features,
                                                     pcl::PointCloud<pcl::FPFHSignature33>::Ptr query_features,
                                                     std::vector<int> &indi,
                                                     std::vector<float> &dist)
{
  // estimate the total number of rows needed
  int n_row = 0;
  for (size_t i = 0; i < trained_features.size (); i++)
    n_row += static_cast<int> (trained_features[i]->points.size ());

  // Convert data into FLANN format
  int n_col = 33;
  flann::Matrix<float> data (new float[n_row * n_col], n_row, n_col);
  for (size_t k = 0; k < trained_features.size (); k++)
  {
    pcl::PointCloud<pcl::FPFHSignature33>::Ptr hist = trained_features[k];
    int c = static_cast<int> (hist->points.size ());
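    // Note: the (k*c)+i indexing assumes every training cloud contains the same number of points.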
    for (size_t i = 0; i < c; ++i)
      for (size_t j = 0; j < data.cols; ++j)
        data[(k*c)+i][j] = hist->points[i].histogram[j];
  }

  // build a FLANN index over the training features (a linear index here; alternative index types are shown commented out below)
  flann::Index<flann::ChiSquareDistance<float> > *index;
  index = new flann::Index<flann::ChiSquareDistance<float> > (data, flann::LinearIndexParams ());
  //flann::Index<flann::ChiSquareDistance<float> > index (data, flann::LinearIndexParams ());
  //flann::Index<flann::ChiSquareDistance<float> > index (data, flann::KMeansIndexParams (5, -1));  
  //flann::Index<flann::ChiSquareDistance<float> > index (data, flann::KDTreeIndexParams (4));
  index->buildIndex ();

  int k = 1;
  indi.resize (query_features->points.size ());
  dist.resize (query_features->points.size ());
  // Query all points
  for (size_t i = 0; i < query_features->points.size (); i++)
  {
    // Query point  
    flann::Matrix<float> p = flann::Matrix<float>(new float[n_col], 1, n_col);
    memcpy (&p.ptr ()[0], query_features->points[i].histogram, p.cols * p.rows * sizeof (float));

    flann::Matrix<int> indices (new int[k], 1, k);
    flann::Matrix<float> distances (new float[k], 1, k);  
    index->knnSearch (p, indices, distances, k, flann::SearchParams (512));

    indi[i] = indices[0][0];
    dist[i] = distances[0][0];

    delete[] p.ptr ();
  }

  //std::cout << "kdtree size: " << index->size () << std::endl;

  delete[] data.ptr ();
}
Example #24
0
// This function returns the "anchor point" closest to the specified bacterium.
// It calculates the distance between the bacterium's centre and the various
// anchors on the food nugget, then takes the shortest of those
// distances and returns the coordinates of that anchor.
coordinate_pair_t food::closest_anchor(const vector & bacteria_vector)
{
	coordinate_pair_t retval;
	vector this_vector = vector(0, 0, F_ANCHOR_1_X + x, F_ANCHOR_1_Y + y);
	// Initialize to INFINITY so the equality checks below are well-defined for occupied anchors.
	float distance_1 = INFINITY, distance_2 = INFINITY, distance_3 = INFINITY, distance_4 = INFINITY, eval_dist;
	std::valarray<float> distances(4);
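	// Anchors whose state is nonzero get INFINITY below, so only anchors with state 0 can be selected.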

	if(anchor_1 == 0)
	{
		distance_1 = vector::distance_between(this_vector, bacteria_vector);
		distances[0] = distance_1;
	} else
		distances[0] = INFINITY;

	if(anchor_2 == 0)
	{
		this_vector.set_xy(F_ANCHOR_2_X + x, F_ANCHOR_2_Y + y);
		distance_2 = vector::distance_between(this_vector, bacteria_vector);
		distances[1] = distance_2;
	} else 
		distances[1] = INFINITY;

	if(anchor_3 == 0)
	{
		this_vector.set_xy(F_ANCHOR_3_X + x, F_ANCHOR_3_Y + y);
		distance_3 = vector::distance_between(this_vector, bacteria_vector);
		distances[2] = distance_3;
	} else 
		distances[2] = INFINITY;

	if(anchor_4 == 0)
	{
		this_vector.set_xy(F_ANCHOR_4_X + x, F_ANCHOR_4_Y + y);
		distance_4 = vector::distance_between(this_vector, bacteria_vector);
		distances[3] = distance_4;
	} else
		distances[3] = INFINITY;

	eval_dist = distances.min();

	if(eval_dist == distance_1)
		retval = ANCHOR_1;
	
	if(eval_dist == distance_2)
		retval = ANCHOR_2;

	if(eval_dist == distance_3)
		retval = ANCHOR_3;

	if(eval_dist == distance_4)
		retval = ANCHOR_4;

	return retval;
}
Example #25
0
inline double NeighborSearchRules<SortPolicy, MetricType, TreeType>::Score(
    const size_t queryIndex,
    TreeType& referenceNode) const
{
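  // Return the best possible point-to-node distance if it can still improve on the current
  // k-th nearest neighbor distance for this query; otherwise return DBL_MAX to prune the node.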
  const arma::vec queryPoint = querySet.unsafe_col(queryIndex);
  const double distance = SortPolicy::BestPointToNodeDistance(queryPoint,
      &referenceNode);
  const double bestDistance = distances(distances.n_rows - 1, queryIndex);

  return (SortPolicy::IsBetter(distance, bestDistance)) ? distance : DBL_MAX;
}
Example #26
0
 double clusteringValidity::getMaximumDistance(const dmatrix& m1,
                                               const dmatrix& m2) const {
   int i,j;
   dmatrix distances(m1.rows(),m2.rows());
   l2Distance<double> dist;
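   // Maximum pairwise L2 distance between the rows of m1 and the rows of m2.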
   for (i=0; i<m1.rows(); i++) {
     for (j=0; j<m2.rows(); j++) {
       distances[i][j]=dist.apply(m1.getRow(i),m2.getRow(j));
     }
   }
   return distances.maximum();
 }
Example #27
0
        void BeliefGenerator<M>::expandBeliefList(size_t max, BeliefList * blp) const {
            assert(blp);
            auto & bl = *blp;
            size_t size = bl.size();

            std::vector<Belief> newBeliefs(A);
            std::vector<double> distances(A);
            auto dBegin = std::begin(distances), dEnd = std::end(distances);

            // L1 distance
            auto computeDistance = [this](const Belief & lhs, const Belief & rhs) {
                double distance = 0.0;
                for ( size_t i = 0; i < S; ++i )
                    distance += std::abs(lhs[i] - rhs[i]);
                return distance;
            };

            Belief helper; double distance;
            // We apply the discovery process also to all beliefs we discover
            // along the way.
            for ( auto it = std::begin(bl); it != std::end(bl); ++it ) {
                // Compute all new beliefs
                for ( size_t a = 0; a < A; ++a ) {
                    distances[a] = 0.0;
                    for ( int j = 0; j < 20; ++j ) {
                        size_t s = sampleProbability(S, *it, rand_);

                        size_t o;
                        std::tie(std::ignore, o, std::ignore) = model_.sampleSOR(s, a);
                        helper = updateBelief(model_, *it, a, o);

                        // Compute distance (here we compare also against elements we just added!)
                        distance = computeDistance(helper, bl.front());
                        for ( auto jt = ++std::begin(bl); jt != std::end(bl); ++jt ) {
                            if ( checkEqualSmall(distance, 0.0) ) break; // We already have it!
                            distance = std::min(distance, computeDistance(helper, *jt));
                        }
                        // Select the best found over 20 times
                        if ( distance > distances[a] ) {
                            distances[a] = distance;
                            newBeliefs[a] = helper;
                        }
                    }
                }
                // Find furthest away, add only if it is new.
                size_t id = std::distance( dBegin, std::max_element(dBegin, dEnd) );
                if ( checkDifferentSmall(distances[id], 0.0) ) {
                    bl.emplace_back(std::move(newBeliefs[id]));
                    ++size;
                    if ( size == max ) break;
                }
            }
        }
Example #28
0
 double clusteringValidity::getStandardDiameter(const dmatrix& m1) const {
   dmatrix distances(m1.rows(),m1.rows());
   int j,k;
   l2Distance<double> dist;
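   // Cluster diameter: maximum pairwise L2 distance between the rows of m1 (upper triangle only).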
   for (j=0; j<m1.rows(); j++) {
     for (k=j+1; k<m1.rows(); k++) {
       distances[j][k]=dist.apply(m1.getRow(j),
                                  m1.getRow(k));
     }
   }
   return distances.maximum();  
 }
Example #29
0
int Targets::getCurrentTarget(Point point) {
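    // Compute the distance from the given point to every target and return the index of the nearest one.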
    vector<double> distances(targets.size());
    //    debugtee(targets);
    transform(targets.begin(), targets.end(), distances.begin(),
	      sigc::mem_fun(point, &Point::distance));
    //    debugtee(distances);
    return min_element(distances.begin(), distances.end()) - distances.begin();
//     for(int i=0; i<targets.size(); i++)
// 	if (point.distance(targets[i]) < 30)
// 	    return i;
//     return -1;
}
Example #30
0
        DelaunayConstraints::DelaunayConstraints(PatchSet& patches) {

            Point2f* positions = new Point2f[patches.size()];
            vector<vector<int> > neighborhoods;

            C = Mat_<double>(patches.size(), patches.size());
            // C = Mat_<float>(patches.size(),patches.size());

            for (int i = 0; i < patches.size(); i++) {
                positions[i] = patches.get_position(i);
                neighborhoods.push_back(vector<int>());
            }

            // determine neighborhoods using Delaunay triangulation
            delaunay_neihgbours(positions, patches.size(), neighborhoods);

            float* D = new float[patches.size()];

            // add closest non-DT node to the neighborhood for nodes with only two neighbors
            for (int i = 0; i < patches.size(); i++) { // HOW DOES THIS CODE WORK?
                if (neighborhoods[i].size() > 2) { continue; } // force a triangle when a node has at most two neighbours

                // how do we prevent the three selected points from being collinear?
                distances(positions[i], positions, patches.size(), D);

                int minNode = -1;
                float minDistance = FLT_MAX;

                for (int j = 0; j < patches.size(); j++) {
                    if (D[j] > 0 && j != neighborhoods[i][0] && j != neighborhoods[i][1] && D[j] < minDistance) {
                        minNode = j;
                        minDistance = D[j];
                    }
                }

                if (minNode >= 0) { neighborhoods[i].push_back(minNode); }
            }

            delete [] D;

            C.setTo(0.0);

            for (int i = 0; i < neighborhoods.size(); i++) {
                for (int j = 0; j < neighborhoods[i].size(); j++) {
                    C.at<double>(i, neighborhoods[i][j]) = 1.0;
                }
            }

            delete [] positions;

        }