Example No. 1
// Parameter step for sampling a thick quadratic: project the control points
// onto the (x, y), (y, thick) and (x, thick) planes and take the smallest
// step among the three resulting plain quadratics.
double computeStep(const TThickQuadratic &quad, double pixelSize) {
  TThickPoint cp0 = quad.getThickP0(), cp1 = quad.getThickP1(),
              cp2 = quad.getThickP2();

  TQuadratic q1(TPointD(cp0.x, cp0.y), TPointD(cp1.x, cp1.y),
                TPointD(cp2.x, cp2.y)),
      q2(TPointD(cp0.y, cp0.thick), TPointD(cp1.y, cp1.thick),
         TPointD(cp2.y, cp2.thick)),
      q3(TPointD(cp0.x, cp0.thick), TPointD(cp1.x, cp1.thick),
         TPointD(cp2.x, cp2.thick));

  return std::min({computeStep(q1, pixelSize), computeStep(q2, pixelSize),
                   computeStep(q3, pixelSize)});
}
Example No. 2
bool HierarchicalIKSolver::computeSteps(float stepSize, float minChange, int maxSteps)
{
    int step = 0;
    checkTolerances();

    while (step < maxSteps)
    {
        // Compute a joint-space update from the task Jacobians.
        Eigen::VectorXf delta = computeStep(jacobies, stepSize);

        if (!MathTools::isValid(delta))
        {
#ifdef DEBUG
            VR_INFO << "Singular Jacobian" << endl;
#endif
            return false;
        }

        // Apply the update to the current joint values.
        Eigen::VectorXf jv(delta.rows());
        rns->getJointValues(jv);
        jv += delta;
        rns->setJointValues(jv);

        if (checkTolerances())
        {
#ifdef DEBUG
            VR_INFO << "Tolerances ok, loop:" << step << endl;
#endif
            return true;
        }

        if (delta.norm() < minChange)
        {
#ifdef DEBUG
            VR_INFO << "Could not improve result any more (dTheta.norm()=" << delta.norm() << "), loop:" << step << endl;
#endif
            return false;
        }

        step++;
    }

    return false;
}
Example No. 3
void PIDControl::driveToValue(int pwmPin, double objectiveSpeed){
	referenceValue = objectiveSpeed;
	unsigned long tic = millis();
	// Acceptance band around the reference value (0.1% below, 1% above).
	double leftExtreme = 0.999*referenceValue;
	double rightExtreme = referenceValue + (0.01*referenceValue);
	while(!(currentValue >= leftExtreme && currentValue <= rightExtreme)){
		unsigned long toc = millis();
		// Run one PID update every dt seconds (millis() returns milliseconds).
		if((toc - tic) / 1000.0 >= dt){
			computeStep();
			if (currentValue < 0){
				currentValue = 0;
			}
			analogWrite(pwmPin,(int)currentValue);
			tic = millis();
		}
	}
}
Example No. 4
void OptCG::optimize()
//------------------------------------------------------------------------
// Nonlinear Preconditioned Conjugate Gradient
//
// Given a nonlinear operator objfcn, find the minimizer using a
// nonlinear conjugate gradient method. This version uses the
// Polak-Ribiere formula and a line search routine due to More and
// Thuente, as implemented in the routines mcsrch and mcstep.
//
// Notes: The parameters ftol and gtol should be set so that
//        0 < ftol < gtol < 0.5
//        Default values: ftol = 1.e-1, gtol = 5.e-1
//        This results in a fairly accurate line search.
//
// Mathematical description of the algorithm (g = grad f, M the
// preconditioner):
//
//        1.  set z = M^{-1} g, search = -z;
//
//        2.  for i = 0 until convergence
//
//                 find alpha that minimizes f(x + alpha*search),
//                 subject to the strong Wolfe conditions
//
//                 test for convergence
//
//                 beta_{i+1}   = ( g_{i+1}, z_{i+1} - z_i ) / ( g_i, z_i )
//
//                 search_{i+1} = -z_{i+1} + beta_{i+1} * search_i
//
// A short standalone sketch of this update appears after the function.
//----------------------------------------------------------------------------
     
{

  int i, nlcg_iter;
  int convgd = 0;
  int step_type;

  double beta;
  double delta, delta_old, delta_mid, delta_new;
  double slope, gnorm;

  double step;
  double zero = 0.;
  
// Allocate local vectors 

  int n = dim;
  int maxiter;
  double fvalue;
  ColumnVector search(n), grad(n), z(n), diag(n), xc(n);

// Initialize iteration

  maxiter = tol.getMaxIter();

  initOpt();

  if (ret_code == 0) {
    //  compute preconditioned gradient

    diag = getFcnScale();
    grad = nlp->getGrad();
    for (i=1; i<=n; i++) z(i) = grad(i)/diag(i);

    search    = -z;
    delta_old = delta_new = Dot(grad,z);
    gnorm     = sqrt(Dot(grad,grad));

    step    = 1.0/gnorm;

//---------------------------------------------------------------------------
// Main nonlinear CG iteration
//---------------------------------------------------------------------------

    for (nlcg_iter=1; nlcg_iter <= maxiter; nlcg_iter++) {

      iter_taken = nlcg_iter;

      //  compute a step along the direction search 

      if ((step_type = computeStep(search)) < 0) {
	setMesg("Algorithm terminated - No longer able to compute step with sufficient decrease");
	ret_code = step_type;
        setReturnCode(ret_code);
	return;
      }
    
      //  Accept this step and update the nonlinear model

      acceptStep(nlcg_iter, step_type);
      updateModel(nlcg_iter, n, xprev);

      xc         = nlp->getXc();
      mem_step   = xc - xprev;
      step       = Norm2(mem_step);

      fvalue     = nlp->getF();
      grad       = nlp->getGrad();
      gnorm      = sqrt(Dot(grad,grad));
      slope      = Dot(grad,search);

      //  Test for Convergence

      convgd = checkConvg();
      if (convgd > 0) {
	ret_code = convgd;
        setReturnCode(ret_code);
	*optout  << d(nlcg_iter,5) << " " << e(fvalue,12,4)  << " "
		 << e(gnorm,12,4)  << e(step,12,4) << "\n";
	return;
      }

      //
      //  compute a new search direction
      //  1. compute the preconditioned gradient, z(i) = grad(i)/diag(i)
      //  2. beta is computed using the Polak-Ribiere formula, constrained
      //     so that beta >= 0
      //  3. update the search direction and norms
  
      delta_old = delta_new; delta_mid = Dot(grad,z);

      for (i=1; i<=n; i++) z(i) = grad(i)/diag(i);

      delta_new = Dot(grad,z);
      delta     = delta_new - delta_mid;
      beta      = max(zero,delta/delta_old);

      search = -z + search*beta;

      xprev  = nlp->getXc();
      fprev  = fvalue;
      gprev  = grad;

      *optout 
	<< d(nlcg_iter,5) << " " << e(fvalue,12,4) << " " << e(gnorm,12,4) 
	<< e(step,12,4)   << " " << e(beta,12,4)   << " " << e(slope,12,4) 
	<< d(fcn_evals,4) << " " << d(grad_evals,4) << endl;
    }

    setMesg("Algorithm terminated - Number of iterations exceeds the specified limit");
    ret_code = 4;
    setReturnCode(ret_code);
  }
}
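
The preconditioned Polak-Ribiere update described in the header comment can be isolated in a few lines. Below is a minimal, self-contained sketch, not OPT++ code: it uses plain std::vector instead of ColumnVector, and the gradients gOld/gNew, the diagonal preconditioner diag and the previous direction search are made-up illustrative values. It only demonstrates how beta and the new search direction are formed from two successive preconditioned gradients, mirroring the delta_old/delta_mid/delta_new bookkeeping in the loop above.

#include <algorithm>
#include <cstddef>
#include <iostream>
#include <numeric>
#include <vector>

// Sketch of the preconditioned Polak-Ribiere step:
//   z      = M^{-1} g                        (diagonal preconditioner M)
//   beta   = max(0, (g_new, z_new - z_old) / (g_old, z_old))
//   search = -z_new + beta * search_old
static double dot(const std::vector<double>& a, const std::vector<double>& b) {
  return std::inner_product(a.begin(), a.end(), b.begin(), 0.0);
}

int main() {
  // Illustrative values only (hypothetical, not taken from OptCG).
  std::vector<double> gOld   = {1.0, -2.0, 0.5};    // previous gradient
  std::vector<double> gNew   = {0.4, -1.1, 0.2};    // current gradient
  std::vector<double> diag   = {2.0, 1.0, 4.0};     // diagonal of M
  std::vector<double> search = {-0.5, 2.0, -0.125}; // previous direction, -M^{-1} gOld

  const std::size_t n = gOld.size();
  std::vector<double> zOld(n), zNew(n);
  for (std::size_t i = 0; i < n; ++i) {
    zOld[i] = gOld[i] / diag[i];   // preconditioned old gradient
    zNew[i] = gNew[i] / diag[i];   // preconditioned new gradient
  }

  // Same bookkeeping as delta_old / delta_mid / delta_new in OptCG::optimize.
  double deltaOld = dot(gOld, zOld);
  double deltaMid = dot(gNew, zOld);
  double deltaNew = dot(gNew, zNew);
  double beta = std::max(0.0, (deltaNew - deltaMid) / deltaOld);

  // search_{i+1} = -z_{i+1} + beta_{i+1} * search_i
  for (std::size_t i = 0; i < n; ++i)
    search[i] = -zNew[i] + beta * search[i];

  std::cout << "beta = " << beta << "\nsearch =";
  for (double s : search) std::cout << ' ' << s;
  std::cout << '\n';
  return 0;
}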
Example No. 5
int ContractionHierarchiesClient::computeRoute( const IGPSLookup::Result& source, const IGPSLookup::Result& target, QVector< Node>* pathNodes, QVector< Edge >* pathEdges ) {
	EdgeIterator sourceEdge = m_graph.findEdge( source.source, source.target, source.edgeID );
	unsigned sourceWeight = sourceEdge.distance();
	EdgeIterator targetEdge = m_graph.findEdge( target.source, target.target, target.edgeID );
	unsigned targetWeight = targetEdge.distance();

	//insert source into heap
	m_heapForward->Insert( source.target, sourceWeight - sourceWeight * source.percentage, source.target );
	if ( sourceEdge.backward() && sourceEdge.forward() && source.target != source.source )
		m_heapForward->Insert( source.source, sourceWeight * source.percentage, source.source );

	//insert target into heap
	m_heapBackward->Insert( target.source, targetWeight * target.percentage, target.source );
	if ( targetEdge.backward() && targetEdge.forward() && target.target != target.source )
		m_heapBackward->Insert( target.target, targetWeight - targetWeight * target.percentage, target.target );

	int targetDistance = std::numeric_limits< int >::max();
	NodeIterator middle = ( NodeIterator ) 0;
	AllowForwardEdge forward;
	AllowBackwardEdge backward;

	// Bidirectional search: alternately settle nodes from the forward and backward heaps.
	while ( m_heapForward->Size() + m_heapBackward->Size() > 0 ) {

		if ( m_heapForward->Size() > 0 )
			computeStep( m_heapForward, m_heapBackward, forward, backward, &middle, &targetDistance );

		if ( m_heapBackward->Size() > 0 )
			computeStep( m_heapBackward, m_heapForward, backward, forward, &middle, &targetDistance );

	}

	if ( targetDistance == std::numeric_limits< int >::max() )
		return std::numeric_limits< int >::max();

	// abort early if the path description is not requested
	if ( pathNodes == NULL || pathEdges == NULL )
		return targetDistance;

	// Collect the forward search tree from the meeting node back to the source.
	std::stack< NodeIterator > stack;
	NodeIterator pathNode = middle;
	while ( true ) {
		NodeIterator parent = m_heapForward->GetData( pathNode ).parent;
		stack.push( pathNode );
		if ( parent == pathNode )
			break;
		pathNode = parent;
	}

	pathNodes->push_back( source.nearestPoint );
	bool reverseSourceDescription = pathNode != source.target;
	if ( source.source == source.target && sourceEdge.backward() && sourceEdge.forward() && source.percentage < 0.5 )
		reverseSourceDescription = !reverseSourceDescription;
	if ( sourceEdge.unpacked() ) {
		bool unpackSourceForward = source.target != sourceEdge.target() ? reverseSourceDescription : !reverseSourceDescription;
		m_graph.path( sourceEdge, pathNodes, pathEdges, unpackSourceForward );
		if ( reverseSourceDescription ) {
			pathNodes->remove( 1, pathNodes->size() - 1 - source.previousWayCoordinates );
		} else {
			pathNodes->remove( 1, source.previousWayCoordinates - 1 );
		}
	} else {
		pathNodes->push_back( m_graph.node( pathNode ) );
		pathEdges->push_back( sourceEdge.description() );
	}
	pathEdges->front().length = pathNodes->size() - 1;
	pathEdges->front().seconds *= reverseSourceDescription ? source.percentage : 1 - source.percentage;

	// Unpack shortcut edges along the forward part of the path.
	while ( stack.size() > 1 ) {
		const NodeIterator node = stack.top();
		stack.pop();
		unpackEdge( node, stack.top(), true, pathNodes, pathEdges );
	}

	// Unpack the backward search tree from the meeting node towards the target.
	pathNode = middle;
	while ( true ) {
		NodeIterator parent = m_heapBackward->GetData( pathNode ).parent;
		if ( parent == pathNode )
			break;
		unpackEdge( parent, pathNode, false, pathNodes, pathEdges );
		pathNode = parent;
	}

	int begin = pathNodes->size();
	bool reverseTargetDescription = pathNode != target.source;
	if ( target.source == target.target && targetEdge.backward() && targetEdge.forward() && target.percentage > 0.5 )
		reverseTargetDescription = !reverseTargetDescription;
	if ( targetEdge.unpacked() ) {
		bool unpackTargetForward = target.target != targetEdge.target() ? reverseTargetDescription : !reverseTargetDescription;
		m_graph.path( targetEdge, pathNodes, pathEdges, unpackTargetForward );
		if ( reverseTargetDescription ) {
			pathNodes->resize( pathNodes->size() - target.previousWayCoordinates );
		} else {
			pathNodes->resize( begin + target.previousWayCoordinates - 1 );
		}
	} else {
		pathEdges->push_back( targetEdge.description() );
	}
	pathNodes->push_back( target.nearestPoint );
	pathEdges->back().length = pathNodes->size() - begin;
	pathEdges->back().seconds *= reverseTargetDescription ? 1 - target.percentage : target.percentage;

	return targetDistance;
}