Example #1
/*!
  Stack column vectors.

  \param A : Initial vector.
  \param B : Vector to stack at the end of A.
  \param C : Resulting stacked vector \f$C = [A^T \, B^T]^T\f$.

  \code
  vpColVector A(3);
  vpColVector B(5);
  vpColVector C;
  vpColVector::stack(A, B, C); // C = [A^T B^T]^T
  // C is now an 8-dimension column vector
  \endcode

  \sa stack(const vpColVector &)
  \sa stack(const vpColVector &, const vpColVector &)
*/
void vpColVector::stack(const vpColVector &A, const vpColVector &B, vpColVector &C)
{
  unsigned int nrA = A.getRows();
  unsigned int nrB = B.getRows();

  if (nrA == 0 && nrB == 0) {
    C.resize(0);
    return;
  }

  if (nrB == 0) {
    C = A;
    return;
  }

  if (nrA == 0) {
    C = B;
    return;
  }

  // General case
  C.resize(nrA + nrB);

  for (unsigned int i=0; i<nrA; i++)
    C[i] = A[i];

  for (unsigned int i=0; i<nrB; i++)
    C[nrA+i] = B[i];
}
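A minimal usage sketch of the branches above (vector sizes are illustrative); stacking with an empty operand reduces to a copy:

  vpColVector A(3), B, C;       // B stays empty (0 rows)
  vpColVector::stack(A, B, C);  // degenerate branch: C = A, 3 rows
  vpColVector D(2), E;
  vpColVector::stack(C, D, E);  // general branch: E has 5 rows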
Example #2
void
vpMbKltTracker::computeVVSWeights(const unsigned int iter, const unsigned int nbInfos, const vpColVector &R,
    vpColVector &w_true, vpColVector &w, vpRobust &robust) {
  if(iter == 0){
    w_true.resize(2*nbInfos);
    w.resize(2*nbInfos);
    w = 1;
    w_true = 1;
  }
  robust.setIteration(iter);
  robust.setThreshold(2/cam.get_px());
  robust.MEstimator( vpRobust::TUKEY, R, w);
}
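computeVVSWeights() initializes the weights to 1 on the first iteration, then lets the Tukey M-estimator update w from the residual R. As a hedged illustration of how such weights are typically consumed in an IRLS scheme (this application step is not shown in vpMbKltTracker here):

  // Illustrative only: damp each residual by its robust weight,
  // so outliers (w[i] near 0) barely influence the update.
  vpColVector weighted_R(R.getRows());
  for (unsigned int i = 0; i < R.getRows(); i++)
    weighted_R[i] = w[i] * R[i];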
/*!

  Get the robot displacement since the last call of this method.

  \warning The first call of this method does not give a good value for
  the displacement.

  \param frame The frame in which the measured displacement is expressed.

  \param d The displacement:

  - In the articular frame, the dimension of d is 2 (the number of axes of
  the robot), with d[0] the pan displacement and d[1] the tilt displacement.

  - In the camera frame, the dimension of d is 6 (tx, ty, tz, tux, tuy, tuz).
  Translations are expressed in meters, rotations in radians with the theta U
  representation.

  \exception vpRobotException::wrongStateError If an unsupported frame type is
  given.

  \sa getArticularDisplacement(), getCameraDisplacement()

*/
void
vpRobotBiclops::getDisplacement(vpRobot::vpControlFrameType frame,
				vpColVector &d)
{
  vpColVector q_current; // current position

  getPosition(vpRobot::ARTICULAR_FRAME, q_current);

  switch(frame) {
  case vpRobot::ARTICULAR_FRAME:
    d.resize(vpBiclops::ndof);
    d = q_current - q_previous;
    break ;

  case vpRobot::CAMERA_FRAME: {
    d.resize(6);
    vpHomogeneousMatrix fMc_current;
    vpHomogeneousMatrix fMc_previous;
    fMc_current  = vpBiclops::get_fMc(q_current);
    fMc_previous = vpBiclops::get_fMc(q_previous);
    vpHomogeneousMatrix c_previousMc_current;
    // fMc_c = fMc_p * c_pMc_c
    // => c_pMc_c = (fMc_p)^-1 * fMc_c
    c_previousMc_current = fMc_previous.inverse() * fMc_current;

    // Compute the instantaneous velocity from this homogeneous matrix.
    d = vpExponentialMap::inverse( c_previousMc_current );
    break ;
  }

  case vpRobot::REFERENCE_FRAME:
      vpERROR_TRACE ("Cannot get a displacement in the reference frame: "
                     "functionality not implemented");
      throw vpRobotException (vpRobotException::wrongStateError,
                              "Cannot get a displacement in the reference frame: "
                              "functionality not implemented");
      break ;
  case vpRobot::MIXT_FRAME:
      vpERROR_TRACE ("Cannot get a displacement in the mixt frame: "
                     "functionality not implemented");
      throw vpRobotException (vpRobotException::wrongStateError,
                              "Cannot get a displacement in the mixt frame: "
                              "functionality not implemented");
      break ;
  }


  q_previous = q_current; // Update for next call of this method

}
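A short usage sketch (the robot instance is hypothetical and assumed initialized); per the \warning above, the first returned displacement should be discarded:

  vpRobotBiclops robot;  // hypothetical, assumed configured and connected
  vpColVector d;
  robot.getDisplacement(vpRobot::ARTICULAR_FRAME, d); // first call: discard d
  // ... let the robot move ...
  robot.getDisplacement(vpRobot::ARTICULAR_FRAME, d); // d[0]: pan, d[1]: tilt (rad)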
Example #4
/*!

  Get the robot displacement since the last call of this method.

  \warning The first call of this method does not give a good value for
  the displacement.

  \param frame The frame in which the measured displacement is expressed.

  \param d The displacement:
  - In the articular frame, the dimension of d is 2 (the number of axes of
  the robot), with d[0] the pan displacement and d[1] the tilt displacement.
  - In the camera frame, the dimension of d is 6 (tx, ty, tz, rx, ry,
  rz). Translations are expressed in meters, rotations in radians.

  \exception vpRobotException::wrongStateError If an unsupported frame type is
  given.

  \sa getArticularDisplacement(), getCameraDisplacement()

*/
void
vpRobotPtu46::getDisplacement(vpRobot::vpControlFrameType frame,
			      vpColVector &d)
{
  double d_[6];

  switch (frame)
  {
  case vpRobot::CAMERA_FRAME:
    {
      d.resize (6);
      ptu.measureDpl(d_, PTU_CAMERA_FRAME);
      d[0]=d_[0];
      d[1]=d_[1];
      d[2]=d_[2];
      d[3]=d_[3];
      d[4]=d_[4];
      d[5]=d_[5];
      break ;
    }
  case vpRobot::ARTICULAR_FRAME:
    {
      ptu.measureDpl(d_, PTU_ARTICULAR_FRAME);
      d.resize (vpPtu46::ndof);
      d[0]=d_[0];  // pan
      d[1]=d_[1];  // tilt
      break ;
    }
  case vpRobot::REFERENCE_FRAME:
    {
      vpERROR_TRACE ("Cannot get a displacement in the reference frame: "
		   "functionality not implemented");
      throw vpRobotException (vpRobotException::wrongStateError,
			      "Cannot get a displacement in the reference frame:"
			      "functionality not implemented");
      break ;
    }
  case vpRobot::MIXT_FRAME:
    {
      vpERROR_TRACE ("Cannot get a displacement in the mixt frame: "
		   "functionality not implemented");
      throw vpRobotException (vpRobotException::wrongStateError,
			      "Cannot get a displacement in the reference frame:"
			      "functionality not implemented");

      break ;
    }
  }
}
// Fit model to this random selection of data points.
void
vpHomography::computeTransformation(vpColVector &x, unsigned int *ind, vpColVector &M)
{
  unsigned int i ;
  unsigned int n = x.getRows()/4 ;
  std::vector<double> xa(4), xb(4);
  std::vector<double> ya(4), yb(4);
  unsigned int n2 = n * 2;
  unsigned int ind2;
  for(i=0 ; i < 4 ; i++)
    {
      ind2 = 2 * ind[i];
      xb[i] = x[ind2] ;
      yb[i] = x[ind2+1] ;

      xa[i] = x[n2+ind2] ;
      ya[i] = x[n2+ind2+1] ;
    }

  vpHomography aHb ;
  try {
    vpHomography::HLM(xb, yb, xa, ya, true, aHb);
  }
  catch(...)
    {
      aHb.setIdentity();
    }

  // Normalize before exporting so that the model vector M is scaled.
  aHb /= aHb[2][2] ;

  M.resize(9);
  for (i=0 ; i <9 ; i++)
    {
      M[i] = aHb.data[i] ;
    }
}
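The model vector M simply holds the nine entries of aHb in row-major order; computeResidual below reads them back the same way:

  vpMatrix H(3,3);
  for (unsigned int k = 0; k < 9; k++)
    H.data[k] = M[k]; // H[i][j] = M[3*i+j]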
/*!
  Get the current position of the robot.

  \param frame : Control frame type in which to get the position, either:
  - in the camera Cartesian frame,
  - joint (articular) coordinates of each axis (not implemented),
  - in a reference or fixed Cartesian frame attached to the robot base,
  - in a mixt Cartesian frame (translation in the reference frame, and
  rotation in the camera frame).

  \param q : Measured position of the robot:
  - in the camera Cartesian frame, a 6 dimension vector, set to 0.

  - in articular, this functionality is not implemented.

  - in the reference frame, a 6 dimension vector, where the first 3 values
  correspond to the translation tx, ty, tz in meters (like a
  vpTranslationVector), and the last 3 values to the rx, ry, rz rotation
  (like a vpRxyzVector).
*/
void vpSimulatorPioneer::getPosition(const vpRobot::vpControlFrameType frame, vpColVector &q)
{
  q.resize (6);

  switch (frame) {
  case vpRobot::CAMERA_FRAME :
    q = 0;
    break;

  case vpRobot::ARTICULAR_FRAME :
    std::cout << "ARTICULAR_FRAME is not implemented in vpSimulatorPioneer::getPosition()" << std::endl;
    break;
  case vpRobot::REFERENCE_FRAME : {
    // Convert wMc_ to a position
    // From fMc extract the pose
    vpRotationMatrix wRc;
    this->wMc_.extract(wRc);
    vpRxyzVector rxyz;
    rxyz.buildFrom(wRc);

    for (unsigned int i=0; i < 3; i++) {
      q[i] = this->wMc_[i][3]; // translation x,y,z
      q[i+3] = rxyz[i]; // Euler rotation x,y,z
    }

    break;
    }
  case vpRobot::MIXT_FRAME :
    std::cout << "MIXT_FRAME is not implemented in vpSimulatorPioneer::getPosition()" << std::endl;
    break;
  }
}
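A minimal usage sketch (assuming default construction of the simulator is sufficient):

  vpSimulatorPioneer robot; // hypothetical setup
  vpColVector q;
  robot.getPosition(vpRobot::REFERENCE_FRAME, q);
  // q[0..2]: tx, ty, tz in meters; q[3..5]: Rxyz Euler angles in radians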
Example #7
/*!
  Compute the error \f$ (I-I^*)\f$ between the current and the desired
  visual features.
  \param s_star : Desired visual feature.
  \param e : Error between the current and the desired features.

*/
void
vpFeatureLuminance::error(const vpBasicFeature &s_star,
			  vpColVector &e)
{
  e.resize(dim_s) ;

  for (unsigned int i = 0 ; i < dim_s ; i++)
    {
      e[i] = s[i] - s_star[i] ;
    }
}
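A hedged usage sketch; s_cur and s_star are assumed to be two vpFeatureLuminance objects already built from the current and desired images:

  vpColVector e;
  s_cur.error(s_star, e); // e[i] = I(i) - I*(i), one entry per luminance feature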
Example #8
/*!

  Get the articular velocity.

  \param frame : Control frame. This head can only be controlled in the
  articular frame.

  \param q_dot : The measured articular velocity in rad/s.

  \exception vpRobotException::wrongStateError : If an unsupported frame type
  is given.
*/
void
vpRobotPtu46::getVelocity (const vpRobot::vpControlFrameType frame,
			   vpColVector & q_dot)
{

  TPtuFrame ptuFrameInterface = PTU_ARTICULAR_FRAME;

  switch (frame)
  {
  case vpRobot::CAMERA_FRAME:
    {
      vpERROR_TRACE ("Cannot get a velocity in the camera frame: "
		   "functionality not implemented");
      throw vpRobotException (vpRobotException::wrongStateError,
			      "Cannot get a velocity in the camera frame:"
			      "functionality not implemented");
       break ;
    }
  case vpRobot::ARTICULAR_FRAME:
    {
      ptuFrameInterface = PTU_ARTICULAR_FRAME;
      break ;
    }
  case vpRobot::REFERENCE_FRAME:
    {
      vpERROR_TRACE ("Cannot get a velocity in the reference frame: "
		   "functionality not implemented");
      throw vpRobotException (vpRobotException::wrongStateError,
			      "Cannot get a velocity in the reference frame:"
			      "functionality not implemented");
      break ;
    }
  case vpRobot::MIXT_FRAME:
    {

      vpERROR_TRACE ("Cannot get a velocity in the mixt frame: "
		   "functionality not implemented");
      throw vpRobotException (vpRobotException::wrongStateError,
			      "Cannot get a velocity in the mixt frame:"
			      "functionality not implemented");
      break ;
    }
  }

  q_dot.resize(vpPtu46::ndof);
  double ptuSpeedInterface[2];

  ptu.getCurrentSpeed(ptuSpeedInterface, ptuFrameInterface);

  q_dot[0] = ptuSpeedInterface[0];
  q_dot[1] = ptuSpeedInterface[1];

}
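A short usage sketch (the head is hypothetical and assumed connected); only the articular frame is supported:

  vpRobotPtu46 robot; // hypothetical, assumed connected
  vpColVector q_dot;
  robot.getVelocity(vpRobot::ARTICULAR_FRAME, q_dot); // q_dot[0]: pan, q_dot[1]: tilt (rad/s)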
// Evaluate distances between points and model.
double
vpHomography::computeResidual(vpColVector &x, vpColVector &M, vpColVector &d)
{
  unsigned int i ;
  unsigned int n = x.getRows()/4 ;
  unsigned int n2 = n  * 2;
  unsigned int i2;
  vpColVector *pa;
  vpColVector *pb;

  pa = new vpColVector [n];
  pb = new vpColVector [n];

  for( i=0 ; i < n ; i++)
    {
      i2 = 2 * i;
      pb[i].resize(3) ;
      pb[i][0] = x[i2] ;
      pb[i][1] = x[i2+1] ;
      pb[i][2] = 1;

      pa[i].resize(3) ;
      pa[i][0] = x[n2+i2] ;
      pa[i][1] = x[n2+i2+1] ;
      pa[i][2] = 1;
    }

  vpMatrix aHb(3,3) ;

  for (i=0 ; i <9 ; i++)
    {
      aHb.data[i] = M[i];
    }

  aHb /= aHb[2][2];

  d.resize(n);

  vpColVector Hpb  ;
  for (i=0 ; i <n ; i++)
    {
      Hpb = aHb*pb[i] ;
      Hpb /= Hpb[2] ;
      d[i] = sqrt((pa[i] - Hpb ).sumSquare()) ;
    }

  delete [] pa;
  delete [] pb;

  return 0 ;
}
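In Doxygen terms, the residual computed above is, for each point pair, \f$ d_i = \| {\bf p}_{a,i} - \widehat{{\bf H} {\bf p}_{b,i}} \| \f$, where the hat denotes normalization by the third homogeneous coordinate. Note that the per-point distances are returned through d; the double return value is always 0.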
/*!
  Change the velocity frame.

  The velocity results from the visual servoing control law. It is
  basically expressed in the camera frame. Using the current measurements,
  we can compute the transformation wMc between the camera frame and the
  waist frame, the corresponding twist matrix wVc, and use it to change
  the velocity frame.

  \return 0 if ok,
          -1 if the input velocity is not of size 6.

  \warning Nothing guarantees the size of poseHeadInFoot and
  poseWaistInFoot.
 */
int  HRP2ComputeControlLawProcess::changeVelocityFrame(const vpColVector& velCam,
						       vpColVector& velWaist,
						       const double *poseHeadInFoot,
						       const double *poseWaistInFoot,
						       vpHomogeneousMatrix &afMh )
{
  // test on the input velocity
  if(velCam.getRows()!=6)
    {
      return -1;
    }

  velWaist.resize(6);

  //Express rotation in theta U
  vpRxyzVector headInFootRxyz      (poseHeadInFoot[3],
				    poseHeadInFoot[4],
				    poseHeadInFoot[5]);
  vpRxyzVector waistInFootRxyz     (poseWaistInFoot[3],
				    poseWaistInFoot[4],
				    poseWaistInFoot[5]);
  vpThetaUVector headInFootThU     (headInFootRxyz);
  vpThetaUVector waistInFootThU    (waistInFootRxyz);

  //Create translation vectors
  vpTranslationVector  headInFootT (poseHeadInFoot[0],
				    poseHeadInFoot[1],
				    poseHeadInFoot[2]);
  vpTranslationVector  waistInFootT(poseWaistInFoot[0],
				    poseWaistInFoot[1],
				    poseWaistInFoot[2]);

  //Create the corresponding homogeneousMatrix
  vpHomogeneousMatrix  fMh (headInFootT ,headInFootThU);
  vpHomogeneousMatrix  fMw (waistInFootT, waistInFootThU);

  afMh=fMh;

  //Compute the position of the head in the waist
  vpHomogeneousMatrix wMh = fMw.inverse()*fMh;

  //Compute the associate twist matrix
  vpVelocityTwistMatrix wVh (wMh);

  //The position of the current camera in the head should
  // have been previously loaded
  //Change the velocity frame
  velWaist = wVh*m_hVc*velCam;

  return 0;
}
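The core of the frame change is the standard twist composition \f$ {}^{w}{\bf v} = {}^{w}{\bf V}_{h} \, {}^{h}{\bf V}_{c} \, {}^{c}{\bf v} \f$. A reduced sketch of just that step (wMh and hMc are assumed already known; hVc plays the role of the preloaded m_hVc):

  vpVelocityTwistMatrix wVh(wMh); // 6x6 twist built from the head-in-waist pose
  vpVelocityTwistMatrix hVc(hMc); // hypothetical camera-in-head twist
  vpColVector velWaist = wVh * hVc * velCam;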
Example #11
/*!

Extract the rotation angle \f$ \theta \f$ and the unit vector
\f$\bf u \f$ from the \f$ \theta {\bf u} \f$ representation.

\param theta : Rotation angle \f$ \theta \f$.

\param u : Unit vector \f${\bf u} = (u_{x},u_{y},u_{z})^{\top} \f$
representing the rotation axis.

*/
void 
vpThetaUVector::extract(double &theta, vpColVector &u) const
{
  u.resize(3);

  theta = sqrt(data[0]*data[0] + data[1]*data[1] + data[2]*data[2]);
  //if (theta == 0) {
  if (std::fabs(theta) <= std::numeric_limits<double>::epsilon()) {
    u = 0;
    return;
  }
  for (unsigned int i=0 ; i < 3 ; i++) 
    u[i] = data[i] / theta ;
}
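A round-trip usage sketch (the rotation values are hypothetical):

  vpThetaUVector tu;
  tu[0] = M_PI/2; tu[1] = 0; tu[2] = 0; // rotation of pi/2 around x
  double theta;
  vpColVector u;
  tu.extract(theta, u); // theta = pi/2, u = (1, 0, 0)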
static void
getPlaneInfo(vpPlane &oN, vpHomogeneousMatrix &cMo, vpColVector &cN, double &cd)
{
  double A1 = cMo[0][0]*oN.getA()+ cMo[0][1]*oN.getB() + cMo[0][2]*oN.getC();
  double B1 = cMo[1][0]*oN.getA()+ cMo[1][1]*oN.getB() + cMo[1][2]*oN.getC();
  double C1 = cMo[2][0]*oN.getA()+ cMo[2][1]*oN.getB() + cMo[2][2]*oN.getC();
  double D1 = oN.getD()  - (cMo[0][3]*A1 + cMo[1][3]*B1  + cMo[2][3]*C1);

  cN.resize(3) ;
  cN[0] = A1 ;
  cN[1] = B1 ;
  cN[2] = C1 ;
  cd = -D1 ;
}
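Reading the code off: this helper expresses an object-frame plane in the camera frame. The normal is rotated, \f$ {}^{c}{\bf n} = {}^{c}{\bf R}_{o} \, {}^{o}{\bf n} \f$, and the offset becomes \f$ d_c = {}^{c}{\bf t}_{o} \cdot {}^{c}{\bf n} - {}^{o}D \f$, with \f$ ({}^{c}{\bf R}_{o}, {}^{c}{\bf t}_{o}) \f$ taken from cMo.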
/*!

  Get an articular position from the position file.

  \param filename : Position file.

  \param q : The articular position read in the file.

  \code
  # Example of biclops position file
  # The axis positions must be preceded by R:
  # First value : pan  articular position in degrees
  # Second value: tilt articular position in degrees
  R: 15.0 5.0
  \endcode

  \return true if a position was found, false otherwise.

*/
bool
vpRobotBiclops::readPositionFile(const char *filename, vpColVector &q)
{
  FILE * pt_f ;
  pt_f = fopen(filename,"r") ;

  if (pt_f == NULL) {
    vpERROR_TRACE ("Can not open biclops position file %s", filename);
    return false;
  }

  char line[FILENAME_MAX];
  char head[] = "R:";
  bool end = false;

  do {
    // skip lines beginning with # for comments
    if (fgets (line, FILENAME_MAX, pt_f) != NULL) {
      if ( strncmp (line, "#", 1) != 0) {
	// this line is not a comment
	if ( fscanf (pt_f, "%s", line) != EOF)   {
	  if ( strcmp (line, head) == 0)
	    end = true; 	// robot position was found
	}
	else
	  return (false); // end of file without position
      }
    }
    else {
      return (false);// end of file
    }

  }
  while ( end != true );

  double q1,q2;
  // Read positions
  if (fscanf(pt_f, "%lf %lf", &q1, &q2) == EOF) {
    std::cout << "Cannot read joint positions." << std::endl;
    return false;
  }
  q.resize(vpBiclops::ndof) ;

  q[0] = vpMath::rad(q1) ; // pan position in rad
  q[1] = vpMath::rad(q2) ; // tilt position in rad

  fclose(pt_f) ;
  return (true);
}
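A usage sketch (the file path is hypothetical; readPositionFile is assumed callable without a connected head, as its body suggests):

  vpColVector q;
  if (vpRobotBiclops::readPositionFile("/tmp/biclops.pos", q))
    std::cout << "pan: " << q[0] << " tilt: " << q[1] << " (rad)" << std::endl;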
Example #14
/*!

  Return the position of each axis.

  \param frame : Control frame. This head can only be controlled in the
  articular frame.

  \param q : The position of the axis.

  \exception vpRobotException::wrongStateError : If an unsupported frame type
  is given.

*/
void
vpRobotPtu46::getPosition (const vpRobot::vpControlFrameType frame,
			   vpColVector & q)
{
  vpDEBUG_TRACE (9, "# Entering.");

  switch(frame)
  {
  case vpRobot::CAMERA_FRAME :
    vpERROR_TRACE ("Cannot get position in camera frame: not implemented");
    throw vpRobotException (vpRobotException::lowLevelError,
			    "Cannot get position in camera frame: "
			    "not implemented");
    break;
  case vpRobot::REFERENCE_FRAME:
    vpERROR_TRACE ("Cannot get position in reference frame: "
		 "not implemented");
    throw vpRobotException (vpRobotException::lowLevelError,
			    "Cannot get position in reference frame: "
			    "not implemented");
    break;
  case vpRobot::MIXT_FRAME:
    vpERROR_TRACE ("Cannot get position in mixt frame: "
		 "not implemented");
    throw vpRobotException (vpRobotException::lowLevelError,
			    "Cannot get position in mixt frame: "
			    "not implemented");
    break;
  case vpRobot::ARTICULAR_FRAME:
    break ;
  }

  double artpos[2];

  if (0 != ptu.getCurrentPosition( artpos ) )
  {
    vpERROR_TRACE ("Error when calling ptu.getCurrentPosition().");
    throw vpRobotException (vpRobotException::lowLevelError,
                            "Error when calling ptu.getCurrentPosition().");
  }

  q.resize (vpPtu46::ndof);

  q[0] = artpos[0];
  q[1] = artpos[1];
}
void
vpHomography::initRansac(unsigned int n,
			 double *xb, double *yb,
			 double *xa, double *ya,
			 vpColVector &x)
{
  x.resize(4*n) ;
  unsigned int n2 = n * 2;
  unsigned int i2;
  for (unsigned int i=0 ; i < n ; i++)
  {
    i2 = 2 * i;
    x[i2] = xb[i] ;
    x[i2+1] = yb[i] ;
    x[n2+i2] = xa[i] ;
    x[n2+i2+1] = ya[i] ;
  }
}
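The packed layout produced here, and relied on by computeTransformation and computeResidual above, is

  x = [ xb0 yb0 ... xb(n-1) yb(n-1) | xa0 ya0 ... xa(n-1) ya(n-1) ]

so point i of set b sits at indices (2i, 2i+1) and its counterpart of set a at (2n+2i, 2n+2i+1).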
/*!
  Fit model to this random selection of data points.

  We chose the Dementhon algorithm to compute the pose
*/
void
vpPose::computeTransformation(vpColVector &x, unsigned int *ind, vpColVector &M)
{
  unsigned int i ;

  vpPoint p[4] ;

  vpPose pose ;
  pose.clearPoint() ;
  for(i=0 ; i < 4 ; i++)
  {

    unsigned int index = 5*ind[i] ;

    p[i].set_x(x[index]) ;
    p[i].set_y(x[index+1]) ;

    p[i].setWorldCoordinates(x[index+2],x[index+3], x[index+4]) ;
    pose.addPoint(p[i]) ;
  }


  //  pose.printPoint() ;
  vpHomogeneousMatrix cMo ;
  try {
    pose.computePose(vpPose::DEMENTHON, cMo) ;
    //    std::cout << cMo << std::endl ;
  }
  catch(...)
  {
    cMo.setIdentity() ;
  }

  M.resize(16) ;
  for (i=0 ; i <16 ; i++)
  {
    M[i] = cMo.data[i] ;
  }

}
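As with the homography case, the model vector packs the 16 entries of cMo in row-major order; computeResidual below restores them with the mirror loop (cMo.data[i] = M[i]).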
Example #17
/*!
  Get the robot position (frame has to be specified).

  \param frame : Control frame. For the moment, only vpRobot::REFERENCE_FRAME is implemented.

  \param pose : A 6 dimension vector that corresponds to the position of the robot.

  \exception vpRobotException::wrongStateError : If the specified control frame is not supported.

  */
void vpROSRobot::getPosition(const vpRobot::vpControlFrameType frame, vpColVector &pose) {
  while(!odom_mutex); // Busy-wait until the odometry callback releases the flag
  odom_mutex = false; // Take the flag
  if (frame == vpRobot::REFERENCE_FRAME)
  {
    pose.resize(6);
    pose[0] = p[0];
    pose[1] = p[1];
    pose[2] = p[2];
    vpRotationMatrix R(q);
    vpRxyzVector V(R);
    pose[3]=V[0];
    pose[4]=V[1];
    pose[5]=V[2];
  }
  else
  {
    throw vpRobotException (vpRobotException::wrongStateError,
                            "Cannot get the robot position in the specified control frame");
  }
  odom_mutex = true;
}
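A usage sketch (ROS connection and odometry subscription are assumed established elsewhere):

  vpROSRobot robot; // hypothetical, assumed connected to an odometry topic
  vpColVector pose;
  robot.getPosition(vpRobot::REFERENCE_FRAME, pose);
  // pose[0..2]: position in meters; pose[3..5]: Rxyz angles in radians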
double
vpPose::computeResidual(vpColVector &x, vpColVector &M, vpColVector &d)
{

  unsigned int i ;
  unsigned int n = x.getRows()/5 ;

  vpPoint *p;
  p = new vpPoint [n] ;
  for( i=0 ; i < n ; i++)
  {
    p[i].setWorldCoordinates(x[5*i+2],x[5*i+3], x[5*i+4]) ;
  }

  vpHomogeneousMatrix cMo ;
  for (i=0 ; i <16 ; i++)
  {
    cMo.data[i] = M[i];
  }


  d.resize(n) ;
  vpColVector cP, xy ;

  for( i=0 ; i < n ; i++)
  {
    p[i].changeFrame(cMo,cP) ;
    p[i].projection(cP,xy) ;
    d[i] = sqrt(vpMath::sqr(x[5*i]-xy[0])+vpMath::sqr(x[5*i+1]-xy[1])) ;
  }

  delete [] p;

  return 0 ;
}
void
vpPose::initRansac(const unsigned int n,
       const double *x, const double *y,
       const unsigned int m,
       const double *X, const double *Y, const double *Z,
       vpColVector &data)
{
  data.resize(5*n*m) ;
  unsigned int k =0 ;
  for (unsigned int i=0 ; i < n ; i++)
  {
    for (unsigned int j=0 ; j < m ; j++)
    {
      data[k] = x[i] ;
      data[k+1] = y[i] ;
      data[k+2] = X[j] ;
      data[k+3] = Y[j] ;
      data[k+4] = Z[j] ;

      k+=5 ;
    }
  }
}
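Each of the n*m candidate correspondences is packed as five consecutive values,

  data = [ x0 y0 X0 Y0 Z0 | x0 y0 X1 Y1 Z1 | ... ]

i.e. 2D point i paired with 3D point j starts at offset 5*(i*m + j), which is exactly how computeTransformation and computeResidual above index back into it.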
Example #20
/*!
  Gets the current velocity of the robot.

  \param frame : Control frame. For the moment, only vpRobot::ARTICULAR_FRAME to get left
  and right wheel velocities and vpRobot::REFERENCE_FRAME to get translational and
  rotational velocities are implemented.

  \param velocity : A two dimension vector that corresponds to the current velocities applied to the robot.
  - If the frame is vpRobot::ARTICULAR_FRAME, first value is the velocity of the left wheel and
    second value is the velocity of the right wheel in m/s.
  - If the frame is vpRobot::REFERENCE_FRAME, first value is the translation velocity in m/s.
    Second value is the rotational velocity in rad/s.

  \exception vpRobotException::dimensionError : Velocity vector is not a 2 dimension vector.
  \exception vpRobotException::wrongStateError : If the specified control frame is not supported.

  \sa getVelocity(const vpRobot::vpControlFrameType)
  */
void vpRobotPioneer::getVelocity (const vpRobot::vpControlFrameType frame, vpColVector & velocity)
{
  init();
  velocity.resize(2);

  if (frame == vpRobot::ARTICULAR_FRAME)
  {
    this->lock();
    velocity[0] = this->getLeftVel() / 1000.;
    velocity[1] = this->getRightVel() / 1000.;
    this->unlock();
  }
  else if (frame == vpRobot::REFERENCE_FRAME)
  {
    this->lock();
    velocity[0] = this->getVel() / 1000.;
    velocity[1] = vpMath::rad( this->getRotVel() );
    this->unlock();
  }
  else {
    throw vpRobotException (vpRobotException::wrongStateError,
                            "Cannot get the robot volocity in the specified control frame");
  }
}
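A usage sketch (the robot is assumed connected through the Aria layer beforehand):

  vpRobotPioneer robot; // hypothetical, assumed connected
  vpColVector v;
  robot.getVelocity(vpRobot::REFERENCE_FRAME, v);
  // v[0]: translational velocity (m/s), v[1]: rotational velocity (rad/s)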
// Fit model to this random selection of data points.
void
vpHomography::computeTransformation(vpColVector &x, unsigned int *ind, vpColVector &M)
{
  unsigned int i ;
  unsigned int n = x.getRows()/4 ;
  double xa[4], xb[4];
  double ya[4], yb[4];
  unsigned int n2 = n * 2;
  unsigned int ind2;
  for(i=0 ; i < 4 ; i++)
    {
      ind2 = 2 * ind[i];
      xb[i] = x[ind2] ;
      yb[i] = x[ind2+1] ;

      xa[i] = x[n2+ind2] ;
      ya[i] = x[n2+ind2+1] ;
    }

  vpHomography aHb ;
  try {
    vpHomography::HLM(4,xb, yb, xa, ya, true, aHb);
    //vpHomography::HLM(8, xb, yb, xa, ya, false, aHb); //modified 13/09
  }
  catch(...)
    {
      aHb.setIdentity();
    }

  // Normalize before exporting so that the model vector M is scaled.
  aHb /= aHb[2][2] ;

  M.resize(9);
  for (i=0 ; i <9 ; i++)
    {
      M[i] = aHb.data[i] ;
    }
}
Example #22
/*!
  Compute the pose from a set of n 2D points (x,y) and m 3D points
  (X,Y,Z) using the Ransac algorithm. It is not assumed that the 2D and
  3D points are registered (there are n*m possibilities).

  At least numberOfInlierToReachAConsensus true correspondences are
  required to validate the pose.

  The inliers are given in xi, yi, Xi, Yi, Zi.

  The pose is returned in cMo.
 */
void
vpPose::ransac(const int n,
	       const double *x, const double *y,
	       const int m,
	       const double *X, const double *Y, const double *Z,
	       const int  numberOfInlierToReachAConsensus,
	       const double threshold,
	       int &ninliers,
	       vpColVector &xi,  vpColVector &yi,
	       vpColVector &Xi,  vpColVector &Yi,  vpColVector &Zi,
	       vpHomogeneousMatrix &cMo)
{


  double tms = vpTime::measureTimeMs() ;
  vpColVector data ;
  int i;
  vpPose::initRansac(n,x,y,m,X,Y,Z, data) ;

  vpColVector M(16) ;
  vpColVector inliers(n*m) ;
  vpRansac<vpPose>::ransac(n*m,data,4,
			   threshold, M,inliers,
			   numberOfInlierToReachAConsensus) ;


  // we count the number of inliers
  ninliers = 0 ;
  for(i=0 ; i < n*m ; i++)
  {
    if (inliers[i]==1)
    {
      ninliers++ ;
    }
  }

  xi.resize(ninliers) ;
  yi.resize(ninliers) ;
  Xi.resize(ninliers) ;
  Yi.resize(ninliers) ;
  Zi.resize(ninliers) ;

  int k =0 ;
  for(i=0 ; i < n*m ; i++)
  {
    if (inliers[i]==1)
    {
      xi[k] = data[5*i] ;
      yi[k] = data[5*i+1] ;
      Xi[k] = data[5*i+2] ;
      Yi[k] = data[5*i+3] ;
      Zi[k] = data[5*i+4] ;
      k++ ;
    }
  }

  for (i=0 ; i <16 ; i++)
  {
      cMo.data[i] = M[i];
  }

  std::cout << vpTime::measureTimeMs() - tms << "ms" << std::endl ;

}
/*!

  Return the position of each axis.
  - In positioning control mode, call vpRobotBiclopsController::getPosition()
  - In speed control mode, call vpRobotBiclopsController::getActualPosition()

  \param frame : Control frame. This biclops head can only be controlled in
  articular.

  \param q : The position of the axis in radians.

  \exception vpRobotException::wrongStateError : If an unsupported frame type
  is given.

*/
void
vpRobotBiclops::getPosition (const vpRobot::vpControlFrameType frame,
			     vpColVector & q)
{
  switch(frame)
  {
  case vpRobot::CAMERA_FRAME :
    vpERROR_TRACE ("Cannot get position in camera frame: not implemented");
    throw vpRobotException (vpRobotException::wrongStateError,
			    "Cannot get position in camera frame: "
			    "not implemented");
    break;
  case vpRobot::REFERENCE_FRAME:
    vpERROR_TRACE ("Cannot get position in reference frame: "
		 "not implemented");
    throw vpRobotException (vpRobotException::wrongStateError,
			    "Cannot get position in reference frame: "
			    "not implemented");
    break;
  case vpRobot::MIXT_FRAME:
    vpERROR_TRACE ("Cannot get position in mixt frame: "
		 "not implemented");
    throw vpRobotException (vpRobotException::wrongStateError,
			    "Cannot get position in mixt frame: "
			    "not implemented");
    break;
  case vpRobot::ARTICULAR_FRAME:
    break ;
  }

  vpRobot::vpRobotStateType state;
  state = vpRobot::getRobotState();

  switch (state) {
  case STATE_STOP:
  case STATE_POSITION_CONTROL:
    q = controller.getPosition();

    break;
  case STATE_VELOCITY_CONTROL:
  case STATE_ACCELERATION_CONTROL:
  default:
    q.resize(vpBiclops::ndof);

    vpDEBUG_TRACE (12, "Lock mutex vpMeasure_mutex");
    pthread_mutex_lock(&vpMeasure_mutex); // Wait until a position is available

    vpRobotBiclopsController::shmType shm;

    vpDEBUG_TRACE (12, "Lock mutex vpShm_mutex");
    pthread_mutex_lock(&vpShm_mutex);

    shm = controller.readShm();

    vpDEBUG_TRACE (12, "unlock mutex vpShm_mutex");
    pthread_mutex_unlock(&vpShm_mutex);

    for (unsigned int i=0; i < vpBiclops::ndof; i ++) {
      q[i] = shm.actual_q[i];
    }

    vpCDEBUG(11) << "++++++++ Measure actuals: " << q.t();

    vpDEBUG_TRACE (12, "unlock mutex vpMeasure_mutex");
    pthread_mutex_unlock(&vpMeasure_mutex); // A position is available

    break;

  }
}
/*!
  Compute the pose from a set of n 2D points (x,y) and m 3D points
  (X,Y,Z) using the Ransac algorithm. It is not assumed that the 2D and
  3D points are registered (there are n*m possibilities).

  At least numberOfInlierToReachAConsensus true correspondences are
  required to validate the pose.

  The inliers are given in xi, yi, Xi, Yi, Zi.

  The pose is returned in cMo.

  \param n : Number of 2d points.
  \param x : Array (of size \e n) of the x coordinates of the 2d points.
  \param y : Array (of size \e n) of the y coordinates of the 2d points.
  \param m : Number of 3d points.
  \param X : Array (of size \e m) of the oX coordinates of the 3d points.
  \param Y : Array (of size \e m) of the oY coordinates of the 3d points.
  \param Z : Array (of size \e m) of the oZ coordinates of the 3d points.
  \param numberOfInlierToReachAConsensus : The minimum number of inliers
  required to consider a trial as correct.
  \param threshold : The maximum error allowed between the 2d points and the
  reprojection of its associated 3d points by the current pose (in meters).
  \param ninliers : Number of inliers found for the best solution.
  \param xi : Array (of size \e ninliers) of the x coordinates of the inliers.
  \param yi : Array (of size \e ninliers) of the y coordinates of the inliers.
  \param Xi : Array (of size \e ninliers) of the oX coordinates of the inliers.
  \param Yi : Array (of size \e ninliers) of the oY coordinates of the inliers.
  \param Zi : Array (of size \e ninliers) of the oZ coordinates of the inliers.
  \param cMo : The computed pose (best solution).
  \param maxNbTrials : Maximum number of trials before considering a solution
  fitting the required \e numberOfInlierToReachAConsensus and \e threshold
  cannot be found.
 */
void
vpPose::ransac(const unsigned int n,
         const double *x, const double *y,
         const unsigned int m,
         const double *X, const double *Y, const double *Z,
         const int  numberOfInlierToReachAConsensus,
         const double threshold,
         unsigned int &ninliers,
         vpColVector &xi,  vpColVector &yi,
         vpColVector &Xi,  vpColVector &Yi,  vpColVector &Zi,
         vpHomogeneousMatrix &cMo,
         const int maxNbTrials)
{


  double tms = vpTime::measureTimeMs() ;
  vpColVector data ;
  unsigned int i;
  vpPose::initRansac(n,x,y,m,X,Y,Z, data) ;

  vpColVector M(16) ;
  vpColVector inliers(n*m) ;
  vpRansac<vpPose>::ransac(n*m,data,4,
         threshold, M,inliers,
         numberOfInlierToReachAConsensus, 0.0, maxNbTrials) ;


  // we count the number of inliers
  ninliers = 0 ;
  for(i=0 ; i < n*m ; i++)
  {
    //if (inliers[i]==1)
    if (std::fabs(inliers[i]-1) <= std::fabs(vpMath::maximum(inliers[i], 1.)) * std::numeric_limits<double>::epsilon())
    {
      ninliers++ ;
    }
  }

  xi.resize(ninliers) ;
  yi.resize(ninliers) ;
  Xi.resize(ninliers) ;
  Yi.resize(ninliers) ;
  Zi.resize(ninliers) ;

  unsigned int k =0 ;
  for(i=0 ; i < n*m ; i++)
  {
    //if (inliers[i]==1)
    if (std::fabs(inliers[i]-1) <= std::fabs(vpMath::maximum(inliers[i], 1.)) * std::numeric_limits<double>::epsilon())
    {
      xi[k] = data[5*i] ;
      yi[k] = data[5*i+1] ;
      Xi[k] = data[5*i+2] ;
      Yi[k] = data[5*i+3] ;
      Zi[k] = data[5*i+4] ;
      k++ ;
    }
  }

  for (i=0 ; i <16 ; i++)
  {
      cMo.data[i] = M[i];
  }

  std::cout << vpTime::measureTimeMs() - tms << "ms" << std::endl ;

}
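A hedged calling sketch for this overload (all point values are illustrative placeholders; the static-style call mirrors the body above):

  const unsigned int n = 4, m = 4;
  double x[n] = {0.1, 0.2, 0.3, 0.4}, y[n] = {0.1, 0.1, 0.2, 0.2};
  double X[m] = {0., 0.1, 0.1, 0.}, Y[m] = {0., 0., 0.1, 0.1}, Z[m] = {0., 0., 0., 0.};
  unsigned int ninliers;
  vpColVector xi, yi, Xi, Yi, Zi;
  vpHomogeneousMatrix cMo;
  vpPose::ransac(n, x, y, m, X, Y, Z, 4, 0.001, ninliers,
                 xi, yi, Xi, Yi, Zi, cMo, 10000);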