Example #1
/*!
  Compute a NURBS curve approximating a set of data points.

  The data points are approximated using a least-squares method.

  The result of the method is composed of a knot vector, a set of
  control points and a set of associated weights.

  \param l_crossingPoints : The list of data points to approximate.

  \param n : The desired number of control points. This parameter \e n
  must be less than or equal to the number of data points.
*/
void vpNurbs::globalCurveApprox(vpList<vpMeSite> &l_crossingPoints, unsigned int n)
{
  std::vector<vpImagePoint> v_crossingPoints;
  l_crossingPoints.front();
  while(!l_crossingPoints.outside())
  {
    vpMeSite s = l_crossingPoints.value();
    vpImagePoint pt(s.ifloat,s.jfloat);
    v_crossingPoints.push_back(pt);
    l_crossingPoints.next();
  }
  globalCurveApprox(v_crossingPoints, p, n, knots, controlPoints, weights);
}
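
A minimal usage sketch for this overload (assuming the legacy vpList / vpMeSite API used above; the site coordinates and the number of control points are hypothetical):

#include <visp/vpList.h>
#include <visp/vpMeSite.h>
#include <visp/vpNurbs.h>

int main()
{
  vpList<vpMeSite> sites;

  // Hypothetical data points lying roughly on a curve (pixel coordinates).
  for (int k = 0; k < 20; k++)
  {
    vpMeSite s;
    s.ifloat = 100.0 + 2.0 * k;       // row
    s.jfloat = 50.0 + 0.5 * k * k;    // column
    sites.addRight(s);
  }

  vpNurbs nurbs;
  // Approximate the 20 sites with 6 control points (6 <= 20, as required).
  nurbs.globalCurveApprox(sites, 6);

  return 0;
}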
Example #2
/*!
  Compute a NURBS curve passing through a set of data points.
  
  The result of the method is composed of a knot vector, a set of control points and a set of associated weights.
  
  \param l_crossingPoints : The list of data points to interpolate. A point closer than 10 pixels to the previously kept point is discarded.
*/
void vpNurbs::globalCurveInterp(vpList<vpMeSite> &l_crossingPoints)
{
  std::vector<vpImagePoint> v_crossingPoints;
  l_crossingPoints.front();
  vpMeSite s = l_crossingPoints.value();
  vpImagePoint pt(s.ifloat,s.jfloat);
  vpImagePoint pt_1 = pt;
  v_crossingPoints.push_back(pt);
  l_crossingPoints.next();
  while(!l_crossingPoints.outside())
  {
    s = l_crossingPoints.value();
    pt.set_ij(s.ifloat,s.jfloat);
    if (vpImagePoint::distance(pt_1,pt) >= 10)
    {
      v_crossingPoints.push_back(pt);
      pt_1 = pt;
    }
    l_crossingPoints.next();
  }
  globalCurveInterp(v_crossingPoints, p, knots, controlPoints, weights);
}
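
A similar sketch for the interpolating overload (again assuming the legacy vpList / vpMeSite API; the call to computeCurvePoint() at the end is an assumption about the vpNurbs interface and is only meant to show how the resulting curve could be evaluated):

#include <visp/vpImagePoint.h>
#include <visp/vpList.h>
#include <visp/vpMeSite.h>
#include <visp/vpNurbs.h>

int main()
{
  vpList<vpMeSite> sites;

  // Hypothetical tracked sites, spaced more than 10 pixels apart so that
  // none of them is discarded by the distance test in the method above.
  for (int k = 0; k < 10; k++)
  {
    vpMeSite s;
    s.ifloat = 100.0 + 12.0 * k;   // row
    s.jfloat = 200.0 + 15.0 * k;   // column
    sites.addRight(s);
  }

  vpNurbs nurbs;
  nurbs.globalCurveInterp(sites);

  // Evaluate the interpolated curve in the middle of its parametric range
  // (assumes vpNurbs::computeCurvePoint(double) is available).
  vpImagePoint middle = nurbs.computeCurvePoint(0.5);
  (void)middle;

  return 0;
}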
Example #3
/*!
  Get the view of the virtual camera. Be careful: the image \f$ I \f$ is modified. The projected images are not added as an overlay!
  
  With this method, a list of images is projected into the image \f$ I \f$. You first have to initialise several vpImageSimulator objects, store them in a vpList, and finally project them into the image with this method. The depth of the 3D scene is handled so that an image in the foreground hides an image in the background.
  
  The following example shows how to use the method:
  
  \code
  #include <visp/vpImage.h>
  #include <visp/vpImageSimulator.h>
  
  int main()
  {
    vpImage<vpRGBa> Icamera(480,640,0);
    vpImage<vpRGBa> Iimage(60,60);
    
    // Initialise the image which will be projected into the image Icamera
    vpRGBa colorb(0,0,255);
    vpRGBa colorw(255,255,255);
    vpRGBa colorr(255,0,0);
    for(int i = 0; i < 60; i++)
    {
      for(int j = 0; j < 20; j++)
        Iimage[i][j] = colorb;
      for(int j = 20; j < 40; j++)
        Iimage[i][j] = colorw;
      for(int j = 40; j < 60; j++)
        Iimage[i][j] = colorr;
    }
    
    // Initialise the 3D coordinates of the Iimage corners
    vpColVector X[4];
    for (int i = 0; i < 4; i++) X[i].resize(3);
    // Top left corner
    X[0][0] = -1;
    X[0][1] = -1;
    X[0][2] = 1;
  
    // Top right corner
    X[1][0] = 1;
    X[1][1] = -1;
    X[1][2] = 1;
  
    // Bottom right corner
    X[2][0] = 1;
    X[2][1] = 1;
    X[2][2] = 1;
  
    //Bottom left corner
    X[3][0] = -1;
    X[3][1] = 1;
    X[3][2] = 1;
    
    vpImageSimulator sim;
    sim.init(Iimage, X);
    
    // Top left corner
    X[0][0] = -1;
    X[0][1] = -1;
    X[0][2] = 1;
  
    // Top right corner
    X[1][0] = 1;
    X[1][1] = -1;
    X[1][2] = 1;
  
    // Bottom right corner
    X[2][0] = 1;
    X[2][1] = 1;
    X[2][2] = 1;
  
    //Bottom left corner
    X[3][0] = -1;
    X[3][1] = 1;
    X[3][2] = 1;
    
    vpImageSimulator sim2;
    sim2.init(Iimage, X);
    
    sim.setCameraPosition(vpHomogeneousMatrix(0,0,5,vpMath::rad(0),vpMath::rad(30),0));
    sim2.setCameraPosition(vpHomogeneousMatrix(0,0,5,vpMath::rad(0),vpMath::rad(-30),0));
    
    vpList<vpImageSimulator> listSim;
    listSim.addRight(sim);
    listSim.addRight(sim2);
    
    sim.setCameraPosition(vpHomogeneousMatrix(0,0,5,vpMath::rad(60),vpMath::rad(0),0));
    
    vpCameraParameters cam(868.0, 869.0, 320, 240);
    
    vpImageSimulator::getImage(Icamera,listSim,cam);
    
    return 0;
  }
  \endcode
  
  \param I : The image used to store the result
  \param list : List of vpImageSimulator to project
  \param cam : The parameters of the virtual camera
*/
void
vpImageSimulator::getImage(vpImage<vpRGBa> &I, vpList<vpImageSimulator> &list, const vpCameraParameters cam)
{
  
  unsigned int width = I.getWidth();
  unsigned int height = I.getHeight();
  
  int nbsimList = list.nbElements();
  
  if (nbsimList < 1)
    return;
  
  vpImageSimulator** simList = new vpImageSimulator* [nbsimList];
  
  double topFinal = height + 1;
  double bottomFinal = -1;
  double leftFinal = width+1;
  double rightFinal = -1;
  
  list.front();
  
  int unvisible = 0;
  for (int i = 0; i < nbsimList; i++)
  {
    vpImageSimulator* sim = &(list.value());
    list.next();
    if (sim->visible)
      simList[i] = sim;
    else
      unvisible++;
  }
  nbsimList = nbsimList - unvisible;
  
   if (nbsimList < 1)
   {
     delete[] simList;
     return;
   }  
      
  for (int i = 0; i < nbsimList; i++)
  {
    
    simList[i]->getRoi(width,height,cam,simList[i]->pt,simList[i]->rect);
    
    if (topFinal > simList[i]->rect.getTop()) topFinal = simList[i]->rect.getTop();
    if (bottomFinal < simList[i]->rect.getBottom()) bottomFinal = simList[i]->rect.getBottom();
    if (leftFinal > simList[i]->rect.getLeft()) leftFinal = simList[i]->rect.getLeft();
    if (rightFinal < simList[i]->rect.getRight()) rightFinal = simList[i]->rect.getRight();  
  }
  
  double zmin = -1;
  int indice = -1;
  vpRGBa *bitmap = I.bitmap;
  vpImagePoint ip;
    
  for (int i = (int)topFinal; i < (int)bottomFinal; i++)
  {
    for (int j = (int)leftFinal; j < (int)rightFinal; j++)
    {
      zmin = -1;
      indice = -1; // reset for each pixel so that uncovered pixels are left untouched
      double x=0,y=0;
      ip.set_ij(i,j);
      vpPixelMeterConversion::convertPoint(cam,ip, x,y);
      ip.set_ij(y,x);
      for (int k = 0; k < nbsimList; k++)
      {
        double z = 0;
        if (simList[k]->getPixelDepth(ip,z))
        {
          if (z < zmin || zmin < 0)
          {
            zmin = z;
            indice = k;
          }
        }
      }
      if (indice >= 0)
      {
        if (simList[indice]->colorI == GRAY_SCALED)
        {
          unsigned char Ipixelplan = 255;
          simList[indice]->getPixel(ip,Ipixelplan);
          vpRGBa pixelcolor;
          pixelcolor.R = Ipixelplan;
          pixelcolor.G = Ipixelplan;
          pixelcolor.B = Ipixelplan;
          *(bitmap+i*width+j) = pixelcolor;
        }
        else if (simList[indice]->colorI == COLORED)
        {
          vpRGBa Ipixelplan(255,255,255);
          simList[indice]->getPixel(ip,Ipixelplan);
          //unsigned char pixelgrey = 0.2126 * Ipixelplan.R + 0.7152 * Ipixelplan.G + 0.0722 * Ipixelplan.B;
          *(bitmap+i*width+j)=Ipixelplan;
        }
      }
    }
  }
  
  delete[] simList;
}
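
The per-pixel loop above implements a simple depth test: among the simulators whose plane covers the pixel, the one with the smallest positive depth wins, and a pixel covered by no simulator is left untouched. A small, self-contained illustration of that selection rule (a hypothetical helper, not part of vpImageSimulator):

#include <utility>
#include <vector>

// Each candidate is a (covers-the-pixel, depth) pair; the nearest covering
// surface wins, and -1 is returned when the pixel is not covered at all.
int nearestSurface(const std::vector<std::pair<bool, double> > &candidates)
{
  double zmin = -1;
  int index = -1;
  for (unsigned int k = 0; k < candidates.size(); k++)
  {
    if (candidates[k].first && (candidates[k].second < zmin || zmin < 0))
    {
      zmin = candidates[k].second;
      index = (int)k;
    }
  }
  return index;
}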
Example #4
static void
computeInteractionMatrixFromList  (/*const*/ vpList<vpBasicFeature *> & featureList,
				   /*const*/ vpList<int> & featureSelectionList,
				   vpMatrix & L)
{
  if (featureList.empty())
    {
      vpERROR_TRACE("feature list empty, cannot compute Ls") ;
      throw(vpServoException(vpServoException::noFeatureError,
			     "feature list empty, cannot compute Ls")) ;
    }

  /* The matrix dimension is not known before the assignment loop.
   * It thus has to be allocated on the fly, inside the loop.
   * The first assumption is that the size has not changed. A doubling
   * reallocation (realloc(dim*2)) is done if necessary. In particular,
   * [log_2(dim)+1] reallocations are done for the first matrix computation.
   * If the allocated size is too large, a correction is done after the loop.
   * The algorithmic cost is linear in the number of assignments, logarithmic
   * in the number of allocations and linear in the allocated size.
   */

  /* First assumption: matrix dimensions have not changed. If 0, they are
   * initialized to dim 1.*/
  int rowL = L.getRows();
  const int colL = 6;
  if (0 == rowL) { rowL = 1; L.resize(rowL, colL); }

  /* matrixTmp is used to store the interaction matrix returned by
   * interaction(). */
  vpMatrix matrixTmp;
  int rowMatrixTmp, colMatrixTmp;

  /* The cursor is the index of the next row of the matrix to be
   * assigned. A memory reallocation is done when the cursor goes
   * out of the currently allocated range. */
  int cursorL = 0;

  for (featureList.front(), featureSelectionList.front();
       !featureList.outside();
       featureSelectionList.next(), featureList.next())
    {
      vpBasicFeature * sPTR = featureList.value();
      const int select = featureSelectionList.value();

      /* Get the interaction matrix of the current feature. */
      matrixTmp = sPTR->interaction(select);
      rowMatrixTmp = matrixTmp.getRows();
      colMatrixTmp = matrixTmp.getCols();

      /* Check the matrix L size, and realloc if needed. */
      while (rowMatrixTmp + cursorL > rowL)
        { rowL *= 2; L.resize(rowL, colL, false); vpDEBUG_TRACE(15, "Realloc!"); }

      /* Copy the temporary matrix into L. */
      for (int k = 0; k < rowMatrixTmp; ++k, ++cursorL)
        for (int j = 0; j < colMatrixTmp; ++j)
          { L[cursorL][j] = matrixTmp[k][j]; }
    }

  L.resize(cursorL, colL, false);

  return;
}
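
The growth strategy described in the comment above can be isolated in a short sketch (hypothetical helper names; it assumes, as the code above does, that vpMatrix::resize() with the nullify flag set to false keeps the already written rows when the column count is unchanged):

#include <visp/vpMatrix.h>
#include <vector>

// Stack a sequence of 6-column blocks into L, doubling the row capacity
// whenever it is exceeded and shrinking to the exact size at the end.
static void stackBlocks(std::vector<vpMatrix> &blocks, vpMatrix &L)
{
  const int colL = 6;
  int rowL = 1;
  L.resize(rowL, colL);

  int cursor = 0;
  for (unsigned int b = 0; b < blocks.size(); ++b)
  {
    // Double the capacity until the next block fits (amortised O(1) per row).
    while (cursor + (int)blocks[b].getRows() > rowL)
    {
      rowL *= 2;
      L.resize(rowL, colL, false);
    }
    for (int k = 0; k < (int)blocks[b].getRows(); ++k, ++cursor)
      for (int j = 0; j < (int)blocks[b].getCols(); ++j)
        L[cursor][j] = blocks[b][k][j];
  }
  L.resize(cursor, colL, false);  // final correction to the exact row count
}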
Example #5
/*!
  Compute the pose from a list \e lp of 2D points (x,y) and a list \e lP of 3D points
  (X,Y,Z) using the RANSAC algorithm. It is not assumed that
  the 2D and 3D points are registered.

  At least \e numberOfInlierToReachAConsensus true correspondences are required
  to validate the pose.

  The inliers are returned in the list \e lPi of vpPoint.

  The pose is returned in \e cMo.
 */
void
vpPose::ransac(vpList<vpPoint> &lp,
	       vpList<vpPoint> &lP,
	       const int numberOfInlierToReachAConsensus,
	       const double threshold,
	       int &ninliers,
	       vpList<vpPoint> &lPi,
	       vpHomogeneousMatrix &cMo)
{
  int n = lp.nbElement() ;
  int m = lP.nbElement() ;

  double *x, *y;
  x = new double [n];
  y = new double [n];

  vpPoint pin ;

  lp.front() ;
  int i = 0 ;
  while(!lp.outside())
  {
    pin = lp.value() ; lp.next() ;
    x[i] = pin.get_x() ;
    y[i] = pin.get_y() ;
    i++ ;
  }

  double *X, *Y, *Z;
  X = new double [m];
  Y = new double [m];
  Z = new double [m];
  lP.front() ;
  i = 0 ;
  while(!lP.outside())
  {
    pin = lP.value() ; lP.next() ;
    X[i] = pin.get_oX() ;
    Y[i] = pin.get_oY() ;
    Z[i] = pin.get_oZ() ;
    i++ ;
  }

  vpColVector xi,yi,Xi,Yi,Zi ;

  ransac(n,x,y,
	 m,X,Y,Z, numberOfInlierToReachAConsensus,
	 threshold,
	 ninliers,
	 xi,yi,Xi,Yi,Zi,
	 cMo) ;


  for( i=0 ; i < ninliers ; i++)
  {
    vpPoint Pi ;
    Pi.setWorldCoordinates(Xi[i],Yi[i], Zi[i]) ;
    Pi.set_x(xi[i]) ;
    Pi.set_y(yi[i]) ;
    lPi += Pi ;
  }


  delete [] x;
  delete [] y;
  delete [] X;
  delete [] Y;
  delete [] Z;

}
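
A minimal usage sketch for this overload (the ground-truth pose, the point coordinates, the consensus size and the threshold below are hypothetical; the 2D observations are generated by projecting the 3D points so that the example is self-contained):

#include <visp/vpHomogeneousMatrix.h>
#include <visp/vpList.h>
#include <visp/vpMath.h>
#include <visp/vpPoint.h>
#include <visp/vpPose.h>

int main()
{
  // Hypothetical ground-truth pose used to generate the 2D observations.
  vpHomogeneousMatrix cMo_true(0.1, -0.1, 0.5, 0, 0, vpMath::rad(10));

  vpList<vpPoint> lp;  // 2D points (x, y)
  vpList<vpPoint> lP;  // 3D points (X, Y, Z) in the object frame

  for (int i = 0; i < 8; i++)
  {
    vpPoint P;
    P.setWorldCoordinates(0.05 * i, 0.03 * (i % 4), 0.02 * (i % 3));
    P.project(cMo_true);  // fills x and y from the 3D coordinates and the pose
    lP += P;
    lp += P;
  }

  vpList<vpPoint> lPi;      // inliers selected by RANSAC
  vpHomogeneousMatrix cMo;  // estimated pose
  int ninliers = 0;

  vpPose pose;
  pose.ransac(lp, lP, 4, 0.01, ninliers, lPi, cMo);

  return 0;
}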