Example #1
0
/*!
 * Manikandan. B
 * Photometric moments v2
 * Computes dense photometric image moments: each pixel contributes
 * x^p * y^q * w(x,y) to moment m_{pq}, where (x,y) is the pixel position in
 * metres and w is the photometric weight derived from the gray level.
 * Intended to be used by 'vpMomentObject's of type DENSE_FULL_OBJECT.
 * @param image                   : Grayscale image.
 * @param cam                     : Camera parameters used to convert pixel coordinates to metres.
 * @param bg_type                 : White/Black background surrounding the object in the image.
 *                                  With a WHITE background the weight is (1 - I(x,y)), with a
 *                                  BLACK background it is I(x,y).
 * @param normalize_with_pix_size : If SET, the moments, after calculation, are normalized w.r.t.
 *                                  pixel size available from camera parameters.
 */
void vpMomentObject::fromImage(const vpImage<unsigned char>& image, const vpCameraParameters& cam,
    vpCameraImgBckGrndType bg_type, bool normalize_with_pix_size)
{
  std::vector<double> cache(order*order, 0.);
  values.assign(order*order, 0.);

  //double Imax = static_cast<double>(image.getMaxValue());

  // Optional intensity normalization: scale gray levels into [0,1] so the
  // image behaves like a probability density function.
  double iscale = 1.0;
  if (flg_normalize_intensity) {
    const double Imax = 255.;   // To check the effect of gray level change. ISR Coimbra
    iscale = 1.0/Imax;
  }

  // The WHITE and BLACK background cases share the same accumulation loop;
  // only the per-pixel weight differs (1 - I vs I).
  const bool white_bg = (bg_type == vpMomentObject::WHITE);

  for (unsigned int j = 0; j < image.getRows(); j++) {
    for (unsigned int i = 0; i < image.getCols(); i++) {
      double intensity = static_cast<double>(image[j][i])*iscale;
      if (white_bg)
        intensity = 1. - intensity;

      // (x,y) - pixel coordinates converted to metres
      double x = 0;
      double y = 0;
      vpPixelMeterConversion::convertPoint(cam, i, j, x, y);

      // Fill 'cache' with x^p * y^q * weight for the current pixel ...
      cacheValues(cache, x, y, intensity);

      // ... and accumulate it into the moments array 'values'
      for (unsigned int k = 0; k < order; k++) {
        const unsigned int kidx = k*order;
        for (unsigned int l = 0; l < order-k; l++) {
          values[kidx+l] += cache[kidx+l];
        }
      }
    }
  }

  if (normalize_with_pix_size) {
    // Normalisation equivalent to sampling interval/pixel size delX x delY
    const double norm_factor = 1./(cam.get_px()*cam.get_py());
    for (std::vector<double>::iterator it = values.begin(); it != values.end(); ++it) {
      *it = (*it) * norm_factor;
    }
  }
}
Example #2
0
/*!
 * Re-initialize the KLT feature tracking from the current image and pose.
 *
 * Stores the current pose \e cMo as the new reference pose, builds a mask of
 * the currently visible (and tracked) polygons/cylinders, and re-detects KLT
 * features inside that mask before (re)initializing each model primitive.
 *
 * @param I : Current grayscale image used to (re)detect the KLT features.
 */
void 
vpMbKltTracker::reinit(const vpImage<unsigned char>& I)
{
  // The current pose becomes the new reference; reset the incremental
  // transformation accumulated since the previous (re)initialization.
  c0Mo = cMo;
  ctTc0.eye();

  // Keep an OpenCV copy of the current image for the KLT tracker.
  vpImageConvert::convert(I, cur);

  // Update the camera field of view used by the visibility tests below.
  cam.computeFov(I.getWidth(), I.getHeight());

  if(useScanLine){
    faces.computeClippedPolygons(cMo,cam);
    faces.computeScanLineRender(cam, I.getWidth(), I.getHeight());
  }
  
  // mask
  // Build a binary mask restricting feature detection to the visible model
  // surface. The storage type depends on the OpenCV version (cv::Mat vs IplImage).
#if (VISP_HAVE_OPENCV_VERSION >= 0x020408)
  cv::Mat mask((int)I.getRows(), (int)I.getCols(), CV_8UC1, cv::Scalar(0));
#else
  IplImage* mask = cvCreateImage(cvSize((int)I.getWidth(), (int)I.getHeight()), IPL_DEPTH_8U, 1);
  cvZero(mask);
#endif

  vpMbtDistanceKltPoints *kltpoly;
  vpMbtDistanceKltCylinder *kltPolyCylinder;
  if(useScanLine){
    // Scanline rendering already produced a visibility mask; reuse it.
    vpImageConvert::convert(faces.getMbScanLineRenderer().getMask(), mask);
  }
  else{
    // Otherwise rasterize each visible tracked polygon/cylinder into the mask.
    unsigned char val = 255/* - i*15*/;
    for(std::list<vpMbtDistanceKltPoints*>::const_iterator it=kltPolygons.begin(); it!=kltPolygons.end(); ++it){
      kltpoly = *it;
      if(kltpoly->polygon->isVisible() && kltpoly->isTracked() && kltpoly->polygon->getNbPoint() > 2){
        //need to changeFrame when reinit() is called by postTracking
        //other solution is
        kltpoly->polygon->changeFrame(cMo);
        kltpoly->polygon->computePolygonClipped(cam); // Might not be necessary when scanline is activated
        kltpoly->updateMask(mask, val, maskBorder);
      }
    }

    for(std::list<vpMbtDistanceKltCylinder*>::const_iterator it=kltCylinders.begin(); it!=kltCylinders.end(); ++it){
      kltPolyCylinder = *it;

      if(kltPolyCylinder->isTracked())
      {
        // Clip every face of the cylinder's bounding box that is visible
        // before rasterizing the cylinder into the mask.
        for(unsigned int k = 0 ; k < kltPolyCylinder->listIndicesCylinderBBox.size() ; k++)
        {
          unsigned int indCylBBox = (unsigned int)kltPolyCylinder->listIndicesCylinderBBox[k];
          if(faces[indCylBBox]->isVisible() && faces[indCylBBox]->getNbPoint() > 2u){
            faces[indCylBBox]->computePolygonClipped(cam); // Might not be necessary when scanline is activated
          }
        }

        kltPolyCylinder->updateMask(mask, val, maskBorder);
      }
    }
  }
  
  // Detect KLT features inside the mask only.
  tracker.initTracking(cur, mask);
//  tracker.track(cur); // AY: Not sure to be usefull but makes sure that the points are valid for tracking and avoid too fast reinitialisations.
//  vpCTRACE << "init klt. detected " << tracker.getNbFeatures() << " points" << std::endl;

  // Distribute the freshly detected features to each visible tracked polygon.
  for(std::list<vpMbtDistanceKltPoints*>::const_iterator it=kltPolygons.begin(); it!=kltPolygons.end(); ++it){
    kltpoly = *it;
    if(kltpoly->polygon->isVisible() && kltpoly->isTracked() && kltpoly->polygon->getNbPoint() > 2){
      kltpoly->init(tracker);
    }
  }

  // ... and to each tracked cylinder (which also needs the current pose).
  for(std::list<vpMbtDistanceKltCylinder*>::const_iterator it=kltCylinders.begin(); it!=kltCylinders.end(); ++it){
    kltPolyCylinder = *it;

    if(kltPolyCylinder->isTracked())
      kltPolyCylinder->init(tracker, cMo);
  }

  // Legacy OpenCV allocates the mask on the heap; release it here.
#if (VISP_HAVE_OPENCV_VERSION < 0x020408)
  cvReleaseImage(&mask);
#endif
}
Example #3
0
/*!
 * Computes binary dense moments from a thresholded grayscale image: every
 * pixel whose intensity is strictly greater than \e threshold contributes
 * x^p * y^q (with (x,y) the pixel position in metres) to moment m_{pq}.
 * The result is normalized by the pixel size (1 / (px * py)).
 * @param image     : Grayscale image.
 * @param threshold : Pixels with intensity > threshold belong to the object.
 * @param cam       : Camera parameters used to convert pixel coordinates to metres.
 */
void vpMomentObject::fromImage(const vpImage<unsigned char>& image, unsigned char threshold, const vpCameraParameters& cam){
#ifdef VISP_HAVE_OPENMP
  #pragma omp parallel shared(threshold)
  {
    // Per-thread partial sums, merged into 'values' after the loop.
    std::vector<double> curvals(order*order, 0.);

    #pragma omp for nowait //automatically organize loop counter between threads
    for(int j=0;j<(int)image.getRows();j++){ // row-major traversal matches image layout (cache friendly)
      for(int i=0;i<(int)image.getCols();i++){
        unsigned int i_ = static_cast<unsigned int>(i);
        unsigned int j_ = static_cast<unsigned int>(j);
        if(image[j_][i_]>threshold){
          double x=0;
          double y=0;
          vpPixelMeterConversion::convertPoint(cam,i_,j_,x,y);

          // Accumulate x^l * y^k for all orders, building the powers incrementally.
          double yval=1.;
          for(unsigned int k=0;k<order;k++){
            double xval=1.;
            for(unsigned int l=0;l<order-k;l++){
              curvals[(k*order+l)]+=(xval*yval);
              xval*=x;
            }
            yval*=y;
          }
        }
      }
    }

    #pragma omp master //only set this variable in master thread
    {
      values.assign(order*order, 0.);
    }

    // All threads wait until 'values' is reset, then merge their partial sums.
    #pragma omp barrier
    for(unsigned int k=0;k<order;k++){
      for(unsigned int l=0;l<order-k;l++){
        #pragma omp atomic
        values[k*order+l]+= curvals[k*order+l];
      }
    }

  }
#else
    std::vector<double> cache(order*order,0.);
    values.assign(order*order,0.);
    for(unsigned int j=0;j<image.getRows();j++){ // row-major traversal matches image layout (cache friendly)
        for(unsigned int i=0;i<image.getCols();i++){
            if(image[j][i]>threshold){
                double x=0;
                double y=0;
                vpPixelMeterConversion::convertPoint(cam,i,j,x,y);
                // Fill 'cache' with x^p * y^q for the current pixel ...
                cacheValues(cache,x,y);
                // ... and accumulate it into the moments array 'values'
                for(unsigned int k=0;k<order;k++){
                    for(unsigned int l=0;l<order-k;l++){
                        values[k*order+l]+=cache[k*order+l];
                    }
                }
            }
        }
    }
#endif

    //Normalisation equivalent to sampling interval/pixel size delX x delY
    double norm_factor = 1./(cam.get_px()*cam.get_py());
    for (std::vector<double>::iterator it = values.begin(); it!=values.end(); ++it) {
        *it = (*it) * norm_factor;
    }
}