/*!
  Destructor: releases the moving-edge site list and the sample-angle list.
*/
vpMeEllipse::~vpMeEllipse()
{
  vpCDEBUG(1) << "begin vpMeEllipse::~vpMeEllipse() " << std::endl ;

  // Drop all tracked sites and their associated angles.
  angle.clear();
  list.clear();

  vpCDEBUG(1) << "end vpMeEllipse::~vpMeEllipse() " << std::endl ;
}
/*!
  Perform the two-sided Hinkley test.

  A downward jump is detected if \f$ M_k - S_k > \alpha \f$.
  An upward jump is detected if \f$ T_k - N_k > \alpha \f$.

  \param signal : Observed signal \f$ s(t) \f$.

  \sa setDelta(), setAlpha(), testDownwardJump(), testUpwardJump()
*/
vpHinkley::vpHinkleyJumpType vpHinkley::testDownUpwardJump(double signal)
{
  nsignal ++; // One more sample in the signal
  if (nsignal == 1) mean = signal; // The first sample initializes the mean

  // Update the cumulated test variables
  computeSk(signal);
  computeTk(signal);
  computeMk();
  computeNk();

  vpCDEBUG(2) << "alpha: " << alpha << " dmin2: " << dmin2
              << " signal: " << signal << " Sk: " << Sk << " Mk: " << Mk
              << " Tk: " << Tk << " Nk: " << Nk << std::endl;

  // Check whether one of the cumulated variables exceeds the threshold
  vpHinkleyJumpType jump = noJump;
  if ((Mk - Sk) > alpha)
    jump = downwardJump;
  else if ((Tk - Nk) > alpha)
    jump = upwardJump;

#ifdef VP_DEBUG
  if (VP_DEBUG_MODE >= 2) {
    switch(jump) {
    case noJump: std::cout << "noJump " << std::endl; break;
    case downwardJump: std::cout << "downWardJump " << std::endl; break;
    case upwardJump: std::cout << "upwardJump " << std::endl; break;
    }
  }
#endif

  computeMean(signal);

  if ((jump == upwardJump) || (jump == downwardJump)) {
    vpCDEBUG(2) << "\n*** Reset the Hinkley test ***\n";
    // Restart the test from the current sample
    Sk = 0;
    Mk = 0;
    Tk = 0;
    Nk = 0;
    nsignal = 0;
    mean = signal;
  }

  return (jump);
}
// =================================================================== vpColVector vpRobust::simultMEstimator(vpColVector &residues) { double med=0; // Median double normmedian=0; // Normalized Median double sigma=0; // Standard Deviation unsigned int n_data = residues.getRows(); vpColVector norm_res(n_data); // Normalized Residue vpColVector w(n_data); vpCDEBUG(2) << "vpRobust MEstimator reached. No. data = " << n_data << std::endl; // Calculate Median unsigned int ind_med = (unsigned int)(ceil(n_data/2.0))-1; med = select(residues, 0, (int)n_data-1, (int)ind_med/*(int)n_data/2*/); // Normalize residues for(unsigned int i=0; i<n_data; i++) norm_res[i] = (fabs(residues[i]- med)); // Check for various methods. // For Huber compute Simultaneous scale estimate // For Others use MAD calculated on first iteration if(it==0) { normmedian = select(norm_res, 0, (int)n_data-1, (int)ind_med/*(int)n_data/2*/); // 1.48 keeps scale estimate consistent for a normal probability dist. sigma = 1.4826*normmedian; // Median Absolute Deviation } else { // compute simultaneous scale estimate sigma = simultscale(residues); } // Set a minimum threshold for sigma // (when sigma reaches the level of noise in the image) if(sigma < NoiseThreshold) { sigma= NoiseThreshold; } vpCDEBUG(2) << "MAD and C computed" << std::endl; psiHuber(sigma, norm_res,w); sig_prev = sigma; return w; }
/*! Initialization of the tracking. The line is defined thanks to the coordinates of two points. \param I : Image in which the line appears. \param ip1 : Coordinates of the first point. \param ip2 : Coordinates of the second point. */ void vpMeLine::initTracking(const vpImage<unsigned char> &I, const vpImagePoint &ip1, const vpImagePoint &ip2) { vpCDEBUG(1) <<" begin vpMeLine::initTracking()"<<std::endl ; int i1s, j1s, i2s, j2s; i1s = vpMath::round( ip1.get_i() ); i2s = vpMath::round( ip2.get_i() ); j1s = vpMath::round( ip1.get_j() ); j2s = vpMath::round( ip2.get_j() ); try{ // 1. On fait ce qui concerne les droites (peut etre vide) { // Points extremites PExt[0].ifloat = (float)ip1.get_i() ; PExt[0].jfloat = (float)ip1.get_j() ; PExt[1].ifloat = (float)ip2.get_i() ; PExt[1].jfloat = (float)ip2.get_j() ; double angle_ = atan2((double)(i1s-i2s),(double)(j1s-j2s)) ; a = cos(angle_) ; b = sin(angle_) ; // Real values of a, b can have an other sign. So to get the good values // of a and b in order to initialise then c, we call track(I) just below computeDelta(delta,i1s,j1s,i2s,j2s) ; delta_1 = delta; // vpTRACE("a: %f b: %f c: %f -b/a: %f delta: %f", a, b, c, -(b/a), delta); sample(I) ; } // 2. On appelle ce qui n'est pas specifique { vpMeTracker::initTracking(I) ; } // Call track(I) to give the good sign to a and b and to initialise c which can be used for the display track(I); } catch(...) { vpERROR_TRACE("Error caught") ; throw ; } vpCDEBUG(1) <<" end vpMeLine::initTracking()"<<std::endl ; }
/*!
  Perform the Hinkley test. A downward jump is detected if
  \f$ M_k - S_k > \alpha \f$.

  \param signal : Observed signal \f$ s(t) \f$.

  \sa setDelta(), setAlpha(), testUpwardJump()
*/
vpHinkley::vpHinkleyJumpType vpHinkley::testDownwardJump(double signal)
{
  nsignal ++; // One more sample in the signal
  if (nsignal == 1) mean = signal; // The first sample initializes the mean

  // Update the cumulated test variables
  computeSk(signal);
  computeMk();

  vpCDEBUG(2) << "alpha: " << alpha << " dmin2: " << dmin2
              << " signal: " << signal << " Sk: " << Sk << " Mk: " << Mk;

  // Check whether the cumulated variable exceeds the threshold
  vpHinkleyJumpType jump = noJump;
  if ((Mk - Sk) > alpha) jump = downwardJump;

#ifdef VP_DEBUG
  if (VP_DEBUG_MODE >=2) {
    switch(jump) {
    case noJump: std::cout << "noJump " << std::endl; break;
    case downwardJump: std::cout << "downWardJump " << std::endl; break;
    case upwardJump: std::cout << "upwardJump " << std::endl; break;
    }
  }
#endif

  computeMean(signal);

  if (jump == downwardJump)
  {
    vpCDEBUG(2) << "\n*** Reset the Hinkley test ***\n";
    // Restart the test
    Sk = 0;
    Mk = 0;
    nsignal = 0;
  }

  return (jump);
}
/*!
  Perform the Hinkley test. An upward jump is detected if
  \f$ T_k - N_k > \alpha \f$.

  \param signal : Observed signal \f$ s(t) \f$.

  \sa setDelta(), setAlpha(), testDownwardJump()
*/
vpHinkley::vpHinkleyJumpType vpHinkley::testUpwardJump(double signal)
{
  nsignal ++; // One more sample in the signal
  if (nsignal == 1) mean = signal; // The first sample initializes the mean

  // Update the cumulated test variables
  computeTk(signal);
  computeNk();

  vpCDEBUG(2) << "alpha: " << alpha << " dmin2: " << dmin2
              << " signal: " << signal << " Tk: " << Tk << " Nk: " << Nk;

  // Check whether the cumulated variable exceeds the threshold
  vpHinkleyJumpType jump = noJump;
  if ((Tk - Nk) > alpha) jump = upwardJump;

#ifdef VP_DEBUG
  if (VP_DEBUG_MODE >= 2) {
    switch(jump) {
    case noJump: std::cout << "noJump " << std::endl; break;
    case downwardJump: std::cout << "downWardJump " << std::endl; break;
    case upwardJump: std::cout << "upWardJump " << std::endl; break;
    }
  }
#endif

  computeMean(signal);

  if (jump == upwardJump)
  {
    vpCDEBUG(2) << "\n*** Reset the Hinkley test ***\n";
    // Restart the test
    Tk = 0;
    Nk = 0;
    nsignal = 0;
  }

  return (jump);
}
void vpRobust::MEstimator(const vpRobustEstimatorType method, const vpColVector &residues, const vpColVector& all_residues, vpColVector &weights) { double normmedian=0; // Normalized median double sigma=0;// Standard Deviation unsigned int n_all_data = all_residues.getRows(); vpColVector all_normres(n_all_data); // compute median with the residues vector, return all_normres which are the normalized all_residues vector. normmedian = computeNormalizedMedian(all_normres,residues,all_residues,weights); // 1.48 keeps scale estimate consistent for a normal probability dist. sigma = 1.4826*normmedian; // Median Absolute Deviation // Set a minimum threshold for sigma // (when sigma reaches the level of noise in the image) if(sigma < NoiseThreshold) { sigma= NoiseThreshold; } switch (method) { case TUKEY : { psiTukey(sigma, all_normres,weights); vpCDEBUG(2) << "Tukey's function computed" << std::endl; break ; } case CAUCHY : { psiCauchy(sigma, all_normres,weights); break ; } /* case MCLURE : { psiMcLure(sigma, all_normres); break ; }*/ case HUBER : { psiHuber(sigma, all_normres,weights); break ; } }; }
/*!
  \brief Constructor.
  \param n_data : Size of the data vector.
*/
vpRobust::vpRobust(unsigned int n_data)
  : normres(), sorted_normres(), sorted_residues(), NoiseThreshold(0.0017),
    sig_prev(0), it(0), swap(0), size(n_data)
{
  vpCDEBUG(2) << "vpRobust constructor reached" << std::endl;

  // Pre-allocate the working vectors to the expected data size.
  // NoiseThreshold is set in the initializer list: the estimator cannot be
  // more accurate than about one pixel.
  sorted_residues.resize(n_data);
  sorted_normres.resize(n_data);
  normres.resize(n_data);
}
/*! Compute the direct geometric model of the camera: fMc \param q : Articular position for pan and tilt axis. \param fMc : Homogeneous matrix corresponding to the direct geometric model of the camera. Describes the transformation between the robot reference frame (called fixed) and the camera frame. */ void vpBiclops::get_fMc (const vpColVector &q, vpHomogeneousMatrix &fMc) { vpHomogeneousMatrix fMe = get_fMe(q); fMc = fMe * cMe_.inverse(); vpCDEBUG (6) << "camera position: " << std::endl << fMc; return ; }
/*!
  Basic constructor that calls the constructor of the class vpMeTracker.
*/
vpMeEllipse::vpMeEllipse()
  : K(), iPc(), a(0.), b(0.), e(0.), iP1(), iP2(), alpha1(0), alpha2(2*M_PI),
    ce(0.), se(0.), angle(), m00(0.), mu11(0.), mu20(0.), mu02(0.), m10(0.),
    m01(0.), m11(0.), m02(0.), m20(0.), thresholdWeight(0.2),
    expecteddensity(0.), circle(false)
{
  vpCDEBUG(1) << "begin vpMeEllipse::vpMeEllipse() " << std::endl ;

  // Parameter vector of the implicit ellipse equation:
  // i^2 + K0 j^2 + 2 K1 i j + 2 K2 i + 2 K3 j + K4 = 0
  K.resize(5) ;

  // Arc extremities start at the image origin
  iP1.set_i(0);
  iP1.set_j(0);
  iP2.set_i(0);
  iP2.set_j(0);

  vpCDEBUG(1) << "end vpMeEllipse::vpMeEllipse() " << std::endl ;
}
/*! * \brief computeAngle */ void vpMeEllipse::computeAngle(int ip1, int jp1, double &_alpha1, int ip2, int jp2, double &_alpha2) { getParameters() ; double j1, i1, j11, i11; j1 = i1 = 0.0 ; int number_of_points = 2000 ; double incr = 2 * M_PI / number_of_points ; // angle increment double dmin1 = 1e6 ; double dmin2 = 1e6 ; double k = -M_PI ; while(k < M_PI) { j1 = a *cos(k) ; // equation of an ellipse i1 = b *sin(k) ; // equation of an ellipse // (i1,j1) are the coordinates on the origin centered ellipse ; // a rotation by "e" and a translation by (xci,jc) are done // to get the coordinates of the point on the shifted ellipse j11 = iPc.get_j() + ce *j1 - se *i1 ; i11 = iPc.get_i() -( se *j1 + ce *i1) ; double d = vpMath::sqr(ip1-i11) + vpMath::sqr(jp1-j11) ; if (d < dmin1) { dmin1 = d ; alpha1 = k ; _alpha1 = k ; } d = vpMath::sqr(ip2-i11) + vpMath::sqr(jp2-j11) ; if (d < dmin2) { dmin2 = d ; alpha2 = k ; _alpha2 = k ; } k += incr ; } if (alpha2 <alpha1) alpha2 += 2*M_PI ; vpCDEBUG(1) << "end vpMeEllipse::computeAngle(..)" << alpha1 << " " << alpha2 << std::endl ; }
/*! Compute the direct geometric model of the camera: fMc \param q : Articular position for pan and tilt axis. \param fMc : Homogeneous matrix corresponding to the direct geometric model of the camera. Describes the transformation between the robot reference frame (called fixed) and the camera frame. */ void vpPtu46::computeMGD (const vpColVector & q, vpHomogeneousMatrix & fMc) { if (q.getRows() != 2) { vpERROR_TRACE("Bad dimension for ptu-46 articular vector"); throw(vpException(vpException::dimensionError, "Bad dimension for ptu-46 articular vector")); } double q1 = q[0]; // pan double q2 = q[1]; // tilt double c1 = cos(q1); double s1 = sin(q1); double c2 = cos(q2); double s2 = sin(q2); fMc[0][0] = s1; fMc[0][1] = c1*s2; fMc[0][2] = c1*c2; fMc[0][3] = -h*c1*s2 - L*s1; fMc[1][0] = -c1; fMc[1][1] = s1*s2; fMc[1][2] = s1*c2; fMc[1][3] = -h*s1*s2 + L*c1; fMc[2][0] = 0; fMc[2][1] = -c2; fMc[2][2] = s2; fMc[2][3] = h*c2; fMc[3][0] = 0; fMc[3][1] = 0; fMc[3][2] = 0; fMc[3][3] = 1; vpCDEBUG (6) << "Position de la camera: " << std::endl << fMc; return ; }
/*! Initilization of the tracking. Ask the user to click on five points from the ellipse edge to track. \param I : Image in which the ellipse appears. */ void vpMeEllipse::initTracking(const vpImage<unsigned char> &I) { vpCDEBUG(1) <<" begin vpMeEllipse::initTracking()"<<std::endl ; const unsigned int n=5 ; vpImagePoint iP[n]; for (unsigned int k =0 ; k < n ; k++) { std::cout << "Click points "<< k+1 <<"/" << n ; std::cout << " on the ellipse in the trigonometric order" <<std::endl ; vpDisplay::getClick(I, iP[k], true); vpDisplay::displayCross(I, iP[k], 7, vpColor::red); vpDisplay::flush(I); std::cout << iP[k] << std::endl; } iP1 = iP[0]; iP2 = iP[n-1]; initTracking(I, n, iP) ; }
/*!
  Initialization of the tracking from n edge points given as two coordinate
  arrays. Fits the ellipse (or circle, when the \e circle flag is set)
  parameters K by linear least squares, then starts the moving-edge tracking.

  \param I : Image in which the ellipse appears.
  \param n : Number of points.
  \param i : Array of the n row coordinates.
  \param j : Array of the n column coordinates.
*/
void vpMeEllipse::initTracking(const vpImage<unsigned char> &I, const unsigned int n, unsigned *i, unsigned *j)
{
  vpCDEBUG(1) <<" begin vpMeEllipse::initTracking()"<<std::endl ;

  if (circle==false)
  {
    vpMatrix A(n,5) ;
    vpColVector b_(n) ;
    vpColVector x(5) ;

    // Build the linear system Ax = b from the implicit ellipse equation
    //! i^2 + K0 j^2 + 2 K1 i j + 2 K2 i + 2 K3 j + K4
    // A = (j^2 2ij 2i 2j 1)   x = (K0 K1 K2 K3 K4)^T   b = (-i^2)
    for (unsigned int k =0 ; k < n ; k++)
    {
      A[k][0] = vpMath::sqr(j[k]) ;
      A[k][1] = 2* i[k] * j[k] ;
      A[k][2] = 2* i[k] ;
      A[k][3] = 2* j[k] ;
      A[k][4] = 1 ;

      b_[k] = - vpMath::sqr(i[k]) ;
    }

    // Least-squares solution through the pseudo-inverse
    K = A.pseudoInverse(1e-26)*b_ ;
    std::cout << K << std::endl;
  }
  else
  {
    // Circle case: fit i^2 + j^2 + 2 K2 i + 2 K3 j + K4 = 0
    vpMatrix A(n,3) ;
    vpColVector b_(n) ;
    vpColVector x(3) ;
    vpColVector Kc(3) ;

    for (unsigned int k =0 ; k < n ; k++)
    {
      A[k][0] =  2* i[k] ;
      A[k][1] =  2* j[k] ;
      A[k][2] = 1 ;

      b_[k] = - vpMath::sqr(i[k]) - vpMath::sqr(j[k]) ;
    }

    Kc = A.pseudoInverse(1e-26)*b_ ;

    // Expand the 3-parameter circle solution into the 5-parameter form
    K[0] = 1 ;
    K[1] = 0 ;
    K[2] = Kc[0] ;
    K[3] = Kc[1] ;
    K[4] = Kc[2] ;

    std::cout << K << std::endl;
  }

  // The first and last given points define the arc extremities
  iP1.set_i( i[0] );
  iP1.set_j( j[0] );
  iP2.set_i( i[n-1] );
  iP2.set_j( j[n-1] );

  getParameters() ;
  computeAngle(iP1, iP2) ;
  display(I, vpColor::green) ;

  sample(I) ;

  // 2. Generic moving-edge initialization
  {
    vpMeTracker::initTracking(I) ;
  }

  try{
    track(I) ;
  }
  catch(...)
  {
    vpERROR_TRACE("Error caught") ;
    throw ;
  }
  vpMeTracker::display(I) ;
  vpDisplay::flush(I) ;
}
/*!
  Least squares method used to make the tracking more robust. It ensures that
  the points taken into account to compute the right equation belong to the
  line.

  The line a i + b j + c = 0 is re-estimated by iteratively reweighted least
  squares (Tukey M-estimator). Two parameterizations are used depending on
  the line orientation to keep the system well conditioned. Sites whose
  final weight is below 0.2 are flagged M_ESTIMATOR (outliers).

  \exception vpTrackingException::notEnoughPointError : Less than three
  usable moving-edge sites.
*/
void vpMeLine::leastSquare()
{
  vpMatrix A(numberOfSignal(),2) ;
  vpColVector x(2), x_1(2) ;
  x_1 = 0;

  unsigned int i ;

  vpRobust r(numberOfSignal()) ;
  r.setThreshold(2);
  r.setIteration(0) ;
  vpMatrix D(numberOfSignal(),numberOfSignal()) ;  // Diagonal weight matrix
  D.eye() ;
  vpMatrix DA, DAmemory ;
  vpColVector DAx ;
  vpColVector w(numberOfSignal()) ;
  vpColVector B(numberOfSignal()) ;
  w =1 ;
  vpMeSite p_me ;
  unsigned int iter =0 ;
  unsigned int nos_1 = 0 ;
  double distance = 100;   // L1 change of the solution between iterations

  // At least 3 valid sites are needed to fit a line robustly
  if (list.size() <= 2 || numberOfSignal() <= 2)
  {
    //vpERROR_TRACE("Not enough point") ;
    vpCDEBUG(1) << "Not enough point";
    throw(vpTrackingException(vpTrackingException::notEnoughPointError,
                              "not enough point")) ;
  }

  if ((fabs(b) >=0.9)) // Mostly horizontal line: solve a i + j + c = 0
                       // System Ax=B with A = (i 1), B = (-j)
  {
    nos_1 = numberOfSignal() ;
    unsigned int k =0 ;
    // Fill the system with the non-suppressed sites only
    for(std::list<vpMeSite>::const_iterator it=list.begin(); it!=list.end(); ++it){
      p_me = *it;
      if (p_me.getState() == vpMeSite::NO_SUPPRESSION)
      {
        A[k][0] = p_me.ifloat ;
        A[k][1] = 1 ;
        B[k] = -p_me.jfloat ;
        k++ ;
      }
    }

    // Iteratively reweighted least squares (max 4 iterations or until the
    // solution moves by less than 0.05)
    while (iter < 4 && distance > 0.05)
    {
      DA = D*A ;
      x = DA.pseudoInverse(1e-26) *D*B ;

      vpColVector residu(nos_1);
      residu = B - A*x;
      r.setIteration(iter) ;
      r.MEstimator(vpRobust::TUKEY,residu,w) ;

      // Update the diagonal weight matrix from the robust weights
      k = 0;
      for (i=0 ; i < nos_1 ; i++)
      {
        D[k][k] =w[k]  ;
        k++;
      }
      iter++ ;
      distance = fabs(x[0]-x_1[0])+fabs(x[1]-x_1[1]);
      x_1 = x;
    }

    // Flag low-weight sites as outliers
    k =0 ;
    for(std::list<vpMeSite>::iterator it=list.begin(); it!=list.end(); ++it){
      p_me = *it;
      if (p_me.getState() == vpMeSite::NO_SUPPRESSION)
      {
        if (w[k] < 0.2)
        {
          p_me.setState(vpMeSite::M_ESTIMATOR);
          *it = p_me;
        }
        k++ ;
      }
    }

    // Update the line equation
    a = x[0] ;
    b = 1 ;
    c = x[1] ;

    // Normalize (a, b, c) so that a^2 + b^2 = 1
    double s =sqrt( vpMath::sqr(a)+vpMath::sqr(b)) ;
    a /= s ;
    b /= s ;
    c /= s ;
  }
  else                 // Mostly vertical line: solve i + b j + c = 0
                       // System Ax=B with A = (j 1), B = (-i)
  {
    nos_1 = numberOfSignal() ;
    unsigned int k =0 ;
    // Fill the system with the non-suppressed sites only
    for(std::list<vpMeSite>::const_iterator it=list.begin(); it!=list.end(); ++it){
      p_me = *it;
      if (p_me.getState() == vpMeSite::NO_SUPPRESSION)
      {
        A[k][0] = p_me.jfloat ;
        A[k][1] = 1 ;
        B[k] = -p_me.ifloat ;
        k++ ;
      }
    }

    // Iteratively reweighted least squares (same loop as the other branch)
    while (iter < 4 && distance > 0.05)
    {
      DA = D*A ;
      x = DA.pseudoInverse(1e-26) *D*B ;

      vpColVector residu(nos_1);
      residu = B - A*x;
      r.setIteration(iter) ;
      r.MEstimator(vpRobust::TUKEY,residu,w) ;

      // Update the diagonal weight matrix from the robust weights
      k = 0;
      for (i=0 ; i < nos_1 ; i++)
      {
        D[k][k] =w[k] ;
        k++;
      }
      iter++ ;
      distance = fabs(x[0]-x_1[0])+fabs(x[1]-x_1[1]);
      x_1 = x;
    }

    // Flag low-weight sites as outliers
    k =0 ;
    for(std::list<vpMeSite>::iterator it=list.begin(); it!=list.end(); ++it){
      p_me = *it;
      if (p_me.getState() == vpMeSite::NO_SUPPRESSION)
      {
        if (w[k] < 0.2)
        {
          p_me.setState(vpMeSite::M_ESTIMATOR);
          *it = p_me;
        }
        k++ ;
      }
    }

    // Update the line equation
    a = 1 ;
    b = x[0] ;
    c = x[1] ;

    // Normalize (a, b, c) so that a^2 + b^2 = 1
    double s = sqrt(vpMath::sqr(a)+vpMath::sqr(b)) ;
    a /= s ;
    b /= s ;
    c /= s ;
  }

  // Update the site orientation delta from the new line direction
  delta = atan2(a,b) ;
  normalizeAngle(delta) ;
}
/*!
  Construct a list of vpMeSite moving edges at a particular sampling
  step between the two extremities of the line.

  \param I : Image in which the line appears.

  \exception vpTrackingException::initializationError : Moving edges not
  initialized.
  \exception vpTrackingException::fatalError : Sample step is zero, or the
  two extremities coincide.
*/
void vpMeLine::sample(const vpImage<unsigned char>& I)
{
  if (!me) {
    vpDERROR_TRACE(2, "Tracking error: Moving edges not initialized");
    throw(vpTrackingException(vpTrackingException::initializationError,
                              "Moving edges not initialized")) ;
  }

  int rows = (int)I.getHeight() ;
  int cols = (int)I.getWidth() ;
  double n_sample;

  // A null sample step would lead to a division by zero below
  if (std::fabs(me->getSampleStep()) <= std::numeric_limits<double>::epsilon())
  {
    vpERROR_TRACE("function called with sample step = 0") ;
    throw(vpTrackingException(vpTrackingException::fatalError,
                              "sample step = 0")) ;
  }

  // i, j portions of the line_p
  double diffsi = PExt[0].ifloat-PExt[1].ifloat;
  double diffsj = PExt[0].jfloat-PExt[1].jfloat;

  double length_p = sqrt((vpMath::sqr(diffsi)+vpMath::sqr(diffsj)));
  if(std::fabs(length_p)<=std::numeric_limits<double>::epsilon())
    throw(vpTrackingException(vpTrackingException::fatalError,"points too close of each other to define a line")) ;

  // number of samples along line_p
  n_sample = length_p/(double)me->getSampleStep();

  // Per-sample displacement along the line
  double stepi = diffsi/(double)n_sample;
  double stepj = diffsj/(double)n_sample;

  // Choose starting point
  double is = PExt[1].ifloat;
  double js = PExt[1].jfloat;

  // Delete old list
  list.clear();

  // sample positions at i*me->getSampleStep() interval along the
  // line_p, starting at PSiteExt[0]
  vpImagePoint ip;
  for(int i=0; i<=vpMath::round(n_sample); i++)
  {
    // If point is in the image, add to the sample list
    if(!outOfImage(vpMath::round(is), vpMath::round(js), 0, rows, cols))
    {
      vpMeSite pix ; //= list.value();
      // NOTE(review): the site position is truncated here while the
      // in-image test above rounds — confirm this asymmetry is intended
      pix.init((int)is, (int)js, delta, 0, sign) ;
      pix.setDisplay(selectDisplay) ;

      if(vpDEBUG_ENABLE(3))
      {
        ip.set_i( is );
        ip.set_j( js );
        vpDisplay::displayCross(I, ip, 2, vpColor::blue);
      }

      list.push_back(pix);
    }
    is += stepi;
    js += stepj;
  }

  vpCDEBUG(1) << "end vpMeLine::sample() : ";
  vpCDEBUG(1) << n_sample << " point inserted in the list " << std::endl ;
}
/*!
  Track the ellipse in the image I.

  Runs the generic moving-edge tracking, then re-estimates the ellipse
  parameters twice by robust least squares (with an extremity search in
  between), resamples the arc if needed and recomputes the moments.

  \param I : Image in which the ellipse appears.
*/
void vpMeEllipse::track(const vpImage<unsigned char> &I)
{
  vpCDEBUG(1) <<"begin vpMeEllipse::track()"<<std::endl ;

  static int iter =0 ;
  // 1. Ellipse-specific pre-processing (currently empty)
  {
  }

  //vpDisplay::display(I) ;
  // 2. Generic moving-edge tracking
  {
    try{
      vpMeTracker::track(I) ;
    }
    catch(...)
    {
      vpERROR_TRACE("Error caught") ;
      throw ;
    }
    //    std::cout << "number of signals " << numberOfSignal() << std::endl ;
  }

  // 3. Back to the ellipse-specific processing
  {
    // Robust least-squares estimation of the ellipse parameters
    suppressPoints() ;
    setExtremities() ;
    try{
      leastSquare() ;  }
    catch(...)
    {
      vpERROR_TRACE("Error caught") ;
      throw ;
    }

    // Look for new sites at the arc extremities, then refit
    seekExtremities(I) ;
    setExtremities() ;
    try
    {
      leastSquare() ;
    }
    catch(...)
    {
      vpERROR_TRACE("Error caught") ;
      throw ;
    }

    // Remove the points rejected by the robust regression
    suppressPoints() ;
    setExtremities() ;

    // Resample the arc if the site density dropped too much
    reSample(I) ;

    // Update the delta angle for each site of the list
    updateTheta() ;

    computeMoments();

    // Debug display of the refreshed site list
    if (vpDEBUG_ENABLE(2))
    {
      display(I,vpColor::red) ;
      vpMeTracker::display(I) ;
      vpDisplay::flush(I) ;
    }
  }

  iter++ ;

  vpCDEBUG(1) << "end vpMeEllipse::track()"<<std::endl ;
}
/*!
  Initialization of the tracking. The ellipse is defined thanks to the
  coordinates of n points.

  \warning It is better to use at least five points to well estimate the K
  parameters.

  \param I : Image in which the ellipse appears.
  \param n : The number of points in the list.
  \param iP : A pointer to a list of points belonging to the ellipse edge.
*/
void vpMeEllipse::initTracking(const vpImage<unsigned char> &I, const unsigned int n,
                               vpImagePoint *iP)
{
  vpCDEBUG(1) <<" begin vpMeEllipse::initTracking()"<<std::endl ;

  if (circle==false)
  {
    vpMatrix A(n,5) ;
    vpColVector b_(n) ;
    vpColVector x(5) ;

    // Build the linear system Ax = b from the implicit ellipse equation
    //! i^2 + K0 j^2 + 2 K1 i j + 2 K2 i + 2 K3 j + K4
    // A = (j^2 2ij 2i 2j 1)   x = (K0 K1 K2 K3 K4)^T   b = (-i^2)
    for (unsigned int k =0 ; k < n ; k++)
    {
      A[k][0] = vpMath::sqr(iP[k].get_j()) ;
      A[k][1] = 2* iP[k].get_i() * iP[k].get_j() ;
      A[k][2] = 2* iP[k].get_i() ;
      A[k][3] = 2* iP[k].get_j() ;
      A[k][4] = 1 ;

      b_[k] = - vpMath::sqr(iP[k].get_i()) ;
    }

    // Least-squares solution through the pseudo-inverse
    K = A.pseudoInverse(1e-26)*b_ ;
    std::cout << K << std::endl;
  }
  else
  {
    // Circle case: fit i^2 + j^2 + 2 K2 i + 2 K3 j + K4 = 0
    vpMatrix A(n,3) ;
    vpColVector b_(n) ;
    vpColVector x(3) ;
    vpColVector Kc(3) ;

    for (unsigned int k =0 ; k < n ; k++)
    {
      A[k][0] =  2* iP[k].get_i() ;
      A[k][1] =  2* iP[k].get_j() ;
      A[k][2] = 1 ;

      b_[k] = - vpMath::sqr(iP[k].get_i()) - vpMath::sqr(iP[k].get_j()) ;
    }

    Kc = A.pseudoInverse(1e-26)*b_ ;

    // Expand the 3-parameter circle solution into the 5-parameter form
    K[0] = 1 ;
    K[1] = 0 ;
    K[2] = Kc[0] ;
    K[3] = Kc[1] ;
    K[4] = Kc[2] ;

    std::cout << K << std::endl;
  }

  // The first and last given points define the arc extremities
  iP1 = iP[0];
  iP2 = iP[n-1];

  getParameters() ;

  std::cout << "vpMeEllipse::initTracking() ellipse avant: " << iPc << " " << a << " " << b << " " << vpMath::deg(e) << " alpha: " << alpha1 << " " << alpha2 << std::endl;
  computeAngle(iP1, iP2) ;
  std::cout << "vpMeEllipse::initTracking() ellipse apres: " << iPc << " " << a << " " << b << " " << vpMath::deg(e) << " alpha: " << alpha1 << " " << alpha2 << std::endl;

  // Expected number of moving-edge sites along the tracked arc
  expecteddensity = (alpha2-alpha1) / vpMath::rad((double)me->getSampleStep());

  display(I, vpColor::green) ;
  sample(I) ;

  // 2. Generic moving-edge initialization
  {
    vpMeTracker::initTracking(I) ;
  }

  try{
    track(I) ;
  }
  catch(...)
  {
    vpERROR_TRACE("Error caught") ;
    throw ;
  }
  vpMeTracker::display(I) ;
  vpDisplay::flush(I) ;
}
// =================================================================== void vpRobust::MEstimator(const vpRobustEstimatorType method, const vpColVector &residues, vpColVector &weights) { double med=0; // median double normmedian=0; // Normalized median double sigma=0;// Standard Deviation // resize vector only if the size of residue vector has changed unsigned int n_data = residues.getRows(); resize(n_data); sorted_residues = residues; unsigned int ind_med = (unsigned int)(ceil(n_data/2.0))-1; // Calculate median med = select(sorted_residues, 0, (int)n_data-1, (int)ind_med/*(int)n_data/2*/); //residualMedian = med ; // Normalize residues for(unsigned int i=0; i<n_data; i++) { normres[i] = (fabs(residues[i]- med)); sorted_normres[i] = (fabs(sorted_residues[i]- med)); } // Calculate MAD normmedian = select(sorted_normres, 0, (int)n_data-1, (int)ind_med/*(int)n_data/2*/); //normalizedResidualMedian = normmedian ; // 1.48 keeps scale estimate consistent for a normal probability dist. sigma = 1.4826*normmedian; // median Absolute Deviation // Set a minimum threshold for sigma // (when sigma reaches the level of noise in the image) if(sigma < NoiseThreshold) { sigma= NoiseThreshold; } switch (method) { case TUKEY : { psiTukey(sigma, normres,weights); vpCDEBUG(2) << "Tukey's function computed" << std::endl; break ; } case CAUCHY : { psiCauchy(sigma, normres,weights); break ; } /* case MCLURE : { psiMcLure(sigma, normres); break ; }*/ case HUBER : { psiHuber(sigma, normres,weights); break ; } } }
/*!
  Compute the normalized median (MAD) of the residues, ignoring the residues
  whose weight is null, and fill \e all_normres with the normalized version
  of \e all_residues.

  \param all_normres : Output, |all_residues - median| for every entry.
  \param residues : Residues used for the median (filtered by weights).
  \param all_residues : Complete residue vector.
  \param weights : Weights; residues with a (near) zero weight are excluded
  from the median computation.
  \return The median of the normalized, non-rejected residues.
*/
double vpRobust::computeNormalizedMedian(vpColVector &all_normres,
                                         const vpColVector &residues,
                                         const vpColVector &all_residues,
                                         const vpColVector & weights
                                         )
{
  double med=0;        // Median of the kept residues
  double normmedian=0; // Median of the normalized residues (MAD)

  unsigned int n_all_data = all_residues.getRows();
  unsigned int n_data = residues.getRows();
  // resize vector only if the size of residue vector has changed
  resize(n_data);

  sorted_residues = residues;
  vpColVector no_null_weight_residues;
  no_null_weight_residues.resize(n_data);

  // Keep only the residues whose weight is not (numerically) null
  unsigned int index =0;
  for(unsigned int j=0;j<n_data;j++)
  {
    //if(weights[j]!=0)
    if(std::fabs(weights[j]) > std::numeric_limits<double>::epsilon())
    {
      no_null_weight_residues[index]=residues[j];
      index++;
    }
  }
  sorted_residues.resize(index);
  memcpy(sorted_residues.data,no_null_weight_residues.data,index*sizeof(double));
  n_data=index;

  vpCDEBUG(2) << "vpRobust MEstimator reached. No. data = " << n_data
              << std::endl;

  // Calculate Median
  // Be careful to not use the rejected residues for the
  // calculation.
  // NOTE(review): if every weight is null, n_data == 0 and ind_med
  // underflows (unsigned) — confirm callers guarantee n_data > 0.
  unsigned int ind_med = (unsigned int)(ceil(n_data/2.0))-1;
  med = select(sorted_residues, 0, (int)n_data-1, (int)ind_med);

  unsigned int i;
  // Normalize all residues: absolute deviation from the median
  for(i=0; i<n_all_data; i++)
  {
    all_normres[i] = (fabs(all_residues[i]- med));
  }

  // Normalize the kept (sorted) residues as well
  for(i=0; i<n_data; i++)
  {
    sorted_normres[i] = (fabs(sorted_residues[i]- med));
  }

  // MAD calculated only on the non-rejected residues
  normmedian = select(sorted_normres, 0, (int)n_data-1, (int)ind_med);

  return normmedian;
}
/*!
  Seek along the line defined by its equation, the two extremities of the
  line. This function is useful in case of translation of the line.

  Up to three new sites are probed beyond each extremity, along the line
  direction, with the moving-edge search range temporarily reduced to 1.
  Successfully tracked sites are appended to the list.

  \param I : Image in which the line appears.

  \exception vpTrackingException::initializationError : Moving edges not
  initialized.
  \exception vpTrackingException::fatalError : Sample step is zero.
*/
void vpMeLine::seekExtremities(const vpImage<unsigned char> &I)
{
  vpCDEBUG(1) <<"begin vpMeLine::sample() : "<<std::endl ;

  if (!me) {
    vpDERROR_TRACE(2, "Tracking error: Moving edges not initialized");
    throw(vpTrackingException(vpTrackingException::initializationError,
                              "Moving edges not initialized")) ;
  }

  int rows = (int)I.getHeight() ;
  int cols = (int)I.getWidth() ;
  double n_sample;

  //if (me->getSampleStep()==0)
  if (std::fabs(me->getSampleStep()) <= std::numeric_limits<double>::epsilon())
  {
    vpERROR_TRACE("function called with sample step = 0") ;
    throw(vpTrackingException(vpTrackingException::fatalError,
                              "sample step = 0")) ;
  }

  // i, j portions of the line_p
  double diffsi = PExt[0].ifloat-PExt[1].ifloat;
  double diffsj = PExt[0].jfloat-PExt[1].jfloat;
  double s = vpMath::sqr(diffsi)+vpMath::sqr(diffsj) ;

  // Unit direction of the line; no risk of division by zero since
  // d(P1,P2) > 0
  double di = diffsi/sqrt(s) ;
  double dj = diffsj/sqrt(s) ;

  double length_p = sqrt((vpMath::sqr(diffsi)+vpMath::sqr(diffsj)));

  // number of samples along line_p
  n_sample = length_p/(double)me->getSampleStep();
  double sample_step = (double)me->getSampleStep();

  vpMeSite P ;
  P.init((int) PExt[0].ifloat, (int)PExt[0].jfloat, delta_1, 0, sign) ;
  P.setDisplay(selectDisplay) ;

  // Narrow the search range while probing; restored before returning
  unsigned int memory_range = me->getRange() ;
  me->setRange(1);

  vpImagePoint ip;

  // Probe up to 3 sites beyond the first extremity, along +d
  for (int i=0 ; i < 3 ; i++)
  {
    P.ifloat = P.ifloat + di*sample_step ; P.i = (int)P.ifloat ;
    P.jfloat = P.jfloat + dj*sample_step ; P.j = (int)P.jfloat ;

    if(!outOfImage(P.i, P.j, 5, rows, cols))
    {
      P.track(I,me,false) ;

      if (P.getState() == vpMeSite::NO_SUPPRESSION)
      {
        // Successfully tracked: keep the new site
        list.push_back(P);
        if (vpDEBUG_ENABLE(3)) {
          ip.set_i( P.i );
          ip.set_j( P.j );
          vpDisplay::displayCross(I, ip, 5, vpColor::green) ;
        }
      }
      else {
        if (vpDEBUG_ENABLE(3)) {
          ip.set_i( P.i );
          ip.set_j( P.j );
          vpDisplay::displayCross(I, ip, 10, vpColor::blue) ;
        }
      }
    }
  }

  // Probe up to 3 sites beyond the second extremity, along -d
  P.init((int) PExt[1].ifloat, (int)PExt[1].jfloat, delta_1, 0, sign) ;
  P.setDisplay(selectDisplay) ;
  for (int i=0 ; i < 3 ; i++)
  {
    P.ifloat = P.ifloat - di*sample_step ; P.i = (int)P.ifloat ;
    P.jfloat = P.jfloat - dj*sample_step ; P.j = (int)P.jfloat ;

    if(!outOfImage(P.i, P.j, 5, rows, cols))
    {
      P.track(I,me,false) ;

      if (P.getState() == vpMeSite::NO_SUPPRESSION)
      {
        // Successfully tracked: keep the new site
        list.push_back(P);
        if (vpDEBUG_ENABLE(3)) {
          ip.set_i( P.i );
          ip.set_j( P.j );
          vpDisplay::displayCross(I, ip, 5, vpColor::green) ;
        }
      }
      else {
        if (vpDEBUG_ENABLE(3)) {
          ip.set_i( P.i );
          ip.set_j( P.j );
          vpDisplay::displayCross(I, ip, 10, vpColor::blue) ;
        }
      }
    }
  }

  me->setRange(memory_range);

  vpCDEBUG(1) <<"end vpMeLine::sample() : " ;
  vpCDEBUG(1) << n_sample << " point inserted in the list " << std::endl ;
}
void vpRobotPtu46::setVelocity (const vpRobot::vpControlFrameType frame, const vpColVector & v) { TPtuFrame ptuFrameInterface; if (vpRobot::STATE_VELOCITY_CONTROL != getRobotState ()) { vpERROR_TRACE ("Cannot send a velocity to the robot " "use setRobotState(vpRobot::STATE_VELOCITY_CONTROL) first) "); throw vpRobotException (vpRobotException::wrongStateError, "Cannot send a velocity to the robot " "use setRobotState(vpRobot::STATE_VELOCITY_CONTROL) first) "); } switch(frame) { case vpRobot::CAMERA_FRAME : { ptuFrameInterface = PTU_CAMERA_FRAME; if ( v.getRows() != 2) { vpERROR_TRACE ("Bad dimension fo speed vector in camera frame"); throw vpRobotException (vpRobotException::wrongStateError, "Bad dimension for speed vector " "in camera frame"); } break ; } case vpRobot::ARTICULAR_FRAME : { ptuFrameInterface = PTU_ARTICULAR_FRAME; if ( v.getRows() != 2) { vpERROR_TRACE ("Bad dimension fo speed vector in articular frame"); throw vpRobotException (vpRobotException::wrongStateError, "Bad dimension for speed vector " "in articular frame"); } break ; } case vpRobot::REFERENCE_FRAME : { vpERROR_TRACE ("Cannot send a velocity to the robot " "in the reference frame: " "functionality not implemented"); throw vpRobotException (vpRobotException::wrongStateError, "Cannot send a velocity to the robot " "in the reference frame:" "functionality not implemented"); break ; } case vpRobot::MIXT_FRAME : { vpERROR_TRACE ("Cannot send a velocity to the robot " "in the mixt frame: " "functionality not implemented"); throw vpRobotException (vpRobotException::wrongStateError, "Cannot send a velocity to the robot " "in the mixt frame:" "functionality not implemented"); break ; } default: { vpERROR_TRACE ("Error in spec of vpRobot. 
" "Case not taken in account."); throw vpRobotException (vpRobotException::wrongStateError, "Cannot send a velocity to the robot "); } } vpDEBUG_TRACE (12, "Velocity limitation."); bool norm = false; // Flag to indicate when velocities need to be nomalized double ptuSpeedInterface[2]; switch(frame) { case vpRobot::ARTICULAR_FRAME : case vpRobot::CAMERA_FRAME : { double max = this ->maxRotationVelocity; for (unsigned int i = 0 ; i < 2; ++ i) // rx and ry of the camera { if (fabs (v[i]) > max) { norm = true; max = fabs (v[i]); vpERROR_TRACE ("Excess velocity: ROTATION " "(axe nr.%d).", i); } } // Rotations velocities normalisation if (norm == true) { max = this ->maxRotationVelocity / max; for (unsigned int i = 0 ; i < 2; ++ i) ptuSpeedInterface [i] = v[i]*max; } break; } default: // Should never occur break; } vpCDEBUG(12) << "v: " << ptuSpeedInterface[0] << " " << ptuSpeedInterface[1] << std::endl; ptu.move(ptuSpeedInterface, ptuFrameInterface); return; }
/*! Track the line in the image I. \param I : Image in which the line appears. */ void vpMeLine::track(const vpImage<unsigned char> &I) { vpCDEBUG(1) <<"begin vpMeLine::track()"<<std::endl ; // 1. On fait ce qui concerne les droites (peut etre vide) { } // 2. On appelle ce qui n'est pas specifique { vpMeTracker::track(I) ; } // 3. On revient aux droites { // supression des points rejetes par les ME suppressPoints() ; setExtremities() ; // Estimation des parametres de la droite aux moindres carre try { leastSquare() ; } catch(...) { vpERROR_TRACE("Error caught") ; throw ; } // recherche de point aux extremite de la droites // dans le cas d'un glissement seekExtremities(I) ; setExtremities() ; try { leastSquare() ; } catch(...) { vpERROR_TRACE("Error caught") ; throw ; } // suppression des points rejetes par la regression robuste suppressPoints() ; setExtremities() ; //reechantillonage si necessaire reSample(I) ; // remet a jour l'angle delta pour chaque point de la liste updateDelta() ; // Remise a jour de delta dans la liste de site me if (vpDEBUG_ENABLE(2)) { display(I,vpColor::red) ; vpMeTracker::display(I) ; vpDisplay::flush(I) ; } } computeRhoTheta(I) ; vpCDEBUG(1) <<"end vpMeLine::track()"<<std::endl ; }
/*!
  Return the position of each axis.

  - In positionning control mode, call vpRobotBiclopsController::getPosition()
  - In speed control mode, call vpRobotBiclopsController::getActualPosition()

  \param frame : Control frame. This biclops head can only be controlled in
  articular.

  \param q : The position of the axis in radians.

  \exception vpRobotException::wrongStateError : If a not supported frame type
  is given.
*/
void vpRobotBiclops::getPosition (const vpRobot::vpControlFrameType frame,
                                  vpColVector & q)
{
  // Only the articular frame is supported; every other frame throws.
  switch(frame) {
  case vpRobot::CAMERA_FRAME :
    vpERROR_TRACE ("Cannot get position in camera frame: not implemented");
    throw vpRobotException (vpRobotException::wrongStateError,
                            "Cannot get position in camera frame: "
                            "not implemented");
    break;
  case vpRobot::REFERENCE_FRAME:
    vpERROR_TRACE ("Cannot get position in reference frame: "
                   "not implemented");
    throw vpRobotException (vpRobotException::wrongStateError,
                            "Cannot get position in reference frame: "
                            "not implemented");
    break;
  case vpRobot::MIXT_FRAME:
    vpERROR_TRACE ("Cannot get position in mixt frame: "
                   "not implemented");
    throw vpRobotException (vpRobotException::wrongStateError,
                            "Cannot get position in mixt frame: "
                            "not implemented");
    break;
  case vpRobot::ARTICULAR_FRAME:
    break ;
  }

  vpRobot::vpRobotStateType state;
  state = vpRobot::getRobotState();

  switch (state) {
  case STATE_STOP:
  case STATE_POSITION_CONTROL:
    // Controller holds the commanded position directly.
    q = controller.getPosition();
    break;
  case STATE_VELOCITY_CONTROL:
  case STATE_ACCELERATION_CONTROL:
  default:
    // In velocity mode the position is read back from the shared memory
    // filled by the controller thread.
    q.resize(vpBiclops::ndof);

    // NOTE(review): vpMeasure_mutex is locked here and released only after
    // the shared-memory read below — presumably it serializes measurement
    // readers against the controller thread; confirm against the
    // controller implementation.
    vpDEBUG_TRACE (12, "Lock mutex vpMeasure_mutex");
    pthread_mutex_lock(&vpMeasure_mutex); // Wait until a position is available

    vpRobotBiclopsController::shmType shm;

    vpDEBUG_TRACE (12, "Lock mutex vpShm_mutex");
    pthread_mutex_lock(&vpShm_mutex);

    // Copy of the shared-memory snapshot taken under vpShm_mutex.
    shm = controller.readShm();

    vpDEBUG_TRACE (12, "unlock mutex vpShm_mutex");
    pthread_mutex_unlock(&vpShm_mutex);

    for (unsigned int i=0; i < vpBiclops::ndof; i ++) {
      q[i] = shm.actual_q[i];
    }

    vpCDEBUG(11) << "++++++++ Measure actuals: " << q.t();

    vpDEBUG_TRACE (12, "unlock mutex vpMeasure_mutex");
    pthread_mutex_unlock(&vpMeasure_mutex); // A position is available

    break;
  }
}
/*! Send a velocity on each axis. \param frame : Control frame. This biclops head can only be controlled in articular. Be aware, the camera frame (vpRobot::CAMERA_FRAME), the reference frame (vpRobot::REFERENCE_FRAME) and the mixt frame (vpRobot::MIXT_FRAME) are not implemented. \param q_dot : The desired articular velocity of the axis in rad/s. \f$ \dot {r} = [\dot{q}_1, \dot{q}_2]^t \f$ with \f$ \dot{q}_1 \f$ the pan of the camera and \f$ \dot{q}_2\f$ the tilt of the camera. \exception vpRobotException::wrongStateError : If a the robot is not configured to handle a velocity. The robot can handle a velocity only if the velocity control mode is set. For that, call setRobotState( vpRobot::STATE_VELOCITY_CONTROL) before setVelocity(). \exception vpRobotException::wrongStateError : If a not supported frame type (vpRobot::CAMERA_FRAME, vpRobot::REFERENCE_FRAME or vpRobot::MIXT_FRAME) is given. \warning Velocities could be saturated if one of them exceed the maximal autorized speed (see vpRobot::maxRotationVelocity). 
*/ void vpRobotBiclops::setVelocity (const vpRobot::vpControlFrameType frame, const vpColVector & q_dot) { if (vpRobot::STATE_VELOCITY_CONTROL != getRobotState ()) { vpERROR_TRACE ("Cannot send a velocity to the robot " "use setRobotState(vpRobot::STATE_VELOCITY_CONTROL) first) "); throw vpRobotException (vpRobotException::wrongStateError, "Cannot send a velocity to the robot " "use setRobotState(vpRobot::STATE_VELOCITY_CONTROL) first) "); } switch(frame) { case vpRobot::CAMERA_FRAME : { vpERROR_TRACE ("Cannot send a velocity to the robot " "in the camera frame: " "functionality not implemented"); throw vpRobotException (vpRobotException::wrongStateError, "Cannot send a velocity to the robot " "in the camera frame:" "functionality not implemented"); break ; } case vpRobot::ARTICULAR_FRAME : { if ( q_dot.getRows() != 2) { vpERROR_TRACE ("Bad dimension fo speed vector in articular frame"); throw vpRobotException (vpRobotException::wrongStateError, "Bad dimension for speed vector " "in articular frame"); } break ; } case vpRobot::REFERENCE_FRAME : { vpERROR_TRACE ("Cannot send a velocity to the robot " "in the reference frame: " "functionality not implemented"); throw vpRobotException (vpRobotException::wrongStateError, "Cannot send a velocity to the robot " "in the reference frame:" "functionality not implemented"); break ; } case vpRobot::MIXT_FRAME : { vpERROR_TRACE ("Cannot send a velocity to the robot " "in the mixt frame: " "functionality not implemented"); throw vpRobotException (vpRobotException::wrongStateError, "Cannot send a velocity to the robot " "in the mixt frame:" "functionality not implemented"); break ; } default: { vpERROR_TRACE ("Error in spec of vpRobot. 
" "Case not taken in account."); throw vpRobotException (vpRobotException::wrongStateError, "Cannot send a velocity to the robot "); } } vpDEBUG_TRACE (12, "Velocity limitation."); bool norm = false; // Flag to indicate when velocities need to be nomalized // Saturate articular speed double max = vpBiclops::speedLimit; vpColVector q_dot_sat(vpBiclops::ndof); // init q_dot_saturated q_dot_sat = q_dot; for (unsigned int i = 0 ; i < vpBiclops::ndof; ++ i) // q1 and q2 { if (fabs (q_dot[i]) > max) { norm = true; max = fabs (q_dot[i]); vpERROR_TRACE ("Excess velocity: ROTATION " "(axe nr.%d).", i); } } // Rotations velocities normalisation if (norm == true) { max = vpBiclops::speedLimit / max; q_dot_sat = q_dot * max; } vpCDEBUG(12) << "send velocity: " << q_dot_sat.t() << std::endl; vpRobotBiclopsController::shmType shm; vpDEBUG_TRACE (12, "Lock mutex vpShm_mutex"); pthread_mutex_lock(&vpShm_mutex); shm = controller.readShm(); for (unsigned int i=0; i < vpBiclops::ndof; i ++) shm.q_dot[i] = q_dot[i]; controller.writeShm(shm); vpDEBUG_TRACE (12, "unlock mutex vpShm_mutex"); pthread_mutex_unlock(&vpShm_mutex); return; }
/*! Construct a list of vpMeSite moving edges at a particular sampling step between the two extremities. The two extremities are defined by the points with the smallest and the biggest \f$ alpha \f$ angle. \param I : Image in which the ellipse appears. \exception vpTrackingException::initializationError : Moving edges not initialized. */ void vpMeEllipse::sample(const vpImage<unsigned char> & I) { vpCDEBUG(1) <<"begin vpMeEllipse::sample() : "<<std::endl ; if (!me) { vpDERROR_TRACE(2, "Tracking error: Moving edges not initialized"); throw(vpTrackingException(vpTrackingException::initializationError, "Moving edges not initialized")) ; } int height = (int)I.getHeight() ; int width = (int)I.getWidth() ; double n_sample; //if (me->getSampleStep()==0) if (std::fabs(me->getSampleStep()) <= std::numeric_limits<double>::epsilon()) { std::cout << "In vpMeEllipse::sample: " ; std::cout << "function called with sample step = 0" ; //return fatalError ; } double j, i;//, j11, i11; vpImagePoint iP11; j = i = 0.0 ; double incr = vpMath::rad(me->getSampleStep()) ; // angle increment en degree vpColor col = vpColor::red ; getParameters() ; // Delete old list list.clear(); angle.clear(); // sample positions double k = alpha1 ; while (k<alpha2) { // j = a *cos(k) ; // equation of an ellipse // i = b *sin(k) ; // equation of an ellipse j = a *sin(k) ; // equation of an ellipse i = b *cos(k) ; // equation of an ellipse // (i,j) are the coordinates on the origin centered ellipse ; // a rotation by "e" and a translation by (xci,jc) are done // to get the coordinates of the point on the shifted ellipse // iP11.set_j( iPc.get_j() + ce *j - se *i ); // iP11.set_i( iPc.get_i() -( se *j + ce *i) ); iP11.set_j( iPc.get_j() + ce *j + se *i ); iP11.set_i( iPc.get_i() - se *j + ce *i ); vpDisplay::displayCross(I, iP11, 5, col) ; double theta ; computeTheta(theta, K, iP11) ; // If point is in the image, add to the sample list if(!outOfImage(vpMath::round(iP11.get_i()), vpMath::round(iP11.get_j()), 
0, height, width)) { vpMeSite pix ; pix.init((int)iP11.get_i(), (int)iP11.get_j(), theta) ; pix.setDisplay(selectDisplay) ; pix.setState(vpMeSite::NO_SUPPRESSION); if(vpDEBUG_ENABLE(3)) { vpDisplay::displayCross(I,iP11, 5, vpColor::blue); } list.push_back(pix); angle.push_back(k); } k += incr ; } vpMeTracker::initTracking(I) ; n_sample = (unsigned int)list.size() ; vpCDEBUG(1) << "end vpMeEllipse::sample() : " ; vpCDEBUG(1) << n_sample << " point inserted in the list " << std::endl ; }