/*!
  Create a vpMbScanLineEdge from two points while ordering them.

  \param a : First point of the line.
  \param b : Second point of the line.

  \return Resulting vpMbScanLineEdge.
*/
vpMbScanLine::vpMbScanLineEdge vpMbScanLine::makeMbScanLineEdge(const vpPoint &a, const vpPoint &b)
{
  vpColVector _a(3);
  vpColVector _b(3);

  // Quantize each coordinate to a 1e-2 resolution ((X * 1e8) * 1e-6 = 100 * X,
  // then ceil) so that numerically close endpoints map to the same edge.
  _a[0] = std::ceil((a.get_X() * 1e8) * 1e-6);
  _a[1] = std::ceil((a.get_Y() * 1e8) * 1e-6);
  _a[2] = std::ceil((a.get_Z() * 1e8) * 1e-6);

  _b[0] = std::ceil((b.get_X() * 1e8) * 1e-6);
  _b[1] = std::ceil((b.get_Y() * 1e8) * 1e-6);
  _b[2] = std::ceil((b.get_Z() * 1e8) * 1e-6);

  // Compare the endpoints lexicographically so that (a,b) and (b,a)
  // yield the same ordered edge.
  bool b_comp = false;
  for (unsigned int i = 0; i < 3; ++i)
    if (_a[i] < _b[i]) {
      b_comp = true;
      break;
    }
    else if (_a[i] > _b[i])
      break;

  if (b_comp)
    return std::make_pair(_a, _b);

  return std::make_pair(_b, _a);
}
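/*
  Example (not part of ViSP): the quantize-then-order step above makes the
  returned pair usable as a canonical key, e.g. in a std::map, since the edge
  (a, b) and the edge (b, a) produce the same entry. A standalone sketch of
  the same idea with plain arrays and hypothetical values:
*/
#include <algorithm>
#include <array>
#include <cmath>
#include <iostream>
#include <map>
#include <utility>

using Key = std::array<long, 3>;

// Quantize a 3D point to a 1e-2 resolution, as makeMbScanLineEdge does.
static Key quantize(double X, double Y, double Z)
{
  return { (long)std::ceil(X * 1e2), (long)std::ceil(Y * 1e2), (long)std::ceil(Z * 1e2) };
}

int main()
{
  Key a = quantize(0.105, 0.2, 1.0);
  Key b = quantize(0.5, -0.3, 2.0);

  // Order the endpoints lexicographically: (a,b) and (b,a) give the same key.
  std::pair<Key, Key> e1 = std::minmax(a, b);
  std::pair<Key, Key> e2 = std::minmax(b, a);

  std::map<std::pair<Key, Key>, int> edges;
  edges[e1] = 1;
  std::cout << (e1 == e2) << " " << edges.count(e2) << std::endl; // prints: 1 1
  return 0;
}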
/*!
  Create a vpColVector of a projected point.

  \param p : Point to project.
  \param v : Resulting vector.
  \param K : Camera parameters.
*/
void vpMbScanLine::createVectorFromPoint(const vpPoint &p, vpColVector &v, const vpCameraParameters &K)
{
  v = vpColVector(3);

  v[0] = p.get_X() * K.get_px() + K.get_u0() * p.get_Z();
  v[1] = p.get_Y() * K.get_py() + K.get_v0() * p.get_Z();
  v[2] = p.get_Z();
}
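/*
  Example (not part of ViSP): the vector built above is the point in
  homogeneous pixel coordinates, so dividing by the third component v[2] = Z
  recovers the usual (u, v) pixel coordinates. A minimal check with
  hypothetical intrinsics:
*/
#include <visp3/core/vpCameraParameters.h>
#include <visp3/core/vpPoint.h>
#include <iostream>

int main()
{
  vpCameraParameters K(600.0, 600.0, 320.0, 240.0); // px, py, u0, v0
  vpPoint p;
  p.set_X(0.1);
  p.set_Y(-0.05);
  p.set_Z(0.5); // 3D coordinates in the camera frame

  // Same arithmetic as createVectorFromPoint().
  double u_h = p.get_X() * K.get_px() + K.get_u0() * p.get_Z();
  double v_h = p.get_Y() * K.get_py() + K.get_v0() * p.get_Z();
  double w = p.get_Z();

  // Perspective division: u = (X/Z) * px + u0, v = (Y/Z) * py + v0.
  std::cout << "u = " << u_h / w << ", v = " << v_h / w << std::endl;
  return 0;
}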
/*!
  Interpolate two vpPoints.

  \param a : First point.
  \param b : Second point.
  \param alpha : Interpolation factor.

  \return Interpolated vpPoint.
*/
vpPoint vpMbScanLine::mix(const vpPoint &a, const vpPoint &b, double alpha)
{
  vpPoint res;
  res.set_X(a.get_X() + (b.get_X() - a.get_X()) * alpha);
  res.set_Y(a.get_Y() + (b.get_Y() - a.get_Y()) * alpha);
  res.set_Z(a.get_Z() + (b.get_Z() - a.get_Z()) * alpha);
  return res;
}
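/*
  Example (not part of ViSP): mix() is a plain linear interpolation, so
  alpha = 0 returns a, alpha = 1 returns b, and alpha = 0.5 the midpoint.
  The same arithmetic reproduced standalone, since mix() is internal to
  vpMbScanLine:
*/
#include <visp3/core/vpPoint.h>
#include <iostream>

int main()
{
  vpPoint a, b;
  a.set_X(0.0); a.set_Y(0.0); a.set_Z(1.0);
  b.set_X(2.0); b.set_Y(4.0); b.set_Z(3.0);

  double alpha = 0.5;
  vpPoint res;
  res.set_X(a.get_X() + (b.get_X() - a.get_X()) * alpha);
  res.set_Y(a.get_Y() + (b.get_Y() - a.get_Y()) * alpha);
  res.set_Z(a.get_Z() + (b.get_Z() - a.get_Z()) * alpha);

  std::cout << res.get_X() << " " << res.get_Y() << " " << res.get_Z() << std::endl; // 1 2 2
  return 0;
}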
/*!
  Initialize a point feature with polar coordinates \f$(\rho,\theta)\f$ using
  the coordinates of the point \f$(x,y,Z)\f$, where \f$(x,y)\f$ correspond to
  the perspective projection of the point in the image plane and \f$Z\f$ the
  3D depth of the point in the camera frame. The values of \f$(x,y,Z)\f$ are
  expressed in meters.

  This function is designed to introduce noise in the conversion from
  cartesian to polar coordinates. Cartesian \f$(x,y)\f$ coordinates are first
  converted into pixel coordinates in the image using the \e goodCam camera
  parameters. Then, the pixel coordinates of the point are converted back
  into cartesian coordinates \f$(x^{'},y^{'})\f$ using the noisy camera
  parameters \e wrongCam. From these new coordinates in the image plane, the
  polar coordinates are computed by:

  \f[\rho = \sqrt{{x^{'}}^2+{y^{'}}^2}  \hbox{,}\; \; \theta = \arctan \frac{y^{'}}{x^{'}}\f]

  \param s : Visual feature \f$(\rho,\theta)\f$ and \f$Z\f$ to initialize.

  \param goodCam : Camera parameters used to convert the cartesian
  coordinates of the point \e p in the image plane into pixel coordinates.

  \param wrongCam : Noisy camera parameters used to convert the pixel
  coordinates of the point back into cartesian coordinates in the image
  plane. The mismatch between \e goodCam and \e wrongCam is what introduces
  the noise.

  \param p : A point with \f$(x,y)\f$ cartesian coordinates in the image
  plane corresponding to the camera perspective projection, and with 3D
  depth \f$Z\f$.
*/
void vpFeatureBuilder::create(vpFeaturePointPolar &s,
                              const vpCameraParameters &goodCam,
                              const vpCameraParameters &wrongCam,
                              const vpPoint &p)
{
  try {
    double x = p.get_x();
    double y = p.get_y();

    s.set_Z(p.get_Z());

    // Project with the good parameters, back-project with the wrong ones
    // to obtain noisy (x,y) coordinates in the image plane.
    double u = 0, v = 0;
    vpMeterPixelConversion::convertPoint(goodCam, x, y, u, v);
    vpPixelMeterConversion::convertPoint(wrongCam, u, v, x, y);

    double rho = sqrt(x * x + y * y);
    double theta = atan2(y, x);

    s.set_rho(rho);
    s.set_theta(theta);
  }
  catch (...) {
    vpERROR_TRACE("Error caught");
    throw;
  }
}
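/*
  Example (not part of ViSP): the noise comes entirely from the mismatch
  between the two parameter sets. A minimal round trip with hypothetical
  intrinsics:
*/
#include <visp3/core/vpCameraParameters.h>
#include <visp3/core/vpMeterPixelConversion.h>
#include <visp3/core/vpPixelMeterConversion.h>
#include <iostream>

int main()
{
  vpCameraParameters goodCam(600.0, 600.0, 320.0, 240.0);
  vpCameraParameters wrongCam(610.0, 590.0, 322.0, 238.0); // perturbed intrinsics

  double x = 0.1, y = -0.05; // image-plane coordinates, in meters
  double u = 0, v = 0;
  vpMeterPixelConversion::convertPoint(goodCam, x, y, u, v);
  vpPixelMeterConversion::convertPoint(wrongCam, u, v, x, y);

  // (x, y) now differ slightly from the original values.
  std::cout << "noisy x = " << x << ", y = " << y << std::endl;
  return 0;
}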
/*!
  Initialize a point feature with polar coordinates \f$(\rho,\theta)\f$ using
  the coordinates of the point \f$(x,y,Z)\f$, where \f$(x,y)\f$ correspond to
  the perspective projection of the point in the image plane and \f$Z\f$ the
  3D depth of the point in the camera frame. The values of \f$(x,y,Z)\f$ are
  expressed in meters.

  From the coordinates in the image plane, the polar coordinates are computed by:

  \f[\rho = \sqrt{x^2+y^2}  \hbox{,}\; \; \theta = \arctan \frac{y}{x}\f]

  \param s : Visual feature \f$(\rho,\theta)\f$ and \f$Z\f$ to initialize.

  \param p : A point with \f$(x,y)\f$ cartesian coordinates in the image
  plane corresponding to the camera perspective projection, and with 3D
  depth \f$Z\f$.
*/
void vpFeatureBuilder::create(vpFeaturePointPolar &s, const vpPoint &p)
{
  try {
    double x = p.get_x();
    double y = p.get_y();

    double rho = sqrt(x * x + y * y);
    double theta = atan2(y, x);

    s.set_rho(rho);
    s.set_theta(theta);

    s.set_Z(p.get_Z());

    if (s.get_Z() < 0) {
      vpERROR_TRACE("Point is behind the camera");
      std::cout << "Z = " << s.get_Z() << std::endl;

      throw(vpFeatureException(vpFeatureException::badInitializationError,
                               "Point is behind the camera"));
    }

    if (fabs(s.get_Z()) < 1e-6) {
      vpERROR_TRACE("Point Z coordinate is null");
      std::cout << "Z = " << s.get_Z() << std::endl;

      throw(vpFeatureException(vpFeatureException::badInitializationError,
                               "Point Z coordinate is null"));
    }
  }
  catch (...) {
    vpERROR_TRACE("Error caught");
    throw;
  }
}
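/*
  Example (not part of ViSP): building a polar point feature from a projected
  point. The world coordinates and pose below are hypothetical.
*/
#include <visp3/core/vpHomogeneousMatrix.h>
#include <visp3/core/vpPoint.h>
#include <visp3/visual_features/vpFeatureBuilder.h>
#include <visp3/visual_features/vpFeaturePointPolar.h>

int main()
{
  vpPoint p;
  p.setWorldCoordinates(0.1, 0.2, 0.0); // 3D point in the object frame

  vpHomogeneousMatrix cMo(0, 0, 1.0, 0, 0, 0); // object 1 m in front of the camera
  p.project(cMo); // fills (x, y) in the image plane and Z in the camera frame

  vpFeaturePointPolar s;
  vpFeatureBuilder::create(s, p); // throws if the point is behind the camera or Z is (near) zero
  return 0;
}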
/*!
  Get the clipped points according to a plane equation, given here by its normal.

  \param p1 : First extremity of the line.
  \param p2 : Second extremity of the line.
  \param p1Clipped : Resulting p1.
  \param p2Clipped : Resulting p2.
  \param p1ClippedInfo : Resulting clipping flag for p1.
  \param p2ClippedInfo : Resulting clipping flag for p2.
  \param normal : Normal of the clipping plane, which passes through the origin of the camera frame.
  \param flag : Flag specifying the clipping used when calling this function.

  \return False if the line lies entirely outside the plane (both extremities
  rejected), true otherwise (the line is kept, possibly with one extremity clipped).
*/
bool vpPolygon3D::getClippedPointsFovGeneric(const vpPoint &p1, const vpPoint &p2,
                                             vpPoint &p1Clipped, vpPoint &p2Clipped,
                                             unsigned int &p1ClippedInfo, unsigned int &p2ClippedInfo,
                                             const vpColVector &normal, const unsigned int &flag)
{
  vpRowVector p1Vec(3);
  p1Vec[0] = p1.get_X();
  p1Vec[1] = p1.get_Y();
  p1Vec[2] = p1.get_Z();
  p1Vec = p1Vec.normalize();

  vpRowVector p2Vec(3);
  p2Vec[0] = p2.get_X();
  p2Vec[1] = p2.get_Y();
  p2Vec[2] = p2.get_Z();
  p2Vec = p2Vec.normalize();

  if ((clippingFlag & flag) == flag) {
    // Angle between each normalized viewing direction and the plane normal;
    // an angle below pi/2 (positive dot product) means the point lies on the
    // normal side of the plane and has to be rejected.
    double beta1 = acos(p1Vec * normal);
    double beta2 = acos(p2Vec * normal);

    if (beta1 < M_PI / 2.0 && beta2 < M_PI / 2.0) // both points rejected
      return false;
    else if (beta1 < M_PI / 2.0 || beta2 < M_PI / 2.0) { // one point rejected: clip it on the plane
      vpPoint pClipped;
      // Parameter t of the intersection between the segment [p1, p2] and the
      // plane of the given normal passing through the origin.
      double t = -(normal[0] * p1.get_X() + normal[1] * p1.get_Y() + normal[2] * p1.get_Z());
      t = t / (normal[0] * (p2.get_X() - p1.get_X()) + normal[1] * (p2.get_Y() - p1.get_Y()) +
               normal[2] * (p2.get_Z() - p1.get_Z()));

      pClipped.set_X((p2.get_X() - p1.get_X()) * t + p1.get_X());
      pClipped.set_Y((p2.get_Y() - p1.get_Y()) * t + p1.get_Y());
      pClipped.set_Z((p2.get_Z() - p1.get_Z()) * t + p1.get_Z());

      if (beta1 < M_PI / 2.0) {
        p1ClippedInfo = p1ClippedInfo | flag;
        p1Clipped = pClipped;
      }
      else {
        p2ClippedInfo = p2ClippedInfo | flag;
        p2Clipped = pClipped;
      }
    }
  }

  return true;
}
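/*
  Example (not part of ViSP): the parameter t above solves the line-plane
  intersection n . (p1 + t * (p2 - p1)) = 0 for a plane through the origin.
  A standalone check with hypothetical numbers (plane x = 0, normal (1,0,0)):
*/
#include <iostream>

int main()
{
  double n[3] = { 1.0, 0.0, 0.0 };   // plane normal
  double P1[3] = { -1.0, 0.0, 1.0 }; // on the negative side of the plane
  double P2[3] = { 3.0, 0.0, 1.0 };  // on the positive side

  double t = -(n[0] * P1[0] + n[1] * P1[1] + n[2] * P1[2]);
  t /= n[0] * (P2[0] - P1[0]) + n[1] * (P2[1] - P1[1]) + n[2] * (P2[2] - P1[2]);

  // Intersection point p1 + t * (p2 - p1); here t = 0.25 and X = 0 as expected.
  std::cout << "t = " << t << ", X = " << P1[0] + t * (P2[0] - P1[0]) << std::endl;
  return 0;
}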
/*!
  Unary function to convert the 3D coordinates in the camera frame to a cv::Point3d.

  \param point : Point to convert.

  \return A cv::Point3d with the 3D coordinates stored in vpPoint in the camera frame.
*/
cv::Point3d vpConvert::vpCamPointToPoint3d(const vpPoint &point)
{
  return cv::Point3d(point.get_X(), point.get_Y(), point.get_Z());
}
/*!
  Unary function to convert the 3D coordinates in the camera frame to a cv::Point3f.

  \param point : Point to convert.

  \return A cv::Point3f with the 3D coordinates stored in vpPoint in the camera frame.
*/
cv::Point3f vpConvert::vpCamPointToPoint3f(const vpPoint &point)
{
  return cv::Point3f((float)point.get_X(), (float)point.get_Y(), (float)point.get_Z());
}
/*!
  Get the clipped points according to a near or far clipping plane located at
  the given Z distance.

  \param p1 : First extremity of the line.
  \param p2 : Second extremity of the line.
  \param p1Clipped : Resulting p1.
  \param p2Clipped : Resulting p2.
  \param p1ClippedInfo : Resulting clipping flag for p1.
  \param p2ClippedInfo : Resulting clipping flag for p2.
  \param flag : vpPolygon3D::NEAR_CLIPPING or vpPolygon3D::FAR_CLIPPING.
  \param distance : Z distance of the clipping plane.

  \return False if the line lies entirely beyond the clipping plane, true otherwise.
*/
bool vpPolygon3D::getClippedPointsDistance(const vpPoint &p1, const vpPoint &p2,
                                           vpPoint &p1Clipped, vpPoint &p2Clipped,
                                           unsigned int &p1ClippedInfo, unsigned int &p2ClippedInfo,
                                           const unsigned int &flag, const double &distance)
{
  // Since p1 and p1Clipped can be the same object, as well as p2 and p2Clipped,
  // we introduce two temporary points to avoid a valgrind
  // "Source and destination overlap in memcpy" error.
  vpPoint p1Clipped_, p2Clipped_;
  p1Clipped_ = p1;
  p2Clipped_ = p2;

  p1Clipped = p1Clipped_;
  p2Clipped = p2Clipped_;

  // test1: both points are beyond the clipping plane -> reject the line.
  bool test1 = (p1Clipped.get_Z() < distance && p2Clipped.get_Z() < distance);
  if (flag == vpPolygon3D::FAR_CLIPPING)
    test1 = (p1Clipped.get_Z() > distance && p2Clipped.get_Z() > distance);

  // test2: at least one point is beyond the clipping plane -> clip it.
  bool test2 = (p1Clipped.get_Z() < distance || p2Clipped.get_Z() < distance);
  if (flag == vpPolygon3D::FAR_CLIPPING)
    test2 = (p1Clipped.get_Z() > distance || p2Clipped.get_Z() > distance);

  // test3: selects which of the two points has to be clipped.
  bool test3 = (p1Clipped.get_Z() < distance);
  if (flag == vpPolygon3D::FAR_CLIPPING)
    test3 = (p1Clipped.get_Z() > distance);

  if (test1)
    return false;
  else if (test2) {
    vpPoint pClipped;
    // Parameter t of the intersection between the segment and the plane Z = distance.
    double t = (p2Clipped.get_Z() - p1Clipped.get_Z());
    t = (distance - p1Clipped.get_Z()) / t;

    pClipped.set_X((p2Clipped.get_X() - p1Clipped.get_X()) * t + p1Clipped.get_X());
    pClipped.set_Y((p2Clipped.get_Y() - p1Clipped.get_Y()) * t + p1Clipped.get_Y());
    pClipped.set_Z(distance);

    if (test3) {
      p1Clipped = pClipped;
      if (flag == vpPolygon3D::FAR_CLIPPING)
        p1ClippedInfo = p1ClippedInfo | vpPolygon3D::FAR_CLIPPING;
      else
        p1ClippedInfo = p1ClippedInfo | vpPolygon3D::NEAR_CLIPPING;
    }
    else {
      p2Clipped = pClipped;
      if (flag == vpPolygon3D::FAR_CLIPPING)
        p2ClippedInfo = p2ClippedInfo | vpPolygon3D::FAR_CLIPPING;
      else
        p2ClippedInfo = p2ClippedInfo | vpPolygon3D::NEAR_CLIPPING;
    }
  }

  return true;
}
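/*
  Example (not part of ViSP): clipping a segment against a near plane at
  Z = 0.1 m, assuming getClippedPointsDistance() is callable as a public
  static member. The values are hypothetical; p1 starts closer than the
  near plane and gets moved onto it.
*/
#include <visp3/core/vpPoint.h>
#include <visp3/core/vpPolygon3D.h>
#include <iostream>

int main()
{
  vpPoint p1, p2;
  p1.set_X(0.0); p1.set_Y(0.0); p1.set_Z(0.05);
  p2.set_X(0.2); p2.set_Y(0.0); p2.set_Z(0.5);

  vpPoint p1c, p2c;
  unsigned int info1 = 0, info2 = 0;
  bool kept = vpPolygon3D::getClippedPointsDistance(p1, p2, p1c, p2c, info1, info2,
                                                    vpPolygon3D::NEAR_CLIPPING, 0.1);

  // kept == true, p1c.get_Z() == 0.1, and info1 has the NEAR_CLIPPING bit set.
  std::cout << kept << " " << p1c.get_Z() << std::endl;
  return 0;
}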
/*!
  Compute the Euclidean distance between two vpPoints.

  \param a : First point.
  \param b : Second point.

  \return Resulting distance.
*/
double vpMbScanLine::norm(const vpPoint &a, const vpPoint &b)
{
  return sqrt(vpMath::sqr(a.get_X() - b.get_X()) + vpMath::sqr(a.get_Y() - b.get_Y()) +
              vpMath::sqr(a.get_Z() - b.get_Z()));
}