/*!
  Get the region of interest clipped in the image, together with the
  information indicating whether each point has been clipped.

  \param cam : Camera parameters.
  \param roi : Image points corresponding to the region of interest, with
  their clipping information.
  \param cMo : Pose.
*/
void
vpPolygon3D::getRoiClipped(const vpCameraParameters &cam,
                           std::vector<std::pair<vpImagePoint,unsigned int> > &roi,
                           const vpHomogeneousMatrix &cMo)
{
  changeFrame(cMo);
  computePolygonClipped(cam);
  getRoiClipped(cam, roi);
}
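/*
  Usage sketch (illustrative only, not part of the library): assuming a
  vpPolygon3D instance named "polygon" whose 3D points have already been set,
  the clipped ROI could be retrieved for a given pose "cMo" and camera
  parameters "cam" as follows:

    std::vector<std::pair<vpImagePoint, unsigned int> > roiClipped;
    polygon.getRoiClipped(cam, roiClipped, cMo);
    for (unsigned int i = 0; i < roiClipped.size(); i++) {
      const vpImagePoint &ip = roiClipped[i].first;   // 2D point in the image
      unsigned int clipInfo  = roiClipped[i].second;  // clipping information for this point
      // ... use ip / clipInfo ...
    }
*/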
/*!
  Set all the pixels that lie inside the ROI to the value _nb (default is 255).

  \param _mask : The mask to update (0 for pixels outside the object, _nb otherwise).
  \param _nb : Optional value assigned to the pixels included in the face.
  \param _shiftBorder : Optional border shift, in pixels (a kind of built-in
  erosion), used to avoid considering pixels close to the limits of the face.
*/
void
vpMbtKltPolygon::updateMask(IplImage* _mask, unsigned int _nb, unsigned int _shiftBorder)
{
  int width = _mask->width;
  int i_min, i_max, j_min, j_max;
  cam.computeFov((unsigned)_mask->width, (unsigned)_mask->height);
  computeRoiClipped(cam);
  std::vector<vpImagePoint> roi;
  getRoiClipped(cam, roi);
  vpMbtPolygon::getMinMaxRoi(roi, i_min, i_max, j_min, j_max);

  /* check image boundaries */
  if(i_min > _mask->height){ //underflow
    i_min = 0;
  }
  if(i_max > _mask->height){
    i_max = _mask->height;
  }
  if(j_min > _mask->width){ //underflow
    j_min = 0;
  }
  if(j_max > _mask->width){
    j_max = _mask->width;
  }

  double shiftBorder_d = (double) _shiftBorder;
  char* ptrData = _mask->imageData + i_min*width + j_min;
  for(int i=i_min; i< i_max; i++){
    double i_d = (double) i;
    for(int j=j_min; j< j_max; j++){
      double j_d = (double) j;
      if(_shiftBorder != 0){
        if( vpMbtKltPolygon::isInside(roi, i_d, j_d)
            && vpMbtKltPolygon::isInside(roi, i_d+shiftBorder_d, j_d+shiftBorder_d)
            && vpMbtKltPolygon::isInside(roi, i_d-shiftBorder_d, j_d+shiftBorder_d)
            && vpMbtKltPolygon::isInside(roi, i_d+shiftBorder_d, j_d-shiftBorder_d)
            && vpMbtKltPolygon::isInside(roi, i_d-shiftBorder_d, j_d-shiftBorder_d) ){
          *(ptrData++) = _nb;
        }
        else{
          ptrData++;
        }
      }
      else{
        if(vpMbtKltPolygon::isInside(roi, i, j)){
          *(ptrData++) = _nb;
        }
        else{
          ptrData++;
        }
      }
    }
    // jump to the beginning of the ROI on the next row
    ptrData += width - j_max + j_min;
  }
}
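/*
  Usage sketch (illustrative only): assuming a vpMbtKltPolygon "polygon" that
  has already been initialised, the mask could be built with the legacy OpenCV
  C API as follows. The image size, the value 255 and the 2 pixel border shift
  are arbitrary choices for the example.

    IplImage *mask = cvCreateImage(cvSize(640, 480), IPL_DEPTH_8U, 1);
    cvZero(mask);                       // 0 everywhere: not in the object
    polygon.updateMask(mask, 255, 2);   // 255 inside the face, shrunk by 2 pixels
    // ... use the mask to restrict feature detection to the face ...
    cvReleaseImage(&mask);
*/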
/*!
  Initialise the face to track. All the points provided by the tracker, i.e.
  all the points detected in the image, are parsed in order to extract the ids
  of the points that actually lie inside the face.

  \param _tracker : ViSP OpenCV KLT tracker.
*/
void
vpMbtKltPolygon::init(const vpKltOpencv& _tracker)
{
  // extract the ids of the points in the face
  nbPointsInit = 0;
  nbPointsCur = 0;
  initPoints = std::map<int, vpImagePoint>();
  curPoints = std::map<int, vpImagePoint>();
  curPointsInd = std::map<int, int>();

  std::vector<vpImagePoint> roi;
  getRoiClipped(cam, roi);

  for (unsigned int i = 0; i < static_cast<unsigned int>(_tracker.getNbFeatures()); i ++){
    int id;
    float x_tmp, y_tmp;
    _tracker.getFeature((int)i, id, x_tmp, y_tmp);

    if(isInside(roi, y_tmp, x_tmp)){
      initPoints[id] = vpImagePoint(y_tmp, x_tmp);
      curPoints[id] = vpImagePoint(y_tmp, x_tmp);
      curPointsInd[id] = (int)i;
      nbPointsInit++;
      nbPointsCur++;
    }
  }

  // initialisation of the values used for the computation in SE3
  vpPlane plan(p[0], p[1], p[2]);
  d0 = plan.getD();
  N = plan.getNormal();

  N.normalize();
  N_cur = N;
  invd0 = 1.0 / d0;
}
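/*
  Usage sketch (illustrative only): the face is typically initialised right
  after the KLT features have been detected on the current image. Assuming a
  vpKltOpencv tracker "klt" whose features have already been detected on the
  current IplImage (e.g. through its initTracking() entry point), and a
  vpMbtKltPolygon "polygon" whose 3D points and camera parameters are set:

    // klt.initTracking(cvI);  // detect the initial KLT features on cvI
    polygon.init(klt);         // keep only the features lying inside this face
*/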
/*!
  Check if the polygon is visible in the image and if the angle between the
  normal to the face and the line of sight going from the optical center to
  the center of gravity of the face is below the given threshold. To do that,
  the polygon is projected into the image using the camera pose.

  \param cMo : The pose of the camera.
  \param alpha : Maximum angle below which the face is considered visible (in rad).
  \param modulo : Indicates whether the test should also consider faces that
  are not oriented counter clockwise. If true, the orientation of the face
  does not matter.
  \param cam : Camera parameters (intrinsic parameters).
  \param I : Image used to consider the level of detail.

  \return true if the polygon is visible.
*/
bool
vpMbtPolygon::isVisible(const vpHomogeneousMatrix &cMo, const double alpha, const bool &modulo,
                        const vpCameraParameters &cam, const vpImage<unsigned char> &I)
{
  //  std::cout << "Computing angle from MBT Face (cMo, alpha)" << std::endl;

  changeFrame(cMo);

  if(nbpt <= 2) {
    // Level of detail (LOD)
    if(useLod) {
      vpCameraParameters c = cam;
      if(clippingFlag > 3) { // Contains at least one FOV constraint
        c.computeFov(I.getWidth(), I.getHeight());
      }
      computePolygonClipped(c);
      std::vector<vpImagePoint> roiImagePoints;
      getRoiClipped(c, roiImagePoints);

      if (roiImagePoints.size() == 2) {
        double x1 = roiImagePoints[0].get_u();
        double y1 = roiImagePoints[0].get_v();
        double x2 = roiImagePoints[1].get_u();
        double y2 = roiImagePoints[1].get_v();
        double length = std::sqrt((x1 - x2) * (x1 - x2) + (y1 - y2) * (y1 - y2));
//        std::cout << "Index=" << index << " ; Line length=" << length << " ; clippingFlag=" << clippingFlag << std::endl;
//        vpTRACE("index=%d lenght=%f minLineLengthThresh=%f", index, length, minLineLengthThresh);

        if (length < minLineLengthThresh) {
          isvisible = false;
          isappearing = false;
          return false;
        }
      }
    }

    /* a line is always visible when LOD is not used */
    isvisible = true;
    isappearing = false;
    return true;
  }

  // If the polygon has no orientation, the angle check for visibility is always valid.
  // Feature mainly used for cylinders.
  if(!hasOrientation)
  {
    isvisible = true;
    isappearing = false;
    return true;
  }

  // Check visibility from the normal.
  // Newell's method for computing the normal of an arbitrary 3D polygon:
  // https://www.opengl.org/wiki/Calculating_a_Surface_Normal
  vpColVector faceNormal(3);
  vpColVector currentVertex, nextVertex;
  for(unsigned int i = 0; i<nbpt; i++) {
    currentVertex = p[i].cP;
    nextVertex = p[(i+1) % nbpt].cP;

    faceNormal[0] += (currentVertex[1] - nextVertex[1]) * (currentVertex[2] + nextVertex[2]);
    faceNormal[1] += (currentVertex[2] - nextVertex[2]) * (currentVertex[0] + nextVertex[0]);
    faceNormal[2] += (currentVertex[0] - nextVertex[0]) * (currentVertex[1] + nextVertex[1]);
  }
  faceNormal.normalize();

  vpColVector e4(3);
  vpPoint pt;
  for (unsigned int i = 0; i < nbpt; i += 1){
    pt.set_X(pt.get_X() + p[i].get_X());
    pt.set_Y(pt.get_Y() + p[i].get_Y());
    pt.set_Z(pt.get_Z() + p[i].get_Z());
  }
  e4[0] = -pt.get_X() / (double)nbpt;
  e4[1] = -pt.get_Y() / (double)nbpt;
  e4[2] = -pt.get_Z() / (double)nbpt;
  e4.normalize();

  double angle = acos(vpColVector::dotProd(e4, faceNormal));

//  vpCTRACE << angle << "/" << vpMath::deg(angle) << "/" << vpMath::deg(alpha) << std::endl;

  if( angle < alpha || (modulo && (M_PI - angle) < alpha)) {
    isvisible = true;
    isappearing = false;

    if (useLod) {
      vpCameraParameters c = cam;
      if(clippingFlag > 3) { // Contains at least one FOV constraint
        c.computeFov(I.getWidth(), I.getHeight());
      }
      computePolygonClipped(c);
      std::vector<vpImagePoint> roiImagePoints;
      getRoiClipped(c, roiImagePoints);

      vpPolygon roiPolygon(roiImagePoints);
      double area = roiPolygon.getArea();
//      std::cout << "After normal test ; Index=" << index << " ; area=" << area << " ; clippingFlag="
//                << clippingFlag << std::endl;
      if (area < minPolygonAreaThresh) {
        isappearing = false;
        isvisible = false;
        return false;
      }
    }

    return true;
  }

  if (angle < alpha+vpMath::rad(1) ){
    isappearing = true;
  }
  else if (modulo && (M_PI - angle) < alpha+vpMath::rad(1) ){
    isappearing = true;
  }
  else {
    isappearing = false;
  }

  isvisible = false;
  return false;
}
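/*
  Usage sketch (illustrative only): a face is typically tested for visibility
  before its features are used in the tracking loop. Assuming a vpMbtPolygon
  "polygon" with its 3D points set, a pose "cMo", camera parameters "cam", an
  image "I", and a 75 degree threshold chosen arbitrarily for the example:

    double angleThreshold = vpMath::rad(75.);
    if (polygon.isVisible(cMo, angleThreshold, false, cam, I)) {
      // the face is visible: it can contribute to the tracking
    }
*/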