static double toLine (double x, double y, const double intX [], const double intY [], int i) { int nearby; if (i == 6) { double a7 = atan2 (intY [7] - bodyY, intX [7] - bodyX); double a6 = atan2 (intY [6] - bodyY, intX [6] - bodyX); double a = atan2 (y - bodyY, x - bodyX); double da6 = arcLength (a7, a6); double da = arcLength (a7, a); if (da <= da6) return fabs (sqrt ((bodyX - x) * (bodyX - x) + (bodyY - y) * (bodyY - y)) - bodyRadius); else nearby = arcLength (a7 + 0.5 * da6, a) < NUMpi ? 6 : 7; } else if ((x - intX [i]) * (intX [i + 1] - intX [i]) + (y - intY [i]) * (intY [i + 1] - intY [i]) < 0) { nearby = i; } else if ((x - intX [i + 1]) * (intX [i] - intX [i + 1]) + (y - intY [i + 1]) * (intY [i] - intY [i + 1]) < 0) { nearby = i + 1; } else { double boundaryDistance = sqrt ((intX [i + 1] - intX [i]) * (intX [i + 1] - intX [i]) + (intY [i + 1] - intY [i]) * (intY [i + 1] - intY [i])); double outerProduct = (intX [i] - x) * (intY [i + 1] - intY [i]) - (intY [i] - y) * (intX [i + 1] - intX [i]); return fabs (outerProduct) / boundaryDistance; } return sqrt ((intX [nearby] - x) * (intX [nearby] - x) + (intY [nearby] - y) * (intY [nearby] - y)); }
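Note that arcLength in the snippet above operates on angles (positions around the circular body wall), not on an OpenCV contour. A minimal sketch of such a helper, under the assumption that it returns the counter-clockwise angular difference normalized into [0, 2*pi), with NUMpi being the codebase's pi constant; this is illustrative, not the project's actual definition:

static double arcLength (double a1, double a2) {
    // assumed helper: counter-clockwise angle from a1 to a2, wrapped into [0, 2*pi)
    double da = a2 - a1;
    while (da < 0.0) da += 2.0 * NUMpi;
    while (da >= 2.0 * NUMpi) da -= 2.0 * NUMpi;
    return da;
}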
/* Based on code from : Copyright (c) 2003-2006 Gino van den Bergen / Erwin Coumans http://continuousphysics.com/Bullet/ */ void ompl::base::SO3StateSpace::interpolate(const State *from, const State *to, const double t, State *state) const { assert(fabs(norm(static_cast<const StateType*>(from)) - 1.0) < MAX_QUATERNION_NORM_ERROR); assert(fabs(norm(static_cast<const StateType*>(to)) - 1.0) < MAX_QUATERNION_NORM_ERROR); double theta = arcLength(from, to); if (theta > std::numeric_limits<double>::epsilon()) { double d = 1.0 / sin(theta); double s0 = sin((1.0 - t) * theta); double s1 = sin(t * theta); const StateType *qs1 = static_cast<const StateType*>(from); const StateType *qs2 = static_cast<const StateType*>(to); StateType *qr = static_cast<StateType*>(state); double dq = qs1->x * qs2->x + qs1->y * qs2->y + qs1->z * qs2->z + qs1->w * qs2->w; if (dq < 0) // Take care of long angle case see http://en.wikipedia.org/wiki/Slerp s1 = -s1; qr->x = (qs1->x * s0 + qs2->x * s1) * d; qr->y = (qs1->y * s0 + qs2->y * s1) * d; qr->z = (qs1->z * s0 + qs2->z * s1) * d; qr->w = (qs1->w * s0 + qs2->w * s1) * d; } else { if (state != from) copyState(state, from); } }
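In the slerp above, arcLength(from, to) plays the role of the angular distance theta between two unit quaternions. A minimal sketch of how such a distance is commonly computed (acos of the absolute dot product, clamped against rounding error); this is a hedged assumption about the helper, not a verbatim copy of OMPL's implementation:

#include <algorithm>
#include <cmath>

struct Quat { double x, y, z, w; };

// angular distance between two unit quaternions; |dot| folds q and -q together
double quaternionArcLength(const Quat &a, const Quat &b)
{
    double dq = std::fabs(a.x * b.x + a.y * b.y + a.z * b.z + a.w * b.w);
    dq = std::min(dq, 1.0);   // guard against values slightly above 1 due to rounding
    return std::acos(dq);     // in [0, pi/2]
}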
/* Filter out ill-formed or small cells */ void filterCells( cv::Mat channel, std::vector<std::vector<cv::Point>> contours, std::vector<HierarchyType> contour_mask, std::vector<std::vector<cv::Point>> *filtered_contours ) { for (size_t i = 0; i < contours.size(); i++) { if (contour_mask[i] != HierarchyType::PARENT_CNTR) continue; // Eliminate extremely small contours auto arc_length = arcLength(contours[i], true); if ((contours[i].size() >= 5) && (arc_length <= MAX_ARC_LENGTH_FILTER)) { // Calculate center of the nucleus cv::Moments mu = moments(contours[i], true); cv::Point2f mc = cv::Point2f( static_cast<float>(mu.m10/mu.m00), static_cast<float>(mu.m01/mu.m00) ); cv::Mat circle_mask = cv::Mat::zeros(channel.size(), CV_8UC1); cv::circle(circle_mask, mc, DAPI_MASK_RADIUS, 255, -1, 8); int circle_score = countNonZero(circle_mask); cv::Mat intersection; bitwise_and(circle_mask, channel, intersection); // intersect (not union) the circle with the channel so the coverage ratio stays in [0, 1] int intersection_score = countNonZero(intersection); // Add to the filtered DAPI list if the coverage area exceeds a certain threshold float ratio = ((float) intersection_score) / circle_score; if (ratio >= DAPI_COVERAGE_RATIO) filtered_contours->push_back(contours[i]); } } }
/* Filter out ill-formed or small cells */ void filterCells( ChannelType channel_type, cv::Mat channel, std::vector<std::vector<cv::Point>> contours, std::vector<HierarchyType> contour_mask, std::vector<std::vector<cv::Point>> *filtered_contours ) { for (size_t i = 0; i < contours.size(); i++) { if (contour_mask[i] != HierarchyType::PARENT_CNTR) continue; // Eliminate extremely small contours auto arc_length = arcLength(contours[i], true); if ((contours[i].size() < 5) || (arc_length < MIN_ARC_LENGTH)) continue; switch(channel_type) { case ChannelType::BLUE: { // Calculate center of the nucleus cv::RotatedRect min_area_rect = minAreaRect(cv::Mat(contours[i])); float aspect_ratio = float(min_area_rect.size.width)/min_area_rect.size.height; if (aspect_ratio > 1.0) { aspect_ratio = 1.0/aspect_ratio; } if (aspect_ratio >= CELL_ASPECT_RATIO) { filtered_contours->push_back(contours[i]); } } break; case ChannelType::PURPLE: break; case ChannelType::RED: { filtered_contours->push_back(contours[i]); } break; } } }
double ompl::base::SO3StateSpace::distance(const State *state1, const State *state2) const { BOOST_ASSERT_MSG(satisfiesBounds(state1) && satisfiesBounds(state2), "The states passed to SO3StateSpace::distance are not within bounds. Call " "SO3StateSpace::enforceBounds() in, e.g., ompl::control::ODESolver::PostPropagationEvent, " "ompl::control::StatePropagator, or ompl::base::StateValidityChecker"); return arcLength(state1, state2); }
void Feature::calcGeometryFeature(const Region& region) { circularity = 0; squareness = 0; aspectRatio = 0; roughness = 0; double mysq = 0; double len = 0; const vector<ContourInfo*>& contours = region.contours; for (vector<ContourInfo*>::const_iterator it = contours.begin(); it != contours.end(); it++) { const vector<Point>& contour = (*it)->contour; double area = (*it)->area; double perimeter = arcLength(contour, true); RotatedRect minRect = minAreaRect(Mat(contour)); Rect myrect = boundingRect(Mat(contour)); vector<Point> hull; convexHull(contour, hull); double perimeterHull = arcLength(hull, true); double width = minRect.size.width, height = minRect.size.height; circularity += area * (4 * M_PI * area / (perimeter * perimeter)); squareness += area * (area / (width * height)); mysq += area * (area / (myrect.width * myrect.height)); len = perimeter / (width * height); aspectRatio += area * (1.0 * min(width, height) / max(width, height)); roughness += area * (perimeterHull / perimeter); } circularity /= mArea; squareness /= mArea; mysq /= mArea; aspectRatio /= mArea; roughness /= mArea; #ifdef DEBUG_OUTPUT cout << "circularity ==== " << circularity << endl; cout << "squareness ==== " << squareness << endl; cout << "mysq ==== " << mysq << endl; cout << "len === " << len << endl; cout << "aspectRatio ==== " << aspectRatio << endl; cout << "roughness ==== " << roughness << endl; #endif }
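The circularity accumulated above is the isoperimetric ratio 4*pi*A/P^2, which is 1 for an ideal circle and drops for rougher or more elongated shapes. A small self-contained check of that property with cv::contourArea and cv::arcLength (radius and offsets chosen arbitrarily for illustration):

#include <opencv2/imgproc.hpp>
#include <cmath>
#include <iostream>
#include <vector>

int main()
{
    // densely sampled circle of radius 100 px
    std::vector<cv::Point> circleContour;
    const double r = 100.0;
    for (int deg = 0; deg < 360; ++deg) {
        double a = deg * CV_PI / 180.0;
        circleContour.emplace_back(cvRound(200 + r * std::cos(a)), cvRound(200 + r * std::sin(a)));
    }
    double area = cv::contourArea(circleContour);
    double perimeter = cv::arcLength(circleContour, true);
    std::cout << "circularity: " << 4.0 * CV_PI * area / (perimeter * perimeter) << std::endl;  // ~1.0
    return 0;
}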
bool CColor::existColor(Mat imgCanny) { findContours(imgCanny, contours, hierarchy, CV_RETR_TREE, CV_CHAIN_APPROX_SIMPLE, Point(0, 0)); for (size_t i = 0; i< contours.size(); i++) { approxPolyDP(Mat(contours[i]), approx, arcLength(Mat(contours[i]), true)*0.02, true);// 5, true); // if(fabs(contourArea(approx)) > 1000 && approx.size() == 4 ){ if(! fig.isSquare( approx )) return true; } } return false; }
void ResampleCurve(const vector<double>& curvex, const vector<double>& curvey, vector<double>& resampleX, vector<double>& resampleY, int N, bool isOpen ) { assert(curvex.size()>0 && curvey.size()>0 && curvex.size() == curvey.size()); vector<Point2d> resamplepl(N); resamplepl[0].x = curvex[0]; resamplepl[0].y = curvey[0]; vector<Point2i> pl; PolyLineMerge(pl, curvex, curvey); double pl_length = arcLength(pl, true); double resample_size = pl_length / (double)N; int curr = 0; double dist = 0.0; for (int i = 1; i<N;) { assert(curr <(int)pl.size() - 1); double last_dist = norm(pl[curr] - pl[curr + 1]); dist += last_dist; // cout << curr << " and " << curr+1 << "\t\t" << last_dist << " ("<<dist<<")"<<endl; if (dist >= resample_size) { //put a point on line double _d = last_dist - (dist - resample_size); Point2d cp(pl[curr].x, pl[curr].y), cp1(pl[curr + 1].x, pl[curr + 1].y); Point2d dirv = cp1 - cp; dirv = dirv * (1.0 / norm(dirv)); // cout << "point " << i << " between " << curr << " and " << curr+1 << " remaining " << dist << endl; assert(i <(int)resamplepl.size()); resamplepl[i] = cp + dirv * _d; i++; dist = last_dist - _d; //remaining dist //if remaining dist to next point needs more sampling... (within some epsilon) while (dist - resample_size > 1e-3) { // cout << "point " << i << " between " << curr << " and " << curr+1 << " remaining " << dist << endl; assert(i <(int)resamplepl.size()); resamplepl[i] = resamplepl[i - 1] + dirv * resample_size; dist -= resample_size; i++; } } curr++; } PolyLineSplit(resamplepl, resampleX, resampleY); }
/*! SLCurveBezier::findParamByDist returns the curve parameter of the point that lies a distance s (measured in arc length) beyond Q(t1). Returns FLT_MAX (the maximum SLfloat) if it cannot find it. */ SLfloat SLCurveBezier::findParamByDist(SLfloat t1, SLfloat s) { // ensure that we remain within valid parameter space if (s > arcLength(t1, _points[_points.size()-1].w)) return _points[_points.size()-1].w; // make first guess SLfloat p = t1 + s*(_points[_points.size()-1].w-_points[0].w)/_totalLength; for (SLuint i = 0; i < 32; ++i) { // compute function value and test against zero SLfloat func = arcLength(t1, p) - s; if (SL_abs(func) < 1.0e-03f) return p; // perform Newton-Raphson iteration step SLfloat speed = velocity(p).length(); assert(SL_abs(speed) > FLT_EPSILON); p -= func/speed; } // done iterating, return failure case return FLT_MAX; }
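For reference, the Newton-Raphson step above relies on the standard fact that the derivative of arc length with respect to the curve parameter is the speed along the curve; in LaTeX notation:

\frac{d}{dp} L(t_1, p) = \lVert Q'(p) \rVert, \qquad p_{k+1} = p_k - \frac{L(t_1, p_k) - s}{\lVert Q'(p_k) \rVert}

which is exactly the func/speed update performed in the loop.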
bool SegmentProcessor::ExtractBasicSegmentFeatures(SuperPixel& sp, const Mat& cimg, const Mat& dmap) { if(countNonZero(sp.mask) < 200) return false; // edge detection Mat edgemap; cv::Canny(sp.mask*150, edgemap, 100, 200); //imshow("edge", edgemap); //waitKey(0); Contours curves; std::vector<cv::Vec4i> hierarchy; findContours( edgemap, curves, hierarchy, CV_RETR_LIST, CV_CHAIN_APPROX_NONE ); sp.original_contour = curves[0]; approxPolyDP(sp.original_contour, sp.approx_contour, cv::arcLength(cv::Mat(sp.original_contour), true)*0.02, true); sp.area = countNonZero(sp.mask); sp.box = boundingRect(sp.approx_contour); sp.perimeter = arcLength(curves[0], true); sp.isConvex = isContourConvex(sp.approx_contour); sp.centroid.x = 0; sp.centroid.y = 0; for (int r=sp.box.y; r<sp.box.br().y; r++) { for(int c=sp.box.x; c<sp.box.br().x; c++) { sp.centroid.x += c*sp.mask.at<uchar>(r,c); sp.centroid.y += r*sp.mask.at<uchar>(r,c); } } sp.centroid.x /= sp.area; sp.centroid.y /= sp.area; sp.meanDepth = mean(dmap, sp.mask).val[0]; return true; }
void apritags(const Mat src, const int current_direction, double &direction, bool &state, Mat &result) { Histogram hc; MatND colorhist; Mat thresholded; Mat imageBinary; src.copyTo(result); Mat imageFilter; for (int i=1; i<9; i=i+2) GaussianBlur(src, imageFilter, Size(i, i), 0, 0); //imshow("img", src); //imshow("img_Gaussian", imageFilter); //createTrackbar("alpha", "camera", &alpha, 3, on_track); //on_track(alpha, 0); Mat imageL = imageFilter - Scalar(20,20,20); hc.getHueHistogram(imageL); equalizeHist(hc.v[2], hc.v[2]); //imshow("v[2]",hc.v[2]); threshold(hc.v[2], imageBinary, COLOR_BLACK_TH, 255, 1); //imshow("img_binary", imageBinary); Mat imageClosed; Mat element = getStructuringElement(MORPH_CROSS, Size(7,7), Point(0,0)); morphologyEx(imageBinary, imageClosed, MORPH_CLOSE, element); dilate(imageClosed, imageClosed, element); //imshow("img_open", imageClosed); vector<vector<Point> > contours; findContours(imageClosed, contours, CV_RETR_EXTERNAL, CV_CHAIN_APPROX_NONE, Point(0, 0)); Mat imageContours(imageClosed.size(), CV_8U, Scalar(255)); drawContours(imageContours, contours, -1, Scalar(0), 2); /* calculate the ju*/ vector<Moments> mu(contours.size()); for (int i=0; i<contours.size(); i++) mu[i] = moments(contours[i], false); //imshow("contours", imageContours); //imageContours.copyTo(result); vector<vector<Point> > apcontours; //AprilTags' contours vector<RotatedRect> rotatedRects; /* number of apriltags */ int countAp = 0; float angle[20]; //the rotating angle of each Apriltage double area, length, p; double d = src.cols * src.rows; Point center(src.cols/2, src.rows/2); state = false; double maxp = PROPERTY; cout << "X: " << center.x << " Y: " << center.y << endl; for ( int i=0; i<contours.size(); i++) { area = abs(contourArea( contours[i] )); length = abs(arcLength( contours[i], true )); p = 1.0*area/length; if (p>PROPERTY) { cout << "Area: " << area << " Length: " << length << " Property: " << int(p) << endl; countAp++; apcontours.push_back(contours.at(i)); vector<Point> p = contours.at(i); rotatedRects.push_back(minAreaRect(Mat(p))); angle[countAp-1] = rotatedRects[countAp-1].angle; Point2f mc; mc = Point2f(mu[i].m10/mu[i].m00, mu[i].m01/mu[i].m00); circle(result, mc, 2, Scalar::all(255)); cout << "X: " << mc.x << " Y: " << mc.y << endl; if ((((current_direction == UP) && (mc.y<=center.y)) || ((current_direction == DOWN) && (mc.y>center.y)) || ((current_direction == LEFT) && (mc.x<=center.x)) || ((current_direction == RIGHT) && (mc.x>center.x)))) { if (sqrt(pow(mc.x-center.x, 2.0) + pow(mc.y-center.y, 2.0)) < d) { d = sqrt(pow(mc.x-center.x, 2.0) + pow(mc.y-center.y, 2.0)); //direction = atan2(mc.x-center.x, mc.y-center.y); //direction = direction * 180 / PI; //maxp = p; if (center.y > mc.y) { if ( mc.x>center.x ) //第一象限 { direction=180*atan(float ((mc.x-center.x)/(center.y-mc.y))) / PI; } else //第四象限 { direction=360 + 180*atan(float ((mc.x-center.x)/(center.y-mc.y))) / PI; } } else if (center.y < mc.y) { if (mc.x>center.x ) //第er象限 { direction=180 - 180*atan(float ((mc.x-center.x)/(mc.y-center.y))) / PI; } else //第san象限 { direction=180 + 180*atan(float ((center.x-mc.x)/(mc.y-center.y))) / PI; } } state = true; } } // apriltag is near if ( d<20 ) { state = false; return; } } //cout << "A:" << area << " L:" << length << " P:" << p << endl; } drawContours(result, apcontours, -1, Scalar(255), 5); //waitKey(0); /* vector<Mat> imageAp; Rect rects; for (int i=0; i<countAp; i++) { Point2f rect_points[4]; rotatedRects[i].points(rect_points); cout << rect_points[0].x << " " << 
rect_points[0].y << " " << rect_points[1].x << " " << rect_points[1].y << " " << rect_points[2].x << " " << rect_points[2].y << " " << rect_points[3].x << " " << rect_points[3].y << endl; int x = min(min(min(rect_points[0].x, rect_points[1].x), rect_points[2].x), rect_points[3].x); int y = min(min(min(rect_points[0].y, rect_points[1].y), rect_points[2].y), rect_points[3].y); int rows = max(max(max(rect_points[0].y, rect_points[1].y), rect_points[2].y), rect_points[3].y)-y; int cols = max(max(max(rect_points[0].x, rect_points[1].x), rect_points[2].x), rect_points[3].x)-x; if (x + cols>image.cols) cols = image.cols-x; if (y + rows>image.rows) rows = image.rows-y; rects = Rect(max(x, 0), max(y, 0), cols, rows); setROI(image, imageAp, rects); } for (int i=0; i<imageAp.size(); i++) { Mat imageGray; cvtColor(imageAp[i], imageGray, CV_RGB2GRAY); Mat imageAdaptive; adaptiveThreshold(imageGray, imageAdaptive, 255, ADAPTIVE_THRESH_MEAN_C, THRESH_BINARY , 3, 5); //imshow("6", imageAdaptive); waitKey(0); }*/ }
void DkPageSegmentation::findRectangles(const cv::Mat& img, std::vector<DkPolyRect>& rects, int channel, int threshold) const { cv::Mat imgL; cv::normalize(img, imgL, 255, 0, cv::NORM_MINMAX); // downscale if (scale != 1.0f) cv::resize(imgL, imgL, cv::Size(), scale, scale, CV_INTER_LINEAR); std::vector<std::vector<cv::Point> > contours; int threshStep = dsc::round(255.0 / numThresh); //std::cout << "thresh step: " << threshStep << std::endl; cv::Mat gray; std::vector<DkPolyRect> rectsL; std::vector<int> indexes; if (threshold == -1) { // use less thresholds for a/b channels if (channel > 0) threshStep *= 2; for (int idx = 0; idx < 255; idx += threshStep) indexes.push_back(idx); } else indexes.push_back(threshold); // try several threshold levels for (int thr : indexes) { if (thr == 0) { int thresh = 80; Canny(imgL, gray, thresh, thresh*3, 5); // dilate canny output to remove potential // holes between edge segments //dilate(gray, gray, cv::Mat(), cv::Point(-1,-1)); //cv::imwrite("C:/VSProjects/DocScan/img/tests/edge.png", gray); } else gray = imgL >= thr; cv::erode(gray, gray, cv::Mat(), cv::Point(-1,-1)); // find contours and store them all as a list findContours(gray, contours, CV_RETR_LIST, CV_CHAIN_APPROX_SIMPLE); if (looseDetection) { std::vector<std::vector<cv::Point> > hull; for (int i = 0; i < (int)contours.size(); i++) { double cArea = contourArea(cv::Mat(contours[i])); if (fabs(cArea) > mMinArea*scale*scale && (!mMaxArea || fabs(cArea) < mMaxArea*(scale*scale))) { std::vector<cv::Point> cHull; cv::convexHull(cv::Mat(contours[i]), cHull, false); hull.push_back(cHull); } } contours = hull; } std::vector<cv::Point> approx; // DEBUG ------------------------ //cv::Mat pImg = imgL.clone(); //cv::cvtColor(pImg, pImg, CV_GRAY2BGR); // DEBUG ------------------------ // test each contour for (size_t i = 0; i < contours.size(); i++) { // approximate contour with accuracy proportional // to the contour perimeter approxPolyDP(cv::Mat(contours[i]), approx, arcLength(cv::Mat(contours[i]), true)*0.02, true); double cArea = contourArea(cv::Mat(approx)); // DEBUG ------------------------ //if (fabs(cArea) < mMaxArea) //fillConvexPoly(pImg, &approx[0], (int)approx.size(), cv::Scalar(255,0,0)); // DEBUG ------------------------ // square contours should have 4 vertices after approximation // relatively large area (to filter out noisy contours) // and be convex. 
// Note: absolute value of an area is used because // area may be positive or negative - in accordance with the // contour orientation if (approx.size() == 4 && fabs(cArea) > mMinArea*scale*scale && (!mMaxArea || fabs(cArea) < mMaxArea*scale*scale) && isContourConvex(cv::Mat(approx))) { DkPolyRect cr(approx); //std::cout << mMinArea*scale*scale << " < " << fabs(cArea) << " < " << mMaxArea*scale*scale << std::endl; // if cosines of all angles are small // (all angles are ~90 degree) if(/*cr.maxSide() < std::max(tImg.rows, tImg.cols)*maxSideFactor && */ (!maxSide || cr.maxSide() < maxSide*scale) && cr.getMaxCosine() < 0.3 ) { cr.setChannel(channel); cr.setThreshold(thr); rectsL.push_back(cr); } } } // DEBUG ------------------------ //cv::cvtColor(pImg, pImg, CV_RGB2BGR); //cv::imwrite("C:/VSProjects/DocScan/img/tests/poly" + Utils::num2str(thr) + ".png", pImg); // DEBUG ------------------------ } for (size_t idx = 0; idx < rectsL.size(); idx++) rectsL[idx].scale(1.0f/scale); // filter rectangles which are found because of the image border for (const DkPolyRect& p : rectsL) { DkBox b = p.getBBox(); if (b.size().height < img.rows*maxSideFactor && b.size().width < img.cols*maxSideFactor) { rects.push_back(p); } } //cv::normalize(dbgImg, dbgImg, 255, 0, cv::NORM_MINMAX); }
/** @function thresh_callback */ void thresh_callback(Mat src_gray, Mat& drawing, double scale, Size& ssize) { RNG rng(12345); int thresh = 100; ofstream fout("larget_contour_area.txt"); Mat canny_output; vector<vector<Point> > contours; vector<Vec4i> hierarchy; double max_contour_area(0.0); int largest_contour_index(0); /// Detect edges using canny Canny(src_gray, canny_output, thresh, thresh * 2, 3); /// Find contours findContours(canny_output, contours, hierarchy, CV_RETR_TREE, CV_CHAIN_APPROX_SIMPLE, Point(0, 0)); /// Get the moments vector<Moments> mu(contours.size()); for (int i = 0; i < contours.size(); i++) { mu[i] = moments(contours[i], false); //cout << "# of contour points: " << contours[i].size() << endl; for (int j = 0; j < contours[i].size(); j++) { //cout << "Point(x,y)=" <<i<<" j "<<j<<" "<< contours[i][j] << endl; } } /// Get the mass centers: vector<Point2f> mc(contours.size()); for (int i = 0; i < contours.size(); i++) { mc[i] = Point2f(mu[i].m10 / mu[i].m00, mu[i].m01 / mu[i].m00); } //----------- Find the convex hull object for each contour vector<vector<Point>>hull(contours.size()); for (int i = 0; i < contours.size(); i++){ convexHull(Mat(contours[i]), hull[i], false); } //------------ Draw contours drawing = Mat::zeros(canny_output.size(), CV_8UC3); //Size ssize; ssize = Size((int)(drawing.size().width * scale), (int)(drawing.size().height*scale)); //the dst image size,e.g.100x100 // Calculate the area with the moments 00 and compare with the result of the OpenCV function //printf("\t Info: Area and Contour Length \n"); //cout << "contours.size() " << contours.size() << endl; double countour_Area(0.0), arc_Length(0.0); for (int i = 0; i < contours.size(); i++) { countour_Area = (double)contourArea(contours[i]); arc_Length = (double)arcLength(contours[i], true); //cout << "contourArea [" << i << "] " << ": Moment " << mu[i].m00 // << " OpenCV " << countour_Area << " arcLength " << arc_Length << endl; //cout << "countour_Area "<< countour_Area << " " << endl; if (countour_Area > max_contour_area){ max_contour_area = countour_Area; largest_contour_index = i; } //------- draw all contour --------------- //Scalar color = Scalar(rng.uniform(0, 255), rng.uniform(0, 255), rng.uniform(0, 255)); //drawContours(drawing, contours, i, color, 2, 8, hierarchy, 0, Point()); //circle(drawing, mc[i], 4, color, -1, 8, 0); } //------- draw largest contour --------------- if (contours.size() > 0){ Scalar color = Scalar(rng.uniform(0, 255), rng.uniform(0, 255), rng.uniform(0, 255)); drawContours(drawing, contours, largest_contour_index, color, 1, 8, hierarchy, 0, Point()); circle(drawing, mc[largest_contour_index], 4, color, -1, 8, 0); drawContours(drawing, hull, largest_contour_index, color, 1, 8, vector<Vec4i>(), 0, Point()); } fout << max_contour_area << endl; cout << "max_contour_area " << max_contour_area << endl; }
/* * Detects an octagon (stop sign) * - 8 vertices * - angles are ~135 degrees -> cos(135)~-.70 * param frame: the image frame to process * param dist: pointer to the distance to the stop sign * return: the processed edge image (an empty Mat on error) */ Mat shapeDetect(Mat frame, float *dist) { if (frame.empty()) { cout<<"bad frame \n"; return Mat(); } // filter image GaussianBlur(frame, frame, Size(7,7), 1.5, 1.5); // Convert to binary image using Canny Mat bw; Canny(frame, bw, 50, 200, 5); // increase detected pixels dilate(bw, bw, Mat(), Point(-1,-1)); // Find contours vector<vector<Point> > contours; findContours(bw.clone(), contours, CV_RETR_EXTERNAL, CV_CHAIN_APPROX_SIMPLE); // vector approx will contain the vertices of the polygonal approximation for the contour vector<Point> approx; // Loop through all the contours and get the approximate polygonal curves for each contour for (unsigned int i = 0; i < contours.size(); i++) { // Approximate contour with accuracy proportional to the contour perimeter approxPolyDP(Mat(contours[i]), approx, arcLength(Mat(contours[i]), true)*0.02, true); // Skip small or non-convex objects if (fabs(contourArea(contours[i])) < 200 || !isContourConvex(approx)) continue; // possible octagon if (approx.size() == 8) { // Number of vertices of polygonal curve int vtc = approx.size(); // Get the cosines of all corners vector<double> cos; for (int j = 2; j < vtc+1; j++) cos.push_back(angle(approx[j%vtc], approx[j-2], approx[j-1])); // Sort ascending the cosine values sort(cos.begin(), cos.end()); // Get the lowest and the highest cosine double mincos = cos.front(); double maxcos = cos.back(); // Use the cosines obtained above and the number of vertices to determine the shape of the contour // angles are pretty relaxed in case the camera isn't straight on if (vtc == 8 && mincos >= -0.85 && maxcos <= -0.55) { // found an octagon (stop sign) -> calculate distance *dist = dist2obj(contours[i]); setLabel(bw, "stopsign", contours[i]); } } } return bw; }
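The angle() helper used here (and by several of the other polygon detectors in this collection) is not shown; it matches the classic OpenCV squares.cpp convention of returning the cosine of the angle formed at its third argument (the shared vertex). A hedged sketch, assumed rather than taken from this codebase:

#include <opencv2/core.hpp>
#include <cmath>

// cosine of the angle at pt0 between the edges pt0->pt1 and pt0->pt2
static double angle(cv::Point pt1, cv::Point pt2, cv::Point pt0)
{
    double dx1 = pt1.x - pt0.x, dy1 = pt1.y - pt0.y;
    double dx2 = pt2.x - pt0.x, dy2 = pt2.y - pt0.y;
    return (dx1 * dx2 + dy1 * dy2) /
           std::sqrt((dx1 * dx1 + dy1 * dy1) * (dx2 * dx2 + dy2 * dy2) + 1e-10);
}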
static bool checkExtinctionLight(const cv::Mat& src_img, const cv::Point top_left, const cv::Point bot_right, const cv::Point bright_center) { /* check whether new roi is included by source image */ cv::Point roi_top_left; roi_top_left.x = (top_left.x < 0) ? 0 : (src_img.cols < top_left.x) ? src_img.cols : top_left.x; roi_top_left.y = (top_left.y < 0) ? 0 : (src_img.rows < top_left.y) ? src_img.rows : top_left.y; cv::Point roi_bot_right; roi_bot_right.x = (bot_right.x < 0) ? 0 : (src_img.cols < bot_right.x) ? src_img.cols : bot_right.x; roi_bot_right.y = (bot_right.y < 0) ? 0 : (src_img.rows < bot_right.y) ? src_img.rows : bot_right.y; cv::Mat roi = src_img(cv::Rect(roi_top_left, roi_bot_right)); cv::Mat roi_HSV; cvtColor(roi, roi_HSV, CV_BGR2HSV); cv::Mat hsv_channel[3]; split(roi_HSV, hsv_channel); int anchor = 3; cv::Mat kernel = getStructuringElement(cv::MORPH_ELLIPSE, cv::Size(2*anchor + 1, 2*anchor + 1), cv::Point(anchor, anchor)); cv::Mat topHat_dark; morphologyEx(hsv_channel[2], topHat_dark, cv::MORPH_TOPHAT, kernel, cv::Point(anchor, anchor), 5); /* sharpening */ cv::Mat tmp; threshold(topHat_dark, tmp, 0.1*255, 255, cv::THRESH_BINARY_INV); tmp.copyTo(topHat_dark); /* filter by its shape and search dark region */ std::vector< std::vector<cv::Point> > dark_contours; std::vector<cv::Vec4i> dark_hierarchy; findContours(topHat_dark, dark_contours, dark_hierarchy, CV_RETR_CCOMP, CV_CHAIN_APPROX_NONE); int contours_idx = 0; bool isThere_dark = false; /* check whether "turned off light" like region are in this roi */ for (unsigned int i=0; i<dark_contours.size(); i++) { cv::Rect bound = boundingRect(dark_contours.at(contours_idx)); double area = contourArea(dark_contours.at(contours_idx)); double perimeter = arcLength(dark_contours.at(contours_idx), true); double circleLevel = (IsNearlyZero(perimeter)) ? 0.0f : (4.0f * CV_PI * area / pow(perimeter, 2)); if (std::max(bound.width, bound.height) < 2*std::min(bound.width, bound.height) && // dimension ratio CIRCLE_LEVEL_THRESHOLD <= circleLevel) // round enough { isThere_dark = true; // std::cerr << "there is dark region" << std::endl; } contours_idx = dark_hierarchy[contours_idx][0]; if (contours_idx < 0) break; } return isThere_dark; } /* static bool checkExtinctionLight() */
int shapeDetection(Mat src, int size) { // Do convex hull refinement vector<Point> hull = convexHullExtraction(src); ///* std::vector<Point> approx; Mat dst = src.clone(); int shape = -1; // Approximate contour with accuracy proportional to the contour perimeter approxPolyDP(Mat(hull), approx, arcLength(Mat(hull), true)*0.3, true); //cout << approx.size() << endl; if (approx.size() == 4) { // Number of vertices of polygonal curve int vtc = approx.size(); // Get the cosines of all corners std::vector<double> cos; for (int j = 2; j < vtc + 1; j++) cos.push_back(angle(approx[j%vtc], approx[j - 2], approx[j - 1])); // Sort ascending the cosine values std::sort(cos.begin(), cos.end()); // Get the lowest and the highest cosine double mincos = cos.front(); double maxcos = cos.back(); // Use the degrees obtained above and the number of vertices // to determine the shape of the contour if (vtc == 4 && mincos >= -0.1 && maxcos <= 0.3) { setLabel(dst, "RECT", hull); shape = 4; } else if (vtc == 5 && mincos >= -0.34 && maxcos <= -0.27) { setLabel(dst, "PENTA", hull); shape = 5; } else if (vtc == 6 && mincos >= -0.55 && maxcos <= -0.45) { setLabel(dst, "HEXA", hull); shape = 6; } } else { // Detect and label circles double fillrate = FillingRate(src,size);// , hull); if (fillrate > 0.89) { setLabel(dst, "CIR", hull); shape = 0; } else if (fillrate < 0.78&&fillrate>0.7) { setLabel(dst, "HEART", hull); shape = 2; } else if (fillrate>0.78&&fillrate<0.89) { setLabel(dst, "FLOWER", hull); shape = 5; } else { setLabel(dst, "Rect", hull); shape = 4; } } if (recogintionShow) { //imshow("src", src); imshow("dst", dst); if (save) imwrite("Result.jpg", dst); waitKey(0); } return shape; //*/ }
///Level 1 void CColor::setALLBoardColorPosition() { Scalar colorContours; imgFHSVBoard = Frame2HSV(imgSharp, CColorsType::NOTWHITE); ///setColorPosition2Board(CColorsType::RED); imgFHSV = Frame2HSV(imgSharp, CColorsType::RED); bitwise_and(imgFHSVBoard, imgFHSV, imgFComper); //sumar a figura imgFHSVBoard com imgFHSV para obter só a cor Canny(imgFComper, imgCanny, 180, 120); findContours(imgCanny, contours, hierarchy, CV_RETR_TREE, CV_CHAIN_APPROX_SIMPLE, Point(0, 0)); drawingContours = Mat::zeros(imgCanny.size(), CV_8UC3); for (size_t i = 0; i< contours.size(); i++) { approxPolyDP(Mat(contours[i]), approx, arcLength(Mat(contours[i]), true)*0.02, true);// 5, true); // if(fabs(contourArea(approx)) > 1000 && approx.size() == 4) { if(fig.isSquare(approx)) { poscenter.ChooseSaveBoardColorCenter( CColorsType::RED, contours[i]); colorContours = CV_RGB(255, 0, 0); drawContours(drawingContours, contours, (int)i, colorContours, 2, /*CV_AA*/8, hierarchy, 0, Point()); } } } ///setColorPosition2Board(CColorsType::GREEN); imgFHSV = Frame2HSV(imgSharp, CColorsType::GREEN); bitwise_and(imgFHSVBoard, imgFHSV, imgFComper); //sumar a figura imgFHSVBoard com imgFHSV para obter só a cor Canny(imgFComper, imgCanny, 180, 120); findContours(imgCanny, contours, hierarchy, CV_RETR_TREE, CV_CHAIN_APPROX_SIMPLE, Point(0, 0)); for (size_t i = 0; i< contours.size(); i++) { approxPolyDP(Mat(contours[i]), approx, arcLength(Mat(contours[i]), true)*0.02, true);// 5, true); // if(fabs(contourArea(approx)) > 1000 && approx.size() == 4) { if(fig.isSquare(approx)) { poscenter.ChooseSaveBoardColorCenter( CColorsType::GREEN, contours[i]); colorContours = CV_RGB(0, 255, 0); drawContours(drawingContours, contours, (int)i, colorContours, 2, /*CV_AA*/8, hierarchy, 0, Point()); } } } ///setColorPosition2Board(CColorsType::BLUE); imgFHSV = Frame2HSV(imgSharp, CColorsType::BLUE); bitwise_and(imgFHSVBoard, imgFHSV, imgFComper); //sumar a figura imgFHSVBoard com imgFHSV para obter só a cor Canny(imgFComper, imgCanny, 180, 120); findContours(imgCanny, contours, hierarchy, CV_RETR_TREE, CV_CHAIN_APPROX_SIMPLE, Point(0, 0)); for (size_t i = 0; i< contours.size(); i++) { approxPolyDP(Mat(contours[i]), approx, arcLength(Mat(contours[i]), true)*0.02, true);// 5, true); // if(fabs(contourArea(approx)) > 1000 && approx.size() == 4) { if(fig.isSquare(approx)) { poscenter.ChooseSaveBoardColorCenter( CColorsType::BLUE, contours[i]); colorContours = CV_RGB(0, 0, 255); drawContours(drawingContours, contours, (int)i, colorContours, 2, /*CV_AA*/8, hierarchy, 0, Point()); } } } ////setColorPosition2Board(CColorsType::YELLOW); imgFHSV = Frame2HSV(imgSharp, CColorsType::YELLOW); bitwise_and(imgFHSVBoard, imgFHSV, imgFComper); //sumar a figura imgFHSVBoard com imgFHSV para obter só a cor Canny(imgFComper, imgCanny, 180, 120); findContours(imgCanny, contours, hierarchy, CV_RETR_TREE, CV_CHAIN_APPROX_SIMPLE, Point(0, 0)); for (size_t i = 0; i< contours.size(); i++) { approxPolyDP(Mat(contours[i]), approx, arcLength(Mat(contours[i]), true)*0.02, true);// 5, true); // if(fabs(contourArea(approx)) > 1000 && approx.size() == 4) { if(fig.isSquare(approx)) { poscenter.ChooseSaveBoardColorCenter( CColorsType::YELLOW, contours[i]); colorContours = CV_RGB(255, 255, 0); drawContours(drawingContours, contours, (int)i, colorContours, 2, /*CV_AA*/8, hierarchy, 0, Point()); } } } ///setColorPosition2Board(CColorsType::BLACK); imgFHSV = Frame2HSV(imgSharp, CColorsType::BLACK ); bitwise_and(imgFHSVBoard, imgFHSV, imgFComper); //sumar a figura imgFHSVBoard com imgFHSV para 
obter só a cor Canny(imgFComper, imgCanny, 180, 120); findContours(imgCanny, contours, hierarchy, CV_RETR_TREE, CV_CHAIN_APPROX_SIMPLE, Point(0, 0)); for (size_t i = 0; i< contours.size(); i++) { approxPolyDP(Mat(contours[i]), approx, arcLength(Mat(contours[i]), true)*0.02, true);// 5, true); // if(fabs(contourArea(approx)) > 1000 && approx.size() == 4) { if(fig.isSquare(approx)) { poscenter.ChooseSaveBoardColorCenter( CColorsType::BLACK, contours[i]); colorContours = CV_RGB(255, 255, 0); drawContours(drawingContours, contours, (int)i, colorContours, 2, /*CV_AA*/8, hierarchy, 0, Point()); } } } //imshow("drawingContours", drawingContours); }
QVector<CColorsType> CColor::getCorAsked_CorPlaced(CColorsType colorAsked) { Scalar colorContours; imgFHSV = Frame2HSV(imgSharp, CColorsType::YELLOW); Canny(imgFHSV, imgCanny, 180, 120); findContours(imgCanny, contours, hierarchy, CV_RETR_TREE, CV_CHAIN_APPROX_SIMPLE, Point(0, 0)); for (size_t i = 0; i< contours.size(); i++) { approxPolyDP(Mat(contours[i]), approx, arcLength(Mat(contours[i]), true)*0.02, true); if(fabs(contourArea(approx)) > 1000 && approx.size() == 4) { if( ! fig.isSquare(approx)) {/// YELLOW ///As peças jogadas são retangulos if(colorAsked == CColorsType::YELLOW) ///Right Answer { fig.setContourRectangle(imgSharp, contours[i], CV_RGB(0, 255, 0)); //feedback on screen colorContours = CV_RGB(255, 255, 0); drawContours(imgSharp, contours, (int)i, colorContours, 2, /*CV_AA*/8, hierarchy, 0, Point()); setMessage2Robot(true, colorAsked, CColorsType::YELLOW); return QVector<CColorsType>() << colorAsked << CColorsType::YELLOW; } else { fig.setContourRectangle(imgSharp, contours[i], Scalar(0, 0, 255)); //feedback on screen setMessage2Robot(false, colorAsked, CColorsType::YELLOW); return QVector<CColorsType>() << colorAsked << CColorsType::YELLOW; } } } } imgFHSV = Frame2HSV(imgSharp, CColorsType::RED); Canny(imgFHSV, imgCanny, 180, 120); findContours(imgCanny, contours, hierarchy, CV_RETR_TREE, CV_CHAIN_APPROX_SIMPLE, Point(0, 0)); for (size_t i = 0; i< contours.size(); i++) { approxPolyDP(Mat(contours[i]), approx, arcLength(Mat(contours[i]), true)*0.02, true);// 5, true); // if(fabs(contourArea(approx)) > 1000 && approx.size() == 4) //este if não deve ser necessario { if( ! fig.isSquare(approx)){/// RED ///As peças jogadas são retangulos if(colorAsked == CColorsType::RED) { fig.setContourRectangle(imgSharp, contours[i], CV_RGB(0, 255, 0)); //feedback on screen colorContours = CV_RGB(255, 0, 0); drawContours(imgSharp, contours, (int)i, colorContours, 2, /*CV_AA*/8, hierarchy, 0, Point()); setMessage2Robot(true, colorAsked, CColorsType::RED); return QVector<CColorsType>() << colorAsked << CColorsType::RED; } else { fig.setContourRectangle(imgSharp, contours[i], Scalar(0, 0, 255)); //feedback on screen setMessage2Robot(false, colorAsked, CColorsType::RED); return QVector<CColorsType>() << colorAsked << CColorsType::RED; } } } } imgFHSV = Frame2HSV(imgSharp, CColorsType::GREEN); Canny(imgFHSV, imgCanny, 180, 120); findContours(imgCanny, contours, hierarchy, CV_RETR_TREE, CV_CHAIN_APPROX_SIMPLE, Point(0, 0)); for (size_t i = 0; i< contours.size(); i++) { approxPolyDP(Mat(contours[i]), approx, arcLength(Mat(contours[i]), true)*0.02, true);// 5, true); // if(fabs(contourArea(approx)) > 1000 && approx.size() == 4) //este if não deve ser necessario { if( ! 
fig.isSquare(approx)){/// GREEN ///As peças jogadas são retangulos if(colorAsked == CColorsType::GREEN) { fig.setContourRectangle(imgSharp, contours[i], CV_RGB(0, 255, 0)); //feedback on screen colorContours = CV_RGB(0, 255, 0); drawContours(imgSharp, contours, (int)i, colorContours, 2, /*CV_AA*/8, hierarchy, 0, Point()); setMessage2Robot(true, colorAsked, CColorsType::GREEN); return QVector<CColorsType>() << colorAsked << CColorsType::GREEN; } else { fig.setContourRectangle(imgSharp, contours[i], Scalar(0, 0, 255)); //feedback on screen setMessage2Robot(false, colorAsked, CColorsType::GREEN); return QVector<CColorsType>() << colorAsked << CColorsType::GREEN; } } } } imgFHSV = Frame2HSV(imgSharp, CColorsType::BLUE); Canny(imgFHSV, imgCanny, 180, 120); findContours(imgCanny, contours, hierarchy, CV_RETR_TREE, CV_CHAIN_APPROX_SIMPLE, Point(0, 0)); for (size_t i = 0; i< contours.size(); i++) { approxPolyDP(Mat(contours[i]), approx, arcLength(Mat(contours[i]), true)*0.02, true);// 5, true); // if(fabs(contourArea(approx)) > 1000 && approx.size() == 4) //este if não deve ser necessario { if( ! fig.isSquare(approx)){/// BLUE ///As peças jogadas são retangulos if(colorAsked == CColorsType::BLUE) { fig.setContourRectangle(imgSharp, contours[i], CV_RGB(0, 255, 0)); //feedback on screen colorContours = CV_RGB(0, 0, 255); drawContours(imgSharp, contours, (int)i, colorContours, 2, /*CV_AA*/8, hierarchy, 0, Point()); setMessage2Robot(true, colorAsked, CColorsType::BLUE); return QVector<CColorsType>() << colorAsked << CColorsType::BLUE; } else { fig.setContourRectangle(imgSharp, contours[i], Scalar(0, 0, 255)); //feedback on screen setMessage2Robot(false, colorAsked, CColorsType::BLUE); return QVector<CColorsType>() << colorAsked << CColorsType::BLUE; } } } } imgFHSV = Frame2HSV(imgSharp, CColorsType::BLACK); Canny(imgFHSV, imgCanny, 180, 120); findContours(imgCanny, contours, hierarchy, CV_RETR_TREE, CV_CHAIN_APPROX_SIMPLE, Point(0, 0)); for (size_t i = 0; i< contours.size(); i++) { approxPolyDP(Mat(contours[i]), approx, arcLength(Mat(contours[i]), true)*0.02, true);// 5, true); // if(fabs(contourArea(approx)) > 1000 && approx.size() == 4) //este if não deve ser necessario { if( ! fig.isSquare(approx)){/// BLACK ///As peças jogadas são retangulos if(colorAsked == CColorsType::BLACK) { fig.setContourRectangle(imgSharp, contours[i], CV_RGB(0, 255, 0)); //feedback on screen colorContours = CV_RGB(0, 0, 0); drawContours(imgSharp, contours, (int)i, colorContours, 2, /*CV_AA*/8, hierarchy, 0, Point()); setMessage2Robot(true, colorAsked, CColorsType::BLACK); return QVector<CColorsType>() << colorAsked << CColorsType::BLACK; } else { fig.setContourRectangle(imgSharp, contours[i], Scalar(0, 0, 255)); //feedback on screen setMessage2Robot(false, colorAsked, CColorsType::BLACK); return QVector<CColorsType>() << colorAsked << CColorsType::BLACK; } } } } return QVector<CColorsType>() << CColorsType::NONE << CColorsType::NONE; ///REVER }
QVector<CColorsType> CColor::getCorBoard_CorPlaced() { imgFHSV = Frame2HSV(imgSharp, CColorsType::YELLOW); Canny(imgFHSV, imgCanny, 180, 120); findContours(imgCanny, contours, hierarchy, CV_RETR_TREE, CV_CHAIN_APPROX_SIMPLE, Point(0, 0)); for (size_t i = 0; i< contours.size(); i++) { approxPolyDP(Mat(contours[i]), approx, arcLength(Mat(contours[i]), true)*0.02, true); if(fabs(contourArea(approx)) > 1000 && approx.size() == 4) { if( ! fig.isSquare(approx)) {/// YELLOW ///As peças jogadas são retangulos CColorsType colorBoard = poscenter.getBoardColor(CColorsType::YELLOW); if(colorBoard == CColorsType::YELLOW) ///Right Answer { setMessage2Robot(true, colorBoard, CColorsType::YELLOW); return QVector<CColorsType>() << colorBoard << CColorsType::YELLOW; } else { setMessage2Robot(false, colorBoard, CColorsType::YELLOW); return QVector<CColorsType>() << colorBoard << CColorsType::YELLOW; } } } } imgFHSV = Frame2HSV(imgSharp, CColorsType::RED); Canny(imgFHSV, imgCanny, 180, 120); findContours(imgCanny, contours, hierarchy, CV_RETR_TREE, CV_CHAIN_APPROX_SIMPLE, Point(0, 0)); for (size_t i = 0; i< contours.size(); i++) { approxPolyDP(Mat(contours[i]), approx, arcLength(Mat(contours[i]), true)*0.02, true);// 5, true); // if(fabs(contourArea(approx)) > 1000 && approx.size() == 4) //este if não deve ser necessario { if( ! fig.isSquare(approx)){/// RED ///As peças jogadas são retangulos CColorsType colorBoard = poscenter.getBoardColor(CColorsType::RED); if(colorBoard == CColorsType::RED) { setMessage2Robot(true, colorBoard, CColorsType::RED); return QVector<CColorsType>() << colorBoard << CColorsType::RED; } else { setMessage2Robot(false, colorBoard, CColorsType::RED); return QVector<CColorsType>() << colorBoard << CColorsType::RED; } } } } imgFHSV = Frame2HSV(imgSharp, CColorsType::GREEN); Canny(imgFHSV, imgCanny, 180, 120); findContours(imgCanny, contours, hierarchy, CV_RETR_TREE, CV_CHAIN_APPROX_SIMPLE, Point(0, 0)); for (size_t i = 0; i< contours.size(); i++) { approxPolyDP(Mat(contours[i]), approx, arcLength(Mat(contours[i]), true)*0.02, true);// 5, true); // if(fabs(contourArea(approx)) > 1000 && approx.size() == 4) //este if não deve ser necessario { if( ! fig.isSquare(approx)){/// GREEN ///As peças jogadas são retangulos CColorsType colorBoard = poscenter.getBoardColor(CColorsType::GREEN); if(colorBoard == CColorsType::GREEN) { setMessage2Robot(true, colorBoard, CColorsType::GREEN); return QVector<CColorsType>() << colorBoard << CColorsType::GREEN; } else { setMessage2Robot(false, colorBoard, CColorsType::GREEN); return QVector<CColorsType>() << colorBoard << CColorsType::GREEN; } } } } imgFHSV = Frame2HSV(imgSharp, CColorsType::BLUE); Canny(imgFHSV, imgCanny, 180, 120); findContours(imgCanny, contours, hierarchy, CV_RETR_TREE, CV_CHAIN_APPROX_SIMPLE, Point(0, 0)); for (size_t i = 0; i< contours.size(); i++) { approxPolyDP(Mat(contours[i]), approx, arcLength(Mat(contours[i]), true)*0.02, true);// 5, true); // if(fabs(contourArea(approx)) > 1000 && approx.size() == 4) //este if não deve ser necessario { if( ! 
fig.isSquare(approx)){/// BLUE ///As peças jogadas são retangulos CColorsType colorBoard = poscenter.getBoardColor(CColorsType::BLUE); if(colorBoard == CColorsType::BLUE) { setMessage2Robot(true, colorBoard, CColorsType::BLUE); return QVector<CColorsType>() << colorBoard << CColorsType::BLUE; } else { setMessage2Robot(false, colorBoard, CColorsType::BLUE); return QVector<CColorsType>() << colorBoard << CColorsType::BLUE; } } } } imgFHSV = Frame2HSV(imgSharp, CColorsType::BLACK); Canny(imgFHSV, imgCanny, 180, 120); findContours(imgCanny, contours, hierarchy, CV_RETR_TREE, CV_CHAIN_APPROX_SIMPLE, Point(0, 0)); for (size_t i = 0; i< contours.size(); i++) { approxPolyDP(Mat(contours[i]), approx, arcLength(Mat(contours[i]), true)*0.02, true);// 5, true); // if(fabs(contourArea(approx)) > 1000 && approx.size() == 4) //este if não deve ser necessario { if( ! fig.isSquare(approx)){/// BLACK ///As peças jogadas são retangulos CColorsType colorBoard = poscenter.getBoardColor(CColorsType::BLACK); if(colorBoard == CColorsType::BLACK) { setMessage2Robot(true, colorBoard, CColorsType::BLACK); return QVector<CColorsType>() << colorBoard << CColorsType::BLACK; } else { setMessage2Robot(false, colorBoard, CColorsType::BLACK); return QVector<CColorsType>() << colorBoard << CColorsType::BLACK; } } } } return QVector<CColorsType>() << CColorsType::NONE << CColorsType::NONE; ///REVER }
Mat CColor::setColorPosition() { Scalar colorContours; drawingContours = Mat::zeros(imgCanny.size(), CV_8UC3); ///setColorPosition2Piece(CColorsType::RED); imgFHSV = Frame2HSV(imgSharp, CColorsType::RED); Canny(imgFHSV, imgCanny, 180, 120); findContours(imgCanny, contours, hierarchy, CV_RETR_TREE, CV_CHAIN_APPROX_SIMPLE, Point(0, 0)); for (size_t i = 0; i< contours.size(); i++) { approxPolyDP(Mat(contours[i]), approx, arcLength(Mat(contours[i]), true)*0.02, true);// 5, true); // if(fabs(contourArea(approx)) > 1000 && approx.size() == 4) { if(!fig.isSquare(approx)) { poscenter.ChooseSaveColorCenter( CColorsType::RED, contours[i]); fig.setContourRectangle(imgSharp, contours[i]); colorContours = CV_RGB(255, 0, 0); drawContours(drawingContours, contours, (int)i, colorContours, 2, /*CV_AA*/8, hierarchy, 0, Point()); drawContours(imgSharp, contours, (int)i, colorContours, 2, /*CV_AA*/8, hierarchy, 0, Point()); } } } ///setColorPosition2Piece(CColorsType::GREEN); imgFHSV = Frame2HSV(imgSharp, CColorsType::GREEN); Canny(imgFHSV, imgCanny, 180, 120); findContours(imgCanny, contours, hierarchy, CV_RETR_TREE, CV_CHAIN_APPROX_SIMPLE, Point(0, 0)); for (size_t i = 0; i< contours.size(); i++) { approxPolyDP(Mat(contours[i]), approx, arcLength(Mat(contours[i]), true)*0.02, true);// 5, true); // if(fabs(contourArea(approx)) > 1000 && approx.size() == 4) { if(!fig.isSquare(approx)) { poscenter.ChooseSaveColorCenter( CColorsType::GREEN, contours[i]); fig.setContourRectangle(imgSharp, contours[i]); colorContours = CV_RGB(0, 255, 0); drawContours(drawingContours, contours, (int)i, colorContours, 2, /*CV_AA*/8, hierarchy, 0, Point()); drawContours(imgSharp, contours, (int)i, colorContours, 2, /*CV_AA*/8, hierarchy, 0, Point()); } } } ///setColorPosition2Piece(CColorsType::BLUE); imgFHSV = Frame2HSV(imgSharp, CColorsType::BLUE); Canny(imgFHSV, imgCanny, 180, 120); findContours(imgCanny, contours, hierarchy, CV_RETR_TREE, CV_CHAIN_APPROX_SIMPLE, Point(0, 0)); for (size_t i = 0; i< contours.size(); i++) { approxPolyDP(Mat(contours[i]), approx, arcLength(Mat(contours[i]), true)*0.02, true);// 5, true); // if(fabs(contourArea(approx)) > 1000 && approx.size() == 4) { if(!fig.isSquare(approx)) { poscenter.ChooseSaveColorCenter( CColorsType::BLUE, contours[i]); fig.setContourRectangle(imgSharp, contours[i]); colorContours = CV_RGB(0, 0, 255); drawContours(drawingContours, contours, (int)i, colorContours, 2, /*CV_AA*/8, hierarchy, 0, Point()); drawContours(imgSharp, contours, (int)i, colorContours, 2, /*CV_AA*/8, hierarchy, 0, Point()); } } } ///setColorPosition2Piece(CColorsType::YELLOW); imgFHSV = Frame2HSV(imgSharp, CColorsType::YELLOW); Canny(imgFHSV, imgCanny, 180, 120); findContours(imgCanny, contours, hierarchy, CV_RETR_TREE, CV_CHAIN_APPROX_SIMPLE, Point(0, 0)); for (size_t i = 0; i< contours.size(); i++) { approxPolyDP(Mat(contours[i]), approx, arcLength(Mat(contours[i]), true)*0.02, true);// 5, true); // if(fabs(contourArea(approx)) > 1000 && approx.size() == 4) { if(!fig.isSquare(approx)) { poscenter.ChooseSaveColorCenter( CColorsType::YELLOW, contours[i]); fig.setContourRectangle(imgSharp, contours[i]); colorContours = CV_RGB(255, 255, 0); drawContours(drawingContours, contours, (int)i, colorContours, 2, /*CV_AA*/8, hierarchy, 0, Point()); drawContours(imgSharp, contours, (int)i, colorContours, 2, /*CV_AA*/8, hierarchy, 0, Point()); } } } ///setColorPosition2Piece(CColorsType::BLACK); imgFHSV = Frame2HSV(imgSharp, CColorsType::BLACK); Canny(imgFHSV, imgCanny, 180, 120); findContours(imgCanny, contours, 
hierarchy, CV_RETR_TREE, CV_CHAIN_APPROX_SIMPLE, Point(0, 0)); for (size_t i = 0; i< contours.size(); i++) { approxPolyDP(Mat(contours[i]), approx, arcLength(Mat(contours[i]), true)*0.02, true);// 5, true); // if(fabs(contourArea(approx)) > 1000 && approx.size() == 4) { if(!fig.isSquare(approx)) { poscenter.ChooseSaveColorCenter( CColorsType::BLACK, contours[i]); fig.setContourRectangle(imgSharp, contours[i]); colorContours = CV_RGB(100, 100, 100); drawContours(drawingContours, contours, (int)i, colorContours, 2, /*CV_AA*/8, hierarchy, 0, Point()); drawContours(imgSharp, contours, (int)i, colorContours, 2, /*CV_AA*/8, hierarchy, 0, Point()); } } } // imshow("drawingContours", drawingContours); return drawingContours; }
bool ShapesDetector::scan_colour(Mat HSV_img,HSV_Param Filt_Type, int thres) { cv::Mat threshold; int xPos,yPos; int x_min_box = thres; int x_max_box = FRAME_WIDTH-thres; int y_min_box = thres; int y_max_box = FRAME_HEIGHT-thres; Shape tempShape; vector<Shape> tempShapes; //initiallize temp shape with color tempShape.setColour(Filt_Type.getColour()); //initiallize box constraints if(thres < 0) { x_min_box = CENTER_BOX_X_MIN; x_max_box = CENTER_BOX_X_MAX; y_min_box = CENTER_BOX_Y_MIN; y_max_box = CENTER_BOX_Y_MAX; } cv::inRange(HSV_img,Filt_Type.getHSVmin(),Filt_Type.getHSVmax(),threshold); if(Filt_Type.getColour() == red) { cv::Mat temp; cv::Scalar HSV_min = Filt_Type.getHSVmin(); cv::Scalar HSV_max = Filt_Type.getHSVmax(); HSV_min.val[0] = RED_H_MIN_2; HSV_max.val[0] = RED_H_MAX_2; cv::inRange(HSV_img,HSV_min,HSV_max,temp); bitwise_or(threshold, temp, threshold); //cv::imshow("red threshold",threshold); //waitKey(50); } morphOps(threshold); //these two vectors needed for output of findContours vector< vector<Point> > contours; cv::Mat approx; cv::Mat edges_map; vector<Vec4i> hierarchy; vector<Vec3f> circles; //find contours of filtered image using openCV findContours function cv::findContours(threshold,contours,hierarchy,CV_RETR_CCOMP,CV_CHAIN_APPROX_SIMPLE ); //cv::Canny(threshold, edges_map, 5, 250, 7, true); //cv::HoughCircles(edges_map, circles, CV_HOUGH_GRADIENT, 1, 100, 180, 30 ,20, 210); //use moments method to find the position double refArea = 0; bool objectFound = false; if (hierarchy.size() > 0) { int numObjects = hierarchy.size(); //if number of objects greater than MAX_NUM_OBJECTS we have a noisy filter if(numObjects<MAX_NUM_OBJECTS) { for (int index = 0; index >= 0; index = hierarchy[index][0]) { Moments moment = moments((cv::Mat)contours[index]); double area = moment.m00; //if the area is less than 20 px by 20px then it is probably just noise //if the area is the same as the 3/2 of the image size, probably just a bad filter //we only want the object with the largest area so we safe a reference area each //iteration and compare it to the area in the next iteration. if(area>MIN_OBJECT_AREA) { approxPolyDP(Mat(contours[index]), approx, arcLength(Mat(contours[index]), true)*0.04, true); drawContours(img_info, approx, -1, Scalar(0,255,0), 5, 4); xPos = moment.m10/area; yPos = moment.m01/area; tempShape.setXPos(xPos); tempShape.setYPos(yPos); int corners = approx.size().height; if(corners == 3) { tempShape.setShape(triangle_sh); } else if(corners == 4) { tempShape.setShape(square_sh); } else if(corners > 5) { tempShape.setShape(circle_sh); } if(corners >= 3) { tempShapes.push_back(tempShape); } } } } } /*//ROS_INFO("Circles Detected: %d",circles.size()); for( size_t i = 0; i < circles.size(); i++ ) { tempShape.setXPos(cvRound(circles[i][0])); tempShape.setYPos(cvRound(circles[i][1])); tempShape.setRadius(cvRound(circles[i][2])); tempShape.setShape(circle_sh); tempShapes.push_back(tempShape); }*/ for( size_t i = 0; i < tempShapes.size(); i++ ) { int xPos = tempShapes[i].getXPos(); int yPos = tempShapes[i].getYPos(); if((xPos < x_max_box && xPos > x_min_box) && (yPos < y_max_box && yPos > y_min_box)) { Shapes.push_back(tempShapes[i]); } } return true; }
// Process an image containing a line and return the angle with respect to NAO. double NaoVision::calculateAngleToBlackLine() { // Convert image to gray and blur it. cvtColor(src, src_gray, CV_BGR2GRAY); blur(src_gray, src_gray, Size(3,3)); if(!local) imshow("src", src); Mat canny_output; vector<vector<Point> > contours; vector<Vec4i> hierarchy; // Detect edges using canny. Canny(src_gray, canny_output, thresh, thresh * 2, 3); // Find contours. findContours(canny_output, contours, hierarchy, CV_RETR_TREE, CV_CHAIN_APPROX_SIMPLE, Point(0, 0)); // Get the moments. vector<Moments> mu(contours.size()); for(int i = 0; i < contours.size(); i++) mu[i] = moments(contours[i], false); // Get the mass centers. vector<Point2f> mc( contours.size()); for(int i = 0; i < contours.size(); i++) mc[i] = Point2f( mu[i].m10/mu[i].m00 , mu[i].m01/mu[i].m00); // Eliminate contours without area. contoursClean.clear(); int indMax = 0; int lengthMax = 0; for(int i = 0; i < contours.size(); i++) { area = mu[i].m00; length = arcLength(contours[i], true); punto = mc[i]; if(area != 0 && length > 200 && punto.x > 0 && punto.y > 0) contoursClean.push_back(contours.at(i)); } if(contoursClean.size() != 0) { // Get moments and mass for new vector. vector<Moments> muClean(contoursClean.size()); for(int i = 0; i < contoursClean.size(); i++) muClean[i] = moments(contoursClean[i], false); // Get the mass centers. vector<Point2f> mcClean( contoursClean.size()); for(int i = 0; i < contoursClean.size(); i++) mcClean[i] = Point2f(muClean[i].m10/muClean[i].m00, muClean[i].m01/muClean[i].m00); for(int i = 0; i < contoursClean.size(); i++) { punto = mcClean[i]; length = arcLength(contoursClean[i], true); } // Find the longest. for(int i = 0; i < contoursClean.size(); i++) { length = arcLength(contoursClean[i], true); lengthMax = arcLength(contoursClean[indMax], true); if(i > 0) { if(length > lengthMax) indMax = i; } else indMax = 0; } // Draw contours. Mat drawing = Mat::zeros(canny_output.size(), CV_8UC3); Scalar color = Scalar( rng.uniform(0, 255), rng.uniform(0,255), rng.uniform(0,255)); drawContours( drawing, contoursClean, indMax, color, 2, 8, hierarchy, 0, Point()); circle(drawing, mcClean[indMax], 4, color, 5, 8, 0 ); // Calculate the angle of the line. angleToALine = getAngleDegrees(contoursClean[indMax], drawing); puntoMax = mcClean[indMax]; lengthMax = arcLength(contoursClean[indMax], true); // Show in a window. if(!local) { namedWindow("Contours", CV_WINDOW_AUTOSIZE); imshow("Contours", drawing); // Draw grid. line(drawing, Point(260,0), Point(260, drawing.rows), Scalar(255,255,255)); line(drawing, Point(umbral,0), Point(umbral, drawing.rows), Scalar(255,255,255)); line(drawing, Point((drawing.cols/2),0), Point((drawing.cols/2), drawing.rows), Scalar(255,255,255)); line(drawing, Point(0,120), Point(320,120), Scalar(255,255,255)); imshow("Contours", drawing); } } else { // Go straight. angleToALine = 90.0; } return angleToALine; }
//find rectangles on image std::vector<Rect> findRect( const cv::Mat& image ) { std::vector<Rect> rect; rect.clear(); cv::Mat gray = image.clone(); std::vector<std::vector<cv::Point> > contours; //SLOWER //erote the image to fill holes /*int erosion_size = 1; cv::Mat element = getStructuringElement( cv::MORPH_RECT, cv::Size( 2*erosion_size + 1, 2*erosion_size+1 ), cv::Point( -1, -1 ) ); cv::erode(gray, gray, element); */ /* cv::erode(gray, gray, cv::Mat(), cv::Point(-1,-1),1); //standard call //cv::dilate(gray, gray, cv::Mat(), cv::Point(-1,-1),2); //standard call std::string file = "/mnt/sdcard/Pictures/MyCameraApp/red_trans.jpeg"; cv::imwrite(file,gray); */ // find contours and store them all as a list cv::findContours(gray, contours, CV_RETR_LIST, CV_CHAIN_APPROX_SIMPLE); std::vector<cv::Point> approx; // test each contour for( size_t i = 0; i < contours.size(); i++ ) { // approximate contour with accuracy proportional // to the contour perimeter cv::approxPolyDP(cv::Mat(contours[i]), approx, arcLength(cv::Mat(contours[i]), true)*0.02, true); // square contours should have 4 vertices after approximation // relatively large area (to filter out noisy contours) // and be convex. // Note: absolute value of an area is used because // area may be positive or negative - in accordance with the // contour orientation if( approx.size() == 4 && fabs(cv::contourArea(cv::Mat(approx))) > MIN_RECT_SIZE && cv::isContourConvex(cv::Mat(approx)) ) { double maxCosine = 0; for( int j = 2; j < 5; j++ ) { // find the maximum cosine of the angle between joint edges double cosine = fabs(angle(approx[j%4], approx[j-2], approx[j-1])); maxCosine = MAX(maxCosine, cosine); } // if cosines of all angles are small // (all angles are ~90 degree) then store quandrange // vertices to resultant sequence if( maxCosine < 0.3 ) { rect.push_back(extractRectData(approx)); } } } return rect; }
static cv::Mat signalDetect_inROI(const cv::Mat& roi, const cv::Mat& src_img, const double estimatedRadius, const cv::Point roi_topLeft ) { /* reduce noise */ cv::Mat noiseReduced(roi.rows, roi.cols, CV_8UC3); GaussianBlur(roi, noiseReduced, cv::Size(3, 3), 0, 0); /* extract color information */ cv::Mat red_mask(roi.rows, roi.cols, CV_8UC1); colorExtraction(noiseReduced , &red_mask , thSet.Red.Hue.lower, thSet.Red.Hue.upper, thSet.Red.Sat.lower, thSet.Red.Sat.upper, thSet.Red.Val.lower, thSet.Red.Val.upper); cv::Mat yellow_mask(roi.rows, roi.cols, CV_8UC1); colorExtraction(noiseReduced , &yellow_mask , thSet.Yellow.Hue.lower, thSet.Yellow.Hue.upper, thSet.Yellow.Sat.lower, thSet.Yellow.Sat.upper, thSet.Yellow.Val.lower, thSet.Yellow.Val.upper); cv::Mat green_mask(roi.rows, roi.cols, CV_8UC1); colorExtraction(noiseReduced , &green_mask , thSet.Green.Hue.lower, thSet.Green.Hue.upper, thSet.Green.Sat.lower, thSet.Green.Sat.upper, thSet.Green.Val.lower, thSet.Green.Val.upper); /* combine all color mask and create binarized image */ cv::Mat binarized = cv::Mat::zeros(roi.rows, roi.cols, CV_8UC1); bitwise_or(red_mask, yellow_mask, binarized); bitwise_or(binarized, green_mask, binarized); threshold(binarized, binarized, 0, 255, CV_THRESH_BINARY | CV_THRESH_OTSU); /* filter by its shape and index each bright region */ std::vector< std::vector<cv::Point> > bright_contours; std::vector<cv::Vec4i> bright_hierarchy; findContours(binarized, bright_contours, bright_hierarchy, CV_RETR_CCOMP, CV_CHAIN_APPROX_NONE); cv::Mat bright_mask = cv::Mat::zeros(roi.rows, roi.cols, CV_8UC1); int contours_idx = 0; std::vector<regionCandidate> candidates; for (unsigned int i=0; i<bright_contours.size(); i++) { cv::Rect bound = boundingRect(bright_contours.at(contours_idx)); cv::Scalar rangeColor = BLACK; struct regionCandidate cnd; double area = contourArea(bright_contours.at(contours_idx)); double perimeter = arcLength(bright_contours.at(contours_idx), true); double circleLevel = (IsNearlyZero(perimeter)) ? 0.0f : (4.0f * CV_PI * area / pow(perimeter, 2)); if (std::max(bound.width, bound.height) < 2*std::min(bound.width, bound.height) && /* dimension ratio */ CIRCLE_LEVEL_THRESHOLD <= circleLevel && CIRCLE_AREA_THRESHOLD <= area) { // std::cerr << "circleLevel: " << circleLevel << std::endl; rangeColor = WHITE; cnd.center.x = bound.x + bound.width/2; cnd.center.y = bound.y + bound.height/2; cnd.idx = contours_idx; cnd.circleLevel = (IsNearlyZero(perimeter)) ? 0.0f : (4.0 * CV_PI * area / pow(perimeter, 2)); cnd.isBlacked = false; candidates.push_back(cnd); } drawContours(bright_mask, bright_contours, contours_idx, rangeColor, CV_FILLED, 8, bright_hierarchy, 0); /* only contours on toplevel are considered */ contours_idx = bright_hierarchy[contours_idx][0]; if (contours_idx < 0) break; } // imshow("bright_mask", bright_mask); // waitKey(10); unsigned int candidates_num = candidates.size(); // std::cerr << "before checkExtrinctionLight. 
candidates: " << candidates_num << std::endl; /* decrease candidates by checking existence of turned off light in their neighborhood */ if (candidates_num > 1) /* if there are multipule candidates */ { for (unsigned int i=0; i<candidates.size(); i++) { /* check wheter this candidate seems to be green lamp */ cv::Point check_roi_topLeft = cv::Point(candidates.at(i).center.x - 2*estimatedRadius + roi_topLeft.x, candidates.at(i).center.y - 2*estimatedRadius + roi_topLeft.y); cv::Point check_roi_botRight = cv::Point(candidates.at(i).center.x + 6*estimatedRadius + roi_topLeft.x, candidates.at(i).center.y + 2*estimatedRadius + roi_topLeft.y); bool likeGreen = checkExtinctionLight(src_img, check_roi_topLeft, check_roi_botRight, candidates.at(i).center); /* check wheter this candidate seems to be yellow lamp */ check_roi_topLeft = cv::Point(candidates.at(i).center.x - 4*estimatedRadius + roi_topLeft.x, candidates.at(i).center.y - 2*estimatedRadius + roi_topLeft.y); check_roi_botRight = cv::Point(candidates.at(i).center.x + 4*estimatedRadius + roi_topLeft.x, candidates.at(i).center.y + 2*estimatedRadius + roi_topLeft.y); bool likeYellow = checkExtinctionLight(src_img, check_roi_topLeft, check_roi_botRight, candidates.at(i).center); /* check wheter this candidate seems to be red lamp */ check_roi_topLeft = cv::Point(candidates.at(i).center.x - 6*estimatedRadius + roi_topLeft.x, candidates.at(i).center.y - 2*estimatedRadius + roi_topLeft.y); check_roi_botRight = cv::Point(candidates.at(i).center.x + 2*estimatedRadius + roi_topLeft.x, candidates.at(i).center.y + 2*estimatedRadius + roi_topLeft.y); bool likeRed = checkExtinctionLight(src_img, check_roi_topLeft, check_roi_botRight, candidates.at(i).center); if (!likeGreen && !likeYellow && !likeRed) /* this region may not be traffic light */ { candidates_num--; drawContours(bright_mask, bright_contours, candidates.at(i).idx, BLACK, CV_FILLED, 8, bright_hierarchy, 0); candidates.at(i).isBlacked = true; } } } // std::cerr << "after checkExtrinctionLight. candidates: " << candidates_num << std::endl; /* choose one candidate by comparing degree of circularity */ if (candidates_num > 1) /* if there are still multiple candidates */ { double min_diff = DBL_MAX; unsigned int min_idx = 0; /* search the region that has nearest degree of circularity to 1 */ for (unsigned int i=0; i<candidates.size(); i++) { if(candidates.at(i).isBlacked) continue; double diff = fabs(1 - candidates.at(i).circleLevel); if (min_diff > diff) { min_diff = diff; min_idx = i; } } /* fill region of non-candidate */ for (unsigned int i=0; i<candidates.size(); i++) { if(candidates.at(i).isBlacked) continue; cv::Scalar regionColor = BLACK; candidates.at(i).isBlacked = true; if (i == min_idx) { regionColor = WHITE; candidates.at(i).isBlacked = false; } drawContours(bright_mask, bright_contours, candidates.at(i).idx, regionColor, CV_FILLED, 8, bright_hierarchy, 0); } } return bright_mask; } /* static void signalDetect_inROI() */
void SquareOcl::find_squares_gpu( const Mat& image, vector<vector<Point> >& squares )
{
    squares.clear();
    Mat gray;
    cv::ocl::oclMat pyr_ocl, timg_ocl, gray0_ocl, gray_ocl;

    // down-scale and upscale the image to filter out the noise
    ocl::pyrDown(ocl::oclMat(image), pyr_ocl);
    ocl::pyrUp(pyr_ocl, timg_ocl);

    vector<vector<Point> > contours;
    vector<cv::ocl::oclMat> gray0s;
    ocl::split(timg_ocl, gray0s); // split 3 channels into a vector of oclMat

    // find squares in every color plane of the image
    for( int c = 0; c < 3; c++ )
    {
        gray0_ocl = gray0s[c];

        // try several threshold levels
        for( int l = 0; l < SQUARE_OCL_THRESH_LEVEL_H; l++ )
        {
            // hack: use Canny instead of zero threshold level.
            // Canny helps to catch squares with gradient shading
            if( l == 0 )
            {
                // do Canny on the OpenCL device
                // apply Canny. Take the upper threshold from the slider
                // and set the lower to 0 (which forces edges merging)
                cv::ocl::Canny(gray0_ocl, gray_ocl, 0, SQUARE_OCL_EDGE_THRESH_H, 5);
                // dilate the Canny output to remove potential
                // holes between edge segments
                ocl::dilate(gray_ocl, gray_ocl, Mat(), Point(-1,-1));
                gray = Mat(gray_ocl);
            }
            else
            {
                // apply threshold if l != 0:
                // tgray(x,y) = gray(x,y) < (l+1)*255/N ? 255 : 0
                cv::ocl::threshold(gray0_ocl, gray_ocl, (l+1)*255/SQUARE_OCL_THRESH_LEVEL_H, 255, THRESH_BINARY);
                gray = gray_ocl;
            }

            // find contours and store them all as a list
            findContours(gray, contours, CV_RETR_LIST, CV_CHAIN_APPROX_SIMPLE);

            vector<Point> approx;
            // test each contour
            for( size_t i = 0; i < contours.size(); i++ )
            {
                // approximate the contour with accuracy proportional
                // to the contour perimeter
                approxPolyDP(Mat(contours[i]), approx, arcLength(Mat(contours[i]), true)*0.02, true);

                // square contours should have 4 vertices after approximation,
                // a relatively large area (to filter out noisy contours)
                // and be convex.
                // Note: the absolute value of the area is used because
                // the area may be positive or negative - in accordance with the
                // contour orientation
                if( approx.size() == 4 &&
                    fabs(contourArea(Mat(approx))) > 1000 &&
                    isContourConvex(Mat(approx)) )
                {
                    double maxCosine = 0;

                    for( int j = 2; j < 5; j++ )
                    {
                        // find the maximum cosine of the angle between joint edges
                        double cosine = fabs(angle(approx[j%4], approx[j-2], approx[j-1]));
                        maxCosine = MAX(maxCosine, cosine);
                    }

                    // if the cosines of all angles are small
                    // (all angles are ~90 degrees) then write the quadrangle
                    // vertices to the resultant sequence
                    if( maxCosine < 0.3 )
                        squares.push_back(approx);
                }
            }
        }
    }
}
double ompl::base::SO3StateSpace::distance(const State *state1, const State *state2) const { return arcLength(state1, state2); }
bool ompl::base::SO3StateSpace::equalStates(const State *state1, const State *state2) const { return arcLength(state1, state2) < std::numeric_limits<double>::epsilon(); }
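distance() and equalStates() both defer to an arcLength(state1, state2) helper that is not part of these excerpts. A hedged sketch of the usual SO(3) geodesic angle between unit quaternions, consistent with the slerp code shown earlier (the function name and flat-argument signature are illustrative only; the real OMPL helper may differ in tolerance handling):

#include <algorithm>
#include <cmath>

// Geodesic angle between two unit quaternions (q and -q represent the same rotation,
// hence the absolute value of the dot product).
static double quaternionArcLength(double x1, double y1, double z1, double w1,
                                  double x2, double y2, double z2, double w2)
{
    double dq = std::fabs(x1 * x2 + y1 * y2 + z1 * z2 + w1 * w2);
    dq = std::min(dq, 1.0);   // clamp against rounding error before acos
    return std::acos(dq);
}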
void paperRegistration::detectFigures(cv::vector<cv::vector<cv::Point>>& squares, cv::vector<cv::vector<cv::Point>>& triangles,
    float minLength, float maxLength, int tresh_binary)
{
    if (currentDeviceImg.empty())
        return;

    //cv::Mat image = currentDeviceImg;
    //cv::Mat image = cv::imread("C:/Users/sophie/Desktop/meinz.png", CV_LOAD_IMAGE_GRAYSCALE);// cv::imread(path, CV_LOAD_IMAGE_GRAYSCALE);
    //resize(image, image, cv::Size(500,700));

    squares.clear();
    triangles.clear();

    cv::Mat gray;
    cv::Mat element = getStructuringElement(cv::MORPH_RECT, cv::Size(7,7));
    cv::vector<cv::vector<cv::Point> > contours;

    //compute binary image
    //use dilation and erosion to improve edges
    threshold(currentDeviceImg, gray, tresh_binary, 255, cv::THRESH_BINARY_INV);
    dilate(gray, gray, element, cv::Point(-1,-1));
    erode(gray, gray, element, cv::Point(-1,-1));

    // find contours and store them all as a list
    cv::findContours(gray, contours, CV_RETR_LIST, CV_CHAIN_APPROX_SIMPLE);

    //test each contour
    cv::vector<cv::Point> approx;
    cv::vector<cv::vector<cv::Point> >::iterator iterEnd = contours.end();
    for(cv::vector<cv::vector<cv::Point> >::iterator iter = contours.begin(); iter != iterEnd; ++iter)
    {
        // approximate the contour with accuracy proportional
        // to the contour perimeter
        cv::approxPolyDP(*iter, approx, arcLength(*iter, true)*0.03, true);

        //contours should be convex
        if (isContourConvex(approx))
        {
            // square contours should have 4 vertices after approximation and
            // relatively large side lengths (to filter out noisy contours)
            if( approx.size() == 4)
            {
                bool rectangular = true;
                for( int j = 3; j < 6; j++ )
                {
                    // the contour counts as rectangular only if every angle
                    // between adjacent sides is within 7 degrees of 90
                    if (fabs(90 - fabs(computeAngle(approx[j%4], approx[j-3], approx[j-2]))) > 7)
                    {
                        rectangular = false;
                        break;
                    }
                }

                if (!rectangular)
                    continue;

                float side1 = computeLength(approx[0], approx[1]);
                float side2 = computeLength(approx[1], approx[2]);

                if (side1 > minLength && side1 < maxLength && side2 > minLength && side2 < maxLength)
                    squares.push_back(approx);
            }
            // triangle contours should have 3 vertices after approximation and
            // relatively large side lengths (to filter out noisy contours)
            else if ( approx.size() == 3)
            {
                float side1 = computeLength(approx[0], approx[1]);
                float side2 = computeLength(approx[1], approx[2]);
                float side3 = computeLength(approx[2], approx[0]);

                if (side1 > minLength && side1 < maxLength && side2 > minLength && side2 < maxLength
                    && side3 > minLength && side3 < maxLength)
                    triangles.push_back(approx);
            }
        }
    }
}
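Both shape detectors above share the same idiom: approxPolyDP with a tolerance proportional to the contour perimeter (arcLength(...)*0.02 or *0.03), so the simplification adapts to the contour's size. A minimal self-contained sketch of that idiom on a synthetic image (image contents and the 2% factor are illustrative only):

#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <cstdio>
#include <vector>

int main()
{
    // Synthetic test image: one filled white rectangle on a black background.
    cv::Mat img = cv::Mat::zeros(200, 200, CV_8UC1);
    cv::rectangle(img, cv::Point(40, 50), cv::Point(160, 140), cv::Scalar(255), -1);

    std::vector<std::vector<cv::Point> > contours;
    cv::findContours(img, contours, CV_RETR_EXTERNAL, CV_CHAIN_APPROX_SIMPLE);

    for (size_t i = 0; i < contours.size(); ++i)
    {
        std::vector<cv::Point> approx;
        // tolerance = 2% of the contour perimeter, as in the functions above
        double eps = 0.02 * cv::arcLength(contours[i], true);
        cv::approxPolyDP(contours[i], approx, eps, true);
        std::printf("contour %d reduced to %d vertices\n", (int)i, (int)approx.size());
    }
    return 0;
}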
char detectBlueBlock(Mat image)
{
    int T = 15;   // threshold on the area-to-perimeter ratio
    ColorHistogram hc;
    MatND colorhist = hc.getHueHistogram(image);

    // traverse the histogram data
    //hc.getHistogramStat(colorhist);
    /* Mat histImg = hc.getHistogramImage(colorhist);
    namedWindow("BlueBlockHistogram");
    imshow("BlueBlockHistogram", histImg);*/

    Mat thresholded, thresholded1, thresholded2, thresholded3;
    threshold(hc.v[0], thresholded1, 100, 255, 1);
    threshold(hc.v[0], thresholded2, 124, 255, 0);
    threshold(hc.v[1], thresholded3, 125, 255, 1);   // turn this range black
    thresholded = thresholded1 + thresholded2 + thresholded3;

    //imshow("1", thresholded1);
    //imshow("2", thresholded2);
    //imshow("3", thresholded3);
    //namedWindow("BlueBlockBinary");
    //imshow("BlueBlockBinary", thresholded);

    int top = (int) (0.05*thresholded.rows);
    int bottom = (int) (0.05*thresholded.rows);
    int left = (int) (0.05*thresholded.cols);
    int right = (int) (0.05*thresholded.cols);
    Scalar value = Scalar( 255 );
    copyMakeBorder( thresholded, thresholded, top, bottom, left, right, 0, value );

    /* Mat eroded;
    erode(thresholded, eroded, Mat());
    namedWindow("ErodedImage");
    imshow("ErodedImage", eroded);
    Mat dilated;
    dilate(thresholded, dilated, Mat());
    namedWindow("DilatedImage");
    imshow("DilatedImage", dilated);*/

    // morphological closing
    Mat closed;
    morphologyEx(thresholded, closed, MORPH_CLOSE, Mat());
    //namedWindow("ClosedImage");
    //imshow("ClosedImage", closed);

    vector<vector<Point>> contours;
    findContours(closed, contours, CV_RETR_LIST, CV_CHAIN_APPROX_NONE);

    // filter out unqualified contours
    int cmin = 100;   // minimum contour length (in points)
    vector<vector<Point>>::const_iterator itc = contours.begin();
    while (itc != contours.end())
    {
        if (itc->size() < cmin)
            itc = contours.erase(itc);
        else
            itc++;
    }

    Mat result(closed.size(), CV_8U, Scalar(255));
    double area, length, p;
    double a[2] = {0, 0};
    cout << "Size=" << contours.size() << endl;
    for (int i = 0; i < contours.size(); i++)
    {
        area = abs(contourArea( contours[i] ));
        length = abs(arcLength( contours[i], true ));
        p = area/length;
        // keep the two largest area-to-perimeter ratios, a[0] >= a[1]
        if (p > a[0])
        {
            a[1] = a[0];
            a[0] = p;
        }
        else if (p > a[1])
            a[1] = p;
        cout << "Area=" << area << " " << "Length=" << length << " " << "Property=" << p << endl;
    }

    drawContours(result, contours, -1, Scalar(0), 1);
    //namedWindow("DrawContours");
    //imshow("DrawContours", result);

    cout << "Property=" << a[1] << endl;
    //waitKey();

    if (a[1] > T)
        return BLUEBLOCK;
    else
        return NOTHING;
}
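detectBlueBlock ranks regions by area/perimeter: for a filled circle of radius r the ratio is r/2, and for a square of side s it is s/4, so the ratio grows with region size and penalizes thin, ragged shapes, which is what the threshold T exploits. A small sanity-check sketch, not from the original source (the image and radius are made up):

#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <cstdio>
#include <vector>

int main()
{
    // A filled circle of radius 60: expect area/perimeter to be close to r/2 = 30.
    cv::Mat img = cv::Mat::zeros(200, 200, CV_8UC1);
    cv::circle(img, cv::Point(100, 100), 60, cv::Scalar(255), -1);

    std::vector<std::vector<cv::Point> > contours;
    cv::findContours(img, contours, CV_RETR_EXTERNAL, CV_CHAIN_APPROX_NONE);
    if (contours.empty())
        return 1;

    double area   = cv::contourArea(contours[0]);
    double length = cv::arcLength(contours[0], true);
    std::printf("area/perimeter = %.2f (ideal r/2 = 30.00)\n", area / length);
    return 0;
}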
void FindLargest_ProjectionVoxel(int ImageNum, vector<OctVoxel>& Octree, vector<cv::Mat>& Silhouette, Cpixel*** vertexII, CMatrix* ART){
	int thresh = 70;
	int max_thresh = 210;
	RNG rng(12345);
	Mat src_gray;
	Mat drawing;
	double scale(0.7);
	Size ssize;

	CVector M(4);          // homogeneous world coordinates of a voxel vertex (x, y, z, 1)
	CVector m(4);          // the corresponding (normalized) image coordinates in homogeneous form
	M[3] = 1.0;
	CVector3d vertexW[8];  // world coordinates (x, y, z) of the 8 vertices of a voxel

	ofstream fout("larget_boundingbox_contour.txt");

	int Boundingbox_line[12][2] = { { 0, 1 }, { 1, 2 }, { 2, 3 }, { 3, 0 },
	                                { 0, 4 }, { 1, 5 }, { 2, 6 }, { 3, 7 },
	                                { 4, 5 }, { 5, 6 }, { 6, 7 }, { 7, 4 } };
	//---------------------------------------------------------------
	for (auto h(0); h < ImageNum; h++){
		//src_gray = Silhouette[h];
		Silhouette[h].copyTo(src_gray);
		cout << "Silhouette_" << h << endl;

		for (auto j(0); j < Octree.size(); j++){
			Octree[j].SetVertexWorld_Rotated(vertexW);
			for (int k = 0; k < 8; ++k){
				// project the 8 vertices of the voxel into the image
				M[0] = vertexW[k].x;
				M[1] = vertexW[k].y;
				M[2] = vertexW[k].z;
				m = ART[h] * M;
				vertexII[h][j][k].setPixel_u_v((int)(m[0] / m[2]), (int)(m[1] / m[2]));   // normalize
			}
			//-------------------------------------- bounding box ------------------------
			for (auto k(0); k < 12; k++){
				// collect the 12 edges of the voxel for drawing
				Start_point.x = vertexII[h][j][Boundingbox_line[k][0]].getPixel_u();
				Start_point.y = vertexII[h][j][Boundingbox_line[k][0]].getPixel_v();
				PointStart.push_back(Start_point);
				End_point.x = vertexII[h][j][Boundingbox_line[k][1]].getPixel_u();
				End_point.y = vertexII[h][j][Boundingbox_line[k][1]].getPixel_v();
				PointEnd.push_back(End_point);
				//line(src_gray, Start_point, End_point, Scalar(225, 225, 255), 2.0, CV_AA);
			}
		}

		Mat canny_output;
		vector<vector<Point> > contours;
		vector<Vec4i> hierarchy;
		double max_contour_area(0.0);
		int largest_contour_index(0);

		/// Detect edges using Canny
		Canny(src_gray, canny_output, thresh, max_thresh, 3);
		/// Find contours
		//findContours(canny_output, contours, hierarchy, CV_RETR_TREE, CV_CHAIN_APPROX_SIMPLE, Point(0, 0));
		findContours(canny_output, contours, hierarchy, CV_RETR_CCOMP, CV_CHAIN_APPROX_NONE, Point(0, 0));

		/// Prepare the drawing canvas and draw the voxel bounding boxes
		drawing = Mat::zeros(canny_output.size(), CV_8UC3);
		for (auto n(0); n < PointEnd.size(); n++){
			line(drawing, PointStart[n], PointEnd[n], Scalar(225, 225, 225), 1.0, 1, 0);
		}

		/// Get the moments
		vector<Moments> mu(contours.size());
		for (int i = 0; i < contours.size(); i++) {
			mu[i] = moments(contours[i], false);
			//cout << "# of contour points: " << contours[i].size() << endl;
			for (int j = 0; j < contours[i].size(); j++) {
				//cout << "Point(x,y)=" << i << " j " << j << " " << contours[i][j] << endl;
			}
		}

		//// Get the mass centers:
		vector<Point2f> mc(contours.size());
		for (int i = 0; i < contours.size(); i++) {
			mc[i] = Point2f(mu[i].m10 / mu[i].m00, mu[i].m01 / mu[i].m00);
		}

		//// ----------- Find the convex hull object for each contour
		vector<vector<Point>> hull(contours.size());
		for (int i = 0; i < contours.size(); i++){
			convexHull(Mat(contours[i]), hull[i], false);
		}

		// Calculate the area with the moment m00 and compare with the result of the OpenCV function
		//printf("\t Info: Area and Contour Length \n");
		//cout << "contours.size() " << contours.size() << endl;
		double countour_Area(0.0);
		double arc_Length(0.0);
		for (int i = 0; i < contours.size(); i++) {
			countour_Area = (double)contourArea(contours[i]);
			arc_Length = (double)arcLength(contours[i], true);
			//cout << "contourArea [" << i << "] " << ": Moment " << mu[i].m00
			//     << " OpenCV " << countour_Area << " arcLength " << arc_Length << endl;
			//cout << "countour_Area " << countour_Area << " " << endl;
			if (countour_Area > max_contour_area){
				max_contour_area = countour_Area;
				largest_contour_index = i;
			}
			//------- draw all contours ---------------
			//Scalar color = Scalar(rng.uniform(0, 255), rng.uniform(0, 255), rng.uniform(0, 255));
			//drawContours(drawing, contours, i, color, 2, 8, hierarchy, 0, Point());
			//circle(drawing, mc[i], 4, color, -1, 8, 0);
			//drawContours(drawing, hull, i, color, 1, 8, vector<Vec4i>(), 0, Point());
			//drawContours(drawing, contours, i, Scalar(255, 255, 255), 0.10, 8, hierarchy, 0, Point());
		}

		//------- draw the largest contour ---------------
		Scalar color = Scalar(rng.uniform(0, 255), rng.uniform(0, 255), rng.uniform(0, 255));
		drawContours(drawing, contours, largest_contour_index, color, 2, 8, hierarchy, 0, Point());
		//circle(drawing, mc[largest_contour_index], 4, color, -1, 8, 0);
		//drawContours(drawing, contours, largest_contour_index, Scalar(0, 255, 255), 2, 8, hierarchy, 0, Point());
		//drawContours(drawing, hull, largest_contour_index, color, 2, 8, vector<Vec4i>(), 0, Point());
		//drawContours(drawing, contours, largest_contour_index, Scalar(255, 255, 255), 1, 8, hierarchy, 0, Point());

		fout << max_contour_area << endl;
		cout << "max_contour_area " << max_contour_area << endl;

		//----------------------- Show in a window --------------------------------------
		//resize(drawing, drawing, ssize, INTER_NEAREST);
		namedWindow("Contours", CV_WINDOW_AUTOSIZE);
		imshow("Contours", drawing);
		// output the white boundary image
		imwrite("../../data2016/input/newzebra/contour_voxel/contour_voxel" + to_string(h) + ".bmp", drawing);
		waitKey(0);
		destroyWindow("silhouette");

		PointStart.clear();
		PointStart.shrink_to_fit();
		PointEnd.clear();
		PointEnd.shrink_to_fit();
	}
	//getchar();
}
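The per-silhouette loop above keeps only the contour with the largest area. The same selection, factored out as a small helper for clarity (a sketch, not part of the original code):

#include <opencv2/imgproc/imgproc.hpp>
#include <vector>

// Returns the index of the contour with the largest area, or -1 if the list is empty.
static int largestContourIndex(const std::vector<std::vector<cv::Point> >& contours)
{
    int best = -1;
    double bestArea = 0.0;
    for (size_t i = 0; i < contours.size(); ++i)
    {
        double area = cv::contourArea(contours[i]);
        if (area > bestArea)
        {
            bestArea = area;
            best = static_cast<int>(i);
        }
    }
    return best;
}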