Example #1
int Touch::countArea()
{
    Mat m(contourPoints);
    return contourArea(m);
}
Example #2
bool ScreenDetector::contour_compare_area(const std::vector<cv::Point> c1, const std::vector<cv::Point> c2)
{
    return contourArea(c1) > contourArea(c2);
}
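A comparator like this is normally handed to std::sort to order detected contours by area, largest first. A minimal usage sketch, assuming the comparison is exposed as a free function (the names below are illustrative, not part of ScreenDetector):

#include <algorithm>
#include <vector>
#include <opencv2/imgproc/imgproc.hpp>

// Sort contours so the largest-area contour comes first.
static bool compare_area_desc(const std::vector<cv::Point>& c1, const std::vector<cv::Point>& c2)
{
    return cv::contourArea(c1) > cv::contourArea(c2);
}

static void sortContoursByArea(std::vector<std::vector<cv::Point> >& contours)
{
    std::sort(contours.begin(), contours.end(), compare_area_desc);
}

Taking the contours by const reference in the comparator avoids copying every point vector on each comparison.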
Example #3
bool ProcessingThread::CrossDetect(Mat gray, vector<Point2f> &cross)
{
	double tresholdmin = 0.6;
	int tresholdmin_int = 6;
	int tresholdmax_int = 6;
	int tresholdCannyMin = 1400;
	int tresholdCannyMax = 1500;

	bool found = true;
	vector<Mat> contours;
	vector<Point> approx;

	//Mat gray;
	//cvtColor(img, gray, CV_BGR2GRAY);

	Mat bw;
	Canny(gray, bw, tresholdCannyMin, tresholdCannyMax, 5);

	findContours(bw.clone(), contours, CV_RETR_EXTERNAL, CV_CHAIN_APPROX_SIMPLE);

	for (int i = 0; i < contours.size(); i++)
	{
		found = true; // reset for each candidate contour
		approxPolyDP(Mat(contours[i]), approx, arcLength(Mat(contours[i]), true)*0.02, true);

		if (fabs(contourArea(contours[i])) < 100 || isContourConvex(approx) || (approx.size() != 8))
			continue;

		double x0 = approx[0].x;
		double x1 = approx[1].x;
		double x2 = approx[2].x;
		double x3 = approx[3].x;
		double x4 = approx[4].x;
		double x5 = approx[5].x;
		double x6 = approx[6].x;
		double x7 = approx[7].x;

		double y0 = approx[0].y;
		double y1 = approx[1].y;
		double y2 = approx[2].y;
		double y3 = approx[3].y;
		double y4 = approx[4].y;
		double y5 = approx[5].y;
		double y6 = approx[6].y;
		double y7 = approx[7].y;

		double length_top = (((abs(x0 - x1) + abs(x0 - x7)) / 2) + ((abs(y0 - y1) + abs(y0 - y7)) / 2)) / 2;
		double length_bot = (((abs(x3 - x4) + abs(x4 - x5)) / 2) + ((abs(y3 - y4) + abs(y4 - y5)) / 2)) / 2;
		double ratio1 = ((((length_top + length_bot) / length_top - 0.5) + ((length_top + length_bot) / length_bot - 0.5))) / 2 - 0.5;

		double length_left = (((abs(x2 - x1) + abs(x2 - x3)) / 2) + ((abs(y2 - y1) + abs(y2 - y3)) / 2)) / 2;
		double length_right = (((abs(x6 - x7) + abs(x6 - x5)) / 2) + ((abs(y6 - y7) + abs(y6 - y5)) / 2)) / 2;
		double ratio2 = ((((length_left + length_right) / length_left - 0.5) + ((length_left + length_right) / length_right - 0.5))) / 2 - 0.5;

		if (abs((ratio1 + ratio2) / 2 - 1) > 0.2)
		{
			found = false;
			continue;
		}

		for (int j = 0; j < approx.size() - 3; j++){
			double ang1 = angle(approx[j], approx[j + 1], approx[j + 2]);
			double ang2 = angle(approx[j + 1], approx[j + 2], approx[j + 3]);
			//printf("ang1: %f\t, ang2: %f \n", ang1, ang2);
			if (ang1 > 0.7){
				if (!(ang1 > 0.7 && ang2 < 0.3))
				{
					found = false;
					continue;
				}
			}
		}

		if (found)
		{
			for (const Point& pt : approx)
				cross.push_back((Point2f)pt);
			return true;
		}

	}
	return false; // no valid cross contour was found
}
Example #4
void VisionNode::CameraCallback(CCamera *cam, const void *buffer, int buffer_length) {
    cv::Mat myuv(HEIGHT + HEIGHT / 2, WIDTH, CV_8UC1, (unsigned char *) buffer);
    cv::cvtColor(myuv, img, CV_YUV2RGBA_NV21);
    cv::cvtColor(img, img_gray, CV_RGBA2GRAY);

    communication::MarkerPosition markerPosition;
    markerPosition.header.stamp = ros::Time::now();
    static uint next_id = 0;
    markerPosition.header.seq = next_id++;
    markerPosition.cameraID = ID;

    static uint counter = 0;
    t2 = std::chrono::high_resolution_clock::now();
    time_span = std::chrono::duration_cast<std::chrono::milliseconds>(t2 - t1);
    markerPosition.fps = 1000.0*counter/time_span.count(); // time_span is in milliseconds
    counter++;

    if(time_span.count()>30000){ // reset every 30 seconds (time_span is in milliseconds)
        counter = 0;
        t1 = std::chrono::high_resolution_clock::now();
        std_msgs::Int32 msg;
        msg.data = ID;
        cameraID_pub->publish(msg);
    }

    cv::Mat filtered_img;
    cv::threshold(img_gray, filtered_img, threshold_value, 255, cv::THRESH_TOZERO);

    // find contours in result, which hopefully correspond to a found object
    vector <vector<cv::Point>> contours;
    vector <cv::Vec4i> hierarchy;
    findContours(filtered_img, contours, hierarchy, CV_RETR_EXTERNAL, CV_CHAIN_APPROX_SIMPLE,
                 cv::Point(0, 0));

    // filter out tiny useless contours
    double min_contour_area = 10;
    for (auto it = contours.begin(); it != contours.end();) {
        if (contourArea(*it) < min_contour_area) {
            it = contours.erase(it);
        }
        else {
            ++it;
        }
    }

    // publish the markerPositions
    vector<cv::Point2f> centers(contours.size());
    vector<float> radius(contours.size());
    for (int idx = 0; idx < contours.size(); idx++) {
        minEnclosingCircle(contours[idx], centers[idx], radius[idx]);
        communication::Vector2 pos;
        pos.x = WIDTH - centers[idx].x;
        pos.y = centers[idx].y;
        markerPosition.marker_position.push_back(pos);
    }
    //imshow("camera", img);
    //waitKey(1);
    markerPosition.markerVisible=contours.size();
    marker_position_pub->publish(markerPosition);

    if(publish_video_flag && counter%3==0){
        // get centers and publish
        for (int idx = 0; idx < contours.size(); idx++) {
            drawContours(img_gray, contours, idx, cv::Scalar(0, 0, 0), 4, 8, hierarchy, 0,
                         cv::Point());
        }
        cv_bridge::CvImage cvImage;
        img_gray.copyTo(cvImage.image);
        sensor_msgs::Image msg;
        cvImage.toImageMsg(msg);
        msg.encoding = "mono8";
       	msg.header = markerPosition.header;
        video_pub->publish(msg);
   }
}
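The erase loop above that drops tiny contours can also be written with the erase-remove idiom; a minimal sketch under the same assumptions (contours as returned by cv::findContours, min_contour_area chosen by the caller, helper name hypothetical):

#include <algorithm>
#include <vector>
#include <opencv2/imgproc/imgproc.hpp>

// Drop every contour whose area is below min_contour_area.
static void dropSmallContours(std::vector<std::vector<cv::Point> >& contours, double min_contour_area)
{
    contours.erase(std::remove_if(contours.begin(), contours.end(),
                                  [min_contour_area](const std::vector<cv::Point>& c) {
                                      return cv::contourArea(c) < min_contour_area;
                                  }),
                   contours.end());
}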
Example #5
double Circle::findRadiusFromContours(vector<vector<Point> > contours) {
	double area = contourArea(contours.at(0));
	double radius = sqrt(area/M_PI);
	return radius;
}
Example #6
double Circle::getContourArea(vector<vector<Point> > contours) {
	double area = contourArea(contours.at(0));
	return area;
}
Example #7
void imgproc(const uint8_t *image, int width, int height)
{
  cv::Mat img(height, width, CV_8UC1, const_cast<uint8_t*>(image), width);
  imshow("Original", img);
  cv::waitKey(1);
  return;

  cv::Mat src = img.clone();
  cv::Mat color_src(height, width, CV_8UC3);
  cvtColor(src, color_src, CV_GRAY2RGB);

  // Image processing starts here
  GaussianBlur(src, src, cv::Size(3,3), 0);
  adaptiveThreshold(src, src, 255, cv::ADAPTIVE_THRESH_GAUSSIAN_C, cv::THRESH_BINARY_INV, 5, 3);
  //equalizeHist(src, src);
  // TODO: Can think about using multiple thresholds and choosing one where
  // we can detect a pattern
  //threshold(src, src, 100, 255, cv::THRESH_BINARY_INV);

  imshow("Thresholded", src);

  std::vector<std::vector<cv::Point> > contours;

  findContours(src, contours, CV_RETR_CCOMP, CV_CHAIN_APPROX_NONE);
  //printf("Num contours: %lu\n", contours.size());

  std::vector<double> contour_area, contour_arclength;
  contour_area.resize(contours.size());
  contour_arclength.resize(contours.size());
  std::vector<unsigned int> circle_index;
  for(unsigned int idx = 0; idx < contours.size(); idx++)
  {
    if(contours[idx].size() > 25)
    {
      cv::Mat contour(contours[idx]);
      contour_area[idx] = contourArea(contour);
      if(contour_area[idx] > 50)
      {
        contour_arclength[idx] = arcLength(contour,true);
        float q = 4*M_PI*contour_area[idx] /
            (contour_arclength[idx]*contour_arclength[idx]);
        if(q > 0.8f)
        {
          circle_index.push_back(idx);
          //printf("isoperimetric quotient: %f\n", q);
          //Scalar color( rand()&255, rand()&255, rand()&255 );
          //drawContours(contours_dark, contours, idx, color, 1, 8);
        }
      }
    }
  }
  std::list<Circle> circles;
  for(unsigned int i = 0; i < circle_index.size(); i++)
  {
    Circle c;
    cv::Moments moment = moments(contours[circle_index[i]]);
    float inv_m00 = 1./moment.m00;
    c.center = cv::Point2f(moment.m10*inv_m00, moment.m01*inv_m00);
    c.radius = (sqrtf(contour_area[circle_index[i]]/M_PI) + contour_arclength[circle_index[i]]/(2*M_PI))/2.0f;
    circles.push_back(c);
  }

  // Get the circles with centers close to each other
  std::vector<std::list<Circle> > filtered_circles;
  std::list<Circle>::iterator it = circles.begin();
  unsigned int max_length = 0;
  while(it != circles.end())
  {
    std::list<Circle> c;
    c.push_back(*it);

    cv::Point c1 = it->center;

    std::list<Circle>::iterator it2 = it;
    it2++;
    while(it2 != circles.end())
    {
      cv::Point c2 = it2->center;
      std::list<Circle>::iterator it3 = it2;
      it2++;
      if(hypotf(c2.x - c1.x, c2.y - c1.y) < 10)
      {
        c.push_back(*it3);
        circles.erase(it3);
      }
    }
    unsigned int length_c = c.size();
    if(length_c > 1 && length_c > max_length)
    {
      max_length = length_c;
      filtered_circles.push_back(c);
    }

    it2 = it;
    it++;
    circles.erase(it2);
  }

  if(filtered_circles.size() > 0)
  {
    Circle target_circle;
    target_circle.radius = std::numeric_limits<float>::max();

    for(it = filtered_circles.back().begin(); it != filtered_circles.back().end(); it++)
    {
      //printf("circle: c: %f, %f, r: %f\n", it->center.x, it->center.y, it->radius);
      if(it->radius < target_circle.radius)
      {
        target_circle.radius = it->radius;
        target_circle.center = it->center;
      }
    }
    circle(color_src, cv::Point(target_circle.center.x, target_circle.center.y), target_circle.radius, cv::Scalar(0,0,255), 2);
    printf("target: c: %f, %f, r: %f\n", target_circle.center.x, target_circle.center.y, target_circle.radius);

  }
#if defined(CAPTURE_VIDEO)
  static cv::VideoWriter video_writer("output.mp4", CV_FOURCC('M','J','P','G'), 20, cv::Size(width, height));
  video_writer.write(color_src);
#endif
  imshow("Target", color_src);
  cv::waitKey(1);
}
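The circle test in the example relies on the isoperimetric quotient q = 4*pi*A / P^2, which equals 1 for a perfect circle and is smaller for every other shape; contours with q above 0.8 are kept as circle candidates. A minimal standalone sketch of that check (the helper name is hypothetical):

#include <cmath>
#include <vector>
#include <opencv2/imgproc/imgproc.hpp>

// Isoperimetric quotient of a closed contour: 1.0 for a perfect circle, lower otherwise.
static double circularity(const std::vector<cv::Point>& contour)
{
    double area      = cv::contourArea(contour);
    double perimeter = cv::arcLength(contour, true);
    if (perimeter <= 0.0)
        return 0.0;
    return 4.0 * M_PI * area / (perimeter * perimeter);
}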
Example #8
void OpenniFilter::cloud_cb_ (const pcl::PointCloud<pcl::PointXYZRGBA>::ConstPtr &cloud)
{
    if (!viewer.wasStopped())
    {
        if (cloud->isOrganized())
        {
            // initialize all the Mats to store intermediate steps
            int cloudHeight = cloud->height;
            int cloudWidth = cloud->width;
            rgbFrame = Mat(cloudHeight, cloudWidth, CV_8UC3);
            drawing = Mat(cloudHeight, cloudWidth, CV_8UC3);
            grayFrame = Mat(cloudHeight, cloudWidth, CV_8UC1);
            hsvFrame = Mat(cloudHeight, cloudWidth, CV_8UC3);
            contourMask = Mat(cloudHeight, cloudWidth, CV_8UC1);

            if (!cloud->empty())
            {
                for (int h = 0; h < rgbFrame.rows; h ++)
                {
                    for (int w = 0; w < rgbFrame.cols; w++)
                    {
                        pcl::PointXYZRGBA point = cloud->at(w, cloudHeight-h-1);
                        Eigen::Vector3i rgb = point.getRGBVector3i();
                        rgbFrame.at<Vec3b>(h,w)[0] = rgb[2];
                        rgbFrame.at<Vec3b>(h,w)[1] = rgb[1];
                        rgbFrame.at<Vec3b>(h,w)[2] = rgb[0];
                    }
                }

                // do the filtering 
                int xPos = 0;
                int yPos = 0;
                mtx.lock();
                xPos = mouse_x;
                yPos = mouse_y;
                mtx.unlock();

                // color filtering based on what is chosen by users
                cvtColor(rgbFrame, hsvFrame, CV_RGB2HSV);
                Vec3b pixel = hsvFrame.at<Vec3b>(yPos, xPos); // cv::Mat::at takes (row, col), i.e. (y, x)

                int hueLow = pixel[0] < iHueDev ? pixel[0] : pixel[0] - iHueDev;
                int hueHigh = pixel[0] > 255 - iHueDev ? pixel[0] : pixel[0] + iHueDev;
                // inRange(hsvFrame, Scalar(hueLow, pixel[1]-20, pixel[2]-20), Scalar(hueHigh, pixel[1]+20, pixel[2]+20), grayFrame);
                inRange(hsvFrame, Scalar(hueLow, iLowS, iLowV), Scalar(hueHigh, iHighS, iHighV), grayFrame);

                // removes small objects from the foreground by morphological opening
                erode(grayFrame, grayFrame, getStructuringElement(MORPH_ELLIPSE, Size(5,5)));
                dilate(grayFrame, grayFrame, getStructuringElement(MORPH_ELLIPSE, Size(5,5)));

                // morphological closing (removes small holes from the foreground)
                dilate(grayFrame, grayFrame, getStructuringElement(MORPH_ELLIPSE, Size(5,5)));
                erode(grayFrame, grayFrame, getStructuringElement(MORPH_ELLIPSE, Size(5,5)));

                // gets contour from the grayFrame and keeps the largest contour
                Mat cannyOutput;
                vector<vector<Point> > contours;
                vector<Vec4i> hierarchy;
                int thresh = 100;
                Canny(grayFrame, cannyOutput, thresh, thresh * 2, 3);
                findContours(cannyOutput, contours, hierarchy, CV_RETR_TREE, CV_CHAIN_APPROX_SIMPLE, Point(0, 0));
                int defaultContourArea = 1000; // 1000 seems to work fine in most cases... cannot prove this
                vector<vector<Point> > newContours;
                for (int i = 0; i < contours.size(); i++)
                {
                    double area = contourArea(contours[i], false);
                    if (area > defaultContourArea)
                        newContours.push_back(contours[i]);
                }

                // draws every contour that passed the area filter
                drawing = Mat::zeros(cannyOutput.size(), CV_8UC3);
                for (int i = 0; i < newContours.size(); i++)
                    drawContours(drawing, newContours, i, Scalar(255, 255, 255), CV_FILLED, 8, hierarchy, 0, Point());

                // gets the filter by setting everything within the contour to be 1. 
                inRange(drawing, Scalar(1, 1, 1), Scalar(255, 255, 255), contourMask);

                // filters the point cloud based on contourMask
                // again go through the point cloud and filter out unnecessary points
                pcl::PointCloud<pcl::PointXYZRGBA>::Ptr resultCloud (new pcl::PointCloud<pcl::PointXYZRGBA>);
                pcl::PointXYZRGBA newPoint;
                for (int h = 0; h < contourMask.rows; h ++)
                {
                    for (int w = 0; w < contourMask.cols; w++)
                    {
                        if (contourMask.at<uchar>(h,w) > 0)
                        {
                            newPoint = cloud->at(w,h);
                            resultCloud->push_back(newPoint);
                        }
                    }
                }

                if (xPos == 0 && yPos == 0)
                    viewer.showCloud(cloud);
                else
                    viewer.showCloud(resultCloud);
                
                imshow("tracker", rgbFrame);
                imshow("filtered result", contourMask);
                char key = waitKey(1);
                if (key == 27) 
                {
                    interface->stop();
                    return;
                }
            }
            else
                cout << "Warning: Point Cloud is empty" << endl;
        }
        else
            cout << "Warning: Point Cloud is not organized" << endl;
    }
}
Example #9
int CHuman::HumanDetectRun(Mat &displayframe)
{
	//int time_use=0;
	//struct timeval start;
 //struct timeval end;

  //gettimeofday(&start,NULL);

	Mat tmpframe;
	//Mat blobdealFrame;
	vector< vector<Point> >  contours;
	Rect contoursRect;

	alarm =0;

	displayframe.copyTo(tmpframe);
	//displayframe.copyTo(blobdealFrame);

	vector<blobnode>().swap(humanlistpro);

	m_zoomRows = tmpframe.rows / m_rowsZoomRate;
	m_zoomCols = tmpframe.cols / m_colsZoomRate;

	 w_Rate = (float)tmpframe.cols / m_zoomCols;
	 h_Rate = (float)tmpframe.rows / m_zoomRows;


	Mat morph = Mat(tmpframe.rows ,tmpframe.cols,CV_8UC1);

	mog(tmpframe,foregrondframe,0.001);   // 0.001

	frameindex++;
	if(frameindex<250) return 2;
	if(frameindex >= 250) frameindex =250;
	foregrondframe.copyTo(mask);
	threshold(mask, mask, 200, 255, THRESH_BINARY);

	cv::erode(mask, mask, cv::Mat());

	cv::dilate(mask, mask, cv::Mat());

	algorithmMorphology_Operations(mask, mask);

	findContours(mask, contours, CV_RETR_LIST, CV_CHAIN_APPROX_SIMPLE);

	mask.release(); //nikola
	m_BlobRects.clear();
	for(int i=0;i<contours.size();i++)
	{
		contoursRect = boundingRect(contours[i]);
		if(fabs(contourArea(contours[i])) > 600.0)
		{
			//rectangle(displayframe, contoursRect,color_rect, 2, 8, 0);
			m_BlobRects.push_back(contoursRect);
		}
	}

	if((m_Flag & 0x02)  == 0x02){
		for(int i=0;i<DirectionLines.size();i++)
		{
			line(displayframe,DirectionLines[i].Start,DirectionLines[i].End,Scalar(255));
		}

	}


	if((m_Flag & 0x01)  == 1){

		for(int ii=0;ii<MonitorZoneRects.size();ii++)
		{
			rectangle(displayframe, MonitorZoneRects[ii], Scalar( 255, 0, 0 ), 2, 8, 0);
		}
	}

	human_detect(morph,displayframe);

	if((m_Flag & 0x01)  == 1){
		census(displayframe);// for human statistics
	}

	if((m_Flag & 0x02)  == 0x02){
		blobdeal(displayframe);
	}

        if(humanstatis.numAll<(humanstatis.doorin[0]+humanstatis.doorin[1]-humanstatis.doorout[0]-humanstatis.doorout[1]))
        		humanstatis.numAll = humanstatis.doorin[0]+humanstatis.doorin[1]-humanstatis.doorout[0]-humanstatis.doorout[1];

	 //dbgprint("door1:in=%d,out=%d  door2:in=%d,out=%d\n",humanstatis.doorin[0],humanstatis.doorout[0],humanstatis.doorin[1],humanstatis.doorout[1]);

	if(humanstatis.numAll >= MaxNum){
				//printf("humanstatis.numAll is %d\n",humanstatis.numAll);
				alarm =1;
	}

	char dstr[100];
	sprintf(dstr, "door1:in=%d,out=%d  door2:in=%d,out=%d",humanstatis.doorin[0],humanstatis.doorout[0],humanstatis.doorin[1],humanstatis.doorout[1]);
	putText(displayframe,dstr,cvPoint(200,25),CV_FONT_HERSHEY_COMPLEX, 0.5, cvScalar(0,0,255));


	//printf("humanstatis Num:%d, humanstatisIn:%d,humanstatisOut:%d\n",humanstatis.numAll,humanstatis.inAll,humanstatis.outAll);


	//char dstr[100];
  //sprintf(dstr,  "in=%d,out=%d",humanstatis.doorin,humanstatis.doorout);
  //putText(displayframe,dstr,cvPoint(25,25),CV_FONT_HERSHEY_COMPLEX, 1, cvScalar(0,0,255));
	//printf("doorin=%d,doorout=%d\n",humanstatis.doorin,humanstatis.doorout);

	tmpframe.release(); //nikola

	morph.release();
	vector<Point>().swap(object); //vector<Point>
	vector<blobnode>().swap(humanlist);

	//gettimeofday(&end,NULL);
	//time_use=(end.tv_sec-start.tv_sec)*1000+(end.tv_usec-start.tv_usec)/1000; // milliseconds
	//printf("time_use is %d\n",time_use);
	foregrondframe.release();

	return 0;
}
Example #10
float TransformationBuilder::getScale(const vector<Point2f>& objectCornersTransformed, const vector<Point2f>& objectCorners)
{
    return contourArea(objectCornersTransformed) / contourArea(objectCorners);
}
Example #11
void camera_contours_display(int num, Straightener & straight) {
	int c;
		IplImage* color_img;
		CvCapture* cv_cap = cvCaptureFromCAM(num);
		cvNamedWindow("Video", 0); // create window
		resizeWindow("Video", 700,700);
		for(;;) {
			color_img = cvQueryFrame(cv_cap); // get frame
			if(color_img != 0) {
				Mat cam_mat(color_img);
				Mat result;
				cam_mat.copyTo(result);

				if(straight.doAll(cam_mat, result)) {
					///Apply blur
					blur(result, result, Size(3,3));
					///Apply Canny to destination Matrix
					Canny(result, result, 50, 50, 3);
					/// Vectors for storing contours
					vector<vector<Point> > contours; //contours of the paper sheet
					vector<vector<Point> > approx_contours; //approx contours of the paper sheet
					vector<Vec4i> hierarchy;
					int erosion_type = 2;
					int erosion_size = 3;
					Mat element = getStructuringElement(erosion_type,
														Size( 2*erosion_size + 1, 2*erosion_size+1),
														Point( erosion_size, erosion_size));
					dilate(result, result, element);
					/// Cut 20 px from each side to avoid paper borders detection
					result = result(Rect(10, 10, result.cols-20, result.rows-20));
					findContours(result, contours, hierarchy, CV_RETR_CCOMP, CV_CHAIN_APPROX_NONE, Point(0, 0));
					/// Draw contours
					Mat drawing = Mat::zeros( result.size(), CV_8UC3 );
					/// https://github.com/Itseez/opencv/blob/master/samples/cpp/contours2.cpp
//					approx_contours.resize(contours.size());
					for(unsigned int i = 0; i < contours.size(); i++) {
						/// Area of more than 20 and no parent
						if(contourArea(contours[i]) > 20 && hierarchy[i][3] == -1) {
							vector<Point> tmp_contour;
							approxPolyDP(Mat(contours[i]), tmp_contour, 3, true);
							approx_contours.push_back(tmp_contour);
						}
					}
					for(unsigned int i=0; i < approx_contours.size(); i++) {
						Scalar color;
						if(approx_contours[i].size() == 4) {
							color = Scalar( 255, 255, 255);
							drawContours( drawing, approx_contours, i, color, 1, 8, noArray(), 0, Point() );
						}
						else {
							color = Scalar( 0, 255, 0);
							drawContours( drawing, approx_contours, i, color, 1, 8, noArray(), 0, Point() );
						}
					}
					imshow("Video", drawing);
				}
			}
			c = cvWaitKey(10); // wait 10 ms or for key stroke
			if(c == 27)
				break; // if ESC, break and quit
		}
		/* clean up */
		cvReleaseCapture( &cv_cap );
		cvDestroyWindow("Video");
}
Example #12
vector<bool> CharacterAnalysis::filterBetweenLines(Mat img, vector<vector<Point> > contours, vector<Vec4i> hierarchy, vector<Point> outerPolygon, vector<bool> goodIndices)
{
    static float MIN_AREA_PERCENT_WITHIN_LINES = 0.88;

    vector<bool> includedIndices(contours.size());
    for (int j = 0; j < contours.size(); j++)
        includedIndices[j] = false;


    if (outerPolygon.size() == 0)
        return includedIndices;

    vector<Point> validPoints;

    // Figure out the line height
    LineSegment topLine(outerPolygon[0].x, outerPolygon[0].y, outerPolygon[1].x, outerPolygon[1].y);
    LineSegment bottomLine(outerPolygon[3].x, outerPolygon[3].y, outerPolygon[2].x, outerPolygon[2].y);

    float x = ((float) img.cols) / 2;
    Point midpoint = Point(x, bottomLine.getPointAt(x));
    Point acrossFromMidpoint = topLine.closestPointOnSegmentTo(midpoint);
    float lineHeight = distanceBetweenPoints(midpoint, acrossFromMidpoint);

    // Create a white mask for the area inside the polygon
    Mat outerMask = Mat::zeros(img.size(), CV_8U);
    Mat innerArea = Mat::zeros(img.size(), CV_8U);
    fillConvexPoly(outerMask, outerPolygon.data(), outerPolygon.size(), Scalar(255,255,255));


    for (int i = 0; i < contours.size(); i++)
    {
        if (goodIndices[i] == false)
            continue;

        // draw this contour filled in white onto the inner-area mask
        drawContours(innerArea, contours,
                     i, // draw this contour
                     cv::Scalar(255,255,255), // in white
                     CV_FILLED,
                     8,
                     hierarchy,
                     0
                    );


        bitwise_and(innerArea, outerMask, innerArea);


        vector<vector<Point> > tempContours;
        findContours(innerArea, tempContours,
                     CV_RETR_EXTERNAL, // retrieve the external contours
                     CV_CHAIN_APPROX_SIMPLE); // compress each contour to its end points

        double totalArea = contourArea(contours[i]);
        double areaBetweenLines = 0;

        for (int tempContourIdx = 0; tempContourIdx < tempContours.size(); tempContourIdx++)
        {
            areaBetweenLines += contourArea(tempContours[tempContourIdx]);

        }


        if (areaBetweenLines / totalArea >= MIN_AREA_PERCENT_WITHIN_LINES)
        {
            includedIndices[i] = true;
        }

        innerArea.setTo(Scalar(0,0,0));
    }

    return includedIndices;
}
Example #13
Mat CharacterAnalysis::findOuterBoxMask()
{
    double min_parent_area = config->templateHeightPx * config->templateWidthPx * 0.10;	// Needs to be at least 10% of the plate area to be considered.

    int winningIndex = -1;
    int winningParentId = -1;
    int bestCharCount = 0;
    double lowestArea = 99999999999999;


    if (this->config->debugCharAnalysis)
        cout << "CharacterAnalysis::findOuterBoxMask" << endl;

    for (int imgIndex = 0; imgIndex < allContours.size(); imgIndex++)
    {
        //vector<bool> charContours = filter(thresholds[imgIndex], allContours[imgIndex], allHierarchy[imgIndex]);

        int charsRecognized = 0;
        int parentId = -1;
        bool hasParent = false;
        for (int i = 0; i < charSegments[imgIndex].size(); i++)
        {
            if (charSegments[imgIndex][i]) charsRecognized++;
            if (charSegments[imgIndex][i] && allHierarchy[imgIndex][i][3] != -1)
            {
                parentId = allHierarchy[imgIndex][i][3];
                hasParent = true;
            }
        }

        if (charsRecognized == 0)
            continue;

        if (hasParent)
        {
            double boxArea = contourArea(allContours[imgIndex][parentId]);
            if (boxArea < min_parent_area)
                continue;

            if ((charsRecognized > bestCharCount) ||
                    (charsRecognized == bestCharCount && boxArea < lowestArea))
                //(boxArea < lowestArea)
            {
                bestCharCount = charsRecognized;
                winningIndex = imgIndex;
                winningParentId = parentId;
                lowestArea = boxArea;
            }
        }


    }

    if (this->config->debugCharAnalysis)
        cout << "Winning image index is: " << winningIndex << endl;




    if (winningIndex != -1 && bestCharCount >= 3)
    {
        int longestChildIndex = -1;
        double longestChildLength = 0;
        // Find the child with the longest perimeter/arc length (just for kicks)
        for (int i = 0; i < allContours[winningIndex].size(); i++)
        {
            for (int j = 0; j < allContours[winningIndex].size(); j++)
            {
                if (allHierarchy[winningIndex][j][3] == winningParentId)
                {
                    double arclength = arcLength(allContours[winningIndex][j], false);
                    if (arclength > longestChildLength)
                    {
                        longestChildIndex = j;
                        longestChildLength = arclength;
                    }
                }
            }
        }





        Mat mask = Mat::zeros(thresholds[winningIndex].size(), CV_8U);

        // draw the winning parent contour filled in white to form the mask
        drawContours(mask, allContours[winningIndex],
                     winningParentId, // draw this contour
                     cv::Scalar(255,255,255), // in white
                     CV_FILLED,
                     8,
                     allHierarchy[winningIndex],
                     0
                    );


        // Morph Open the mask to get rid of any little connectors to non-plate portions
        int morph_elem  = 2;
        int morph_size = 3;
        Mat element = getStructuringElement( morph_elem, Size( 2*morph_size + 1, 2*morph_size+1 ), Point( morph_size, morph_size ) );

        //morphologyEx( mask, mask, MORPH_CLOSE, element );
        morphologyEx( mask, mask, MORPH_OPEN, element );

        //morph_size = 1;
        //element = getStructuringElement( morph_elem, Size( 2*morph_size + 1, 2*morph_size+1 ), Point( morph_size, morph_size ) );
        //dilate(mask, mask, element);


        // Drawing the edge black effectively erodes the image.  This may clip off some extra junk from the edges.
        // We'll want to run findContours again and keep the largest contour so that the clipped portion is removed.

        vector<vector<Point> > contoursSecondRound;

        findContours(mask, contoursSecondRound, CV_RETR_EXTERNAL, CV_CHAIN_APPROX_SIMPLE);
        int biggestContourIndex = -1;
        double largestArea = 0;
        for (int c = 0; c < contoursSecondRound.size(); c++)
        {
            double area = contourArea(contoursSecondRound[c]);
            if (area > largestArea)
            {
                biggestContourIndex = c;
                largestArea = area;
            }
        }

        if (biggestContourIndex != -1)
        {
            mask = Mat::zeros(thresholds[winningIndex].size(), CV_8U);

            vector<Point> smoothedMaskPoints;
            approxPolyDP(contoursSecondRound[biggestContourIndex], smoothedMaskPoints, 2, true);

            vector<vector<Point> > tempvec;
            tempvec.push_back(smoothedMaskPoints);
            //fillPoly(mask, smoothedMaskPoints.data(), smoothedMaskPoints, Scalar(255,255,255));
            drawContours(mask, tempvec,
                         0, // draw this contour
                         cv::Scalar(255,255,255), // in white
                         CV_FILLED,
                         8,
                         allHierarchy[winningIndex],
                         0
                        );



        }

        if (this->config->debugCharAnalysis)
        {
            vector<Mat> debugImgs;
            Mat debugImgMasked = Mat::zeros(thresholds[winningIndex].size(), CV_8U);

            thresholds[winningIndex].copyTo(debugImgMasked, mask);

            debugImgs.push_back(mask);
            debugImgs.push_back(thresholds[winningIndex]);
            debugImgs.push_back(debugImgMasked);

            Mat dashboard = drawImageDashboard(debugImgs, CV_8U, 1);
            displayImage(config, "Winning outer box", dashboard);
        }

        hasPlateMask = true;
        return mask;
    }

    hasPlateMask = false;
    Mat fullMask = Mat::zeros(thresholds[0].size(), CV_8U);
    bitwise_not(fullMask, fullMask);
    return fullMask;



}