Example #1
static char triangleInRectangleTest(CvSeq* c, struct Triangle* t) {
    /* inside only if all three triangle vertices lie strictly inside the contour */
    if( cvPointPolygonTest( c, cvPointTo32f( t->pt[0] ), 0 ) > 0 &&
        cvPointPolygonTest( c, cvPointTo32f( t->pt[1] ), 0 ) > 0 &&
        cvPointPolygonTest( c, cvPointTo32f( t->pt[2] ), 0 ) > 0 ) {
        return 1;
    }
    return 0;
}
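For reference, here is a minimal standalone sketch (an addition, not part of the snippet above; it assumes the legacy OpenCV 1.x C API and headers) of the sign convention these examples rely on: with measure_dist = 0, cvPointPolygonTest returns a positive value for a point inside the contour, a negative value outside, and 0 on an edge.

#include <opencv/cv.h>   /* legacy C API header */
#include <stdio.h>

int main(void)
{
    /* build a 10x10 square contour as a point sequence */
    CvMemStorage* storage = cvCreateMemStorage(0);
    CvSeq* square = cvCreateSeq(CV_SEQ_POLYGON, sizeof(CvSeq), sizeof(CvPoint), storage);
    CvPoint corners[4] = { {0, 0}, {10, 0}, {10, 10}, {0, 10} };
    for (int i = 0; i < 4; i++)
        cvSeqPush(square, &corners[i]);

    /* measure_dist = 0: only the sign (or zero) of the result is meaningful */
    printf("%f\n", cvPointPolygonTest(square, cvPoint2D32f(5, 5), 0));   /* > 0: inside   */
    printf("%f\n", cvPointPolygonTest(square, cvPoint2D32f(20, 5), 0));  /* < 0: outside  */
    printf("%f\n", cvPointPolygonTest(square, cvPoint2D32f(0, 5), 0));   /* == 0: on edge */

    cvReleaseMemStorage(&storage);
    return 0;
}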
Example #2
/*
 * Determines whether the point is inside a contour, outside, or lies on an edge (or coincides with a vertex).
 * @overload point_polygon_test(point, measure_dist)
 *   @param point [CvPoint2D32f] Point tested against the contour
 *   @param measure_dist [Boolean] If true, the method estimates the signed distance from the point to
 *      the nearest contour edge. Otherwise, the function only checks if the point is inside a contour or not.
 * @return [Number] When measure_dist = false, the return value is +1 (inside), -1 (outside) or 0 (on an edge).
 *   When measure_dist = true, it is a signed distance between the point and the nearest contour edge.
 * @opencv_func cvPointPolygonTest
 */
VALUE
rb_point_polygon_test(VALUE self, VALUE point, VALUE measure_dist)
{
  int measure_dist_flag;

  if (measure_dist == Qtrue)
    measure_dist_flag = 1;
  else if (measure_dist == Qfalse)
    measure_dist_flag = 0;
  else
    measure_dist_flag = NUM2INT(measure_dist);

  double dist = 0;
  try {
    dist = cvPointPolygonTest(CVARR(self), VALUE_TO_CVPOINT2D32F(point), measure_dist_flag);
  }
  catch (cv::Exception& e) {
    raise_cverror(e);
  }
  
  /* With measure_dist = 0, cvPointPolygonTest returns 100, -100 or 0 in older OpenCV builds
     (+1, -1 or 0 in newer ones); normalize the result to +1 / -1 / 0 */
  if ((!measure_dist_flag) && ((int)dist) != 0)
    dist = (dist > 0) ? 1 : -1;

  return rb_float_new(dist);
}
Example #3
CvScalar contour_average(CvContour* contour, IplImage* image)
{
    CvRect rect = contour->rect;
    
    CvScalar average = cvScalarAll(0);
    int count = 0;
    for(int x = rect.x; x < (rect.x+rect.width); x++) {
        for(int y = rect.y; y < (rect.y+rect.height); y++) {
            /* positive result: the pixel lies inside the contour
               (old OpenCV builds return +100 here, newer ones +1) */
            if(cvPointPolygonTest(contour, cvPointTo32f(cvPoint(x,y)), 0) > 0) {
                CvScalar s = cvGet2D(image,y,x);
                average.val[0] += s.val[0];
                average.val[1] += s.val[1];
                average.val[2] += s.val[2];
                
                count++;
            }
        }
    }
    
    if (count > 0) {
        for(int i = 0; i < 3; i++){
            average.val[i] /= count;
        }
    }
    
    return average;
}
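Scanning every pixel of the bounding rectangle costs one point-in-polygon test per pixel. As a hedged alternative sketch (same CvContour and IplImage inputs assumed; contour_average_masked is an illustrative name, not taken from the snippet above), the contour interior can be rasterized once into a mask and averaged with cvAvg:

#include <opencv/cv.h>   /* legacy C API header */

CvScalar contour_average_masked(CvContour* contour, IplImage* image)
{
    /* paint the contour interior into a single-channel mask */
    IplImage* mask = cvCreateImage(cvGetSize(image), IPL_DEPTH_8U, 1);
    cvZero(mask);
    cvDrawContours(mask, (CvSeq*)contour, cvScalarAll(255), cvScalarAll(255),
                   0, CV_FILLED, 8, cvPoint(0, 0));

    /* cvAvg honours the mask, so only interior pixels contribute */
    CvScalar average = cvAvg(image, mask);
    cvReleaseImage(&mask);
    return average;
}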
Example #4
double BlobGetXYInside::operator()(Blob &blob)
{
	if( blob.GetExternalContour()->GetContourPoints() )
	{
		return cvPointPolygonTest( blob.GetExternalContour()->GetContourPoints(), m_p,0) >= 0;
	}

	return 0;
}
Example #5
/*
 * Calculates the distance between a point and the nearest contour edge.
 * @overload measure_distance(point)
 *   @param point [CvPoint2D32f] Point tested against the contour
 * @return Signed distance between the point and the nearest contour edge
 * @opencv_func cvPointPolygonTest
 */
VALUE
rb_measure_distance(VALUE self, VALUE point)
{
  double distance = 0;
  try {
    distance = cvPointPolygonTest(CVARR(self), VALUE_TO_CVPOINT2D32F(point), 1);
  }
  catch (cv::Exception& e) {
    raise_cverror(e);
  }
  return rb_float_new(distance);
}
Example #6
/*
 * Performs a point-in-contour test.
 * The method determines whether the point is inside a contour, outside,
 * or lies on an edge (or coincides with a vertex).
 * @overload in?(point)
 *   @param point [CvPoint2D32f] Point tested against the contour
 * @return [Boolean] If the point is inside, returns true. If outside, returns false.
 *   If it lies on an edge (or coincides with a vertex), returns nil.
 * @opencv_func cvPointPolygonTest
 */
VALUE
rb_in_q(VALUE self, VALUE point)
{
  double n = 0;
  try {
    n = cvPointPolygonTest(CVARR(self), VALUE_TO_CVPOINT2D32F(point), 0);
  }
  catch (cv::Exception& e) {
    raise_cverror(e);
  }
  return n == 0 ? Qnil : n > 0 ? Qtrue : Qfalse;
}
Example #7
void lmprojinv(double *p, double *x, int m, int n, void *data)
{
	double totdist = 0;
	int iter = seq2->total<LMRAWCONTOURN?1:(seq2->total/LMRAWCONTOURN); 
	for(int i = 0;i < n;i++){
		CvPoint*pt = (CvPoint*)cvGetSeqElem(seq2,i*iter);
		double d = p[6]*pt->x + p[7]*pt->y + p[8];
		double px = (p[0]*pt->x + p[1]*pt->y + p[2])/d;
		double py = (p[3]*pt->x + p[4]*pt->y + p[5])/d;
		double dist = fabs(cvPointPolygonTest(seq1, cvPoint2D32f(px,py), 1));
		x[i] = dist;
		totdist+= dist;
	}
}
Example #8
void lmproj(double *p, double *x, int m, int n, void *data)
{
	for(int i = 0;i < seq1->total;i++){
		CvPoint*pt = (CvPoint*)cvGetSeqElem(seq1,i);
		double d = p[6]*pt->x + p[7]*pt->y + 1.0;
		double px = (p[0]*pt->x + p[1]*pt->y + p[2])/d;
		double py = (p[3]*pt->x + p[4]*pt->y + p[5])/d;
		double dist=0.0;
		if (px>=0&&px<IMG_WIDTH&&py>=0&&py<IMG_HEIGHT)
		{				
			dist = fabs(cvPointPolygonTest(seq2, cvPoint2D32f(px,py), 1));
		} 
		x[i] = dist;
	
	}

}
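In both callbacks the third argument of cvPointPolygonTest is 1, so it returns a signed Euclidean distance to the nearest contour edge rather than just a sign; the absolute value is the unsigned residual the Levenberg-Marquardt fit minimizes. A minimal sketch of that building block (contour_residual is an illustrative helper name; any point sequence such as seq1 or seq2 above would work):

#include <opencv/cv.h>   /* legacy C API header */
#include <math.h>

/* Unsigned pixel distance from (px, py) to the nearest edge of `contour`. */
static double contour_residual(const CvSeq* contour, double px, double py)
{
    double signed_dist = cvPointPolygonTest(contour, cvPoint2D32f(px, py), 1);
    return fabs(signed_dist);   /* magnitude only, whether inside or outside */
}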
Example #9
File: PAW.cpp  Project: 09Hero/code
/*
    This method calculates the convex hull of the source landmarks
    and populates the pointsInsideHull vector with the coordinates of the points inside it.
*/
void PAW::populatePointsInsideHull(){
    // compute the srcLandmarks convex hull
    CvPoint* pointsHull = (CvPoint*)malloc( nLandmarks * sizeof(pointsHull[0]));
    int* hull = (int*)malloc( nLandmarks * sizeof(hull[0]));
    CvMat pointMat = cvMat( 1, nLandmarks, CV_32SC2, pointsHull );
    CvMat hullMat = cvMat( 1, nLandmarks, CV_32SC1, hull );
        

    for(int i = 0; i < nLandmarks; i++ )
    {           
        pointsHull[i] = cvPoint(srcLandmarks.at<int>(i,0),srcLandmarks.at<int>(i,1));
    }
    cvConvexHull2( &pointMat, &hullMat, CV_CLOCKWISE, 0 );
    int hullcount = hullMat.cols;
        
    CvPoint* pointsHullFinal = (CvPoint*)malloc( hullcount * sizeof(pointsHullFinal[0]));        
        
    
    for(int i = 0; i < hullcount; i++ ){
        int ptIndex = hull[i];
        CvPoint pt = cvPoint(    srcLandmarks.at<int>(ptIndex,0),
                                srcLandmarks.at<int>(ptIndex,1));

        pointsHullFinal[i] = pt;
    }

    CvMat hullMatPoints = cvMat( 1, hullcount, CV_32SC2, pointsHullFinal);

    //check if point belongs
    for (int j=0;j<baseImageHeight;j++){            
        for(int i=0;i< baseImageWidth;i++){
            
            double distance = cvPointPolygonTest(&hullMatPoints,cvPoint2D32f(i,j),1);
            if(distance >=0){                    
                pointsInsideHull.push_back(cvPoint(i,j));        
            }
        }
    }
    free(pointsHullFinal);
    free(hull);
    free(pointsHull);
}
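As the loop above shows, cvPointPolygonTest accepts a 1xN CV_32SC2 CvMat of vertices just as well as a CvSeq, so no CvMemStorage is needed when the polygon already exists as a plain array. A minimal standalone sketch (hypothetical quad data, legacy C API assumed):

#include <opencv/cv.h>   /* legacy C API header */
#include <stdio.h>

int main(void)
{
    /* polygon stored as a plain array, wrapped in a CvMat header */
    CvPoint quad[4] = { {0, 0}, {100, 0}, {100, 100}, {0, 100} };
    CvMat poly = cvMat(1, 4, CV_32SC2, quad);

    /* measure_dist = 1: signed distance to the nearest edge (positive inside) */
    double d = cvPointPolygonTest(&poly, cvPoint2D32f(50, 50), 1);
    printf("signed distance: %f\n", d);   /* 50.0 for the centre of this square */
    return 0;
}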
Example #10
bool CPolygonalRegion::IsPointInner(double x, double y) const {

  if (GetSize() <= 2) return false;
  else return (cvPointPolygonTest(_pPointsOuter, cvPoint2D32f(x,y), 0) >= 0);
}
Example #11
File: widget.cpp  Project: xian0gang/test
Widget::Widget(QWidget *parent) :
    QWidget(parent),
    ui(new Ui::Widget)
{
    ui->setupUi(this);

    IplImage *image, *hsv, *hue, *saturation, *value;    // image buffers

//    cv::Mat image = cv::imread("/home/xiangang/qt/qt_example/yuzhi/pic.bmp" ,CV_LOAD_IMAGE_GRAYSCALE);
    image = cvLoadImage("picture.bmp");
//    image = cvLoadImage("2.jpg");
    cvShowImage("image", image);
    hsv = cvCreateImage(cvGetSize(image), 8, 3);          // allocate the HSV image
    hue = cvCreateImage(cvGetSize(image), 8, 1);          // hue
    saturation = cvCreateImage(cvGetSize(image), 8, 1);   // saturation
    value = cvCreateImage(cvGetSize(image), 8, 1);        // value (brightness)

    cvNamedWindow("image", CV_WINDOW_AUTOSIZE);           // window used to display the image
//    cvNamedWindow("hsv",CV_WINDOW_AUTOSIZE);
//    cvNamedWindow("hue",CV_WINDOW_AUTOSIZE);
//    cvNamedWindow("saturation",CV_WINDOW_AUTOSIZE);
//    cvNamedWindow("value",CV_WINDOW_AUTOSIZE);

    cvCvtColor(image, hsv, CV_BGR2HSV);                   // convert from BGR to HSV
    qDebug("rgb to hsv");

    cvSplit(hsv, hue, saturation, value, 0);              // split the three channels

    cvShowImage("hue", hue);
    cvShowImage("saturation", saturation);
    cvShowImage("value", value);
    qDebug("show pic!");
    int th1 = 140;
    cv::Mat global1;
    cv::threshold(cv::Mat(hue,false), global1, th1, 360, CV_THRESH_BINARY_INV);

    int th3 = 100;
    cv::Mat global3;
    cv::threshold(cv::Mat(saturation,false), global3, th3, 255, CV_THRESH_BINARY_INV);

    int th2 = 80;
    cv::Mat global2;
    cv::threshold(cv::Mat(value,false), global2, th2, 255, CV_THRESH_BINARY_INV);

//    int blockSize = 2;
//    int constValue = 10;
//    cv::Mat local;
//    cv::adaptiveThreshold(cv::Mat( hue,false), local, 255, CV_ADAPTIVE_THRESH_MEAN_C, CV_THRESH_BINARY_INV, blockSize, constValue);

    cv::imwrite("hue.jpg", cv::Mat(hue,false));
    cv::imwrite("global1.jpg", global1);
    cv::imwrite("global2.jpg", global2);
//    cv::imwrite("local.jpg", local);

//    cv::imshow("H channel Otsu global threshold", hue);
//    cvShowImage("H channel Otsu global threshold", hue);
    cv::imshow("H channel global threshold", global1);
    cv::imshow("V channel global threshold", global2);
    cv::imshow("S channel global threshold", global3);
//    cv::imshow("H channel local threshold", local);

    // Otsu binarization
    ImageBinarization(hue);
    IplImage *img = cvLoadImage("hue.jpg");                             // load the image
    IplImage *imgBuf = cvCreateImage(cvGetSize(img), IPL_DEPTH_8U, 3);  // 3-channel buffer
    cvConvert(img, imgBuf);   // convert the source image to 8-bit depth, 3 channels
    for (int i = 0; i < img->height; i++)
    {
        for(int j = 0; j < img->width; j++)
        {
            if((((uchar *)(img->imageData +  i * img->widthStep ))[j * img->nChannels + 0] > 200)/*&&
                    (((uchar *)(img->imageData +  i * img->widthStep ))[j * img->nChannels + 0] < 100)*/)
            {
                ((uchar *)(img->imageData +  i * img->widthStep ))[j * img->nChannels + 0] = 234;   // change the pixel's B component
                ((uchar *)(img->imageData +  i * img->widthStep ))[j * img->nChannels + 1] = 200;   // change the pixel's G component
                ((uchar *)(img->imageData +  i * img->widthStep ))[j * img->nChannels + 2] = 150;   // change the pixel's R component
            }
        }
    }
    cv::imshow("H channel test", cv::Mat(img,false));

    for (int i = 0; i < (global2.rows)/2; i++)
    {
        for(int j = 0; j < global2.cols; j++)
        {
            for(int n = 0; n < global2.channels(); n++ )
            {
                if(global2.at<uchar>(i, j*global2.channels()+n) > 125)
                {
                    global1.at<uchar>(i, j*global1.channels()+n) = 255;
//                    local.at<uchar>(i,j*local.channels()+n) = 255;
                }
            }
        }
    }
    cv::imshow("merged H and V channels", global1);
    cv::imwrite("success.jpg", global1);


    IplImage *img3 = cvLoadImage("success.jpg", 0);
    if(img3 == NULL)
    {
        qDebug("img load failed!\n");
//        return 0;
    }
    IplImage *img_erode = cvCreateImage(cvGetSize(img), 8, 1);
    IplImage *img_dilate = cvCreateImage(cvGetSize(img), 8, 1);

    cvErode( img3, img_erode, NULL, 1);        // erode
    cvDilate( img_erode, img_dilate, NULL, 3); // dilate
//    cvErode( img_dilate,img_erode, NULL,1);  // erode
    cvShowImage("erode", img_erode);
    cvShowImage("dilate", img_dilate);
    cv::imwrite("dilate_success.jpg", cv::Mat(img_dilate,false));
    /***************** contour extraction *****************/
//    IplImage* src = NULL;
//    IplImage* img = NULL;
    IplImage* dst = NULL;
    CvMemStorage* storage = cvCreateMemStorage(0);
    CvSeq* contour = 0;
    double contour_area_temp = 0, contour_area_max = 0, contour_area_2 = 0;
    CvSeq* area_max_contour = 0;
    CvSeq* area_2_contour = 0;
    int contours = 0;
    CvScalar external_color;
    CvScalar hole_color;
    // find contours (CV_RETR_EXTERNAL / CV_RETR_LIST)
    contours = cvFindContours(img_dilate, storage, &contour, sizeof(CvContour), CV_RETR_LIST, CV_CHAIN_APPROX_NONE);
    qDebug("contours:%d", contours);

    // create a new image on which to display the contours
    dst = cvCreateImage(cvGetSize(image), image->depth, image->nChannels);
    cvCopy(image, dst);

    for (; contour != 0; contour = contour->h_next)
    {
        contour_area_temp = fabs(cvContourArea(contour, CV_WHOLE_SEQ));
        if(contour_area_temp > contour_area_max)
        {
            contour_area_2 = contour_area_max;
            contour_area_max = contour_area_temp;
            area_2_contour = area_max_contour;
            area_max_contour = contour;
        }
    }
    external_color = CV_RGB(rand()&255, rand()&255, rand()&255);
    hole_color = CV_RGB(rand()&255, rand()&255, rand()&255);
    // draw the contour
    cvDrawContours(dst, area_2_contour, external_color, hole_color, 0, 2, 8);

    cvNamedWindow("Contour", 1);
    cvShowImage("Contour", dst);

    for (int i = 0; i < image->height; i++)
    {
        for(int j = 0; j < image->width; j++)
        {
            // -1, 0, 1 mean outside, on, and inside the contour respectively
            if(-1 == (cvPointPolygonTest(area_2_contour, cv::Point2f(j,i), 0)))
            {
                ((uchar *)(image->imageData +  i * image->widthStep ))[j * image->nChannels + 0] = 234;   // change the pixel's B component
                ((uchar *)(image->imageData +  i * image->widthStep ))[j * image->nChannels + 1] = 200;   // change the pixel's G component
                ((uchar *)(image->imageData +  i * image->widthStep ))[j * image->nChannels + 2] = 150;   // change the pixel's R component
            }
        }
    }
    cv::imshow("final", cv::Mat(image,false));


    /*****************************************/
//    cvWrite("dilate_success.jpg",img_dilate);

//    cv::waitKey(0); // wait for a key press; ESC exits the wait loop
//    while(true)
//    {
//        int c;
//        c = cv::waitKey( 20 );
//        if( (char)c == 27 )
//            { break; }
//    }

    zhifangtu(value);   // "zhifangtu" = histogram

    cvReleaseMemStorage(&storage);
//    cvReleaseImage(&src);
    cvReleaseImage(&img);
    cvReleaseImage(&dst);
}
Example #12
void FrameProcessor::DetectAndDrawQuads(IplImage* img, vector <Square>& cubes, CvCapture* capture)
{
	float angle = 0.0f;
	CvSeq* contours;
	CvSeq* result;
	CvMemStorage *storage = cvCreateMemStorage(0);

	//IplImage* ret = cvCreateImage(cvGetSize(img), 8, 3);
	IplImage* temp = cvCreateImage(cvGetSize(img), 8, 1);

	cvCvtColor(img, temp, CV_BGR2GRAY);

	IplImage* Img = cvCreateImage (cvGetSize (img), 8, 1);
	cvThreshold(temp, Img, 0, 255, CV_THRESH_BINARY_INV | CV_THRESH_OTSU);

	cvFindContours(Img, storage, &contours, sizeof(CvContour), CV_RETR_LIST, CV_CHAIN_APPROX_SIMPLE, cvPoint(0,0));
	CvSeq* contours1 = contours;
	while(contours)
	{
		result = cvApproxPoly(contours, sizeof(CvContour), storage, CV_POLY_APPROX_DP, cvContourPerimeter(contours) * 0.02, 0);

		if((result->total==4) && (fabs(cvContourArea(result, CV_WHOLE_SEQ)) > 30) && cvCheckContourConvexity(result))
		{
			int countTriang = 0;
			CvSeq* contours2 = contours1;

			while(contours2){
				CvSeq* result2 = cvApproxPoly(contours2, sizeof(CvContour), storage, CV_POLY_APPROX_DP, cvContourPerimeter(contours2)*0.02, 0);
				if((result2->total==3) && (fabs(cvContourArea(result2, CV_WHOLE_SEQ)) > 50) && cvCheckContourConvexity(result2))
				{
					CvPoint *pt[3];
					for(int i=0;i<3;i++)
						pt[i] = (CvPoint*)cvGetSeqElem(result2, i);

					CvPoint ptCent[1];
					ptCent[0].x = (pt[0]->x + pt[1]->x + pt[2]->x)/3;
					ptCent[0].y = (pt[0]->y + pt[1]->y + pt[2]->y)/3;
					//printf("(%d, %d) (%d,%d) (%d,%d)\n", pt[0]->x, pt[0]->y, pt[1]->x, pt[1]->y, pt[2]->x, pt[2]->y);
					//cvCircle(ret, ptCent[0], 5, cvScalar(255));

					CvPoint2D32f triang;
					triang.x = ptCent[0].x;
					triang.y = ptCent[0].y;

					if(cvPointPolygonTest(result, triang, 0) > 0){
						countTriang++;
						//cvLine(ret, *pt[0], *pt[1], cvScalar(30, 50, 50));
						//cvLine(ret, *pt[1], *pt[2], cvScalar(30, 50, 50));
						//cvLine(ret, *pt[2], *pt[0], cvScalar(30, 50, 50));
						angle = GetAngle(pt);
					}
				}

				contours2 = contours2->h_next;
			}

			if(countTriang == 1){
				CvPoint *pt[4];
				for(int i=0;i<4;i++)
					pt[i] = (CvPoint*)cvGetSeqElem(result, i);

				//cvLine(ret, *pt[0], *pt[1], cvScalar(255));
				//cvLine(ret, *pt[1], *pt[2], cvScalar(255));
				//cvLine(ret, *pt[2], *pt[3], cvScalar(255));
				//cvLine(ret, *pt[3], *pt[0], cvScalar(255));

				CvSeq* contours3 = contours1;
				int countSquare = 0;
				while(contours3){
					CvSeq* result3 = cvApproxPoly(contours3, sizeof(CvContour), storage, CV_POLY_APPROX_DP, cvContourPerimeter(contours3)*0.02, 0);
					if((result3->total==4) && (fabs(cvContourArea(result3, CV_WHOLE_SEQ)) > 50) && cvCheckContourConvexity(result3))
					{
						CvPoint *pt[4];
						for(int i=0;i<4;i++)
							pt[i] = (CvPoint*)cvGetSeqElem(result3, i);

						CvPoint ptCent[1];
						ptCent[0].x = (pt[0]->x + pt[1]->x + pt[2]->x + pt[3]->x)/4;
						ptCent[0].y = (pt[0]->y + pt[1]->y + pt[2]->y + pt[3]->y)/4;

						//cvCircle(ret, ptCent[0], 5, cvScalar(255));

						CvPoint2D32f square;
						square.x = ptCent[0].x;
						square.y = ptCent[0].y;

						//cout << cvPointPolygonTest(result, triang, 0);
						if(cvPointPolygonTest(result, square, 0) > 0){
							countSquare++;
							//cvLine(ret, *pt[0], *pt[1], cvScalar(90, 50, 50));
							//cvLine(ret, *pt[1], *pt[2], cvScalar(90, 50, 50));
							//cvLine(ret, *pt[2], *pt[3], cvScalar(90, 50, 50));
							//cvLine(ret, *pt[3], *pt[0], cvScalar(90, 50, 50));
						}
					}
					contours3 = contours3->h_next;
				}
				countSquare--;

				CvPoint *pt1[4];
				for(int i=0;i<4;i++)
					pt1[i] = (CvPoint*)cvGetSeqElem(result, i);

				CvPoint ptCent[1];
				ptCent[0].x = (pt1[0]->x + pt1[1]->x + pt1[2]->x + pt1[3]->x)/4;
				ptCent[0].y = (pt1[0]->y + pt1[1]->y + pt1[2]->y + pt1[3]->y)/4;
				//printf("%d %d\n", ptCent[0].x, ptCent[0].y);
				//cvCircle(ret, ptCent[0], 5, cvScalar(255));

				CvPoint pC[1];
				pC[0].x = ptCent[0].x;
				pC[0].y = ptCent[0].y;

				int width = (int) cvGetCaptureProperty(capture, CV_CAP_PROP_FRAME_WIDTH);
				int height = (int) cvGetCaptureProperty(capture, CV_CAP_PROP_FRAME_HEIGHT);

				pC[0].x = width - ptCent[0].x;

				pC[0].x = (pC[0].x * 100) / width;
				pC[0].y = (ptCent[0].y * 100) / height;

				Square test3(countSquare, pC[0], abs(pt1[0]->x - pC[0].x) * 1.5, abs(pt1[0]->x - pC[0].x) * 1.5, angle);
				cubes.push_back(test3);
			}
		}

		contours = contours->h_next;
	}

	cvReleaseImage(&temp);
	//cvReleaseImage(&ret);
	cvReleaseImage(&Img);
	cvReleaseMemStorage(&storage);
}