int main(int argc, char* argv[])
{
	IplImage *m_pPreImage = NULL;
	IplImage *m_pGrayImage = NULL;
	IplImage *m_pSmoothImage = NULL;
	IplImage *pPrev = NULL;
	IplImage *pCurr = NULL;
	IplImage *pDest = NULL;
	IplImage *pMask = NULL;
	IplImage *pMaskDest = NULL;
	IplImage *dst = NULL;
	CvMat *pPrevF = NULL;
	CvMat *pCurrF = NULL;
	CvSize imgSize;

    CvCapture *m_pCapture = NULL;
	CvVideoWriter *writer = 0;
	IplConvKernel* element = NULL;
	CvSeq* contour = 0;
	CvMemStorage* storage = cvCreateMemStorage(0);
	CvRect r;


    cvNamedWindow( "VideoDisplay1", 1 );
	cvNamedWindow( "VideoDisplay2", 1 );
	cvNamedWindow( "VideoDisplay3", 1 );
	cvNamedWindow( "VideoDisplay4", 1 );
	
// Capture
	m_pCapture = cvCreateFileCapture("MVI_8833.AVI");
	contour = cvCreateSeq(CV_SEQ_ELTYPE_POINT,sizeof(CvSeq),sizeof(CvPoint),storage);
	

    if( !m_pCapture )
    {
        fprintf(stderr,"Could not initialize capturing! \n");
        return -1;
    }
// Display
    while ( (m_pPreImage = cvQueryFrame(m_pCapture)))
    {	
		imgSize = cvSize(m_pPreImage->width, m_pPreImage->height);
		if(!m_pGrayImage)
			m_pGrayImage = cvCreateImage(imgSize, IPL_DEPTH_8U, 1);
		if(!pCurr)
			pCurr = cvCreateImage(imgSize, IPL_DEPTH_8U, 1);	
		if(!m_pSmoothImage)
			m_pSmoothImage = cvCreateImage(imgSize, IPL_DEPTH_8U, 1);

		// Preprocess the frame
		cvCvtColor(m_pPreImage, m_pGrayImage, CV_BGR2GRAY);   // convert to grayscale
		cvSmooth(m_pGrayImage, m_pSmoothImage, CV_GAUSSIAN, 3, 0, 0, 0);  // Gaussian smoothing to suppress noise
		cvEqualizeHist(m_pSmoothImage, pCurr);                // histogram equalization


		 if(!pPrevF)
			pPrevF = cvCreateMat(imgSize.height, imgSize.width, CV_32FC1);
		 if(!pCurrF)
			pCurrF = cvCreateMat(imgSize.height, imgSize.width, CV_32FC1);
		 if(!pPrev)
			pPrev = cvCreateImage(imgSize, IPL_DEPTH_8U, 1);
		 if(!pMask)
			pMask = cvCreateImage(imgSize, IPL_DEPTH_8U, 1);
		 if(!pMaskDest)
			pMaskDest = cvCreateImage(imgSize, IPL_DEPTH_8U, 1);
		 if(!dst)
			dst = cvCreateImage(imgSize, IPL_DEPTH_8U, 1);
		 if(!pDest)
			{
				pDest = cvCreateImage(imgSize, IPL_DEPTH_8U, 1);
				
			}
	
		cvAbsDiff(pPrev, pCurr, pDest);   // frame difference
		cvCopy(pCurr, pPrev, NULL);       // store the current frame as the previous frame

		
		cvThreshold(pDest, pMask, 80, 255, CV_THRESH_BINARY);     // binarize
		element = cvCreateStructuringElementEx( 9, 9, 3, 3, CV_SHAPE_RECT, NULL);
		cvMorphologyEx( pMask, pMaskDest, NULL, element, CV_MOP_CLOSE, 1);  // morphological closing
		cvReleaseStructuringElement( &element );  // the element is recreated every frame, so release it here
		
		// find and draw the blob contours
		cvFindContours( pMaskDest, storage, &contour, sizeof(CvContour), CV_RETR_CCOMP, CV_CHAIN_APPROX_SIMPLE );

		// draw the smallest rectangle enclosing each target
		for(;contour;contour=contour->h_next)
		{
			r=((CvContour*)contour)->rect;
			if(r.height*r.width>100)
			{
				cvRectangle(m_pPreImage,cvPoint(r.x,r.y),cvPoint(r.x+r.width,r.y+r.height),CV_RGB(255,0,0),1,CV_AA,0);
				
			}
		}


		cvShowImage( "VideoDisplay1", m_pPreImage );
		cvShowImage( "VideoDisplay2", pMask);
		cvShowImage( "VideoDisplay3", pMaskDest );
		cvShowImage( "VideoDisplay4", pPrev );

		if(cvWaitKey(50) > 0)
			break;
	}

	// Release (frames returned by cvQueryFrame are owned by the capture, so m_pPreImage is not released here)
	cvReleaseImage( &m_pGrayImage );
	cvReleaseImage( &m_pSmoothImage );
	cvReleaseImage( &pCurr );
	cvReleaseImage( &pPrev );
	cvReleaseImage( &pDest );
	cvReleaseImage( &pMask );
	cvReleaseImage( &pMaskDest );
	cvReleaseImage( &dst );
	cvReleaseMat( &pPrevF );
	cvReleaseMat( &pCurrF );
	cvReleaseCapture( &m_pCapture );
	cvReleaseMemStorage( &storage );
    cvDestroyWindow("VideoDisplay1");
	cvDestroyWindow("VideoDisplay2");
	cvDestroyWindow("VideoDisplay3");
	cvDestroyWindow("VideoDisplay4");
	cvReleaseStructuringElement( &element ); 

	return 0;
}
// returns sequence of squares detected on the image.
// the sequence is stored in the specified memory storage 
CvSeq* findSquares4( IplImage* img, CvMemStorage* storage ){  
	CvSeq* contours;  
	int i, c, l, N = 11;  
	CvSize sz = cvSize( img->width & -2, img->height & -2 );
	// make a copy of the input image
	IplImage* timg = cvCloneImage( img );
	IplImage* gray = cvCreateImage( sz, 8, 1 );
	IplImage* pyr = cvCreateImage( cvSize(sz.width/2, sz.height/2), 8, 3 );
	IplImage* tgray;
	CvSeq* result;
	double s, t;
	// create empty sequence that will contain points -
	// 4 points per square (the square's vertices)
	CvSeq* squares = cvCreateSeq( 0, sizeof(CvSeq), sizeof(CvPoint), storage );
	// select the maximum ROI in the image
	// with the width and height divisible by 2
	cvSetImageROI( timg, cvRect( 0, 0, sz.width, sz.height ));
	// down-scale and upscale the image to filter out the noise
	cvPyrDown( timg, pyr, 7 );
	cvPyrUp( pyr, timg, 7 );
	tgray = cvCreateImage( sz, 8, 1 );
	// find squares in every color plane of the image  
	for( c = 0; c < 3; c++ )  {  
		// extract the c-th color plane  
		cvSetImageCOI( timg, c+1 ); 
		cvCopy( timg, tgray, 0 ); 
		// try several threshold levels 
		for( l = 0; l < N; l++ )  { 
			// hack: use Canny instead of zero threshold level. 
			// Canny helps to catch squares with gradient shading
			if( l == 0 )  {  
				// apply Canny. Take the upper threshold from slider 
				// and set the lower to 0 (which forces edges merging) 
				cvCanny( tgray, gray, 0, thresh, 5 );
				// dilate canny output to remove potential 
				// holes between edge segments  
				cvDilate( gray, gray, 0, 1 );
			} 
			else  
			{ 
				// apply threshold if l!=0:  
				// tgray(x,y) = gray(x,y) < (l+1)*255/N ? 255 : 0  
				cvThreshold( tgray, gray, (l+1)*255/N, 255, CV_THRESH_BINARY ); 
			}
			// find contours and store them all as a list
			cvFindContours( gray, storage, &contours, sizeof(CvContour),
			                CV_RETR_LIST, CV_CHAIN_APPROX_SIMPLE, cvPoint(0,0) );
			// test each contour 
			while( contours )  { 
				// approximate contour with accuracy proportional 
				// to the contour perimeter
				result = cvApproxPoly( contours, sizeof(CvContour), storage,  CV_POLY_APPROX_DP, cvContourPerimeter(contours)*0.02, 0 );
				// square contours should have 4 vertices after approximation  
				// relatively large area (to filter out noisy contours)
				// and be convex.  
				// Note: absolute value of an area is used because 
				// area may be positive or negative - in accordance with the 
				// contour orientation
				if( result->total == 4 &&  fabs(cvContourArea(result,CV_WHOLE_SEQ)) > 1000 &&  cvCheckContourConvexity(result) )  {
					s = 0; 
					for( i = 0; i < 5; i++ )  
					{  
						// find minimum angle between joint 
						// edges (maximum of cosine)  
						if( i >= 2 )  {  
							t = fabs(angle( (CvPoint*)cvGetSeqElem( result, i ),
							                (CvPoint*)cvGetSeqElem( result, i-2 ),
							                (CvPoint*)cvGetSeqElem( result, i-1 )));
							s = s > t ? s : t;
						}  
					}  
					// if cosines of all angles are small  
					// (all angles are ~90 degrees) then write the quadrangle
					// vertices to resultant sequence 
					if( s < 0.3 )  
						for( i = 0; i < 4; i++ )  
							cvSeqPush( squares,  (CvPoint*)cvGetSeqElem( result, i ));
				}  
				// take the next contour  
				contours = contours->h_next; 
			}  
		}  
	} 
	// release all the temporary images 
	cvReleaseImage( &gray );
	cvReleaseImage( &pyr ); 
	cvReleaseImage( &tgray ); 
	cvReleaseImage( &timg );
	return squares;
} 
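The helper below is not part of the original listing; it is a minimal sketch of how the returned sequence can be consumed, assuming (as the comments above state) that every 4 consecutive CvPoint entries form one square. Note that the angle() helper and the thresh value referenced by findSquares4 are defined elsewhere in the original sample.

/* Hedged companion sketch: draw the quadrilaterals returned by findSquares4. */
static void drawSquaresSketch( IplImage* img, CvSeq* squares )
{
    CvSeqReader reader;
    IplImage* cpy = cvCloneImage( img );
    int i;

    cvStartReadSeq( squares, &reader, 0 );

    /* read 4 vertices at a time and draw each closed polygon */
    for( i = 0; i < squares->total; i += 4 )
    {
        CvPoint pt[4], *rect = pt;
        int count = 4;

        CV_READ_SEQ_ELEM( pt[0], reader );
        CV_READ_SEQ_ELEM( pt[1], reader );
        CV_READ_SEQ_ELEM( pt[2], reader );
        CV_READ_SEQ_ELEM( pt[3], reader );

        cvPolyLine( cpy, &rect, &count, 1, 1, CV_RGB(0,255,0), 3, CV_AA, 0 );
    }

    cvShowImage( "Squares", cpy );   /* placeholder window name */
    cvReleaseImage( &cpy );
}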
Example #3
// Reads a comma-separated database of single-character categorical attributes
// (the first column is treated as the response/class label, '?' marks a missing value)
// into the data/missing/responses matrices.
static int mushroom_read_database( const char* filename, CvMat** data, CvMat** missing, CvMat** responses )
{
    const int M = 1024;
    FILE* f = fopen( filename, "rt" );
    CvMemStorage* storage;
    CvSeq* seq;
    char buf[M+2], *ptr;
    float* el_ptr;
    CvSeqReader reader;
    int i, j, var_count = 0;

    if( !f )
        return 0;

    // read the first line and determine the number of variables
    if( !fgets( buf, M, f ))
    {
        fclose(f);
        return 0;
    }

    for( ptr = buf; *ptr != '\0'; ptr++ )
        var_count += *ptr == ',';
    assert( ptr - buf == (var_count+1)*2 );

    // create temporary memory storage to store the whole database
    el_ptr = new float[var_count+1];
    storage = cvCreateMemStorage();
    seq = cvCreateSeq( 0, sizeof(*seq), (var_count+1)*sizeof(float), storage );

    for(;;)
    {
        for( i = 0; i <= var_count; i++ )
        {
            int c = buf[i*2];
            el_ptr[i] = c == '?' ? -1.f : (float)c;
        }
        if( i != var_count+1 )
            break;
        cvSeqPush( seq, el_ptr );
        if( !fgets( buf, M, f ) || !strchr( buf, ',' ) )
            break;
    }
    fclose(f);

    // allocate the output matrices and copy the base there
    *data = cvCreateMat( seq->total, var_count, CV_32F );
    *missing = cvCreateMat( seq->total, var_count, CV_8U );
    *responses = cvCreateMat( seq->total, 1, CV_32F );

    cvStartReadSeq( seq, &reader );

    for( i = 0; i < seq->total; i++ )
    {
        const float* sdata = (float*)reader.ptr + 1;
        float* ddata = data[0]->data.fl + var_count*i;
        float* dr = responses[0]->data.fl + i;
        uchar* dm = missing[0]->data.ptr + var_count*i;

        for( j = 0; j < var_count; j++ )
        {
            ddata[j] = sdata[j];
            dm[j] = sdata[j] < 0;
        }
        *dr = sdata[-1];
        CV_NEXT_SEQ_ELEM( seq->elem_size, reader );
    }

    cvReleaseMemStorage( &storage );
    delete [] el_ptr;
    return 1;
}
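A short, hedged usage sketch for the reader above; the filename follows the UCI mushroom-data convention and is only an assumption here, not something stated in the listing.

/* Sketch: load the database, inspect it, and release the matrices. */
static void mushroom_usage_sketch( void )
{
    CvMat *data = 0, *missing = 0, *responses = 0;

    if( mushroom_read_database( "agaricus-lepiota.data", &data, &missing, &responses ) )
    {
        printf( "loaded %d samples with %d categorical variables each\n",
                data->rows, data->cols );

        /* ... train a classifier on (data, responses), using 'missing' as the missing-value mask ... */

        cvReleaseMat( &data );
        cvReleaseMat( &missing );
        cvReleaseMat( &responses );
    }
    else
        fprintf( stderr, "could not read the database\n" );
}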
static int aGestureRecognition(void)
{       
    IplImage *image, *imagew, *image_rez, *mask_rez, *image_hsv, *img_p[2],*img_v,
             *init_mask_ver = 0, *final_mask_ver = 0;
    CvPoint3D32f *pp, p;

    CvPoint pt;
    CvSize2D32f fsize;
    CvPoint3D32f center, cf;
    IplImage *image_mask, *image_maskw;
    
    CvSize size;
    CvHistogram *hist, *hist_mask;

    int width, height;
    int k_points, k_indexs;
    int warpFlag, interpolate;

    int hdim[2] = {20, 20};
    
    double coeffs[3][3], rect[2][2], rez = 0, eps_rez = 2.5, rez_h;
    float *thresh[2];
    float hv[3];
    
    float reps, aeps, ww;
    float line[6], in[3][3], h[3][3];
    float cx, cy, fx, fy;

    static char num[4]; 
    
    char *name_image;  
    char *name_range_image;
    char *name_verify_data;
    char *name_init_mask_very;
    char *name_final_mask_very;

    CvSeq *numbers;
    CvSeq *points;
    CvSeq *indexs;
        
    CvMemStorage *storage;
    CvRect hand_roi, hand_roi_trans;
    
    int i,j, lsize, block_size = 1000, flag;
    int code;

    FILE *filin, *fil_ver;

/* read tests params */

    code = TRS_OK;

/*  define input information    */
    strcpy (num, "001"); 

    lsize = strlen(data_path)+12;
    name_verify_data = (char*)trsmAlloc(lsize);
    name_range_image = (char*)trsmAlloc(lsize);
    name_image = (char*)trsmAlloc(lsize);

    name_init_mask_very = (char*)trsmAlloc(lsize);
    name_final_mask_very = (char*)trsmAlloc(lsize);

/*  define input range_image file path        */
    strcpy(name_range_image, data_path);
    strcat(name_range_image, "rpts");
    strcat(name_range_image, num);
    strcat(name_range_image, ".txt");

/*  define input image file path        */
    strcpy(name_image, data_path);
    strcat(name_image, "real");
    strcat(name_image, num);
    strcat(name_image, ".bmp");

/*  define verify data file path        */
    strcpy(name_verify_data, data_path);
    strcat(name_verify_data, "very");
    strcat(name_verify_data, num);
    strcat(name_verify_data, ".txt");

/*  define verify init mask file path    */
    strcpy(name_init_mask_very, data_path);
    strcat(name_init_mask_very, "imas");
    strcat(name_init_mask_very, num);
    strcat(name_init_mask_very, ".bmp");

/*  define verify final mask file path    */
    strcpy(name_final_mask_very, data_path);
    strcat(name_final_mask_very, "fmas");
    strcat(name_final_mask_very, num);
    strcat(name_final_mask_very, ".bmp");

    filin = fopen(name_range_image,"r");
    fil_ver = fopen(name_verify_data,"r");

    fscanf( filin, "\n%d %d\n", &width, &height);
    printf("width=%d height=%d  reading testing data...", width,height);

    OPENCV_CALL( storage = cvCreateMemStorage ( block_size ) );
    OPENCV_CALL( points = cvCreateSeq( CV_SEQ_POINT3D_SET, sizeof(CvSeq),
                            sizeof(CvPoint3D32f), storage ) );
    OPENCV_CALL (indexs = cvCreateSeq( CV_SEQ_POINT_SET, sizeof(CvSeq),
                            sizeof(CvPoint), storage ) );

    pp = 0;
    
/*  read input image from file   */   
    image = atsCreateImageFromFile( name_image );
    if(image == NULL)   {code = TRS_FAIL; goto m_exit;}

/*  read input 3D points from input file        */
    for (i = 0; i < height; i++)
    {
        for (j = 0; j < width; j++)    
        {
            fscanf( filin, "%f %f %f\n", &p.x, &p.y, &p.z);
            if(/*p.x != 0 || p.y != 0 ||*/ p.z != 0)
            {
                OPENCV_CALL(cvSeqPush(points, &p));
                pt.x = j; pt.y = i;
                OPENCV_CALL(cvSeqPush(indexs, &pt));
                               
            }
        }
    }

    k_points = points->total;
    k_indexs = indexs->total;

/*   convert sequence to array          */
    pp = (CvPoint3D32f*)trsmAlloc(k_points * sizeof(CvPoint3D32f));
    OPENCV_CALL(cvCvtSeqToArray(points, pp ));

/*  find 3D-line      */

    reps = (float)0.1;
    aeps = (float)0.1;
    ww = (float)0.08;

    OPENCV_CALL( cvFitLine3D(pp, k_points, CV_DIST_WELSCH, &ww, reps, aeps, line ));

/*  find hand location      */
    flag = -1;
    fsize.width = fsize.height = (float)0.22;  //   (hand size in m)

    numbers = NULL;
    OPENCV_CALL( cvFindHandRegion (pp, k_points, indexs,line, fsize,
                      flag,&center,storage, &numbers));

/*   read verify data    */
    fscanf( fil_ver, "%f %f %f\n", &cf.x, &cf.y, &cf.z);
    rez+= cvSqrt((center.x - cf.x)*(center.x - cf.x)+(center.y - cf.y)*(center.y - cf.y)+
         (center.z - cf.z)*(center.z - cf.z))/3.;
    
/*    create hand mask            */
    size.height = height;
    size.width = width;
    OPENCV_CALL( image_mask = cvCreateImage(size, IPL_DEPTH_8U, 1) ); 

    OPENCV_CALL( cvCreateHandMask(numbers, image_mask, &hand_roi) );

/*  read verify initial image mask                  */
    init_mask_ver = atsCreateImageFromFile( name_init_mask_very );
    if(init_mask_ver == NULL)   {code = TRS_FAIL; goto m_exit;}
    
    rez+= iplNorm(init_mask_ver, image_mask, IPL_L2) / (width*height+0.);

/*  calculate homographic transformation matrix            */
    cx = (float)(width / 2.);
    cy = (float)(height / 2.);
    fx = fy = (float)571.2048;

/* define intrinsic camera parameters                      */
    in[0][1] = in[1][0] = in[2][0] = in[2][1] = 0;
    in[0][0] = fx; in[0][2] = cx;
    in[1][1] = fy; in[1][2] = cy;
    in[2][2] = 1;

    OPENCV_CALL( cvCalcImageHomography(line, &center, in, h) );
    
    rez_h = 0;
    for(i=0;i<3;i++)
    {
        fscanf( fil_ver, "%f %f %f\n", &hv[0], &hv[1], &hv[2]);
        for(j=0;j<3;j++)
        {
            rez_h+=(hv[j] - h[i][j])*(hv[j] - h[i][j]);
        }
    }
    rez+=sqrt(rez_h)/9.;

/*   image unwarping         */
    size.width = image->width; 
    size.height = image->height; 
    OPENCV_CALL( imagew = cvCreateImage(size, IPL_DEPTH_8U,3) );
    OPENCV_CALL( image_maskw = cvCreateImage(size, IPL_DEPTH_8U,1) );

    iplSet(image_maskw, 0);

    cvSetImageROI(image, hand_roi);
    cvSetImageROI(image_mask, hand_roi);

/* convert homographic transformation matrix from float to double      */
    for(i=0;i<3;i++)
        for(j=0;j<3;j++)
            coeffs[i][j] = (double)h[i][j];

/*  get bounding rectangle for image ROI         */
    iplGetPerspectiveBound(image, coeffs, rect);

    width = (int)(rect[1][0] - rect[0][0]);
    height = (int)(rect[1][1] - rect[0][1]);
    hand_roi_trans.x = (int)rect[0][0];hand_roi_trans.y = (int)rect[0][1];
    hand_roi_trans.width = width; hand_roi_trans.height = height;

    cvMaxRect(&hand_roi, &hand_roi_trans, &hand_roi);
    iplSetROI((IplROI*)image->roi, 0, hand_roi.x, hand_roi.y,
               hand_roi.width,hand_roi.height);
    iplSetROI((IplROI*)image_mask->roi, 0, hand_roi.x, hand_roi.y,
                hand_roi.width,hand_roi.height);

    warpFlag = IPL_WARP_R_TO_Q;
/*    interpolate = IPL_INTER_CUBIC;   */
/*    interpolate = IPL_INTER_NN;      */
    interpolate = IPL_INTER_LINEAR;
    iplWarpPerspective(image, imagew, coeffs, warpFlag, interpolate);
    iplWarpPerspective(image_mask, image_maskw, coeffs, warpFlag, IPL_INTER_NN);  
    
/*  set new image and mask ROI after transformation        */
    iplSetROI((IplROI*)imagew->roi,0, (int)rect[0][0], (int)rect[0][1],(int)width,(int)height);
    iplSetROI((IplROI*)image_maskw->roi,0, (int)rect[0][0], (int)rect[0][1],(int)width,(int)height);

/*  copy image ROI to new image and resize        */
    size.width = width; size.height = height;
    image_rez = cvCreateImage(size, IPL_DEPTH_8U,3);
    mask_rez = cvCreateImage(size, IPL_DEPTH_8U,1);
 
    iplCopy(imagew,image_rez);
    iplCopy(image_maskw,mask_rez);
    
/* convert result image from RGB to HSV               */
    image_hsv = iplCreateImageHeader(3, 0, IPL_DEPTH_8U, "HSV", "HSV",
                                   IPL_DATA_ORDER_PIXEL, IPL_ORIGIN_TL,IPL_ALIGN_DWORD,
                                   image_rez->width, image_rez->height, NULL, NULL, NULL, NULL);
    iplAllocateImage(image_hsv, 0, 0 ); 
    strcpy(image_rez->colorModel, "RGB");
    strcpy(image_rez->channelSeq, "RGB");
    image_rez->roi = NULL;

    iplRGB2HSV(image_rez, image_hsv);

/* convert to three images planes                      */
    img_p[0] = cvCreateImage(size, IPL_DEPTH_8U,1);
    img_p[1] = cvCreateImage(size, IPL_DEPTH_8U,1);
    img_v = cvCreateImage(size, IPL_DEPTH_8U,1);

    cvCvtPixToPlane(image_hsv, img_p[0], img_p[1], img_v, NULL);
   
/*  calculate histograms                */
    hist = cvCreateHist ( 2, hdim, CV_HIST_ARRAY);
    hist_mask = cvCreateHist ( 2, hdim, CV_HIST_ARRAY);

/*  install histogram threshold         */
    thresh[0] = (float*) trsmAlloc(2*sizeof(float));
    thresh[1] = (float*) trsmAlloc(2*sizeof(float));

    thresh[0][0] = thresh[1][0] = -0.5;
    thresh[0][1] = thresh[1][1] = 255.5;
    cvSetHistThresh( hist, thresh, 1);
    cvSetHistThresh( hist_mask, thresh, 1);

    cvCalcHist(img_p, hist, 0);
        
    cvCalcHistMask(img_p, mask_rez, hist_mask, 0);
            
    cvCalcProbDensity(hist, hist_mask, hist_mask);

    cvCalcBackProject( img_p, mask_rez, hist_mask ); 

/*  read verify final image mask                  */
    final_mask_ver = atsCreateImageFromFile( name_final_mask_very );
    if(final_mask_ver == NULL)   {code = TRS_FAIL; goto m_exit;}

    rez+= iplNorm(final_mask_ver, mask_rez, IPL_L2) / (width*height+0.);

    trsWrite( ATS_CON | ATS_SUM, "\n gesture recognition \n");
    trsWrite( ATS_CON | ATS_SUM, "result testing error = %f \n",rez);

    if(rez > eps_rez) code = TRS_FAIL;
    else code = TRS_OK;
    
m_exit:    

    cvReleaseImage(&image_mask);
    cvReleaseImage(&mask_rez);
    cvReleaseImage(&image_rez);
    atsReleaseImage(final_mask_ver);
    atsReleaseImage(init_mask_ver);

    cvReleaseImage(&imagew);
    cvReleaseImage(&image_maskw); 

    cvReleaseImage(&img_p[0]);
    cvReleaseImage(&img_p[1]);
    cvReleaseImage(&img_v);
 
    cvReleaseHist( &hist);
    cvReleaseHist( &hist_mask);
    
    cvReleaseMemStorage ( &storage );

    trsFree(pp);
    trsFree(name_final_mask_very);
    trsFree(name_init_mask_very);
    trsFree(name_image);
    trsFree(name_range_image);
    trsFree(name_verify_data);

    fclose(filin);
    fclose(fil_ver);

    
/*    _getch();       */
    return code;
}
Example #5
/***************************************************************************************\
*
*   This function computes the intermediate polygon between contour1 and contour2.
*
*   The correspondence between the points of the contours is specified by corr.
*
*   param is in [0,1]:  0 corresponds to contour1, 1 to contour2.
*
\***************************************************************************************/
CvSeq* icvBlendContours(CvSeq* contour1,
                        CvSeq* contour2,
                        CvSeq* corr,
                        double param,
                        CvMemStorage* storage)
{
    int j;

    CvSeqWriter writer01;
    CvSeqReader reader01;

    int Ni,Nj;              // size of contours
    int i;                  // counter

    CvPoint* point1;        // array of points of the first contour
    CvPoint* point2;        // array of points of the second contour

    CvPoint point_output;   // intermediate storage for the output point

    int corr_point;

    // Create output sequence.
    CvSeq* output = cvCreateSeq(0,
                                sizeof(CvSeq),
                                sizeof(CvPoint),
                                storage );

    // Find size of contours.
    Ni = contour1->total + 1;
    Nj = contour2->total + 1;

    point1 = (CvPoint* )malloc( Ni*sizeof(CvPoint) );
    point2 = (CvPoint* )malloc( Nj*sizeof(CvPoint) );

    // Initialize the point arrays.
    cvCvtSeqToArray( contour1, point1, CV_WHOLE_SEQ );
    cvCvtSeqToArray( contour2, point2, CV_WHOLE_SEQ );

    // The first and last points must be equal (close both contours).
    point1[Ni-1] = point1[0];
    point2[Nj-1] = point2[0];

    // Initializes process of writing to sequence.
    cvStartAppendToSeq( output, &writer01);

    i = Ni-1; // index of the current point of contour1
    for( ; corr; corr = corr->h_next )
    {
        //Initializes process of sequential reading from sequence
        cvStartReadSeq( corr, &reader01, 0 );

        for(j=0; j < corr->total; j++)
        {
            // Read element from sequence.
            CV_READ_SEQ_ELEM( corr_point, reader01 );

            // Compute point of intermediate polygon.
            point_output.x = cvRound(point1[i].x + param*( point2[corr_point].x - point1[i].x ));
            point_output.y = cvRound(point1[i].y + param*( point2[corr_point].y - point1[i].y ));

            // Write element to sequence.
            CV_WRITE_SEQ_ELEM( point_output, writer01 );
        }
        i--;
    }
    // Updates sequence header.
    cvFlushSeqWriter( &writer01 );

    return output;
}
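icvBlendContours is the internal worker behind contour morphing in the legacy OpenCV module; assuming the public wrappers cvCalcContoursCorrespondence and cvMorphContours are available in the surrounding codebase (an assumption, not shown in this listing), the intended usage is roughly:

/* Rough sketch, under the assumption that the legacy contour-morphing wrappers exist in this build. */
static CvSeq* blend_halfway_sketch( CvSeq* contour1, CvSeq* contour2, CvMemStorage* storage )
{
    /* correspondence between the points of the two closed contours */
    CvSeq* corr = cvCalcContoursCorrespondence( contour1, contour2, storage );

    /* param = 0 reproduces contour1, param = 1 reproduces contour2 */
    return cvMorphContours( contour1, contour2, corr, 0.5, storage );
}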
Example #6
CV_IMPL CvSeq*
cvConvexHull2( const CvArr* array, void* hull_storage,
               int orientation, int return_points )
{
    CvMat* mat = 0;
    CvContour contour_header;
    CvSeq hull_header;
    CvSeqBlock block, hullblock;
    CvSeq* ptseq = 0;
    CvSeq* hullseq = 0;

    if( CV_IS_SEQ( array ))
    {
        ptseq = (CvSeq*)array;
        if( !CV_IS_SEQ_POINT_SET( ptseq ))
            CV_Error( CV_StsBadArg, "Unsupported sequence type" );
        if( hull_storage == 0 )
            hull_storage = ptseq->storage;
    }
    else
    {
        ptseq = cvPointSeqFromMat( CV_SEQ_KIND_GENERIC, array, &contour_header, &block );
    }

    bool isStorage = isStorageOrMat(hull_storage);

    if(isStorage)
    {
        if( return_points )
        {
            hullseq = cvCreateSeq(CV_SEQ_KIND_CURVE|CV_SEQ_ELTYPE(ptseq)|
                                  CV_SEQ_FLAG_CLOSED|CV_SEQ_FLAG_CONVEX,
                                  sizeof(CvContour), sizeof(CvPoint),(CvMemStorage*)hull_storage );
        }
        else
        {
            hullseq = cvCreateSeq(
                                  CV_SEQ_KIND_CURVE|CV_SEQ_ELTYPE_PPOINT|
                                  CV_SEQ_FLAG_CLOSED|CV_SEQ_FLAG_CONVEX,
                                  sizeof(CvContour), sizeof(CvPoint*), (CvMemStorage*)hull_storage );
        }
    }
    else
    {
        mat = (CvMat*)hull_storage;

        if( (mat->cols != 1 && mat->rows != 1) || !CV_IS_MAT_CONT(mat->type))
            CV_Error( CV_StsBadArg,
                     "The hull matrix should be continuous and have a single row or a single column" );

        if( mat->cols + mat->rows - 1 < ptseq->total )
            CV_Error( CV_StsBadSize, "The hull matrix size might be not enough to fit the hull" );

        if( CV_MAT_TYPE(mat->type) != CV_SEQ_ELTYPE(ptseq) &&
           CV_MAT_TYPE(mat->type) != CV_32SC1 )
            CV_Error( CV_StsUnsupportedFormat,
                     "The hull matrix must have the same type as input or 32sC1 (integers)" );

        hullseq = cvMakeSeqHeaderForArray(
                                          CV_SEQ_KIND_CURVE|CV_MAT_TYPE(mat->type)|CV_SEQ_FLAG_CLOSED,
                                          sizeof(hull_header), CV_ELEM_SIZE(mat->type), mat->data.ptr,
                                          mat->cols + mat->rows - 1, &hull_header, &hullblock );
        cvClearSeq( hullseq );
    }

    int hulltype = CV_SEQ_ELTYPE(hullseq);
    int total = ptseq->total;
    if( total == 0 )
    {
        if( !isStorage )
            CV_Error( CV_StsBadSize,
                     "Point sequence can not be empty if the output is matrix" );
        return 0;
    }

    cv::AutoBuffer<double> _ptbuf;
    cv::Mat h0;
    cv::convexHull(cv::cvarrToMat(ptseq, false, false, 0, &_ptbuf), h0,
                   orientation == CV_CLOCKWISE, CV_MAT_CN(hulltype) == 2);


    if( hulltype == CV_SEQ_ELTYPE_PPOINT )
    {
        const int* idx = h0.ptr<int>();
        int ctotal = (int)h0.total();
        for( int i = 0; i < ctotal; i++ )
        {
            void* ptr = cvGetSeqElem(ptseq, idx[i]);
            cvSeqPush( hullseq, &ptr );
        }
    }
    else
        cvSeqPushMulti(hullseq, h0.ptr(), (int)h0.total());

    if (isStorage)
    {
        return hullseq;
    }
    else
    {
        if( mat->rows > mat->cols )
            mat->rows = hullseq->total;
        else
            mat->cols = hullseq->total;
        return 0;
    }
}
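A small, hedged usage sketch of the classic calling pattern for the function above: build a point sequence, ask for the hull as points, and walk the result. The random point data is illustrative only.

/* Sketch: compute and print the convex hull of a handful of points. */
static void convex_hull_usage_sketch( void )
{
    CvMemStorage* storage = cvCreateMemStorage(0);
    CvSeq* points = cvCreateSeq( CV_SEQ_KIND_GENERIC|CV_32SC2, sizeof(CvContour),
                                 sizeof(CvPoint), storage );
    CvSeq* hull;
    int i;

    for( i = 0; i < 10; i++ )
    {
        CvPoint pt = cvPoint( rand() % 100, rand() % 100 );
        cvSeqPush( points, &pt );
    }

    /* return_points = 1: the hull comes back as a sequence of CvPoint;
       hull_storage = 0 reuses the point sequence's storage, as handled above */
    hull = cvConvexHull2( points, 0, CV_CLOCKWISE, 1 );

    for( i = 0; i < hull->total; i++ )
    {
        CvPoint hp = *(CvPoint*)cvGetSeqElem( hull, i );
        printf( "(%d, %d)\n", hp.x, hp.y );
    }

    cvReleaseMemStorage( &storage );
}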
Example #7
/* it must have more than 3 points  */
CV_IMPL CvSeq*
cvConvexityDefects( const CvArr* array,
                    const CvArr* hullarray,
                    CvMemStorage* storage )
{
    CvSeq* defects = 0;

    CV_FUNCNAME( "cvConvexityDefects" );

    __BEGIN__;

    int i, index;
    CvPoint* hull_cur;

    /* is orientation of hull different from contour one */
    int rev_orientation;

    CvContour contour_header;
    union { CvContour c; CvSeq s; } hull_header;
    CvSeqBlock block, hullblock;
    CvSeq *ptseq = (CvSeq*)array, *hull = (CvSeq*)hullarray;

    CvSeqReader hull_reader;
    CvSeqReader ptseq_reader;
    CvSeqWriter writer;
    int is_index;

    if( CV_IS_SEQ( ptseq ))
    {
        if( !CV_IS_SEQ_POINT_SET( ptseq ))
            CV_ERROR( CV_StsUnsupportedFormat,
                "Input sequence is not a sequence of points" );
        if( !storage )
            storage = ptseq->storage;
    }
    else
    {
        CV_CALL( ptseq = cvPointSeqFromMat(
            CV_SEQ_KIND_GENERIC, array, &contour_header, &block ));
    }

    if( CV_SEQ_ELTYPE( ptseq ) != CV_32SC2 )
        CV_ERROR( CV_StsUnsupportedFormat,
            "Floating-point coordinates are not supported here" );

    if( CV_IS_SEQ( hull ))
    {
        int hulltype = CV_SEQ_ELTYPE( hull );
        if( hulltype != CV_SEQ_ELTYPE_PPOINT && hulltype != CV_SEQ_ELTYPE_INDEX )
            CV_ERROR( CV_StsUnsupportedFormat,
                "Convex hull must represented as a sequence "
                "of indices or sequence of pointers" );
        if( !storage )
            storage = hull->storage;
    }
    else
    {
        CvMat* mat = (CvMat*)hull;

        if( !CV_IS_MAT( hull ))
            CV_ERROR(CV_StsBadArg, "Convex hull is neither sequence nor matrix");

        if( mat->cols != 1 && mat->rows != 1 ||
            !CV_IS_MAT_CONT(mat->type) || CV_MAT_TYPE(mat->type) != CV_32SC1 )
            CV_ERROR( CV_StsBadArg,
            "The matrix should be 1-dimensional and continuous array of int's" );

        if( mat->cols + mat->rows - 1 > ptseq->total )
            CV_ERROR( CV_StsBadSize, "Convex hull is larger than the point sequence" );

        CV_CALL( hull = cvMakeSeqHeaderForArray(
            CV_SEQ_KIND_CURVE|CV_MAT_TYPE(mat->type)|CV_SEQ_FLAG_CLOSED,
            sizeof(CvContour), CV_ELEM_SIZE(mat->type), mat->data.ptr,
            mat->cols + mat->rows - 1, &hull_header.s, &hullblock ));
    }

    is_index = CV_SEQ_ELTYPE(hull) == CV_SEQ_ELTYPE_INDEX;

    if( !storage )
        CV_ERROR( CV_StsNullPtr, "NULL storage pointer" );

    CV_CALL( defects = cvCreateSeq( CV_SEQ_KIND_GENERIC, sizeof(CvSeq),
                                    sizeof(CvConvexityDefect), storage ));

    if( ptseq->total < 4 || hull->total < 3)
    {
        //CV_ERROR( CV_StsBadSize,
        //    "point seq size must be >= 4, convex hull size must be >= 3" );
        EXIT;
    }

    /* recognize co-orientation of ptseq and its hull */
    {
        int sign = 0;
        int index1, index2, index3;

        if( !is_index )
        {
            CvPoint* pos = *CV_SEQ_ELEM( hull, CvPoint*, 0 );
            CV_CALL( index1 = cvSeqElemIdx( ptseq, pos ));

            pos = *CV_SEQ_ELEM( hull, CvPoint*, 1 );
            CV_CALL( index2 = cvSeqElemIdx( ptseq, pos ));

            pos = *CV_SEQ_ELEM( hull, CvPoint*, 2 );
            CV_CALL( index3 = cvSeqElemIdx( ptseq, pos ));
        }
        else
        {
Example #8
CvSeq * MBLBPDetectMultiScale( const IplImage* img,
                               MBLBPCascade * pCascade,
                               CvMemStorage* storage, 
                               int scale_factor1024x,
                               int min_neighbors, 
                               int min_size,
							   int max_size)
{
    IplImage stub;
    CvMat mat, *pmat;
    CvSeq* seq = 0;
    CvSeq* seq2 = 0;
    CvSeq* idx_seq = 0;
    CvSeq* result_seq = 0;
    CvSeq* positions = 0;
    CvMemStorage* temp_storage = 0;
    CvAvgComp* comps = 0;
    
    CV_FUNCNAME( "MBLBPDetectMultiScale" );

    __BEGIN__;

    int factor1024x;
    int factor1024x_max;
    int coi;

    if( ! pCascade) 
        CV_ERROR( CV_StsNullPtr, "Invalid classifier cascade" );

    if( !storage )
        CV_ERROR( CV_StsNullPtr, "Null storage pointer" );

    CV_CALL( img = cvGetImage( img, &stub));
    CV_CALL( pmat = cvGetMat( img, &mat, &coi));

    if( coi )
        CV_ERROR( CV_BadCOI, "COI is not supported" );

    if( CV_MAT_DEPTH(pmat->type) != CV_8U )
        CV_ERROR( CV_StsUnsupportedFormat, "Only 8-bit images are supported" );

    if( CV_MAT_CN(pmat->type) > 1 )
    	CV_ERROR( CV_StsUnsupportedFormat, "Only single-channel images are supported" );

    min_size  = MAX(pCascade->win_width,  min_size);
	if(max_size <=0 )
		max_size = MIN(img->width, img->height);
	if(max_size < min_size)
		return NULL;

	CV_CALL( temp_storage = cvCreateChildMemStorage( storage ));
    seq = cvCreateSeq( 0, sizeof(CvSeq), sizeof(CvRect), temp_storage );
    seq2 = cvCreateSeq( 0, sizeof(CvSeq), sizeof(CvAvgComp), temp_storage );
    result_seq = cvCreateSeq( 0, sizeof(CvSeq), sizeof(CvAvgComp), storage );
    positions = cvCreateSeq( 0, sizeof(CvSeq), sizeof(CvPoint), temp_storage );

    if( min_neighbors == 0 )
        seq = result_seq;

    factor1024x = ((min_size<<10)+(pCascade->win_width/2)) / pCascade->win_width;
	factor1024x_max = (max_size<<10) / pCascade->win_width; //do not round it, to avoid the scan window be out of range

#ifdef _OPENMP
	omp_init_lock(&lock); 
#endif
    for( ; factor1024x <= factor1024x_max;
         factor1024x = ((factor1024x*scale_factor1024x+512)>>10) )
    {
        IplImage * pSmallImage = cvCreateImage( cvSize( ((img->width<<10)+factor1024x/2)/factor1024x, ((img->height<<10)+factor1024x/2)/factor1024x),
                                                IPL_DEPTH_8U, 1);
        try{
			cvResize(img, pSmallImage);
		}
		catch(...)
		{
			cvReleaseImage(&pSmallImage);
			return NULL;
		}
		
		
        CvSize winStride = cvSize( (factor1024x<=2048)+1,  (factor1024x<=2048)+1 );

		cvClearSeq(positions);

        MBLBPDetectSingleScale( pSmallImage, pCascade, positions, winStride);

        for(int i=0; i < (positions ? positions->total : 0); i++)
        {
            CvPoint pt = *(CvPoint*)cvGetSeqElem( positions, i );
            CvRect r = cvRect( (pt.x * factor1024x + 512)>>10,
                               (pt.y * factor1024x + 512)>>10,
                               (pCascade->win_width * factor1024x + 512)>>10,
                               (pCascade->win_height * factor1024x + 512)>>10);

            cvSeqPush(seq, &r);
        }

        cvReleaseImage(&pSmallImage);
    }
#ifdef _OPENMP
	omp_destroy_lock(&lock); 
#endif
  
    if( min_neighbors != 0 )
    {
        // group retrieved rectangles in order to filter out noise 
        int ncomp = cvSeqPartition( seq, 0, &idx_seq, (CvCmpFunc)is_equal, 0 );
        CV_CALL( comps = (CvAvgComp*)cvAlloc( (ncomp+1)*sizeof(comps[0])));
        memset( comps, 0, (ncomp+1)*sizeof(comps[0]));

        // count number of neighbors
        for(int i = 0; i < seq->total; i++ )
        {
            CvRect r1 = *(CvRect*)cvGetSeqElem( seq, i );
            int idx = *(int*)cvGetSeqElem( idx_seq, i );
            assert( (unsigned)idx < (unsigned)ncomp );

            comps[idx].neighbors++;
             
            comps[idx].rect.x += r1.x;
            comps[idx].rect.y += r1.y;
            comps[idx].rect.width += r1.width;
            comps[idx].rect.height += r1.height;
        }

        // calculate average bounding box
        for(int i = 0; i < ncomp; i++ )
        {
            int n = comps[i].neighbors;
            if( n >= min_neighbors )
            {
                CvAvgComp comp;
                comp.rect.x = (comps[i].rect.x*2 + n)/(2*n);
                comp.rect.y = (comps[i].rect.y*2 + n)/(2*n);
                comp.rect.width = (comps[i].rect.width*2 + n)/(2*n);
                comp.rect.height = (comps[i].rect.height*2 + n)/(2*n);
                comp.neighbors = comps[i].neighbors;

                cvSeqPush( seq2, &comp );
            }
        }

        // filter out small face rectangles inside large face rectangles
        for(int i = 0; i < seq2->total; i++ )
        {
            CvAvgComp r1 = *(CvAvgComp*)cvGetSeqElem( seq2, i );
            int j, flag = 1;

            for( j = 0; j < seq2->total; j++ )
            {
                CvAvgComp r2 = *(CvAvgComp*)cvGetSeqElem( seq2, j );
                int distance = (r2.rect.width *2+5)/10;//cvRound( r2.rect.width * 0.2 );
            
                if( i != j &&
                    r1.rect.x >= r2.rect.x - distance &&
                    r1.rect.y >= r2.rect.y - distance &&
                    r1.rect.x + r1.rect.width <= r2.rect.x + r2.rect.width + distance &&
                    r1.rect.y + r1.rect.height <= r2.rect.y + r2.rect.height + distance &&
                    (r2.neighbors > MAX( 3, r1.neighbors ) || r1.neighbors < 3) )
                {
                    flag = 0;
                    break;
                }
            }

            if( flag )
            {
                cvSeqPush( result_seq, &r1 );
                /* cvSeqPush( result_seq, &r1.rect ); */
            }
        }
    }   


    __END__;

    cvReleaseMemStorage( &temp_storage );
    cvFree( &comps );

    return result_seq;
}
Example #9
static int
icvFindContoursInInterval( const CvArr* src,
                           /*int minValue, int maxValue,*/
                           CvMemStorage* storage,
                           CvSeq** result,
                           int contourHeaderSize )
{
    int count = 0;
    CvMemStorage* storage00 = 0;
    CvMemStorage* storage01 = 0;
    CvSeq* first = 0;

    CV_FUNCNAME( "icvFindContoursInInterval" );

    __BEGIN__;

    int i, j, k, n;

    uchar*  src_data = 0;
    int  img_step = 0;
    CvSize  img_size;

    int  connect_flag;
    int  lower_total;
    int  upper_total;
    int  all_total;

    CvSeq*  runs;
    CvLinkedRunPoint  tmp;
    CvLinkedRunPoint*  tmp_prev;
    CvLinkedRunPoint*  upper_line = 0;
    CvLinkedRunPoint*  lower_line = 0;
    CvLinkedRunPoint*  last_elem;

    CvLinkedRunPoint*  upper_run = 0;
    CvLinkedRunPoint*  lower_run = 0;
    CvLinkedRunPoint*  prev_point = 0;

    CvSeqWriter  writer_ext;
    CvSeqWriter  writer_int;
    CvSeqWriter  writer;
    CvSeqReader  reader;

    CvSeq* external_contours;
    CvSeq* internal_contours;
    CvSeq* prev = 0;

    if( !storage )
        CV_ERROR( CV_StsNullPtr, "NULL storage pointer" );

    if( !result )
        CV_ERROR( CV_StsNullPtr, "NULL double CvSeq pointer" );

    if( contourHeaderSize < (int)sizeof(CvContour))
        CV_ERROR( CV_StsBadSize, "Contour header size must be >= sizeof(CvContour)" );

    CV_CALL( storage00 = cvCreateChildMemStorage(storage));
    CV_CALL( storage01 = cvCreateChildMemStorage(storage));

    {
        CvMat stub, *mat;

        CV_CALL( mat = cvGetMat( src, &stub ));
        if( !CV_IS_MASK_ARR(mat))
            CV_ERROR( CV_StsBadArg, "Input array must be 8uC1 or 8sC1" );
        src_data = mat->data.ptr;
        img_step = mat->step;
        img_size = cvGetMatSize( mat );
    }

    // Create temporary sequences
    runs = cvCreateSeq(0, sizeof(CvSeq), sizeof(CvLinkedRunPoint), storage00 );
    cvStartAppendToSeq( runs, &writer );

    cvStartWriteSeq( 0, sizeof(CvSeq), sizeof(CvLinkedRunPoint*), storage01, &writer_ext );
    cvStartWriteSeq( 0, sizeof(CvSeq), sizeof(CvLinkedRunPoint*), storage01, &writer_int );

    tmp_prev = &(tmp);
    tmp_prev->next = 0;
    tmp_prev->link = 0;
    
    // First line. None of the runs is linked yet
    tmp.pt.y = 0;
    i = 0;
    CV_WRITE_SEQ_ELEM( tmp, writer );
    upper_line = (CvLinkedRunPoint*)CV_GET_WRITTEN_ELEM( writer );
    
    tmp_prev = upper_line;
    for( j = 0; j < img_size.width; )
    {
        for( ; j < img_size.width && !ICV_IS_COMPONENT_POINT(src_data[j]); j++ )
            ;
        if( j == img_size.width )
            break;

        tmp.pt.x = j;
        CV_WRITE_SEQ_ELEM( tmp, writer );
        tmp_prev->next = (CvLinkedRunPoint*)CV_GET_WRITTEN_ELEM( writer );
        tmp_prev = tmp_prev->next;

        for( ; j < img_size.width && ICV_IS_COMPONENT_POINT(src_data[j]); j++ )
            ;

        tmp.pt.x = j-1;
        CV_WRITE_SEQ_ELEM( tmp, writer );
        tmp_prev->next = (CvLinkedRunPoint*)CV_GET_WRITTEN_ELEM( writer );
        tmp_prev->link = tmp_prev->next;
        // First point of contour
        CV_WRITE_SEQ_ELEM( tmp_prev, writer_ext );
        tmp_prev = tmp_prev->next;
    }
    cvFlushSeqWriter( &writer );
    upper_line = upper_line->next;
    upper_total = runs->total - 1;
    last_elem = tmp_prev;
    tmp_prev->next = 0;
    
    for( i = 1; i < img_size.height; i++ )
    {
//------// Find runs in next line
        src_data += img_step;
        tmp.pt.y = i;
        all_total = runs->total;
        for( j = 0; j < img_size.width; )
        {
            for( ; j < img_size.width && !ICV_IS_COMPONENT_POINT(src_data[j]); j++ )
                ;
            if( j == img_size.width ) break;

            tmp.pt.x = j;
            CV_WRITE_SEQ_ELEM( tmp, writer );
            tmp_prev->next = (CvLinkedRunPoint*)CV_GET_WRITTEN_ELEM( writer );
            tmp_prev = tmp_prev->next;

            for( ; j < img_size.width && ICV_IS_COMPONENT_POINT(src_data[j]); j++ )
                ;

            tmp.pt.x = j-1;
            CV_WRITE_SEQ_ELEM( tmp, writer );
            tmp_prev = tmp_prev->next = (CvLinkedRunPoint*)CV_GET_WRITTEN_ELEM( writer );
        }//j
        cvFlushSeqWriter( &writer );
        lower_line = last_elem->next;
        lower_total = runs->total - all_total;
        last_elem = tmp_prev;
        tmp_prev->next = 0;
//------//
//------// Find links between runs of lower_line and upper_line
        upper_run = upper_line;
        lower_run = lower_line;
        connect_flag = ICV_SINGLE;

        for( k = 0, n = 0; k < upper_total/2 && n < lower_total/2; )
        {
            switch( connect_flag )
            {
            case ICV_SINGLE:
                if( upper_run->next->pt.x < lower_run->next->pt.x )
                {
                    if( upper_run->next->pt.x >= lower_run->pt.x  -1 )
                    {
                        lower_run->link = upper_run;
                        connect_flag = ICV_CONNECTING_ABOVE;
                        prev_point = upper_run->next;
                    }
                    else
                        upper_run->next->link = upper_run;
                    k++;
                    upper_run = upper_run->next->next;
                }
                else
                {
                    if( upper_run->pt.x <= lower_run->next->pt.x  +1 )
                    {
                        lower_run->link = upper_run;
                        connect_flag = ICV_CONNECTING_BELOW;
                        prev_point = lower_run->next;
                    }
                    else
                    {
                        lower_run->link = lower_run->next;
                        // First point of contour
                        CV_WRITE_SEQ_ELEM( lower_run, writer_ext );
                    }
                    n++;
                    lower_run = lower_run->next->next;
                }
                break;
            case ICV_CONNECTING_ABOVE:
                if( upper_run->pt.x > lower_run->next->pt.x +1 )
                {
                    prev_point->link = lower_run->next;
                    connect_flag = ICV_SINGLE;
                    n++;
                    lower_run = lower_run->next->next;
                }
                else
                {
                    prev_point->link = upper_run;
                    if( upper_run->next->pt.x < lower_run->next->pt.x )
                    {
                        k++;
                        prev_point = upper_run->next;
                        upper_run = upper_run->next->next;
                    }
                    else
                    {
                        connect_flag = ICV_CONNECTING_BELOW;
                        prev_point = lower_run->next;
                        n++;
                        lower_run = lower_run->next->next;
                    }
                }
                break;
            case ICV_CONNECTING_BELOW:
                if( lower_run->pt.x > upper_run->next->pt.x +1 )
                {
                    upper_run->next->link = prev_point;
                    connect_flag = ICV_SINGLE;
                    k++;
                    upper_run = upper_run->next->next;
                }
                else
                {
                    // First point of contour
                    CV_WRITE_SEQ_ELEM( lower_run, writer_int );

                    lower_run->link = prev_point;
                    if( lower_run->next->pt.x < upper_run->next->pt.x )
                    {
                        n++;
                        prev_point = lower_run->next;
                        lower_run = lower_run->next->next;
                    }
                    else
                    {
                        connect_flag = ICV_CONNECTING_ABOVE;
                        k++;
                        prev_point = upper_run->next;
                        upper_run = upper_run->next->next;
                    }
                }
                break;          
            }
        }// k, n

        for( ; n < lower_total/2; n++ )
        {
            if( connect_flag != ICV_SINGLE )
            {
                prev_point->link = lower_run->next;
                connect_flag = ICV_SINGLE;
                lower_run = lower_run->next->next;
                continue;
            }
            lower_run->link = lower_run->next;

            //First point of contour
            CV_WRITE_SEQ_ELEM( lower_run, writer_ext );

            lower_run = lower_run->next->next;
        }

        for( ; k < upper_total/2; k++ )
        {
            if( connect_flag != ICV_SINGLE )
            {
                upper_run->next->link = prev_point;
                connect_flag = ICV_SINGLE;
                upper_run = upper_run->next->next;
                continue;
            }
            upper_run->next->link = upper_run;
            upper_run = upper_run->next->next;
        }
        upper_line = lower_line;
        upper_total = lower_total;
    }//i

    upper_run = upper_line;

    //the last line of image
    for( k = 0; k < upper_total/2; k++ )
    {
        upper_run->next->link = upper_run;
        upper_run = upper_run->next->next;
    }

//------//
//------// Finalize writing and read out the collected contours
    external_contours = cvEndWriteSeq( &writer_ext );
    internal_contours = cvEndWriteSeq( &writer_int );

    for( k = 0; k < 2; k++ )
    {
        CvSeq* contours = k == 0 ? external_contours : internal_contours;

        cvStartReadSeq( contours, &reader );

        for( j = 0; j < contours->total; j++, count++ )
        {
            CvLinkedRunPoint* p_temp;
            CvLinkedRunPoint* p00;
            CvLinkedRunPoint* p01;
            CvSeq* contour;

            CV_READ_SEQ_ELEM( p00, reader );
            p01 = p00;

            if( !p00->link )
                continue;

            cvStartWriteSeq( CV_SEQ_ELTYPE_POINT | CV_SEQ_POLYLINE | CV_SEQ_FLAG_CLOSED,
                             contourHeaderSize, sizeof(CvPoint), storage, &writer );
            do
            {
                CV_WRITE_SEQ_ELEM( p00->pt, writer );
                p_temp = p00;
                p00 = p00->link;
                p_temp->link = 0;
            }
            while( p00 != p01 );

            contour = cvEndWriteSeq( &writer );
            cvBoundingRect( contour, 1 );

            if( k != 0 )
                contour->flags |= CV_SEQ_FLAG_HOLE;

            if( !first )
                prev = first = contour;
            else
            {
                contour->h_prev = prev;
                prev = prev->h_next = contour;
            }
        }
    }

    __END__;

    if( !first )
        count = -1;

    if( result )
        *result = first;

    cvReleaseMemStorage(&storage00);
    cvReleaseMemStorage(&storage01);

    return count;
}
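This static helper is not called directly; in the legacy contour code it is reached through cvFindContours when the CV_LINK_RUNS approximation method is requested (which, as far as I recall, is only valid together with CV_RETR_LIST). A hedged sketch of that entry point:

/* Sketch: run-linking contour extraction on a binary image
   (binary_img is assumed to be an 8-bit single-channel mask prepared by the caller). */
static void link_runs_usage_sketch( IplImage* binary_img )
{
    CvMemStorage* storage = cvCreateMemStorage(0);
    CvSeq* contours = 0;

    int n = cvFindContours( binary_img, storage, &contours, sizeof(CvContour),
                            CV_RETR_LIST, CV_LINK_RUNS, cvPoint(0,0) );
    printf( "found %d contours\n", n );

    cvReleaseMemStorage( &storage );
}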
Example #10
int main(int argc, char* argv[])
{
	IplImage* color = cvLoadImage("E:\\pic_skindetect\\clothtest\\2.jpg", 1);
	IplImage* gray = cvCreateImage(cvGetSize(color), 8, 1);
	IplImage* show = cvCreateImage(cvGetSize(color), 8, 1);
	cvZero(show);
	int i = 0;

	cvCvtColor(color, gray, CV_BGR2GRAY);  // cvLoadImage returns BGR-ordered data
	//cvThreshold(gray, gray, 100, 255, CV_THRESH_BINARY_INV);
	cvCanny(gray, gray, 50, 150, 3); 
	CvMemStorage * storage = cvCreateMemStorage(0);
	CvSeq* contours;
	CvSeq* seq_fourier = cvCreateSeq(CV_SEQ_KIND_GENERIC|CV_32SC2, sizeof(CvContour),sizeof(CvPoint2D32f), storage);
	cvFindContours(gray, storage, &contours, sizeof(CvContour), CV_RETR_TREE);

	CvSeq* mostContours = contours;
	/*for(; contours; contours = contours->h_next)
	{
		if (mostContours->total < contours->total)
		{
			mostContours = contours;
		}
	}*/

	int t = 0;
	for(; contours; contours = contours->h_next)
	{
	//contours = mostContours;
		++t;
		printf("%d\n", contours->total);
		cvDrawContours(color, contours, CV_RGB(255,0,0), CV_RGB(255,0,0), 1, 3);
		CalcFourierDescriptorCoeff(contours, 2000, seq_fourier);
		CalcBoundary(seq_fourier, contours->total, contours);

		for(int i = 0; i < contours->total; i++)
		{
			CvPoint* pt=(CvPoint*)cvGetSeqElem(contours, i);
			if(pt->x >= 0 && pt->x < show->width && pt->y >= 0 && pt->y < show->height)
			{
				((uchar*)(show->imageData+pt->y*show->widthStep))[pt->x] = 255;
			}
		}

		/*for(i = 0; i < contours->total; i++)
		{
			CvPoint* pt=(CvPoint*)cvGetSeqElem(contours, i);
			printf("%d, %d, %d\n", pt->x, pt->y, i);
		}*/
/*
		for(i = 0; i < seq_fourier->total; i++)
		{
			CvPoint2D32f* pt=(CvPoint2D32f*)cvGetSeqElem(seq_fourier, i);
			printf("%f, %f, %d\n", pt->x, pt->y, i);
		}*/
	}
	printf("t=%d\n", t);

	cvNamedWindow("color", 0);
	cvShowImage("color",color);
	//cvWaitKey(0);

	cvNamedWindow("gray", 0);
	cvShowImage("gray", gray);
	//cvWaitKey(0);

	cvNamedWindow("reconstructed", 0);
	cvShowImage("reconstructed", show);
	cvWaitKey(0);
	cvReleaseMemStorage(&storage);
	cvReleaseImage(&color);
	cvReleaseImage(&gray);
	cvReleaseImage(&show);
	cvDestroyAllWindows();
	return 0;
}
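The helpers CalcFourierDescriptorCoeff and CalcBoundary are not shown in this example. The sketch below is only a guess at the underlying idea (forward DFT of the boundary points, keep the low-frequency coefficients, inverse DFT to reconstruct a smoothed boundary); it is not the author's actual implementation.

/* Hypothetical sketch of a Fourier-descriptor round trip with the C API. */
static void fourierRoundTripSketch( CvSeq* contour, int keep )
{
    int n = contour->total, i;
    CvMat* pts = cvCreateMat( 1, n, CV_32FC2 );    /* boundary as complex samples */
    CvMat* coeffs = cvCreateMat( 1, n, CV_32FC2 );

    for( i = 0; i < n; i++ )
    {
        CvPoint* p = (CvPoint*)cvGetSeqElem( contour, i );
        CV_MAT_ELEM( *pts, CvPoint2D32f, 0, i ) = cvPoint2D32f( (float)p->x, (float)p->y );
    }

    cvDFT( pts, coeffs, CV_DXT_FORWARD, 0 );       /* boundary -> descriptors */

    /* keep only the 'keep' lowest-frequency coefficients at each end (crude low-pass) */
    for( i = keep; i < n - keep; i++ )
        CV_MAT_ELEM( *coeffs, CvPoint2D32f, 0, i ) = cvPoint2D32f( 0.f, 0.f );

    cvDFT( coeffs, pts, CV_DXT_INV_SCALE, 0 );     /* descriptors -> smoothed boundary */

    /* 'pts' now holds the reconstructed (smoothed) boundary points */
    cvReleaseMat( &coeffs );
    cvReleaseMat( &pts );
}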
Example #11
int main( int argc, char** argv )
{
    forceUSLocaleToKeepOurSanity();

    CvSize board_size = {0,0};
    float square_size = 1.f, aspect_ratio = 1.f;
    const char* out_filename = "out_camera_data.yml";
    const char* input_filename = 0;
    int i, image_count = 10;
    int write_extrinsics = 0, write_points = 0;
    int flags = 0;
    CvCapture* capture = 0;
    FILE* f = 0;
    char imagename[1024];
    CvMemStorage* storage;
    CvSeq* image_points_seq = 0;
    int elem_size, flip_vertical = 0;
    int delay = 1000;
    clock_t prev_timestamp = 0;
    CvPoint2D32f* image_points_buf = 0;
    CvFont font = cvFont( 1, 1 );
    double _camera[9], _dist_coeffs[4];
    CvMat camera = cvMat( 3, 3, CV_64F, _camera );
    CvMat dist_coeffs = cvMat( 1, 4, CV_64F, _dist_coeffs );
    CvMat *extr_params = 0, *reproj_errs = 0;
    double avg_reproj_err = 0;
    int mode = DETECTION;
    int undistort_image = 0;
    CvSize img_size = {0,0};
    const char* live_capture_help =
        "When the live video from camera is used as input, the following hot-keys may be used:\n"
            "  <ESC>, 'q' - quit the program\n"
            "  'g' - start capturing images\n"
            "  'u' - switch undistortion on/off\n";

    if( argc < 2 )
    {
        printf( "This is a camera calibration sample.\n"
            "Usage: calibration\n"
            "     -w <board_width>         # the number of inner corners per one of board dimension\n"
            "     -h <board_height>        # the number of inner corners per another board dimension\n"
            "     [-n <number_of_frames>]  # the number of frames to use for calibration\n"
            "                              # (if not specified, it will be set to the number\n"
            "                              #  of board views actually available)\n"
            "     [-d <delay>]             # a minimum delay in ms between subsequent attempts to capture a next view\n"
            "                              # (used only for video capturing)\n"
            "     [-s <square_size>]       # square size in some user-defined units (1 by default)\n"
            "     [-o <out_camera_params>] # the output filename for intrinsic [and extrinsic] parameters\n"
            "     [-op]                    # write detected feature points\n"
            "     [-oe]                    # write extrinsic parameters\n"
            "     [-zt]                    # assume zero tangential distortion\n"
            "     [-a <aspect_ratio>]      # fix aspect ratio (fx/fy)\n"
            "     [-p]                     # fix the principal point at the center\n"
            "     [-v]                     # flip the captured images around the horizontal axis\n"
            "     [input_data]             # input data, one of the following:\n"
            "                              #  - text file with a list of the images of the board\n"
            "                              #  - name of video file with a video of the board\n"
            "                              # if input_data not specified, a live view from the camera is used\n"
            "\n" );
        printf( "%s", live_capture_help );
        return 0;
    }

    for( i = 1; i < argc; i++ )
    {
        const char* s = argv[i];
        if( strcmp( s, "-w" ) == 0 )
        {
            if( sscanf( argv[++i], "%u", &board_size.width ) != 1 || board_size.width <= 0 )
                return fprintf( stderr, "Invalid board width\n" ), -1;
        }
        else if( strcmp( s, "-h" ) == 0 )
        {
            if( sscanf( argv[++i], "%u", &board_size.height ) != 1 || board_size.height <= 0 )
                return fprintf( stderr, "Invalid board height\n" ), -1;
        }
        else if( strcmp( s, "-s" ) == 0 )
        {
            if( sscanf( argv[++i], "%f", &square_size ) != 1 || square_size <= 0 )
                return fprintf( stderr, "Invalid board square width\n" ), -1;
        }
        else if( strcmp( s, "-n" ) == 0 )
        {
            if( sscanf( argv[++i], "%u", &image_count ) != 1 || image_count <= 3 )
                return printf("Invalid number of images\n" ), -1;
        }
        else if( strcmp( s, "-a" ) == 0 )
        {
            if( sscanf( argv[++i], "%f", &aspect_ratio ) != 1 || aspect_ratio <= 0 )
                return printf("Invalid aspect ratio\n" ), -1;
        }
        else if( strcmp( s, "-d" ) == 0 )
        {
            if( sscanf( argv[++i], "%u", &delay ) != 1 || delay <= 0 )
                return printf("Invalid delay\n" ), -1;
        }
        else if( strcmp( s, "-op" ) == 0 )
        {
            write_points = 1;
        }
        else if( strcmp( s, "-oe" ) == 0 )
        {
            write_extrinsics = 1;
        }
        else if( strcmp( s, "-zt" ) == 0 )
        {
            flags |= CV_CALIB_ZERO_TANGENT_DIST;
        }
        else if( strcmp( s, "-p" ) == 0 )
        {
            flags |= CV_CALIB_FIX_PRINCIPAL_POINT;
        }
        else if( strcmp( s, "-v" ) == 0 )
        {
            flip_vertical = 1;
        }
        else if( strcmp( s, "-o" ) == 0 )
        {
            out_filename = argv[++i];
        }
        else if( s[0] != '-' )
            input_filename = s;
        else
            return fprintf( stderr, "Unknown option %s", s ), -1;
    }

    if( input_filename )
    {
        fprintf( stderr, "Trying to open %s \n" , input_filename );

        capture = cvCreateFileCapture( input_filename );
        if( !capture )
        {
            fprintf(stderr,"Warning , cvCreateFileCapture failed to open %s \n",input_filename);
            f = fopen( input_filename, "rt" );
            if( !f )
                return fprintf( stderr, "The input file could not be opened\n" ), -1;
            image_count = -1;
        }
        mode = CAPTURING;
    }
    else
        capture = cvCreateCameraCapture(0);

    if( !capture && !f )
        return fprintf( stderr, "Could not initialize video capture\n" ), -2;

    if( capture )
        printf( "%s", live_capture_help );

    elem_size = board_size.width*board_size.height*sizeof(image_points_buf[0]);
    storage = cvCreateMemStorage( MAX( elem_size*4, 1 << 16 ));
    image_points_buf = (CvPoint2D32f*)cvAlloc( elem_size );
    image_points_seq = cvCreateSeq( 0, sizeof(CvSeq), elem_size, storage );

    cvNamedWindow( "Image View", 1 );

    for(;;)
    {
        IplImage *view = 0, *view_gray = 0;
        int count = 0, found, blink = 0;
        CvPoint text_origin;
        CvSize text_size = {0,0};
        int base_line = 0;
        char s[100];
        int key;

        if( f && fgets( imagename, sizeof(imagename)-2, f ))
        {
            int l = strlen(imagename);
            if( l > 0 && imagename[l-1] == '\n' )
                imagename[--l] = '\0';
            if( l > 0 )
            {
                if( imagename[0] == '#' )
                    continue;
                view = cvLoadImage( imagename, 1 );
            }
        }
        else if( capture )
        {
            IplImage* view0 = cvQueryFrame( capture );
            if( view0 )
            {
                view = cvCreateImage( cvGetSize(view0), IPL_DEPTH_8U, view0->nChannels );
                if( view0->origin == IPL_ORIGIN_BL )
                    cvFlip( view0, view, 0 );
                else
                    cvCopy( view0, view );
            }
        }

        if( !view )
        {
            if( image_points_seq->total > 0 )
            {
                image_count = image_points_seq->total;
                goto calibrate;
            }
            break;
        }

        if( flip_vertical )
            cvFlip( view, view, 0 );

        img_size = cvGetSize(view);
        found = cvFindChessboardCorners( view, board_size,
            image_points_buf, &count, CV_CALIB_CB_ADAPTIVE_THRESH );

#if 1
        // improve the found corners' coordinate accuracy
        view_gray = cvCreateImage( cvGetSize(view), 8, 1 );
        cvCvtColor( view, view_gray, CV_BGR2GRAY );
        cvFindCornerSubPix( view_gray, image_points_buf, count, cvSize(11,11),
            cvSize(-1,-1), cvTermCriteria( CV_TERMCRIT_EPS+CV_TERMCRIT_ITER, 30, 0.1 ));
        cvReleaseImage( &view_gray );
#endif

        if( mode == CAPTURING && found && (f || clock() - prev_timestamp > delay*1e-3*CLOCKS_PER_SEC) )
        {
            cvSeqPush( image_points_seq, image_points_buf );
            prev_timestamp = clock();
            blink = !f;
#if 1
            if( capture )
            {
                sprintf( imagename, "view%05d.png", image_points_seq->total - 1 );
                cvSaveImage( imagename, view );
            }
#endif
        }

        cvDrawChessboardCorners( view, board_size, image_points_buf, count, found );

        cvGetTextSize( "100/100", &font, &text_size, &base_line );
        text_origin.x = view->width - text_size.width - 10;
        text_origin.y = view->height - base_line - 10;

        if( mode == CAPTURING )
        {
            if( image_count > 0 )
                sprintf( s, "%d/%d", image_points_seq ? image_points_seq->total : 0, image_count );
            else
                sprintf( s, "%d/?", image_points_seq ? image_points_seq->total : 0 );
        }
        else if( mode == CALIBRATED )
            sprintf( s, "Calibrated" );
        else
            sprintf( s, "Press 'g' to start" );

        cvPutText( view, s, text_origin, &font, mode != CALIBRATED ?
                                   CV_RGB(255,0,0) : CV_RGB(0,255,0));

        if( blink )
            cvNot( view, view );

        if( mode == CALIBRATED && undistort_image )
        {
            IplImage* t = cvCloneImage( view );
            cvUndistort2( t, view, &camera, &dist_coeffs );
            cvReleaseImage( &t );
        }

        cvShowImage( "Image View", view );
        key = cvWaitKey(capture ? 50 : 500);

        if( key == 27 )
            break;

        if( key == 'u' && mode == CALIBRATED )
            undistort_image = !undistort_image;

        if( capture && key == 'g' )
        {
            mode = CAPTURING;
            cvClearMemStorage( storage );
            image_points_seq = cvCreateSeq( 0, sizeof(CvSeq), elem_size, storage );
        }

        if( mode == CAPTURING && (unsigned)image_points_seq->total >= (unsigned)image_count )
        {
calibrate:
            cvReleaseMat( &extr_params );
            cvReleaseMat( &reproj_errs );
            int code = run_calibration( image_points_seq, img_size, board_size,
                square_size, aspect_ratio, flags, &camera, &dist_coeffs, &extr_params,
                &reproj_errs, &avg_reproj_err );
            // save camera parameters in any case, to catch Inf's/NaN's
            save_camera_params( out_filename, image_count, img_size,
                board_size, square_size, aspect_ratio, flags,
                &camera, &dist_coeffs, write_extrinsics ? extr_params : 0,
                write_points ? image_points_seq : 0, reproj_errs, avg_reproj_err );
            if( code )
                mode = CALIBRATED;
            else
                mode = DETECTION;
        }

        if( !view )
            break;
        cvReleaseImage( &view );
    }

    if( capture )
        cvReleaseCapture( &capture );
    if( storage )
        cvReleaseMemStorage( &storage );
    return 0;
}
Example #12
// Parameters:
// img - input video frame // dst - detection result
void Invade::update_mhi(IplImage* img, IplImage* dst, int diff_threshold)
{
	double timestamp = clock() / 100.; // timestamp (dividing by CLOCKS_PER_SEC would give true seconds)
	CvSize size = cvSize(img->width, img->height); // get current frame size
	int i, idx1, idx2;
	IplImage* silh;
	IplImage* pyr = cvCreateImage(cvSize((size.width & -2) / 2, (size.height & -2) / 2), 8, 1);
	CvMemStorage *stor;
	CvSeq *cont;

	/* initialise the data structures first */
	if (!mhi || mhi->width != size.width || mhi->height != size.height)
	{
		if (buf == 0) // allocate the frame ring buffer if it has not been created yet 
		{
			buf = (IplImage**)malloc(N*sizeof(buf[0]));
			memset(buf, 0, N*sizeof(buf[0]));
		}

		for (i = 0; i < N; i++)
		{
			cvReleaseImage(&buf[i]);
			buf[i] = cvCreateImage(size, IPL_DEPTH_8U, 1);
			cvZero(buf[i]);// clear Buffer Frame at the beginning 
		}
		cvReleaseImage(&mhi);
		mhi = cvCreateImage(size, IPL_DEPTH_32F, 1);
		cvZero(mhi); // clear MHI at the beginning 
	} // end of if(mhi) 

	/* convert the current frame to grayscale and store it in the last slot of the buffer */
	cvCvtColor(img, buf[last], CV_BGR2GRAY); // convert frame to grayscale 

	/* set the frame indices */
	idx1 = last;
	idx2 = (last + 1) % N; // index of (last - (N-1))th frame 
	last = idx2;

	// frame differencing 
	silh = buf[idx2]; // the difference (silhouette) is written into buf[idx2] 
	cvAbsDiff(buf[idx1], buf[idx2], silh); // get difference between frames 

	// binarize the difference image 
	cvThreshold(silh, silh, 50, 255, CV_THRESH_BINARY); // threshold it 

	// update the motion history image, dropping pixels whose motion has timed out 
	cvUpdateMotionHistory(silh, mhi, timestamp, MHI_DURATION); // update MHI 

	cvConvert(mhi, dst); // convert mhi to dst (dst = mhi) 

	// median filter to remove small noise 
	cvSmooth(dst, dst, CV_MEDIAN, 3, 0, 0, 0);

	cvPyrDown(dst, pyr, CV_GAUSSIAN_5x5); // downsample to suppress noise; result is a quarter of the original image 
	cvDilate(pyr, pyr, 0, 1); // dilate to close small gaps and holes in the targets 
	cvPyrUp(pyr, dst, CV_GAUSSIAN_5x5); // upsample back to the original size 

	// the block below finds the contours 
	// Create dynamic structure and sequence. 
	stor = cvCreateMemStorage(0);
	cont = cvCreateSeq(CV_SEQ_ELTYPE_POINT, sizeof(CvSeq), sizeof(CvPoint), stor);

	// find all contours 
	cvFindContours(dst, stor, &cont, sizeof(CvContour),
		CV_RETR_LIST, CV_CHAIN_APPROX_SIMPLE, cvPoint(0, 0));

	// draw the contours directly from the bounding rect stored in each CvContour 
	for (; cont; cont = cont->h_next)
	{
		CvRect r = ((CvContour*)cont)->rect;
		if (r.height * r.width > CONTOUR_MAX_AERA) // discard rectangles with small area 
		{
			cvRectangle(img, cvPoint(r.x, r.y),
				cvPoint(r.x + r.width, r.y + r.height),
				CV_RGB(255, 0, 0), 1, CV_AA, 0);
		}
	}
	// free memory 
	cvReleaseMemStorage(&stor);
	cvReleaseImage(&pyr);
}
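A minimal driver loop for update_mhi() is sketched below. It is an assumption-based sketch, not part of the original example: it presumes Invade is default-constructible and that its mhi/buf/last members start out zeroed. Note that this particular update_mhi() ignores its diff_threshold argument and hard-codes a threshold of 50.

// Hypothetical driver for Invade::update_mhi() (file name and defaults are assumptions).
CvCapture* cap = cvCreateFileCapture("input.avi");
IplImage* frame = 0;
IplImage* motion = 0;
Invade detector;
cvNamedWindow("Motion", 1);
while ((frame = cvQueryFrame(cap)) != 0)
{
	if (!motion)
	{
		motion = cvCreateImage(cvSize(frame->width, frame->height), IPL_DEPTH_8U, 1);
		cvZero(motion);
	}
	detector.update_mhi(frame, motion, 30); // third argument is unused by this implementation
	cvShowImage("Motion", motion);
	if (cvWaitKey(10) == 27)
		break;
}
cvReleaseImage(&motion);
cvReleaseCapture(&cap);
cvDestroyWindow("Motion");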
Example #13
/*
 * This Function segments a worm.
 * It requires that certain information be present in the WormAnalysisData struct Worm
 * It requires Worm->Boundary be full
 * It requires that Params->NumSegments be greater than zero
 *
 */
int SegmentWorm(WormAnalysisData* Worm, WormAnalysisParam* Params){
	if (cvSeqExists(Worm->Boundary) == 0){
		printf("Error! No boundary found in SegmentWorm()\n");
		return -1;
	}




	Worm->Segmented->NumSegments=Params->NumSegments;

	/***Clear Out any stale Segmented Information Already in the Worm Structure***/
	ClearSegmentedInfo(Worm->Segmented);

	Worm->Segmented->Head=Worm->Head;
	Worm->Segmented->Tail=Worm->Tail;

	/*** It would be nice to check that Worm->Boundary exists ***/

	/*** Clear Out Scratch Storage ***/
	cvClearMemStorage(Worm->MemScratchStorage);


	/*** Slice the boundary into left and right components ***/
	if (Worm->HeadIndex==Worm->TailIndex) printf("Error! Worm->HeadIndex==Worm->TailIndex in SegmentWorm()!\n");
	CvSeq* OrigBoundA=cvSeqSlice(Worm->Boundary,cvSlice(Worm->HeadIndex,Worm->TailIndex),Worm->MemScratchStorage,1);
	CvSeq* OrigBoundB=cvSeqSlice(Worm->Boundary,cvSlice(Worm->TailIndex,Worm->HeadIndex),Worm->MemScratchStorage,1);

	if (OrigBoundA->total < Params->NumSegments || OrigBoundB->total < Params->NumSegments ){
		printf("Error in SegmentWorm():\n\tWhen splitting  the original boundary into two, one or the other has less than the number of desired segments!\n");
		printf("OrigBoundA->total=%d\nOrigBoundB->total=%d\nParams->NumSegments=%d\n",OrigBoundA->total,OrigBoundB->total,Params->NumSegments);
		printf("Worm->HeadIndex=%d\nWorm->TailIndex=%d\n",Worm->HeadIndex,Worm->TailIndex);
		printf("It could be that your worm is just too small\n");
		return -1; /** Andy make this return -1 **/

	}

	cvSeqInvert(OrigBoundB);


	/*** Resample One of the Two Boundaries so that both are the same length ***/

	//Create sequences to store the Normalized Boundaries
	CvSeq* NBoundA=	cvCreateSeq(CV_SEQ_ELTYPE_POINT,sizeof(CvSeq),sizeof(CvPoint),Worm->MemScratchStorage);
	CvSeq* NBoundB=cvCreateSeq(CV_SEQ_ELTYPE_POINT,sizeof(CvSeq),sizeof(CvPoint),Worm->MemScratchStorage);

	//Resample L&R boundary to have the same number of points as min(L,R)
	if (OrigBoundA->total > OrigBoundB->total){
		resampleSeq(OrigBoundA,NBoundA,OrigBoundB->total );
		NBoundB=OrigBoundB;
	}else{
		resampleSeq(OrigBoundB,NBoundB,OrigBoundA->total );
		NBoundA=OrigBoundA;
	}
	//Now both NBoundA and NBoundB are the same length.



	/*
	 * Now Find the Centerline
	 *
	 */

	/*** Clear out Stale Centerline Information ***/
	cvClearSeq(Worm->Centerline);

	/*** Compute Centerline, from Head To Tail ***/
	FindCenterline(NBoundA,NBoundB,Worm->Centerline);



	/*** Smooth the Centerline***/
	CvSeq* SmoothUnresampledCenterline = smoothPtSequence (Worm->Centerline, 0.5*Worm->Centerline->total/Params->NumSegments, Worm->MemScratchStorage);

	/*** Note: If you wanted to you could smooth the centerline a second time here. ***/


	/*** Resample the Centerline So it has the specified Number of Points ***/
	//resampleSeq(SmoothUnresampledCenterline,Worm->Segmented->Centerline,Params->NumSegments);

	resampleSeqConstPtsPerArcLength(SmoothUnresampledCenterline,Worm->Segmented->Centerline,Params->NumSegments);

	/** Save the location of the centerOfWorm as the point halfway down the segmented centerline **/
	Worm->Segmented->centerOfWorm= CV_GET_SEQ_ELEM( CvPoint , Worm->Segmented->Centerline, Worm->Segmented->NumSegments / 2 );

	/*** Remove Repeat Points***/
	//RemoveSequentialDuplicatePoints (Worm->Segmented->Centerline);

	/*** Use Marc's Perpendicular Segmentation Algorithm
	 *   To Segment the Left and Right Boundaries and store them
	 */
	SegmentSides(OrigBoundA,OrigBoundB,Worm->Segmented->Centerline,Worm->Segmented->LeftBound,Worm->Segmented->RightBound);
	return 0;

}
Example #14
/*
 * Finds the Worm's Head and Tail.
 * Requires Worm->Boundary
 *
 */
int GivenBoundaryFindWormHeadTail(WormAnalysisData* Worm, WormAnalysisParam* Params) {
	if (Worm->Boundary->total < 2*Params->NumSegments) {
		printf("Error in GivenBoundaryFindWormHeadTail(). The Boundary has too few points.");
		return -1;
	}

	/*** Clear Out Scratch Storage ***/
	cvClearMemStorage(Worm->MemScratchStorage);

	/* **********************************************************************/
	/*  Express the Boundary in the form of a series of vectors connecting 	*/
	/*  two pixels a Delta pixels apart.									*/
	/* **********************************************************************/

	/* Create A Matrix to store all of the dot products along the boundary.
	 */
	CvSeq* DotProds= cvCreateSeq(CV_32SC1,sizeof(CvSeq),sizeof(int),Worm->MemScratchStorage);

	/* Create A Matrix to store all of the cross products along the boundary.
	 */
	CvSeq* CrossProds= cvCreateSeq(CV_32SC1,sizeof(CvSeq),sizeof(int),Worm->MemScratchStorage);
	
	//We walk around the boundary using the high-speed reader and writer objects.
	CvSeqReader ForeReader; //ForeReader reads delta pixels ahead
	CvSeqReader Reader; 	//Reader reads the current pixel
	CvSeqReader BackReader; //BackReader reads delta pixels behind


	/**** Local Variables ***/
	int i;
	CvPoint* Pt;
	CvPoint* AheadPt;
	CvPoint* BehindPt;
	CvPoint AheadVec;
	CvPoint BehindVec;
	int TotalBPts = Worm->Boundary->total;

	/*** Initializing Read & Write Apparatus ***/
	int AheadPtr=0;
	int BehindPtr=0;
	int Ptr=0;
	int* DotProdPtr;
	int* CrossProdPtr;
	int DotProdVal;
	int CrossProdVal;


	/*
	 * Loop through all the boundary and compute the dot products between the ForeVec and BackVec.
	 *
	 * Note: ForeVec and BackVec have the same "handedness" along the boundary.
	 */
	//printf ("total boundary elements = %d\n", TotalBPts); //debug MHG 10/19/09
	for (i = 0; i < TotalBPts; i++) {
		AheadPtr = (i+Params->LengthScale)%TotalBPts;
		BehindPtr = (i+TotalBPts-Params->LengthScale)%TotalBPts;
		Ptr = (i)%TotalBPts;

		//printf("AheadPtr=%d, BehindPtr=%d,Ptr=%d\n", AheadPtr,BehindPtr,Ptr);


		AheadPt = (CvPoint*) cvGetSeqElem(Worm->Boundary,AheadPtr);
		Pt = (CvPoint*) cvGetSeqElem(Worm->Boundary,Ptr);
		BehindPt=(CvPoint*) cvGetSeqElem(Worm->Boundary,BehindPtr);


		/** Compute the Forward Vector **/
		AheadVec = cvPoint((AheadPt->x) - (Pt->x), (AheadPt->y)
				- (Pt->y));

		/** Compute the Rear Vector **/
		BehindVec= cvPoint((Pt->x) - (BehindPt->x), (Pt->y)
				- (BehindPt->y));

		/** Store the Dot Product in our Mat **/
		DotProdVal=PointDot(&AheadVec,&BehindVec);
		cvSeqPush(DotProds,&DotProdVal); //<--- ANDY CONTINUE HERE!
		
		/** Store the Cross Product in our Mat **/
		CrossProdVal=PointCross(&AheadVec,&BehindVec);
		cvSeqPush(CrossProds,&CrossProdVal);

	//	printf("i= %d, DotProdVal=%d\n", i, DotProdVal);
	//	cvWaitKey(0);

	}


	/* **********************************************************************/
	/*  Find the Tail 													 	*/
	/*  Take dot product of neighboring vectors. Tail is location of		*/
	/*	 smallest dot product												*/
	/* **********************************************************************/


	/*
	 * Now Let's loop through the entire boundary to find the tail, which will be the curviest point.
	 */
	float MostCurvy = 1000; //Initialize to a large value; the loop below looks for the smallest dot product.
	float CurrentCurviness; //Metric of CurrentCurviness. In this case the dot product.
	int MostCurvyIndex = 0;
	int TailIndex;

	for (i = 0; i < TotalBPts; i++) {
		DotProdPtr = (int*) cvGetSeqElem(DotProds,i);
		CrossProdPtr = (int*) cvGetSeqElem(CrossProds,i);
		if (*DotProdPtr < MostCurvy && *CrossProdPtr > 0) { //If this location is curvier than the previous MostCurvy location
			MostCurvy = *DotProdPtr; //replace the MostCurvy point
			MostCurvyIndex = i;
		}
	}

	//Set the tail to be the point on the boundary that is most curvy.
	Worm->Tail = (CvPoint*) cvGetSeqElem(Worm->Boundary, MostCurvyIndex);
	Worm->TailIndex=MostCurvyIndex;

	/* **********************************************************************/
	/*  Find the Head 													 	*/
	/* 	Excluding the neighborhood of the Tail, the head is the location of */
	/*	 the smallest dot product											*/
	/* **********************************************************************/

	float SecondMostCurvy = 1000;
	int DistBetPtsOnBound;
	DistBetPtsOnBound = 0;

	/* Set the fallback head location to be halfway away from the tail along the boundary. 	*/
	/* That way, if for some reason there is no reasonable head found, the default 			*/
	/* will at least be a pretty good guess												*/
	int SecondMostCurvyIndex = (Worm->TailIndex+ TotalBPts/2)%TotalBPts;
	
	

	for (i = 0; i < TotalBPts; i++) {
		DotProdPtr =(int*) cvGetSeqElem(DotProds,i);
		CrossProdPtr=(int*) cvGetSeqElem(CrossProds,i);
		DistBetPtsOnBound = DistBetPtsOnCircBound(TotalBPts, i, MostCurvyIndex);
		//If we are at least a 1/4 of the total boundary away from the most curvy point.
		if (DistBetPtsOnBound > (TotalBPts / 4)) {
			//If this location is curvier than the previous SecondMostCurvy location & is not an invagination
			if (*DotProdPtr < SecondMostCurvy && *CrossProdPtr > 0) {
				SecondMostCurvy = *DotProdPtr; //replace the SecondMostCurvy value
				SecondMostCurvyIndex = i;
			}
		}
	}

	Worm->Head = (CvPoint*) cvGetSeqElem(Worm->Boundary,
			SecondMostCurvyIndex);  

	Worm->HeadIndex = SecondMostCurvyIndex;
	cvClearMemStorage(Worm->MemScratchStorage);
	return 0;
}
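PointDot() and PointCross() are project helpers that are not shown in this example. A plausible sketch consistent with how they are used above (integer dot product and the z-component of the 2-D cross product of two CvPoint vectors) would be:

/* Assumed helpers; not part of the original source. */
int PointDot(const CvPoint* a, const CvPoint* b)
{
	return a->x * b->x + a->y * b->y;   /* scalar (dot) product */
}

int PointCross(const CvPoint* a, const CvPoint* b)
{
	return a->x * b->y - a->y * b->x;   /* z-component of the 2-D cross product */
}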
Example #15
CV_IMPL CvSeq*
cvSegmentMotion( const CvArr* mhiimg, CvArr* segmask, CvMemStorage* storage,
                 double timestamp, double seg_thresh )
{
    CvSeq* components = 0;
    CvMat* mask8u = 0;

    CV_FUNCNAME( "cvSegmentMotion" );

    __BEGIN__;

    CvMat  mhistub, *mhi = (CvMat*)mhiimg;
    CvMat  maskstub, *mask = (CvMat*)segmask;
    Cv32suf v, comp_idx;
    int stub_val, ts;
    int x, y;

    if( !storage )
        CV_ERROR( CV_StsNullPtr, "NULL memory storage" );

    CV_CALL( mhi = cvGetMat( mhi, &mhistub ));
    CV_CALL( mask = cvGetMat( mask, &maskstub ));

    if( CV_MAT_TYPE( mhi->type ) != CV_32FC1 || CV_MAT_TYPE( mask->type ) != CV_32FC1 )
        CV_ERROR( CV_BadDepth, "Both MHI and the destination mask must be single-channel 32-bit floating-point images" );

    if( !CV_ARE_SIZES_EQ( mhi, mask ))
        CV_ERROR( CV_StsUnmatchedSizes, "" );

    CV_CALL( mask8u = cvCreateMat( mhi->rows + 2, mhi->cols + 2, CV_8UC1 ));
    cvZero( mask8u );
    cvZero( mask );
    CV_CALL( components = cvCreateSeq( CV_SEQ_KIND_GENERIC, sizeof(CvSeq),
                                       sizeof(CvConnectedComp), storage ));
    
    v.f = (float)timestamp; ts = v.i;
    v.f = FLT_MAX*0.1f; stub_val = v.i;
    comp_idx.f = 1;

    for( y = 0; y < mhi->rows; y++ )
    {
        int* mhi_row = (int*)(mhi->data.ptr + y*mhi->step);
        for( x = 0; x < mhi->cols; x++ )
        {
            if( mhi_row[x] == 0 )
                mhi_row[x] = stub_val;
        }
    }

    for( y = 0; y < mhi->rows; y++ )
    {
        int* mhi_row = (int*)(mhi->data.ptr + y*mhi->step);
        uchar* mask8u_row = mask8u->data.ptr + (y+1)*mask8u->step + 1;

        for( x = 0; x < mhi->cols; x++ )
        {
            if( mhi_row[x] == ts && mask8u_row[x] == 0 )
            {
                CvConnectedComp comp;
                int x1, y1;
                CvScalar _seg_thresh = cvRealScalar(seg_thresh);
                CvPoint seed = cvPoint(x,y);

                CV_CALL( cvFloodFill( mhi, seed, cvRealScalar(0), _seg_thresh, _seg_thresh,
                                      &comp, CV_FLOODFILL_MASK_ONLY + 2*256 + 4, mask8u ));

                for( y1 = 0; y1 < comp.rect.height; y1++ )
                {
                    int* mask_row1 = (int*)(mask->data.ptr +
                                    (comp.rect.y + y1)*mask->step) + comp.rect.x;
                    uchar* mask8u_row1 = mask8u->data.ptr +
                                    (comp.rect.y + y1+1)*mask8u->step + comp.rect.x+1;

                    for( x1 = 0; x1 < comp.rect.width; x1++ )
                    {
                        if( mask8u_row1[x1] > 1 )
                        {
                            mask8u_row1[x1] = 1;
                            mask_row1[x1] = comp_idx.i;
                        }
                    }
                }
                comp_idx.f++;
                cvSeqPush( components, &comp );
            }
        }
    }

    for( y = 0; y < mhi->rows; y++ )
    {
        int* mhi_row = (int*)(mhi->data.ptr + y*mhi->step);
        for( x = 0; x < mhi->cols; x++ )
        {
            if( mhi_row[x] == stub_val )
                mhi_row[x] = 0;
        }
    }

    __END__;

    cvReleaseMat( &mask8u );
    return components;
}
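For context, cvSegmentMotion() is normally driven from a motion-history image produced by cvUpdateMotionHistory(); a short usage sketch follows (the mhi and dst buffers are assumed to exist already).

// Illustrative use of cvSegmentMotion(); mhi is an existing CV_32FC1 motion-history image
// and dst is an existing color frame to draw on (both are assumptions of this sketch).
CvMemStorage* storage = cvCreateMemStorage(0);
IplImage* segmask = cvCreateImage(cvGetSize(mhi), IPL_DEPTH_32F, 1);
double timestamp = (double)clock() / CLOCKS_PER_SEC;
CvSeq* comps = cvSegmentMotion(mhi, segmask, storage, timestamp, 0.5);
for (int i = 0; i < comps->total; i++)
{
    CvConnectedComp* cc = (CvConnectedComp*)cvGetSeqElem(comps, i);
    cvRectangle(dst, cvPoint(cc->rect.x, cc->rect.y),
                cvPoint(cc->rect.x + cc->rect.width, cc->rect.y + cc->rect.height),
                CV_RGB(0, 255, 0), 1, 8, 0);
}
cvReleaseImage(&segmask);
cvReleaseMemStorage(&storage);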
Example #16
CvSeq *
cvFindNextContour( CvContourScanner scanner )
{
    char *img0;
    char *img;
    int step;
    int width, height;
    int x, y;
    int prev;
    CvPoint lnbd;
    CvSeq *contour = 0;
    int nbd;
    int mode;
    CvStatus result = (CvStatus) 1;

    CV_FUNCNAME( "cvFindNextContour" );

    __BEGIN__;

    if( !scanner )
        CV_ERROR( CV_StsNullPtr, "" );
    icvEndProcessContour( scanner );

    /* initialize local state */
    img0 = scanner->img0;
    img = scanner->img;
    step = scanner->img_step;
    x = scanner->pt.x;
    y = scanner->pt.y;
    width = scanner->img_size.width;
    height = scanner->img_size.height;
    mode = scanner->mode;
    lnbd = scanner->lnbd;
    nbd = scanner->nbd;

    prev = img[x - 1];

    for( ; y < height; y++, img += step )
    {
        for( ; x < width; x++ )
        {
            int p = img[x];

            if( p != prev )
            {
                _CvContourInfo *par_info = 0;
                _CvContourInfo *l_cinfo = 0;
                CvSeq *seq = 0;
                int is_hole = 0;
                CvPoint origin;

                if( !(prev == 0 && p == 1) )    /* if not external contour */
                {
                    /* check hole */
                    if( p != 0 || prev < 1 )
                        goto resume_scan;

                    if( prev & -2 )
                    {
                        lnbd.x = x - 1;
                    }
                    is_hole = 1;
                }

                if( mode == 0 && (is_hole || img0[lnbd.y * step + lnbd.x] > 0) )
                    goto resume_scan;

                origin.y = y;
                origin.x = x - is_hole;

                /* find contour parent */
                if( mode <= 1 || (!is_hole && mode == 2) || lnbd.x <= 0 )
                {
                    par_info = &(scanner->frame_info);
                }
                else
                {
                    int lval = img0[lnbd.y * step + lnbd.x] & 0x7f;
                    _CvContourInfo *cur = scanner->cinfo_table[lval - 2];

                    assert( lval >= 2 );

                    /* find the first bounding contour */
                    while( cur )
                    {
                        if( (unsigned) (lnbd.x - cur->rect.x) < (unsigned) cur->rect.width &&
                            (unsigned) (lnbd.y - cur->rect.y) < (unsigned) cur->rect.height )
                        {
                            if( par_info )
                            {
                                if( icvTraceContour( scanner->img0 +
                                                     par_info->origin.y * step +
                                                     par_info->origin.x, step, img + lnbd.x,
                                                     par_info->is_hole ) > 0 )
                                    break;
                            }
                            par_info = cur;
                        }
                        cur = cur->next;
                    }

                    assert( par_info != 0 );

                    /* if current contour is a hole and previous contour is a hole or
                       current contour is external and previous contour is external then
                       the parent of the contour is the parent of the previous contour else
                       the parent is the previous contour itself. */
                    if( par_info->is_hole == is_hole )
                    {
                        par_info = par_info->parent;
                        /* every contour must have a parent
                           (at least, the frame of the image) */
                        if( !par_info )
                            par_info = &(scanner->frame_info);
                    }

                    /* hole flag of the parent must differ from the flag of the contour */
                    assert( par_info->is_hole != is_hole );
                    if( par_info->contour == 0 )        /* removed contour */
                        goto resume_scan;
                }

                lnbd.x = x - is_hole;

                cvSaveMemStoragePos( scanner->storage2, &(scanner->backup_pos) );

                seq = cvCreateSeq( scanner->seq_type1, scanner->header_size1,
                                   scanner->elem_size1, scanner->storage1 );
                if( !seq )
                {
                    result = CV_OUTOFMEM_ERR;
                    goto exit_func;
                }
                seq->flags |= is_hole ? CV_SEQ_FLAG_HOLE : 0;

                /* initialize header */
                if( mode <= 1 )
                {
                    l_cinfo = &(scanner->cinfo_temp);
                    result = icvFetchContour( img + x - is_hole, step,
                                              cvPoint( origin.x + scanner->offset.x,
                                                       origin.y + scanner->offset.y),
                                              seq, scanner->approx_method1 );
                    if( result < 0 )
                        goto exit_func;
                }
                else
                {
                    union { _CvContourInfo* ci; CvSetElem* se; } v;
                    v.ci = l_cinfo;
                    cvSetAdd( scanner->cinfo_set, 0, &v.se );
                    l_cinfo = v.ci;

                    result = icvFetchContourEx( img + x - is_hole, step,
                                                cvPoint( origin.x + scanner->offset.x,
                                                         origin.y + scanner->offset.y),
                                                seq, scanner->approx_method1,
                                                nbd, &(l_cinfo->rect) );
                    if( result < 0 )
                        goto exit_func;
                    l_cinfo->rect.x -= scanner->offset.x;
                    l_cinfo->rect.y -= scanner->offset.y;

                    l_cinfo->next = scanner->cinfo_table[nbd - 2];
                    scanner->cinfo_table[nbd - 2] = l_cinfo;

                    /* change nbd */
                    nbd = (nbd + 1) & 127;
                    nbd += nbd == 0 ? 3 : 0;
                }

                l_cinfo->is_hole = is_hole;
                l_cinfo->contour = seq;
                l_cinfo->origin = origin;
                l_cinfo->parent = par_info;

                if( scanner->approx_method1 != scanner->approx_method2 )
                {
                    result = icvApproximateChainTC89( (CvChain *) seq,
                                                      scanner->header_size2,
                                                      scanner->storage2,
                                                      &(l_cinfo->contour),
                                                      scanner->approx_method2 );
                    if( result < 0 )
                        goto exit_func;
                    cvClearMemStorage( scanner->storage1 );
                }

                l_cinfo->contour->v_prev = l_cinfo->parent->contour;

                if( par_info->contour == 0 )
                {
                    l_cinfo->contour = 0;
                    if( scanner->storage1 == scanner->storage2 )
                    {
                        cvRestoreMemStoragePos( scanner->storage1, &(scanner->backup_pos) );
                    }
                    else
                    {
                        cvClearMemStorage( scanner->storage1 );
                    }
                    p = img[x];
                    goto resume_scan;
                }

                cvSaveMemStoragePos( scanner->storage2, &(scanner->backup_pos2) );
                scanner->l_cinfo = l_cinfo;
                scanner->pt.x = x + 1;
                scanner->pt.y = y;
                scanner->lnbd = lnbd;
                scanner->img = (char *) img;
                scanner->nbd = nbd;
                contour = l_cinfo->contour;

                result = CV_OK;
                goto exit_func;
              resume_scan:
                prev = p;
                /* update lnbd */
                if( prev & -2 )
                {
                    lnbd.x = x;
                }
            }                   /* end of prev != p */
        }                       /* end of loop on x */

        lnbd.x = 0;
        lnbd.y = y + 1;
        x = 1;
        prev = 0;

    }                           /* end of loop on y */

  exit_func:

    if( result != 0 )
        contour = 0;
    if( result < 0 )
        CV_ERROR_FROM_STATUS( result );

    __END__;

    return contour;
}
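cvFindNextContour() is one piece of the incremental contour-scanner API; a typical way to drive it is sketched below (the binary input image name is an assumption).

// Sketch of the contour-scanner workflow around cvFindNextContour().
// 'bin' is assumed to be an 8-bit single-channel binary image (it is modified in place).
CvMemStorage* storage = cvCreateMemStorage(0);
CvContourScanner scanner = cvStartFindContours(bin, storage, sizeof(CvContour),
                                               CV_RETR_LIST, CV_CHAIN_APPROX_SIMPLE,
                                               cvPoint(0, 0));
CvSeq* c;
while ((c = cvFindNextContour(scanner)) != 0)
{
    CvRect r = cvBoundingRect(c, 0);
    if (r.width * r.height < 100)
        cvSubstituteContour(scanner, 0);       // drop small contours from the result
}
CvSeq* contours = cvEndFindContours(&scanner); // first contour of the retained list
/* ... use 'contours' here ... */
cvReleaseMemStorage(&storage);                 // frees all retained contours at once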
static void doCoalesce(image_session_t *mySession)
{
CvSeq *newDetected = NULL;
image_detected_t *current;
CvRect tempRect;
int over_left, over_top;
int right1, right2, over_right;
int bottom1, bottom2, over_bottom;
int over_width, over_height;
int r1Area, r2Area;
int *merged = NULL;
int newI = 0;
int i, j;
uint16_t current_category;

	for(current_category = 0; current_category < num_image_categories; current_category++) {
		current = &mySession->detected[current_category];
		if(current->category->coalesceOverlap != 1.0f && current->detected)
		{
			// Loop the number of detected objects
			newDetected = cvCreateSeq( 0, sizeof(CvSeq), sizeof(CvRect), mySession->lstorage );
			merged = malloc(sizeof(int) *  current->detected->total);
			newI = 0;
			for(i = 0; i < current->detected->total; i++ ) merged[i] = -1; // quickly setup merged variable
			for(i = 0; i < current->detected->total; i++ )
			{
				CvRect* r = (CvRect*)cvGetSeqElem( current->detected, i );

				if(merged[i] == -1) {
					cvSeqPush( newDetected, r );
					merged[i] = newI;
					newI++;
				}

				if(current->detected->total - i > 0)
				{
					r1Area = r->width * r->height;

					for(j = i + 1; j < current->detected->total; j++ )
					{
						// Fetch the next detected rectangle
						CvRect* r2 = (CvRect*)cvGetSeqElem( current->detected, j );
						r2Area = r2->width * r2->height;

						right1 = r->x + r->width;
						right2 = r2->x + r2->width;
						bottom1 = r->y + r->height;
						bottom2 = r2->y + r2->height;

						if (!(bottom1 < r2->y) && !(r->y > bottom2) && !(right1 < r2->x) && !(r->x > right2))
						{

							// Ok, compute the rectangle of overlap:
							if (bottom1 > bottom2) over_bottom = bottom2;
							else over_bottom = bottom1;

							if (r->y < r2->y) over_top = r2->y;
							else over_top = r->y;

							if (right1 > right2) over_right = right2;
							else over_right = right1;

							if (r->x < r2->x) over_left = r2->x;
							else over_left = r->x;

							over_width=over_right-over_left;
							over_height=over_bottom-over_top;

							if((r1Area * current->category->coalesceOverlap <= over_width*over_height) || (r2Area * current->category->coalesceOverlap <= over_width*over_height))
							{
								ci_debug_printf(10, "srv_classify_image: Merging detected %s at X: %d, Y: %d, Height: %d, Width: %d and X2: %d, Y2: %d, Height2: %d, Width2: %d\n",
										current->category->name, r->x, r->y, r->height, r->width, r2->x, r2->y, r2->height, r2->width);
								tempRect = cvMaxRect( (CvRect*)cvGetSeqElem( newDetected, merged[i] ), r2);
								cvSeqRemove( newDetected, merged[i] );
								cvSeqInsert( newDetected, merged[i], &tempRect );
								merged[j] = merged[i];
							}
						}
					}
				}
			}
			cvClearSeq( current->detected );
			current->detected = newDetected;
			free(merged);
		}
	}
}
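The inline overlap test in doCoalesce() amounts to an intersection-area computation between two rectangles; an equivalent stand-alone helper in the same spirit (not part of the original source) is sketched here for reference.

/* Hypothetical helper mirroring the overlap computation in doCoalesce(). */
static int rectIntersectionArea(const CvRect* a, const CvRect* b)
{
	int left   = a->x > b->x ? a->x : b->x;
	int top    = a->y > b->y ? a->y : b->y;
	int right  = (a->x + a->width)  < (b->x + b->width)  ? (a->x + a->width)  : (b->x + b->width);
	int bottom = (a->y + a->height) < (b->y + b->height) ? (a->y + a->height) : (b->y + b->height);
	if (right <= left || bottom <= top)
		return 0;                        /* rectangles do not overlap */
	return (right - left) * (bottom - top);
}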
Example #18
File: LOOV.cpp Project: fvallet/LOOV
int main(int argc, char *argv[]) {
	/* initialisation of the parameters  */
    LOOV_params *param=alloc_init_LOOV_params();
    param = parse_arg(param, argc, argv);
	/* initialisation of the boxes sequences */
    CvMemStorage* storage_box = cvCreateMemStorage(0);
    CvSeq* seq_box = cvCreateSeq( 0, sizeof(CvSeq), sizeof(box*), storage_box);							// list of boxes that are shown in the current frame
    CvMemStorage* storage_box_final = cvCreateMemStorage(0);
    CvSeq* seq_box_final = cvCreateSeq( 0, sizeof(CvSeq), sizeof(box*), storage_box_final);				// boxes list  that no longer appear

    if (param->videoName==NULL) { fprintf(stderr,"enter video name after parameter -v\n"); exit(0); }
    CvCapture* capture = cvCaptureFromFile(param->videoName);    										// read video
    if (!capture)        { printf("error on video %s\n",param->videoName); exit(1); }
    cvSetCaptureProperty(capture, CV_CAP_PROP_POS_FRAMES, param->startFrame);  							// get video property
    IplImage* frame_temp = cvQueryFrame( capture );   													// get the first frame    

	/* computed parameters depending on the image size */
    int video_depth=1;    																				
    for (int i=0;i<frame_temp->depth;i++) video_depth=video_depth*2;									// find the max threshold
    param->max_thr = video_depth-1;
    param->it_connected_caractere = round_me((float)frame_temp->width*param->aspect_ratio*param->it_connected_caractere);       
    param->y_min_size_text = round_me((float)frame_temp->height*param->y_min_size_text);
    param->x_min_size_text = round_me((float)frame_temp->width*param->aspect_ratio*param->x_min_size_text);
	
	/* read mask image, to process only a part of the images */
    IplImage* frame=cvCreateImage(cvSize(frame_temp->width*param->aspect_ratio, frame_temp->height), frame_temp->depth, frame_temp->nChannels);
    cvResize(frame_temp, frame, CV_INTER_CUBIC);
    IplImage* im_mask=0;    
    if (param->path_im_mask!=NULL) {
        im_mask=cvLoadImage(param->path_im_mask, CV_LOAD_IMAGE_GRAYSCALE);
        if ((frame->width!=im_mask->width) || (frame->height!=im_mask->height)){
            IplImage* im_mask_resize = cvCreateImage(cvSize(frame->width, frame->height),im_mask->depth, 1);  // resize mask to the images video size
            cvResize(im_mask, im_mask_resize, CV_INTER_CUBIC);
            cvReleaseImage(&im_mask);
            im_mask = cvCloneImage(im_mask_resize);
            cvReleaseImage(&im_mask_resize);
        }
    }   
    
    printf("processing of frames from %d to %d\n", param->startFrame, param->startFrame+param->nbFrame);
    
    IplImage* frame_BW=cvCreateImage(cvSize(frame_temp->width*param->aspect_ratio, frame_temp->height), frame_temp->depth, 1);
    IplImage* frame_BW_temp=cvCreateImage(cvSize(frame_temp->width, frame_temp->height), frame_temp->depth, 1);   
    int frameNum=param->startFrame;
    while((frameNum<param->startFrame+param->nbFrame) && (frame_temp = cvQueryFrame( capture ))) {  // capture the current frame and put it in frame_temp
        frameNum++;
        if( frame_temp ) {	
            cvCvtColor(frame_temp, frame_BW_temp, CV_RGB2GRAY);			                            // convert frame from color to gray
            cvResize(frame_temp, frame, CV_INTER_CUBIC);                                            // resize for aspect ratio
            cvResize(frame_BW_temp, frame_BW, CV_INTER_CUBIC);
            cvCvtColor(frame, frame_BW, CV_RGB2GRAY);
			IplImage* im = cvCloneImage(frame_BW);			
            im = sobel_double_H(im, param);															// find edge of characters		
            if (param->path_im_mask!=NULL) cvAnd(im,im_mask,im, NULL);								// apply mask if it exists
            im = connected_caractere(im, param);													// connect edges of a same line
            im = delete_horizontal_bar(im, param);													// filter noise on the resulting image
            im = delete_vertical_bar(im, param);													// filter noise on the resulting image
            if (param->path_im_mask!=NULL) cvAnd(im,im_mask,im, NULL);								// apply mask if it exists
            spatial_detection_box(im, seq_box, frameNum, frame_BW, frame, frame, im_mask, param); 	// Detect boxes spatial position
            temporal_detection_box(seq_box, seq_box_final, frameNum, frame_BW, im_mask, param);     // Temporal tracking of the boxes
            cvReleaseImage(&im);
        }
    }     
    cvReleaseImage(&frame_BW);
    cvReleaseImage(&im_mask);

    /* finish the transcription of the boxes in seq_box */
    for (int i=0;i<seq_box->total;i++){
        box* pt_search_box = *(box**)cvGetSeqElem(seq_box, i);
        if (pt_search_box->stop_frame - pt_search_box->start_frame > param->min_duration_box) {         
            cvSeqPush(seq_box_final, &pt_search_box);                                               // copy boxes in seq_box_final
            cvSeqSort(pt_search_box->seq_thr_t, cmp_thr, 0);
            int* thr_med = (int*)cvGetSeqElem( pt_search_box->seq_thr_t, (int)(pt_search_box->nb_img_detect_avg_t/2) );   
            set_threshold_OCR_Image(pt_search_box->im_average_mask_t,*thr_med);                
            transcription_box(pt_search_box, param);                                                // process transcription of the boxes
            if (param->print_text == 1){                                                            // print transcription
                printf("box_%d img_avg ymin=%d ymax=%d xmin=%d xmax=%d " ,pt_search_box->num ,round_me(pt_search_box->ymin_avg), round_me(pt_search_box->xmin_avg), round_me(pt_search_box->ymax_avg), round_me(pt_search_box->xmax_avg));
                print_transcription_image(get_img_OCR_Image(pt_search_box->im_average_mask_t), round_me(pt_search_box->thr_med), param);
            }
        }
        else free_box(pt_search_box);
    }
            
    /* Write transcription in output_path+".OCR" file */
    char * file_txt_temp=sprintf_alloc("%s.OCR", param->output_path);
    FILE * file_txt = fopen(file_txt_temp, "w");
    free(file_txt_temp); 
    cvSeqSort( seq_box_final, cmp_box_by_frame, 0);
    for (int i=0;i<seq_box_final->total;i++){
        file_print_box(file_txt, *(box**)cvGetSeqElem(seq_box_final, i), param);   //
    }    
    fclose(file_txt);

    /* free memory */     
    for (int i=0;i<seq_box_final->total;i++){
        free_box(*(box**)cvGetSeqElem(seq_box_final, i));
    }    
    cvClearSeq(seq_box);
    cvReleaseMemStorage( &storage_box );
    cvReleaseImage(&im_mask);
    cvClearSeq(seq_box_final);
    cvReleaseMemStorage( &storage_box_final );
    cvReleaseCapture( &capture ); 

    return 0;
}
Example #19
CV_IMPL CvSeq*
cvConvexHull2( const CvArr* array, void* hull_storage,
               int orientation, int return_points )
{
    union { CvContour* c; CvSeq* s; } hull;
    CvPoint** pointer = 0;
    CvPoint2D32f** pointerf = 0;
    int* stack = 0;

    CV_FUNCNAME( "cvConvexHull2" );

    hull.s = 0;

    __BEGIN__;

    CvMat* mat = 0;
    CvSeqReader reader;
    CvSeqWriter writer;
    CvContour contour_header;
    union { CvContour c; CvSeq s; } hull_header;
    CvSeqBlock block, hullblock;
    CvSeq* ptseq = 0;
    CvSeq* hullseq = 0;
    int is_float;
    int* t_stack;
    int t_count;
    int i, miny_ind = 0, maxy_ind = 0, total;
    int hulltype;
    int stop_idx;
    sklansky_func sklansky;

    if( CV_IS_SEQ( array ))
    {
        ptseq = (CvSeq*)array;
        if( !CV_IS_SEQ_POINT_SET( ptseq ))
            CV_ERROR( CV_StsBadArg, "Unsupported sequence type" );
        if( hull_storage == 0 )
            hull_storage = ptseq->storage;
    }
    else
    {
        CV_CALL( ptseq = cvPointSeqFromMat(
            CV_SEQ_KIND_GENERIC, array, &contour_header, &block ));
    }

    if( CV_IS_STORAGE( hull_storage ))
    {
        if( return_points )
        {
            CV_CALL( hullseq = cvCreateSeq(
                CV_SEQ_KIND_CURVE|CV_SEQ_ELTYPE(ptseq)|
                CV_SEQ_FLAG_CLOSED|CV_SEQ_FLAG_CONVEX,
                sizeof(CvContour), sizeof(CvPoint),(CvMemStorage*)hull_storage ));
        }
        else
        {
            CV_CALL( hullseq = cvCreateSeq(
                CV_SEQ_KIND_CURVE|CV_SEQ_ELTYPE_PPOINT|
                CV_SEQ_FLAG_CLOSED|CV_SEQ_FLAG_CONVEX,
                sizeof(CvContour), sizeof(CvPoint*), (CvMemStorage*)hull_storage ));
        }
    }
    else
    {
        if( !CV_IS_MAT( hull_storage ))
            CV_ERROR(CV_StsBadArg, "Destination must be valid memory storage or matrix");

        mat = (CvMat*)hull_storage;

        if( mat->cols != 1 && mat->rows != 1 || !CV_IS_MAT_CONT(mat->type))
            CV_ERROR( CV_StsBadArg,
            "The hull matrix should be continuous and have a single row or a single column" );

        if( mat->cols + mat->rows - 1 < ptseq->total )
            CV_ERROR( CV_StsBadSize, "The hull matrix size might be not enough to fit the hull" );

        if( CV_MAT_TYPE(mat->type) != CV_SEQ_ELTYPE(ptseq) &&
            CV_MAT_TYPE(mat->type) != CV_32SC1 )
            CV_ERROR( CV_StsUnsupportedFormat,
            "The hull matrix must have the same type as input or 32sC1 (integers)" );

        CV_CALL( hullseq = cvMakeSeqHeaderForArray(
            CV_SEQ_KIND_CURVE|CV_MAT_TYPE(mat->type)|CV_SEQ_FLAG_CLOSED,
            sizeof(contour_header), CV_ELEM_SIZE(mat->type), mat->data.ptr,
            mat->cols + mat->rows - 1, &hull_header.s, &hullblock ));

        cvClearSeq( hullseq );
    }

    total = ptseq->total;
    if( total == 0 )
    {
        if( mat )
            CV_ERROR( CV_StsBadSize,
            "Point sequence can not be empty if the output is matrix" );
        EXIT;
    }

    cvStartAppendToSeq( hullseq, &writer );

    is_float = CV_SEQ_ELTYPE(ptseq) == CV_32FC2;
    hulltype = CV_SEQ_ELTYPE(hullseq);
    sklansky = !is_float ? (sklansky_func)icvSklansky_32s :
                           (sklansky_func)icvSklansky_32f;

    CV_CALL( pointer = (CvPoint**)cvAlloc( ptseq->total*sizeof(pointer[0]) ));
    CV_CALL( stack = (int*)cvAlloc( (ptseq->total + 2)*sizeof(stack[0]) ));
    pointerf = (CvPoint2D32f**)pointer;

    cvStartReadSeq( ptseq, &reader );

    for( i = 0; i < total; i++ )
    {
        pointer[i] = (CvPoint*)reader.ptr;
        CV_NEXT_SEQ_ELEM( ptseq->elem_size, reader );
    }

    // sort the point set by x-coordinate, find min and max y
    if( !is_float )
    {
        icvSortPointsByPointers_32s( pointer, total, 0 );
        for( i = 1; i < total; i++ )
        {
            int y = pointer[i]->y;
            if( pointer[miny_ind]->y > y )
                miny_ind = i;
            if( pointer[maxy_ind]->y < y )
                maxy_ind = i;
        }
    }
    else
    {
        icvSortPointsByPointers_32f( pointerf, total, 0 );
        for( i = 1; i < total; i++ )
        {
            float y = pointerf[i]->y;
            if( pointerf[miny_ind]->y > y )
                miny_ind = i;
            if( pointerf[maxy_ind]->y < y )
                maxy_ind = i;
        }
    }

    if( pointer[0]->x == pointer[total-1]->x &&
        pointer[0]->y == pointer[total-1]->y )
    {
        if( hulltype == CV_SEQ_ELTYPE_PPOINT )
        {
            CV_WRITE_SEQ_ELEM( pointer[0], writer );
        }
        else if( hulltype == CV_SEQ_ELTYPE_INDEX )
        {
            int index = 0;
            CV_WRITE_SEQ_ELEM( index, writer );
        }
        else
        {
            CvPoint pt = pointer[0][0];
            CV_WRITE_SEQ_ELEM( pt, writer );
        }
        goto finish_hull;
    }

    /*upper half */
    {
        int *tl_stack = stack;
        int tl_count = sklansky( pointer, 0, maxy_ind, tl_stack, -1, 1 );
        int *tr_stack = tl_stack + tl_count;
        int tr_count = sklansky( pointer, ptseq->total - 1, maxy_ind, tr_stack, -1, -1 );

        /* gather upper part of convex hull to output */
        if( orientation == CV_COUNTER_CLOCKWISE )
        {
            CV_SWAP( tl_stack, tr_stack, t_stack );
            CV_SWAP( tl_count, tr_count, t_count );
        }

        if( hulltype == CV_SEQ_ELTYPE_PPOINT )
        {
            for( i = 0; i < tl_count - 1; i++ )
                CV_WRITE_SEQ_ELEM( pointer[tl_stack[i]], writer );

            for( i = tr_count - 1; i > 0; i-- )
                CV_WRITE_SEQ_ELEM( pointer[tr_stack[i]], writer );
        }
        else if( hulltype == CV_SEQ_ELTYPE_INDEX )
        {
            CV_CALL( icvCalcAndWritePtIndices( pointer, tl_stack,
                                               0, tl_count-1, ptseq, &writer ));
            CV_CALL( icvCalcAndWritePtIndices( pointer, tr_stack,
                                               tr_count-1, 0, ptseq, &writer ));
        }
        else
        {
            for( i = 0; i < tl_count - 1; i++ )
                CV_WRITE_SEQ_ELEM( pointer[tl_stack[i]][0], writer );

            for( i = tr_count - 1; i > 0; i-- )
                CV_WRITE_SEQ_ELEM( pointer[tr_stack[i]][0], writer );
        }
        stop_idx = tr_count > 2 ? tr_stack[1] : tl_count > 2 ? tl_stack[tl_count - 2] : -1;
    }

    /* lower half */
    {
        int *bl_stack = stack;
        int bl_count = sklansky( pointer, 0, miny_ind, bl_stack, 1, -1 );
        int *br_stack = stack + bl_count;
        int br_count = sklansky( pointer, ptseq->total - 1, miny_ind, br_stack, 1, 1 );

        if( orientation != CV_COUNTER_CLOCKWISE )
        {
            CV_SWAP( bl_stack, br_stack, t_stack );
            CV_SWAP( bl_count, br_count, t_count );
        }

        if( stop_idx >= 0 )
        {
            int check_idx = bl_count > 2 ? bl_stack[1] :
                            bl_count + br_count > 2 ? br_stack[2-bl_count] : -1;
            if( check_idx == stop_idx || check_idx >= 0 &&
                pointer[check_idx]->x == pointer[stop_idx]->x &&
                pointer[check_idx]->y == pointer[stop_idx]->y )
            {
                /* if all the points lie on the same line, then
                   the bottom part of the convex hull is the mirrored top part
                   (except the extreme points).*/
                bl_count = MIN( bl_count, 2 );
                br_count = MIN( br_count, 2 );
            }
        }

        if( hulltype == CV_SEQ_ELTYPE_PPOINT )
        {
            for( i = 0; i < bl_count - 1; i++ )
                CV_WRITE_SEQ_ELEM( pointer[bl_stack[i]], writer );

            for( i = br_count - 1; i > 0; i-- )
                CV_WRITE_SEQ_ELEM( pointer[br_stack[i]], writer );
        }
        else if( hulltype == CV_SEQ_ELTYPE_INDEX )
        {
            CV_CALL( icvCalcAndWritePtIndices( pointer, bl_stack,
                                               0, bl_count-1, ptseq, &writer ));
            CV_CALL( icvCalcAndWritePtIndices( pointer, br_stack,
                                               br_count-1, 0, ptseq, &writer ));
        }
        else
        {
            for( i = 0; i < bl_count - 1; i++ )
                CV_WRITE_SEQ_ELEM( pointer[bl_stack[i]][0], writer );

            for( i = br_count - 1; i > 0; i-- )
                CV_WRITE_SEQ_ELEM( pointer[br_stack[i]][0], writer );
        }
    }

finish_hull:
    CV_CALL( cvEndWriteSeq( &writer ));

    if( mat )
    {
        if( mat->rows > mat->cols )
            mat->rows = hullseq->total;
        else
            mat->cols = hullseq->total;
    }
    else
    {
        hull.s = hullseq;
        hull.c->rect = cvBoundingRect( ptseq,
            ptseq->header_size < (int)sizeof(CvContour) ||
            &ptseq->flags == &contour_header.flags );

        /*if( ptseq != (CvSeq*)&contour_header )
            hullseq->v_prev = ptseq;*/
    }

    __END__;

    cvFree( &pointer );
    cvFree( &stack );

    return hull.s;
}
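A short usage sketch for cvConvexHull2(): with a point sequence as input and return_points = 0 the hull elements are pointers back into the input sequence, as the CV_SEQ_ELTYPE_PPOINT branch above shows. The input points themselves are assumed to exist.

// Illustrative call; the caller is expected to have pushed points first.
CvMemStorage* storage = cvCreateMemStorage(0);
CvSeq* points = cvCreateSeq(CV_SEQ_ELTYPE_POINT, sizeof(CvSeq), sizeof(CvPoint), storage);
/* ... cvSeqPush(points, &pt) for each input CvPoint ... */
CvSeq* hull = cvConvexHull2(points, 0, CV_CLOCKWISE, 0);
for (int i = 0; i < hull->total; i++)
{
    CvPoint* p = *(CvPoint**)cvGetSeqElem(hull, i);   // pointer back into 'points'
    printf("hull vertex: (%d, %d)\n", p->x, p->y);
}
cvReleaseMemStorage(&storage);   // hull shares the input sequence's storage here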
Example #20
CvBlobTrackSeq::CvBlobTrackSeq(int TrackSize)
{
    m_pMem = cvCreateMemStorage();
    m_pSeq = cvCreateSeq(0,sizeof(CvSeq),TrackSize,m_pMem);
}
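The third argument of cvCreateSeq() is the per-element size in bytes, so the TrackSize passed here is expected to be the sizeof() of the track record being stored. A likely call site would be the following sketch (CvBlobTrack is an assumption taken from the legacy blob-tracking module):

// Assumed usage; CvBlobTrack is the track record type of the legacy blobtrack module.
CvBlobTrackSeq tracks(sizeof(CvBlobTrack));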
Example #21
int AdaBoost::read_num_class_data(const char* filename, int var_count, CvMat** data, CvMat** responses)
{
	const int M = 1024;
	FILE* f = fopen(filename, "rt");
	CvMemStorage* storage;
	CvSeq* seq;
	char buf[M + 2];
	float* el_ptr;
	CvSeqReader reader;
	int i=0, j=0;

	if(!f)
		return 0;

	el_ptr = new float[var_count + 1];
	storage = cvCreateMemStorage();
	seq	= cvCreateSeq(0, sizeof(*seq), (var_count + 1) * sizeof(float),	storage);

	for(;;)
	{
		char* ptr;

		if(!fgets(buf, M, f) || !strchr(buf, ','))
			break;

		el_ptr[0] = buf[0];
		ptr = buf + 2;

		for(i = 1; i <= var_count; i++)
		{
			int n = 0;
			sscanf(ptr, "%f%n", el_ptr + i, &n);
			ptr += n + 1;
		}

		if (i <= var_count)
			break;

		cvSeqPush(seq, el_ptr);
	}
	fclose(f);

	*data = cvCreateMat(seq->total, var_count, CV_32F);
	*responses = cvCreateMat(seq->total, 1, CV_32F);

	cvStartReadSeq(seq, &reader);

	for (i = 0; i < seq->total; i++)
	{
		const float* sdata = (float*) reader.ptr + 1;
		float* ddata = data[0]->data.fl + var_count * i;
		float* dr = responses[0]->data.fl + i;

		for (j = 0; j < var_count; j++)
			ddata[j] = sdata[j];

		*dr = sdata[-1];
		CV_NEXT_SEQ_ELEM(seq->elem_size, reader);
	}

	cvReleaseMemStorage(&storage);
	delete [] el_ptr;
	return 1;
}
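A hedged example of calling this reader: the file is expected to contain one class letter followed by var_count comma-separated numbers per line (as in the UCI letter-recognition data). The filename, feature count and the assumption that AdaBoost is default-constructible are all illustrative.

// Illustrative call; names and values below are assumptions.
CvMat* data = 0;
CvMat* responses = 0;
AdaBoost booster;
if (!booster.read_num_class_data("letter-recognition.data", 16, &data, &responses))
	fprintf(stderr, "Could not read the training data\n");
/* ... train on 'data'; 'responses' holds the class label taken from the first field of each line ... */
cvReleaseMat(&data);
cvReleaseMat(&responses);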
Example #22
File: blob.cpp Project: nosuchtim/MMTT1
/**
- FUNCTION: ExternPerimeter
- FUNCTIONALITY: Get the external perimeter (perimeter touching image borders)
- PARAMETERS:
	- maskImage: if != NULL, black pixels of maskImage are counted as external pixels and contour points
				 touching them are counted as external contour points.
	- xBorder: true to consider blobs touching horizontal borders as external
	- yBorder: true to consider blobs touching vertical borders as external
- RESULT:
	- 
- RESTRICTIONS:
	- 
- AUTHOR: rborras
- CREATION DATE: 2008/05/05
- MODIFICATION: Date. Author. Description.
- NOTE: If CBlobContour::GetContourPoints approximates contours with a method other than NONE,
		this function will not give correct results
*/
double CBlob::ExternPerimeter( IplImage *maskImage, bool xBorder /* = true */, bool yBorder /* = true */)
{
	t_PointList externContour, externalPoints;
	CvSeqReader reader;
	CvSeqWriter writer;
	CvPoint actualPoint, previousPoint;
	bool find = false;
	int i,j;
	int delta = 0;
	
	// has it already been calculated?
	if( m_externPerimeter != -1 )
	{
		return m_externPerimeter;
	}

	// get contour pixels
	externContour = m_externalContour.GetContourPoints();

	m_externPerimeter = 0;

	// are there any contour pixels?
	if( externContour == NULL )
	{
		return m_externPerimeter;
	}

	cvStartReadSeq( externContour, &reader);

	// create a sequence with the external points of the blob
	externalPoints = cvCreateSeq( externContour->flags, externContour->header_size, externContour->elem_size, 
								  m_storage );
	cvStartAppendToSeq( externalPoints, &writer );
	previousPoint.x = -1;

	// which contour pixels touch border?
	for( j=0; j< externContour->total; j++)
	{
		CV_READ_SEQ_ELEM( actualPoint, reader);

		find = false;

		// is the pixel touching a border?
		if ( xBorder & ((actualPoint.x == 0) || (actualPoint.x == m_originalImageSize.width - 1 )) ||
			 yBorder & ((actualPoint.y == 0) || (actualPoint.y == m_originalImageSize.height - 1 )))
		{
			find = true;
		}
		else
		{
			if( maskImage != NULL )
			{
				// verify if some of 8-connected neighbours is black in mask
				char *pMask;
				
				pMask = (maskImage->imageData + actualPoint.x - 1 + (actualPoint.y - 1) * maskImage->widthStep);
				
				for ( i = 0; i < 3; i++, pMask++ )
				{
					if(*pMask == 0 && !find ) 
					{
						find = true;
						break;
					}						
				}
				
				if(!find)
				{
					pMask = (maskImage->imageData + actualPoint.x - 1 + (actualPoint.y ) * maskImage->widthStep);
				
					for ( i = 0; i < 3; i++, pMask++ )
					{
						if(*pMask == 0 && !find ) 
						{
							find = true;
							break;
						}
					}
				}
			
				if(!find)
				{
					pMask = (maskImage->imageData + actualPoint.x - 1 + (actualPoint.y + 1) * maskImage->widthStep);

					for ( i = 0; i < 3; i++, pMask++ )
					{
						if(*pMask == 0 && !find ) 
						{
							find = true;
							break;
						}
					}
				}
			}
		}

		if( find )
		{
			if( previousPoint.x > 0 )
				delta = abs(previousPoint.x - actualPoint.x) + abs(previousPoint.y - actualPoint.y);

			// calculate separately each external contour segment 
			if( delta > 2 )
			{
				cvEndWriteSeq( &writer );
				m_externPerimeter += cvArcLength( externalPoints, CV_WHOLE_SEQ, 0 );
				
				cvClearSeq( externalPoints );
				cvStartAppendToSeq( externalPoints, &writer );
				delta = 0;
				previousPoint.x = -1;
			}

			CV_WRITE_SEQ_ELEM( actualPoint, writer );
			previousPoint = actualPoint;
		}
		
	}

	cvEndWriteSeq( &writer );

	m_externPerimeter += cvArcLength( externalPoints, CV_WHOLE_SEQ, 0 );

	cvClearSeq( externalPoints );

	// divide by two because external points have one side inside the blob and the other outside
	// Perimeter of external points counts both sides, so it must be divided
	m_externPerimeter /= 2.0;
	
	return m_externPerimeter;
}
int CvMLData::read_csv(const char* filename)
{
    const int M = 1000000;
    const char str_delimiter[3] = { ' ', delimiter, '\0' };
    FILE* file = 0;
    CvMemStorage* storage;
    CvSeq* seq;
    char *ptr;
    float* el_ptr;
    CvSeqReader reader;
    int cols_count = 0;
    uchar *var_types_ptr = 0;

    clear();

    file = fopen( filename, "rt" );

    if( !file )
        return -1;

    // read the first line and determine the number of variables
    std::vector<char> _buf(M);
    char* buf = &_buf[0];
    if( !fgets_chomp( buf, M, file ))
    {
        fclose(file);
        return -1;
    }

    ptr = buf;
    while( *ptr == ' ' )
        ptr++;
    for( ; *ptr != '\0'; )
    {
        if(*ptr == delimiter || *ptr == ' ')
        {
            cols_count++;
            ptr++;
            while( *ptr == ' ' ) ptr++;
        }
        else
            ptr++;
    }

    cols_count++;

    if ( cols_count == 0)
    {
        fclose(file);
        return -1;
    }

    // create temporary memory storage to store the whole database
    el_ptr = new float[cols_count];
    storage = cvCreateMemStorage();
    seq = cvCreateSeq( 0, sizeof(*seq), cols_count*sizeof(float), storage );

    var_types = cvCreateMat( 1, cols_count, CV_8U );
    cvZero( var_types );
    var_types_ptr = var_types->data.ptr;

    for(;;)
    {
        char *token = NULL;
        int type;
        token = strtok(buf, str_delimiter);
        if (!token)
            break;
        for (int i = 0; i < cols_count-1; i++)
        {
            str_to_flt_elem( token, el_ptr[i], type);
            var_types_ptr[i] |= type;
            token = strtok(NULL, str_delimiter);
            if (!token)
            {
                fclose(file);
                return -1;
            }
        }
        str_to_flt_elem( token, el_ptr[cols_count-1], type);
        var_types_ptr[cols_count-1] |= type;
        cvSeqPush( seq, el_ptr );
        if( !fgets_chomp( buf, M, file ) )
            break;
    }
    fclose(file);

    values = cvCreateMat( seq->total, cols_count, CV_32FC1 );
    missing = cvCreateMat( seq->total, cols_count, CV_8U );
    var_idx_mask = cvCreateMat( 1, values->cols, CV_8UC1 );
    cvSet( var_idx_mask, cvRealScalar(1) );
    train_sample_count = seq->total;

    cvStartReadSeq( seq, &reader );
    for(int i = 0; i < seq->total; i++ )
    {
        const float* sdata = (float*)reader.ptr;
        float* ddata = values->data.fl + cols_count*i;
        uchar* dm = missing->data.ptr + cols_count*i;

        for( int j = 0; j < cols_count; j++ )
        {
            ddata[j] = sdata[j];
            dm[j] = ( fabs( MISS_VAL - sdata[j] ) <= FLT_EPSILON );
        }
        CV_NEXT_SEQ_ELEM( seq->elem_size, reader );
    }

    if ( cvNorm( missing, 0, CV_L1 ) <= FLT_EPSILON )
        cvReleaseMat( &missing );

    cvReleaseMemStorage( &storage );
    delete []el_ptr;
    return 0;
}
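CvMLData is the ML-module CSV loader this method belongs to; a minimal usage sketch follows (the file name and response column are assumptions).

// Illustrative use of CvMLData::read_csv().
CvMLData mldata;
if (mldata.read_csv("train.csv") != 0)
    fprintf(stderr, "Failed to load train.csv\n");
else
{
    mldata.set_response_idx(0);                 // treat the first column as the response
    const CvMat* values = mldata.get_values();  // the seq->total x cols_count matrix built above
    printf("loaded %d samples with %d columns\n", values->rows, values->cols);
}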
Example #24
/*
 * thread for displaying the opencv content
 */
void *cv_threadfunc (void *ptr) {
	IplImage* timg = cvCloneImage(rgbimg); // Image we do our processing on
	IplImage* dimg = cvCloneImage(rgbimg); // Image we draw on
	CvSize sz = cvSize( timg->width & -2, timg->height & -2);
	IplImage* outimg = cvCreateImage(sz, 8, 3);

	CvMemStorage* storage = cvCreateMemStorage(0);
	CvSeq* squares; // Sequence for squares - sets of 4 points
	CvSeq* contours; // Raw contours list
	CvSeq* result; // Single contour being processed

	CBlobResult blobs;
	CBlob *currentBlob;

	IplImage *pyr = cvCreateImage(cvSize(sz.width/2, sz.height/2), 8, 1);

	// Set region of interest
	cvSetImageROI(timg, cvRect(0, 0, sz.width, sz.height));
	cvSetImageROI(dimg, cvRect(0, 0, sz.width, sz.height));

	// Processing and contours
	while (1) {
		squares = cvCreateSeq(0, sizeof(CvSeq), sizeof(CvPoint), storage);

		pthread_mutex_lock( &mutex_rgb );
		cvCopy(rgbimg, dimg, 0);
		cvCopy(rgbimg, timg, 0);
		pthread_mutex_unlock( &mutex_rgb );

		// BLUR TEST
		// cvPyrDown(dimg, pyr, 7);
		// cvPyrUp(pyr, timg, 7);

		// DILATE TEST
		IplConvKernel* element = cvCreateStructuringElementEx(5, 5, 2, 2, 0);
		IplConvKernel* element2 = cvCreateStructuringElementEx(3, 3, 1, 1, 0);
		cvDilate(timg, timg, element, 2);
		cvErode(timg, timg, element2, 3);

		// THRESHOLD TEST 
		cvThreshold(timg, timg, 200, 255, CV_THRESH_BINARY);

		// Output processed or raw image.
		cvCvtColor(timg, outimg, CV_GRAY2BGR);

		// BLOB TEST
		blobs = CBlobResult( timg, (IplImage*)NULL, 0, true );
		// blobs.Filter( blobs, B_EXCLUDE, CBlobGetArea(), B_LESS, 50 );
		
		printf("Blobs: %d\n", blobs.GetNumBlobs());

		CBlob biggestBlob;
		blobs.GetNthBlob( CBlobGetArea(), 1, biggestBlob );
		biggestBlob.FillBlob( outimg, CV_RGB(255, 0, 0) );
		CvSeq* dest;
		biggestBlob.GetConvexHull(dest);
		
		// for (int i = 0; i < blobs.GetNumBlobs(); i++ )
		// {
		// 	currentBlob = blobs.GetBlob(i);
		// 	currentBlob->FillBlob( outimg, CV_RGB(255,0,0) );
		// }
		

//		// CONTOUR FINDING
//		cvFindContours(timg, storage, &contours, sizeof(CvContour), CV_RETR_LIST, CV_CHAIN_APPROX_SIMPLE, cvPoint(0,0));
//
//		while (contours)
//		{
//			// Approximate contour, accuracy proportional to perimeter of contour; may want to tune accuracy.
//			result = cvApproxPoly(contours, sizeof(CvContour), storage, CV_POLY_APPROX_DP, cvContourPerimeter(contours) * 0.02, 0);
//			// Filter small contours and contours w/o 4 vertices (filters noise, finds rectangles)
//			if (result->total == 4 && 
//				fabs(cvContourArea(result, CV_WHOLE_SEQ)) > 600 && 
//				cvCheckContourConvexity(result))
//			{
//				// Skipped checking whether angles were close to 90 degrees here; may want to implement.
//				// Probably also want to check if it's square enough to filter out ex. long windows.
//
//				for (int i = 0; i < 4; i++)
//				{
//					// Write vertices to output sequence
//					cvSeqPush(squares, (CvPoint*)cvGetSeqElem(result, i));
//				}
//			}
//
//			// Take next contour
//			contours = contours->h_next;
//		}
//
//
//		// DRAW RECTANGLES
//		CvSeqReader reader;
//		cvStartReadSeq(squares, &reader, 0);
//
//		// Read 4 points at a time
//		CvPoint pt[4];
//		CvPoint *rect = pt;
//		CvRect out[4];
//		CvRect *outrect = out;
//		for (int i = 0; i < squares->total; i += 4)
//		{
//			int count = 4;
//			
//			// Which point is which corner is unpredictable.
//			CV_READ_SEQ_ELEM(pt[0], reader); 
//			CV_READ_SEQ_ELEM(pt[1], reader);
//			CV_READ_SEQ_ELEM(pt[2], reader);
//			CV_READ_SEQ_ELEM(pt[3], reader);
//			// Draw rectangle on output
//			cvPolyLine(outimg, &rect, &count, 1, 1, CV_RGB(0,255,0), 1, CV_AA, 0);
//			// Make rectangles
//			// Print (temporary)
//			printf("Rect[0]: %d, %d\n", pt[0].x, pt[0].y);
//			printf("Rect[1]: %d, %d\n", pt[1].x, pt[1].y);
//			printf("Rect[2]: %d, %d\n", pt[2].x, pt[2].y);
//			printf("Rect[3]: %d, %d\n\n", pt[3].x, pt[3].y);
//			fflush(stdout);
//
//		}
//
		// Print on order
		if( cvWaitKey( 15 ) == 27 )
		{
		}

		cvShowImage (FREENECTOPENCV_WINDOW_N,outimg);
		cvClearMemStorage(storage);
	}
	pthread_exit(NULL);
}
Example #25
void icvCalcContoursCorrespondence(CvSeq* contour1,
                                   CvSeq* contour2,
                                   CvSeq** corr,
                                   CvMemStorage* storage)
{
    int i,j;                    // counter of cycles
    int Ni,Nj;                  // size of contours
    _CvWork** W;                // graph for search minimum of work

    CvPoint* point1;            // array of first contour point
    CvPoint* point2;            // array of second contour point
    CvPoint2D32f* edges1;       // array of first contour edge
    CvPoint2D32f* edges2;       // array of second contour edge

    //CvPoint null_edge = {0,0};    //
    CvPoint2D32f small_edge;
    //double inf;                   // infinity

    CvSeq* corr01;
    CvSeqWriter writer;

    char path;                  //

    // Find size of contours
    Ni = contour1->total + 1;
    Nj = contour2->total + 1;

    // Create arrays
    W = (_CvWork**)malloc(sizeof(_CvWork*)*Ni);
    for(i=0; i<Ni; i++)
    {
        W[i] = (_CvWork*)malloc(sizeof(_CvWork)*Nj);
    }

    point1 = (CvPoint* )malloc( Ni*sizeof(CvPoint) );
    point2 = (CvPoint* )malloc( Nj*sizeof(CvPoint) );
    edges1 = (CvPoint2D32f* )malloc( (Ni-1)*sizeof(CvPoint2D32f) );
    edges2 = (CvPoint2D32f* )malloc( (Nj-1)*sizeof(CvPoint2D32f) );

    // Initialize arrays of point
    cvCvtSeqToArray( contour1, point1, CV_WHOLE_SEQ );
    cvCvtSeqToArray( contour2, point2, CV_WHOLE_SEQ );

    point1[Ni-1] = point1[0];
    point2[Nj-1] = point2[0];

    for(i=0; i<Ni-1; i++)
    {
        edges1[i].x = (float)( point1[i+1].x - point1[i].x );
        edges1[i].y = (float)( point1[i+1].y - point1[i].y );
    };

    for(i=0; i<Nj-1; i++)
    {
        edges2[i].x = (float)( point2[i+1].x - point2[i].x );
        edges2[i].y = (float)( point2[i+1].y - point2[i].y );
    };

    // Find infinity constant
    //inf=1;
/////////////

//Find min path in graph

/////////////
    W[0][0].w_east      = 0;
    W[0][0].w_south     = 0;
    W[0][0].w_southeast = 0;

    W[1][1].w_southeast = _cvStretchingWork( &edges1[0], &edges2[0] );
    W[1][1].w_east = inf;
    W[1][1].w_south = inf;
    W[1][1].path_se = PATH_TO_SE;

    W[0][1].w_south =  _cvStretchingWork( &null_edge, &edges2[0] );
    W[0][1].path_s = 3;
    W[1][0].w_east =  _cvStretchingWork( &edges2[0], &null_edge );
    W[1][0].path_e = PATH_TO_E;

    for( i=1; i<Ni; i++ )
    {
        W[i][0].w_south     = inf;
        W[i][0].w_southeast = inf;
    }

    for(j=1; j<Nj; j++)
    {
        W[0][j].w_east      = inf;
        W[0][j].w_southeast = inf;
    }

    for(i=2; i<Ni; i++)
    {
        j=0;/////////
        W[i][j].w_east = W[i-1][j].w_east;
        W[i][j].w_east = W[i][j].w_east /*+
            _cvBendingWork( &edges1[i-2], &edges1[i-1], &null_edge, &null_edge, NULL )*/;
        W[i][j].w_east = W[i][j].w_east + _cvStretchingWork( &edges2[i-1], &null_edge );
        W[i][j].path_e = PATH_TO_E;

        j=1;//////////
        W[i][j].w_south = inf;

        _cvWorkEast (i, j, W, edges1, edges2);

        W[i][j].w_southeast = W[i-1][j-1].w_east;
        W[i][j].w_southeast = W[i][j].w_southeast + _cvStretchingWork( &edges1[i-1], &edges2[j-1] );

        small_edge.x = NULL_EDGE*edges1[i-2].x;
        small_edge.y = NULL_EDGE*edges1[i-2].y;

        W[i][j].w_southeast = W[i][j].w_southeast +
                              _cvBendingWork( &edges1[i-2], &edges1[i-1], /*&null_edge*/&small_edge, &edges2[j-1]/*, &edges2[Nj-2]*/);

        W[i][j].path_se = PATH_TO_E;
    }

    for(j=2; j<Nj; j++)
    {
        i=0;//////////
        W[i][j].w_south = W[i][j-1].w_south;
        W[i][j].w_south = W[i][j].w_south + _cvStretchingWork( &null_edge, &edges2[j-1] );
        W[i][j].w_south = W[i][j].w_south /*+
            _cvBendingWork( &null_edge, &null_edge, &edges2[j-2], &edges2[j-1], NULL )*/;
        W[i][j].path_s = 3;

        i=1;///////////
        W[i][j].w_east= inf;

        _cvWorkSouth(i, j, W, edges1, edges2);

        W[i][j].w_southeast = W[i-1][j-1].w_south;
        W[i][j].w_southeast = W[i][j].w_southeast + _cvStretchingWork( &edges1[i-1], &edges2[j-1] );

        small_edge.x = NULL_EDGE*edges2[j-2].x;
        small_edge.y = NULL_EDGE*edges2[j-2].y;

        W[i][j].w_southeast = W[i][j].w_southeast +
                              _cvBendingWork( /*&null_edge*/&small_edge, &edges1[i-1], &edges2[j-2], &edges2[j-1]/*, &edges1[Ni-2]*/);
        W[i][j].path_se = 3;
    }

    for(i=2; i<Ni; i++)
        for(j=2; j<Nj; j++)
        {
            _cvWorkEast     (i, j, W, edges1, edges2);
            _cvWorkSouthEast(i, j, W, edges1, edges2);
            _cvWorkSouth    (i, j, W, edges1, edges2);
        }

    i=Ni-1;
    j=Nj-1;

    *corr = cvCreateSeq(0,
                        sizeof(CvSeq),
                        sizeof(int),
                        storage );

    corr01 = *corr;
    cvStartAppendToSeq( corr01, &writer );
    if( W[i][j].w_east > W[i][j].w_southeast )
    {
        if( W[i][j].w_southeast > W[i][j].w_south )
        {
            path = 3;
        }
        else
        {
            path = PATH_TO_SE;
        }
    }
    else
    {
        if( W[i][j].w_east < W[i][j].w_south )
        {
            path = PATH_TO_E;
        }
        else
        {
            path = 3;
        }
    }
    do
    {
        CV_WRITE_SEQ_ELEM( j, writer );

        switch( path )
        {
        case PATH_TO_E:
            path = W[i][j].path_e;
            i--;
            cvFlushSeqWriter( &writer );
            corr01->h_next = cvCreateSeq(   0,
                                            sizeof(CvSeq),
                                            sizeof(int),
                                            storage );
            corr01 = corr01->h_next;
            cvStartAppendToSeq( corr01, &writer );
            break;

        case PATH_TO_SE:
            path = W[i][j].path_se;
            j--;
            i--;
            cvFlushSeqWriter( &writer );
            corr01->h_next = cvCreateSeq(   0,
                                            sizeof(CvSeq),
                                            sizeof(int),
                                            storage );
            corr01 = corr01->h_next;
            cvStartAppendToSeq( corr01, &writer );
            break;

        case 3:
            path = W[i][j].path_s;
            j--;
            break;
        }

    } while( (i>=0) && (j>=0) );
    cvFlushSeqWriter( &writer );

    // Free memory
    for(i=0; i<Ni; i++)
    {
        free(W[i]);
    }
    free(W);
    free(point1);
    free(point2);
    free(edges1);
    free(edges2);
}
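
The function writes its result as a chain of integer sequences linked through h_next (a new sequence is started each time the path steps along the first contour, and each sequence holds indices into the second contour). A minimal caller-side sketch of walking that chain, assuming a `corr` obtained from the call above and that <stdio.h> is available:

void printContourCorrespondence( CvSeq* corr )
{
    int node = 0;
    // Each node in the h_next chain is a CvSeq of int indices.
    for( CvSeq* s = corr; s != NULL; s = s->h_next, node++ )
    {
        printf( "node %d:", node );
        for( int k = 0; k < s->total; k++ )
        {
            printf( " %d", *(int*)cvGetSeqElem( s, k ) );
        }
        printf( "\n" );
    }
}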
/*
// find rectangular regions in the given image that are likely 
// to contain objects and corresponding confidence levels
//
// API
// CvSeq* cvLatentSvmDetectObjects(const IplImage* image, 
//									CvLatentSvmDetector* detector, 
//									CvMemStorage* storage, 
//									float overlap_threshold = 0.5f,
//									int numThreads = -1);
// INPUT
// image				- image to detect objects in
// detector				- Latent SVM detector in internal representation
// storage				- memory storage to store the resultant sequence 
//							of the object candidate rectangles
// overlap_threshold	- threshold for the non-maximum suppression algorithm [here will be the reference to original paper]
// numThreads			- number of threads used in the parallel version of the algorithm (-1 means use the default)
// OUTPUT
// sequence of detected objects (bounding boxes and confidence levels stored in CvObjectDetection structures)
*/
CvSeq* cvLatentSvmDetectObjects(IplImage* image, 
								CvLatentSvmDetector* detector, 
								CvMemStorage* storage, 
								float overlap_threshold, int numThreads)
{
	CvLSVMFeaturePyramid *H = 0;
    CvPoint *points = 0, *oppPoints = 0;
    int kPoints = 0;
    float *score = 0;    
    unsigned int maxXBorder = 0, maxYBorder = 0;
	int numBoxesOut = 0;
	CvPoint *pointsOut = 0;
	CvPoint *oppPointsOut = 0; 
    float *scoreOut = 0;
	CvSeq* result_seq = 0;
    int error = 0;

    cvConvertImage(image, image, CV_CVTIMG_SWAP_RB);
    // Getting maximum filter dimensions
	getMaxFilterDims((const CvLSVMFilterObject**)(detector->filters), detector->num_components, 
                     detector->num_part_filters, &maxXBorder, &maxYBorder);
    // Create feature pyramid with nullable border
    H = createFeaturePyramidWithBorder(image, maxXBorder, maxYBorder);
    // Search object
    error = searchObjectThresholdSomeComponents(H, (const CvLSVMFilterObject**)(detector->filters), 
        detector->num_components, detector->num_part_filters, detector->b, detector->score_threshold, 
        &points, &oppPoints, &score, &kPoints, numThreads);
    if (error != LATENT_SVM_OK)
    {
        return NULL;
    }
    // Clipping boxes
    clippingBoxes(image->width, image->height, points, kPoints);
    clippingBoxes(image->width, image->height, oppPoints, kPoints);
    // NMS procedure
    nonMaximumSuppression(kPoints, points, oppPoints, score, overlap_threshold,
                &numBoxesOut, &pointsOut, &oppPointsOut, &scoreOut);

	result_seq = cvCreateSeq( 0, sizeof(CvSeq), sizeof(CvObjectDetection), storage );

	for (int i = 0; i < numBoxesOut; i++)
	{
		CvObjectDetection detection = {{0, 0, 0, 0}, 0};
		detection.score = scoreOut[i];
		CvRect bounding_box = {0, 0, 0, 0};
		bounding_box.x = pointsOut[i].x;
		bounding_box.y = pointsOut[i].y;
		bounding_box.width = oppPointsOut[i].x - pointsOut[i].x;
		bounding_box.height = oppPointsOut[i].y - pointsOut[i].y;
		detection.rect = bounding_box;
		cvSeqPush(result_seq, &detection);
	}
    cvConvertImage(image, image, CV_CVTIMG_SWAP_RB);

    freeFeaturePyramidObject(&H);
    free(points);
    free(oppPoints);
    free(score);

	return result_seq;
}
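
A minimal caller sketch for this detector. It assumes the legacy objdetect entry points cvLoadLatentSvmDetector / cvReleaseLatentSvmDetector, and "image.jpg" / "cat.xml" are placeholder paths, not part of the original example:

// Minimal usage sketch: load a trained Latent SVM model, run
// cvLatentSvmDetectObjects, and draw the returned boxes.
IplImage* img = cvLoadImage("image.jpg");                            // placeholder path
CvLatentSvmDetector* detector = cvLoadLatentSvmDetector("cat.xml");  // placeholder model
CvMemStorage* storage = cvCreateMemStorage(0);

CvSeq* detections = cvLatentSvmDetectObjects(img, detector, storage, 0.5f, -1);
for (int i = 0; detections && i < detections->total; i++)
{
	CvObjectDetection det = *(CvObjectDetection*)cvGetSeqElem(detections, i);
	cvRectangle(img, cvPoint(det.rect.x, det.rect.y),
		cvPoint(det.rect.x + det.rect.width, det.rect.y + det.rect.height),
		CV_RGB(255, 0, 0), 2, 8, 0);
}

cvReleaseMemStorage(&storage);
cvReleaseLatentSvmDetector(&detector);
cvReleaseImage(&img);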
Example #27
void  detect(IplImage* img_8uc1,IplImage* img_8uc3) 
{  
	clock_t inicio, fin;
	inicio = clock();

	CvMemStorage* storage = cvCreateMemStorage();
	CvSeq* first_contour = NULL;
	CvSeq* maxitem=NULL;
	char resultado [] = " ";
	double area=0,areamax=0;
	double longitudExt = 0;
	double radio = 0;
	int maxn=0;
	int Nc = cvFindContours(
		img_8uc1,
		storage,
		&first_contour,
		sizeof(CvContour),
		CV_RETR_LIST 
		);
	int n=0;
	//printf( "Contornos detectados: %d\n", Nc );

	if(Nc>0)
	{
		for( CvSeq* c=first_contour; c!=NULL; c=c->h_next ) 
		{     
			area=cvContourArea(c,CV_WHOLE_SEQ );

			if(area>areamax)
			{
				areamax=area;
				maxitem=c;
				maxn=n;
			}
			n++;
		}

		CvMemStorage* storage3 = cvCreateMemStorage(0);

		if(areamax>5000)
		{
			maxitem = cvApproxPoly( maxitem, sizeof(CvContour), storage3, CV_POLY_APPROX_DP, 10, 1 );
			CvPoint pt0;

			CvMemStorage* storage1 = cvCreateMemStorage(0);
			CvMemStorage* storage2 = cvCreateMemStorage(0);
			CvSeq* ptseq = cvCreateSeq( CV_SEQ_KIND_GENERIC|CV_32SC2, sizeof(CvContour), sizeof(CvPoint), storage1 );
			CvSeq* hull;
			CvSeq* defects;

			CvPoint minDefectPos;
			minDefectPos.x = 1000000;
			minDefectPos.y = 1000000;

			CvPoint maxDefectPos;
			maxDefectPos.x = 0;
			maxDefectPos.y = 0;			


			for(int i = 0; i < maxitem->total; i++ )
			{   
				CvPoint* p = CV_GET_SEQ_ELEM( CvPoint, maxitem, i );
				pt0.x = p->x;
				pt0.y = p->y;
				cvSeqPush( ptseq, &pt0 );
			}
			hull = cvConvexHull2( ptseq, 0, CV_CLOCKWISE, 0 );
			int hullcount = hull->total;

			defects= cvConvexityDefects(ptseq,hull,storage2  );

			//printf(" Numero de defectos %d \n",defects->total);
			CvConvexityDefect* defectArray;  

			int j=0;  

			// This cycle marks all defects of convexity of current contours.  

			longitudExt = 0;

			for(;defects;defects = defects->h_next)  
			{  
				int nomdef = defects->total; // defect amount  
				//outlet_float( m_nomdef, nomdef );  
				//printf(" defect no %d \n",nomdef);

				if(nomdef == 0)  
					continue;  

				// Alloc memory for defect set.     
				//fprintf(stderr,"malloc\n");  
				defectArray = (CvConvexityDefect*)malloc(sizeof(CvConvexityDefect)*nomdef);  

				// Get defect set.  
				//fprintf(stderr,"cvCvtSeqToArray\n");  
				cvCvtSeqToArray(defects, defectArray, CV_WHOLE_SEQ);

				// Draw marks for all defects.  
				for(int i=0; i<nomdef; i++)  
				{  					

					CvPoint startP;
					startP.x = defectArray[i].start->x;
					startP.y = defectArray[i].start->y;

					CvPoint depthP;
					depthP.x = defectArray[i].depth_point->x;
					depthP.y = defectArray[i].depth_point->y;

					CvPoint endP;
					endP.x = defectArray[i].end->x;
					endP.y = defectArray[i].end->y;

					// track the minimum and maximum defect positions

					minDefectPos.x = getMin (startP.x, depthP.x, endP.x, minDefectPos.x);
					minDefectPos.y = getMin (startP.y, depthP.y, endP.y, minDefectPos.y);

					maxDefectPos.x = getMax (startP.x, depthP.x, endP.x, maxDefectPos.x);
					maxDefectPos.y = getMax (startP.y, depthP.y, endP.y, maxDefectPos.y);					

					// end of min/max tracking
					if (saveLength)
					{
						longitudExt += longBtwnPoints(startP, depthP);
						longitudExt += longBtwnPoints(depthP, endP);


					}
					//printf(" defect depth for defect %d %f \n",i,defectArray[i].depth);
					cvLine(img_8uc3, startP, depthP, CV_RGB(255,255,0),1, CV_AA, 0 ); 


					cvCircle( img_8uc3, depthP, 5, CV_RGB(0,0,164), 2, 8,0);  
					cvCircle( img_8uc3, startP, 5, CV_RGB(255,0,0), 2, 8,0);  
					cvCircle( img_8uc3, endP, 5, CV_RGB(0,255,0), 2, 8,0);  

					cvLine(img_8uc3, depthP, endP,CV_RGB(0,0,0),1, CV_AA, 0 );   
				} 

				/*if (nomdef>0)
				{
				resultado [0] = identificaGesto (longitudExt, nomdef, radio);
				if (resultado[0] !=' ')
				printf ("Gesto identificado (%c) \n", resultado[0]);
				}*/

				if (saveLength)
				{
					radio = (double)maxDefectPos.x / (double)maxDefectPos.y;
					if (nomdef>0)
					{
						printf ("_______________________\n");

						
 
						
						

						resultado [0] = identificaGesto (longitudExt, nomdef, radio);
						
						fin = clock();
						fin = fin - inicio;

						if (resultado[0] !=' ')
							printf ("Gesto identificado (%c) \n", resultado[0]);
						else
							printf ("No se identifico ningun gesto\n");

						printf("Tiempo de ejecucion: %f\nLongitud %g \nNomDef %i \nradio %g \n",(((float)fin)/CLOCKS_PER_SEC ), longitudExt, nomdef, radio);
						FILE *fp;
						fp=fopen("archivo.txt", "a");
						if (nomdef == 6)
							fprintf(fp, "\n>>>>>>>5<<<<<<\n%g\n%i\n%g\n",longitudExt, nomdef, radio);
						else
							fprintf(fp, "\n%g\n%i\n%g\n",longitudExt, nomdef, radio);
						fclose (fp);
					}
					else
						printf("No hay defectos");
					printf ("_______________________\n");
				}
				/*
				char txt[]="0";
				txt[0]='0'+nomdef-1;
				CvFont font;
				cvInitFont(&font, CV_FONT_HERSHEY_SIMPLEX, 1.0, 1.0, 0, 5, CV_AA);
				cvPutText(img_8uc3, txt, cvPoint(50, 50), &font, cvScalar(0, 0, 255, 0)); */

				CvFont font;
				cvInitFont(&font, CV_FONT_HERSHEY_SIMPLEX, 1.0, 1.0, 0, 5, CV_AA);
				if (resultado[0] != ' ')	// resultado is an array, never NULL; check whether a gesture was stored
					cvPutText(img_8uc3, resultado, cvPoint(50, 50), &font, cvScalar(0, 0, 255, 0));

				j++;  

				// Free memory.   
				free(defectArray);  
			} 

			pt0 = **CV_GET_SEQ_ELEM( CvPoint*, hull, hullcount - 1 );

			for(int i = 0; i < hullcount; i++ )
			{

				CvPoint pt = **CV_GET_SEQ_ELEM( CvPoint*, hull, i );
				cvLine( img_8uc3, pt0, pt, CV_RGB( 0, 255, 0 ), 1, CV_AA, 0 );
				pt0 = pt;
			}



			cvLine( img_8uc3, minDefectPos, cvPoint( (maxDefectPos.x), (minDefectPos.y)), CV_RGB( 255, 0, 0 ), 1, CV_AA, 0 );
			cvLine( img_8uc3,  cvPoint( (maxDefectPos.x), (minDefectPos.y)), maxDefectPos, CV_RGB( 255, 0, 0 ), 1, CV_AA, 0 );
			cvLine( img_8uc3, maxDefectPos, cvPoint( (minDefectPos.x), (maxDefectPos.y)), CV_RGB( 255, 0, 0 ), 1, CV_AA, 0 );
			cvLine( img_8uc3, cvPoint( (minDefectPos.x), (maxDefectPos.y)), minDefectPos, CV_RGB( 255, 0, 0 ), 1, CV_AA, 0 );

			cvReleaseMemStorage( &storage );
			cvReleaseMemStorage( &storage1 );
			cvReleaseMemStorage( &storage2 );
			cvReleaseMemStorage( &storage3 );
			//return 0;
		}
	}
}
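
detect() relies on a few helpers (getMin, getMax, longBtwnPoints) and globals (saveLength, identificaGesto) that are not shown in this example. Below is a minimal sketch of what the three small helpers likely look like, inferred only from their call sites above; in a real build they would be declared before detect(), and longBtwnPoints needs <math.h>:

// Assumed helper implementations (not part of the original example).
static int getMin(int a, int b, int c, int current)
{
	int m = current;
	if (a < m) m = a;
	if (b < m) m = b;
	if (c < m) m = c;
	return m;
}

static int getMax(int a, int b, int c, int current)
{
	int m = current;
	if (a > m) m = a;
	if (b > m) m = b;
	if (c > m) m = c;
	return m;
}

// Euclidean distance between two points, used to accumulate the defect path length.
static double longBtwnPoints(CvPoint a, CvPoint b)
{
	double dx = (double)(a.x - b.x);
	double dy = (double)(a.y - b.y);
	return sqrt(dx * dx + dy * dy);
}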
void cvFindBlobsByCCClasters(IplImage* pFG, CvBlobSeq* pBlobs, CvMemStorage* storage)
{   /* Create contours: */
    IplImage*       pIB = NULL;
    CvSeq*          cnt = NULL;
    CvSeq*          cnt_list = cvCreateSeq(0,sizeof(CvSeq),sizeof(CvSeq*), storage );
    CvSeq*          clasters = NULL;
    int             claster_cur, claster_num;

    pIB = cvCloneImage(pFG);
    cvThreshold(pIB,pIB,128,255,CV_THRESH_BINARY);
    cvFindContours(pIB,storage, &cnt, sizeof(CvContour), CV_RETR_EXTERNAL);
    cvReleaseImage(&pIB);

    /* Create cnt_list.      */
    /* Process each contour: */
    for(; cnt; cnt=cnt->h_next)
    {
        cvSeqPush( cnt_list, &cnt);
    }

    claster_num = cvSeqPartition( cnt_list, storage, &clasters, CompareContour, NULL );

    for(claster_cur=0; claster_cur<claster_num; ++claster_cur)
    {
        int         cnt_cur;
        CvBlob      NewBlob;
        double      M00,X,Y,XX,YY; /* image moments */
        CvMoments   m;
        CvRect      rect_res = cvRect(-1,-1,-1,-1);
        CvMat       mat;

        for(cnt_cur=0; cnt_cur<clasters->total; ++cnt_cur)
        {
            CvRect  rect;
            CvSeq*  cnt;
            int k = *(int*)cvGetSeqElem( clasters, cnt_cur );
            if(k!=claster_cur) continue;
            cnt = *(CvSeq**)cvGetSeqElem( cnt_list, cnt_cur );
            rect = ((CvContour*)cnt)->rect;

            if(rect_res.height<0)
            {
                rect_res = rect;
            }
            else
            {   /* Unite rects: */
                int x0,x1,y0,y1;
                x0 = MIN(rect_res.x,rect.x);
                y0 = MIN(rect_res.y,rect.y);
                x1 = MAX(rect_res.x+rect_res.width,rect.x+rect.width);
                y1 = MAX(rect_res.y+rect_res.height,rect.y+rect.height);
                rect_res.x = x0;
                rect_res.y = y0;
                rect_res.width = x1-x0;
                rect_res.height = y1-y0;
            }
        }

        if(rect_res.height < 1 || rect_res.width < 1)
        {
            X = 0;
            Y = 0;
            XX = 0;
            YY = 0;
        }
        else
        {
            cvMoments( cvGetSubRect(pFG,&mat,rect_res), &m, 0 );
            M00 = cvGetSpatialMoment( &m, 0, 0 );
            if(M00 <= 0 ) continue;
            X = cvGetSpatialMoment( &m, 1, 0 )/M00;
            Y = cvGetSpatialMoment( &m, 0, 1 )/M00;
            XX = (cvGetSpatialMoment( &m, 2, 0 )/M00) - X*X;
            YY = (cvGetSpatialMoment( &m, 0, 2 )/M00) - Y*Y;
        }
        NewBlob = cvBlob(rect_res.x+(float)X,rect_res.y+(float)Y,(float)(4*sqrt(XX)),(float)(4*sqrt(YY)));
        pBlobs->AddBlob(&NewBlob);

    }   /* Next cluster. */

    #if 0
    {   // Debug info:
        IplImage* pI = cvCreateImage(cvSize(pFG->width,pFG->height),IPL_DEPTH_8U,3);
        cvZero(pI);
        for(claster_cur=0; claster_cur<claster_num; ++claster_cur)
        {
            int         cnt_cur;
            CvScalar    color = CV_RGB(rand()%256,rand()%256,rand()%256);

            for(cnt_cur=0; cnt_cur<clasters->total; ++cnt_cur)
            {
                CvSeq*  cnt;
                int k = *(int*)cvGetSeqElem( clasters, cnt_cur );
                if(k!=claster_cur) continue;
                cnt = *(CvSeq**)cvGetSeqElem( cnt_list, cnt_cur );
                cvDrawContours( pI, cnt, color, color, 0, 1, 8);
            }

            CvBlob* pB = pBlobs->GetBlob(claster_cur);
            int x = cvRound(CV_BLOB_RX(pB)), y = cvRound(CV_BLOB_RY(pB));
            cvEllipse( pI,
                cvPointFrom32f(CV_BLOB_CENTER(pB)),
                cvSize(MAX(1,x), MAX(1,y)),
                0, 0, 360,
                color, 1 );
        }

        cvNamedWindow( "Clusters", 0);
        cvShowImage( "Clusters",pI );

        cvReleaseImage(&pI);

    }   /* Debug info. */
    #endif

}   /* cvFindBlobsByCCClasters */
Example #29
//--------------------------------------------------------------
void eyeTracker::update(ofxCvGrayscaleImage & grayImgFromCam, float threshold, float minSize, float maxSize,  float minSquareness) {

    //threshold?
    //threshold = thresh;

    grayImgPreWarp.setFromPixels(grayImgFromCam.getPixels(), grayImgFromCam.width, grayImgFromCam.height);		// TODO: there's maybe an unnecessary grayscale image (and copy) here...

    if( flipX || flipY ) {
        grayImgPreWarp.mirror(flipY, flipX);
    }

    /*  // before we were scaling and translating, but this is removed for now

     if (fabs(xoffset-1) > 0.1f || fabs(yoffset-1) > 0.1f){
    	grayImgPreWarp.translate(xoffset, yoffset);
    }

    if (fabs(scalef-1) > 0.1f){
    	grayImgPreWarp.scale(scalef, scalef);
    }*/

    grayImg = grayImgPreWarp;



    grayImgPreModification = grayImg;
    grayImg.blur(5);

    if (bUseContrast == true) {
        grayImg.applyBrightnessContrast(brightness,contrast);
    }

    if (bUseGamma == true) {
        grayImg.applyMinMaxGamma(gamma);
    }

    grayImg += edgeMask;

    threshImg = grayImg;


    threshImg.contrastStretch();
    threshImg.threshold(threshold, true);


    // the dilation of a 640 x 480 image is very slow, so let's just do a ROI near the thing we like:

    threshImg.setROI(currentEyePoint.x-50, currentEyePoint.y-50, 100,100);	// 200 pix ok?
    if (bUseDilate == true) {
        for (int i = 0; i < nDilations; i++) {
            threshImg.dilate();
        }
    }
    threshImg.resetROI();



    bFoundOne = false;

    int		whoFound = -1;

    int num = contourFinder.findContours(threshImg, minSize, maxSize, 100, false, true);
    if( num ) {

        for(int k = 0; k < num; k++) {



            float ratio =	contourFinder.blobs[k].boundingRect.width < contourFinder.blobs[k].boundingRect.height ?
                            contourFinder.blobs[k].boundingRect.width / contourFinder.blobs[k].boundingRect.height :
                            contourFinder.blobs[k].boundingRect.height / contourFinder.blobs[k].boundingRect.width;

            float arcl = contourFinder.blobs[k].length;
            float area = contourFinder.blobs[k].area;
            float compactness = (float)((arcl*arcl/area)/FOUR_PI);

            if (bUseCompactnessTest	== true && compactness > maxCompactness) {
                continue;
            }


            //printf("compactness %f \n", compactness);

            //lets ignore rectangular blobs
            if( ratio > minSquareness) {
                currentEyePoint = contourFinder.blobs[k].centroid;
                currentNormPoint.x = currentEyePoint.x;
                currentNormPoint.y = currentEyePoint.y;

                currentNormPoint.x /= w;
                currentNormPoint.y /= h;


                bFoundOne = true;
                whoFound = k;

                break;
            }
        }
    }

    if (bFoundOne && whoFound != -1) {



        // do some convex hull stuff:
        CvSeq* ptseq = cvCreateSeq( CV_SEQ_KIND_GENERIC|CV_32SC2, sizeof(CvContour),sizeof(CvPoint), storage );
        CvSeq* hull;
        CvPoint pt0;

        for(int i = 0; i < contourFinder.blobs[whoFound].nPts; i++ ) {

            pt0.x = contourFinder.blobs[whoFound].pts[i].x;
            pt0.y = contourFinder.blobs[whoFound].pts[i].y;
            cvSeqPush( ptseq, &pt0 );

        }
        hull = cvConvexHull2( ptseq, 0, CV_CLOCKWISE, 0 );
        int hullcount = hull->total;



        // -------------------------------- TRY TO GET A GOOD ELLIPSE HELLS YEAH !!

        int MYN = hullcount;
        float x[MYN], y[MYN];
        double p[6];
        double ellipseParam[5];
        float theta;
        FitEllipse fitter;
        for (int i=0; i<MYN; i++) {
            CvPoint pt = **CV_GET_SEQ_ELEM( CvPoint*, hull, i);
            x[i] = pt.x;
            y[i] =  pt.y;
        }
        double xc, yc;
        double xa, ya;
        double la, lb;
        fitter.apply(x,y,MYN);
        p[0] = fitter.Axx;
        p[1] = fitter.Axy;
        p[2] = fitter.Ayy;
        p[3] = fitter.Ax;
        p[4] = fitter.Ay;
        p[5] = fitter.Ao;

        bool bOk = solve_ellipse(p,ellipseParam);

        ofxCvBlob temp;


        if (bOk == true) {

            //float *params_ellipse = pupilGeometries[whichEye].params_ellipse;
            float axis_a = ellipseParam[0];
            float axis_b = ellipseParam[1];
            float cx 	 = ellipseParam[2];
            float cy 	 = ellipseParam[3];
            theta	 = ellipseParam[4];
            float aspect = axis_b/axis_a;

            for (int i = 0; i < 5; i++) {
                eyeTrackedEllipse.ellipseParam[i] = ellipseParam[i];
            }

            //theta = ofRandom(0,TWO_PI);


            int resolution = 24;

            ofxPoint2f ptsForRotation[resolution];

            for (int i=0; i<resolution; i++) {
                float t = TWO_PI * (float)i/(float)resolution;
                float ex = cx + (axis_a * cos(t ));
                float ey = cy + (axis_b * sin(t ));
                ptsForRotation[i].set(ex,ey);
            }

            for (int i=0; i<resolution; i++) {
                ptsForRotation[i].rotate(theta * RAD_TO_DEG, ofxPoint2f(cx, cy));
            }

            currentEyePoint.set(cx, cy);
            currentNormPoint.x = currentEyePoint.x;
            currentNormPoint.y = currentEyePoint.y;
            currentNormPoint.x /= w;
            currentNormPoint.y /= h;


        } else {


            bFoundOne = false;

        }

        cvRelease((void **)&hull);

    }
}
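
The ellipse fit above goes through a custom FitEllipse / solve_ellipse pair. For reference, OpenCV's own cvFitEllipse2 can produce a comparable fit directly from the hull point sequence; a minimal sketch of that alternative step, assuming the same ptseq built inside the block above (cvFitEllipse2 needs at least 5 points):

        // Alternative sketch (not the original method): fit an ellipse with cvFitEllipse2.
        if (ptseq->total >= 5) {
            CvBox2D box = cvFitEllipse2(ptseq);
            float cx     = box.center.x;           // ellipse centre
            float cy     = box.center.y;
            float axis_a = box.size.width  * 0.5f; // semi-axes
            float axis_b = box.size.height * 0.5f;
            float theta  = box.angle;              // rotation in degrees
            // These values could feed currentEyePoint / currentNormPoint the same way as above.
        }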
Example #30
IplImage* PlateFinder::FindPlate (IplImage *src) {
	IplImage* plate = NULL;
	IplImage* contourImg = cvCreateImage(cvGetSize(src), IPL_DEPTH_8U, 1);	// contour image
	IplImage* grayImg = cvCreateImage(cvGetSize(src), IPL_DEPTH_8U, 1);	// grayscale image
	cvCvtColor(src, grayImg, CV_RGB2GRAY);

	IplImage* cloneImg = cvCloneImage(src);
	
	// preprocess the image
	cvCopy(grayImg, contourImg);
	cvNormalize(contourImg, contourImg, 0, 255, CV_MINMAX);
	ImageRestoration(contourImg);
	
	IplImage* rectImg = cvCreateImage(cvGetSize(src), IPL_DEPTH_8U, 3);
	cvMerge(contourImg, contourImg, contourImg, NULL, rectImg); // merge into a 3-channel image

	// find contours in the image
	CvMemStorage *storagePlate = cvCreateMemStorage(0);
	CvSeq *contours = cvCreateSeq(CV_SEQ_ELTYPE_POINT, sizeof(CvSeq), sizeof(CvPoint), storagePlate);
	cvFindContours(contourImg, storagePlate, &contours, sizeof(CvContour), CV_RETR_EXTERNAL, CV_CHAIN_APPROX_SIMPLE, cvPoint(0, 0));

	//cvShowImage("contourImg", contourImg);
	

	int xmin, ymin, xmax, ymax, w, h, s, r;
	int count;
	double ratio;	// width-to-height ratio
	CvRect rectPlate; 

	// keep the images that are likely to be plates
	IplImage** plateArr = new IplImage *[5];
	int j = 0;
	for (int i = 0; i < 5; i++)
	{
		plateArr[i] = NULL;
	}

	while (contours) {
		count = contours->total;
		CvPoint *PointArray = new CvPoint[count];
		cvCvtSeqToArray (contours, PointArray, CV_WHOLE_SEQ);

		for (int i = 0; i < count; i++)
		{
			if (i == 0)
			{
				xmin = xmax = PointArray[i].x;
				ymin = ymax = PointArray[i].y;
			}

			if (PointArray[i].x > xmax) {
				xmax = PointArray[i].x;
			}
			if (PointArray[i].x < xmin)  {
				xmin = PointArray[i].x;
			}

			if (PointArray[i].y > ymax) {
				ymax = PointArray[i].y;
			}
			if (PointArray[i].y < ymin)  {
				ymin = PointArray[i].y;
			}
		}

		w = xmax - xmin;
		h = ymax - ymin;
		s = w * h;

		cvRectangle (rectImg, cvPoint(xmin, ymin), cvPoint(xmax, ymax), RED);

		// reject rectangles with the wrong proportions
		if (s != 0) {
			r = (contourImg->height * contourImg->width) / s;
		} else {
			r = 1000;
		}

		if (w == 0 && h == 0) {
			ratio = 0;
		} else {
			ratio = (double)w/h;
		}

		if (r > 30 && r < 270) {
			// draw a green rectangle
			cvRectangle (rectImg, cvPoint(xmin, ymin), cvPoint(xmax, ymax), GREEN);

			if (ratio > 2.6 && ratio < 7) {
				cvRectangle (rectImg, cvPoint(xmin, ymin), cvPoint(xmax, ymax), BLUE);

				if (w > 80 && w < 250 && h > 25 && h < 150) {
					rectPlate = cvRect (xmin, ymin, w, h);

					cvRectangle (cloneImg, cvPoint(rectPlate.x, rectPlate.y),
						cvPoint(rectPlate.x + rectPlate.width, rectPlate.y + rectPlate.height), RED, 3);

					// crop the plate
					plate = cvCreateImage(cvSize(rectPlate.width, rectPlate.height), IPL_DEPTH_8U, 3);
					cvSetImageROI(src, rectPlate);
					cvCopy(src, plate, NULL);
					cvResetImageROI(src);

					// store it in the candidate array plateArr
					int cnt = CountCharacter(plate);
					if (cnt >= 5) {
						plateArr[j] = cvCloneImage(plate);
						j++;
					}
				}
			}
		}

		delete []PointArray;

		contours = contours->h_next;
	}

	// sort: keep the narrowest candidate in slot 0
	if (plateArr[0]) 
	{
		int w = plateArr[0]->width;

		int flag = 0;	// default to the first candidate if none is narrower
		for (int i = 1; i < 4; i++)
		{
			if (plateArr[i] && plateArr[i]->width < w)
			{
				flag = i;
			}
		}

		plateArr[0] = plateArr[flag];
	}

	cvShowImage("cloneImg", cloneImg);
	//cvShowImage("rectImg", rectImg);
	//cvShowImage("plate", plateArr[0]);

	cvReleaseImage(&contourImg);
	cvReleaseImage(&rectImg);
	cvReleaseImage(&plate);

	return plateArr[0];
}
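
A minimal caller sketch for FindPlate. It assumes a PlateFinder instance can be default-constructed (the rest of the class is not shown here) and uses "car.jpg" as a placeholder input path:

// Minimal usage sketch (PlateFinder construction and the image path are assumptions).
PlateFinder finder;
IplImage* src = cvLoadImage("car.jpg");
if (src) {
	IplImage* plate = finder.FindPlate(src);
	if (plate) {
		cvShowImage("plate", plate);
		cvWaitKey(0);
	}
	cvReleaseImage(&src);
}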