//--------------------------------------------------------------------------------
int ContourFinder::findContours(	ofxCvGrayscaleImage&  input,
									int minArea,
									int maxArea,
									int nConsidered,
									double hullPress,	
									bool bFindHoles,
									bool bUseApproximation  ) {
	reset();

	// OpenCV will clobber the image it detects contours on, so we copy the
	// input into inputCopy before detecting contours.  That copy is allocated
	// if necessary (necessary = (a) not allocated or (b) wrong size),
	// so be careful if you pass different sized images to "findContours":
	// there is a performance penalty, but we think there is no memory leak
	// to worry about.  It is better to create multiple contour finders for
	// different sizes, i.e. if you are finding contours in a 640x480 image
	// but also in a 320x240 image, make two ContourFinder objects rather
	// than using one, because you will get penalized less.

	if( inputCopy.width == 0 ) {
		inputCopy.allocate( input.width, input.height );
		inputCopy = input;
	} else {
		if( inputCopy.width == input.width && inputCopy.height == input.height ) 
			inputCopy = input;
		else {
			// we are allocated, but to the wrong size --
			// this has been checked for memory leaks, but a warning:
			// be careful if you call this function with a lot of different
			// sized "input" images! It re-allocates every time
			// a new size is passed in....
			//inputCopy.clear();
			inputCopy.allocate( input.width, input.height );
			inputCopy = input;
		}
	}

	CvSeq* contour_list = NULL;
	contour_storage = cvCreateMemStorage( 1000 );
	storage	= cvCreateMemStorage( 1000 );

	CvContourRetrievalMode  retrieve_mode
        = (bFindHoles) ? CV_RETR_LIST : CV_RETR_EXTERNAL;
	cvFindContours( inputCopy.getCvImage(), contour_storage, &contour_list,
                    sizeof(CvContour), retrieve_mode, bUseApproximation ? CV_CHAIN_APPROX_SIMPLE : CV_CHAIN_APPROX_NONE );
	
	CvSeq* contour_ptr = contour_list;

	nCvSeqsFound = 0;

	// walk the linked list of contours returned by cvFindContours
	while( (contour_ptr != NULL) )  {
		CvBox2D box = cvMinAreaRect2(contour_ptr);
		int objectId; // If the contour is an object, then objectId is its ID
		objectId = (bTrackObjects)? templates->getTemplateId(box.size.width,box.size.height): -1;
		
		if(objectId != -1 ) { // If the blob is an object
			Blob blob		= Blob();
			blob.id			= objectId;
			blob.isObject	= true;
			float area = cvContourArea( contour_ptr, CV_WHOLE_SEQ );

			cvMoments( contour_ptr, myMoments );
		
			// this is if using non-angle bounding box
			CvRect rect	= cvBoundingRect( contour_ptr, 0 );
			blob.boundingRect.x      = rect.x;
			blob.boundingRect.y      = rect.y;
			blob.boundingRect.width  = rect.width;
			blob.boundingRect.height = rect.height;

			//For the angle bounding rectangle
			blob.angleBoundingBox=box;
			blob.angleBoundingRect.x	  = box.center.x;
			blob.angleBoundingRect.y	  = box.center.y;
			blob.angleBoundingRect.width  = box.size.height;
			blob.angleBoundingRect.height = box.size.width;
			blob.angle = box.angle;

			// Temporary initialization to 0; will be calculated afterwards. This prevents sending wrong data.
			blob.D.x = 0;
			blob.D.y = 0;
			blob.maccel = 0;

			// assign other parameters
			blob.area                = fabs(area);
			blob.hole                = area < 0 ? true : false;
			blob.length 			 = cvArcLength(contour_ptr);
		
			blob.centroid.x			 = (myMoments->m10 / myMoments->m00);
			blob.centroid.y 		 = (myMoments->m01 / myMoments->m00);
			blob.lastCentroid.x 	 = 0;
			blob.lastCentroid.y 	 = 0;

			// get the points for the blob:
			CvPoint           pt;
			CvSeqReader       reader;
			cvStartReadSeq( contour_ptr, &reader, 0 );
	
    		for( int j=0; j < contour_ptr->total; j++ ) {
				CV_READ_SEQ_ELEM( pt, reader );
				blob.pts.push_back( ofPoint((float)pt.x, (float)pt.y) );
			}
			blob.nPts = blob.pts.size();

			objects.push_back(blob);
			
		} else if ( bTrackBlobs ) { // SEARCH FOR BLOBS
			float area = fabs( cvContourArea(contour_ptr, CV_WHOLE_SEQ) );
			if( (area > minArea) && (area < maxArea) ) {
				Blob blob=Blob();
				float area = cvContourArea( contour_ptr, CV_WHOLE_SEQ );
				cvMoments( contour_ptr, myMoments );
				
				// this is if using non-angle bounding box
				CvRect rect	= cvBoundingRect( contour_ptr, 0 );
				blob.boundingRect.x      = rect.x;
				blob.boundingRect.y      = rect.y;
				blob.boundingRect.width  = rect.width;
				blob.boundingRect.height = rect.height;
				
				//Angle Bounding rectangle
				blob.angleBoundingRect.x	  = box.center.x;
				blob.angleBoundingRect.y	  = box.center.y;
				blob.angleBoundingRect.width  = box.size.height;
				blob.angleBoundingRect.height = box.size.width;
				blob.angle = box.angle;
				
				// assign other parameters
				blob.area                = fabs(area);
				blob.hole                = area < 0 ? true : false;
				blob.length 			 = cvArcLength(contour_ptr);
				// AlexP
				// The cast to int causes errors in tracking since centroids are calculated in
				// floats and they might land between integer pixel values (which is what we really want).
				// This not only makes tracking more accurate but also more fluid.
				blob.centroid.x			 = (myMoments->m10 / myMoments->m00);
				blob.centroid.y 		 = (myMoments->m01 / myMoments->m00);
				blob.lastCentroid.x 	 = 0;
				blob.lastCentroid.y 	 = 0;
				
				// get the points for the blob:
				CvPoint           pt;
				CvSeqReader       reader;
				cvStartReadSeq( contour_ptr, &reader, 0 );
				
    			for( int j=0; j < min(TOUCH_MAX_CONTOUR_LENGTH, contour_ptr->total); j++ ) {
					CV_READ_SEQ_ELEM( pt, reader );
					blob.pts.push_back( ofPoint((float)pt.x, (float)pt.y) );
				}
				blob.nPts = blob.pts.size();
				
				blobs.push_back(blob);
			}
		} 
		contour_ptr = contour_ptr->h_next;
	}
		
	if ( bTrackFingers ) {  // SEARCH FOR FINGERS
		CvPoint*		PointArray;
		int*			hull;
		int				hullsize;
		
		if (contour_list)
			contour_list = cvApproxPoly(contour_list, sizeof(CvContour), storage, CV_POLY_APPROX_DP, hullPress, 1 );
			
		for( ; contour_list != 0; contour_list = contour_list->h_next ){
			int count = contour_list->total; // number of points in the contour
				
			CvRect rect = cvContourBoundingRect(contour_list, 1);
			
			if ( (rect.width*rect.height) > 300 ){		// Analyze only the larger contours
				CvPoint center;
				center.x = rect.x+rect.width/2;
				center.y = rect.y+rect.height/2;
				
				PointArray = (CvPoint*)malloc( count*sizeof(CvPoint) ); // Alloc memory for contour point set.
				hull = (int*)malloc(sizeof(int)*count);	// Alloc memory for indices of convex hull vertices.
					
				cvCvtSeqToArray(contour_list, PointArray, CV_WHOLE_SEQ); // Get contour point set.
					
				// Find convex hull for current contour.
				cvConvexHull(	PointArray,
								count,
								NULL,
								CV_COUNTER_CLOCKWISE,
								hull,
								&hullsize);
					
				int upper = 640, lower = 0;
				for	(int j=0; j<hullsize; j++) {
					int idx = hull[j]; // corner index
					if (PointArray[idx].y < upper) 
						upper = PointArray[idx].y;
					if (PointArray[idx].y > lower) 
						lower = PointArray[idx].y;
				}
				
				float cutoff = lower - (lower - upper) * 0.1f;
				// find interior angles of hull corners
				for (int j=0; j<hullsize; j++) {
					int idx = hull[j]; // corner index
					int pdx = idx == 0 ? count - 1 : idx - 1; //  predecessor of idx
					int sdx = idx == count - 1 ? 0 : idx + 1; // successor of idx
						
					cv::Point v1 = cv::Point(PointArray[sdx].x - PointArray[idx].x, PointArray[sdx].y - PointArray[idx].y);
					cv::Point v2 = cv::Point(PointArray[pdx].x - PointArray[idx].x, PointArray[pdx].y - PointArray[idx].y);
						
					float angle = acos( (v1.x*v2.x + v1.y*v2.y) / (norm(v1) * norm(v2)) );
						
					// low interior angle + within upper 90% of region -> we got a finger
					if (angle < 1 ){ //&& PointArray[idx].y < cutoff) {
						Blob blob = Blob();
						
						//float area = cvContourArea( contour_ptr, CV_WHOLE_SEQ );
						//cvMoments( contour_ptr, myMoments );
						
						// this is if using non-angle bounding box
						//CvRect rect	= cvBoundingRect( contour_ptr, 0 );
						blob.boundingRect.x      = PointArray[idx].x-5;
						blob.boundingRect.y      = PointArray[idx].y-5;
						blob.boundingRect.width  = 10;
						blob.boundingRect.height = 10;
						
						//Angle Bounding rectangle
						blob.angleBoundingRect.x	  = PointArray[idx].x-5;
						blob.angleBoundingRect.y	  = PointArray[idx].y-5;
						blob.angleBoundingRect.width  = 10;
						blob.angleBoundingRect.height = 10;
						blob.angle = atan2(PointArray[idx].x - center.x , PointArray[idx].y - center.y);
						
						// assign other parameters
						//blob.area                = fabs(area);
						//blob.hole                = area < 0 ? true : false;
						//blob.length 			 = cvArcLength(contour_ptr);
						// AlexP
						// The cast to int causes errors in tracking since centroids are calculated in
						// floats and they migh land between integer pixel values (which is what we really want)
						// This not only makes tracking more accurate but also more fluid
						blob.centroid.x			 = PointArray[idx].x;//(myMoments->m10 / myMoments->m00);
						blob.centroid.y 		 = PointArray[idx].y;//(myMoments->m01 / myMoments->m00);
						blob.lastCentroid.x 	 = 0;
						blob.lastCentroid.y 	 = 0;
						
						fingers.push_back(blob);
					}
				}
				// Free memory.
				free(PointArray);
				free(hull);
			}
		}
	}
	
	nBlobs = blobs.size();
	nFingers = fingers.size();
	nObjects = objects.size();
	
	// Free the storage memory.
	// Warning: do this inside this function otherwise a strange memory leak
	if( contour_storage != NULL )
		cvReleaseMemStorage(&contour_storage);
	
	if( storage != NULL )
		cvReleaseMemStorage(&storage);

	return (bTrackFingers)? nFingers:nBlobs;
}
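A minimal standalone sketch of the same detect-then-iterate pattern used above, written against the plain OpenCV 1.x C API instead of the ofxOpenCv wrappers; the header path, the helper name and the area limits are assumptions for illustration only.

#include <opencv/cv.h>
#include <cmath>
#include <cstdio>

static void listContourCentroids(IplImage* binaryImg, double minArea, double maxArea)
{
    CvMemStorage* storage = cvCreateMemStorage(0);
    CvSeq* contours = NULL;

    // cvFindContours modifies its input, so work on a copy (same caveat as above).
    IplImage* work = cvCloneImage(binaryImg);
    cvFindContours(work, storage, &contours, sizeof(CvContour),
                   CV_RETR_EXTERNAL, CV_CHAIN_APPROX_SIMPLE, cvPoint(0, 0));

    // Walk the singly linked list of top-level contours via h_next.
    for (CvSeq* c = contours; c != NULL; c = c->h_next) {
        double area = fabs(cvContourArea(c, CV_WHOLE_SEQ));
        if (area < minArea || area > maxArea)
            continue;

        CvMoments m;
        cvMoments(c, &m, 0);                      // centroid from raw spatial moments
        printf("area=%.1f centroid=(%.1f, %.1f)\n",
               area, m.m10 / m.m00, m.m01 / m.m00);
    }

    cvReleaseImage(&work);
    cvReleaseMemStorage(&storage);                // releases every contour at once
}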
Example #2
int fmaFitEllipse(void)
{
    long lErrors = 0; 
    CvPoint points[1000];
    CvPoint2D32f fpoints[1000];
    CvBox2D box;
    
    CvMemStorage* storage = cvCreateMemStorage(0);
    CvContour* contour;
    CvSize axis;
    IplImage* img = cvCreateImage( cvSize(200,200), IPL_DEPTH_8U, 1 );
    
    for( int k = 0 ; k < 1000; k++ )
    {
    
        iplSet( img, 0 );

        CvPoint center = { 100, 100 };
    
        double angle = atsInitRandom( 0, 360 );
        axis.height = (int)atsInitRandom( 5, 50 );
        axis.width  = (int)atsInitRandom( 5, 50 );   

        cvEllipse( img, center, axis, angle, 0, 360, 255, -1 );
    
        cvFindContours( img, storage, (CvSeq**)&contour, sizeof(CvContour) );

        cvCvtSeqToArray( (CvSeq*)contour, points );
        for( int i = 0; i < contour->total; i++ )
        {
            fpoints[i].x = (float)points[i].x;
            fpoints[i].y = (float)points[i].y;
        }
    
        cvFitEllipse( fpoints, contour->total, &box );

        //compare boxes
        if( fabs( box.center.x - center.x) > 1 || fabs( box.center.y - center.y ) > 1 )
        {
            lErrors++;
        }             

        if( ( fabs( box.size.width  - (axis.width * 2 ) ) > 4 || 
              fabs( box.size.height - (axis.height * 2) ) > 4 ) &&
            ( fabs( box.size.height - (axis.width * 2 ) ) > 4 || 
              fabs( box.size.width - (axis.height * 2) ) > 4 ) )           
        {
            lErrors++;

            //graphic
            /*IplImage* rgb = cvCreateImage( cvSize(200,200), IPL_DEPTH_8U, 3 );
            iplSet( rgb, 0 );
                        
            cvEllipse( rgb, center, axis, angle, 0, 360, CV_RGB(255,0,0) , 1 );
            
            int window = atsCreateWindow( "proba", cvPoint(0,0), cvSize(200,200) );
            cvEllipse( rgb, center, cvSize( box.size.width/2, box.size.height/2) , -box.angle, 
                        0, 360, CV_RGB(0,255,0) , 1 );

            //draw center 
            cvEllipse( rgb, center, cvSize( 0, 0) , 0, 
                        0, 360, CV_RGB(255,255,255) , -1 );
            
            atsDisplayImage( rgb, window, cvPoint(0,0), cvSize(200,200) );
            
            getch();

            atsDestroyWindow( window );
            
            //one more
            cvFitEllipse( fpoints, contour->total, &box );
          */
        }
    }
    cvReleaseMemStorage( &storage );
    
    if( !lErrors) return trsResult(TRS_OK, "No errors");
    else
        return trsResult(TRS_FAIL, "Fixed %d errors", lErrors);
    
}
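The test above calls the very old cvFitEllipse(points, count, &box) entry point; the sketch below shows the same idea with the later C API call cvFitEllipse2, fitting directly on the contour sequence. The header path and the helper name are assumptions.

#include <opencv/cv.h>
#include <cstdio>

static void fitEllipseToFirstContour(IplImage* binaryImg)
{
    CvMemStorage* storage = cvCreateMemStorage(0);
    CvSeq* contour = NULL;

    IplImage* work = cvCloneImage(binaryImg);      // cvFindContours clobbers its input
    cvFindContours(work, storage, &contour, sizeof(CvContour),
                   CV_RETR_EXTERNAL, CV_CHAIN_APPROX_NONE, cvPoint(0, 0));

    if (contour && contour->total >= 6) {          // cvFitEllipse2 needs at least 5 points
        CvBox2D box = cvFitEllipse2(contour);
        printf("center=(%.1f, %.1f)  size=%.1fx%.1f  angle=%.1f\n",
               box.center.x, box.center.y,
               box.size.width, box.size.height, box.angle);
    }

    cvReleaseImage(&work);
    cvReleaseMemStorage(&storage);
}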
Example #3
/***************************************************************************************\
*
*   This function computes an intermediate polygon between contour1 and contour2.
*
*   The correspondence between points of the contours is specified by corr.
*
*   param is in [0,1]:  0 corresponds to contour1, 1 to contour2.
*
\***************************************************************************************/
CvSeq* icvBlendContours(CvSeq* contour1,
                        CvSeq* contour2,
                        CvSeq* corr,
                        double param,
                        CvMemStorage* storage)
{
    int j;

    CvSeqWriter writer01;
    CvSeqReader reader01;

    int Ni,Nj;              // size of contours
    int i;                  // counter

    CvPoint* point1;        // array of first contour point
    CvPoint* point2;        // array of second contour point

    CvPoint point_output;   // intermediate storage of output point

    int corr_point;

    // Create output sequence.
    CvSeq* output = cvCreateSeq(0,
                                sizeof(CvSeq),
                                sizeof(CvPoint),
                                storage );

    // Find size of contours.
    Ni = contour1->total + 1;
    Nj = contour2->total + 1;

    point1 = (CvPoint* )malloc( Ni*sizeof(CvPoint) );
    point2 = (CvPoint* )malloc( Nj*sizeof(CvPoint) );

    // Initialize arrays of points
    cvCvtSeqToArray( contour1, point1, CV_WHOLE_SEQ );
    cvCvtSeqToArray( contour2, point2, CV_WHOLE_SEQ );

    // First and last point must be equal.
    point1[Ni-1] = point1[0];
    point2[Nj-1] = point2[0];

    // Initializes process of writing to sequence.
    cvStartAppendToSeq( output, &writer01);

    i = Ni-1; // corresponds to points of contour1
    for( ; corr; corr = corr->h_next )
    {
        //Initializes process of sequential reading from sequence
        cvStartReadSeq( corr, &reader01, 0 );

        for(j=0; j < corr->total; j++)
        {
            // Read element from sequence.
            CV_READ_SEQ_ELEM( corr_point, reader01 );

            // Compute point of intermediate polygon.
            point_output.x = cvRound(point1[i].x + param*( point2[corr_point].x - point1[i].x ));
            point_output.y = cvRound(point1[i].y + param*( point2[corr_point].y - point1[i].y ));

            // Write element to sequence.
            CV_WRITE_SEQ_ELEM( point_output, writer01 );
        }
        i--;
    }
    // Updates sequence header.
    cvFlushSeqWriter( &writer01 );

    return output;
}
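The core of icvBlendContours is the per-point linear interpolation; here is a minimal sketch of that formula on plain point arrays (the function name and signature are illustrative, not part of the library).

#include <opencv/cv.h>

// out = p1 + param * (p2 - p1): param = 0 reproduces contour1, param = 1 reproduces contour2.
static void blendPoints(const CvPoint* p1, const CvPoint* p2, CvPoint* out, int n, double param)
{
    for (int k = 0; k < n; k++) {
        out[k].x = cvRound(p1[k].x + param * (p2[k].x - p1[k].x));
        out[k].y = cvRound(p1[k].y + param * (p2[k].y - p1[k].y));
    }
}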
Example #4
void icvCalcContoursCorrespondence(CvSeq* contour1,
                                   CvSeq* contour2,
                                   CvSeq** corr,
                                   CvMemStorage* storage)
{
    int i,j;                    // counter of cycles
    int Ni,Nj;                  // size of contours
    _CvWork** W;                // graph for searching the minimum work

    CvPoint* point1;            // array of first contour point
    CvPoint* point2;            // array of second contour point
    CvPoint2D32f* edges1;       // array of first contour edge
    CvPoint2D32f* edges2;       // array of second contour edge

    //CvPoint null_edge = {0,0};    //
    CvPoint2D32f small_edge;
    //double inf;                   // infinity

    CvSeq* corr01;
    CvSeqWriter writer;

    char path;                  //

    // Find size of contours
    Ni = contour1->total + 1;
    Nj = contour2->total + 1;

    // Create arrays
    W = (_CvWork**)malloc(sizeof(_CvWork*)*Ni);
    for(i=0; i<Ni; i++)
    {
        W[i] = (_CvWork*)malloc(sizeof(_CvWork)*Nj);
    }

    point1 = (CvPoint* )malloc( Ni*sizeof(CvPoint) );
    point2 = (CvPoint* )malloc( Nj*sizeof(CvPoint) );
    edges1 = (CvPoint2D32f* )malloc( (Ni-1)*sizeof(CvPoint2D32f) );
    edges2 = (CvPoint2D32f* )malloc( (Nj-1)*sizeof(CvPoint2D32f) );

    // Initialize arrays of points
    cvCvtSeqToArray( contour1, point1, CV_WHOLE_SEQ );
    cvCvtSeqToArray( contour2, point2, CV_WHOLE_SEQ );

    point1[Ni-1] = point1[0];
    point2[Nj-1] = point2[0];

    for(i=0; i<Ni-1; i++)
    {
        edges1[i].x = (float)( point1[i+1].x - point1[i].x );
        edges1[i].y = (float)( point1[i+1].y - point1[i].y );
    };

    for(i=0; i<Nj-1; i++)
    {
        edges2[i].x = (float)( point2[i+1].x - point2[i].x );
        edges2[i].y = (float)( point2[i+1].y - point2[i].y );
    };

    // Find infinity constant
    //inf=1;
/////////////

//Find min path in graph

/////////////
    W[0][0].w_east      = 0;
    W[0][0].w_south     = 0;
    W[0][0].w_southeast = 0;

    W[1][1].w_southeast = _cvStretchingWork( &edges1[0], &edges2[0] );
    W[1][1].w_east = inf;
    W[1][1].w_south = inf;
    W[1][1].path_se = PATH_TO_SE;

    W[0][1].w_south =  _cvStretchingWork( &null_edge, &edges2[0] );
    W[0][1].path_s = 3;
    W[1][0].w_east =  _cvStretchingWork( &edges2[0], &null_edge );
    W[1][0].path_e = PATH_TO_E;

    for( i=1; i<Ni; i++ )
    {
        W[i][0].w_south     = inf;
        W[i][0].w_southeast = inf;
    }

    for(j=1; j<Nj; j++)
    {
        W[0][j].w_east      = inf;
        W[0][j].w_southeast = inf;
    }

    for(i=2; i<Ni; i++)
    {
        j=0;/////////
        W[i][j].w_east = W[i-1][j].w_east;
        W[i][j].w_east = W[i][j].w_east /*+
            _cvBendingWork( &edges1[i-2], &edges1[i-1], &null_edge, &null_edge, NULL )*/;
        W[i][j].w_east = W[i][j].w_east + _cvStretchingWork( &edges2[i-1], &null_edge );
        W[i][j].path_e = PATH_TO_E;

        j=1;//////////
        W[i][j].w_south = inf;

        _cvWorkEast (i, j, W, edges1, edges2);

        W[i][j].w_southeast = W[i-1][j-1].w_east;
        W[i][j].w_southeast = W[i][j].w_southeast + _cvStretchingWork( &edges1[i-1], &edges2[j-1] );

        small_edge.x = NULL_EDGE*edges1[i-2].x;
        small_edge.y = NULL_EDGE*edges1[i-2].y;

        W[i][j].w_southeast = W[i][j].w_southeast +
                              _cvBendingWork( &edges1[i-2], &edges1[i-1], /*&null_edge*/&small_edge, &edges2[j-1]/*, &edges2[Nj-2]*/);

        W[i][j].path_se = PATH_TO_E;
    }

    for(j=2; j<Nj; j++)
    {
        i=0;//////////
        W[i][j].w_south = W[i][j-1].w_south;
        W[i][j].w_south = W[i][j].w_south + _cvStretchingWork( &null_edge, &edges2[j-1] );
        W[i][j].w_south = W[i][j].w_south /*+
            _cvBendingWork( &null_edge, &null_edge, &edges2[j-2], &edges2[j-1], NULL )*/;
        W[i][j].path_s = 3;

        i=1;///////////
        W[i][j].w_east= inf;

        _cvWorkSouth(i, j, W, edges1, edges2);

        W[i][j].w_southeast = W[i-1][j-1].w_south;
        W[i][j].w_southeast = W[i][j].w_southeast + _cvStretchingWork( &edges1[i-1], &edges2[j-1] );

        small_edge.x = NULL_EDGE*edges2[j-2].x;
        small_edge.y = NULL_EDGE*edges2[j-2].y;

        W[i][j].w_southeast = W[i][j].w_southeast +
                              _cvBendingWork( /*&null_edge*/&small_edge, &edges1[i-1], &edges2[j-2], &edges2[j-1]/*, &edges1[Ni-2]*/);
        W[i][j].path_se = 3;
    }

    for(i=2; i<Ni; i++)
        for(j=2; j<Nj; j++)
        {
            _cvWorkEast     (i, j, W, edges1, edges2);
            _cvWorkSouthEast(i, j, W, edges1, edges2);
            _cvWorkSouth    (i, j, W, edges1, edges2);
        }

    i=Ni-1;
    j=Nj-1;

    *corr = cvCreateSeq(0,
                        sizeof(CvSeq),
                        sizeof(int),
                        storage );

    corr01 = *corr;
    cvStartAppendToSeq( corr01, &writer );
    if( W[i][j].w_east > W[i][j].w_southeast )
    {
        if( W[i][j].w_southeast > W[i][j].w_south )
        {
            path = 3;
        }
        else
        {
            path = PATH_TO_SE;
        }
    }
    else
    {
        if( W[i][j].w_east < W[i][j].w_south )
        {
            path = PATH_TO_E;
        }
        else
        {
            path = 3;
        }
    }
    do
    {
        CV_WRITE_SEQ_ELEM( j, writer );

        switch( path )
        {
        case PATH_TO_E:
            path = W[i][j].path_e;
            i--;
            cvFlushSeqWriter( &writer );
            corr01->h_next = cvCreateSeq(   0,
                                            sizeof(CvSeq),
                                            sizeof(int),
                                            storage );
            corr01 = corr01->h_next;
            cvStartAppendToSeq( corr01, &writer );
            break;

        case PATH_TO_SE:
            path = W[i][j].path_se;
            j--;
            i--;
            cvFlushSeqWriter( &writer );
            corr01->h_next = cvCreateSeq(   0,
                                            sizeof(CvSeq),
                                            sizeof(int),
                                            storage );
            corr01 = corr01->h_next;
            cvStartAppendToSeq( corr01, &writer );
            break;

        case 3:
            path = W[i][j].path_s;
            j--;
            break;
        }

    } while( (i>=0) && (j>=0) );
    cvFlushSeqWriter( &writer );

    // Free memory
    for(i=1; i<Ni; i++)
    {
        free(W[i]);
    }
    free(W);
    free(point1);
    free(point2);
    free(edges1);
    free(edges2);
}
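The table fill above is an edit-distance style dynamic program: W[i][j] holds the minimal work needed to align the first i edges of contour1 with the first j edges of contour2, reached by an east move (advance contour1 only), a south move (advance contour2 only) or a southeast move (match one edge against one edge). The sketch below shows that recurrence with a placeholder cost in place of _cvStretchingWork and with the bending term omitted; it is a simplified illustration, not the library code.

#include <vector>
#include <algorithm>
#include <cmath>

static double stretchCost(double len1, double len2)   // placeholder for _cvStretchingWork
{
    return std::fabs(len1 - len2);
}

// e1, e2: edge lengths of the two contours.  Returns the minimal total alignment work.
static double alignEdges(const std::vector<double>& e1, const std::vector<double>& e2)
{
    const double inf = 1e30;
    const size_t Ni = e1.size() + 1, Nj = e2.size() + 1;
    std::vector< std::vector<double> > W(Ni, std::vector<double>(Nj, inf));
    W[0][0] = 0;

    for (size_t i = 0; i < Ni; i++)
        for (size_t j = 0; j < Nj; j++) {
            if (i > 0)           // east: edge of contour1 matched against a null edge
                W[i][j] = std::min(W[i][j], W[i-1][j] + stretchCost(e1[i-1], 0));
            if (j > 0)           // south: edge of contour2 matched against a null edge
                W[i][j] = std::min(W[i][j], W[i][j-1] + stretchCost(0, e2[j-1]));
            if (i > 0 && j > 0)  // southeast: edge matched against edge
                W[i][j] = std::min(W[i][j], W[i-1][j-1] + stretchCost(e1[i-1], e2[j-1]));
        }

    return W[Ni-1][Nj-1];        // the real code also backtracks the path to build "corr"
}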
//--------------------------------------------------------------------------------
int ofxContourFinder::findContours(	ofxCvGrayscaleImage&  input,
									int minArea,
									int maxArea,
									int nConsidered,
									double hullPress,	
									bool bFindHoles,
									bool bUseApproximation) {
	// get width/height disregarding ROI
    IplImage* ipltemp = input.getCvImage();
    width = ipltemp->width;
    height = ipltemp->height;
	reset();

	// OpenCV will clobber the image it detects contours on, so we copy the
	// input into inputCopy before detecting contours.  That copy is allocated
	// if necessary (necessary = (a) not allocated or (b) wrong size),
	// so be careful if you pass different sized images to "findContours":
	// there is a performance penalty, but we think there is no memory leak
	// to worry about.  It is better to create multiple contour finders for
	// different sizes, i.e. if you are finding contours in a 640x480 image
	// but also in a 320x240 image, make two ofxContourFinder objects rather
	// than using one, because you will get penalized less.

	if( inputCopy.width == 0 ) {
		inputCopy.allocate( input.width, input.height );
		inputCopy = input;
	} else {
		if( inputCopy.width == input.width && inputCopy.height == input.height ) 
			inputCopy = input;
		else {
			// we are allocated, but to the wrong size --
			// this has been checked for memory leaks, but a warning:
			// be careful if you call this function with a lot of different
			// sized "input" images! It re-allocates every time
			// a new size is passed in....
			inputCopy.clear();
			inputCopy.allocate( input.width, input.height );
			inputCopy = input;
		}
	}

	CvSeq* contour_list = NULL;
	contour_storage = cvCreateMemStorage( 1000 );
	storage	= cvCreateMemStorage( 1000 );

	CvContourRetrievalMode  retrieve_mode
        = (bFindHoles) ? CV_RETR_LIST : CV_RETR_EXTERNAL;
	cvFindContours( inputCopy.getCvImage(), contour_storage, &contour_list,
                    sizeof(CvContour), retrieve_mode, bUseApproximation ? CV_CHAIN_APPROX_SIMPLE : CV_CHAIN_APPROX_NONE );
	
	CvSeq* contour_ptr = contour_list;

	nCvSeqsFound = 0;

	// walk the linked list of contours returned by cvFindContours
	while( (contour_ptr != NULL) )  {
		CvBox2D box=cvMinAreaRect2(contour_ptr);
		
        float area = fabs( cvContourArea(contour_ptr, CV_WHOLE_SEQ) );
        if( (area > minArea) && (area < maxArea) ) {
            ofxBlob blob = ofxBlob();
            float area = cvContourArea( contour_ptr, CV_WHOLE_SEQ);
            cvMoments( contour_ptr, myMoments );
            
            // this is if using non-angle bounding box
            CvRect rect	= cvBoundingRect( contour_ptr, 0 );
            blob.boundingRect.x      = rect.x/width;
            blob.boundingRect.y      = rect.y/height;
            blob.boundingRect.width  = rect.width/width;
            blob.boundingRect.height = rect.height/height;
            
            //Angle Bounding rectangle
            blob.angleBoundingRect.x	  = box.center.x/width;
            blob.angleBoundingRect.y	  = box.center.y/height;
            blob.angleBoundingRect.width  = box.size.height/width;
            blob.angleBoundingRect.height = box.size.width/height;
            blob.angle = box.angle;
            
            // assign other parameters
            blob.area                = fabs(area);
            blob.hole                = area < 0 ? true : false;
            blob.length 			 = cvArcLength(contour_ptr);
            
            // The cast to int causes errors in tracking since centroids are calculated in
            // floats and they might land between integer pixel values (which is what we really want).
            // This not only makes tracking more accurate but also more fluid.
            blob.centroid.x			 = (myMoments->m10 / myMoments->m00) / width;
            blob.centroid.y 		 = (myMoments->m01 / myMoments->m00) / height;
            blob.lastCentroid.x 	 = 0;
            blob.lastCentroid.y 	 = 0;
            
            if (blob.nFingers != 0){
                
                blob.nFingers = 0;
                blob.fingers.clear();
            }
            
            // get the points for the blob:
            CvPoint           pt;
            CvSeqReader       reader;
            cvStartReadSeq( contour_ptr, &reader, 0 );
            
            for( int j=0; j < min(TOUCH_MAX_CONTOUR_LENGTH, contour_ptr->total); j++ ) {
                CV_READ_SEQ_ELEM( pt, reader );
                blob.pts.push_back( ofPoint((float)pt.x / width, (float)pt.y / height) );
            }
            blob.nPts = blob.pts.size();
            
            // Check if it's a hand and if it has fingers
            //
            if (area > 5000){
                CvPoint*    PointArray;
                int*        hull;
                int         hullsize;
                
                CvSeq*  contourAprox = cvApproxPoly(contour_ptr, sizeof(CvContour), storage, CV_POLY_APPROX_DP, hullPress, 1 );
                int count = contourAprox->total; // number of points in the contour
                    
        
                PointArray = (CvPoint*)malloc( count*sizeof(CvPoint) ); // Alloc memory for contour point set.
                hull = (int*)malloc(sizeof(int)*count);	// Alloc memory for indices of convex hull vertices.
                
                cvCvtSeqToArray(contourAprox, PointArray, CV_WHOLE_SEQ); // Get contour point set.
                
                // Find convex hull for current contour.
                cvConvexHull( PointArray, count, NULL, CV_COUNTER_CLOCKWISE, hull, &hullsize);
                
                int upper = height, lower = 0;   // track min/max y of the hull points (upper starts at the image height)
                for	(int j=0; j<hullsize; j++) {
                    int idx = hull[j]; // corner index
                    if (PointArray[idx].y < upper) 
                        upper = PointArray[idx].y;
                    if (PointArray[idx].y > lower) 
                        lower = PointArray[idx].y;
                }
                
                float cutoff = lower - (lower - upper) * 0.1f;
                // find interior angles of hull corners
                for (int j=0; j < hullsize; j++) {
                    int idx = hull[j]; // corner index
                    int pdx = idx == 0 ? count - 1 : idx - 1; //  predecessor of idx
                    int sdx = idx == count - 1 ? 0 : idx + 1; // successor of idx
                    
                    cv::Point v1 = cv::Point(PointArray[sdx].x - PointArray[idx].x, PointArray[sdx].y - PointArray[idx].y);
                    cv::Point v2 = cv::Point(PointArray[pdx].x - PointArray[idx].x, PointArray[pdx].y - PointArray[idx].y);
                    
                    float angle = acos( (v1.x*v2.x + v1.y*v2.y) / (norm(v1) * norm(v2)) );
                    
                    // We got a finger
                    //
                    if (angle < 1 ){
                        ofPoint possibleFinger = ofPoint((float)PointArray[idx].x / width, 
                                                         (float)PointArray[idx].y / height);
                        
                        blob.nFingers++;
                        blob.fingers.push_back( possibleFinger );
                    }
                }
                
                
                if ( blob.nFingers > 0 ){
                    // because that probably means it's a hand
                    ofVec2f fingersAverage;
                    for (int j = 0; j < blob.fingers.size(); j++){
                        fingersAverage += blob.fingers[j];
                    }
                    
                    fingersAverage /= blob.fingers.size();
                    
                    if (blob.gotFingers){
                        blob.palm = (blob.palm + fingersAverage)*0.5;
                        //blob.palm = fingersAverage;
                    } else {
                        blob.palm = fingersAverage;
                        blob.gotFingers = true;   // once fingers have been found it will remember (gotFingers stays set)
                    }
                }
                
                // Free memory.
                free(PointArray);
                free(hull);
            }
            
            blobs.push_back(blob);
        }
        contour_ptr = contour_ptr->h_next;
    }
    
	nBlobs = blobs.size();
	
	// Free the storage memory.
	// Warning: do this inside this function otherwise a strange memory leak
	if( contour_storage != NULL )
		cvReleaseMemStorage(&contour_storage);
	
	if( storage != NULL )
		cvReleaseMemStorage(&storage);
    
    // note: contour_ptr is NULL here and the contours live in contour_storage, so there is nothing to free

	return nBlobs;
}
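Both finders above decide that a convex-hull corner is a fingertip candidate when the interior angle at that corner, computed from the dot product of the two adjacent edge vectors, is below roughly 1 radian. Below is a small self-contained version of that test; the function name is illustrative.

#include <opencv/cv.h>
#include <cmath>

// Interior angle, in radians, at point b of the polyline a-b-c.
static float interiorAngle(CvPoint a, CvPoint b, CvPoint c)
{
    float v1x = (float)(a.x - b.x), v1y = (float)(a.y - b.y);
    float v2x = (float)(c.x - b.x), v2y = (float)(c.y - b.y);
    float n1 = std::sqrt(v1x * v1x + v1y * v1y);
    float n2 = std::sqrt(v2x * v2x + v2y * v2y);
    if (n1 == 0.f || n2 == 0.f)
        return 0.f;                                  // degenerate corner
    return std::acos((v1x * v2x + v1y * v2y) / (n1 * n2));
}

// Usage: a hull corner with interiorAngle(...) < 1.0f (about 57 degrees) is a sharp
// spike on the hull and therefore a likely fingertip.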
static int aGestureRecognition(void)
{       
    IplImage *image, *imagew, *image_rez, *mask_rez, *image_hsv, *img_p[2],*img_v,
             *init_mask_ver = 0, *final_mask_ver = 0;
    CvPoint3D32f *pp, p;

    CvPoint pt;
    CvSize2D32f fsize;
    CvPoint3D32f center, cf;
    IplImage *image_mask, *image_maskw;
    
    CvSize size;
    CvHistogram *hist, *hist_mask;

    int width, height;
    int k_points, k_indexs;
    int warpFlag, interpolate;

    int hdim[2] = {20, 20};
    
    double coeffs[3][3], rect[2][2], rez = 0, eps_rez = 2.5, rez_h;
    float *thresh[2];
    float hv[3];
    
    float reps, aeps, ww;
    float line[6], in[3][3], h[3][3];
    float cx, cy, fx, fy;

    static char num[4]; 
    
    char *name_image;  
    char *name_range_image;
    char *name_verify_data;
    char *name_init_mask_very;
    char *name_final_mask_very;

    CvSeq *numbers;
    CvSeq *points;
    CvSeq *indexs;
        
    CvMemStorage *storage;
    CvRect hand_roi, hand_roi_trans;
    
    int i,j, lsize, block_size = 1000, flag;
    int code;

    FILE *filin, *fil_ver;

/* read tests params */

    code = TRS_OK;

/*  define input information    */
    strcpy (num, "001"); 

    lsize = strlen(data_path)+12;
    name_verify_data = (char*)trsmAlloc(lsize);
    name_range_image = (char*)trsmAlloc(lsize);
    name_image = (char*)trsmAlloc(lsize);

    name_init_mask_very = (char*)trsmAlloc(lsize);
    name_final_mask_very = (char*)trsmAlloc(lsize);

/*  define input range_image file path        */
    strcpy(name_range_image, data_path);
    strcat(name_range_image, "rpts");
    strcat(name_range_image, num);
    strcat(name_range_image, ".txt");

/*  define input image file path        */
    strcpy(name_image, data_path);
    strcat(name_image, "real");
    strcat(name_image, num);
    strcat(name_image, ".bmp");

/*  define verify data file path        */
    strcpy(name_verify_data, data_path);
    strcat(name_verify_data, "very");
    strcat(name_verify_data, num);
    strcat(name_verify_data, ".txt");

/*  define verify init mask file path    */
    strcpy(name_init_mask_very, data_path);
    strcat(name_init_mask_very, "imas");
    strcat(name_init_mask_very, num);
    strcat(name_init_mask_very, ".bmp");

/*  define verify final mask file path    */
    strcpy(name_final_mask_very, data_path);
    strcat(name_final_mask_very, "fmas");
    strcat(name_final_mask_very, num);
    strcat(name_final_mask_very, ".bmp");

    filin = fopen(name_range_image,"r");
    fil_ver = fopen(name_verify_data,"r");

    fscanf( filin, "\n%d %d\n", &width, &height);
    printf("width=%d height=%d  reading testing data...", width,height);

    OPENCV_CALL( storage = cvCreateMemStorage ( block_size ) );
    OPENCV_CALL( points = cvCreateSeq( CV_SEQ_POINT3D_SET, sizeof(CvSeq),
                            sizeof(CvPoint3D32f), storage ) );
    OPENCV_CALL (indexs = cvCreateSeq( CV_SEQ_POINT_SET, sizeof(CvSeq),
                            sizeof(CvPoint), storage ) );

    pp = 0;
    
/*  read input image from file   */   
    image = atsCreateImageFromFile( name_image );
    if(image == NULL)   {code = TRS_FAIL; goto m_exit;}

/*  read input 3D points from input file        */
    for (i = 0; i < height; i++)
    {
        for (j = 0; j < width; j++)    
        {
            fscanf( filin, "%f %f %f\n", &p.x, &p.y, &p.z);
            if(/*p.x != 0 || p.y != 0 ||*/ p.z != 0)
            {
                OPENCV_CALL(cvSeqPush(points, &p));
                pt.x = j; pt.y = i;
                OPENCV_CALL(cvSeqPush(indexs, &pt));
                               
            }
        }
    }

    k_points = points->total;
    k_indexs = indexs->total;

/*   convert sequence to array          */
    pp = (CvPoint3D32f*)trsmAlloc(k_points * sizeof(CvPoint3D32f));
    OPENCV_CALL(cvCvtSeqToArray(points, pp ));

/*  find 3D-line      */

    reps = (float)0.1;
    aeps = (float)0.1;
    ww = (float)0.08;

    OPENCV_CALL( cvFitLine3D(pp, k_points, CV_DIST_WELSCH, &ww, reps, aeps, line ));

/*  find hand location      */
    flag = -1;
    fsize.width = fsize.height = (float)0.22;  //   (hand size in m)

    numbers = NULL;
    OPENCV_CALL( cvFindHandRegion (pp, k_points, indexs,line, fsize,
                      flag,&center,storage, &numbers));

/*   read verify data    */
    fscanf( fil_ver, "%f %f %f\n", &cf.x, &cf.y, &cf.z);
    rez+= cvSqrt((center.x - cf.x)*(center.x - cf.x)+(center.y - cf.y)*(center.y - cf.y)+
         (center.z - cf.z)*(center.z - cf.z))/3.;
    
/*    create hand mask            */
    size.height = height;
    size.width = width;
    OPENCV_CALL( image_mask = cvCreateImage(size, IPL_DEPTH_8U, 1) ); 

    OPENCV_CALL( cvCreateHandMask(numbers, image_mask, &hand_roi) );

/*  read verify initial image mask                  */
    init_mask_ver = atsCreateImageFromFile( name_init_mask_very );
    if(init_mask_ver == NULL)   {code = TRS_FAIL; goto m_exit;}
    
    rez+= iplNorm(init_mask_ver, image_mask, IPL_L2) / (width*height+0.);

/*  calculate homographic transformation matrix            */
    cx = (float)(width / 2.);
    cy = (float)(height / 2.);
    fx = fy = (float)571.2048;

/* define intrinsic camera parameters                      */
    in[0][1] = in[1][0] = in[2][0] = in[2][1] = 0;
    in[0][0] = fx; in[0][2] = cx;
    in[1][1] = fy; in[1][2] = cy;
    in[2][2] = 1;

    OPENCV_CALL( cvCalcImageHomography(line, &center, in, h) );
    
    rez_h = 0;
    for(i=0;i<3;i++)
    {
        fscanf( fil_ver, "%f %f %f\n", &hv[0], &hv[1], &hv[2]);
        for(j=0;j<3;j++)
        {
            rez_h+=(hv[j] - h[i][j])*(hv[j] - h[i][j]);
        }
    }
    rez+=sqrt(rez_h)/9.;

/*   image unwarping         */
    size.width = image->width; 
    size.height = image->height; 
    OPENCV_CALL( imagew = cvCreateImage(size, IPL_DEPTH_8U,3) );
    OPENCV_CALL( image_maskw = cvCreateImage(size, IPL_DEPTH_8U,1) );

    iplSet(image_maskw, 0);

    cvSetImageROI(image, hand_roi);
    cvSetImageROI(image_mask, hand_roi);

/* convert homographic transformation matrix from float to double      */
    for(i=0;i<3;i++)
        for(j=0;j<3;j++)
            coeffs[i][j] = (double)h[i][j];

/*  get bounding rectangle for image ROI         */
    iplGetPerspectiveBound(image, coeffs, rect);

    width = (int)(rect[1][0] - rect[0][0]);
    height = (int)(rect[1][1] - rect[0][1]);
    hand_roi_trans.x = (int)rect[0][0];hand_roi_trans.y = (int)rect[0][1];
    hand_roi_trans.width = width; hand_roi_trans.height = height;

    cvMaxRect(&hand_roi, &hand_roi_trans, &hand_roi);
    iplSetROI((IplROI*)image->roi, 0, hand_roi.x, hand_roi.y,
               hand_roi.width,hand_roi.height);
    iplSetROI((IplROI*)image_mask->roi, 0, hand_roi.x, hand_roi.y,
                hand_roi.width,hand_roi.height);

    warpFlag = IPL_WARP_R_TO_Q;
/*    interpolate = IPL_INTER_CUBIC;   */
/*    interpolate = IPL_INTER_NN;      */
    interpolate = IPL_INTER_LINEAR;
    iplWarpPerspective(image, imagew, coeffs, warpFlag, interpolate);
    iplWarpPerspective(image_mask, image_maskw, coeffs, warpFlag, IPL_INTER_NN);  
    
/*  set new image and mask ROI after transformation        */
    iplSetROI((IplROI*)imagew->roi,0, (int)rect[0][0], (int)rect[0][1],(int)width,(int)height);
    iplSetROI((IplROI*)image_maskw->roi,0, (int)rect[0][0], (int)rect[0][1],(int)width,(int)height);

/*  copy image ROI to new image and resize        */
    size.width = width; size.height = height;
    image_rez = cvCreateImage(size, IPL_DEPTH_8U,3);
    mask_rez = cvCreateImage(size, IPL_DEPTH_8U,1);
 
    iplCopy(imagew,image_rez);
    iplCopy(image_maskw,mask_rez);
    
/* convert result image from RGB to HSV               */
    image_hsv = iplCreateImageHeader(3, 0, IPL_DEPTH_8U, "HSV", "HSV",
                                   IPL_DATA_ORDER_PIXEL, IPL_ORIGIN_TL,IPL_ALIGN_DWORD,
                                   image_rez->width, image_rez->height, NULL, NULL, NULL, NULL);
    iplAllocateImage(image_hsv, 0, 0 ); 
    strcpy(image_rez->colorModel, "RGB");
    strcpy(image_rez->channelSeq, "RGB");
    image_rez->roi = NULL;

    iplRGB2HSV(image_rez, image_hsv);

/* convert to three image planes                      */
    img_p[0] = cvCreateImage(size, IPL_DEPTH_8U,1);
    img_p[1] = cvCreateImage(size, IPL_DEPTH_8U,1);
    img_v = cvCreateImage(size, IPL_DEPTH_8U,1);

    cvCvtPixToPlane(image_hsv, img_p[0], img_p[1], img_v, NULL);
   
/*  calculate histograms                */
    hist = cvCreateHist ( 2, hdim, CV_HIST_ARRAY);
    hist_mask = cvCreateHist ( 2, hdim, CV_HIST_ARRAY);

/*  install histogram threshold         */
    thresh[0] = (float*) trsmAlloc(2*sizeof(float));
    thresh[1] = (float*) trsmAlloc(2*sizeof(float));

    thresh[0][0] = thresh[1][0] = -0.5;
    thresh[0][1] = thresh[1][1] = 255.5;
    cvSetHistThresh( hist, thresh, 1);
    cvSetHistThresh( hist_mask, thresh, 1);

    cvCalcHist(img_p, hist, 0);
        
    cvCalcHistMask(img_p, mask_rez, hist_mask, 0);
            
    cvCalcProbDensity(hist, hist_mask, hist_mask);

    cvCalcBackProject( img_p, mask_rez, hist_mask ); 

/*  read verify final image mask                  */
    final_mask_ver = atsCreateImageFromFile( name_final_mask_very );
    if(final_mask_ver == NULL)   {code = TRS_FAIL; goto m_exit;}

    rez+= iplNorm(final_mask_ver, mask_rez, IPL_L2) / (width*height+0.);

    trsWrite( ATS_CON | ATS_SUM, "\n gesture recognition \n");
    trsWrite( ATS_CON | ATS_SUM, "result testing error = %f \n",rez);

    if(rez > eps_rez) code = TRS_FAIL;
    else code = TRS_OK;
    
m_exit:    

    cvReleaseImage(&image_mask);
    cvReleaseImage(&mask_rez);
    cvReleaseImage(&image_rez);
    atsReleaseImage(final_mask_ver);
    atsReleaseImage(init_mask_ver);

    cvReleaseImage(&imagew);
    cvReleaseImage(&image_maskw); 

    cvReleaseImage(&img_p[0]);
    cvReleaseImage(&img_p[1]);
    cvReleaseImage(&img_v);
 
    cvReleaseHist( &hist);
    cvReleaseHist( &hist_mask);
    
    cvReleaseMemStorage ( &storage );

    trsFree(pp);
    trsFree(name_final_mask_very);
    trsFree(name_init_mask_very);
    trsFree(name_image);
    trsFree(name_range_image);
    trsFree(name_verify_data);

    fclose(filin);
    fclose(fil_ver);

    
/*    _getch();       */
    return code;
}
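The histogram part of the test above relies on ipl* and ats* test-suite helpers; the sketch below reproduces just the histogram back-projection step with standard C-API calls only. The plane images are assumed to be single-channel 8-bit hue and saturation planes, and the bin counts and ranges are illustrative.

#include <opencv/cv.h>

static void backprojectHS(IplImage* hPlane, IplImage* sPlane,
                          IplImage* mask, IplImage* result)
{
    int   hdim[2]   = { 20, 20 };
    float hRange[]  = { 0, 180 };      // 8-bit hue range
    float sRange[]  = { 0, 256 };
    float* ranges[] = { hRange, sRange };

    CvHistogram* hist = cvCreateHist(2, hdim, CV_HIST_ARRAY, ranges, 1);
    IplImage* planes[2] = { hPlane, sPlane };

    cvCalcHist(planes, hist, 0, mask);        // histogram of the masked (hand) region
    cvNormalizeHist(hist, 255);
    cvCalcBackProject(planes, result, hist);  // per-pixel likelihood of belonging to it

    cvReleaseHist(&hist);
}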
Example #7
void find_convex_hull(struct ctx *ctx)
{
    CvSeq *defects;
    CvConvexityDefect *defect_array;
    int i;
    int x = 0, y = 0;
    int dist = 0;

    ctx->hull = NULL;

    if (!ctx->contour)
        return;

    ctx->hull = cvConvexHull2(ctx->contour, ctx->hull_st, CV_CLOCKWISE, 0);

    if (ctx->hull)
    {

        /* Get convexity defects of contour w.r.t. the convex hull */
        defects = cvConvexityDefects(ctx->contour, ctx->hull,
                                     ctx->defects_st);

        if (defects && defects->total)
        {
            defect_array = (CvConvexityDefect*)calloc(defects->total,
                           sizeof(CvConvexityDefect));
            cvCvtSeqToArray(defects, defect_array, CV_WHOLE_SEQ);

            /* Average depth points to get hand center */
            for (i = 0; i < defects->total && i < NUM_DEFECTS; i++)
            {
                x += defect_array[i].depth_point->x;
                y += defect_array[i].depth_point->y;

                ctx->defects[i] = cvPoint(defect_array[i].depth_point->x,
                                          defect_array[i].depth_point->y);
            }

            x /= defects->total;
            y /= defects->total;

            ctx->num_defects = defects->total;
            ctx->hand_center = cvPoint(x, y);

            /* Compute hand radius as mean of distances of
            defects' depth point to hand center */
            for (i = 0; i < defects->total; i++)
            {
                int d = (x - defect_array[i].depth_point->x) *
                        (x - defect_array[i].depth_point->x) +
                        (y - defect_array[i].depth_point->y) *
                        (y - defect_array[i].depth_point->y);

                dist += sqrt(d);
            }

            ctx->hand_radius = dist / defects->total;
            free(defect_array);
        }
    }
}
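A hedged companion sketch to find_convex_hull(): compute the hull as pointers into the contour (the form cvConvexityDefects expects), take the convexity defects, and draw each defect's start, end and depth points. The storage handling and drawing colours are illustrative choices.

#include <opencv/cv.h>

static void drawConvexityDefects(CvSeq* contour, CvMemStorage* storage, IplImage* canvas)
{
    CvSeq* hull    = cvConvexHull2(contour, storage, CV_CLOCKWISE, 0);  // hull as pointers, not points
    CvSeq* defects = cvConvexityDefects(contour, hull, storage);

    for (int i = 0; defects && i < defects->total; i++) {
        CvConvexityDefect* d = (CvConvexityDefect*)cvGetSeqElem(defects, i);
        cvCircle(canvas, *d->start,       3, CV_RGB(0, 255, 0), -1, 8, 0);
        cvCircle(canvas, *d->end,         3, CV_RGB(0, 0, 255), -1, 8, 0);
        cvCircle(canvas, *d->depth_point, 3, CV_RGB(255, 0, 0), -1, 8, 0);
    }
}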
/*F///////////////////////////////////////////////////////////////////////////////////////
//    Name: icvCreateContourTree
//    Purpose:
//    Create binary tree representation for the contour 
//    Context:
//    Parameters:
//      contour - pointer to input contour object.
//      storage - pointer to the current storage block
//      tree   -  output pointer to the binary tree representation 
//      threshold - threshold for the binary tree building 
//
//F*/
static CvStatus
icvCreateContourTree( const CvSeq * contour, CvMemStorage * storage,
                      CvContourTree ** tree, double threshold )
{
    CvPoint *pt_p;              /*  pointer to previous points   */
    CvPoint *pt_n;              /*  pointer to next points      */
    CvPoint *pt1, *pt2;         /*  pointer to current points   */

    CvPoint t, tp1, tp2, tp3, tn1, tn2, tn3;
    int lpt, flag, i, j, i_tree, j_1, j_3, i_buf;
    double s, sp1, sp2, sn1, sn2, s_c, sp1_c, sp2_c, sn1_c, sn2_c, h, hp1, hp2, hn1, hn2,
        a, ap1, ap2, an1, an2, b, bp1, bp2, bn1, bn2;
    double a_s_c, a_sp1_c;

    _CvTrianAttr **ptr_p, **ptr_n, **ptr1, **ptr2;      /*  pointers to pointers of triangles  */
    _CvTrianAttr *cur_adr;

    int *num_p, *num_n, *num1, *num2;   /*   numbers of input contour points   */
    int nm, nmp1, nmp2, nmp3, nmn1, nmn2, nmn3;
    int seq_flags = 1, i_end, prev_null, prev2_null;
    double koef = 1.5;
    double eps = 1.e-7;
    double e;
    CvStatus status;
    int hearder_size;
    _CvTrianAttr tree_one, tree_two, *tree_end, *tree_root;

    CvSeqWriter writer;

    assert( contour != NULL && contour->total >= 4 );
    status = CV_OK;

    if( contour == NULL )
        return CV_NULLPTR_ERR;
    if( contour->total < 4 )
        return CV_BADSIZE_ERR;

    if( !CV_IS_SEQ_POINT_SET( contour ))
        return CV_BADFLAG_ERR;


/*   Convert Sequence to array    */
    lpt = contour->total;
    pt_p = pt_n = NULL;
    num_p = num_n = NULL;
    ptr_p = ptr_n = ptr1 = ptr2 = NULL;
    tree_end = NULL;

    pt_p = (CvPoint *) cvAlloc( lpt * sizeof( CvPoint ));
    pt_n = (CvPoint *) cvAlloc( lpt * sizeof( CvPoint ));

    num_p = (int *) cvAlloc( lpt * sizeof( int ));
    num_n = (int *) cvAlloc( lpt * sizeof( int ));

    hearder_size = sizeof( CvContourTree );
    seq_flags = CV_SEQ_POLYGON_TREE;
    cvStartWriteSeq( seq_flags, hearder_size, sizeof( _CvTrianAttr ), storage, &writer );

    ptr_p = (_CvTrianAttr **) cvAlloc( lpt * sizeof( _CvTrianAttr * ));
    ptr_n = (_CvTrianAttr **) cvAlloc( lpt * sizeof( _CvTrianAttr * ));

    memset( ptr_p, 0, lpt * sizeof( _CvTrianAttr * ));
    memset( ptr_n, 0, lpt * sizeof( _CvTrianAttr * ));

    if( pt_p == NULL || pt_n == NULL )
        return CV_OUTOFMEM_ERR;
    if( ptr_p == NULL || ptr_n == NULL )
        return CV_OUTOFMEM_ERR;

/*     write field for the binary tree root   */
/*  start_writer = writer;   */

    tree_one.pt.x = tree_one.pt.y = 0;
    tree_one.sign = 0;
    tree_one.area = 0;
    tree_one.r1 = tree_one.r2 = 0;
    tree_one.next_v1 = tree_one.next_v2 = tree_one.prev_v = NULL;

    CV_WRITE_SEQ_ELEM( tree_one, writer );
    tree_root = (_CvTrianAttr *) (writer.ptr - writer.seq->elem_size);

    if( cvCvtSeqToArray( contour, (char *) pt_p ) == (char *) contour )
        return CV_BADPOINT_ERR;

    for( i = 0; i < lpt; i++ )
        num_p[i] = i;

    i = lpt;
    flag = 0;
    i_tree = 0;
    e = 20.;                    /*  initial threshold value   */
    ptr1 = ptr_p;
    ptr2 = ptr_n;
    pt1 = pt_p;
    pt2 = pt_n;
    num1 = num_p;
    num2 = num_n;
/*  binary tree construction    */
    while( i > 4 )
    {
        if( flag == 0 )
        {
            ptr1 = ptr_p;
            ptr2 = ptr_n;
            pt1 = pt_p;
            pt2 = pt_n;
            num1 = num_p;
            num2 = num_n;
            flag = 1;
        }
        else
        {
            ptr1 = ptr_n;
            ptr2 = ptr_p;
            pt1 = pt_n;
            pt2 = pt_p;
            num1 = num_n;
            num2 = num_p;
            flag = 0;
        }
        t = pt1[0];
        nm = num1[0];
        tp1 = pt1[i - 1];
        nmp1 = num1[i - 1];
        tp2 = pt1[i - 2];
        nmp2 = num1[i - 2];
        tp3 = pt1[i - 3];
        nmp3 = num1[i - 3];
        tn1 = pt1[1];
        nmn1 = num1[1];
        tn2 = pt1[2];
        nmn2 = num1[2];

        i_buf = 0;
        i_end = -1;
        CV_MATCH_CHECK( status,
                        icvCalcTriAttr( contour, t, tp1, nmp1, tn1, nmn1, &s, &s_c, &h, &a,
                                        &b ));
        CV_MATCH_CHECK( status,
                        icvCalcTriAttr( contour, tp1, tp2, nmp2, t, nm, &sp1, &sp1_c, &hp1,
                                        &ap1, &bp1 ));
        CV_MATCH_CHECK( status,
                        icvCalcTriAttr( contour, tp2, tp3, nmp3, tp1, nmp1, &sp2, &sp2_c, &hp2,
                                        &ap2, &bp2 ));
        CV_MATCH_CHECK( status,
                        icvCalcTriAttr( contour, tn1, t, nm, tn2, nmn2, &sn1, &sn1_c, &hn1,
                                        &an1, &bn1 ));


        j_3 = 3;
        prev_null = prev2_null = 0;
        for( j = 0; j < i; j++ )
        {
            tn3 = pt1[j_3];
            nmn3 = num1[j_3];
            if( j == 0 )
                j_1 = i - 1;
            else
                j_1 = j - 1;

            CV_MATCH_CHECK( status, icvCalcTriAttr( contour, tn2, tn1, nmn1, tn3, nmn3,
                                                    &sn2, &sn2_c, &hn2, &an2, &bn2 ));

            if( (s_c < sp1_c && s_c < sp2_c && s_c <= sn1_c && s_c <= sn2_c && s_c < e) ||
                (((s_c == sp1_c && s_c <= sp2_c) || (s_c == sp2_c && s_c <= sp1_c)) &&
                s_c <= sn1_c && s_c <= sn2_c && s_c < e && j > 1 && prev2_null == 0) ||
                (s_c < eps && j > 0 && prev_null == 0) )
            {
                prev_null = prev2_null = 1;
                if( s_c < threshold )
                {
                    if( ptr1[j_1] == NULL && ptr1[j] == NULL )
                    {
                        if( i_buf > 0 )
                            ptr2[i_buf - 1] = NULL;
                        else
                            i_end = 0;
                    }
                    else
                    {
/*   form next vertex  */
                        tree_one.pt = t;
                        tree_one.sign = (char) (CV_SIGN( s ));
                        tree_one.r1 = h / a;
                        tree_one.r2 = b / a;
                        tree_one.area = fabs( s );
                        tree_one.next_v1 = ptr1[j_1];
                        tree_one.next_v2 = ptr1[j];

                        CV_WRITE_SEQ_ELEM( tree_one, writer );
                        cur_adr = (_CvTrianAttr *) (writer.ptr - writer.seq->elem_size);

                        if( ptr1[j_1] != NULL )
                            ptr1[j_1]->prev_v = cur_adr;
                        if( ptr1[j] != NULL )
                            ptr1[j]->prev_v = cur_adr;

                        if( i_buf > 0 )
                            ptr2[i_buf - 1] = cur_adr;
                        else
                        {
                            tree_end = (_CvTrianAttr *) writer.ptr;
                            i_end = 1;
                        }
                        i_tree++;
                    }
                }
                else
/*   form next vertex    */
                {
                    tree_one.pt = t;
                    tree_one.sign = (char) (CV_SIGN( s ));
                    tree_one.area = fabs( s );
                    tree_one.r1 = h / a;
                    tree_one.r2 = b / a;
                    tree_one.next_v1 = ptr1[j_1];
                    tree_one.next_v2 = ptr1[j];

                    CV_WRITE_SEQ_ELEM( tree_one, writer );
                    cur_adr = (_CvTrianAttr *) (writer.ptr - writer.seq->elem_size);

                    if( ptr1[j_1] != NULL )
                        ptr1[j_1]->prev_v = cur_adr;
                    if( ptr1[j] != NULL )
                        ptr1[j]->prev_v = cur_adr;

                    if( i_buf > 0 )
                        ptr2[i_buf - 1] = cur_adr;
                    else
                    {
                        tree_end = cur_adr;
                        i_end = 1;
                    }
                    i_tree++;
                }
            }
            else
/*   the current triangle is not LMIAT    */
            {
                prev_null = 0;
                switch (prev2_null)
                {
                case 0:
                    break;
                case 1:
                    {
                        prev2_null = 2;
                        break;
                    }
                case 2:
                    {
                        prev2_null = 0;
                        break;
                    }
                }
                if( j != i - 1 || i_end == -1 )
                    ptr2[i_buf] = ptr1[j];
                else if( i_end == 0 )
                    ptr2[i_buf] = NULL;
                else
                    ptr2[i_buf] = tree_end;
                pt2[i_buf] = t;
                num2[i_buf] = num1[j];
                i_buf++;
            }
/*    go to next vertex    */
            tp3 = tp2;
            tp2 = tp1;
            tp1 = t;
            t = tn1;
            tn1 = tn2;
            tn2 = tn3;
            nmp3 = nmp2;
            nmp2 = nmp1;
            nmp1 = nm;
            nm = nmn1;
            nmn1 = nmn2;
            nmn2 = nmn3;

            sp2 = sp1;
            sp1 = s;
            s = sn1;
            sn1 = sn2;
            sp2_c = sp1_c;
            sp1_c = s_c;
            s_c = sn1_c;
            sn1_c = sn2_c;

            ap2 = ap1;
            ap1 = a;
            a = an1;
            an1 = an2;
            bp2 = bp1;
            bp1 = b;
            b = bn1;
            bn1 = bn2;
            hp2 = hp1;
            hp1 = h;
            h = hn1;
            hn1 = hn2;
            j_3++;
            if( j_3 >= i )
                j_3 = 0;
        }

        i = i_buf;
        e = e * koef;
    }

/*  construct tree root  */
    if( i != 4 )
        return CV_BADFACTOR_ERR;

    t = pt2[0];
    tn1 = pt2[1];
    tn2 = pt2[2];
    tp1 = pt2[3];
    nm = num2[0];
    nmn1 = num2[1];
    nmn2 = num2[2];
    nmp1 = num2[3];
/*   first pair of the triangles   */
    CV_MATCH_CHECK( status,
                    icvCalcTriAttr( contour, t, tp1, nmp1, tn1, nmn1, &s, &s_c, &h, &a, &b ));
    CV_MATCH_CHECK( status,
                    icvCalcTriAttr( contour, tn2, tn1, nmn1, tp1, nmp1, &sn2, &sn2_c, &hn2,
                                    &an2, &bn2 ));
/*   second pair of the triangles   */
    CV_MATCH_CHECK( status,
                    icvCalcTriAttr( contour, tn1, t, nm, tn2, nmn2, &sn1, &sn1_c, &hn1, &an1,
                                    &bn1 ));
    CV_MATCH_CHECK( status,
                    icvCalcTriAttr( contour, tp1, tn2, nmn2, t, nm, &sp1, &sp1_c, &hp1, &ap1,
                                    &bp1 ));

    a_s_c = fabs( s_c - sn2_c );
    a_sp1_c = fabs( sp1_c - sn1_c );

    if( a_s_c > a_sp1_c )
/*   form child vertices for the root     */
    {
        tree_one.pt = t;
        tree_one.sign = (char) (CV_SIGN( s ));
        tree_one.area = fabs( s );
        tree_one.r1 = h / a;
        tree_one.r2 = b / a;
        tree_one.next_v1 = ptr2[3];
        tree_one.next_v2 = ptr2[0];

        tree_two.pt = tn2;
        tree_two.sign = (char) (CV_SIGN( sn2 ));
        tree_two.area = fabs( sn2 );
        tree_two.r1 = hn2 / an2;
        tree_two.r2 = bn2 / an2;
        tree_two.next_v1 = ptr2[1];
        tree_two.next_v2 = ptr2[2];

        CV_WRITE_SEQ_ELEM( tree_one, writer );
        cur_adr = (_CvTrianAttr *) (writer.ptr - writer.seq->elem_size);

        if( s_c > sn2_c )
        {
            if( ptr2[3] != NULL )
                ptr2[3]->prev_v = cur_adr;
            if( ptr2[0] != NULL )
                ptr2[0]->prev_v = cur_adr;
            ptr1[0] = cur_adr;

            i_tree++;

            CV_WRITE_SEQ_ELEM( tree_two, writer );
            cur_adr = (_CvTrianAttr *) (writer.ptr - writer.seq->elem_size);

            if( ptr2[1] != NULL )
                ptr2[1]->prev_v = cur_adr;
            if( ptr2[2] != NULL )
                ptr2[2]->prev_v = cur_adr;
            ptr1[1] = cur_adr;

            i_tree++;

            pt1[0] = tp1;
            pt1[1] = tn1;
        }
        else
        {
            CV_WRITE_SEQ_ELEM( tree_two, writer );
            cur_adr = (_CvTrianAttr *) (writer.ptr - writer.seq->elem_size);

            if( ptr2[1] != NULL )
                ptr2[1]->prev_v = cur_adr;
            if( ptr2[2] != NULL )
                ptr2[2]->prev_v = cur_adr;
            ptr1[0] = cur_adr;

            i_tree++;

            CV_WRITE_SEQ_ELEM( tree_one, writer );
            cur_adr = (_CvTrianAttr *) (writer.ptr - writer.seq->elem_size);

            if( ptr2[3] != NULL )
                ptr2[3]->prev_v = cur_adr;
            if( ptr2[0] != NULL )
                ptr2[0]->prev_v = cur_adr;
            ptr1[1] = cur_adr;

            i_tree++;

            pt1[0] = tn1;
            pt1[1] = tp1;
        }
    }
    else
    {
        tree_one.pt = tp1;
        tree_one.sign = (char) (CV_SIGN( sp1 ));
        tree_one.area = fabs( sp1 );
        tree_one.r1 = hp1 / ap1;
        tree_one.r2 = bp1 / ap1;
        tree_one.next_v1 = ptr2[2];
        tree_one.next_v2 = ptr2[3];

        tree_two.pt = tn1;
        tree_two.sign = (char) (CV_SIGN( sn1 ));
        tree_two.area = fabs( sn1 );
        tree_two.r1 = hn1 / an1;
        tree_two.r2 = bn1 / an1;
        tree_two.next_v1 = ptr2[0];
        tree_two.next_v2 = ptr2[1];

        CV_WRITE_SEQ_ELEM( tree_one, writer );
        cur_adr = (_CvTrianAttr *) (writer.ptr - writer.seq->elem_size);

        if( sp1_c > sn1_c )
        {
            if( ptr2[2] != NULL )
                ptr2[2]->prev_v = cur_adr;
            if( ptr2[3] != NULL )
                ptr2[3]->prev_v = cur_adr;
            ptr1[0] = cur_adr;

            i_tree++;

            CV_WRITE_SEQ_ELEM( tree_two, writer );
            cur_adr = (_CvTrianAttr *) (writer.ptr - writer.seq->elem_size);

            if( ptr2[0] != NULL )
                ptr2[0]->prev_v = cur_adr;
            if( ptr2[1] != NULL )
                ptr2[1]->prev_v = cur_adr;
            ptr1[1] = cur_adr;

            i_tree++;

            pt1[0] = tn2;
            pt1[1] = t;
        }
        else
        {
            CV_WRITE_SEQ_ELEM( tree_two, writer );
            cur_adr = (_CvTrianAttr *) (writer.ptr - writer.seq->elem_size);

            if( ptr2[0] != NULL )
                ptr2[0]->prev_v = cur_adr;
            if( ptr2[1] != NULL )
                ptr2[1]->prev_v = cur_adr;
            ptr1[0] = cur_adr;

            i_tree++;

            CV_WRITE_SEQ_ELEM( tree_one, writer );
            cur_adr = (_CvTrianAttr *) (writer.ptr - writer.seq->elem_size);

            if( ptr2[2] != NULL )
                ptr2[2]->prev_v = cur_adr;
            if( ptr2[3] != NULL )
                ptr2[3]->prev_v = cur_adr;
            ptr1[1] = cur_adr;

            i_tree++;

            pt1[0] = t;
            pt1[1] = tn2;

        }
    }

/*    form root   */
    s = cvContourArea( contour );

    tree_root->pt = pt1[1];
    tree_root->sign = 0;
    tree_root->area = fabs( s );
    tree_root->r1 = 0;
    tree_root->r2 = 0;
    tree_root->next_v1 = ptr1[0];
    tree_root->next_v2 = ptr1[1];
    tree_root->prev_v = NULL;

    ptr1[0]->prev_v = (_CvTrianAttr *) tree_root;
    ptr1[1]->prev_v = (_CvTrianAttr *) tree_root;

/*     write binary tree root   */
/*    CV_WRITE_SEQ_ELEM (tree_one, start_writer);   */
    i_tree++;
/*  create sequence header     */
    *((CvSeq **) tree) = cvEndWriteSeq( &writer );
/*   write points for the main segment into sequence header   */
    (*tree)->p1 = pt1[0];

  M_END:

    cvFree( &ptr_n );
    cvFree( &ptr_p );
    cvFree( &num_n );
    cvFree( &num_p );
    cvFree( &pt_n );
    cvFree( &pt_p );

    return status;
}
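Usage note (not part of the original example): this fragment appears to be the tail of OpenCV's legacy contour-tree builder, which application code normally reaches through the public wrappers rather than by calling it directly. A minimal sketch, assuming the legacy C API and two contours contourA and contourB already extracted with cvFindContours:

    CvMemStorage* treeStorage = cvCreateMemStorage(0);
    CvContourTree* treeA = cvCreateContourTree( contourA, treeStorage, 0 );
    CvContourTree* treeB = cvCreateContourTree( contourB, treeStorage, 0 );

    /* compare the two tree representations; smaller values mean more similar shapes */
    double similarity = cvMatchContourTrees( treeA, treeB, CV_CONTOUR_TREES_MATCH_I1, 5 );

    cvReleaseMemStorage( &treeStorage );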
Example #9
/*
    Function: contoursample
    Purpose:  contour sampling
    Parameters: seq -------- contour point sequence
                samplearry - array that receives the sampled points
                samplenum -- number of points to sample
*/
int contoursample(CvSeq * seq , CvPoint *samplearry, int samplenum)
{
    int num = 0 ; 
    for (CvSeq *s = seq ; s !=NULL;s=s->h_next)
        num +=s->total;

	if ( num < samplenum)
	{
		return 0; 
	}

    CvPoint *pointarray = (CvPoint *)malloc(num * sizeof(CvPoint));
    
    int accum = 0 ; 
    for (CvSeq *s =seq ; s!=NULL;s=s->h_next)
    {
        cvCvtSeqToArray( s, pointarray +accum);
        accum +=s->total;
        
    }
    
 
    // randomly shuffle the contour points
    CvRNG rng; 
    rng = cvRNG(cvGetTickCount());
    CvPoint pointtemp;
    int tagtemp = -1;
    for (int i = 0 ; i < num ; ++i)
    {
        int index = cvRandInt(&rng)%(num-i)+i;
        if(index !=i)
        {
            pointtemp = pointarray[index];
            pointarray[index] = pointarray[i];
            pointarray[i] = pointtemp;
            
        }
    }
    // if num is much larger than samplenum, keep only the first 3*samplenum shuffled points to save computation time
    if (num > 3 * samplenum)
    {
        
        CvPoint *pointarray2 = (CvPoint *)malloc(3*samplenum * sizeof(CvPoint));
        for (int i = 0;i < 3*samplenum;++i)
        {
            pointarray2[i] = pointarray[i];
        }
        free(pointarray);
        pointarray = pointarray2;
        num = 3 * samplenum;
    }
    // compute the squared distance between every pair of remaining points
	int beg = 0,nelement = 0;
	pairDistance* pd = (pairDistance*)malloc(sizeof(pairDistance)*((num-1)*num/2));
    for (int i = 0 ; i < num ; i++)
    {
        for (int j = i +1 ; j < num ; ++j)
        {
			pd[nelement].i = i;
			pd[nelement].j = j;
			pd[nelement++].distance = (pointarray[i].x -pointarray[j].x) * (pointarray[i].x -pointarray[j].x) + 
				(pointarray[i].y -pointarray[j].y) * (pointarray[i].y -pointarray[j].y);
        
        }
    }
	// sort the pairs by distance (ascending)
	quick_sort(pd,0,nelement-1);
    // repeatedly drop one point of the closest remaining pair until only samplenum points are left
    int nneedremove = num - samplenum;
    int *mask = (int *)malloc( num * sizeof(int));
    memset(mask,0,num * sizeof(int));
    while (nneedremove > 0)
    {
        int index0 = pd[beg].i;
        int index1 = pd[beg].j;
        if (mask[index0] == 0 && mask[index1] ==0)
        {
            mask[index1] = 1 ;
            nneedremove --;
        }
        beg++;
    }
    // copy the surviving points into samplearry
    int nstartindex = 0 ;
    for (int i = 0 ; i < num ; ++i)
    {
        if (mask[i] ==0)
        {
            samplearry[nstartindex] = pointarray[i];
            nstartindex++;
        }
    }

    free(pointarray);
    free(pd);
    free(mask);
    return 1;
}
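A brief usage sketch (not part of the original example): bin stands for an already-thresholded single-channel image, and 100 is an arbitrary sample count.

    CvMemStorage* storage = cvCreateMemStorage(0);
    CvSeq* contours = NULL;
    cvFindContours( bin, storage, &contours, sizeof(CvContour),
                    CV_RETR_LIST, CV_CHAIN_APPROX_NONE );

    CvPoint samples[100];
    if( contours && contoursample( contours, samples, 100 ) )
    {
        /* samples[] now holds 100 roughly evenly spread contour points */
    }
    cvReleaseMemStorage( &storage );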
Example #10
void AirCursor::analyzeGrab()
{
    cvClearMemStorage(m_cvMemStorage);

    // get current depth map from Kinect
    const XnDepthPixel* depthMap = m_depthGenerator.GetDepthMap();

    // convert 16bit openNI depth map to 8bit IplImage used in opencv processing
    int origDepthIndex = 0;
    char* depthPtr = m_iplDepthMap->imageData;
    char* debugPtr = 0;
    if (m_debugImageEnabled) debugPtr = m_iplDebugImage->imageData;
    for (unsigned int y = 0; y < DEPTH_MAP_SIZE_Y; y++)
    {
        for (unsigned int x = 0; x < DEPTH_MAP_SIZE_X; x++)
        {
            // get current depth value from original depth map
            short depth = depthMap[origDepthIndex];

            // check that the current value is in the allowed range determined by the clipping distances,
            // and if it is, map it to the range 0 - 255 so that 255 is the closest value
            unsigned char pixel = 0;
            if (depth >= NEAR_CLIPPING_DISTANCE && depth <= FAR_CLIPPING_DISTANCE)
            {
                depth -= NEAR_CLIPPING_DISTANCE;
                pixel = 255 - (255.0f * ((float)depth / (FAR_CLIPPING_DISTANCE - NEAR_CLIPPING_DISTANCE)));
            }
            else {
                pixel = 0;
            }
            m_iplDepthMap->imageData[y * m_iplDepthMap->widthStep + x] = pixel;
            *depthPtr = pixel;

            if (m_debugImageEnabled)
            {
                // init debug image with the same depth map
                *(debugPtr + 0) = pixel;
                *(debugPtr + 1) = pixel;
                *(debugPtr + 2) = pixel;
                debugPtr += 3;
            }
            origDepthIndex++;
            depthPtr++;
        }
    }

    // calculate region of interest corner points in real world coordinates
    XnPoint3D rwPoint1 = m_handPosRealWorld;
    rwPoint1.X -= HAND_ROI_SIZE_LEFT;
    rwPoint1.Y += HAND_ROI_SIZE_UP;
    XnPoint3D rwPoint2 = m_handPosRealWorld;
    rwPoint2.X += HAND_ROI_SIZE_RIGHT;
    rwPoint2.Y -= HAND_ROI_SIZE_DOWN;

    // convert corner points to projective coordinates
    XnPoint3D projPoint1, projPoint2;
    m_depthGenerator.ConvertRealWorldToProjective(1, &rwPoint1, &projPoint1);
    m_depthGenerator.ConvertRealWorldToProjective(1, &rwPoint2, &projPoint2);

    // round projected corner points to ints and clip them against the depth map
    int ROItopLeftX = qRound(projPoint1.X); int ROItopLeftY = qRound(projPoint1.Y);
    int ROIbottomRightX = qRound(projPoint2.X); int ROIbottomRightY = qRound(projPoint2.Y);
    if (ROItopLeftX < 0) ROItopLeftX = 0; else if (ROItopLeftX > DEPTH_MAP_SIZE_X - 1) ROItopLeftX = DEPTH_MAP_SIZE_X - 1;
    if (ROItopLeftY < 0) ROItopLeftY = 0; else if (ROItopLeftY > DEPTH_MAP_SIZE_Y - 1) ROItopLeftY = DEPTH_MAP_SIZE_Y - 1;
    if (ROIbottomRightX < 0) ROIbottomRightX = 0; else if (ROIbottomRightX > DEPTH_MAP_SIZE_X - 1) ROIbottomRightX = DEPTH_MAP_SIZE_X - 1;
    if (ROIbottomRightY < 0) ROIbottomRightY = 0; else if (ROIbottomRightY > DEPTH_MAP_SIZE_Y - 1) ROIbottomRightY = DEPTH_MAP_SIZE_Y - 1;

    // set region of interest
    CvRect rect = cvRect(ROItopLeftX, ROItopLeftY, ROIbottomRightX - ROItopLeftX, ROIbottomRightY - ROItopLeftY);
    if(rect.height > 0 && rect.width > 0)
    {
        cvSetImageROI(m_iplDepthMap, rect);
        if (m_debugImageEnabled) cvSetImageROI(m_iplDebugImage, rect);
    }

    // use depth threshold to isolate hand
    // as the center point for thresholding, it seems better to use a point a bit below
    // the point Nite gives as the hand point
    XnPoint3D rwThresholdPoint = m_handPosRealWorld; rwThresholdPoint.Y -= 30;
    XnPoint3D projThresholdPoint;
    m_depthGenerator.ConvertRealWorldToProjective(1, &rwThresholdPoint, &projThresholdPoint);
    int lowerBound = (unsigned char)m_iplDepthMap->imageData[(int)projThresholdPoint.Y * DEPTH_MAP_SIZE_X + (int)projThresholdPoint.X] - DEPTH_THRESHOLD;
    if (lowerBound < 0) lowerBound = 0;
    cvThreshold( m_iplDepthMap, m_iplDepthMap, lowerBound, 255, CV_THRESH_BINARY );

    // color used for drawing the hand in the debug image, green for normal and red for grab.
    // color lags one frame from actual grab status but in practice that shouldn't be too big of a problem
    int rCol, gCol, bCol;
    if(m_grabbing) {
        rCol = 255; gCol = 0; bCol = 0;
    }
    else {
        rCol = 0; gCol = 255; bCol = 0;
    }

    // go through the ROI and paint hand on debug image with current grab status color
    if (m_debugImageEnabled)
    {
        // index of first pixel in the ROI
        int startIndex = ROItopLeftY * m_iplDepthMap->widthStep + ROItopLeftX;

        depthPtr = &(m_iplDepthMap->imageData[startIndex]);
        debugPtr = &(m_iplDebugImage->imageData[startIndex * 3]);

        // how much the index needs to increase when moving to the next line
        int vertInc = m_iplDepthMap->widthStep - (ROIbottomRightX - ROItopLeftX);

        for (int y = ROItopLeftY; y < ROIbottomRightY; y++)
        {
            for (int x = ROItopLeftX; x < ROIbottomRightX; x++)
            {
                if((unsigned char)*depthPtr > 0)
                {
                    *(debugPtr + 0) = rCol / 2;
                    *(debugPtr + 1) = gCol / 2;
                    *(debugPtr + 2) = bCol / 2;
                }

                // next pixel
                depthPtr++;
                debugPtr += 3;
            }

            // next line
            depthPtr += vertInc;
            debugPtr += vertInc * 3;
        }
    }

    // find contours in the hand and draw them on debug image
    CvSeq* contours = 0;
    cvFindContours(m_iplDepthMap, m_cvMemStorage, &contours, sizeof(CvContour));
    if (m_debugImageEnabled)
    {
        if(contours)
        {
            cvDrawContours(m_iplDebugImage, contours, cvScalar(rCol, gCol , bCol), cvScalar(rCol, gCol, bCol), 1);
        }
    }

    // go through contours and search for the biggest one
    CvSeq* biggestContour = 0;
    double biggestArea = 0.0f;
    for(CvSeq* currCont = contours; currCont != 0; currCont = currCont->h_next)
    {
        // ignore small contours which are most likely caused by artifacts
        double currArea = cvContourArea(currCont);
        if(currArea < CONTOUR_MIN_SIZE) continue;

        if(!biggestContour || currArea > biggestArea) {
            biggestContour = currCont;
            biggestArea = currArea;
        }
    }

    int numOfValidDefects = 0;

    if(biggestContour)
    {
        // calculate convex hull of the biggest contour found which is hopefully the hand
        CvSeq* hulls = cvConvexHull2(biggestContour, m_cvMemStorage, CV_CLOCKWISE, 0);

        if (m_debugImageEnabled)
        {
            // calculate convex hull and return it in a different form.
            // only required for drawing
            CvSeq* hulls2 = cvConvexHull2(biggestContour, m_cvMemStorage, CV_CLOCKWISE, 1);

            // draw the convex hull
            cvDrawContours(m_iplDebugImage, hulls2, cvScalar(rCol, gCol , bCol), cvScalar(rCol, gCol, bCol), 1);
        }

        // calculate convexity defects of hand's convex hull
        CvSeq* defects = cvConvexityDefects(biggestContour, hulls, m_cvMemStorage);

        int numOfDefects = defects->total;

        if (numOfDefects > 0)
        {
            // calculate defect min size in projective coordinates.
            // this is done using a vector from the current hand position to a point DEFECT_MIN_SIZE above it.
            // that vector is converted to projective coordinates and its length is calculated.
            XnPoint3D rwTempPoint = m_handPosRealWorld;
            rwTempPoint.Y += DEFECT_MIN_SIZE;
            XnPoint3D projTempPoint;
            m_depthGenerator.ConvertRealWorldToProjective(1, &rwTempPoint, &projTempPoint);
            int defectMinSizeProj = m_handPosProjected.Y - projTempPoint.Y;

            // convert opencv seq to array
            CvConvexityDefect* defectArray = (CvConvexityDefect*)malloc(sizeof(CvConvexityDefect) * numOfDefects);
            cvCvtSeqToArray(defects, defectArray, CV_WHOLE_SEQ);

            for(int i = 0; i < numOfDefects; i++)
            {
                // ignore too small defects
                if((defectArray[i].depth) < defectMinSizeProj)
                {
                   continue;
                }

               numOfValidDefects++;

               if (m_debugImageEnabled)
               {
                   // draw blue point to defect
                   cvCircle(m_iplDebugImage, *(defectArray[i].depth_point), 5, cvScalar(0, 0, 255), -1);
                   cvCircle(m_iplDebugImage, *(defectArray[i].start), 5, cvScalar(0, 0, 255), -1);
                   cvCircle(m_iplDebugImage, *(defectArray[i].end), 5, cvScalar(0, 0, 255), -1);
               }
            }

            free(defectArray);
        }
    }

    if (m_debugImageEnabled)
    {
        cvResetImageROI(m_iplDebugImage);

        // draw white dot on current hand position
        cvCircle(m_iplDebugImage, cvPoint(m_handPosProjected.X, m_handPosProjected.Y), 5, cvScalar(255, 255, 255), -1);

        // draw gray dot on current center of threshold position
        //cvCircle(m_iplDebugImage, cvPoint(projThresholdPoint.X, projThresholdPoint.Y), 5, cvScalar(127, 127, 127), -1);

        // draw ROI with green
        //cvRectangle(m_iplDebugImage, cvPoint(ROItopLeftX, ROItopLeftY), cvPoint(ROIbottomRightX, ROIbottomRightY), cvScalar(0, 255, 0));
    }

    // determine current grab status based on defect count
    if(numOfValidDefects <= GRAB_MAX_DEFECTS)
    {
        m_currentGrab = true;
    }
    else
    {
        m_currentGrab = false;
    }

    if (m_debugImageEnabled)
    {
        // debug strings
        QList<QString> debugStrings;
        debugStrings.push_back(QString("hand distance: " + QString::number(m_handPosRealWorld.Z) + " mm").toStdString().c_str());
        debugStrings.push_back(QString("defects: " + QString::number(numOfValidDefects)).toStdString().c_str());

        // convert iplDebugImage to QImage
        char* scanLinePtr = m_iplDebugImage->imageData;
        for (int y = 0;y < DEPTH_MAP_SIZE_Y; y++) {
            memcpy(m_debugImage->scanLine(y), scanLinePtr, DEPTH_MAP_SIZE_X * 3);
            scanLinePtr += DEPTH_MAP_SIZE_X * 3;
        }

        emit debugUpdate(*m_debugImage, debugStrings);
    }
}
Example #11
void save_camera_paramsOriginal( const char* out_filename, int image_count, CvSize img_size,
                         CvSize board_size, float square_size,
                         float aspect_ratio, int flags,
                         const CvMat* camera_matrix, CvMat* dist_coeffs,
                         const CvMat* extr_params, const CvSeq* image_points_seq,
                         const CvMat* reproj_errs, double avg_reproj_err )
{
    CvFileStorage* fs = cvOpenFileStorage( out_filename, 0, CV_STORAGE_WRITE );

    time_t t;
    time( &t );
    struct tm *t2 = localtime( &t );
    char buf[1024];
    strftime( buf, sizeof(buf)-1, "%c", t2 );

    cvWriteString( fs, "calibration_time", buf );

    cvWriteInt( fs, "image_count", image_count );
    cvWriteInt( fs, "image_width", img_size.width );
    cvWriteInt( fs, "image_height", img_size.height );
    cvWriteInt( fs, "board_width", board_size.width );
    cvWriteInt( fs, "board_height", board_size.height );
    cvWriteReal( fs, "square_size", square_size );

    if( flags & CV_CALIB_FIX_ASPECT_RATIO )
        cvWriteReal( fs, "aspect_ratio", aspect_ratio );

    if( flags != 0 )
    {
        sprintf( buf, "flags: %s%s%s%s",
            flags & CV_CALIB_USE_INTRINSIC_GUESS ? "+use_intrinsic_guess" : "",
            flags & CV_CALIB_FIX_ASPECT_RATIO ? "+fix_aspect_ratio" : "",
            flags & CV_CALIB_FIX_PRINCIPAL_POINT ? "+fix_principal_point" : "",
            flags & CV_CALIB_ZERO_TANGENT_DIST ? "+zero_tangent_dist" : "" );
        cvWriteComment( fs, buf, 0 );
    }

    cvWriteInt( fs, "flags", flags );

    cvWrite( fs, "camera_matrix", camera_matrix );
    cvWrite( fs, "distortion_coefficients", dist_coeffs );

    cvWriteReal( fs, "avg_reprojection_error", avg_reproj_err );
    if( reproj_errs )
        cvWrite( fs, "per_view_reprojection_errors", reproj_errs );

    if( extr_params )
    {
        cvWriteComment( fs, "a set of 6-tuples (rotation vector + translation vector) for each view", 0 );
        cvWrite( fs, "extrinsic_parameters", extr_params );
    }

    if( image_points_seq )
    {
        cvWriteComment( fs, "the array of board corners projections used for calibration", 0 );
        assert( image_points_seq->total == image_count );
        CvMat* image_points = cvCreateMat( 1, image_count*board_size.width*board_size.height, CV_32FC2 );
        cvCvtSeqToArray( image_points_seq, image_points->data.fl );

        cvWrite( fs, "image_points", image_points );
        cvReleaseMat( &image_points );
    }

    cvReleaseFileStorage( &fs );
}
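A minimal read-back sketch (not from the original source), assuming the same C file-storage API; the filename "camera.yml" is a placeholder.

    CvFileStorage* fs = cvOpenFileStorage( "camera.yml", 0, CV_STORAGE_READ );
    if( fs )
    {
        int image_count = cvReadIntByName( fs, 0, "image_count", 0 );
        CvMat* camera_matrix = (CvMat*)cvReadByName( fs, 0, "camera_matrix" );
        CvMat* dist_coeffs = (CvMat*)cvReadByName( fs, 0, "distortion_coefficients" );

        /* ... use image_count, camera_matrix and dist_coeffs ... */

        cvReleaseMat( &camera_matrix );
        cvReleaseMat( &dist_coeffs );
        cvReleaseFileStorage( &fs );
    }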
Example #12
int findStableMatches( CvSeq *seq, float minRad, float maxRad, CandidatePtrVector& kps, IplImage* bin ) {

  // Return value
  int retVal = -1;

  // Threshold Contour entries size
  int elements = seq->total;
  if( elements < 8 ) {
    return retVal;
  }

  // Gather statistics
  CvRect rect = cvBoundingRect( seq );
  int high = ( rect.height < rect.width ? rect.width : rect.height );
  int low = ( rect.height < rect.width ? rect.height : rect.width );

  // If bounding box is very small simply return
  if( low < minRad*2  ) {
    return retVal;
  }
  
  // Allocate Contour array
  CvPoint *group_pos = (CvPoint*) malloc(elements * sizeof(CvPoint));
  cvCvtSeqToArray(seq, group_pos, CV_WHOLE_SEQ);

  // Calculate arc and downsampling statistics
  double arc_length = cvArcLength( seq );
  double arc_approx = arc_length / 10;
  double rect_approx = 12*(float)high / (float)low;
  double downsample = 2 * elements / (rect_approx + arc_approx);
  double ds_length = arc_length / 4;

  // Perform downsampling
  int maxSize = downsample * elements;
  int newSize = 0;
  CvPoint *dsed = (CvPoint*) malloc(maxSize * sizeof(CvPoint));
  dsed[0] = CvPoint( group_pos[0] );
  CvPoint last = CvPoint( dsed[0] );
  newSize++;
  for( int i = 1; i < elements; i++ ) {
    double dist_so_far = dist_squared( group_pos[i], last );
    if( dist_so_far > ds_length && newSize < maxSize ) {
      dsed[newSize] = CvPoint( group_pos[i] );
      newSize++;
      last = CvPoint( group_pos[i] );
    }
  }

  // Check to make sure reduced Contour size is sufficient [quickfix: todo revise above]
  if( newSize < 6 ) {
    free(group_pos);
    free(dsed);
    return -1;
  }

  // Fit Ellipse
  CvPoint2D32f* input = (CvPoint2D32f*)malloc(newSize*sizeof(CvPoint2D32f));
  for( int i=0; i<newSize; i++ ) {
    input[i].x = dsed[i].x;
    input[i].y = dsed[i].y;
  }
  CvBox2D* box = (CvBox2D*)malloc(sizeof(CvBox2D));
  cvFitEllipse( input, newSize, box );

  // Threshold size
  float esize = PI*box->size.height*box->size.width/4.0f;
  if( esize < PI*maxRad*maxRad ) {

    // Add
    Candidate *kp = new Candidate;
    kp->angle = box->angle;
    kp->r = box->center.y;
    kp->c = box->center.x;
    kp->minor = box->size.width/2;
    kp->major = box->size.height/2;
    kp->magnitude = 0;
    kp->method = ADAPTIVE;
    kps.push_back( kp );
    retVal = 0;

  } else {

    // Interest point too large
    retVal = 1;
  }

  // Deallocations
  free(box);
  free(input);
  free(group_pos);
  free(dsed);

  return retVal;
}
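A usage sketch (not in the original): Candidate and CandidatePtrVector come from the surrounding project, bin is an assumed binary input image, and the radius limits are placeholder values.

  CvMemStorage* storage = cvCreateMemStorage(0);
  CvSeq* contours = NULL;
  cvFindContours( bin, storage, &contours, sizeof(CvContour),
                  CV_RETR_LIST, CV_CHAIN_APPROX_NONE );

  CandidatePtrVector candidates;
  for( CvSeq* c = contours; c != NULL; c = c->h_next ) {
    findStableMatches( c, 4.0f, 200.0f, candidates, bin );
  }

  cvReleaseMemStorage( &storage );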
Example #13
IplImage* PlateFinder::FindPlate (IplImage *src) {
	IplImage* plate = NULL;
	IplImage* contourImg = cvCreateImage(cvGetSize(src), IPL_DEPTH_8U, 1);	// image used for contour finding
	IplImage* grayImg =  cvCreateImage(cvGetSize(src), IPL_DEPTH_8U, 1);	// grayscale image
	cvCvtColor(src, grayImg, CV_RGB2GRAY);

	IplImage* cloneImg = cvCloneImage(src);
	
	// preprocess the image
	cvCopy(grayImg, contourImg);
	cvNormalize(contourImg, contourImg, 0, 255, CV_MINMAX);
	ImageRestoration(contourImg);
	
	IplImage* rectImg = cvCreateImage(cvGetSize(src), IPL_DEPTH_8U, 3);
	cvMerge(contourImg, contourImg, contourImg, NULL, rectImg); // merge the single channel into a 3-channel image for drawing

	// find the contours in the image
	CvMemStorage *storagePlate = cvCreateMemStorage(0);
	CvSeq *contours = NULL;
	cvFindContours(contourImg, storagePlate, &contours, sizeof(CvContour), CV_RETR_EXTERNAL, CV_CHAIN_APPROX_SIMPLE, cvPoint(0, 0));

	//cvShowImage("contourImg", contourImg);
	

	int xmin, ymin, xmax, ymax, w, h, s, r;
	int count;
	double ratio;	// width-to-height ratio
	CvRect rectPlate; 

	// keep the images that could be license plates
	IplImage** plateArr = new IplImage *[5];
	int j = 0;
	for (int i = 0; i < 5; i++)
	{
		plateArr[i] = NULL;
	}

	while (contours) {
		count = contours->total;
		CvPoint *PointArray = new CvPoint[count];
		cvCvtSeqToArray (contours, PointArray, CV_WHOLE_SEQ);

		for (int i = 0; i < count; i++)
		{
			if (i == 0)
			{
				xmin = xmax = PointArray[i].x;
				ymin = ymax = PointArray[i].y;
			}

			if (PointArray[i].x > xmax) {
				xmax = PointArray[i].x;
			}
			if (PointArray[i].x < xmin)  {
				xmin = PointArray[i].x;
			}

			if (PointArray[i].y > ymax) {
				ymax = PointArray[i].y;
			}
			if (PointArray[i].y < ymin)  {
				ymin = PointArray[i].y;
			}
		}

		w = xmax - xmin;
		h = ymax - ymin;
		s = w * h;

		cvRectangle (rectImg, cvPoint(xmin, ymin), cvPoint(xmax, ymax), RED);

		// discard rectangles with the wrong proportions
		if (s != 0) {
			r = (contourImg->height * contourImg->width) / s;
		} else {
			r = 1000;
		}

		if (w == 0 && h == 0) {
			ratio = 0;
		} else {
			ratio = (double)w/h;
		}

		if (r > 30 && r < 270) {
			// draw a green rectangle
			cvRectangle (rectImg, cvPoint(xmin, ymin), cvPoint(xmax, ymax), GREEN);

			if (ratio > 2.6 && ratio < 7) {
				cvRectangle (rectImg, cvPoint(xmin, ymin), cvPoint(xmax, ymax), BLUE);

				if (w > 80 && w < 250 && h > 25 && h < 150) {
					rectPlate = cvRect (xmin, ymin, w, h);

					cvRectangle (cloneImg, cvPoint(rectPlate.x, rectPlate.y),
						cvPoint(rectPlate.x + rectPlate.width, rectPlate.y + rectPlate.height), RED, 3);

					// crop the plate region
					plate = cvCreateImage(cvSize(rectPlate.width, rectPlate.height), IPL_DEPTH_8U, 3);
					cvSetImageROI(src, rectPlate);
					cvCopy(src, plate, NULL);
					cvResetImageROI(src);

					// store it in the candidate plate array plateArr
					int cnt = CountCharacter(plate);
					if (cnt >= 5) {
						plateArr[j] = cvCloneImage(plate);
						j++;
					}
				}
			}
		}

		delete []PointArray;

		contours = contours->h_next;
	}

	// pick the candidate plate to return (the narrowest one)
	if (plateArr[0])
	{
		int w = plateArr[0]->width;

		int flag = 0;
		for (int i = 1; i < 4; i++)
		{
			if (plateArr[i] && plateArr[i]->width < w)
			{
				flag = i;
			}
		}

		plateArr[0] = plateArr[flag];
	}

	cvShowImage("cloneImg", cloneImg);
	//cvShowImage("rectImg", rectImg);
	//cvShowImage("plate", plateArr[0]);

	cvReleaseImage(&contourImg);
	cvReleaseImage(&rectImg);
	cvReleaseImage(&grayImg);
	cvReleaseImage(&cloneImg);
	cvReleaseImage(&plate);
	cvReleaseMemStorage(&storagePlate);

	return plateArr[0];
}
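A brief usage sketch (not in the original); PlateFinder is the surrounding project class (assumed to be default-constructible) and the file name is a placeholder.

	PlateFinder finder;
	IplImage* src = cvLoadImage("car.jpg", CV_LOAD_IMAGE_COLOR);
	if (src) {
		IplImage* plate = finder.FindPlate(src);
		if (plate)
			cvShowImage("plate", plate);
		cvWaitKey(0);
		cvReleaseImage(&src);
	}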
Example #14
bool Classifier::run(const IplImage *frame, CObjectList *objects, bool scored)
{
    double xDiff = 0, yDiff = 0;
    optical_flow(frame, &xDiff, &yDiff);
    
    totalXDiff += xDiff;
    totalYDiff += yDiff;
    if (!scored)
        return true;
    
    cout << "--------------------------------------" << endl;
    cout << "\t\tRun" << endl;
    
    assert((frame != NULL) && (objects != NULL));
    
    printf("Let's go!\n");
    
    for (int i = 0; i < (int)prevObjects.size(); ++i) {
        if (prevObjects[i].rect.x > -20 && prevObjects[i].rect.x < frame->width 
         && prevObjects[i].rect.y > -20 && prevObjects[i].rect.y < frame->height) {
            objects->push_back(prevObjects[i]);
            cout << prevObjects[i].label << " is now at (" << prevObjects[i].rect.x << ", " << prevObjects[i].rect.y << ")" << endl;
        }
    }
    
    //printf("HEY OPTICAL FLOW!!!! %f %f\n", totalXDiff, totalYDiff);
    
    // move old objects
    for (int i = 0; i < (int)objects->size(); ++i) {
        (*objects)[i].rect.x -= totalXDiff * 3;
        (*objects)[i].rect.y -= totalYDiff * 3;
    }
    
    cout << "Flow: " << totalXDiff << " " << totalYDiff << endl;
    totalYDiff = 0;
    totalXDiff = 0;
    
    
    // Convert to grayscale.
    IplImage *gray  = cvCreateImage(cvGetSize(frame), IPL_DEPTH_8U, 1);
    cvCvtColor(frame, gray, CV_BGR2GRAY);
    
    // Resize by half first, as per the handout.
    double scale = 2.0;
    IplImage *dst = cvCreateImage(cvSize(gray->width  / scale, gray->height  / scale), gray->depth,  gray->nChannels);
    cvResize(gray, dst);

    printf("About to do SURF\n");
    CvSeq *keypoints = 0, *descriptors = 0;
    CvSURFParams params = cvSURFParams(100, SURF_SIZE == 128);
    cvExtractSURF(dst, 0, &keypoints, &descriptors, storage, params);
    
    cout << "desc: " << descriptors->total << endl;
    if (descriptors->total == 0) return false;
    
    vector<float> desc;
    desc.resize(descriptors->total * descriptors->elem_size/sizeof(float));
    cvCvtSeqToArray(descriptors, &desc[0]);
    
    vector<CvSURFPoint> keypts;
    keypts.resize(keypoints->total);
    cvCvtSeqToArray(keypoints, &keypts[0]);
    
    vector<float *> features;
    int where = 0;
    for (int pt = 0; pt < keypoints->total; ++pt) {
        float *f = new float[SURF_SIZE];
        for (int j = 0; j < SURF_SIZE; ++j) {
            f[j] = desc[where];
            ++where;
        }
        features.push_back(f);
    }
    printf("Done SURF\n");

    printf("Clustering...\n");
    vector<int> cluster(features.size());
    for (int i = 0; i < (int)features.size(); ++i) {
        cluster[i] = best_cluster(centers, features[i]);
    }
    printf("Done clustering...\n");
    
    vector<FoundObject> newObjects;
    run_boxscan(dst, cluster, keypts, features, newObjects, objects);
    for (int i = 0; i < (int)newObjects.size(); ++i) {
        if (newObjects[i].object.rect.x > -20 && newObjects[i].object.rect.x < frame->width 
         && newObjects[i].object.rect.y > -20 && newObjects[i].object.rect.y < frame->height) {
            objects->push_back(newObjects[i].object);
            cout << "Found object: " << newObjects[i].object.label << " at (" ;
            cout << newObjects[i].object.rect.x << ", " << newObjects[i].object.rect.y << ")" << endl;
        }
    }
    
    prevObjects = *objects;
    
    cvReleaseImage(&gray);
    cvReleaseImage(&dst);

    return true;
}
Example #15
bool Classifier::extract(TTrainingFileList& fileList)
{
    cout << "Classes:" << endl;
    for (int i = 0; i < (int)fileList.classes.size(); i++) {
        cout << fileList.classes[i] << " (";
        int count = 0;
        for (int j = 0; j < (int)fileList.files.size(); j++) {
            if (fileList.files[j].label == fileList.classes[i]) {
                count += 1;
            }
        }
        cout << count << " samples)" << endl;
    }
    cout << endl;

    IplImage *image;
    
    int maxImages = INT_MAX;

    cout << "Processing images..." << endl;

    //vector<int> numIpoints(kNumObjectTypes);
    int minfiles = min(maxImages, (int)fileList.files.size());
    for (int i = 0; i < minfiles; i++) 
    {
        // skip too many others...
        if (fileList.files[i].label == "other" && --max_others < 0) continue;

        // show progress
        if (i % 10 == 0) showProgress(i, minfiles);

        image = cvLoadImage(fileList.files[i].filename.c_str(), 0);
        
        if (image == NULL) {
            cerr << "ERROR: could not load image " << fileList.files[i].filename.c_str() << endl;
            continue;
        }
        
        string label = fileList.files[i].label;

        CvSeq *keypoints = 0, *descriptors = 0;
        CvSURFParams params = cvSURFParams(100, SURF_SIZE == 128);
        cvExtractSURF(image, 0, &keypoints, &descriptors, storage, params);
        
        if (descriptors->total == 0) continue;
        
        vector<float> desc;
        desc.resize(descriptors->total * descriptors->elem_size/sizeof(float));
        cvCvtSeqToArray(descriptors, &desc[0]);
        
        vector<float *> features;
        int where = 0;
        for (int pt = 0; pt < keypoints->total; ++pt) {
            float *f = new float[SURF_SIZE];
            for (int j = 0; j < SURF_SIZE; ++j) {
                f[j] = desc[where];
                ++where;
            }
            features.push_back(f);
        }
        
        if (i % test_to_train == 1 || !test_on) 
            set_train.push(stringToClassInt(label), features);
        else set_test.push(stringToClassInt(label), features);
    }
    
    
    cout << endl << "Extracted surf features. " << endl;

    cout << "Train Set" << endl;
    set_train.recount();
    set_train.stats();
    
    cout << "Test Set" << endl;
    set_test.recount();
    set_test.stats();
    
    return true;
}
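Both run() and extract() above unpack the cvExtractSURF output with the same loop; a hedged refactoring sketch of a shared helper (not part of the original class), assuming the same SURF_SIZE constant:

    static std::vector<float*> unpackSurfDescriptors(CvSeq *descriptors)
    {
        std::vector<float*> features;
        if (descriptors == NULL || descriptors->total == 0)
            return features;

        // flatten the descriptor sequence into one contiguous float buffer
        std::vector<float> flat(descriptors->total * descriptors->elem_size / sizeof(float));
        cvCvtSeqToArray(descriptors, &flat[0]);

        // split the buffer into one SURF_SIZE-long array per keypoint
        int where = 0;
        for (int i = 0; i < descriptors->total; ++i) {
            float *f = new float[SURF_SIZE];
            for (int j = 0; j < SURF_SIZE; ++j)
                f[j] = flat[where++];
            features.push_back(f);
        }
        return features;    // caller owns the arrays and must delete[] each one
    }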