Esempio n. 1
0
//! Calculate contour points from crack codes
//! Converts the stored Freeman chain code (m_contour) into an explicit point
//! sequence, caches it in m_contourPoints and returns it. Also fills in the
//! contour's bounding box so cvPointPolygonTest works on the generated polygon.
//! Returns NULL when no chain code is stored; returns the cached sequence on
//! repeated calls.
t_PointList CBlobContour::GetContourPoints()
{
	// it is calculated?  (return cached result from a previous call)
	if( m_contourPoints != NULL )
		return m_contourPoints;

	// no chain code available: nothing to convert
	if ( m_contour == NULL || m_contour->total <= 0 )
	{
		return NULL;
	}

	CvSeq *tmpPoints;
	CvSeqReader reader;
	CvSeqWriter writer;
	CvPoint actualPoint;
	CvRect boundingBox;

	// if aproximation is different than simple extern perimeter will not work
	tmpPoints = cvApproxChains( m_contour, m_parentStorage, CV_CHAIN_APPROX_NONE);


	// apply an offset to contour points to recover real coordinates
	
	cvStartReadSeq( tmpPoints, &reader);

	// destination sequence mirrors the header/element layout of the temporary one
	m_contourPoints = cvCreateSeq( tmpPoints->flags, tmpPoints->header_size, tmpPoints->elem_size, m_parentStorage );
	cvStartAppendToSeq(m_contourPoints, &writer );

	// also calculate bounding box of the contour to allow cvPointPolygonTest
	// work correctly on the generated polygon
	boundingBox.x = boundingBox.y = 10000;
	boundingBox.width = boundingBox.height = 0;
	
	for( int i=0; i< tmpPoints->total; i++)
	{
		CV_READ_SEQ_ELEM( actualPoint, reader);

		// chain-code points are relative to the blob start point
		actualPoint.x += m_startPoint.x;
		actualPoint.y += m_startPoint.y;

		// NOTE(review): width/height here accumulate the MAX x/y coordinate,
		// not a width/height in the usual sense -- confirm that consumers of
		// ((CvContour*)m_contourPoints)->rect expect this convention.
		boundingBox.x = MIN( boundingBox.x, actualPoint.x );
		boundingBox.y = MIN( boundingBox.y, actualPoint.y );
		boundingBox.width = MAX( boundingBox.width, actualPoint.x );
		boundingBox.height = MAX( boundingBox.height, actualPoint.y );
		
		CV_WRITE_SEQ_ELEM( actualPoint, writer );
	}
	cvEndWriteSeq( &writer );
	cvClearSeq( tmpPoints );

	// assign calculated bounding box
	((CvContour*)m_contourPoints)->rect = boundingBox;


	return m_contourPoints;
}
Esempio n. 2
0
// Walk the left face of the given subdivision edge and collect the polygon
// vertices into `buffer`. The buffer is emptied on entry; it is emptied again
// (i.e. the polygon is discarded) if any vertex is invalid or if the walk
// produced a degenerate polygon of two or fewer points.
void PlanarSubdivisionEdgeToPoly(CvSubdiv2DEdge edge, CvSeq* buffer)
{
   cvClearSeq(buffer);
   if (!edge)
      return;

   CvSubdiv2DPoint* origin = cvSubdiv2DEdgeOrg(edge);
   if (origin == NULL || origin->flags < 0)
      return;

   CvSubdiv2DEdge current = edge;
   for (;;)
   {
      CvSubdiv2DPoint* dst = cvSubdiv2DEdgeDst(current);
      if (dst == NULL || dst->flags < 0)
      {
         // hit an invalid vertex: abandon the whole polygon
         cvClearSeq(buffer);
         return;
      }
      cvSeqPush(buffer, &dst->pt);

      // closed the loop back to the origin vertex
      if (memcmp(&dst->pt, &origin->pt, sizeof(CvPoint2D32f)) == 0)
         break;

      current = cvSubdiv2DGetEdge(current, CV_NEXT_AROUND_LEFT);
   }

   // a polygon needs at least three vertices
   if (buffer->total <= 2)
      cvClearSeq(buffer);
}
Esempio n. 3
0
/**
 * Try to detect a card from the learned list
 * @param card_roi The binary enhanced image (modified in place by cvFindContours)
 * @return the detected card type; currently always CARD_UNKNOWN because the
 *         contour-comparison step is not implemented yet (only drawing/debug)
 */
enum card_type
card_detect(CvMat *card_roi)
{

	int contour_count     = 0;
	enum card_type card   = CARD_UNKNOWN;
	CvMemStorage* storage = cvCreateMemStorage(0);
	CvSeq *contour        = NULL;
	CvSeq *contours       = NULL;
	CvMat *edges          = NULL;

	/* Detect edges and find contours */
	edges = cvCreateMat( card_roi->rows, card_roi->cols, card_roi->type );
	//cvCanny( card_roi, edges, 0, 255, 3 );
	contour_count = cvFindContours(
        card_roi,
        storage,
        &contours,
        sizeof(CvContour),
        CV_RETR_LIST,
		CV_CHAIN_APPROX_NONE,
		cvPoint(0, 0)
	);

	/* Draw each detected contour onto the edges image */
	if( contour_count > 0)
	{
		for( contour = contours; contour != NULL; contour = contour->h_next )
		{
			/* NOTE(review): CV_FILLED is being passed in the line_type slot
			 * of cvDrawContours (which expects 8, 4 or CV_AA) -- confirm
			 * intent; it was probably meant for the thickness argument. */
			cvDrawContours( 
				edges,
				contour,
				CV_RGB(255,255,255),
				CV_RGB(255,255,255),
				0,
				2,
				CV_FILLED, cvPoint(0, 0) );
#if DEBUG
				cvShowImage( "card_edges", edges );
				cvMoveWindow( "card_edges", 0, 0 );
				while( cvWaitKey(250) != 32 )
				;
#endif
		}
	}

	/* Cleanup.
	 * BUGFIX: cvClearSeq dereferences its argument, so calling it with the
	 * NULL sequence produced when no contours are found crashed here. */
	if( contours != NULL )
		cvClearSeq( contours );
	cvReleaseMat( &edges );
	cvClearMemStorage(storage);
	cvReleaseMemStorage(&storage);

	return card;
}
Esempio n. 4
0
/**
- FUNCTION: Assignment operator
- FUNCTIONALITY: Assigns a blob to the current one
- PARAMETERS:
	- src: blob to assign
- RESULT:
	- the current blob is replaced by the src blob
- RESTRICTIONS:
- AUTHOR: Ricard Borras
- CREATION DATE: 25-05-2005.
- MODIFICATION: Date. Author. Description.
*/
CBlob& CBlob::operator=(const CBlob &src )
{
	// if it is already the same blob, nothing needs to be done
	if (this != &src)
	{
		// remove the blob's current edge sequence...
		cvClearSeq(edges);
		// ...and release the memory storage that backed it
		cvReleaseMemStorage( &m_storage );

		// create an empty sequence for the edges in fresh storage
		m_storage = cvCreateMemStorage(0);
		edges = cvCreateSeq( CV_SEQ_KIND_GENERIC|CV_32SC2,
								   sizeof(CvContour),
								   sizeof(CvPoint),m_storage);

		// copy the source blob's scalar properties into this one
		// (Area()/Perimeter() are used so lazily-computed values are resolved)
		etiqueta = src.etiqueta;		
		exterior = src.exterior;
		area = src.Area();
		perimeter = src.Perimeter();
		parent = src.parent;
		minx = src.minx;
		maxx = src.maxx;
		miny = src.miny;
		maxy = src.maxy;
		sumx = src.sumx;
		sumy = src.sumy;
		sumxx = src.sumxx;
		sumyy = src.sumyy;
		sumxy = src.sumxy;
		mean = src.mean;
		stddev = src.stddev;
		externPerimeter = src.externPerimeter;

		// deep-copy the source blob's edge points into the new sequence
		CvSeqReader reader;
		CvSeqWriter writer;
		CvPoint edgeactual;
		
		cvStartReadSeq( src.Edges(), &reader);
		cvStartAppendToSeq( edges, &writer );

		for( int i=0; i< src.Edges()->total; i++)
		{
			CV_READ_SEQ_ELEM( edgeactual ,reader);
			CV_WRITE_SEQ_ELEM( edgeactual , writer );
		}
		
		cvEndWriteSeq( &writer );
	}
	return *this;
}
// Run CUDA cascade detection and copy the detected rectangles from the GPU
// buffer into the (cleared) CvSeq `results`. Returns the number of detections.
int cudaCascadeClassifierDetectMultiScale(cv::cuda::CascadeClassifier_CUDA* classifier, const cv::cuda::GpuMat* image, cv::cuda::GpuMat* objectsBuf, double scaleFactor, int minNeighbors, CvSize minSize, CvSeq* results)
{
   cvClearSeq(results);
   const int nDetections = classifier->detectMultiScale(*image, *objectsBuf, scaleFactor, minNeighbors, minSize);
   if (nDetections > 0)
   {
      // download only the populated columns of the GPU buffer to the host
      cv::Mat host;
      objectsBuf->colRange(0, nDetections).download(host);
      cvSeqPushMulti(results, host.data, nDetections);
   }
   return nDetections;
}
Esempio n. 6
0
// Find contours in Tmp_img, filter them by area via image moments, and store
// one T_MEAS_RESULTS_REC per accepted contour in ImageResult (a CvSeq backed
// by ResultsStorage). Previously accumulated results are discarded first.
// Always returns true.
// NOTE(review): ImageResult is cleared, then ResultsStorage is cleared
// (invalidating the old sequence), then a new sequence is created -- the
// ordering here is load-bearing; do not reorder.
bool ContoursProcessor::FindContours()
{
	if (storage==NULL)
    {
      storage = cvCreateMemStorage(0);
    }
    else
    {
      // reuse the contour storage between calls instead of reallocating
      cvClearMemStorage(storage);
    }
    cvFindContours( Tmp_img, storage, &contours, sizeof(CvContour),
                    CV_RETR_LIST, CV_CHAIN_APPROX_SIMPLE, cvPoint(0,0) );
	if(ImageResult)
		cvClearSeq(ImageResult);
    if (contours)
    {
      if (ResultsStorage==NULL)
      {
        ResultsStorage = cvCreateMemStorage(0);
      }
      else
      {
        cvClearMemStorage(ResultsStorage);
      }
      ImageResult = cvCreateSeq(0,sizeof(CvSeq),sizeof(T_MEAS_RESULTS_REC),ResultsStorage);
      T_MEAS_RESULTS_REC ContourResult;
      int Idx=1;
      // walk the flat contour list (CV_RETR_LIST links siblings via h_next)
      for( CvSeq* c=contours; c!=NULL; c=c->h_next ) { 
      //for (i=0;i<contours->total;i++)
        //ImageResult
        if (ImageResult)
        {
          cvContourMoments(c,&Moments);
          // keep only contours whose area (m00) is within the expected
          // object-size band; NOTE(review): 3000/8000 are magic bounds --
          // presumably tuned to the measured parts, confirm before changing
          if ((Moments.m00>3000.0)&&(Moments.m00<8000.0))
          {
            memset(&ContourResult,0,sizeof(ContourResult));
            ContourResult.ObjNo=Idx;
            ContourResult.Area = Moments.m00;
            // centroid from first-order moments (m00 > 3000 so no div-by-zero)
            ContourResult.Center.x = Moments.m10/Moments.m00;
            ContourResult.Center.y = Moments.m01/Moments.m00;
            cvSeqPushFront(ImageResult,&ContourResult);
            Idx++;
          }
        }
      }
      CalcResult(ImageResult); 
      //PrintResults(0, res_img,ImageResult);
      cvDrawContours( cnt_img, contours, CV_RGB(128,0,0), CV_RGB(0,128,0), 3, 1, CV_AA, cvPoint(0,0) );
    }

	return true;
}
Esempio n. 7
0
/* Release the points of every learned card contour. Each non-NULL entry of
 * the card_contours table is emptied with cvClearSeq (storage itself is not
 * released here). */
void
card_cleanup(void)
{
	int idx;

	for (idx = CARD_JOKER; idx < CARD_TYPE_END; idx++) {
		if (card_contours[idx] == NULL)
			continue;
		cvClearSeq(card_contours[idx]);
	}
}
Esempio n. 8
0
// Detect faces in m_srcImage (optionally on a half-resolution pyramid level
// for speed), draw rectangles around them and save the ROIs.
// Returns m_normalizeImage when at least one face was found, NULL otherwise
// (including when no image has been set via m_justSet).
IplImage* Harrlike::DetectSaveROI()
{
	if(m_justSet)
	{
		IplImage* small_image;
		CvMemStorage* storage = cvCreateMemStorage(0); // default block size: 64k
		CvSeq* faces = NULL;
		IplImage* result = NULL;

		/* Downscale to boost the speed of detection */
		if( m_PyrDown )
		{
			if(m_srcImage->nChannels == 1)
				small_image = cvCreateImage( cvSize(m_srcImage->width/2,m_srcImage->height/2), IPL_DEPTH_8U, 1 );
			else
				small_image = cvCreateImage( cvSize(m_srcImage->width/2,m_srcImage->height/2), IPL_DEPTH_8U, 3 );

			cvPyrDown( m_srcImage, small_image, CV_GAUSSIAN_5x5 );
			scale = 2;
		}
		else
		{
			small_image = m_srcImage;
		}

		faces = cvHaarDetectObjects( small_image, cascade, storage, 1.2, 2 , CV_HAAR_DO_CANNY_PRUNING );

		/* BUGFIX: release only the image we created. The old code also called
		 * cvReleaseImage(&small_image) unconditionally afterwards, which
		 * destroyed m_srcImage whenever PyrDown was disabled. */
		if( small_image != m_srcImage )
			cvReleaseImage( &small_image );

		/* Draw all faces with rectangles and save their ROIs.
		 * (cvHaarDetectObjects returns a non-NULL seq even with no hits,
		 * so check the element count, not just the pointer.) */
		if( faces != NULL && faces->total > 0 )
		{
			Draw_Rectangle(faces,scale);
			SaveROI(m_OriginalImage,faces);
			result = m_normalizeImage;
		}
		else
		{
			/* BUGFIX: old message used %d with no matching argument (UB) */
			printf("No face could be found!\n");
		}

		/* releasing the storage also frees the faces sequence;
		 * previously the no-face path leaked both */
		cvReleaseMemStorage( &storage );
		return result;
	}
	return NULL;
}
Esempio n. 9
0
/** Clear a SegmentedWorm struct but not dallocate memory.
 *  Each boundary/centerline sequence is emptied in place; Head and Tail are
 *  deliberately left untouched because their memory is not reallocated later.
 **/
void ClearSegmentedInfo(SegmentedWorm* SegWorm){
	if (SegWorm->LeftBound == NULL) {
		printf("SegWorm->LeftBound==NULL");
	} else {
		cvClearSeq(SegWorm->LeftBound);
	}

	if (SegWorm->RightBound == NULL) {
		printf("SegWorm->RightBound==NULL");
	} else {
		cvClearSeq(SegWorm->RightBound);
	}

	if (SegWorm->Centerline == NULL) {
		printf("SegWorm->Centerline==NULL");
	} else {
		cvClearSeq(SegWorm->Centerline);
	}
}
// the function draws all the squares in the image
// Reads quads (4 consecutive CvPoint elements) from `squares`, skips those
// rejected by isNotLicense(), draws the first accepted quad and its centre on
// a copy of the image, writes the centre back through `centre`, then releases
// the copy and empties the input sequence.
// NOTE(review): relies on globals `pt` (CvPoint[4] scratch buffer) and
// `wndname` declared elsewhere in this file; assumes squares->total is a
// multiple of 4 -- TODO confirm with the producer of `squares`.
void drawSquares( IplImage* img, CvSeq* squares,CvPoint& centre)  
{  
	CvSeqReader reader;  
	IplImage* cpy = cvCloneImage( img );  
	int i;  
	static	int c=0;  // count of accepted quads across calls (debug only)
	// initialize reader of the sequence  
	cvStartReadSeq( squares, &reader, 0 );  
	//	cout<<fabs(cvContourArea(squares,CV_WHOLE_SEQ)) <<endl;
	// read 4 sequence elements at a time (all vertices of a square)  
	for( i = 0; i < squares->total; i += 4 )  
	{  
		centre=cvPoint(0,0);
		CvPoint* rect = pt;  
		int count = 4;  

		// read 4 vertices  
		memcpy( pt, reader.ptr, squares->elem_size );  
		CV_NEXT_SEQ_ELEM( squares->elem_size, reader );  
		memcpy( pt + 1, reader.ptr, squares->elem_size );  
		CV_NEXT_SEQ_ELEM( squares->elem_size, reader );  
		memcpy( pt + 2, reader.ptr, squares->elem_size );  
		CV_NEXT_SEQ_ELEM( squares->elem_size, reader );  
		memcpy( pt + 3, reader.ptr, squares->elem_size );  
		CV_NEXT_SEQ_ELEM( squares->elem_size, reader );  


		/* shape check: skip quads that do not look like a license plate */
		if(isNotLicense(pt,count)) continue;
		// draw the square as a closed polyline   
		cvPolyLine( cpy, &rect, &count, 1, 1, CV_RGB(0,255,0), 3, CV_AA, 0 );  
		// centre = average of the four vertices
		for(int j=0;j<count;j++){
			centre.x+=rect[j].x;
			centre.y+=rect[j].y;
		}
		centre.x/=4;
		centre.y/=4;
		cvCircle( cpy, centre,2, CV_RGB(255,0,255), 3, CV_AA, 0 );  
		c++;
		// only the first accepted quad is processed
		break;
	}
	//cout<<"count: "<<c<<endl;

	// show the resultant image  
	cvNamedWindow( wndname, 0 );  
//	cvShowImage( wndname, cpy );  
	cvReleaseImage( &cpy );  
	cvClearSeq(squares);

}  
Esempio n. 11
0
// Perimeter of the blob's convex hull (0 when no hull is available).
// The hull sequence is emptied after its arc length has been measured.
double BlobGetHullPerimeter::operator()(Blob &blob)
{
	CvSeq *hull = blob.GetConvexHull();
	if( !hull )
		return 0;

	double result = fabs(cvArcLength(hull,CV_WHOLE_SEQ,1));
	cvClearSeq(hull);
	return result;
}
Esempio n. 12
0
// Filter the convex-hull points, keeping only those strictly above the hand
// centre (y < handcenter.y). Returns a new sequence cloned from the input
// (same header/element type) containing just the surviving points.
CvSeq *reghand::filthull(CvSeq *hullseq)
{
    const int thresh = handcenter.y;
    // clone to inherit header/element layout, then drop the cloned points
    CvSeq *kept = cvCloneSeq(hullseq);
    cvClearSeq(kept);
    for (int idx = 0; idx < hullseq->total; idx++)
    {
        CvPoint **ppt = CV_GET_SEQ_ELEM(CvPoint*, hullseq, idx);
        if ((*ppt)->y < thresh)
            cvSeqPush(kept, ppt);
    }
    return kept;
}
Esempio n. 13
0
 // Prepare the tracker for a new search: remember the region of interest and
 // the previous tracking rectangle, and (re)initialize the candidate-rect
 // sequence in the supplied (or previously stored) memory storage.
 // Returns 1 on success, 0 when no storage is available or the sequence
 // could not be created.
 inline int Init(const CvRect& roi, const CvTrackingRect& prev, CvMemStorage* mstg = NULL)
 {
     m_rROI = roi;
     m_trPrev = prev;
     // a storage passed in overrides any previously stored one
     if (NULL != mstg)
         m_mstgRects = mstg;
     if (NULL == m_mstgRects)
         return 0;
     // create the sequence on first use, otherwise just empty it
     if (NULL == m_seqRects)
         m_seqRects = cvCreateSeq(0, sizeof(CvSeq), sizeof(CvTrackingRect), m_mstgRects);
     else
         cvClearSeq(m_seqRects);
     if (NULL == m_seqRects)
         return 0;
     return 1;
 };
Esempio n. 14
0
// Area of the blob's convex hull (0 when no hull is available).
// The hull sequence is emptied after its area has been measured.
double BlobGetHullArea::operator()(Blob &blob)
{
	CvSeq *hull = blob.GetConvexHull();
	if( !hull )
		return 0;

	double result = fabs(cvContourArea(hull));
	cvClearSeq(hull);
	return result;
}
Esempio n. 15
0
 // Prepare the tracker for a new search: remember the region of interest and
 // the previous tracking rectangle, and (re)initialize the candidate-rect
 // sequence in the supplied (or previously stored) memory storage.
 // Returns TRUE on success, FALSE when no storage is available or the
 // sequence could not be created.
 inline int Init(const CvRect& roi, const CvTrackingRect& prev, CvMemStorage* mstg = NULL) {
     m_rROI = roi;
     m_trPrev = prev;
     // a storage passed in overrides any previously stored one
     if (NULL != mstg) {
         m_mstgRects = mstg;
     }
     if (NULL == m_mstgRects) {
         return FALSE;
     }
     // create the sequence on first use, otherwise just empty it
     if (NULL == m_seqRects) {
         m_seqRects = cvCreateSeq(0, sizeof(CvSeq), sizeof(CvTrackingRect), m_mstgRects);
     } else {
         cvClearSeq(m_seqRects);
     }
     if (NULL == m_seqRects) {
         return FALSE;
     }
     return TRUE;
 };
Esempio n. 16
0
/* GObject finalize: free every OpenCV resource owned by the face detector
 * and tear down the mutex before chaining up to the parent class. */
static void
kms_face_detector_finalize (GObject * object)
{
    KmsFaceDetector *facedetector = KMS_FACE_DETECTOR (object);

    /* release cached images (header-only and full image) */
    cvReleaseImageHeader (&facedetector->priv->cvImage);
    cvReleaseImage (&facedetector->priv->cvResizedImage);

    /* empty the sequence/storage first -- cvClearSeq and cvClearMemStorage
     * must not be called with NULL, hence the guards */
    if (facedetector->priv->pStorageFace != NULL)
        cvClearMemStorage (facedetector->priv->pStorageFace);
    if (facedetector->priv->pFaceRectSeq != NULL)
        cvClearSeq (facedetector->priv->pFaceRectSeq);

    /* then release storage and cascade */
    cvReleaseMemStorage (&facedetector->priv->pStorageFace);
    cvReleaseHaarClassifierCascade (&facedetector->priv->pCascadeFace);

    g_mutex_clear (&facedetector->priv->mutex);

    /* chain up to the parent finalize */
    G_OBJECT_CLASS (kms_face_detector_parent_class)->finalize (object);
}
Esempio n. 17
0
// Load each template image from the database, extract its external contour
// and append the contour's Hu moments to output_moments.
// The images are iterated in reverse so that push_back produces moments in
// the same order as image_database_vector.
// Exits the process on a missing image or an image with no contours.
void mvContours::init_contour_template_database (const char** image_database_vector, int num_images, std::vector<HU_MOMENTS> &output_moments) {
    // iterate in reverse dir because we push back onto the output_moments
    for (int i = num_images-1; i >= 0; i--) {
        IplImage* img = cvLoadImage(image_database_vector[i], CV_LOAD_IMAGE_GRAYSCALE);

        // BUGFIX: cvLoadImage returns NULL when the file is missing or
        // unreadable; the old code passed that NULL to cvFindContours and
        // crashed instead of reporting the bad path.
        if (img == NULL) {
            printf ("ERROR: Could not load contour_image %s\n", image_database_vector[i]);
            exit(1);
        }

        cvFindContours (img, m_storage, &m_contours, sizeof(CvContour), CV_RETR_EXTERNAL, CV_CHAIN_APPROX_SIMPLE);

        if (m_contours == NULL || m_contours->total == 0) {
            printf ("ERROR: No contours found when loading contour_image %s\n", image_database_vector[i]);
            exit(1);
        }
        else {
            // get the hu moments and add it to the vector
            HU_MOMENTS h;
            get_hu_moments (m_contours, h);
            output_moments.push_back(h);
            // clear sequence for next loop
            cvClearSeq(m_contours);
        }
        cvReleaseImage(&img);
    }
}
Esempio n. 18
0
/*
 * Use the slider bar to generate a rectangle in an arbitrary location and illuminate with it on the fly
 *
 * Builds an illumination montage in the worm's scratch storage, applies it in
 * both camera space and DLP space, then empties the montage sequence.
 * Returns 0 on completion.
 */
int DoOnTheFlyIllumination(Experiment* exp) {

	CvSeq* montage = CreateIlluminationMontage(exp->Worm->MemScratchStorage);

	/** Note, out of laziness I am hardcoding the grid dimensions to be Numsegments by number of segments **/

	/* translate the slider position into worm-space coordinates */
	CvPoint origin = ConvertSlidlerToWormSpace(exp->Params->IllumSquareOrig,exp->Params->DefaultGridSize);
	GenerateSimpleIllumMontage(montage, origin, exp->Params->IllumSquareRad, exp->Params->DefaultGridSize);

	/** Illuminate the worm **/
	/** ...in camera space **/
	IllumWorm(exp->Worm->Segmented, montage, exp->IlluminationFrame->iplimg,
			exp->Params->DefaultGridSize,exp->Params->IllumFlipLR);
	LoadFrameWithImage(exp->IlluminationFrame->iplimg, exp->IlluminationFrame);
	/** ... in DLP space **/
	IllumWorm(exp->segWormDLP, montage, exp->forDLP->iplimg,
			exp->Params->DefaultGridSize,exp->Params->IllumFlipLR);
	LoadFrameWithImage(exp->forDLP->iplimg, exp->forDLP);

	cvClearSeq(montage);

	/* BUGFIX: the function is declared to return int but previously fell off
	 * the end without a return statement (undefined behavior in C++). */
	return 0;
}
Esempio n. 19
0
// Remove convex-hull points that sit too close to their successor.
// The hull is treated as a circular list: each point is kept only if its
// distance to the next point (wrapping from last back to first) exceeds
// handradis/3. Returns a new sequence cloned from the input; when the input
// has 0 or 1 points the clone is returned unchanged.
CvSeq *reghand::elimNeighborHulls(CvSeq *hullseq)
{
    int disthreshold=handradis/3;
    CvSeq *filtedhullseq=cvCloneSeq(hullseq);
    if(hullseq->total<=1) return filtedhullseq;
    cvClearSeq(filtedhullseq);

    // One circular pass replaces the old duplicated loop + tail special case:
    // element i is compared with element (i+1) mod total.
    for(int i=0;i<hullseq->total;i++)
    {
        CvPoint **curdata = CV_GET_SEQ_ELEM(CvPoint*,hullseq,i);
        CvPoint currpt = **curdata;
        CvPoint nextpt = **CV_GET_SEQ_ELEM(CvPoint*,hullseq,(i+1)%hullseq->total);

        // plain squared terms instead of pow(x,2) -- same result, cheaper
        double dx = currpt.x - nextpt.x;
        double dy = currpt.y - nextpt.y;
        double distance = sqrt(dx*dx + dy*dy);

        if(distance>disthreshold) cvSeqPush(filtedhullseq,curdata);
    }
    return filtedhullseq;
}
Esempio n. 20
0
/*
 * This Function segments a worm.
 * It requires that certain information be present in the WormAnalysisData struct Worm
 * It requires Worm->Boundary be full
 * It requires that Params->NumSegments be greater than zero
 *
 * Pipeline: split the boundary at head/tail into two sides, resample them to
 * equal length, compute and smooth the centerline, resample it to NumSegments
 * points, then segment the left/right boundaries against the centerline.
 * Returns 0 on success, -1 on error (missing boundary or too few points).
 */
int SegmentWorm(WormAnalysisData* Worm, WormAnalysisParam* Params){
	if (cvSeqExists(Worm->Boundary) == 0){
		printf("Error! No boundary found in SegmentWorm()\n");
		return -1;
	}




	Worm->Segmented->NumSegments=Params->NumSegments;

	/***Clear Out any stale Segmented Information Already in the Worm Structure***/
	ClearSegmentedInfo(Worm->Segmented);

	Worm->Segmented->Head=Worm->Head;
	Worm->Segmented->Tail=Worm->Tail;

	/*** It would be nice to check that Worm->Boundary exists ***/

	/*** Clear Out Scratch Storage ***/
	cvClearMemStorage(Worm->MemScratchStorage);


	/*** Slice the boundary into left and right components ***/
	if (Worm->HeadIndex==Worm->TailIndex) printf("Error! Worm->HeadIndex==Worm->TailIndex in SegmentWorm()!\n");
	CvSeq* OrigBoundA=cvSeqSlice(Worm->Boundary,cvSlice(Worm->HeadIndex,Worm->TailIndex),Worm->MemScratchStorage,1);
	CvSeq* OrigBoundB=cvSeqSlice(Worm->Boundary,cvSlice(Worm->TailIndex,Worm->HeadIndex),Worm->MemScratchStorage,1);

	if (OrigBoundA->total < Params->NumSegments || OrigBoundB->total < Params->NumSegments ){
		printf("Error in SegmentWorm():\n\tWhen splitting  the original boundary into two, one or the other has less than the number of desired segments!\n");
		printf("OrigBoundA->total=%d\nOrigBoundB->total=%d\nParams->NumSegments=%d\n",OrigBoundA->total,OrigBoundB->total,Params->NumSegments);
		printf("Worm->HeadIndex=%d\nWorm->TailIndex=%d\n",Worm->HeadIndex,Worm->TailIndex);
		return -1; /** Andy make this return -1 **/

	}

	/* invert side B so both sides run head-to-tail */
	cvSeqInvert(OrigBoundB);


	/*** Resample One of the Two Boundaries so that both are the same length ***/

	//Create sequences to store the Normalized Boundaries
	CvSeq* NBoundA=	cvCreateSeq(CV_SEQ_ELTYPE_POINT,sizeof(CvSeq),sizeof(CvPoint),Worm->MemScratchStorage);
	CvSeq* NBoundB=cvCreateSeq(CV_SEQ_ELTYPE_POINT,sizeof(CvSeq),sizeof(CvPoint),Worm->MemScratchStorage);

	//Resample L&R boundary to have the same number of points as min(L,R)
	if (OrigBoundA->total > OrigBoundB->total){
		resampleSeq(OrigBoundA,NBoundA,OrigBoundB->total );
		NBoundB=OrigBoundB;
	}else{
		resampleSeq(OrigBoundB,NBoundB,OrigBoundA->total );
		NBoundA=OrigBoundA;
	}
	//Now both NBoundA and NBoundB are the same length.



	/*
	 * Now Find the Centerline
	 *
	 */

	/*** Clear out Stale Centerline Information ***/
	cvClearSeq(Worm->Centerline);

	/*** Compute Centerline, from Head To Tail ***/
	FindCenterline(NBoundA,NBoundB,Worm->Centerline);



	/*** Smooth the Centerline***/
	CvSeq* SmoothUnresampledCenterline = smoothPtSequence (Worm->Centerline, 0.5*Worm->Centerline->total/Params->NumSegments, Worm->MemScratchStorage);

	/*** Note: If you wanted to you could smooth the centerline a second time here. ***/


	/*** Resample the Centerline So it has the specified Number of Points ***/
	//resampleSeq(SmoothUnresampledCenterline,Worm->Segmented->Centerline,Params->NumSegments);

	resampleSeqConstPtsPerArcLength(SmoothUnresampledCenterline,Worm->Segmented->Centerline,Params->NumSegments);

	/** Save the location of the centerOfWorm as the point halfway down the segmented centerline **/
	Worm->Segmented->centerOfWorm= CV_GET_SEQ_ELEM( CvPoint , Worm->Segmented->Centerline, Worm->Segmented->NumSegments / 2 );

	/*** Remove Repeat Points***/
	//RemoveSequentialDuplicatePoints (Worm->Segmented->Centerline);

	/*** Use Marc's Perpendicular Segmentation Algorithm
	 *   To Segment the Left and Right Boundaries and store them
	 */
	SegmentSides(OrigBoundA,OrigBoundB,Worm->Segmented->Centerline,Worm->Segmented->LeftBound,Worm->Segmented->RightBound);
	return 0;

}
Esempio n. 21
0
/**
- FUNCTION: ExternPerimeter
- FUNCTIONALITY: Get extern perimeter (perimeter touching image borders)
- PARAMETERS:
	- maskImage: if != NULL, counts maskImage black pixels as external pixels and contour points touching
				 them are counted as external contour points.
	- xBorder: true to consider blobs touching horizontal borders as extern
	- yBorder: true to consider blobs touching vertical borders as extern
- RESULT:
	- the accumulated extern perimeter (cached in m_externPerimeter)
- RESTRICTIONS:
	- 
- AUTHOR: rborras
- CREATION DATE: 2008/05/05
- MODIFICATION: Date. Author. Description.
- NOTE: If CBlobContour::GetContourPoints aproximates contours with a method different that NONE,
		this function will not give correct results
*/
double CBlob::ExternPerimeter( IplImage *maskImage, bool xBorder /* = true */, bool yBorder /* = true */)
{
	t_PointList externContour, externalPoints;
	CvSeqReader reader;
	CvSeqWriter writer;
	CvPoint actualPoint, previousPoint;
	bool find = false;
	int i,j;
	int delta = 0;
	
	// it is calculated?  (-1 marks "not yet computed"; return cached value)
	if( m_externPerimeter != -1 )
	{
		return m_externPerimeter;
	}

	// get contour pixels
	externContour = m_externalContour.GetContourPoints();

	m_externPerimeter = 0;

	// there are contour pixels?
	if( externContour == NULL )
	{
		return m_externPerimeter;
	}

	cvStartReadSeq( externContour, &reader);

	// create a sequence with the external points of the blob
	externalPoints = cvCreateSeq( externContour->flags, externContour->header_size, externContour->elem_size, 
								  m_storage );
	cvStartAppendToSeq( externalPoints, &writer );
	previousPoint.x = -1;   // sentinel: no previous external point yet

	// which contour pixels touch border?
	for( j=0; j< externContour->total; j++)
	{
		CV_READ_SEQ_ELEM( actualPoint, reader);

		find = false;

		// pixel is touching border?
		// (bitwise & on bools acts as logical AND here; & binds tighter than
		// ||, so this parses as (xBorder & onXBorder) || (yBorder & onYBorder))
		if ( xBorder & ((actualPoint.x == 0) || (actualPoint.x == m_originalImageSize.width - 1 )) ||
			 yBorder & ((actualPoint.y == 0) || (actualPoint.y == m_originalImageSize.height - 1 )))
		{
			find = true;
		}
		else
		{
			if( maskImage != NULL )
			{
				// verify if some of 8-connected neighbours is black in mask
				// NOTE(review): the (x-1, y-1) addressing reads outside the
				// mask row when actualPoint.x == 0 or actualPoint.y == 0 and
				// the corresponding border flag is false -- confirm callers
				// guarantee a border margin in maskImage.
				char *pMask;
				
				// row above the point
				pMask = (maskImage->imageData + actualPoint.x - 1 + (actualPoint.y - 1) * maskImage->widthStep);
				
				for ( i = 0; i < 3; i++, pMask++ )
				{
					if(*pMask == 0 && !find ) 
					{
						find = true;
						break;
					}						
				}
				
				if(!find)
				{
					// same row as the point
					pMask = (maskImage->imageData + actualPoint.x - 1 + (actualPoint.y ) * maskImage->widthStep);
				
					for ( i = 0; i < 3; i++, pMask++ )
					{
						if(*pMask == 0 && !find ) 
						{
							find = true;
							break;
						}
					}
				}
			
				if(!find)
				{
					// row below the point
					pMask = (maskImage->imageData + actualPoint.x - 1 + (actualPoint.y + 1) * maskImage->widthStep);

					for ( i = 0; i < 3; i++, pMask++ )
					{
						if(*pMask == 0 && !find ) 
						{
							find = true;
							break;
						}
					}
				}
			}
		}

		if( find )
		{
			// Manhattan gap to the previous external point; a gap > 2 means
			// the external run was interrupted, so close the current segment
			if( previousPoint.x > 0 )
				delta = abs(previousPoint.x - actualPoint.x) + abs(previousPoint.y - actualPoint.y);

			// calculate separately each external contour segment 
			if( delta > 2 )
			{
				cvEndWriteSeq( &writer );
				m_externPerimeter += cvArcLength( externalPoints, CV_WHOLE_SEQ, 0 );
				
				cvClearSeq( externalPoints );
				cvStartAppendToSeq( externalPoints, &writer );
				delta = 0;
				previousPoint.x = -1;
			}

			CV_WRITE_SEQ_ELEM( actualPoint, writer );
			previousPoint = actualPoint;
		}
		
	}

	// close and measure the final segment
	cvEndWriteSeq( &writer );

	m_externPerimeter += cvArcLength( externalPoints, CV_WHOLE_SEQ, 0 );

	cvClearSeq( externalPoints );

	// divide by two because external points have one side inside the blob and the other outside
	// Perimeter of external points counts both sides, so it must be divided
	m_externPerimeter /= 2.0;
	
	return m_externPerimeter;
}
Esempio n. 22
0
// Extract SURF keypoints from the object and scene crops, match them with
// locatePlanarObject(), and on success store the projected object outline
// corners in border_x1..border_y4 (scene coordinates). Object keypoints are
// recomputed only when the object image content changed since the last call;
// scene keypoints are recomputed every call.
void FindObjectMain::process_surf()
{

	if(!object_image)
	{
// Only does greyscale
		object_image = cvCreateImage( 
			cvSize(object_image_w, object_image_h), 
			8, 
			1);
	}

	if(!scene_image)
	{
// Only does greyscale
		scene_image = cvCreateImage( 
			cvSize(scene_image_w, scene_image_h), 
			8, 
			1);
	}

// Select only region with image size
// Does this do anything?
	cvSetImageROI( object_image, cvRect( 0, 0, object_w, object_h ) );
	cvSetImageROI( scene_image, cvRect( 0, 0, scene_w, scene_h ) );

// Snapshot the previous object image so we can detect content changes below.
// NOTE(review): on the very first call object_image was just created and its
// pixel data is uninitialized, so this snapshot (and the memcmp below) reads
// indeterminate bytes -- in practice it just forces a first-pass extraction,
// but confirm.
	if(!prev_object) prev_object = new unsigned char[object_image_w * object_image_h];
	memcpy(prev_object, object_image->imageData, object_image_w * object_image_h);
	grey_crop((unsigned char*)scene_image->imageData, 
		get_input(scene_layer), 
		scene_x1, 
		scene_y1, 
		scene_x2, 
		scene_y2,
		scene_image_w,
		scene_image_h);


	grey_crop((unsigned char*)object_image->imageData, 
		get_input(object_layer), 
		object_x1, 
		object_y1, 
		object_x2, 
		object_y2,
		object_image_w,
		object_image_h);


	if(!storage) storage = cvCreateMemStorage(0);
	CvSURFParams params = cvSURFParams(500, 1);


//printf("FindObjectMain::process_surf %d\n", __LINE__);

// Only compute keypoints if the image changed
	if(memcmp(prev_object, object_image->imageData, object_image_w * object_image_h))
	{
		if(object_keypoints) cvClearSeq(object_keypoints);
		if(object_descriptors) cvClearSeq(object_descriptors);
		cvExtractSURF(object_image, 
			0, 
			&object_keypoints, 
			&object_descriptors, 
			storage, 
			params,
			0);
	}

//printf("FindObjectMain::process_surf %d object keypoints=%d\n", __LINE__, object_keypoints->total);
// Draw the keypoints
// 		for(int i = 0; i < object_keypoints->total; i++)
// 		{
//         	CvSURFPoint* r1 = (CvSURFPoint*)cvGetSeqElem( object_keypoints, i );
// 			int size = r1->size / 4;
// 			draw_rect(frame[object_layer], 
//   				r1->pt.x + object_x1 - size, 
//   				r1->pt.y + object_y1 - size, 
//   				r1->pt.x + object_x1 + size, 
//  				r1->pt.y + object_y1 + size);
// 		}


//printf("FindObjectMain::process_surf %d\n", __LINE__);

// TODO: make the surf data persistent & check for image changes instead
	if(scene_keypoints) cvClearSeq(scene_keypoints);
	if(scene_descriptors) cvClearSeq(scene_descriptors);
	cvExtractSURF(scene_image, 
		0, 
		&scene_keypoints, 
		&scene_descriptors, 
		storage, 
		params,
		0);

// Draw the keypoints
// 		for(int i = 0; i < scene_keypoints->total; i++)
// 		{
//         	CvSURFPoint* r1 = (CvSURFPoint*)cvGetSeqElem( scene_keypoints, i );
// 			int size = r1->size / 4;
// 			draw_rect(frame[scene_layer], 
//   				r1->pt.x + scene_x1 - size, 
//   				r1->pt.y + scene_y1 - size, 
//   				r1->pt.x + scene_x1 + size, 
//  				r1->pt.y + scene_y1 + size);
// 		}

// printf("FindObjectMain::process_surf %d %d %d scene keypoints=%d\n", 
// __LINE__, 
// scene_w,
// scene_h,
// scene_keypoints->total);

	int *point_pairs = 0;
	int total_pairs = 0;
// object outline in object-image coordinates...
	CvPoint src_corners[4] = 
	{
		{ 0, 0 }, 
		{ object_w, 0 }, 
		{ object_w, object_h }, 
		{ 0, object_h }
	};

// ...and its projection into the scene, filled in by locatePlanarObject
	CvPoint dst_corners[4] = 
	{
		{ 0, 0 },
		{ 0, 0 },
		{ 0, 0 },
		{ 0, 0 }
	};

//printf("FindObjectMain::process_surf %d\n", __LINE__);
	if(scene_keypoints->total &&
		object_keypoints->total &&
		locatePlanarObject(object_keypoints, 
		object_descriptors, 
		scene_keypoints, 
		scene_descriptors, 
		src_corners, 
		dst_corners,
		&point_pairs,
		&total_pairs))
	{





// Draw keypoints in the scene & object layer
		if(config.draw_keypoints)
		{
//printf("FindObjectMain::process_surf %d total pairs=%d\n", __LINE__, total_pairs);
			for(int i = 0; i < total_pairs; i++)
			{
        		CvSURFPoint* r1 = (CvSURFPoint*)cvGetSeqElem( object_keypoints, point_pairs[i * 2] );
        		CvSURFPoint* r2 = (CvSURFPoint*)cvGetSeqElem( scene_keypoints, point_pairs[i * 2 + 1] );


				int size = r2->size * 1.2 / 9 * 2;
				draw_rect(get_input(scene_layer), 
  					r2->pt.x + scene_x1 - size, 
  					r2->pt.y + scene_y1 - size, 
  					r2->pt.x + scene_x1 + size, 
 					r2->pt.y + scene_y1 + size);
				draw_rect(get_input(object_layer), 
  					r1->pt.x + object_x1 - size, 
  					r1->pt.y + object_y1 - size, 
  					r1->pt.x + object_x1 + size, 
 					r1->pt.y + object_y1 + size);
			}
		}


//printf("FindObjectMain::process_surf %d\n", __LINE__);
// Get object outline in the scene layer
		border_x1 = dst_corners[0].x + scene_x1;
		border_y1 = dst_corners[0].y + scene_y1;
		border_x2 = dst_corners[1].x + scene_x1;
		border_y2 = dst_corners[1].y + scene_y1;
		border_x3 = dst_corners[2].x + scene_x1;
		border_y3 = dst_corners[2].y + scene_y1;
		border_x4 = dst_corners[3].x + scene_x1;
		border_y4 = dst_corners[3].y + scene_y1;
//printf("FindObjectMain::process_surf %d\n", __LINE__);


		
	}
//printf("FindObjectMain::process_surf %d\n", __LINE__);



// for(int i = 0; i < object_y2 - object_y1; i++)
// {
// 	unsigned char *dst = get_input(object_layer)->get_rows()[i];
// 	unsigned char *src = (unsigned char*)object_image->imageData + i * (object_x2 - object_x1);
// 	for(int j = 0; j < object_x2 - object_x1; j++)
// 	{
// 		*dst++ = *src;
// 		*dst++ = 0x80;
// 		*dst++ = 0x80;
// 		src++;
// 	}
// }


// Frees the pair index buffer allocated by locatePlanarObject
	if(point_pairs) free(point_pairs);
}
/* cvDetectNewBlobs
 * Return 1 and fill blob pNewBlob  with
 * blob parameters if new blob is detected:
 */
/* Detects at most one new blob per call in the foreground mask.
 *
 * The detector keeps a sliding history of SEQ_SIZE frames: a blob list per
 * frame (m_pBlobLists) and a set of candidate tracks (m_TrackSeq).  Each call:
 *   1. extracts blobs from pFGMask (clustered contours or one-contour-one-blob),
 *   2. discards small blobs and blobs intersecting already-tracked ones,
 *   3. extends/duplicates/creates candidate tracks with the new blobs,
 *   4. accepts the full-length track whose trajectory best fits uniform
 *      linear motion, and emits its newest blob.
 *
 * pImg         - original frame; name is commented out in the signature
 * pFGMask      - 8-bit foreground mask searched for blobs
 * pNewBlobList - receives the single best new blob, if any
 * pOldBlobList - blobs already tracked; used to reject duplicates
 * Returns 1 if a new blob was appended to pNewBlobList, 0 otherwise.
 */
int CvBlobDetectorCC::DetectNewBlob(IplImage* /*pImg*/, IplImage* pFGMask, CvBlobSeq* pNewBlobList, CvBlobSeq* pOldBlobList)
{
    int         result = 0;
    CvSize      S = cvSize(pFGMask->width,pFGMask->height);

    /* Shift blob list: */
    /* Drop the oldest frame's blob list and make room for this frame's. */
    {
        int     i;
        if(m_pBlobLists[SEQ_SIZE-1]) delete m_pBlobLists[SEQ_SIZE-1];

        for(i=SEQ_SIZE-1; i>0; --i)  m_pBlobLists[i] = m_pBlobLists[i-1];

        m_pBlobLists[0] = new CvBlobSeq;

    }   /* Shift blob list. */

    /* Create contours and add new blobs to blob list: */
    {   /* Create blobs: */
        CvBlobSeq       Blobs;
        CvMemStorage*   storage = cvCreateMemStorage();

        if(m_Clastering)
        {   /* Glue contours: */
            cvFindBlobsByCCClasters(pFGMask, &Blobs, storage );
        }   /* Glue contours. */
        else
        { /**/
            /* One connected component -> one blob, sized from image moments. */
            IplImage*       pIB = cvCloneImage(pFGMask);
            CvSeq*          cnts = NULL;
            CvSeq*          cnt = NULL;
            cvThreshold(pIB,pIB,128,255,CV_THRESH_BINARY);
            cvFindContours(pIB,storage, &cnts, sizeof(CvContour), CV_RETR_EXTERNAL);

            /* Process each contour: */
            for(cnt = cnts; cnt; cnt=cnt->h_next)
            {
                CvBlob  NewBlob;
                /* Image moments: */
                double      M00,X,Y,XX,YY;
                CvMoments   m;
                CvRect      r = ((CvContour*)cnt)->rect;
                CvMat       mat;
                /* Reject contours whose bounding box is too small relative to the frame. */
                if(r.height < S.height*m_HMin || r.width < S.width*m_WMin) continue;
                cvMoments( cvGetSubRect(pFGMask,&mat,r), &m, 0 );
                M00 = cvGetSpatialMoment( &m, 0, 0 );
                if(M00 <= 0 ) continue;
                /* Centroid from first-order moments, size = 4*stddev per axis. */
                X = cvGetSpatialMoment( &m, 1, 0 )/M00;
                Y = cvGetSpatialMoment( &m, 0, 1 )/M00;
                XX = (cvGetSpatialMoment( &m, 2, 0 )/M00) - X*X;
                YY = (cvGetSpatialMoment( &m, 0, 2 )/M00) - Y*Y;
                NewBlob = cvBlob(r.x+(float)X,r.y+(float)Y,(float)(4*sqrt(XX)),(float)(4*sqrt(YY)));
                Blobs.AddBlob(&NewBlob);

            }   /* Next contour. */

            cvReleaseImage(&pIB);

        }   /* One contour - one blob. */

        {   /* Delete small and intersected blobs: */
            int i;
            for(i=Blobs.GetBlobNum(); i>0; i--)
            {
                CvBlob* pB = Blobs.GetBlob(i-1);

                if(pB->h < S.height*m_HMin || pB->w < S.width*m_WMin)
                {
                    Blobs.DelBlob(i-1);
                    continue;
                }

                if(pOldBlobList)
                {
                    int j;
                    for(j=pOldBlobList->GetBlobNum(); j>0; j--)
                    {
                        CvBlob* pBOld = pOldBlobList->GetBlob(j-1);
                        /* Overlap test: centre distance smaller than the sum of half-sizes on both axes. */
                        if((fabs(pBOld->x-pB->x) < (CV_BLOB_RX(pBOld)+CV_BLOB_RX(pB))) &&
                           (fabs(pBOld->y-pB->y) < (CV_BLOB_RY(pBOld)+CV_BLOB_RY(pB))))
                        {   /* Intersection detected, delete blob from list: */
                            Blobs.DelBlob(i-1);
                            break;
                        }
                    }   /* Check next old blob. */
                }   /*  if pOldBlobList. */
            }   /*  Check next blob. */
        }   /*  Delete small and intersected blobs. */

        {   /* Bubble-sort blobs by size: */
            /* Insertion-style pass ordering blobs by descending area. */
            int N = Blobs.GetBlobNum();
            int i,j;
            for(i=1; i<N; ++i)
            {
                for(j=i; j>0; --j)
                {
                    CvBlob  temp;
                    float   AreaP, AreaN;
                    CvBlob* pP = Blobs.GetBlob(j-1);
                    CvBlob* pN = Blobs.GetBlob(j);
                    AreaP = CV_BLOB_WX(pP)*CV_BLOB_WY(pP);
                    AreaN = CV_BLOB_WX(pN)*CV_BLOB_WY(pN);
                    if(AreaN < AreaP)break;
                    temp = pN[0];
                    pN[0] = pP[0];
                    pP[0] = temp;
                }
            }

            /* Copy only first 10 blobs: */
            for(i=0; i<MIN(N,10); ++i)
            {
                m_pBlobLists[0]->AddBlob(Blobs.GetBlob(i));
            }

        }   /* Sort blobs by size. */

        cvReleaseMemStorage(&storage);

    }   /* Create blobs. */

    {   /* Shift each track: */
        /* Age every candidate track by one frame; slot 0 is this frame's blob. */
        int j;
        for(j=0; j<m_TrackNum; ++j)
        {
            int     i;
            DefSeq* pTrack = m_TrackSeq+j;

            for(i=SEQ_SIZE-1; i>0; --i)
                pTrack->pBlobs[i] = pTrack->pBlobs[i-1];

            pTrack->pBlobs[0] = NULL;
            if(pTrack->size == SEQ_SIZE)pTrack->size--;
        }
    }   /* Shift each track. */

    /* Analyze blob list to find best blob trajectory: */
    {
        double      BestError = -1;
        int         BestTrack = -1;;
        CvBlobSeq*  pNewBlobs = m_pBlobLists[0];
        int         i;
        int         NewTrackNum = 0;
        for(i=pNewBlobs->GetBlobNum(); i>0; --i)
        {
            CvBlob* pBNew = pNewBlobs->GetBlob(i-1);
            int     j;
            int     AsignedTrack = 0;
            for(j=0; j<m_TrackNum; ++j)
            {
                double  dx,dy;
                DefSeq* pTrack = m_TrackSeq+j;
                /* After the shift above, pBlobs[1] holds the track's newest blob
                 * from the previous frame (valid whenever size > 0). */
                CvBlob* pLastBlob = pTrack->size>0?pTrack->pBlobs[1]:NULL;
                if(pLastBlob == NULL) continue;
                dx = fabs(CV_BLOB_X(pLastBlob)-CV_BLOB_X(pBNew));
                dy = fabs(CV_BLOB_Y(pLastBlob)-CV_BLOB_Y(pBNew));
                /* Gate: new blob must lie within 2x the last blob's size. */
                if(dx > 2*CV_BLOB_WX(pLastBlob) || dy > 2*CV_BLOB_WY(pLastBlob)) continue;
                AsignedTrack++;

                if(pTrack->pBlobs[0]==NULL)
                {   /* Fill existed track: */
                    pTrack->pBlobs[0] = pBNew;
                    pTrack->size++;
                }
                else if((m_TrackNum+NewTrackNum)<SEQ_NUM)
                {   /* Duplicate existed track: */
                    /* Track already claimed this frame: fork it so each
                     * candidate blob continues its own hypothesis. */
                    m_TrackSeq[m_TrackNum+NewTrackNum] = pTrack[0];
                    m_TrackSeq[m_TrackNum+NewTrackNum].pBlobs[0] = pBNew;
                    NewTrackNum++;
                }
            }   /* Next track. */

            if(AsignedTrack==0 && (m_TrackNum+NewTrackNum)<SEQ_NUM )
            {   /* Initialize new track: */
                m_TrackSeq[m_TrackNum+NewTrackNum].size = 1;
                m_TrackSeq[m_TrackNum+NewTrackNum].pBlobs[0] = pBNew;
                NewTrackNum++;
            }
        }   /* Next new blob. */

        m_TrackNum += NewTrackNum;

        /* Check each track: */
        for(i=0; i<m_TrackNum; ++i)
        {
            int     Good = 1;
            DefSeq* pTrack = m_TrackSeq+i;
            CvBlob* pBNew = pTrack->pBlobs[0];
            /* Only full-length tracks that matched a blob this frame compete. */
            if(pTrack->size != SEQ_SIZE) continue;
            if(pBNew == NULL ) continue;

            /* Check intersection last blob with existed: */
            if(Good && pOldBlobList)
            {
                int k;
                for(k=pOldBlobList->GetBlobNum(); k>0; --k)
                {
                    CvBlob* pBOld = pOldBlobList->GetBlob(k-1);
                    if((fabs(pBOld->x-pBNew->x) < (CV_BLOB_RX(pBOld)+CV_BLOB_RX(pBNew))) &&
                       (fabs(pBOld->y-pBNew->y) < (CV_BLOB_RY(pBOld)+CV_BLOB_RY(pBNew))))
                        Good = 0;
                }
            }   /* Check intersection last blob with existed. */

            /* Check distance to image border: */
            if(Good)
            {   /* Check distance to image border: */
                /* Distance to nearest border, in units of blob half-size. */
                float    dx = MIN(pBNew->x,S.width-pBNew->x)/CV_BLOB_RX(pBNew);
                float    dy = MIN(pBNew->y,S.height-pBNew->y)/CV_BLOB_RY(pBNew);
                if(dx < m_MinDistToBorder || dy < m_MinDistToBorder) Good = 0;
            }   /* Check distance to image border. */

            /* Check uniform motion: */
            if(Good)
            {   /* Check uniform motion: */
                double      Error = 0;
                int         N = pTrack->size;
                CvBlob**    pBL = pTrack->pBlobs;
                float       sum[2] = {0,0};
                float       jsum[2] = {0,0};
                float       a[2],b[2]; /* estimated parameters of moving x(t) = a*t+b*/
                int         j;

                for(j=0; j<N; ++j)
                {
                    float   x = pBL[j]->x;
                    float   y = pBL[j]->y;
                    sum[0] += x;
                    jsum[0] += j*x;
                    sum[1] += y;
                    jsum[1] += j*y;
                }

                /* Closed-form least-squares line fit over t = 0..N-1 for each axis. */
                a[0] = 6*((1-N)*sum[0]+2*jsum[0])/(N*(N*N-1));
                b[0] = -2*((1-2*N)*sum[0]+3*jsum[0])/(N*(N+1));
                a[1] = 6*((1-N)*sum[1]+2*jsum[1])/(N*(N*N-1));
                b[1] = -2*((1-2*N)*sum[1]+3*jsum[1])/(N*(N+1));

                for(j=0; j<N; ++j)
                {
                    Error +=
                        pow(a[0]*j+b[0]-pBL[j]->x,2)+
                        pow(a[1]*j+b[1]-pBL[j]->y,2);
                }

                /* RMS residual of the fit; reject jittery or too-fast tracks. */
                Error = sqrt(Error/N);

                if( Error > S.width*0.01 ||
                    fabs(a[0])>S.width*0.1 ||
                    fabs(a[1])>S.height*0.1)
                    Good = 0;

                /* New best trajectory: */
                if(Good && (BestError == -1 || BestError > Error))
                {   /* New best trajectory: */
                    BestTrack = i;
                    BestError = Error;
                }   /* New best trajectory. */
            }   /*  Check uniform motion. */
        }   /*  Next track. */

        #if 0
        {   /**/
            printf("BlobDetector configurations = %d [",m_TrackNum);
            int i;
            for(i=0; i<SEQ_SIZE; ++i)
            {
                printf("%d,",m_pBlobLists[i]?m_pBlobLists[i]->GetBlobNum():0);
            }
            printf("]\n");
        }
        #endif

        if(BestTrack >= 0)
        {   /* Put new blob to output and delete from blob list: */
            assert(m_TrackSeq[BestTrack].size == SEQ_SIZE);
            assert(m_TrackSeq[BestTrack].pBlobs[0]);
            pNewBlobList->AddBlob(m_TrackSeq[BestTrack].pBlobs[0]);
            m_TrackSeq[BestTrack].pBlobs[0] = NULL;
            m_TrackSeq[BestTrack].size--;
            result = 1;
        }   /* Put new blob to output and mark in blob list to delete. */
    }   /*  Analyze blod list to find best blob trajectory. */

    {   /* Delete bad tracks: */
        /* Compact the track array: any track that failed to match a blob this
         * frame is replaced by the last track (order is not preserved). */
        int i;
        for(i=m_TrackNum-1; i>=0; --i)
        {   /* Delete bad tracks: */
            if(m_TrackSeq[i].pBlobs[0]) continue;
            if(m_TrackNum>0)
                m_TrackSeq[i] = m_TrackSeq[--m_TrackNum];
        }   /* Delete bad tracks: */
    }

#ifdef USE_OBJECT_DETECTOR
    /* NOTE(review): this section references pImg, but the parameter name is
     * commented out in the signature above — it cannot compile as-is when
     * USE_OBJECT_DETECTOR is defined.  Confirm against the build config. */
    if( m_split_detector && pNewBlobList->GetBlobNum() > 0 )
    {
        int num_new_blobs = pNewBlobList->GetBlobNum();
        int i = 0;

        if( m_roi_seq ) cvClearSeq( m_roi_seq );
        m_debug_blob_seq.Clear();
        for( i = 0; i < num_new_blobs; ++i )
        {
            CvBlob* b = pNewBlobList->GetBlob(i);
            CvMat roi_stub;
            CvMat* roi_mat = 0;
            CvMat* scaled_roi_mat = 0;

            CvDetectedBlob d_b = cvDetectedBlob( CV_BLOB_X(b), CV_BLOB_Y(b), CV_BLOB_WX(b), CV_BLOB_WY(b), 0 );
            m_debug_blob_seq.AddBlob(&d_b);

            /* Scale factor that maps the blob height onto the detector's
             * minimum window height. */
            float scale = m_param_roi_scale * m_min_window_size.height / CV_BLOB_WY(b);

            float b_width =   MAX(CV_BLOB_WX(b), m_min_window_size.width / scale)
                            + (m_param_roi_scale - 1.0F) * (m_min_window_size.width / scale)
                            + 2.0F * m_max_border / scale;
            float b_height = CV_BLOB_WY(b) * m_param_roi_scale + 2.0F * m_max_border / scale;

            /* ROI centred on the blob, clipped to the image bounds. */
            CvRect roi = cvRectIntersection( cvRect( cvFloor(CV_BLOB_X(b) - 0.5F*b_width),
                                                     cvFloor(CV_BLOB_Y(b) - 0.5F*b_height),
                                                     cvCeil(b_width), cvCeil(b_height) ),
                                             cvRect( 0, 0, pImg->width, pImg->height ) );
            if( roi.width <= 0 || roi.height <= 0 )
                continue;

            if( m_roi_seq ) cvSeqPush( m_roi_seq, &roi );

            roi_mat = cvGetSubRect( pImg, &roi_stub, roi );
            scaled_roi_mat = cvCreateMat( cvCeil(scale*roi.height), cvCeil(scale*roi.width), CV_8UC3 );
            cvResize( roi_mat, scaled_roi_mat );

            m_detected_blob_seq.Clear();
            m_split_detector->Detect( scaled_roi_mat, &m_detected_blob_seq );
            cvReleaseMat( &scaled_roi_mat );

            for( int k = 0; k < m_detected_blob_seq.GetBlobNum(); ++k )
            {
                CvDetectedBlob* b = (CvDetectedBlob*) m_detected_blob_seq.GetBlob(k);

                /* scale and shift each detected blob back to the original image coordinates */
                CV_BLOB_X(b) = CV_BLOB_X(b) / scale + roi.x;
                CV_BLOB_Y(b) = CV_BLOB_Y(b) / scale + roi.y;
                CV_BLOB_WX(b) /= scale;
                CV_BLOB_WY(b) /= scale;

                CvDetectedBlob d_b = cvDetectedBlob( CV_BLOB_X(b), CV_BLOB_Y(b), CV_BLOB_WX(b), CV_BLOB_WY(b), 1,
                        b->response );
                m_debug_blob_seq.AddBlob(&d_b);
            }

            if( m_detected_blob_seq.GetBlobNum() > 1 )
            {
                /*
                 * Split blob.
                 * The original blob is replaced by the first detected blob,
                 * remaining detected blobs are added to the end of the sequence:
                 */
                CvBlob* first_b = m_detected_blob_seq.GetBlob(0);
                CV_BLOB_X(b)  = CV_BLOB_X(first_b);  CV_BLOB_Y(b)  = CV_BLOB_Y(first_b);
                CV_BLOB_WX(b) = CV_BLOB_WX(first_b); CV_BLOB_WY(b) = CV_BLOB_WY(first_b);

                for( int j = 1; j < m_detected_blob_seq.GetBlobNum(); ++j )
                {
                    CvBlob* detected_b = m_detected_blob_seq.GetBlob(j);
                    pNewBlobList->AddBlob(detected_b);
                }
            }
        }   /* For each new blob. */

        for( i = 0; i < pNewBlobList->GetBlobNum(); ++i )
        {
            CvBlob* b = pNewBlobList->GetBlob(i);
            CvDetectedBlob d_b = cvDetectedBlob( CV_BLOB_X(b), CV_BLOB_Y(b), CV_BLOB_WX(b), CV_BLOB_WY(b), 2 );
            m_debug_blob_seq.AddBlob(&d_b);
        }
    }   // if( m_split_detector )
#endif

    return result;

}   /* cvDetectNewBlob */
/* Splits the input image into PlaneNumber intensity bands, finds the external
 * contours in each band, fits an ellipse to every contour with more than 10
 * points (up to MaxEllip total), and copies the ellipses into
 * TheTargetsEllipses in priority-queue order (ordering defined by
 * TheEllipse's operator< — confirm against that class).
 *
 * TheInput    - single-channel image to analyse (band thresholds assume the
 *               0..255 range of IPL_DEPTH_8U — TODO confirm)
 * PlaneNumber - number of intensity bands to scan
 */
void defense::ImageToEllipseList(IplImage* TheInput,int PlaneNumber){

    priority_queue<TheEllipse, vector<TheEllipse>,less<vector<TheEllipse>::value_type> > EllipQueue;
    
    TheTargetsEllipses.clear();
    
    CvMemStorage* G_storage=NULL;
	G_storage=cvCreateMemStorage(0);   
    CvSeq* contour = 0;
    IplImage * Maska;
    // Per-band binary mask, same size as the input.
    Maska = cvCreateImage( cvGetSize(TheInput),IPL_DEPTH_8U,1); 
    int TotalEllip=0;
    
    for (int k=0;k<PlaneNumber;k++){ 
        // Select pixels in band [(k-1), k] * 255/PlaneNumber.
        // NOTE(review): for k==0 the lower bound is negative, so the first
        // band only matches zero-valued pixels; looks like an off-by-one
        // (k..k+1 may have been intended) — confirm with the author.
        cvInRangeS(TheInput,cvScalarAll((k-1)*255/(float)PlaneNumber),cvScalarAll(k*255/(float)PlaneNumber),Maska);
        // Median blur to remove speckle noise before contour extraction.
        cvSmooth(Maska,Maska,CV_MEDIAN,3);  
        // NC (number of contours found) is currently unused.
        int NC=cvFindContours( Maska, G_storage, &contour, sizeof(CvContour), 
                              CV_RETR_EXTERNAL, CV_CHAIN_APPROX_TC89_L1 );
            for( ; contour != 0; contour = contour->h_next )
            {
                
                // Only fit contours with enough points, and respect the cap.
                if ((contour->total > 10 )&&(TotalEllip<MaxEllip)){
                    
                    CvMat* CountArray;
                    CvBox2D Ellipdesc;
                    CvPoint2D32f * OtroArray;
                    // Copy contour points into a float array for cvFitEllipse2.
                    OtroArray = new CvPoint2D32f[contour->total];
                    for(int q=0;q<contour->total;q++){
                        CvPoint* p = (CvPoint*)cvGetSeqElem( contour, q );
                        OtroArray[q].x = (float)p->x;
                        OtroArray[q].y=(float)p->y;
                    }
                    // Wrap the point array in a CvMat header (no data copy).
                    CountArray=cvCreateMatHeader(contour->total,1,CV_32FC2);
                    cvInitMatHeader(CountArray,contour->total,1,CV_32FC2,OtroArray);
                    // calculating the best ellipse	
                    Ellipdesc=cvFitEllipse2(CountArray);
                    
                    
                    // Tag each ellipse with its band's representative grey level.
                    EllipQueue.push(TheEllipse(Ellipdesc.center.x,
                                               Ellipdesc.center.y,
                                               Ellipdesc.size.width,
                                               Ellipdesc.size.height,
                                               Ellipdesc.angle,
                                               k*255/PlaneNumber));
                    TotalEllip++;
                    delete [] OtroArray;
                    cvReleaseMat(&CountArray);  
                } // end of if contour-> total
                
                
            } // end of for contours
            

        
    } // end For the Planes
    // Drain the queue in priority order into the output list.
    while (!EllipQueue.empty()){
        TheTargetsEllipses.push_back(EllipQueue.top());
        EllipQueue.pop();
    }
    
    cvReleaseImage(&Maska);
    
    // releasing mem storages
    // NOTE(review): contour is always NULL here (the traversal loop ends at
    // h_next == 0), so this cvClearSeq is dead code; the storage release
    // below reclaims all contour memory anyway.
    if (contour!=NULL){cvClearSeq(contour);}
    //cvClearMemStorage(storage);
    if (G_storage!=NULL){cvReleaseMemStorage(&G_storage);}
    
    
    

}
Esempio n. 25
0
vector<Face> FaceDetect::cascadeResult(const IplImage* inputImage, CvHaarClassifierCascade* casc,
                                       const DetectObjectParameters &params)
{
    // Reuse the shared detection storage; drop whatever a previous run left in it.
    cvClearMemStorage(d->storage);

    vector<Face> detected;

    // Without a loaded cascade there is nothing to run — report and bail out.
    if (!casc)
    {
        cerr << "ERROR: Could not load classifier cascade." << endl;
        return detected;
    }

    // Run the Haar detector; it yields a growable sequence of CvRect hits,
    // one per face candidate found in the image.
    CvSeq* hits = cvHaarDetectObjects(inputImage,
            casc,
            d->storage,
            params.searchIncrement,                // scale step between detection passes
            params.grouping,                       // drop groups of fewer than n detections
            params.flags,                          // optional pre-filtering (e.g. edge test)
            params.minSize                         // smallest face size considered
    );

    // Turn every hit rectangle into a Face built from its two opposite corners.
    const int hitCount = hits ? hits->total : 0;
    for (int idx = 0; idx < hitCount; ++idx)
    {
        CvRect* box = (CvRect*) cvGetSeqElem(hits, idx);

        const int left   = box->x;
        const int top    = box->y;
        const int right  = box->x + box->width;
        const int bottom = box->y + box->height;

        detected.push_back(Face(left, top, right, bottom));
    }

    cvClearSeq(hits);

    // Please don't delete next line even if commented out. It helps with testing intermediate results.
    //LibFaceUtils::showImage(inputImage, detected);

    return detected;
}
Esempio n. 26
0
void ControlWidget::MatchingImage()
{
    if(this->m_storage_Matching == NULL) {
        this->matching_image = cvCreateImage(cvGetSize(this->list_imagerd), IPL_DEPTH_8U, 1);
        this->m_storage_Matching = cvCreateMemStorage(0);
    }
    else {
        this->matching_image = cvCreateImage(cvGetSize(this->list_imagerd), IPL_DEPTH_8U, 1);
        cvClearMemStorage(this->m_storage_Matching);
    }

    for(int i = 0; i < 12; ++i)
    {
        this->True_Point[i] = 0;
    }

    CvSeq* keypoints2;
    CvSeq* descriptors2;

    CvSURFParams params;
    params = cvSURFParams(this->surf_Hessian, 1);

    cvExtractSURF(this->gray_list_image, 0, &keypoints2, &descriptors2, this->m_storage_Matching,
                  params);

    cv::Vector<int> ptpairs;

    this->findPairs(this->image_Keypoints, this->image_Descriptors, keypoints2, descriptors2,
                    ptpairs);

    this->matching_image = cvCloneImage(this->list_imagerd);

    for(int i = 0; i < (int)ptpairs.size(); i += 2)
    {
        CvSURFPoint* pt1 = (CvSURFPoint*)cvGetSeqElem(this->image_Keypoints, ptpairs[i]);
        CvSURFPoint* pt2 = (CvSURFPoint*)cvGetSeqElem(keypoints2, ptpairs[i + 1]);

        CvPoint center;
        int radius;

        center.x = cvRound(pt2->pt.x);
        center.y = cvRound(pt2->pt.y);
//        radius = cvRound(pt2->size * 1.2 / 9.0 * 2.0);
//        cvCircle(this->matching_image, center, radius, cvScalar(0, 0, 255), 1, 8, 0);
        cvCircle(this->matching_image, center, 2, cvScalar(0, 0, 255), -1, 0, 0);

        this->JudgePairs(center.x, center.y);
    }

    QImage Matching_Image = QImage((const unsigned char*)(this->matching_image->imageData),
                              this->matching_image->width, this->matching_image->height,
                              QImage::Format_RGB888).rgbSwapped();

    this->bufferMatchingImage = new QPixmap();
    *bufferMatchingImage = QPixmap::fromImage(Matching_Image);
    *bufferMatchingImage = bufferMatchingImage->scaled(800, 300);

    cvZero(this->matching_image);
    cvClearSeq(keypoints2);
    cvClearSeq(descriptors2);
}
Esempio n. 27
0
/*
 * Main thread for Kinect input, vision processing, and network send - everything, really.
 */
void *cv_threadfunc (void *ptr) {
	// Images for openCV
	IplImage* timg = cvCloneImage(rgbimg); // Image we do our processing on
	IplImage* dimg = cvCloneImage(timg); // Image we draw on
	CvSize sz = cvSize( timg->width & -2, timg->height & -2);
	IplImage* outimg = cvCreateImage(sz, 8, 3);

	// Mem. mgmt. Remember to clear each time we run loop.
	CvMemStorage* storage = cvCreateMemStorage(0);

	// Set region of interest
	cvSetImageROI(timg, cvRect(0, 0, sz.width, sz.height));
	if (display) { cvSetImageROI(dimg, cvRect(0, 0, sz.width, sz.height)); }

	// Open network socket.
	CRRsocket = openSocket();
	if (CRRsocket < 0) pthread_exit(NULL);

	/*
	 * MAIN LOOP
	 */
	while (1) 
	{ 
		// Sequence to run ApproxPoly on
		CvSeq* polyseq = cvCreateSeq( CV_SEQ_KIND_CURVE | CV_32SC2, sizeof(CvSeq), sizeof(CvPoint), storage );
		CvSeq* contours; // Raw contours list
		CvSeq* hull; // Current convex hull
		int hullcount; // # of points in hull

		/* PULL RAW IMAGE FROM KINECT */
		pthread_mutex_lock( &mutex_rgb );
		if (display) { cvCopy(rgbimg, dimg, 0); }
		cvCopy(rgbimg, timg, 0);
		pthread_mutex_unlock( &mutex_rgb );

		/* DILATE */
		IplConvKernel* element = cvCreateStructuringElementEx(3, 3, 1, 1, 0);
		IplConvKernel* element2 = cvCreateStructuringElementEx(5, 5, 2, 2, 0);
		cvDilate(timg, timg, element2, 1);
		cvErode(timg, timg, element, 1);

		/* THRESHOLD*/
		cvThreshold(timg, timg, 100, 255, CV_THRESH_BINARY);

		/* OUTPUT PROCESSED OR RAW IMAGE (FindContours destroys image) */
		if (display) { cvCvtColor(dimg, outimg, CV_GRAY2BGR); }

		/* CONTOUR FINDING */
		cvFindContours(timg, storage, &contours, sizeof(CvContour), CV_RETR_LIST, CV_CHAIN_APPROX_SIMPLE, cvPoint(0,0));
 
		/* CONVEX HULL + POLYGON APPROXIMATION + CONVERT TO RECTANGLE + FILTER FOR INVALID RECTANGLES */ 
		// Store points to draw line between
		CvPoint* draw1;
		CvPoint* draw2; 
		vector<PolyVertices> rectangleList;

		while (contours) // Run for all polygons
		{
			// List of raw rectangles
			PolyVertices fullrect;

			// Filter noise
			if (fabs(cvContourArea(contours, CV_WHOLE_SEQ)) > 600)
			{
				// Get convex hull
				hull = cvConvexHull2( contours, storage, CV_CLOCKWISE, 1 );
				hullcount = hull->total;

				// Draw hull (red line)
				if (display) {
					draw1 = (CvPoint*)cvGetSeqElem(hull, hullcount - 1);
					for (int i = 0; i < hullcount; i++)
					{
						draw2 = (CvPoint*)cvGetSeqElem( hull, i );
						cvLine( outimg, *draw1, *draw2, CV_RGB(255,0,0), 1, 8, 0 );
						draw1 = draw2;
					}
				}

				// Convert polys from convex hull to rectangles, fill list
				polyToQuad(hull, &fullrect, outimg);

				// Filter for bad rectangles
				if(!(fullrect.points[0] == NULL || fullrect.points[1] == NULL || 
					fullrect.points[2] == NULL || fullrect.points[3] == NULL)
					&& !fullrect.isMalformed())
				{
					/* FILL rectangleList */
					rectangleList.push_back(fullrect);

					#ifdef DEBUG_MAIN
					printf("RESULT: (%d,%d), (%d,%d), (%d,%d), (%d,%d)\n",
						fullrect.points[0]->x, fullrect.points[0]->y, 
						fullrect.points[1]->x, fullrect.points[1]->y,
						fullrect.points[2]->x, fullrect.points[2]->y, 
						fullrect.points[3]->x, fullrect.points[3]->y);
					fflush(stdout);
					#endif
				}

			}
			cvClearSeq(polyseq);
			contours = contours->h_next;
		}

		/* FILTER OVERLAPPING RECTANGLES */
		FilterInnerRects(rectangleList);

		/* SORT INTO CORRECT BUCKET */
		SortRects(rectangleList);

		/* DRAW & PROCESS MATH; FILL SEND STRUCT */
		// TODO: Might want to make the math stuff static for efficiency.
		RobotMath robot;
		TrackingData outgoing;
		memset(&outgoing, 0, sizeof(TrackingData));

		// Fill packets
		// Packet fields are unsigned 16bit integers, so we need to scale them up
		// Currently both dist and angle scaled 100x (hundredths precision)
		// NOTE:
		// Currently correct results are only calculated by using bottom basket and constant for top.
		if (rectangleList[0].isValid())
		{
			outgoing.distHigh = 100 * robot.GetDistance(*(rectangleList[0].points[2]), *(rectangleList[0].points[3]), 0);
			outgoing.angleHigh = 100 * robot.GetAngle(*(rectangleList[0].points[2]), *(rectangleList[0].points[3]));
		}
//		if (rectangleList[1].isValid())
//		{
//			outgoing.distLeft = 100 * robot.GetDistance(*(rectangleList[1].points[2]), *(rectangleList[1].points[3]), 1);
//			outgoing.angleLeft = 100 * robot.GetAngle(*(rectangleList[1].points[2]), *(rectangleList[1].points[3]));
//		}
//		if (rectangleList[2].isValid())
//		{
//			outgoing.distRight = 100 * robot.GetDistance(*(rectangleList[2].points[2]), *(rectangleList[2].points[3]), 2);
//			outgoing.angleRight = 100 * robot.GetAngle(*(rectangleList[2].points[2]), *(rectangleList[2].points[3]));
//		}
		if (rectangleList[3].isValid())
		{
			outgoing.distLow = 100 * robot.GetDistance(*(rectangleList[3].points[2]), *(rectangleList[3].points[3]), 3);
			outgoing.angleLow = 100 * robot.GetAngle(*(rectangleList[3].points[2]), *(rectangleList[3].points[3]));
		}

		// Draw filtered rects (thick blue line)
		if (display) {
			for (int i = 0; i < 4; i++)
			{
				if (outimg && rectangleList[i].isValid())
				{
					cvLine( outimg, *(rectangleList[i].points[3]), *(rectangleList[i].points[2]), CV_RGB(0,0,255), 2, 8, 0 );
					cvLine( outimg, *(rectangleList[i].points[2]), *(rectangleList[i].points[0]), CV_RGB(0,0,255), 2, 8, 0 );
					cvLine( outimg, *(rectangleList[i].points[0]), *(rectangleList[i].points[1]), CV_RGB(0,0,255), 2, 8, 0 );
					cvLine( outimg, *(rectangleList[i].points[1]), *(rectangleList[i].points[3]), CV_RGB(0,0,255), 2, 8, 0 );
				}
			}
		}

		#ifdef DEBUG_MAIN
		printf("Top distance: %d\n", outgoing.distHigh);
		printf("Top angle: %d\n\n", outgoing.angleHigh);
		#endif

		CvPoint cent1 = cvPoint(320, 0);
		CvPoint cent2 = cvPoint(320, 480);
 		if (display) { cvLine( outimg, cent1, cent2, CV_RGB(0,255,0), 1, 8, 0 ); }

		/* SEND TO CRIO */
		sendData(&outgoing, CRRsocket);

		if( cvWaitKey( 15 )==27 )
		{
			// Empty for now.
		}

		/* DISPLAY */
		if (display) { cvShowImage (FREENECTOPENCV_WINDOW_N,outimg); }
		
		/* CLEANUP */
		cvClearMemStorage(storage);
	}
	pthread_exit(NULL);
}
Esempio n. 28
0
/**
- FUNCTION: ClearEdges
- FUNCTIONALITY: Delete current blob edges
- PARAMETERS:
- RESULT:
- RESTRICTIONS:
- AUTHOR: Ricard Borràs
- CREATION DATE: 25-05-2005.
- MODIFICATION: Date. Author. Description.
*/
void CBlob::ClearEdges()
{
	// Remove the edge points (vertices) of the deleted blob.
	// cvClearSeq() only empties the sequence; the memory itself is
	// reclaimed when the owning CvMemStorage is released or cleared.
	cvClearSeq( edges );
}
Esempio n. 29
0
//! Multi-level threshold contour search inside this face element's ROI.
//!
//! Builds a table of up to nLayers gray levels (biased to sample more densely
//! around the color of the rectangle tracked in the previous frame,
//! m_trPrev.iColor), thresholds `img` at each level into `thresh`, runs
//! cvFindContours on the result, and pushes the bounding rect of every
//! contour (external and first-level holes) that lies inside m_rROI and is
//! larger than dMinSize on both sides into m_seqRects as a CvTrackingRect.
//!
//! @param img      source grayscale image (ROI is set/reset inside).
//! @param thresh   scratch image receiving each binary threshold result.
//! @param nLayers  number of threshold levels to try.
//! @param dMinSize minimum width AND height for a candidate rectangle.
void CvFaceElement::FindContours(IplImage* img, IplImage* thresh, int nLayers, int dMinSize)
{
    CvSeq* seq;
    // Work on the element ROI grown by 1 pixel so contours touching the
    // border can still close.
    CvRect roi = m_rROI;
    Extend(roi, 1);
    cvSetImageROI(img, roi);
    cvSetImageROI(thresh, roi);
    // layers
    int colors[MAX_LAYERS] = {0};
    int iMinLevel = 0, iMaxLevel = 255;
    float step, power;
    // NOTE(review): ThresholdingParam presumably derives the usable gray-level
    // range [iMinLevel, iMaxLevel] from the image histogram — confirm.
    ThresholdingParam(img, nLayers / 2, iMinLevel, iMaxLevel, step, power, 4);
    // [iMinLevelPrev, iMaxLevelPrev] is the window of levels around the color
    // tracked in the previous frame; degenerate (empty) if no previous color.
    int iMinLevelPrev = iMinLevel;
    int iMaxLevelPrev = iMinLevel;
    if (m_trPrev.iColor != 0)
    {
        iMinLevelPrev = m_trPrev.iColor - nLayers / 2;
        iMaxLevelPrev = m_trPrev.iColor + nLayers / 2;
    }
    // Clamp the previous-color window into [iMinLevel, iMaxLevel], shifting
    // the opposite bound to preserve the window width where possible.
    if (iMinLevelPrev < iMinLevel)
    {
        iMaxLevelPrev += iMinLevel - iMinLevelPrev;
        iMinLevelPrev = iMinLevel;
    }
    if (iMaxLevelPrev > iMaxLevel)
    {
        iMinLevelPrev -= iMaxLevelPrev - iMaxLevel;
        if (iMinLevelPrev < iMinLevel)
            iMinLevelPrev = iMinLevel;
        iMaxLevelPrev = iMaxLevel;
    }
    // Levels remaining for the coarse sweep outside the previous-color window
    // (the window itself is sampled at a fixed step of 2 below).
    int n = nLayers;
    n -= (iMaxLevelPrev - iMinLevelPrev + 1) / 2;
    step = float(iMinLevelPrev - iMinLevel + iMaxLevel - iMaxLevelPrev) / float(n);
    int j = 0;
    float level;
    // Fill colors[]: coarse below the window, fine (step 2) inside it,
    // coarse above it; j never exceeds nLayers.
    for (level = (float)iMinLevel; level < iMinLevelPrev && j < nLayers; level += step, j++)
        colors[j] = int(level + 0.5);
    for (level = (float)iMinLevelPrev; level < iMaxLevelPrev && j < nLayers; level += 2.0, j++)
        colors[j] = int(level + 0.5);
    for (level = (float)iMaxLevelPrev; level < iMaxLevel && j < nLayers; level += step, j++)
        colors[j] = int(level + 0.5);
    //
    for (int i = 0; i < nLayers; i++)
    {
        cvThreshold(img, thresh, colors[i], 255.0, CV_THRESH_BINARY);
        if (cvFindContours(thresh, m_mstgRects, &seq, sizeof(CvContour), CV_RETR_CCOMP, CV_CHAIN_APPROX_SIMPLE))
        {
            CvTrackingRect cr;
            // CV_RETR_CCOMP: walk outer contours and their direct holes.
            for (CvSeq* external = seq; external; external = external->h_next)
            {
                cr.r = cvContourBoundingRect(external);
                // Translate from ROI-local back to image coordinates.
                Move(cr.r, roi.x, roi.y);
                if (RectInRect(cr.r, m_rROI) && cr.r.width > dMinSize  && cr.r.height > dMinSize)
                {
                    cr.ptCenter = Center(cr.r);
                    cr.iColor = colors[i];  // remember which level produced it
                    cvSeqPush(m_seqRects, &cr);
                }
                for (CvSeq* internal = external->v_next; internal; internal = internal->h_next)
                {
                    cr.r = cvContourBoundingRect(internal);
                    Move(cr.r, roi.x, roi.y);
                    if (RectInRect(cr.r, m_rROI) && cr.r.width > dMinSize  && cr.r.height > dMinSize)
                    {
                        cr.ptCenter = Center(cr.r);
                        cr.iColor = colors[i];
                        cvSeqPush(m_seqRects, &cr);
                    }
                }
            }
            // Free the contour elements before the next threshold pass.
            cvClearSeq(seq);
        }
    }
    cvResetImageROI(img);
    cvResetImageROI(thresh);
}//void CvFaceElement::FindContours(IplImage* img, IplImage* thresh, int nLayers)
Esempio n. 30
0
/**
 * Runs a Haar-cascade detector on `img` and returns the detected rectangles.
 *
 * The rectangles are expressed in the coordinates of the downscaled detection
 * image (img dimensions divided by image_scale) — callers must rescale.
 *
 * @param img          input RGB frame (converted to gray internally).
 * @param storage      ignored: a local CvMemStorage is created and released
 *                     here; the parameter is kept for interface compatibility.
 * @param cascade      trained Haar classifier; if NULL the function returns
 *                     NULL and sets objs_found/calc_time to 0.
 * @param image_scale  integer downscale factor applied before detection.
 * @param objs_found   [out] number of detections (0 on the NULL-cascade path).
 * @param calc_time    [out] detection time in milliseconds.
 * @param scale_factor cvHaarDetectObjects pyramid scale factor.
 * @param min_neighbors cvHaarDetectObjects grouping threshold.
 * @return newly allocated array of objs_found rectangles (caller must
 *         delete[]), or NULL when cascade is NULL.
 */
CvRect* QOpenCvImageBox::detect_objs( IplImage* img, CvMemStorage* storage, CvHaarClassifierCascade* cascade, int image_scale, int &objs_found, int &calc_time, double scale_factor, int min_neighbors )
{
    // Make the output parameters deterministic on every exit path; the
    // original left them untouched when cascade was NULL.
    objs_found = 0;
    calc_time = 0;

    if( !cascade )
        return NULL;

    // NOTE(review): the caller-supplied storage is shadowed by a local pool
    // (created here, released below) exactly as in the original code.
    storage = cvCreateMemStorage(0);

    IplImage* gray = cvCreateImage( cvSize(img->width,img->height), 8, 1 );
    IplImage* small_img = cvCreateImage( cvSize( cvRound (img->width/image_scale), cvRound (img->height/image_scale)), 8, 1 );

    // Gray + downscale + histogram equalization: standard Haar preprocessing.
    cvCvtColor( img, gray, CV_RGB2GRAY );
    cvResize( gray, small_img, CV_INTER_LINEAR );
    cvEqualizeHist( small_img, small_img );
    cvClearMemStorage( storage );

    // Time the detection call.
    double t = (double)cvGetTickCount();
    CvSeq* faces = cvHaarDetectObjects( small_img, cascade, storage,
                                        scale_factor,
                                        min_neighbors,
                                        0
                                        //|CV_HAAR_FIND_BIGGEST_OBJECT
                                        //|CV_HAAR_DO_ROUGH_SEARCH
                                        |CV_HAAR_DO_CANNY_PRUNING
                                        //|CV_HAAR_SCALE_IMAGE
                                        ,
                                        cvSize(0, 0) );
    t = (double)cvGetTickCount() - t;
    calc_time = t/((double)cvGetTickFrequency()*1000.);

    // Guard the sequence before dereferencing it (original read faces->total
    // and called cvClearSeq(faces) without a NULL check).
    objs_found = faces ? faces->total : 0;

    cvReleaseImage( &gray );
    cvReleaseImage( &small_img );

    // Copy detections out of the sequence so its backing storage can be
    // released before returning.
    CvRect* farray = new CvRect[objs_found];
    for( int i = 0; i < objs_found; i++ )
    {
        farray[i] = *(CvRect*)cvGetSeqElem( faces, i );
    }

    // Releasing the storage frees the sequence too; the original's extra
    // cvClearSeq(faces) was redundant.
    cvReleaseMemStorage(&storage);

    return farray;
}