IplImage *process(IplImage **_img)
{
	fprintf(stderr, "Processing image:\n");
	IplImage *img = *_img;

	/* Convert to HSV */
	print_time("Converting to HSV");
	CvSize size = cvGetSize(img);
	IplImage *hsv = cvCreateImage(size, IPL_DEPTH_8U, 3);
	cvCvtColor(img, hsv, CV_BGR2HSV);

	/* Generate mask */
	CvMat *mask = cvCreateMat(size.height, size.width, CV_8UC1);
	//cvInRangeS(hsv, cvScalar(0.11*256, 0.60*256, 0.20*256, 0),cvScalar(0.14*256, 1.00*256, 1.00*256, 0), mask);
	cvInRangeS(hsv, cvScalar(0,0.6*256,0.6*256,0),cvScalar(0.21*256,256,256,0), mask);
	cvReleaseImage(&hsv);

	/* Perform morphological ops */
	print_time("Performing morphologies");
	IplConvKernel *se21 = cvCreateStructuringElementEx(21, 21, 10, 10, CV_SHAPE_RECT, NULL);
	IplConvKernel *se11 = cvCreateStructuringElementEx(11, 11, 5,  5,  CV_SHAPE_RECT, NULL);
	cvClose(mask, mask, se21);
	cvOpen(mask, mask, se11);
	cvReleaseStructuringElement(&se21);
	cvReleaseStructuringElement(&se11);

	/* Hough transform */
	IplImage *hough_in = cvCreateImage(size, 8, 1);
	cvCopy(mask, hough_in, NULL);
	int rows=size.height;
	int cols=size.width;
	int j,k;
	int breakflag=0;
	for(j=0;j<rows;j++) {
	  for(k=0;k<cols;k++)
	    {
		CvScalar val=cvGet2D(hough_in,j,k);
		if(val.val[0]==255)
		{
		  sprintf(dat, "%d-%d:", k, j);
		  int rc = serialport_write(fd, dat);
		  if(rc == -1) return NULL;
		  fprintf(fp, "%d %d\n", k, j);
		  breakflag=1;
		  break;
		}
	    }
	  if(breakflag)
	    break;
	}
	cvReleaseMat(&mask);
	return hough_in;
}
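/*
 * Note: cvClose() and cvOpen() used above are not part of OpenCV's C API; they
 * are project helpers that are not shown here. A minimal sketch of what they
 * might look like, matching the explicit dilate/erode sequence spelled out in
 * the main() example further below:
 */
void cvClose(const CvArr* src, CvArr* dst, IplConvKernel* se)
{
	cvDilate(src, dst, se, 1);
	cvErode(dst, dst, se, 1);
}

void cvOpen(const CvArr* src, CvArr* dst, IplConvKernel* se)
{
	cvErode(src, dst, se, 1);
	cvDilate(dst, dst, se, 1);
}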
Example #2
//Morphological rank filter (binary; by default the SE is a 3x3 rectangle)
void lhMorpRankFilterB(const IplImage* src, IplImage* dst, IplConvKernel* se = NULL, unsigned int rank = 0)
{
	assert(src != NULL  &&  dst != NULL && src != dst );

	bool defaultse = false;
	int card;
	if (se == NULL)
	{
		card = 3*3;
		assert(rank >= 0 && rank <= card);
		se = cvCreateStructuringElementEx(3, 3, 1, 1, CV_SHAPE_RECT);
		defaultse = true;
	}
	else
	{
		card = lhStructuringElementCard(se);
		assert(rank >= 0 && rank <= card);
	}

	//default rank is median
	if (rank == 0)
		rank = card/2+1;

	IplConvKernel* semap =	lhStructuringElementMap(se);

	CvMat *semat = cvCreateMat(semap->nRows, semap->nCols, CV_32FC1);

	int i;
	for (i=0; i<semap->nRows*semap->nCols; i++)
	{
		semat->data.fl[i] = semap->values[i];
	}

	cvThreshold(src, dst, 0, 1, CV_THRESH_BINARY);
	IplImage *temp = cvCreateImage(cvGetSize(dst), 8, 1);

	cvFilter2D(dst, temp, semat, cvPoint(semap->anchorX, semap->anchorY));

	cvThreshold(temp, dst, card-rank, 255, CV_THRESH_BINARY);

	cvReleaseMat(&semat);
	cvReleaseStructuringElement(&semap);

	if (defaultse)
		cvReleaseStructuringElement(&se);	
	
	cvReleaseImage(&temp);

}
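/*
 * lhStructuringElementCard() used above is not shown in these examples.
 * Assuming it returns the cardinality of the structuring element (the number
 * of non-zero entries in its mask), a minimal sketch would be:
 */
int lhStructuringElementCard(const IplConvKernel* se)
{
	int i, card = 0;
	const int n = se->nRows * se->nCols;

	for (i = 0; i < n; i++)
		if (se->values[i] != 0)
			card++;

	return card;
}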
Example #3
static gboolean gst_motiondetect_apply (
    IplImage * cvReferenceImage, const IplImage * cvCurrentImage,
    const IplImage * cvMaskImage, float noiseThreshold)
{
  IplConvKernel *kernel = cvCreateStructuringElementEx (3, 3, 1, 1,
      CV_SHAPE_ELLIPSE, NULL);
  int threshold = (int)((1 - noiseThreshold) * 255);
  IplImage *cvAbsDiffImage = cvReferenceImage;
  double maxVal = -1.0;

  cvAbsDiff( cvReferenceImage, cvCurrentImage, cvAbsDiffImage );
  cvThreshold (cvAbsDiffImage, cvAbsDiffImage, threshold, 255,
      CV_THRESH_BINARY);
  cvErode (cvAbsDiffImage, cvAbsDiffImage, kernel, 1);

  cvReleaseStructuringElement(&kernel);

  cvMinMaxLoc(cvAbsDiffImage, NULL, &maxVal, NULL, NULL, cvMaskImage );
  if (maxVal > 0) {
    return TRUE;
  } else {
    return FALSE;
  }

}
Example #4
void BlobTracking::process(const cv::Mat &img_input, const cv::Mat &img_mask, cv::Mat &img_output)
{
  if(img_input.empty() || img_mask.empty())
    return;

  loadConfig();

  if(firstTime)
    saveConfig();

  IplImage* frame = new IplImage(img_input);
  cvConvertScale(frame, frame, 1, 0);

  IplImage* segmentated = new IplImage(img_mask);
  
  IplConvKernel* morphKernel = cvCreateStructuringElementEx(5, 5, 1, 1, CV_SHAPE_RECT, NULL);
  cvMorphologyEx(segmentated, segmentated, NULL, morphKernel, CV_MOP_OPEN, 1);

  if(showBlobMask)
    cvShowImage("Blob Mask", segmentated);

  IplImage* labelImg = cvCreateImage(cvGetSize(frame), IPL_DEPTH_LABEL, 1);

  cvb::CvBlobs blobs;
  unsigned int result = cvb::cvLabel(segmentated, labelImg, blobs);
  
  //cvb::cvFilterByArea(blobs, 500, 1000000);
  cvb::cvFilterByArea(blobs, minArea, maxArea);
  
  //cvb::cvRenderBlobs(labelImg, blobs, frame, frame, CV_BLOB_RENDER_BOUNDING_BOX);
  if(debugBlob)
    cvb::cvRenderBlobs(labelImg, blobs, frame, frame, CV_BLOB_RENDER_BOUNDING_BOX|CV_BLOB_RENDER_CENTROID|CV_BLOB_RENDER_ANGLE|CV_BLOB_RENDER_TO_STD);
  else
    cvb::cvRenderBlobs(labelImg, blobs, frame, frame, CV_BLOB_RENDER_BOUNDING_BOX|CV_BLOB_RENDER_CENTROID|CV_BLOB_RENDER_ANGLE);

  cvb::cvUpdateTracks(blobs, tracks, 200., 5);
  
  if(debugTrack)
    cvb::cvRenderTracks(tracks, frame, frame, CV_TRACK_RENDER_ID|CV_TRACK_RENDER_BOUNDING_BOX|CV_TRACK_RENDER_TO_STD);
  else
    cvb::cvRenderTracks(tracks, frame, frame, CV_TRACK_RENDER_ID|CV_TRACK_RENDER_BOUNDING_BOX);
  
  //std::map<CvID, CvTrack *> CvTracks

  if(showOutput)
    cvShowImage("Blob Tracking", frame);

  cv::Mat img_result(frame);
  img_result.copyTo(img_output);

  //cvReleaseImage(&frame);
  //cvReleaseImage(&segmentated);
  cvReleaseImage(&labelImg);
  delete frame;
  delete segmentated;
  cvReleaseBlobs(blobs);
  cvReleaseStructuringElement(&morphKernel);

  firstTime = false;
}
void catcierge_template_matcher_destroy(catcierge_matcher_t **octx)
{
	catcierge_template_matcher_t *ctx;
	size_t i;

	if (!octx || !(*octx))
		return;

	ctx = (catcierge_template_matcher_t *)*octx;

	if (ctx->snouts)
	{
		for (i = 0; i < ctx->snout_count; i++)
		{
			cvReleaseImage(&ctx->snouts[i]);
			ctx->snouts[i] = NULL;
		}

		free(ctx->snouts);
		ctx->snouts = NULL;
	}

	if (ctx->flipped_snouts)
	{
		for (i = 0; i < ctx->snout_count; i++)
		{
			cvReleaseImage(&ctx->flipped_snouts[i]);
			ctx->flipped_snouts[i] = NULL;
		}

		free(ctx->flipped_snouts);
		ctx->flipped_snouts = NULL;
	}

	if (ctx->storage)
	{
		cvReleaseMemStorage(&ctx->storage);
		ctx->storage = NULL;
	}

	if (ctx->kernel)
	{
		cvReleaseStructuringElement(&ctx->kernel);
		ctx->kernel = NULL;
	}

	if (ctx->matchres)
	{
		for (i = 0; i < ctx->snout_count; i++)
		{
			cvReleaseImage(&ctx->matchres[i]);
		}

		free(ctx->matchres);
		ctx->matchres = NULL;
	}

	free(*octx);
	*octx = NULL;
}
//
// function "noiseRemoval":
// applies a morphological opening to the color-segmentation image
//
IplImage* noiseRemoval(IplImage* inputImage)
{
	int iWidth = inputImage->width;
	int iHeight = inputImage->height;

	IplImage* imageNoiseRem = cvCreateImage(cvSize(iWidth,iHeight),IPL_DEPTH_8U,1);
	if(!imageNoiseRem)
		exit(EXIT_FAILURE);

	IplConvKernel* structureEle1 =cvCreateStructuringElementEx(
		3,
		3,
		1,
		1,
		CV_SHAPE_ELLIPSE,
		0);

	int operationType[2] = {
		CV_MOP_OPEN,
		CV_MOP_CLOSE
	};

	//note: open and close (and likewise erode and dilate) appear to behave as if swapped here
	cvMorphologyEx(inputImage,imageNoiseRem,NULL,structureEle1,operationType[0],1);
	//cvMorphologyEx(inputImage,imageNoiseRem,NULL,structureEle1,operationType[1],1);

	//to reconnect regions broken apart by the morphology operation above
	//cvErode(imageNoiseRem,imageNoiseRem,structureEle1,1);
	
	cvReleaseStructuringElement(&structureEle1);

	return imageNoiseRem;
}
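/*
 * Usage sketch (hypothetical caller): noiseRemoval() allocates and returns a
 * new image, so the caller must release both the original mask and, later,
 * the cleaned one.
 */
void removeNoiseExample(IplImage** mask)
{
	IplImage* cleaned = noiseRemoval(*mask);
	cvReleaseImage(mask);   /* drop the noisy segmentation mask */
	*mask = cleaned;        /* caller releases this with cvReleaseImage when done */
}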
Example #7
//Constrained morphological hit-or-miss transform (for binary and grayscale images)
void lhMorpHMTC(const IplImage* src, IplImage* dst, IplConvKernel* sefg, IplConvKernel* sebg =NULL)
{
	assert(src != NULL && dst != NULL && src != dst && sefg!= NULL && sefg!=sebg);

	if (sebg == NULL)
	{
		sebg = lhStructuringElementNot(sefg);

	}
	
	IplImage*  temp1 = cvCreateImage(cvGetSize(src), 8, 1);
	IplImage*  temp2 = cvCreateImage(cvGetSize(src), 8, 1);
	IplImage*  temp3 = cvCreateImage(cvGetSize(src), 8, 1);
	IplImage*  temp4 = cvCreateImage(cvGetSize(src), 8, 1);

	IplImage*  mask1 = cvCreateImage(cvGetSize(src), 8, 1);
	IplImage*  mask2 = cvCreateImage(cvGetSize(src), 8, 1);
	IplImage*  mask3 = cvCreateImage(cvGetSize(src), 8, 1);
	IplImage*  mask4 = cvCreateImage(cvGetSize(src), 8, 1);

	cvZero(mask1);
	cvZero(mask2);
	cvZero(mask3);
	cvZero(mask4);

	cvZero(dst);

	//P107 (5.5)
	cvErode( src, temp1, sebg);
	cvDilate( src, temp2, sebg);
	cvErode( src, temp3, sefg);
	cvDilate( src, temp4, sefg);

	cvCmp(src, temp3, mask1, CV_CMP_EQ);
	cvCmp(temp2, src,  mask2, CV_CMP_LT);
	cvAnd(mask1, mask2, mask2);

	cvCmp(src, temp4, mask3 , CV_CMP_EQ);
	cvCmp(temp1, src, mask4 , CV_CMP_GT);
	cvAnd(mask3, mask4, mask4);

	cvSub(src, temp2, dst, mask2);
	cvSub(temp1, src, dst, mask4);




	cvReleaseImage(&mask1);
	cvReleaseImage(&mask2);
	cvReleaseImage(&mask3);
	cvReleaseImage(&mask4);

	cvReleaseImage(&temp1);
	cvReleaseImage(&temp2);
	cvReleaseImage(&temp3);
	cvReleaseImage(&temp4);

	cvReleaseStructuringElement(&sebg);

}
Example #8
//Unconstrained morphological hit-or-miss transform (for binary and grayscale images)
void lhMorpHMTU(const IplImage* src, IplImage* dst, IplConvKernel* sefg, IplConvKernel* sebg =NULL)
{
	assert(src != NULL && dst != NULL && src != dst && sefg!= NULL && sefg!=sebg);

	if (sebg == NULL)
	{
		sebg = lhStructuringElementNot(sefg);

	}
	
	IplImage*  temp = cvCreateImage(cvGetSize(src), 8, 1);
	IplImage*  mask = cvCreateImage(cvGetSize(src), 8, 1);
	cvZero(mask);

	//P106 (5.4)
	cvErode( src, temp, sefg);
	cvDilate(src, dst, sebg);
	cvCmp(temp, dst, mask, CV_CMP_GT);

	cvSub(temp, dst, dst, mask);
	cvNot(mask, mask);
	cvSet(dst, cvScalar(0), mask);

	//cvCopy(dst, mask);
	//cvSet(dst, cvScalar(255), mask);
	cvReleaseImage(&mask);
	cvReleaseImage(&temp);

	cvReleaseStructuringElement(&sebg);
}
Example #9
//Binary morphological hit-or-miss transform
void lhMorpHMTB(const IplImage* src, IplImage* dst, IplConvKernel* sefg, IplConvKernel* sebg =NULL)
{
	assert(src != NULL && dst != NULL && src != dst && sefg!= NULL && sefg!=sebg);

	if (sebg == NULL)
	{
		sebg = lhStructuringElementNot(sefg);

	}
	IplImage*  temp1 = cvCreateImage(cvGetSize(src), 8, 1);
	IplImage*  temp2 = cvCreateImage(cvGetSize(src), 8, 1);

	//P104 (5.2)
	cvErode( src, temp1, sefg);
	cvNot(src, temp2);
	cvErode( temp2, temp2, sebg);
	cvAnd(temp1, temp2, dst);


	cvReleaseImage(&temp1);
	cvReleaseImage(&temp2);

	cvReleaseStructuringElement(&sebg);

}
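/*
 * lhStructuringElementNot() used by the hit-or-miss examples above is not shown
 * here. Presumably it builds the background structuring element as the
 * complement of the foreground one (same size and anchor, mask inverted); a
 * minimal sketch under that assumption:
 */
IplConvKernel* lhStructuringElementNot(const IplConvKernel* se)
{
	int i;
	const int n = se->nRows * se->nCols;
	int* values = (int*)malloc(n * sizeof(int));
	IplConvKernel* result;

	for (i = 0; i < n; i++)
		values[i] = se->values[i] ? 0 : 1;   /* invert the mask */

	result = cvCreateStructuringElementEx(se->nCols, se->nRows,
	                                      se->anchorX, se->anchorY,
	                                      CV_SHAPE_CUSTOM, values);
	free(values);   /* cvCreateStructuringElementEx copies the values */
	return result;
}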
Example #10
//Morphological directional gradient
void lhMorpGradientDir(const IplImage* src, IplImage* dst, unsigned int angle, unsigned int len )
{
	assert(src != NULL && dst != NULL && src != dst);
	IplConvKernel* se = lhStructuringElementLine(angle, len);
	lhMorpGradient(src, dst, se);
	cvReleaseStructuringElement(&se);
}
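/*
 * lhStructuringElementLine() is also not shown in these examples. A plausible
 * sketch (an assumption, not the original implementation): build a custom
 * kernel whose non-zero cells approximate a line segment of the given length
 * through the anchor at the given angle (in degrees).
 */
IplConvKernel* lhStructuringElementLine(unsigned int angle, unsigned int len)
{
	const int size = (int)len | 1;          /* force an odd size so the anchor is centered */
	const int anchor = size / 2;
	const double rad = angle * CV_PI / 180.0;
	const double dx = cos(rad);
	const double dy = -sin(rad);            /* image rows grow downwards */
	int* values = (int*)calloc((size_t)size * size, sizeof(int));
	IplConvKernel* se;
	int i;

	/* walk from the anchor in both directions, marking cells along the line */
	for (i = -anchor; i <= anchor; i++)
	{
		int x = anchor + (int)floor(i * dx + 0.5);
		int y = anchor + (int)floor(i * dy + 0.5);
		values[y * size + x] = 1;
	}

	se = cvCreateStructuringElementEx(size, size, anchor, anchor,
	                                  CV_SHAPE_CUSTOM, values);
	free(values);
	return se;
}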
Example #11
static av_cold void dilate_uninit(AVFilterContext *ctx)
{
    OCVContext *ocv = ctx->priv;
    DilateContext *dilate = ocv->priv;

    cvReleaseStructuringElement(&dilate->kernel);
}
Example #12
IplImage *contoursGetOutlineMorh(IplImage *src, IplImage *temp, int mask)
{
    int radius = 3;
    int cols = radius * 2 + 1;
    int rows = cols;
    IplImage *res;
    IplImage *bin  = cvCreateImage(cvGetSize(src), src->depth, 1);

    cvAdaptiveThreshold(src, bin, 255, CV_ADAPTIVE_THRESH_GAUSSIAN_C, CV_THRESH_BINARY, 7, 1);

    if (mask == 1) {
        IplImage *mask = cvCreateImage(cvGetSize(src), src->depth, 1);
        res = cvCreateImage(cvGetSize(src), src->depth, 1);
        cvThreshold(src, mask, 0, 255, CV_THRESH_BINARY_INV + CV_THRESH_OTSU);
        cvOr(bin, mask, res, NULL);

        cvReleaseImage(&mask);
    } else {
        res = bin;
    }

    IplConvKernel *element = cvCreateStructuringElementEx(cols, rows, radius, radius, CV_SHAPE_ELLIPSE, NULL);

    cvMorphologyEx(res, res, temp, element, CV_MOP_OPEN, 1);
    cvReleaseStructuringElement(&element);

    radius = 9;
    cols = radius * 2 + 1;
    rows = cols;
    element = cvCreateStructuringElementEx(cols, rows, radius, radius, CV_SHAPE_ELLIPSE, NULL);
    cvMorphologyEx(res, res, temp, element, CV_MOP_CLOSE, 1);
    cvReleaseStructuringElement(&element);

    radius = 7;
    cols = radius * 2 + 1;
    rows = cols;
    element = cvCreateStructuringElementEx(cols, rows, radius, radius, CV_SHAPE_ELLIPSE, NULL);
    cvErode(res, res, element, 1);
    cvDilate(res, res, element, 1);

    contoursDrawBorder(res);

    cvReleaseStructuringElement(&element);
    cvReleaseImage(&temp);

    return res;
}
Example #13
IplImage *closeImage(IplImage *source) {
    int radius = 3;
    IplConvKernel* Kern = cvCreateStructuringElementEx(radius*2+1, radius*2+1, radius, radius, CV_SHAPE_RECT, NULL);
    // Note: erosion followed by dilation is an opening, despite the function's name.
    cvErode(source, source, Kern, 1);
    cvDilate(source, source, Kern, 1);
    
    cvReleaseStructuringElement(&Kern);
    return source;
}
/*
 * Transform the image into a two-colored image: one color for the color we want to track, another for all other colors.
 * From this image we get two pieces of data: the number of pixels detected and the center of gravity of those pixels.
 */
CvPoint binarisation(IplImage* image, int *nbPixels) {
 
	int x, y;
	CvScalar pixel;
	IplImage *hsv, *mask;
	IplConvKernel *kernel;
	int sommeX = 0, sommeY = 0;
	*nbPixels = 0;
 
	// Create the mask and initialize it to white (no color detected)
	mask = cvCreateImage(cvGetSize(image), image->depth, 1);
 
	// Create the hsv image
	hsv = cvCloneImage(image);
	cvCvtColor(image, hsv, CV_BGR2HSV);
 
	cvShowImage("GeckoGeek Color Rectification", hsv);
	// We create the mask
	cvInRangeS(hsv, cvScalar(h - tolerance -1, s - tolerance, 0), cvScalar(h + tolerance -1, s + tolerance, 255), mask);
 
	// Create kernels for the morphological operation
	kernel = cvCreateStructuringElementEx(5, 5, 2, 2, CV_SHAPE_ELLIPSE);
 
	// Morphological closing (dilate then erode): an opening of the black background, since the detected pixels are white
	cvDilate(mask, mask, kernel, 1);
	cvErode(mask, mask, kernel, 1);  
 
	// We go through the mask to look for the tracked object and get its gravity center
	for(x = 0; x < mask->width; x++) {
		for(y = 0; y < mask->height; y++) { 
 
			// If it's a tracked pixel, add it to the center-of-gravity calculation
			if(((uchar *)(mask->imageData + y*mask->widthStep))[x] == 255) {
				sommeX += x;
				sommeY += y;
				(*nbPixels)++;
			}
		}
	}
 
	// Show the result of the mask image
	cvShowImage("GeckoGeek Mask", mask);
 
	// We release the memory of kernels
	cvReleaseStructuringElement(&kernel);
 
	// We release the memory of the mask
	cvReleaseImage(&mask);
	// We release the memory of the hsv image
    	cvReleaseImage(&hsv);
 
	// If there is no pixel, we return a center outside the image, else we return the center of gravity
	if(*nbPixels > 0)
		return cvPoint((int)(sommeX / (*nbPixels)), (int)(sommeY / (*nbPixels)));
	else
		return cvPoint(-1, -1);
}
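/*
 * Usage sketch (hypothetical capture loop, in the spirit of the tutorial this
 * function comes from): grab frames from the default camera, locate the
 * tracked colour and mark its centre of gravity. The globals h, s and
 * tolerance used by binarisation() are assumed to be initialised elsewhere.
 */
void trackColourFromCamera(void)
{
	CvCapture* capture = cvCaptureFromCAM(0);
	IplImage* frame;
	int nbPixels = 0;

	cvNamedWindow("GeckoGeek Tracking", CV_WINDOW_AUTOSIZE);

	while ((frame = cvQueryFrame(capture)) != NULL) {
		CvPoint center = binarisation(frame, &nbPixels);
		if (center.x >= 0)   /* (-1,-1) means nothing was detected */
			cvCircle(frame, center, 5, CV_RGB(255, 0, 0), -1, 8, 0);
		cvShowImage("GeckoGeek Tracking", frame);
		if (cvWaitKey(10) == 27)   /* ESC quits */
			break;
	}
	cvReleaseCapture(&capture);
}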
void catcierge_haar_matcher_destroy(catcierge_matcher_t **octx)
{
	catcierge_haar_matcher_t *ctx;

	if (!octx || !(*octx))
		return;

	ctx = (catcierge_haar_matcher_t *)*octx;

	if (ctx->cascade)
	{
		cv2CascadeClassifier_destroy(ctx->cascade);
		ctx->cascade = NULL;
	}

	if (ctx->kernel2x2)
	{
		cvReleaseStructuringElement(&ctx->kernel2x2);
		ctx->kernel2x2 = NULL;
	}

	if (ctx->kernel3x3)
	{
		cvReleaseStructuringElement(&ctx->kernel3x3);
		ctx->kernel3x3 = NULL;
	}

	if (ctx->kernel5x1)
	{
		cvReleaseStructuringElement(&ctx->kernel5x1);
		ctx->kernel5x1 = NULL;
	}

	if (ctx->storage)
	{
		cvReleaseMemStorage(&ctx->storage);
		ctx->storage = NULL;
	}

	free(ctx);
	*octx = NULL;
}
Example #16
//Morphological closing
void lhMorpClose(const IplImage* src, IplImage* dst, IplConvKernel* se=NULL, int iterations=1)
{

    cvDilate( src, dst, se, iterations );

	IplConvKernel* semap = lhStructuringElementMap(se);

    cvErode( dst, dst, semap, iterations );

	cvReleaseStructuringElement(&semap);

}
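/*
 * lhStructuringElementMap() used above is not shown in these examples. Judging
 * from its use (the erosion step of a closing, and the kernel of the rank
 * filter), it is presumably the reflected structuring element: the mask rotated
 * by 180 degrees about the anchor. A minimal sketch under that assumption:
 */
IplConvKernel* lhStructuringElementMap(const IplConvKernel* se)
{
	int i;
	const int n = se->nRows * se->nCols;
	int* values = (int*)malloc(n * sizeof(int));
	IplConvKernel* mapped;

	for (i = 0; i < n; i++)
		values[i] = se->values[n - 1 - i];   /* reverse the mask = rotate 180 degrees */

	mapped = cvCreateStructuringElementEx(se->nCols, se->nRows,
	                                      se->nCols - 1 - se->anchorX,
	                                      se->nRows - 1 - se->anchorY,
	                                      CV_SHAPE_CUSTOM, values);
	free(values);
	return mapped;
}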
Example #17
CV_IMPL IplConvKernel *
cvCreateStructuringElementEx( int cols, int rows,
                              int anchorX, int anchorY,
                              int shape, int *values )
{
    IplConvKernel *element = 0;
    int i, size = rows * cols;
    int element_size = sizeof(*element) + size*sizeof(element->values[0]);

    CV_FUNCNAME( "cvCreateStructuringElementEx" );

    __BEGIN__;

    if( !values && shape == CV_SHAPE_CUSTOM )
        CV_ERROR_FROM_STATUS( CV_NULLPTR_ERR );

    if( cols <= 0 || rows <= 0 ||
        (unsigned) anchorX >= (unsigned) cols ||
        (unsigned) anchorY >= (unsigned) rows )
        CV_ERROR_FROM_STATUS( CV_BADSIZE_ERR );

    CV_CALL( element = (IplConvKernel *)cvAlloc(element_size + 32));
    if( !element )
        CV_ERROR_FROM_STATUS( CV_OUTOFMEM_ERR );

    element->nCols = cols;
    element->nRows = rows;
    element->anchorX = anchorX;
    element->anchorY = anchorY;
    element->nShiftR = shape < CV_SHAPE_ELLIPSE ? shape : CV_SHAPE_CUSTOM;
    element->values = (int*)(element + 1);

    if( shape == CV_SHAPE_CUSTOM )
    {
        if( !values )
            CV_ERROR( CV_StsNullPtr, "Null pointer to the custom element mask" );
        for( i = 0; i < size; i++ )
            element->values[i] = values[i];
    }
    else
    {
        CvMat el_hdr = cvMat( rows, cols, CV_32SC1, element->values );
        CV_CALL( CvMorphology::init_binary_element(&el_hdr,
                        shape, cvPoint(anchorX,anchorY)));
    }

    __END__;

    if( cvGetErrStatus() < 0 )
        cvReleaseStructuringElement( &element );

    return element;
}
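/*
 * Usage sketch for the CV_SHAPE_CUSTOM path above: the values array is copied
 * into the newly allocated element, so a stack buffer is fine.
 */
void custom_element_example(IplImage* src, IplImage* dst)
{
    int cross[9] = {
        0, 1, 0,
        1, 1, 1,
        0, 1, 0
    };
    IplConvKernel* custom = cvCreateStructuringElementEx(3, 3, 1, 1,
                                                         CV_SHAPE_CUSTOM, cross);
    cvErode(src, dst, custom, 1);
    cvReleaseStructuringElement(&custom);
}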
Example #18
/*
 * Release IplConvKernel object from memory and delete from hashtable.
 */
void
release_iplconvkernel_object(void *ptr)
{
  if (ptr) {
    unregister_object(ptr);
    try {
      cvReleaseStructuringElement((IplConvKernel**)(&ptr));
    }
    catch (cv::Exception& e) {
      raise_cverror(e);
    }
  }
}
/*
 * Transform the image into a two-colored image: one color for the color we want to track, another for all other colors.
 * From this image we get two pieces of data: the number of pixels detected and the center of gravity of those pixels.
 */
CvPoint ColourToTrack::binarise(IplImage* image)
{
	int x, y;
	IplImage *hsv;
	IplConvKernel *kernel;
	int sommeX = 0, sommeY = 0;
	nbPixels = 0;
 
	if(mask==NULL) mask = cvCreateImage(cvGetSize(image), image->depth, 1);

	// Create the hsv image
	hsv = cvCloneImage(image);
	//cvCvtColor(image, hsv, CV_BGR2HSV);
	cvCvtColor(image, hsv, CV_BGR2Lab);
 
	// We create the mask
	//cvInRangeS(hsv, cvScalar(colour.hsv.h - TOLERANCE -1, colour.hsv.s - TOLERANCE, 0), cvScalar(colour.hsv.h + TOLERANCE -1, colour.hsv.s + TOLERANCE, 255), mask);
	cvInRangeS(hsv, cvScalar(0, colour.hsv.s - TOLERANCE -1, colour.hsv.v - TOLERANCE), cvScalar(255, colour.hsv.s + TOLERANCE -1, colour.hsv.v + TOLERANCE), mask);
 
	// Create kernels for the morphological operation
	kernel = cvCreateStructuringElementEx(5, 5, 2, 2, CV_SHAPE_ELLIPSE);
 
	// Morphological closing (dilate then erode); the detected pixels are white on a black background
	cvDilate(mask, mask, kernel, 1);
	cvErode(mask, mask, kernel, 1);  
 
	// We go through the mask to look for the tracked object and get its gravity center
	for(x = 0; x < mask->width; x++) {
		for(y = 0; y < mask->height; y++) { 
 
			// If it's a tracked pixel, add it to the center-of-gravity calculation
			if(((uchar *)(mask->imageData + y*mask->widthStep))[x] == 255) {
				sommeX += x;
				sommeY += y;
				nbPixels++;
			}
		}
	}
 
	// We release the memory of kernels
	cvReleaseStructuringElement(&kernel);
 
	// We release the memory of the hsv image
    	cvReleaseImage(&hsv);
 
	// If there is no pixel, we return a center outside the image, else we return the center of gravity
	if(nbPixels > 0)
		return cvPoint((int)(sommeX / (nbPixels)), (int)(sommeY / (nbPixels)));
	else
		return cvPoint(-1, -1);
}
Example #20
void CTransformImage::Morphology()
{
	if(!m_transImage)
		return;

	IplConvKernel* element = cvCreateStructuringElementEx(3, 3, 1, 1, CV_SHAPE_RECT, NULL);

	cvDilate(m_transImage, m_transImage, element, 1);
	cvDilate(m_transImage, m_transImage, element, 1);
	cvErode (m_transImage, m_transImage, element, 1);
	cvErode (m_transImage, m_transImage, element, 1);

	cvReleaseStructuringElement(&element);
}
Example #21
//Morphological hit-or-miss opening transform
void lhMorpHMTOpen(const IplImage* src, IplImage* dst, IplConvKernel* sefg, IplConvKernel* sebg =NULL, int type=LH_MORP_TYPE_BINARY)
{
	assert(src != NULL && dst != NULL && src != dst && sefg!= NULL && sefg!=sebg);

	IplConvKernel* semap = lhStructuringElementMap(sefg);

	IplImage*  temp = cvCreateImage(cvGetSize(src), 8, 1);

	//P110 (5.8)
	lhMorpHMT(src, temp, sefg, sebg, type);
	cvDilate(temp, dst, semap);

	cvReleaseImage(&temp);
	cvReleaseStructuringElement(&semap);

}
Example #22
// callback function for erode/dilate trackbar
void ErodeDilate(int pos)
{
    int n = erode_dilate_pos - max_iters;
    int an = n > 0 ? n : -n;
    element = cvCreateStructuringElementEx( an*2+1, an*2+1, an, an, element_shape, 0 );
    if( n < 0 )
    {
        cvErode(src,dst,element,1);
    }
    else
    {
        cvDilate(src,dst,element,1);
    }
    cvReleaseStructuringElement(&element);
    cvShowImage("Erode/Dilate",dst);
}
Example #23
// callback function for open/close trackbar
void OpenClose(int pos)
{
    int n = open_close_pos - max_iters;
    int an = n > 0 ? n : -n;
    element = cvCreateStructuringElementEx( an*2+1, an*2+1, an, an, element_shape, 0 );
    if( n < 0 )
    {
        cvErode(src,dst,element,1);
        cvDilate(dst,dst,element,1);
    }
    else
    {
        cvDilate(src,dst,element,1);
        cvErode(dst,dst,element,1);
    }
    cvReleaseStructuringElement(&element);
    cvShowImage("Open/Close",dst);
}
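/*
 * The ErodeDilate() and OpenClose() callbacks above follow the classic OpenCV
 * morphology sample, where they are wired to trackbars roughly like this
 * (a sketch for inside main(), after src is loaded and dst is created; the
 * globals element_shape, max_iters and the *_pos counters are assumed to be
 * declared as in that sample):
 */
cvNamedWindow("Open/Close", 1);
cvNamedWindow("Erode/Dilate", 1);
open_close_pos = erode_dilate_pos = max_iters;
cvCreateTrackbar("iterations", "Open/Close", &open_close_pos, max_iters * 2 + 1, OpenClose);
cvCreateTrackbar("iterations", "Erode/Dilate", &erode_dilate_pos, max_iters * 2 + 1, ErodeDilate);
OpenClose(open_close_pos);
ErodeDilate(erode_dilate_pos);
cvWaitKey(0);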
DMZ_INTERNAL void prepare_image_for_cat(IplImage *image, IplImage *as_float, CharacterRectListIterator rect) {
  // Input image: IPL_DEPTH_8U [0 - 255]
  // Data for models: IPL_DEPTH_32F [0.0 - 1.0]
  
  cvSetImageROI(image, cvRect(rect->left, rect->top, kTrimmedCharacterImageWidth, kTrimmedCharacterImageHeight));
  
  // TODO: optimize this a lot!
  
  // Gradient
  IplImage *filtered_image = cvCreateImage(cvSize(kTrimmedCharacterImageWidth, kTrimmedCharacterImageHeight), IPL_DEPTH_8U, 1);
  //llcv_morph_grad3_2d_cross_u8(image, filtered_image);
  IplConvKernel *kernel = cvCreateStructuringElementEx(3, 3, 1, 1, CV_SHAPE_CROSS, NULL);
  cvMorphologyEx(image, filtered_image, NULL, kernel, CV_MOP_GRADIENT, 1);
  cvReleaseStructuringElement(&kernel);
  
  // Equalize
  llcv_equalize_hist(filtered_image, filtered_image);
  
  // Bilateral filter
  int aperture = 3;
  double space_sigma = (aperture / 2.0 - 1) * 0.3 + 0.8;
  double color_sigma = (aperture - 1) / 3.0;
  IplImage *smoothed_image = cvCreateImage(cvSize(kTrimmedCharacterImageWidth, kTrimmedCharacterImageHeight), IPL_DEPTH_8U, 1);
  cvSmooth(filtered_image, smoothed_image, CV_BILATERAL, aperture, aperture, space_sigma, color_sigma);
  
  // Convert to float
  cvConvertScale(smoothed_image, as_float, 1.0f / 255.0f, 0);
  
  cvReleaseImage(&smoothed_image);
  cvReleaseImage(&filtered_image);
  
  cvResetImageROI(image);

#if DEBUG_EXPIRY_CATEGORIZATION_PERFORMANCE
  dmz_debug_timer_print("prepare image", 2);
#endif
}
int main( int argc, char** argv )
{
    	IplImage* img;
		IplImage* mask2; // declare an IplImage pointer

		CvRect r;
 
        // load the image
        img = cvLoadImage( "tennis_input.jpg",1);

		cvNamedWindow( "originalImage", 1 ); // create a window
        cvShowImage( "originalImage", img ); // display the image
   
		CvSize size = cvGetSize(img);
		IplImage *hsv = cvCreateImage(size, IPL_DEPTH_8U, 3);
		cvCvtColor(img, hsv, CV_BGR2HSV);  


		CvMat *mask = cvCreateMat(size.height, size.width, CV_8UC1);
		mask2	 = cvCreateImage(size, IPL_DEPTH_8U,3);
		
		cvInRangeS(hsv, cvScalar(0.11*256, 0.60*256, 0.20*256, 0), cvScalar(0.14*256, 1.00*256, 1.00*256, 0), mask);
		cvReleaseImage(&hsv);

		IplConvKernel *se21 = cvCreateStructuringElementEx(21, 21, 10, 10, CV_SHAPE_RECT, NULL);
		IplConvKernel *se11 = cvCreateStructuringElementEx(11, 11, 5,  5,  CV_SHAPE_RECT, NULL);

		cvNamedWindow( "Mask before", 1 ); // create a window
        cvShowImage( "Mask before", mask ); // display the image
		//cvClose(mask, mask, se21);  See completed example for cvClose definition
		cvDilate(mask, mask, se21,1);
		cvErode(mask, mask, se21,1);	
		//cvOpen(mask, mask, se11);  See completed example for cvOpen  definition
		cvErode(mask, mask, se11,1);
		cvDilate(mask, mask, se11,1);	
		cvReleaseStructuringElement(&se21);
		cvReleaseStructuringElement(&se11);

		/* Copy mask into a grayscale image */
		IplImage *hough_in = cvCreateImage(size, 8, 1);
		cvCopy(mask, hough_in, NULL);
		cvCvtColor( mask, mask2, CV_GRAY2BGR );
		
        cvSmooth(hough_in, hough_in, CV_GAUSSIAN, 15, 15, 0, 0);

		/* Run the Hough function */
		CvMemStorage *storage = cvCreateMemStorage(0);
		//CvSeq *circles = cvHoughCircles(hough_in, storage, CV_HOUGH_GRADIENT, 4, size.height/10, 50, 150, 0, 0);
		CvSeq *contour;
		cvFindContours( hough_in, storage, &contour, sizeof(CvContour), CV_RETR_CCOMP, CV_CHAIN_APPROX_SIMPLE );


		for(;contour;contour=contour->h_next)
		{
			    r=((CvContour*)contour)->rect;
	
				// draw a rectangle on the image
				cvRectangle(img,cvPoint(r.x,r.y),cvPoint(r.x+r.width,r.y+r.height),CV_RGB(255,0,0),3,CV_AA,0);
				cvRectangle(mask2,cvPoint(r.x,r.y),cvPoint(r.x+r.width,r.y+r.height),CV_RGB(255,0,0),3,CV_AA,0);

		}

		cvReleaseMemStorage(&storage);
	


		cvNamedWindow( "Image", 1 ); // create a window
        cvShowImage( "Image", img ); // display the image
		cvNamedWindow( "Mask", 1 ); // create a window
        cvShowImage( "Mask", mask ); // display the image

		cvNamedWindow( "Mask2", 1 ); // create a window
        cvShowImage( "Mask2", mask2 ); // display the image



	    cvWaitKey(0); // wait for a key press
        cvDestroyWindow( "Image" ); // destroy the window
        cvReleaseImage( &img ); // release the image
		cvDestroyWindow( "Mask" ); // destroy the window
		cvDestroyWindow( "Mask before" ); // destroy the window
		cvDestroyWindow( "originalImage" ); // destroy the window
		cvDestroyWindow( "Mask2" ); // destroy the window
        return 0;


}
Example #26
//---------------------------------------------------------
bool COpenCV_Morphology::On_Execute(void)
{
	int			Type, Shape, Radius, Iterations;
	CSG_Grid	*pInput, *pOutput;

	pInput		= Parameters("INPUT")		->asGrid();
	pOutput		= Parameters("OUTPUT")		->asGrid();
	Type		= Parameters("TYPE")		->asInt();
	Shape		= Parameters("SHAPE")		->asInt();
	Radius		= Parameters("RADIUS")		->asInt();
	Iterations	= Parameters("ITERATIONS")	->asInt();

	//-----------------------------------------------------
	switch( Shape )
	{
	default:
	case 0:	Shape	= CV_SHAPE_ELLIPSE;	break;
	case 1:	Shape	= CV_SHAPE_RECT;	break;
	case 2:	Shape	= CV_SHAPE_CROSS;	break;
	}

	//-----------------------------------------------------
	IplImage	*cv_pInput	= Get_CVImage(pInput);
	IplImage	*cv_pOutput	= Get_CVImage(Get_NX(), Get_NY(), pInput->Get_Type());
	IplImage	*cv_pTmp	= NULL;

	//-----------------------------------------------------
	IplConvKernel	*cv_pElement	= cvCreateStructuringElementEx(Radius * 2 + 1, Radius * 2 + 1, Radius, Radius, Shape, 0);

	switch( Type )
	{
	case 0:	// dilation
		cvDilate		(cv_pInput, cv_pOutput, cv_pElement, Iterations);
		break;

	case 1:	// erosion
		cvErode			(cv_pInput, cv_pOutput, cv_pElement, Iterations);
		break;

	case 2:	// opening
		cvMorphologyEx	(cv_pInput, cv_pOutput, cv_pTmp,
			cv_pElement, CV_MOP_OPEN    , Iterations
		);
		break;

	case 3:	// closing
		cvMorphologyEx	(cv_pInput, cv_pOutput, cv_pTmp,
			cv_pElement, CV_MOP_CLOSE   , Iterations
		);
		break;

	case 4:	// morphological gradient
		cvMorphologyEx	(cv_pInput, cv_pOutput, cv_pTmp	= Get_CVImage(Get_NX(), Get_NY(), pInput->Get_Type()),
			cv_pElement, CV_MOP_GRADIENT, Iterations
		);
		break;

	case 5:	// top hat
		cvMorphologyEx	(cv_pInput, cv_pOutput, cv_pTmp	= Get_CVImage(Get_NX(), Get_NY(), pInput->Get_Type()),
			cv_pElement, CV_MOP_TOPHAT  , Iterations
		);
		break;

	case 6:	// black hat
		cvMorphologyEx	(cv_pInput, cv_pOutput, cv_pTmp	= Get_CVImage(Get_NX(), Get_NY(), pInput->Get_Type()),
			cv_pElement, CV_MOP_BLACKHAT, Iterations
		);
		break;
	}

	cvReleaseStructuringElement(&cv_pElement);

	//-----------------------------------------------------
	Copy_CVImage_To_Grid(pOutput, cv_pOutput);

    cvReleaseImage(&cv_pInput);
    cvReleaseImage(&cv_pOutput);

	if( cv_pTmp )
	{
		cvReleaseImage(&cv_pTmp);
	}

	pOutput->Set_Name(CSG_String::Format(SG_T("%s [%s]"), pInput->Get_Name(), Get_Name().c_str()));

	return( true );
}
Example #27
int main(int argc, char* argv[])
{
  if( argc != 3 )
  {
    fprintf(stderr, "Usage: %s panorender.png photo.jpg\n", argv[0]);
    return 1;
  }

  IplImage* pano = cvLoadImage( argv[1], CV_LOAD_IMAGE_COLOR);  assert(pano);
  IplImage* img  = cvLoadImage( argv[2], CV_LOAD_IMAGE_COLOR);  assert(img);


  CvMat* pano_edges;
  CvMat* img_edges;

  {
    pano_edges = extractEdges(pano, PANO);

    cvThreshold( pano_edges, pano_edges, 200.0, 0, CV_THRESH_TOZERO );
    // the non-edge areas of the panorama should be don't-care areas. I implement
    // this by
    // x -> dilate ? x : mean;
    // another way to state the same thing:
    //   !dilate -> mask
    //   cvSet(mean)

#define DILATE_R    9
#define EDGE_MINVAL 180

    IplConvKernel* kernel = cvCreateStructuringElementEx( 2*DILATE_R + 1, 2*DILATE_R + 1,
                                                         DILATE_R, DILATE_R,
                                                         CV_SHAPE_ELLIPSE, NULL);
    CvMat* dilated = cvCreateMat( pano->height, pano->width, CV_8UC1 );

    cvDilate(pano_edges, dilated, kernel, 1);

    CvScalar avg = cvAvg(pano_edges, dilated);
    cvCmpS(dilated, EDGE_MINVAL, dilated, CV_CMP_LT);
    cvSet( pano_edges, avg, dilated );

    cvReleaseMat(&dilated);
    cvReleaseStructuringElement(&kernel);
  }

  {
    img_edges = extractEdges(img, PHOTO);
    cvSmooth(img_edges, img_edges, CV_GAUSSIAN, 13, 13, 0.0, 0.0);
  }

  CvPoint offset = alignImages( img_edges, pano_edges );
  printf("offset: x,y: %d %d\n", offset.x, offset.y );



  cvReleaseMat  ( &pano_edges );
  cvReleaseMat  ( &img_edges );
  cvReleaseImage( &pano );
  cvReleaseImage( &img );

  return 0;
}
DLLEXPORT int opencv_dilate(WolframLibraryData libData, mint Argc, MArgument *Args, MArgument res)
{
	mint dims[3], w, h, i, j;
	IplImage* src = 0;
	IplImage* dst = 0;
	raw_t_bit* src_data_bit = 0;
	raw_t_bit* dst_data_bit = 0;
	raw_t_ubit8* src_data_byte = 0;
	raw_t_ubit8* dst_data_byte = 0;
	raw_t_ubit16* src_data_bit16 = 0;
	raw_t_ubit16* dst_data_bit16 = 0;
	raw_t_real32* src_data_real32 = 0;
	raw_t_real32* dst_data_real32 = 0;
	IplConvKernel* element = 0;
	MImage image_in, image_out = 0;
	int radius;
	int err = 0;
	int type = 0;
	WolframImageLibrary_Functions imgFuns = libData->imageLibraryFunctions;
	
	if (Argc < 2) {
		return LIBRARY_FUNCTION_ERROR;
	}
	
	image_in = MArgument_getMImage(Args[0]);
	if(imgFuns->MImage_getRank(image_in) == 3) return LIBRARY_FUNCTION_ERROR;
	if(imgFuns->MImage_getChannels(image_in) != 1) return LIBRARY_FUNCTION_ERROR;
	radius = MArgument_getInteger(Args[1]);
	if(radius < 1) return LIBRARY_FUNCTION_ERROR;
	err = imgFuns->MImage_clone(image_in, &image_out);
	if (err) return LIBRARY_FUNCTION_ERROR;
	
	type = imgFuns->MImage_getDataType(image_in);
	h = imgFuns->MImage_getRowCount(image_in);
	w = imgFuns->MImage_getColumnCount(image_in);
	
	element = cvCreateStructuringElementEx(2*radius+1,2*radius+1, radius, radius, CV_SHAPE_RECT, 0);

	switch(type) {
		case MImage_Type_Bit: 
		{
			raw_t_bit* data_in = imgFuns->MImage_getBitData(image_in);
			raw_t_bit* data_out = imgFuns->MImage_getBitData(image_out);
			if (!data_in || !data_out) {
				err = LIBRARY_FUNCTION_ERROR;
				goto cleanup;
			}
			src = cvCreateImage( cvSize(w, h), IPL_DEPTH_1U, 1);
			dst = cvCreateImage( cvSize(w, h), IPL_DEPTH_1U, 1);
			src_data_bit = src->imageData;
			dst_data_bit = dst->imageData;
			for (i = 0; i < h; i++) {
				for (j = 0; j < w; j++) {
					(src_data_bit + i*src->widthStep)[j] = data_in[i*w+j];
				}
			}
			cvDilate(src, dst, element, 1);
			for (i = 0; i < h; i++) {
				for (j = 0; j < w; j++) {
					data_out[i*w+j] = (dst_data_bit + i*dst->widthStep)[j];
				}
			}
			break;
		}
		case MImage_Type_Bit8:
		{
		    raw_t_ubit8* data_in = imgFuns->MImage_getByteData(image_in);
			raw_t_ubit8* data_out = imgFuns->MImage_getByteData(image_out);
			if (!data_in || !data_out) {
				err = LIBRARY_FUNCTION_ERROR;
				goto cleanup;
			}
			src = cvCreateImage( cvSize(w, h), IPL_DEPTH_8U, 1);
			dst = cvCreateImage( cvSize(w, h), IPL_DEPTH_8U, 1);
			src_data_byte = src->imageData;
			dst_data_byte = dst->imageData;
			for (i = 0; i < h; i++) {
				for (j = 0; j < w; j++) {
					(src_data_byte + i*src->widthStep)[j] = data_in[i*w+j];
				}
			}
			cvDilate(src, dst, element, 1);
			for (i = 0; i < h; i++) {
				for (j = 0; j < w; j++) {
					data_out[i*w+j] = (dst_data_byte + i*dst->widthStep)[j];
				}
			}
			break;
		}
		case MImage_Type_Bit16:
		{
		    raw_t_ubit16* data_in = imgFuns->MImage_getBit16Data(image_in);
			raw_t_ubit16* data_out = imgFuns->MImage_getBit16Data(image_out);
			if (!data_in || !data_out) {
				err = LIBRARY_FUNCTION_ERROR;
				goto cleanup;
			}
			src = cvCreateImage( cvSize(w, h), IPL_DEPTH_16U, 1);
			dst = cvCreateImage( cvSize(w, h), IPL_DEPTH_16U, 1);
			src_data_bit16 = src->imageData;
			dst_data_bit16 = dst->imageData;
			for (i = 0; i < h; i++) {
				for (j = 0; j < w; j++) {
					(src_data_bit16 + i*src->widthStep)[j] = data_in[i*w+j];
				}
			}
			cvDilate(src, dst, element, 1);
			for (i = 0; i < h; i++) {
				for (j = 0; j < w; j++) {
					data_out[i*w+j] = (dst_data_bit16 + i*dst->widthStep)[j];
				}
			}
			break;
		}
		case MImage_Type_Real32:
		{
		    raw_t_real32* data_in = imgFuns->MImage_getReal32Data(image_in);
			raw_t_real32* data_out = imgFuns->MImage_getReal32Data(image_out);
			if (!data_in || !data_out) {
				err = LIBRARY_FUNCTION_ERROR;
				goto cleanup;
			}
			src = cvCreateImage( cvSize(w, h), IPL_DEPTH_32F, 1);
			dst = cvCreateImage( cvSize(w, h), IPL_DEPTH_32F, 1);
			src_data_real32 = src->imageData;
			dst_data_real32 = dst->imageData;
			for (i = 0; i < h; i++) {
				for (j = 0; j < w; j++) {
					(src_data_real32 + i*src->widthStep)[j] = data_in[i*w+j];
				}
			}
			cvDilate(src, dst, element, 1);
			for (i = 0; i < h; i++) {
				for (j = 0; j < w; j++) {
					data_out[i*w+j] = (dst_data_real32 + i*dst->widthStep)[j];
				}
			}
			break;
		}
		case MImage_Type_Real:
		{
		    raw_t_real64* data_in = imgFuns->MImage_getRealData(image_in);
			raw_t_real64* data_out = imgFuns->MImage_getRealData(image_out);
			if (!data_in || !data_out) {
				err = LIBRARY_FUNCTION_ERROR;
				goto cleanup;
			}
			src = cvCreateImage( cvSize(w, h), IPL_DEPTH_32F, 1);
			dst = cvCreateImage( cvSize(w, h), IPL_DEPTH_32F, 1);
			src_data_real32 = src->imageData;
			dst_data_real32 = dst->imageData;
			for (i = 0; i < h; i++) {
				for (j = 0; j < w; j++) {
					(src_data_real32 + i*src->widthStep)[j] = (raw_t_real32)data_in[i*w+j];
				}
			}
			cvDilate(src, dst, element, 1);
			for (i = 0; i < h; i++) {
				for (j = 0; j < w; j++) {
					data_out[i*w+j] = (dst_data_real32 + i*dst->widthStep)[j];
				}
			}
			break;
		}
		default:
		return LIBRARY_FUNCTION_ERROR;
	}

cleanup:
	if(src) cvReleaseImage( &src );
	if(dst) cvReleaseImage( &dst );
	if(element) cvReleaseStructuringElement(&element);
	if(err == 0) {
		MArgument_setMImage(res, image_out);
	}
	else {
		if(image_out) imgFuns->MImage_free(image_out);
	}
	return err;
}
Example #29
IplImage * find_macbeth( const char *img )
{
    IplImage * macbeth_img = cvLoadImage( img,
        CV_LOAD_IMAGE_ANYCOLOR|CV_LOAD_IMAGE_ANYDEPTH );
        
    IplImage * macbeth_original = cvCreateImage( cvSize(macbeth_img->width, macbeth_img->height), macbeth_img->depth, macbeth_img->nChannels );
    cvCopy(macbeth_img, macbeth_original);
        
    IplImage * macbeth_split[3];
    IplImage * macbeth_split_thresh[3];
    
    for(int i = 0; i < 3; i++) {
        macbeth_split[i] = cvCreateImage( cvSize(macbeth_img->width, macbeth_img->height), macbeth_img->depth, 1 );
        macbeth_split_thresh[i] = cvCreateImage( cvSize(macbeth_img->width, macbeth_img->height), macbeth_img->depth, 1 );
    }
    
    cvSplit(macbeth_img, macbeth_split[0], macbeth_split[1], macbeth_split[2], NULL);
    
    if( macbeth_img )
    {
        int adaptive_method = CV_ADAPTIVE_THRESH_MEAN_C;
        int threshold_type = CV_THRESH_BINARY_INV;
        int block_size = cvRound(
            MIN(macbeth_img->width,macbeth_img->height)*0.02)|1;
        fprintf(stderr,"Using %d as block size\n", block_size);
        
        double offset = 6;
        
        // do an adaptive threshold on each channel
        for(int i = 0; i < 3; i++) {
            cvAdaptiveThreshold(macbeth_split[i], macbeth_split_thresh[i], 255, adaptive_method, threshold_type, block_size, offset);
        }
        
        IplImage * adaptive = cvCreateImage( cvSize(macbeth_img->width, macbeth_img->height), IPL_DEPTH_8U, 1 );
        
        // OR the binary threshold results together
        cvOr(macbeth_split_thresh[0],macbeth_split_thresh[1],adaptive);
        cvOr(macbeth_split_thresh[2],adaptive,adaptive);
        
        for(int i = 0; i < 3; i++) {
            cvReleaseImage( &(macbeth_split[i]) );
            cvReleaseImage( &(macbeth_split_thresh[i]) );
        }
                
        int element_size = (block_size/10)+2;
        fprintf(stderr,"Using %d as element size\n", element_size);
        
        // do an opening on the threshold image
        IplConvKernel * element = cvCreateStructuringElementEx(element_size,element_size,element_size/2,element_size/2,CV_SHAPE_RECT);
        cvMorphologyEx(adaptive,adaptive,NULL,element,CV_MOP_OPEN);
        cvReleaseStructuringElement(&element);
        
        CvMemStorage* storage = cvCreateMemStorage(0);
        
        CvSeq* initial_quads = cvCreateSeq( 0, sizeof(*initial_quads), sizeof(void*), storage );
        CvSeq* initial_boxes = cvCreateSeq( 0, sizeof(*initial_boxes), sizeof(CvBox2D), storage );
        
        // find contours in the threshold image
        CvSeq * contours = NULL;
        cvFindContours(adaptive,storage,&contours);
        
        int min_size = (macbeth_img->width*macbeth_img->height)/
            (MACBETH_SQUARES*100);
        
        if(contours) {
            int count = 0;
            
            for( CvSeq* c = contours; c != NULL; c = c->h_next) {
                CvRect rect = ((CvContour*)c)->rect;
                // only interested in contours with these restrictions
                if(CV_IS_SEQ_HOLE(c) && rect.width*rect.height >= min_size) {
                    // only interested in quad-like contours
                    CvSeq * quad_contour = find_quad(c, storage, min_size);
                    if(quad_contour) {
                        cvSeqPush( initial_quads, &quad_contour );
                        count++;
                        rect = ((CvContour*)quad_contour)->rect;
                        
                        CvScalar average = contour_average((CvContour*)quad_contour, macbeth_img);
                        
                        CvBox2D box = cvMinAreaRect2(quad_contour,storage);
                        cvSeqPush( initial_boxes, &box );
                        
                        // fprintf(stderr,"Center: %f %f\n", box.center.x, box.center.y);
                        
                        double min_distance = MAX_RGB_DISTANCE;
                        CvPoint closest_color_idx = cvPoint(-1,-1);
                        for(int y = 0; y < MACBETH_HEIGHT; y++) {
                            for(int x = 0; x < MACBETH_WIDTH; x++) {
                                double distance = euclidean_distance_lab(average,colorchecker_srgb[y][x]);
                                if(distance < min_distance) {
                                    closest_color_idx.x = x;
                                    closest_color_idx.y = y;
                                    min_distance = distance;
                                }
                            }
                        }
                        
                        CvScalar closest_color = colorchecker_srgb[closest_color_idx.y][closest_color_idx.x];
                        // fprintf(stderr,"Closest color: %f %f %f (%d %d)\n",
                        //     closest_color.val[2],
                        //     closest_color.val[1],
                        //     closest_color.val[0],
                        //     closest_color_idx.x,
                        //     closest_color_idx.y
                        // );
                        
                        // cvDrawContours(
                        //     macbeth_img,
                        //     quad_contour,
                        //     cvScalar(255,0,0),
                        //     cvScalar(0,0,255),
                        //     0,
                        //     element_size
                        // );
                        // cvCircle(
                        //     macbeth_img,
                        //     cvPointFrom32f(box.center),
                        //     element_size*6,
                        //     cvScalarAll(255),
                        //     -1
                        // );
                        // cvCircle(
                        //     macbeth_img,
                        //     cvPointFrom32f(box.center),
                        //     element_size*6,
                        //     closest_color,
                        //     -1
                        // );
                        // cvCircle(
                        //     macbeth_img,
                        //     cvPointFrom32f(box.center),
                        //     element_size*4,
                        //     average,
                        //     -1
                        // );
                        // CvRect rect = contained_rectangle(box);
                        // cvRectangle(
                        //     macbeth_img,
                        //     cvPoint(rect.x,rect.y),
                        //     cvPoint(rect.x+rect.width, rect.y+rect.height),
                        //     cvScalarAll(0),
                        //     element_size
                        // );
                    }
                }
            }
            
            ColorChecker found_colorchecker;

            fprintf(stderr,"%d initial quads found", initial_quads->total);
            if(count > MACBETH_SQUARES) {
                fprintf(stderr," (probably a Passport)\n");
                
                CvMat* points = cvCreateMat( initial_quads->total , 1, CV_32FC2 );
                CvMat* clusters = cvCreateMat( initial_quads->total , 1, CV_32SC1 );
                
                CvSeq* partitioned_quads[2];
                CvSeq* partitioned_boxes[2];
                for(int i = 0; i < 2; i++) {
                    partitioned_quads[i] = cvCreateSeq( 0, sizeof(**partitioned_quads), sizeof(void*), storage );
                    partitioned_boxes[i] = cvCreateSeq( 0, sizeof(**partitioned_boxes), sizeof(CvBox2D), storage );
                }
                
                // set up the points sequence for cvKMeans2, using the box centers
                for(int i = 0; i < initial_quads->total; i++) {
                    CvBox2D box = (*(CvBox2D*)cvGetSeqElem(initial_boxes, i));
                    
                    cvSet1D(points, i, cvScalar(box.center.x,box.center.y));
                }
                
                // partition into two clusters: passport and colorchecker
                cvKMeans2( points, 2, clusters, 
                           cvTermCriteria( CV_TERMCRIT_EPS+CV_TERMCRIT_ITER,
                                           10, 1.0 ) );
        
                for(int i = 0; i < initial_quads->total; i++) {
                    CvPoint2D32f pt = ((CvPoint2D32f*)points->data.fl)[i];
                    int cluster_idx = clusters->data.i[i];
                    
                    cvSeqPush( partitioned_quads[cluster_idx],
                               cvGetSeqElem(initial_quads, i) );
                    cvSeqPush( partitioned_boxes[cluster_idx],
                               cvGetSeqElem(initial_boxes, i) );

                    // cvCircle(
                    //     macbeth_img,
                    //     cvPointFrom32f(pt),
                    //     element_size*2,
                    //     cvScalar(255*cluster_idx,0,255-(255*cluster_idx)),
                    //     -1
                    // );
                }
                
                ColorChecker partitioned_checkers[2];
                
                // check each of the two partitioned sets for the best colorchecker
                for(int i = 0; i < 2; i++) {
                    partitioned_checkers[i] =
                        find_colorchecker(partitioned_quads[i], partitioned_boxes[i],
                                      storage, macbeth_img, macbeth_original);
                }
                
                // use the colorchecker with the lowest error
                found_colorchecker = partitioned_checkers[0].error < partitioned_checkers[1].error ?
                    partitioned_checkers[0] : partitioned_checkers[1];
                
                cvReleaseMat( &points );
                cvReleaseMat( &clusters );
            }
            else { // just one colorchecker to test
                fprintf(stderr,"\n");
                found_colorchecker = find_colorchecker(initial_quads, initial_boxes,
                                  storage, macbeth_img, macbeth_original);
            }
            
            // render the found colorchecker
            draw_colorchecker(found_colorchecker.values,found_colorchecker.points,macbeth_img,found_colorchecker.size);
            
            // print out the colorchecker info
            for(int y = 0; y < MACBETH_HEIGHT; y++) {            
                for(int x = 0; x < MACBETH_WIDTH; x++) {
                    CvScalar this_value = cvGet2D(found_colorchecker.values,y,x);
                    CvScalar this_point = cvGet2D(found_colorchecker.points,y,x);
                    
                    printf("%.0f,%.0f,%.0f,%.0f,%.0f\n",
                        this_point.val[0],this_point.val[1],
                        this_value.val[2],this_value.val[1],this_value.val[0]);
                }
            }
            printf("%0.f\n%f\n",found_colorchecker.size,found_colorchecker.error);
            
        }
                
        cvReleaseMemStorage( &storage );
        
        if( macbeth_original ) cvReleaseImage( &macbeth_original );
        if( adaptive ) cvReleaseImage( &adaptive );
        
        return macbeth_img;
    }

    if( macbeth_img ) cvReleaseImage( &macbeth_img );

    return NULL;
}
	Region LocalisationPupil::getPupilRegion(int threshold){
		Region reg;
		CvConnectedComp **compList;
		IplImage* im2;
		int a,nComp;
		bool fini = false;
		int thresh = threshold;
		int wSize = 15;
		int iteration = 0;
		int iterationMax = 20;
		int diffWH = 50;
		//init region
		reg.x=0;
		reg.y=0;
		reg.dx=0;
		reg.dy=0;

	#ifdef DEBUG	
		cout<<"---- getpupilRegion ----"<<endl;
	#endif

		while( (0<thresh) && (thresh<255) && (!fini)){
			im2 = cvCloneImage(im);		

	#ifdef DEBUG
			cout<<"thresh = "<<thresh<<endl;
	#endif
			
			// averaging filter
			//cvSmooth( im2, im2, CV_MEDIAN, 3);
			
			//binarisation
			for(int i=0;i<((im2)->width)*((im2)->height);i++) {
				a=(int)(*((im2)->imageData+i))>=0?(int)*((im2)->imageData+i):(int)(256+*((im2)->imageData+i));
				if(a>=thresh)
					*((im2)->imageData+i)=(char)0;
				else
					*((im2)->imageData+i)=(char)255;
			}//for
			
			//erosion 1
			IplConvKernel* strel = cvCreateStructuringElementEx( wSize, wSize, 3, 3, CV_SHAPE_CROSS);
			cvErode(im2, im2, strel);		
			
			cvReleaseStructuringElement( &strel );
		
	#ifdef DEBUG
		IplImage* im3 = cvCloneImage(im2);
	 	
		char sname[20];
		sprintf(sname,"s_%d_w_%d.tiff",thresh,wSize);
		string _imageName = sname;
		string fname;
		if(param!=NULL && param->hasDebPupilRegionDir()){
			fname = param->deb_Pupil_Region_Dir;
			fname += _imageName;
			cout<<"file name = "<<fname<<endl;
			cvSaveImage(fname.c_str(), im2);
		} 
	#endif
		
			/*
			//dilation 1
			strel = cvCreateStructuringElementEx( 12, 12, 3, 3, CV_SHAPE_CROSS);
			cvDilate(im2, im2, strel);
			
			cvReleaseStructuringElement( &strel );
			*/
			// get the largest connected regions in the image
			compList = getLargestConnectedComps(im2,&nComp);		

	#ifdef DEBUG
			cout<<"nComp = "<<nComp<<endl;
	#endif

			if(nComp<=0){
				// No region was found.
				// Increase the threshold and try again.
	#ifdef DEBUG
				cout<<"No region found! Increase the threshold and try again"<<endl<<endl;
	#endif
				thresh += 10;
				fini = false;
			}else{
				// Check whether every region has a very different width and height
				for(int i=0;i<nComp;i++){
	#ifdef DEBUG
		// Draw a rectangle representing this region
		CvPoint pt1;
		CvPoint pt2;
			
		pt1.x = compList[i]->rect.x;
		pt1.y = compList[i]->rect.y;
		
		pt2.x = compList[i]->rect.x + compList[i]->rect.width;
		pt2.y = compList[i]->rect.y + compList[i]->rect.height;
		
		cvRectangle (im3,  pt1,  pt2, CV_RGB(255, 255, 255), 1);
	#endif
					if(abs(compList[i]->rect.width-compList[i]->rect.height)<=diffWH){
						fini = true;
						//break;
					}
				}
				
				
	#ifdef DEBUG

		cvSaveImage(fname.c_str(), im3);
		cvReleaseImage(&im3);
	#endif
		
				if(fini){
					// At least one region has comparable width and height;
					// look for the region that most resembles a circle

	#ifdef DEBUG
					cout<<"At least one region has comparable width and height"<<endl;
	#endif

					int indexPupil,xc,yc,width,index,indexModel;
					float maxpoint,point;
					maxpoint = -100000.0f;
					indexPupil = 0;
					BYTE *model;
					for(int i=0;i<nComp;i++){

	#ifdef DEBUG				
						cout<<"\ti = "<<i<<"\t";
	#endif

						if(abs(compList[i]->rect.width-compList[i]->rect.height)<=diffWH){
							width = (compList[i]->rect.width>compList[i]->rect.height?compList[i]->rect.width:compList[i]->rect.height);
							model = this->initModel(width);
							xc = compList[i]->rect.x + compList[i]->rect.width/2;
							yc = compList[i]->rect.y + compList[i]->rect.height/2;

	#ifdef DEBUG
							cout<<"xc = "<<xc<<"\tyc = "<<yc<<endl;;
	#endif

							point = 0.0f;
							for(int x=xc-width/2,indexModel=0;x<xc+width/2;x++){
								for(int y=yc-width/2;y<yc+width/2;y++,indexModel++){
									if(x>=0&&x<(im2)->width&&y>=0&&y<(im2)->height){
										index = y*(im2)->width + x;
										//cout<<"index="<<index<<"\tindexModel="<<indexModel;
										//point += (float)((*((im2)->imageData+index)>0)&&(model[indexModel]>0)?1.0f:0.0f);
										a=(int)(*((im2)->imageData+index))>=0?(int)*((im2)->imageData+index):(int)(256+*((im2)->imageData+index));
										/*
										  if(a>0 && model[indexModel]>0){
											point += 1.0f;
										}
										*/
										if((a>0 && model[indexModel]>0)||(a<0 && model[indexModel]<0)){
											point += 1.0f;
										}else{
											point -= 1.0f;
										}
									}
								}
							}
							point /= width*width;

	#ifdef DEBUG						
							cout<<"\tpoint = "<<point;
	#endif

							if(maxpoint<point){
								maxpoint = point;
								indexPupil = i;

	#ifdef DEBUG							
								cout<<"\tmaxpoint = "<<maxpoint;
	#endif							
								
							}
							DELETE(model);
						}
	#ifdef DEBUG
						cout<<endl;
	#endif
					}
					
					reg.x = compList[indexPupil]->rect.x;
					reg.y = compList[indexPupil]->rect.y;
					reg.dx = compList[indexPupil]->rect.width;
					reg.dy = compList[indexPupil]->rect.height;
					
	#ifdef DEBUG
		// Draw a rectangle representing this region
		CvPoint pt1;
		CvPoint pt2;
			
		pt1.x = reg.x;
		pt1.y = reg.y;
		
		pt2.x = reg.x + reg.dx;
		pt2.y = reg.y + reg.dy;
		
		cout<<"reg.x = "<<reg.x<<endl;
		cout<<"reg.y = "<<reg.y<<endl;
		cout<<"reg.dx = "<<reg.dx<<endl;
		cout<<"reg.dy = "<<reg.dy<<endl;
		
		cvRectangle (im2,  pt1,  pt2, CV_RGB(255, 255, 255), 1);
		
		cout<<"file name = "<<this->filename<<endl;
		cvSaveImage(this->filename.c_str(), im2);
	#endif
		
				}else{
					// Every region has a very different width and height;
					// decrease the threshold, enlarge the mask window size and try again
					if(thresh>10){
						thresh -= 10;
					}
					wSize += 1;

	#ifdef DEBUG				
					cout<<"All regions have very different width and height! Decrease the threshold and try again"<<endl<<endl;
	#endif

				}
			}
			
			for(int i=0;i<10;i++){
				DELETE(compList[i]);
			}
			DELETE(compList);
			if(iteration++>iterationMax){
				fini = true;
			}
			
		}//while

		cvReleaseImage(&im2);	
		
		return reg;
	}