Ejemplo n.º 1
0
// Validate one Canny test case: compare the produced edge map against the
// reference output. Fails if any pixel is not strictly binary (0/255) or if
// too many edge pixels differ from the reference.
int CV_CannyTest::validate_test_results( int test_case_idx )
{
    int code = CvTS::OK, nz0;
    prepare_to_validation(test_case_idx);
    
    // L1 norm of (output - reference); zero means an exact match.
    double err = cvNorm(&test_mat[OUTPUT][0], &test_mat[REF_OUTPUT][0], CV_L1);
    if( err == 0 )
        goto _exit_;
    
    // Canny output must be binary, so the absolute difference at every pixel
    // is either 0 or 255 and the L1 sum must be an exact multiple of 255.
    if( err != cvRound(err) || cvRound(err)%255 != 0 )
    {
        ts->printf( CvTS::LOG, "Some of the pixels, produced by Canny, are not 0's or 255's; the difference is %g\n", err );
        code = CvTS::FAIL_INVALID_OUTPUT;
        goto _exit_;
    }
    
    // Convert the L1 error to a percentage of mismatching edge pixels,
    // relative to the reference edge count (clamped to >=100 so sparse
    // reference maps don't blow the ratio up).
    nz0 = cvCountNonZero(&test_mat[REF_OUTPUT][0]);
    err = (err/255/MAX(nz0,100))*100;
    if( err > 1 )
    {
        ts->printf( CvTS::LOG, "Too high percentage of non-matching edge pixels = %g%%\n", err);
        code = CvTS::FAIL_BAD_ACCURACY;
        goto _exit_;
    }
    
_exit_:
    if( code < 0 )
        ts->set_failed_test_info( code );
    return code;
}
Ejemplo n.º 2
0
// Other Image Operations
//
// 
int ofCvImage::countNonZeroInRegion( int x, int y, int w, int h ) const {
	// Count non-zero pixels inside the rectangle (x,y,w,h) by temporarily
	// restricting the image ROI, then resetting it before returning.
	cvSetImageROI( cvImage, cvRect(x,y,w,h) );
	const int nonZero = cvCountNonZero( cvImage );
	cvResetImageROI( cvImage );
	return nonZero;
}
Ejemplo n.º 3
0
/*
 * Compute a depth interval for the hand from a depth body image.
 *
 * One sample is taken per non-zero body pixel, the samples are clustered
 * into two groups with k-means, and the interval is:
 *   interval[0] = minimum depth value found in the body image
 *   interval[1] = minimum of the two cluster means
 *
 * body     - depth image (non-zero pixels belong to the body)
 * interval - output array of two ints
 */
void get_hand_interval_2 (IplImage *body, int *interval)
{
	CvMat *data, *labels, *means;
	int count;

#define CLUSTERS 2
	
	count = cvCountNonZero(body);
	if (count <= 0)
		return;		/* empty body image: cvCreateMat(0,...) would fail */

	data = cvCreateMat(count, 1, CV_32FC1);
	labels = cvCreateMat(count, 1, CV_32SC1);
	means = cvCreateMat(CLUSTERS, 1, CV_32FC1);

	fill_mat(body, data);
	cvKMeans2(data, CLUSTERS, labels,
		  cvTermCriteria(CV_TERMCRIT_EPS + CV_TERMCRIT_ITER, 10, 10.0),
		  1, 0, 0, means, 0);

	double tmp;
	cvMinMaxLoc(body, &tmp, NULL, NULL, NULL, NULL);
	interval[0] = tmp;
	cvMinMaxLoc(means, &tmp, NULL, NULL, NULL, NULL);
	interval[1] = tmp;

	cvReleaseMat(&data);
	cvReleaseMat(&labels);
	cvReleaseMat(&means);	/* was leaked in the original */
#undef CLUSTERS			/* don't let the macro escape this function */
}
// Validate a global-orientation test case: the computed angle must lie in the
// expected [min_angle, max_angle] window (modulo 360) and be close to the
// reference implementation's result.
int CV_MHIGlobalOrientTest::validate_test_results( int test_case_idx )
{
    // Reference orientation from the naive/test implementation.
    double ref_angle = cvTsCalcGlobalOrientation( &test_mat[INPUT][2], &test_mat[INPUT][1],
                                                  &test_mat[INPUT][0], timestamp, duration );
    double err_level = get_success_error_level( test_case_idx, 0, 0 );
    int code = CvTS::OK;
    int nz = cvCountNonZero( test_array[INPUT][1] );

    // Only enforce the range when the mask has enough pixels (>32) to make
    // the estimate meaningful; accept the angle either directly or shifted
    // by +360 to handle wrap-around near 0/360 degrees.
    if( nz > 32 && !(min_angle - err_level <= angle &&
          max_angle + err_level >= angle) &&
        !(min_angle - err_level <= angle+360 &&
          max_angle + err_level >= angle+360) )
    {
        ts->printf( CvTS::LOG, "The angle=%g is outside (%g,%g) range\n",
                    angle, min_angle - err_level, max_angle + err_level );
        code = CvTS::FAIL_BAD_ACCURACY;
    }
    // Compare with the reference angle, again allowing 360-degree wrap.
    else if( fabs(angle - ref_angle) > err_level &&
             fabs(360 - fabs(angle - ref_angle)) > err_level )
    {
        ts->printf( CvTS::LOG, "The angle=%g differs too much from reference value=%g\n",
                    angle, ref_angle );
        code = CvTS::FAIL_BAD_ACCURACY;
    }

    if( code < 0 )
        ts->set_failed_test_info( code );
    return code;
}
Ejemplo n.º 5
0
 //! Validate the size/aspect/fill of a single binarized character image.
 //! The reference glyph is 45x90 pixels; the digit '1' can have a much
 //! smaller aspect ratio, hence the very low minAspect bound.
 bool verifyImgCharSizes(IplImage *r){
     // reference aspect ratio (45x90 glyph) and allowed deviation
     float refAspect = 45.0f / 90.0f;
     float tolerance = 0.7;
     float minHeight = 10;
     float maxHeight = 35;
     float minWidth = 8;
     float maxWidth = 16;
     float minAspect = 0.05;
     float maxAspect = refAspect + refAspect*tolerance;

     float charAspect = (float)r->width / (float)r->height;

     // fraction of lit pixels within the bounding box
     float litPixels = cvCountNonZero(r);
     float boxArea = r->width*r->height;
     float fillRatio = litPixels / boxArea;

     return fillRatio < 1
         && charAspect > minAspect && charAspect < maxAspect
         && r->height >= minHeight && r->height < maxHeight
         && r->width > minWidth && r->width < maxWidth;
 }
/* Analyse every configured ROI of the crowd detector: count low/high speed
 * pixels inside the ROI's bounding rectangle, derive an occupation
 * percentage, and feed both the occupancy and fluidity analysers. Also draws
 * the ROI rectangle onto low_speed_map for visualisation. */
static void
kms_crowd_detector_roi_analysis (KmsCrowdDetector * crowddetector,
    IplImage * low_speed_map, IplImage * high_speed_map)
{
  int curve;

  for (curve = 0; curve < crowddetector->priv->num_rois; curve++) {

    int high_speed_points = 0;
    int low_speed_points = 0;
    int total_pixels_occupied = 0;
    double occupation_percentage = 0.0;
    /* bounding rectangle of the ROI polygon */
    CvRect container =
        kms_crowd_detector_get_square_roi_contaniner (crowddetector, curve);

    cvRectangle (low_speed_map, cvPoint (container.x, container.y),
        cvPoint (container.x + container.width, container.y + container.height),
        cvScalar (0, 255, 0, 0), 1, 8, 0);
    /* restrict both maps to the ROI rectangle before counting */
    cvSetImageROI (low_speed_map, container);
    cvSetImageROI (high_speed_map, container);
    low_speed_points = cvCountNonZero (low_speed_map);
    high_speed_points = cvCountNonZero (high_speed_map);
    cvResetImageROI (low_speed_map);
    cvResetImageROI (high_speed_map);
    total_pixels_occupied = high_speed_points + low_speed_points;
    /* occupation = occupied pixels as a percentage of the ROI's own pixel
     * count; guard against a zero-area ROI */
    if (crowddetector->priv->rois_data[curve].n_pixels_roi > 0) {
      occupation_percentage = ((double) total_pixels_occupied * 100 /
          crowddetector->priv->rois_data[curve].n_pixels_roi);
    } else {
      occupation_percentage = 0.0;
    }
    kms_crowd_detector_roi_occup_analysis (crowddetector,
        occupation_percentage,
        crowddetector->priv->rois_data[curve].occupancy_num_frames_to_event,
        crowddetector->priv->rois_data[curve].occupancy_level_min,
        crowddetector->priv->rois_data[curve].occupancy_level_med,
        crowddetector->priv->rois_data[curve].occupancy_level_max, curve);
    kms_crowd_detector_roi_fluidity_analysis (crowddetector,
        high_speed_points, low_speed_points,
        crowddetector->priv->rois_data[curve].fluidity_num_frames_to_event,
        crowddetector->priv->rois_data[curve].fluidity_level_min,
        crowddetector->priv->rois_data[curve].fluidity_level_med,
        crowddetector->priv->rois_data[curve].fluidity_level_max, curve);
  }
}
Ejemplo n.º 7
0
//  Function cvRefineForegroundMaskBySegm performs FG post-processing based on
//  segmentation: each segment is set entirely to FG when the majority of its
//  pixels are already FG, otherwise entirely to BG.
// parameters:
//      segments - pointer to result of segmentation (for example MeanShiftSegmentation)
//      bg_model - pointer to CvBGStatModel structure whose foreground mask is refined in place
CV_IMPL void cvRefineForegroundMaskBySegm(CvSeq* segments, CvBGStatModel*  bg_model) {
    IplImage* seg_mask = cvCreateImage(cvSize(bg_model->foreground->width, bg_model->foreground->height),
                                       IPL_DEPTH_8U, 1);
    for (; segments; segments = ((CvSeq*)segments)->h_next) {
        // isolate the current contour from its siblings/children before drawing
        CvSeq seq = *segments;
        seq.v_next = seq.h_next = NULL;

        // rasterize the segment into the scratch mask and count its pixels
        cvZero(seg_mask);
        cvDrawContours(seg_mask, &seq, CV_RGB(0, 0, 255), CV_RGB(0, 0, 255), 10, -1);
        int seg_pixels = cvCountNonZero(seg_mask);

        // how many of those pixels are currently classified as foreground?
        cvAnd(seg_mask, bg_model->foreground, seg_mask);
        int fg_pixels = cvCountNonZero(seg_mask);

        // majority vote: repaint the whole segment as FG or BG
        if (fg_pixels > seg_pixels * 0.5) {
            cvDrawContours(bg_model->foreground, &seq, CV_RGB(0, 0, 255), CV_RGB(0, 0, 255), 10, -1);
        } else {
            cvDrawContours(bg_model->foreground, &seq, CV_RGB(0, 0, 0), CV_RGB(0, 0, 0), 10, -1);
        }
    }
    cvReleaseImage(&seg_mask);
}
Ejemplo n.º 8
0
// Refine the foreground mask pFrImg using an image segmentation: every
// segment whose pixels are majority-foreground is filled as foreground,
// all others are cleared.
void cvRefineForegroundMaskBySegm(CvSeq* segments, IplImage*  pFrImg)
{
	IplImage* scratch = cvCreateImage(cvSize(pFrImg->width, pFrImg->height), IPL_DEPTH_8U, 1);
	for (; segments; segments = ((CvSeq*)segments)->h_next)
	{
		// detach this contour so only it gets drawn
		CvSeq contour = *segments;
		contour.v_next = contour.h_next = NULL;

		// draw the filled segment into the scratch mask
		cvZero(scratch);
		cvDrawContours(scratch, &contour, CV_RGB(0, 0, 255), CV_RGB(0, 0, 255), 10, -1);
		int segmentArea = cvCountNonZero(scratch);

		// overlap of the segment with the current foreground mask
		cvAnd(scratch, pFrImg, scratch);
		int foregroundArea = cvCountNonZero(scratch);

		// repaint the segment wholesale based on the majority vote
		if (foregroundArea > segmentArea*0.5)
			cvDrawContours(pFrImg, &contour, CV_RGB(0, 0, 255), CV_RGB(0, 0, 255), 10, -1);
		else
			cvDrawContours(pFrImg, &contour, CV_RGB(0, 0, 0), CV_RGB(0, 0, 0), 10, -1);
	}
	cvReleaseImage(&scratch);
}
Ejemplo n.º 9
0
/**
 *  \brief	Returns all nonzero points in image into x- and y-coordinate sets
 *
 *  Scans the raw pixel data of the global image `zeta` inside a window
 *  centred on (xGuess, yGuess), clipped to the image bounds, and records the
 *  (x, y) position of every non-zero pixel into Px/Py.
 *
 *  \param[out] Px        x coordinates of the non-zero pixels found
 *  \param[out] Py        y coordinates of the non-zero pixels found
 *  \param[out] numPoints number of points written to Px/Py
 */
void getPointsFromImageHough(int *Px, int *Py, int *numPoints) {
	unsigned char* rawROI = NULL;
	int ssize;
	int num = 0;
	int i, j;

	/* BUG FIX: the original called cvCountNonZero(zeta) and immediately
	 * discarded the result (num = 0), wasting a full-image pass. */
	cvGetRawData(zeta, &rawROI, &ssize, NULL);

	/* NOTE(review): indexing rawROI[i+j*width] assumes the rows are densely
	 * packed, i.e. the row step equals `width` bytes — confirm against how
	 * zeta is allocated. */
	for (j = max_of_2(0, yGuess - height); j < min_of_2(height, yGuess + height); j++){
		for (i = max_of_2(0, xGuess - width); i < min_of_2(width, xGuess + width); i++){
			if (rawROI[i+j*width] != 0){ Px[num] = i; Py[num] = j; num++; }
		}
	}
	*numPoints = num;
}
// Compute the enabled statistics (non-zero count, sum, mean/stddev, min/max)
// of the grayscale input image and broadcast each one as a
// CommunicationMessage; finally hand the unchanged image to the display.
void THISCLASS::OnStep() {
	// Get the input image
	IplImage* inputimage = mCore->mDataStructureImageGray.mImage;
	if (! inputimage) {
		AddError(wxT("No image on selected input."));
		return;
	}

	// Calculate non-zero elements
	if (mCalculateNonZero) {
		int non_zero= cvCountNonZero(inputimage);
		CommunicationMessage m(wxT("STATS_NONZERO"));
		m.AddInt(non_zero);
		mCore->mCommunicationInterface->Send(&m);
	}

	// Calculate sum (single-channel image: only val[0] is meaningful)
	if (mCalculateSum) {
		CvScalar sum= cvSum(inputimage);
		CommunicationMessage m(wxT("STATS_SUM"));
		m.AddDouble(sum.val[0]);
		mCore->mCommunicationInterface->Send(&m);
	}

	// Calculate mean and standard deviation
	if (mCalculateMeanStdDev) {
		CvScalar mean;
		CvScalar std_dev;
		cvAvgSdv(inputimage, &mean, &std_dev, NULL);
		CommunicationMessage m(wxT("STATS_MEANSTDDEV"));
		m.AddDouble(mean.val[0]);
		m.AddDouble(std_dev.val[0]);
		mCore->mCommunicationInterface->Send(&m);
	}

	// Calculate min and max
	if (mCalculateMinMax) {
		double min_val;
		double max_val;
		cvMinMaxLoc(inputimage, &min_val, &max_val, NULL, NULL, NULL);
		CommunicationMessage m(wxT("STATS_MINMAX"));
		m.AddDouble(min_val);
		m.AddDouble(max_val);
		mCore->mCommunicationInterface->Send(&m);
	}

	// Set the display (pass-through: this step does not modify the image)
	DisplayEditor de(&mDisplayOutput);
	if (de.IsActive()) {
		de.SetMainImage(inputimage);
	}
}
Ejemplo n.º 11
0
//--------------------------------------------------------------------------------
int ofxCvImage::countNonZeroInRegion( int x, int y, int w, int h ) {
    //TODO: test this method

	if (w == 0 || h == 0) return 0;
    int count = 0;

    // intersect the global ROI with the region to check
    ofRectangle iRoi = getIntersectionROI( getROI(), ofRectangle(x,y,w,h) );

    ofRectangle lastROI = getROI();
    setROI(iRoi);
	count = cvCountNonZero( cvImage );
    setROI(lastROI);

	return count;
}
Ejemplo n.º 12
0
//--------------------------------------------------------------------------------
int ofxCvImage::countNonZeroInRegion( int x, int y, int w, int h ) {
	if( !bAllocated ){
		ofLog(OF_LOG_ERROR, "in countNonZeroInRegion, need to allocate image first");
		return 0;		
	}
	
    //TODO: test this method

	if (w == 0 || h == 0) return 0;
    int count = 0;

    // intersect the global ROI with the region to check
    ofRectangle iRoi = getIntersectionROI( getROI(), ofRectangle(x,y,w,h) );

    ofRectangle lastROI = getROI();
    setROI(iRoi);
	count = cvCountNonZero( cvImage );
    setROI(lastROI);

	return count;
}
/* For every configured ROI, rasterize its polygon into a scratch mask to
 * measure its pixel area, and reset the ROI's per-frame occupancy state. */
static void
kms_crowd_detector_count_num_pixels_rois (KmsCrowdDetector * self)
{
  int curve;
  IplImage *mask;

  /* scratch mask, same size as the current frame */
  mask = cvCreateImage (cvSize (self->priv->actual_image->width,
          self->priv->actual_image->height), IPL_DEPTH_8U, 1);
  cvZero (mask);

  for (curve = 0; curve < self->priv->num_rois; curve++) {
    /* fill the ROI polygon and count its pixels */
    cvFillConvexPoly (mask, self->priv->curves[curve],
        self->priv->n_points[curve], cvScalar (255, 255, 255, 0), 8, 0);
    self->priv->rois_data[curve].n_pixels_roi = cvCountNonZero (mask);

    /* reset the occupancy bookkeeping for this ROI */
    self->priv->rois_data[curve].actual_occupation_level = 0;
    self->priv->rois_data[curve].potential_occupation_level = 0;
    self->priv->rois_data[curve].num_frames_potential_occupancy_level = 0;

    /* clear the mask before the next ROI is drawn */
    cvSetZero (mask);
  }

  cvReleaseImage (&mask);
}
Ejemplo n.º 14
0
//--------------------------------------------------------------------------------
int ofxCvImage::countNonZeroInRegion( int x, int y, int w, int h ) {
	if( !bAllocated ){
		ofLogError("ofxCvImage") << "countNonZeroInRegion(): image not allocated";
		return 0;		
	}
	
    //TODO: test this method

	if (w == 0 || h == 0) return 0;
    int count = 0;

    // intersect the global ROI with the region to check
    ofRectangle iRoi = getIntersectionROI( getROI(), ofRectangle(x,y,w,h) );

    ofRectangle lastROI = getROI();
    setROI(iRoi);
	count = cvCountNonZero( cvImage );
    setROI(lastROI);

	return count;
}
// Return the percentage (0-100) of pixels whose grayscale value changed by
// more than the threshold (45) between the previous frame p and the new
// frame n. Both inputs are expected to be RGB images of the same size.
int ImageAnalysis::ImageDiff(IplImage* n, IplImage *p)
{
    IplImage *currGray = cvCreateImage(cvGetSize(n), IPL_DEPTH_8U, 1);
    IplImage *prevGray = cvCreateImage(cvGetSize(p), IPL_DEPTH_8U, 1);
    IplImage *delta    = cvCreateImage(cvGetSize(n), IPL_DEPTH_8U, 1);

    cvCvtColor(n, currGray, CV_RGB2GRAY);
    cvCvtColor(p, prevGray, CV_RGB2GRAY);

    // absolute difference with small changes (<= 45) zeroed out
    cvAbsDiff(prevGray, currGray, delta);
    cvThreshold(delta, delta, 45, 255, CV_THRESH_TOZERO);

    // fraction of surviving (changed) pixels, as an integer percentage
    int percent = (cvCountNonZero(delta)*100) / (delta->height*delta->width);

    cvReleaseImage(&currGray);
    cvReleaseImage(&prevGray);
    cvReleaseImage(&delta);

    return percent;
}
Ejemplo n.º 16
0
/*!
 * \brief Compute depth interval defined by the hand. 
 *
 * Starting from the depth body image this function computes the depth
 * interval related to the hand, these two depth values are later used
 * as thresholds for the binarization procedure. This interval is
 * calculated through a K-means clustering of the body image.
 *
 * \param[in]      body depth image
 * \param[out]  hand depth min max values 
 */
/* See the doxygen block above: clusters the body depth samples with k-means
 * (K clusters) and returns [min depth, first cluster mean] as the hand's
 * binarization interval. */
void get_hand_interval (IplImage *body, int *interval)
{
	CvMat  *data, *par, *means;
	int    min, count = cvCountNonZero(body);	/* one sample per body pixel */

	data  = cvCreateMat(count, 1, CV_32FC1);
	par   = cvCreateMat(count, 1, CV_8UC1);		/* per-sample cluster labels */
	means = cvCreateMat(K, 1, CV_32FC1);

	fill_mat(body, data);

	/* kmeans_clustering fills `means`/`par` and returns the minimum sample */
	min = kmeans_clustering(data, means, par);

	//var = get_cluster_var(data, par);

	interval[0] = min;
	interval[1] = (int)cvmGet(means, 0, 0); //- var;

	cvReleaseMat(&data);
	cvReleaseMat(&par);
	cvReleaseMat(&means);
}
Ejemplo n.º 17
0
// Mouse callback for interactive HSV color picking:
//  - left click records the HSV value under the cursor into hues/sats/vals;
//  - right click builds an inclusive HSV range from everything recorded so
//    far, shows a binary mask of matching pixels, and (after 's' is pressed)
//    clears the recorded samples.
void my_mouse_callback( int event, int x, int y, int flags, void* param )
{
	unsigned int hsv[3] = {};

	// Read the H, S, V bytes of the pixel under the cursor from the raw
	// hsv_data buffer (step = bytes per row, channels = 3).
	for(k=0;k<channels;k++)
	{
		// channel tag, only used by the commented-out debug print below
		char c = 0;

		if (k==0)
			c = 'H';

		else if (k==1)
			c='S';

		else
			c='V';

		hsv[k] = hsv_data[y*step+x*channels+k];
		//printf("%c: %d\t", c, hsv[k]);
	}

	// Save <H,S,V>
	if (event == CV_EVENT_LBUTTONDOWN)
	{
		hues[recorded_count] = hsv[0];
		sats[recorded_count] = hsv[1];
		vals[recorded_count] = hsv[2];
		++recorded_count;
	}

	// Draw new image with whites in places that match
	else if (event == CV_EVENT_RBUTTONDOWN)
	{
		// bounding HSV box of all recorded samples
		h_min = getMinFromArray(hues,recorded_count);
		s_min = getMinFromArray(sats,recorded_count);
		v_min = getMinFromArray(vals,recorded_count);

		h_max = getMaxFromArray(hues,recorded_count);
		s_max = getMaxFromArray(sats,recorded_count);
		v_max = getMaxFromArray(vals,recorded_count);
		
		// binary mask: 255 where img_hsv falls inside the recorded range
		IplImage* img = cvCreateImage(cvSize(width,height), IPL_DEPTH_8U, 1);
		cvInRangeS(img_hsv,cvScalar(h_min,s_min,v_min,0),cvScalar(h_max,s_max,v_max,0),img);

		int count = cvCountNonZero(img);
		printf("count: %d\n", count);

		cvNamedWindow("secondwindow",CV_GUI_EXPANDED);
		cvShowImage("secondwindow",img);
		key = 0;

		// block until the user presses 's' to dismiss the preview
		while (key != 's')
		{
			key = cvWaitKey(0);			
		}

		// NOTE(review): if hues/sats/vals hold elements wider than one byte,
		// these memsets clear only `recorded_count` BYTES, not elements —
		// should likely be recorded_count*sizeof(hues[0]); confirm the array
		// element types at their declaration.
		memset(hues,0,recorded_count);
		memset(sats,0,recorded_count);
		memset(vals,0,recorded_count);
		recorded_count=0;

		cvReleaseImage(&img);
		cvDestroyWindow("secondwindow");
	}
	
	//printf("\n");	
}
Ejemplo n.º 18
0
/*-------------------------------------------------------------------------------------*/
/* Build compacted point arrays from the entries flagged non-zero in `status`
   and estimate the projection matrix from them.

   objPoints4D - 4 x N matrix of homogeneous 3D points
   points2     - 2 x N matrix of corresponding image points
   status      - per-point byte mask; non-zero marks a usable point
   projMatr    - output projection matrix (filled by icvComputeProjectMatrix) */
void icvComputeProjectMatrixStatus(CvMat *objPoints4D,CvMat *points2,CvMat *status, CvMat *projMatr)
{
    /* Compute number of good points */
    int num = cvCountNonZero(status);
    
    /* Create arrays sized for the good points only */
    CvMat *objPoints = cvCreateMat(4,num,CV_64F);
    CvMat *points2D = cvCreateMat(2,num,CV_64F);

    int currVis = 0;
    int i;

    /* Optional debug dump of the status/point data.
       BUG FIX: the original never checked fopen's result and issued
       unconditional fprintf calls (some outside the #if block that declared
       `file`), crashing whenever the hard-coded path was unavailable. */
    FILE *file = fopen("d:\\test\\projStatus.txt","w");

    int totalNum = objPoints4D->cols;
    for( i = 0; i < totalNum; i++ )
    {
        if( file )
            fprintf(file,"%d (%d) ",i,status->data.ptr[i]);
        if( status->data.ptr[i] )
        {
            if( file )
            {
                double X,Y,Z,W;
                double x,y;
                X = cvmGet(objPoints4D,0,i);
                Y = cvmGet(objPoints4D,1,i);
                Z = cvmGet(objPoints4D,2,i);
                W = cvmGet(objPoints4D,3,i);

                x = cvmGet(points2,0,i);
                y = cvmGet(points2,1,i);
                fprintf(file,"%d (%lf %lf %lf %lf) - (%lf %lf)",i,X,Y,Z,W,x,y );
            }

            /* Copy the visible point into the next free column. */
            cvmSet(objPoints,0,currVis,cvmGet(objPoints4D,0,i));
            cvmSet(objPoints,1,currVis,cvmGet(objPoints4D,1,i));
            cvmSet(objPoints,2,currVis,cvmGet(objPoints4D,2,i));
            cvmSet(objPoints,3,currVis,cvmGet(objPoints4D,3,i));

            cvmSet(points2D,0,currVis,cvmGet(points2,0,i));
            cvmSet(points2D,1,currVis,cvmGet(points2,1,i));

            currVis++;
        }
        
        if( file )
            fprintf(file,"\n");
    }

    if( file )
        fclose(file);

    icvComputeProjectMatrix(objPoints,points2D,projMatr);

    /* Free allocated memory */
    cvReleaseMat(&objPoints);
    cvReleaseMat(&points2D);
}
Ejemplo n.º 19
0
//------------------------------------------------------------------------------
// Compute the oriented texture gradient of a texton map `tmap`: for every
// pixel, compare the texton histograms of the two half-discs (radius sigma,
// orientation theta) on either side of the pixel, either with a chi^2
// distance (useChi2 == 1) or via the texton similarity matrix `tsim`.
// Returns a newly allocated h x w CV_32FC1 gradient matrix (caller frees).
//------------------------------------------------------------------------------
CvMat *tgso (CvMat &tmap, int ntex, double sigma, double theta, CvMat &tsim, int useChi2) {

	// --- Sanity check 1: all texton labels must be integral -------------------
	CvMat *roundTmap=cvCreateMat(tmap.rows,tmap.cols,CV_32FC1);
	CvMat *comp=cvCreateMat(tmap.rows,tmap.cols,CV_32FC1);

	for (int i=0;i<tmap.rows;i++)
		for (int j=0;j<tmap.cols;j++)
			cvSetReal2D(roundTmap,i,j,cvRound(cvGetReal2D(&tmap,i,j)));

	cvSub(&tmap,roundTmap,comp);
	if (cvCountNonZero(comp)) {
		printf("texton labels not integral");
		cvReleaseMat(&roundTmap);
		cvReleaseMat(&comp);
		exit(1);
	}

	// --- Sanity check 2: labels must lie inside [1, ntex] ---------------------
	double min,max;
	cvMinMaxLoc(&tmap,&min,&max);
	// BUG FIX: was `min<1 && max>ntex`, which only fired when BOTH bounds were
	// violated; either violation alone makes the labels out of range.
	if (min<1 || max>ntex) {
		// BUG FIX: the original did `char *msg=new char[50]; printf(msg,...)`,
		// passing an uninitialized buffer as the format string (undefined
		// behavior) and leaking the allocation. Print the format directly.
		printf("texton labels out of range [1,%d]\n",ntex);
		cvReleaseMat(&roundTmap);
		cvReleaseMat(&comp);
		exit(1);
	}

	cvReleaseMat(&roundTmap);
	cvReleaseMat(&comp);


	double wr=floor(sigma); //sigma=radius (Leo) 

	// coordinate axes [-wr, wr] and the (2*wr+1)^2 meshgrids built from them
	CvMat *x=cvCreateMat(1,wr-(-wr)+1, CV_64FC1);
	CvMat *y=cvCreateMat(wr-(-wr)+1,1, CV_64FC1);

	CvMat *u=cvCreateMat(wr-(-wr)+1,wr-(-wr)+1, CV_64FC1);
	CvMat *v=cvCreateMat(wr-(-wr)+1,wr-(-wr)+1, CV_64FC1);
	CvMat *gamma=cvCreateMat(u->rows,v->rows, CV_64FC1);

	// Set x,y directions 
	for (int j=-wr;j<=wr;j++) {
		cvSetReal2D(x,0,(j+wr),j);
		cvSetReal2D(y,(j+wr),0,j);
	}

	// Set u,v meshgrids.
	// FIX: cvRepeat tiles the source over the whole destination, so a single
	// call per matrix suffices; the original wrapped these in a loop that
	// redundantly repeated the identical fill u->rows times.
	cvRepeat(x,u);
	cvRepeat(y,v);

	// Compute the gamma matrix (per-cell polar angle) from the grid
	for (int i=0;i<u->rows;i++) 
		for (int j=0;j<u->cols;j++)
			cvSetReal2D(gamma,i,j,atan2(cvGetReal2D(v,i,j),cvGetReal2D(u,i,j)));

	cvReleaseMat(&x);
	cvReleaseMat(&y);

	// disc mask: cells with u^2+v^2 <= sigma^2, centre excluded
	CvMat *sum=cvCreateMat(u->rows,u->cols, CV_64FC1);
	cvMul(u,u,u);
	cvMul(v,v,v);
	cvAdd(u,v,sum);
	CvMat *mask=cvCreateMat(u->rows,u->cols, CV_8UC1);
	cvCmpS(sum,sigma*sigma,mask,CV_CMP_LE);
	cvConvertScale(mask,mask,1.0/255);
	cvSetReal2D(mask,wr,wr,0);
	int count=cvCountNonZero(mask);

	cvReleaseMat(&u);
	cvReleaseMat(&v);
	cvReleaseMat(&sum);

	// split the disc into two half-discs relative to orientation theta
	CvMat *sub=cvCreateMat(mask->rows,mask->cols, CV_64FC1);
	CvMat *side=cvCreateMat(mask->rows,mask->cols, CV_8UC1);

	cvSubS(gamma,cvScalar(theta),sub);
	cvReleaseMat(&gamma);

	for (int i=0;i<mask->rows;i++){
		for (int j=0;j<mask->cols;j++) {
			double n=cvmGet(sub,i,j);
			double n_mod = n-floor(n/(2*M_PI))*2*M_PI;
			cvSetReal2D(side,i,j, 1 + int(n_mod < M_PI));
		}
	}

	cvMul(side,mask,side);
	cvReleaseMat(&sub);
	cvReleaseMat(&mask);

	CvMat *lmask=cvCreateMat(side->rows,side->cols, CV_8UC1);
	CvMat *rmask=cvCreateMat(side->rows,side->cols, CV_8UC1);
	cvCmpS(side,1,lmask,CV_CMP_EQ);
	cvCmpS(side,2,rmask,CV_CMP_EQ);
	int count1=cvCountNonZero(lmask), count2=cvCountNonZero(rmask);
	if (count1 != count2) {
		printf("Bug: imbalance\n");
	}

	// normalized left/right half-disc averaging kernels
	CvMat *rlmask=cvCreateMat(side->rows,side->cols, CV_32FC1);
	CvMat *rrmask=cvCreateMat(side->rows,side->cols, CV_32FC1);
	cvConvertScale(lmask,rlmask,1.0/(255*count)*2);
	cvConvertScale(rmask,rrmask,1.0/(255*count)*2);


	cvReleaseMat(&lmask);
	cvReleaseMat(&rmask);
	cvReleaseMat(&side);

	int h=tmap.rows;
	int w=tmap.cols;


	// per-texton distance columns and filtering scratch buffers
	CvMat *d       = cvCreateMat(h*w,ntex,CV_32FC1);
	CvMat *coltemp = cvCreateMat(h*w,1,CV_32FC1);
	CvMat *tgL     = cvCreateMat(h,w, CV_32FC1);
	CvMat *tgR     = cvCreateMat(h,w, CV_32FC1);
	CvMat *temp    = cvCreateMat(h,w,CV_8UC1);
	CvMat *im      = cvCreateMat(h,w, CV_32FC1);
	CvMat *sub2    = cvCreateMat(h,w,CV_32FC1);
	CvMat *sub2t   = cvCreateMat(w,h,CV_32FC1);
	CvMat *prod    = cvCreateMat(h*w,ntex,CV_32FC1);
	CvMat reshapehdr,*reshape;

	CvMat* tgL_pad = cvCreateMat(h+rlmask->rows-1,w+rlmask->cols-1,CV_32FC1);
	CvMat* tgR_pad = cvCreateMat(h+rlmask->rows-1,w+rlmask->cols-1,CV_32FC1);
	CvMat* im_pad  = cvCreateMat(h+rlmask->rows-1,w+rlmask->cols-1,CV_32FC1);

	CvMat *tg=cvCreateMat(h,w,CV_32FC1);
	cvZero(tg);
	
	if (useChi2 == 1){
		// chi^2 distance between left/right half-disc histograms,
		// accumulated one texton channel at a time
		CvMat* temp_add1 = cvCreateMat(h,w,CV_32FC1);
		for (int i=0;i<ntex;i++) {
			cvCmpS(&tmap,i+1,temp,CV_CMP_EQ); 
			cvConvertScale(temp,im,1.0/255);

			cvCopyMakeBorder(tgL,tgL_pad,cvPoint((rlmask->cols-1)/2,(rlmask->rows-1)/2),IPL_BORDER_CONSTANT);
			cvCopyMakeBorder(tgR,tgR_pad,cvPoint((rlmask->cols-1)/2,(rlmask->rows-1)/2),IPL_BORDER_CONSTANT);
			cvCopyMakeBorder(im,im_pad,cvPoint((rlmask->cols-1)/2,(rlmask->rows-1)/2),IPL_BORDER_CONSTANT);

			cvFilter2D(im_pad,tgL_pad,rlmask,cvPoint((rlmask->cols-1)/2,(rlmask->rows-1)/2));
			cvFilter2D(im_pad,tgR_pad,rrmask,cvPoint((rlmask->cols-1)/2,(rlmask->rows-1)/2));

			cvGetSubRect(tgL_pad,tgL,cvRect((rlmask->cols-1)/2,(rlmask->rows-1)/2,tgL->cols,tgL->rows));
			cvGetSubRect(tgR_pad,tgR,cvRect((rlmask->cols-1)/2,(rlmask->rows-1)/2,tgR->cols,tgR->rows));

			// (L-R)^2 / (L+R+eps), summed over textons
			cvSub(tgL,tgR,sub2);
			cvPow(sub2,sub2,2.0);
			cvAdd(tgL,tgR,temp_add1);
			cvAddS(temp_add1,cvScalar(0.0000000001),temp_add1);
			cvDiv(sub2,temp_add1,sub2);
			cvAdd(tg,sub2,tg);
		}
		cvScale(tg,tg,0.5);

		cvReleaseMat(&temp_add1);

	}
	else{// if not chi^2: weighted L1 distance through the tsim matrix
		for (int i=0;i<ntex;i++) {
			cvCmpS(&tmap,i+1,temp,CV_CMP_EQ); 
			cvConvertScale(temp,im,1.0/255);

			cvCopyMakeBorder(tgL,tgL_pad,cvPoint((rlmask->cols-1)/2,(rlmask->rows-1)/2),IPL_BORDER_CONSTANT);
			cvCopyMakeBorder(tgR,tgR_pad,cvPoint((rlmask->cols-1)/2,(rlmask->rows-1)/2),IPL_BORDER_CONSTANT);
			cvCopyMakeBorder(im,im_pad,cvPoint((rlmask->cols-1)/2,(rlmask->rows-1)/2),IPL_BORDER_CONSTANT);

			cvFilter2D(im_pad,tgL_pad,rlmask,cvPoint((rlmask->cols-1)/2,(rlmask->rows-1)/2));
			cvFilter2D(im_pad,tgR_pad,rrmask,cvPoint((rlmask->cols-1)/2,(rlmask->rows-1)/2));

			cvGetSubRect(tgL_pad,tgL,cvRect((rlmask->cols-1)/2,(rlmask->rows-1)/2,tgL->cols,tgL->rows));
			cvGetSubRect(tgR_pad,tgR,cvRect((rlmask->cols-1)/2,(rlmask->rows-1)/2,tgR->cols,tgR->rows));

			// store |L-R| for this texton as one column of d
			cvSub(tgL,tgR,sub2);
			cvAbs(sub2,sub2);
			cvTranspose(sub2,sub2t);
			reshape=cvReshape(sub2t,&reshapehdr,0,h*w);
			cvGetCol(d,coltemp,i);
			cvCopy(reshape,coltemp);
		}

		// tg = rowsum( (d * tsim) .* d ), reshaped back to h x w
		cvMatMul(d,&tsim,prod);
		cvMul(prod,d,prod);


		CvMat *sumcols=cvCreateMat(h*w,1,CV_32FC1);
		cvSetZero(sumcols);
		for (int i=0;i<prod->cols;i++) {
			cvGetCol(prod,coltemp,i);
			cvAdd(sumcols,coltemp,sumcols);
		}

		reshape=cvReshape(sumcols,&reshapehdr,0,w);
		cvTranspose(reshape,tg);

		cvReleaseMat(&sumcols);
	}


	//Smooth the gradient now!!
	// NOTE(review): fitparab presumably returns a newly allocated matrix, in
	// which case the previous tg is leaked here — confirm fitparab's
	// ownership contract before releasing the old matrix.
	tg=fitparab(*tg,sigma,sigma/4,theta);
	cvMaxS(tg,0,tg); 

	
	cvReleaseMat(&im_pad);
	cvReleaseMat(&tgL_pad);
	cvReleaseMat(&tgR_pad);
	cvReleaseMat(&rlmask);
	cvReleaseMat(&rrmask);
	cvReleaseMat(&im);
	cvReleaseMat(&tgL);
	cvReleaseMat(&tgR);
	cvReleaseMat(&temp);
	cvReleaseMat(&coltemp);
	cvReleaseMat(&sub2);
	cvReleaseMat(&sub2t);
	cvReleaseMat(&d);
	cvReleaseMat(&prod);

	return tg;

}
Ejemplo n.º 20
0
// Feature-extraction driver: for each point-cloud file in dirPrefix (starting
// at index 125), load the cloud, compute normals, and for every labelled
// rectangle emit an SVM-rank style line ("<label> qid:<n> i:v ...") built
// from histograms of depth (z), normal-z, and curvature inside the rect.
int main(int argc, char *argv[])
{
  string dir = string(dirPrefix);
  vector<string> files = vector<string>();
  getdir(dir,files);
  // for (unsigned int i = 0;i < files.size();i++) {
      // cout << files[i] << endl;
  // }
  // return 0;
  // vector<float> joint_angles;
  // vector<float> t_matrix;
  pcl::PointCloud<pcl::PointXYZ> cloud;
  pcl::PointCloud<pcl::Normal> normals;
  vector<int> indices;
  int normals_k_neighbors = 50;
  double minVal, maxVal;
  vector<CvMat*> matXYZ, matNorms;
  vector<Rect> rects;
  
  // cvMinMaxLoc(mat[2], &minVal, &maxVal, NULL, NULL, mat[4]); // Z-axis
  vector<float> hist;
  vector<float> feature_vec;
  vector<float> vecZ, vecNormZ, vecCurv;
  IplImage *m, *m2;
  CvMat *mm, *mmask; CvMat tmp, tmp2;
  float binWidth;
  int nBins = 9;
  
  // NOTE(review): starting index 125 is hard-coded — presumably to skip
  // already-processed files; confirm before reuse.
  for (int nFile = 125; nFile < files.size(); nFile++) {
      loadPCDFile((dir+"pcd"+files[nFile].substr(4,4)+".txt").c_str(),cloud,indices);
      calculateNormals(cloud,normals,normals_k_neighbors);
      matXYZ = convertXYZToCvMat(indices, cloud);
      matNorms = convertNormalsToCvMat(indices, normals, true);
      loadRects(rects,(dir+files[nFile].substr(0,8)+"_pos.txt").c_str(),(dir+files[nFile].substr(0,8)+"_pos.txt").c_str());
      
      // loadAnglesFile(anglesFile,joint_angles,t_matrix);
      // for (int i=0;i<rects.size();i++) {
        // cout << rects[i] << endl;
      // }
      for (int rectNum = 0;rectNum < rects.size(); rectNum++) {
          // ranking-style line prefix: label and query id
          cout << (rects[rectNum].isPos?1:0) << " qid:" << (nFile+1);
          // For z values
          binWidth = 0.05;
          extractRect(matXYZ[2], rects[rectNum],m);
          mm = cvGetMat(m, &tmp);
          extractRect(matXYZ[3], rects[rectNum],m2);
          mmask = cvGetMat(m2, &tmp2);
          // Need to erode the mask due to interpolation errors from rotating
          cvErode(mmask,mmask);
          // Empty mask: emit an all-zero feature vector and move on.
          if (cvCountNonZero(mmask) == 0) {
              feature_vec.clear();
              feature_vec.resize(3*nBins*3);
              for (int i=0;i<feature_vec.size();i++)
                  feature_vec[i] = 0;
              
              for (int i=0;i<feature_vec.size();i++) {
                printf(" %d:%.3f",i+1,feature_vec[i]);
                // if (i%nBins==nBins-1) printf("\n");
              }
              printf("\n");
              continue;
          }
          
          // printf("w: %d, h: %d, t: %d\n",mm->width,mm->height,mm->width*mm->height);
          // cvMinMaxLoc(mm, &minVal, &maxVal, NULL, NULL, mmask); // Z-axis
          // printf("min: %f, max: %f\n",minVal,maxVal);
          // Get the vector for 
          getHistogramFeatureVectorDirect(mm,nBins,binWidth,mmask,vecZ,true);
          cvReleaseImage(&m);
          cvReleaseImage(&m2);
          
          // For normZ values
          binWidth = 0.1;
          extractRect(matNorms[2], rects[rectNum],m);
          mm = cvGetMat(m, &tmp);
          extractRect(matNorms[4], rects[rectNum],m2);
          mmask = cvGetMat(m2, &tmp2);
          // Need to erode the mask due to interpolation errors from rotating
          cvErode(mmask,mmask);
          // printf("w: %d, h: %d, t: %d\n",mm->width,mm->height,mm->width*mm->height);
          // cvMinMaxLoc(mm, &minVal, &maxVal, NULL, NULL, mmask); // Z-axis
          // printf("min: %f, max: %f\n",minVal,maxVal);
          // Get the vector for 
          getHistogramFeatureVectorDirect(mm,nBins,binWidth,mmask,vecNormZ,false);
          cvReleaseImage(&m);
          cvReleaseImage(&m2);
          
          // For curv values
          binWidth = 0.01;
          extractRect(matNorms[3], rects[rectNum],m);
          mm = cvGetMat(m, &tmp);
          extractRect(matNorms[4], rects[rectNum],m2);
          mmask = cvGetMat(m2, &tmp2);
          // Need to erode the mask due to interpolation errors from rotating
          cvErode(mmask,mmask);
          // printf("w: %d, h: %d, t: %d\n",mm->width,mm->height,mm->width*mm->height);
          // cvMinMaxLoc(mm, &minVal, &maxVal, NULL, NULL, mmask); // Z-axis
          // printf("min: %f, max: %f\n",minVal,maxVal);
          // Get the vector for 
          getHistogramFeatureVectorDirect(mm,nBins,binWidth,mmask,vecCurv,false);
          cvReleaseImage(&m);
          cvReleaseImage(&m2);
          
          // Concatenate the three per-channel histograms into one vector.
          feature_vec.clear();
          feature_vec.resize(3*nBins*3);
          
          for (int i=0;i<nBins*3;i++) {
              feature_vec[i] = vecZ[i];
              feature_vec[i+nBins*3] = vecNormZ[i];
              feature_vec[i+nBins*6] = vecCurv[i];
          }
          // feature_vec.insert(feature_vec.end(), vecZ.begin(), vecZ.end());
          // feature_vec.insert(feature_vec.end(), vecNormZ.begin(), vecNormZ.end());
          // feature_vec.insert(feature_vec.end(), vecCurv.begin(), vecCurv.end());
          
          for (int i=0;i<feature_vec.size();i++) {
            printf(" %d:%.3f",i+1,feature_vec[i]);
            // if (i%nBins==nBins-1) printf("\n");
          }
          printf("\n");
      }
  }
  return 0;
  /*
  mat = convertNormalsToCvMat(indices, normals);
  //vector<CvMat*> mat = convertXYZToCvMat(indices, cloud);
  
  cvNamedWindow("Results", CV_WINDOW_AUTOSIZE);
  for (int i=0;i<mat.size();i++) {
      cvMinMaxLoc(mat[i], &minVal, &maxVal);
      printf("min: %g max: %g\n",minVal,maxVal);
      cvConvertScale(mat[i],mat[i],1.0/(maxVal-minVal),-minVal/(maxVal-minVal));
      cvShowImage("Results", mat[i]);
      cvWaitKey(0);
  }
  mat = convertXYZToCvMat(indices, cloud);
  for (int i=0;i<mat.size();i++) {
      cvMinMaxLoc(mat[i], &minVal, &maxVal);
      printf("min: %g max: %g\n",minVal,maxVal);
      cvConvertScale(mat[i],mat[i],1.0/(maxVal-minVal),-minVal/(maxVal-minVal));
      cvShowImage("Results", mat[i]);
      cvWaitKey(0);
  }
  
  printf("Global frame:\n");
  transform_to_global_frame(cloud,t_matrix);
  calculateNormals(cloud,normals,normals_k_neighbors);
  for (int i=0;i<10;i++) {
    printf("%d %.3f %.3f %.3f %.3f %.3f %.3f %.3f\n",
        indices[i],cloud.points[i].x,cloud.points[i].y,cloud.points[i].z,
        normals.points[i].normal[0],normals.points[i].normal[1],normals.points[i].normal[2],
        normals.points[i].curvature);
  }
  mat = convertNormalsToCvMat(indices, normals);
  //vector<CvMat*> mat = convertXYZToCvMat(indices, cloud);
  cvNamedWindow("Results", CV_WINDOW_AUTOSIZE);
  for (int i=0;i<mat.size();i++) {
      cvMinMaxLoc(mat[i], &minVal, &maxVal);
      printf("min: %g max: %g\n",minVal,maxVal);
      cvConvertScale(mat[i],mat[i],1.0/(maxVal-minVal),-minVal/(maxVal-minVal));
      cvShowImage("Results", mat[i]);
      cvWaitKey(0);
  }
  
  mat = convertXYZToCvMat(indices, cloud);
  for (int i=0;i<mat.size();i++) {
      cvMinMaxLoc(mat[i], &minVal, &maxVal);
      printf("min: %g max: %g\n",minVal,maxVal);
      cvConvertScale(mat[i],mat[i],1.0/(maxVal-minVal),-minVal/(maxVal-minVal));
      cvShowImage("Results", mat[i]);
      cvWaitKey(0);
  }*/
  
  //cvReleaseImage(&img);
  //cvDestroyWindow("Results");
  //return 0;
}
Ejemplo n.º 21
0
/* In-place transform: segments the person in front of the camera out of the
 * incoming RGBA/BGRA frame and writes the foreground mask into the output
 * alpha channel (optionally overwriting RGB with a debug view).
 *
 * Pipeline per frame:
 *   1. bilateral-filter the RGB, derive GRAY and a frame difference vs. the
 *      previous GRAY as a motion cue;
 *   2. map the "ghost" silhouette (if loaded) onto the detected face position;
 *   3. combine skin colour + motion (+ incoming alpha) into a seed mask;
 *   4. run GrabCut (optionally refined with k-means clusters) to get FG/BG;
 *   5. merge the resulting mask back as the alpha channel of the buffer.
 *
 * BUG FIX: the four morphology calls below used to create a structuring
 * element inline via cvCreateStructuringElementEx() without ever releasing
 * it, leaking one IplConvKernel per call on EVERY frame. The elements are
 * now created once per invocation and released after their last use. */
static GstFlowReturn gst_gcs_transform_ip(GstBaseTransform * btrans, GstBuffer * gstbuf) 
{
  GstGcs *gcs = GST_GCS (btrans);

  GST_GCS_LOCK (gcs);

  //////////////////////////////////////////////////////////////////////////////
  // get image data from the input, which is RGBA or BGRA
  gcs->pImageRGBA->imageData = (char*)GST_BUFFER_DATA(gstbuf);
  cvSplit(gcs->pImageRGBA,   gcs->pImgCh1, gcs->pImgCh2, gcs->pImgCh3, gcs->pImgChX );
  cvCvtColor(gcs->pImageRGBA,  gcs->pImgRGB, CV_BGRA2BGR);


  //////////////////////////////////////////////////////////////////////////////
  ////////////////////////////////////////////////////////MOTION CUES INTEGR////
  //////////////////////////////////////////////////////////////////////////////

  //////////////////////////////////////////////////////////////////////////////
  // apply step 1. filtering using bilateral filter. Cannot happen in-place => scratch
  cvSmooth(gcs->pImgRGB, gcs->pImgScratch, CV_BILATERAL, 3, 50, 3, 0);
  // create GRAY image
  cvCvtColor(gcs->pImgScratch, gcs->pImgGRAY, CV_BGR2GRAY);

  // Frame difference the GRAY and the previous one
  // not intuitive: first smooth frames, then 
  cvCopy( gcs->pImgGRAY,   gcs->pImgGRAY_copy,  NULL);
  cvCopy( gcs->pImgGRAY_1, gcs->pImgGRAY_1copy, NULL);
  get_frame_difference( gcs->pImgGRAY_copy, gcs->pImgGRAY_1copy, gcs->pImgGRAY_diff);
  cvErode( gcs->pImgGRAY_diff, gcs->pImgGRAY_diff, NULL, 3);
  cvDilate( gcs->pImgGRAY_diff, gcs->pImgGRAY_diff, NULL, 3);


  //////////////////////////////////////////////////////////////////////////////
  //////////////////////////////////////////////////////////////////////////////
  // ghost mapping: build the destination triangle around the face centre
  // (facepos.x/y hold the CENTRE at this point; converted to top-left below)
  gcs->dstTri[0].x = gcs->facepos.x - gcs->facepos.width/2 ;
  gcs->dstTri[0].y = gcs->facepos.y - gcs->facepos.height/2;
  gcs->dstTri[1].x = gcs->facepos.x - gcs->facepos.width/2;
  gcs->dstTri[1].y = gcs->facepos.y + gcs->facepos.height/2;
  gcs->dstTri[2].x = gcs->facepos.x + gcs->facepos.width/2;
  gcs->dstTri[2].y = gcs->facepos.y + gcs->facepos.height/2;

  if( gcs->ghostfilename){
    cvGetAffineTransform( gcs->srcTri, gcs->dstTri, gcs->warp_mat );
    cvWarpAffine( gcs->cvGhostBwResized, gcs->cvGhostBwAffined, gcs->warp_mat );
  }




  //////////////////////////////////////////////////////////////////////////////
  //////////////////////////////////////////////////////////////////////////////
  // GrabCut algorithm preparation and running

  // convert facepos from centre coordinates to top-left corner
  gcs->facepos.x = gcs->facepos.x - gcs->facepos.width/2;
  gcs->facepos.y = gcs->facepos.y - gcs->facepos.height/2;

  // create an IplImage  with the skin colour pixels as 255
  compose_skin_matrix(gcs->pImgRGB, gcs->pImg_skin);
  // And the skin pixels with the movement mask
  cvAnd( gcs->pImg_skin,  gcs->pImgGRAY_diff,  gcs->pImgGRAY_diff);

  // Structuring elements for the morphology below. Previously these were
  // created inline in each call and leaked; now created once and released
  // after their last use further down.
  IplConvKernel *se7x7 = cvCreateStructuringElementEx(7, 7, 5, 5, CV_SHAPE_RECT, NULL);
  IplConvKernel *se5x5 = cvCreateStructuringElementEx(5, 5, 3, 3, CV_SHAPE_RECT, NULL);

  //cvErode( gcs->pImgGRAY_diff, gcs->pImgGRAY_diff, cvCreateStructuringElementEx(5, 5, 3, 3, CV_SHAPE_RECT,NULL), 1);
  cvDilate(gcs->pImgGRAY_diff, gcs->pImgGRAY_diff, se7x7, 2);
  cvErode( gcs->pImgGRAY_diff, gcs->pImgGRAY_diff, se5x5, 2);

  // if there is alpha==all 1's coming in, then we ignore it: prevents from no vibe before us
  if((0.75*(gcs->width * gcs->height) <= cvCountNonZero(gcs->pImgChX)))
    cvZero(gcs->pImgChX);
  // OR the input Alpha
  cvOr( gcs->pImgChX,  gcs->pImgGRAY_diff,  gcs->pImgGRAY_diff);


  //////////////////////////////////////////////////////////////////////////////
  // try to consolidate a single mask from all the sub-patches
  cvDilate(gcs->pImgGRAY_diff, gcs->pImgGRAY_diff, se7x7, 3);
  cvErode( gcs->pImgGRAY_diff, gcs->pImgGRAY_diff, se5x5, 4);

  // kernels are no longer needed from here on
  cvReleaseStructuringElement(&se7x7);
  cvReleaseStructuringElement(&se5x5);

  //////////////////////////////////////////////////////////////////////////////
  // use either Ghost or boxes-model to create a PR foreground starting point in gcs->grabcut_mask
  if( gcs->ghostfilename)
    compose_grabcut_seedmatrix3(gcs->grabcut_mask, gcs->cvGhostBwAffined, gcs->pImgGRAY_diff  );
  else{
    // toss it all to the bbox creation function, together with the face position and size
    compose_grabcut_seedmatrix2(gcs->grabcut_mask, gcs->facepos, gcs->pImgGRAY_diff, gcs->facefound );
  }


  //////////////////////////////////////////////////////////////////////////////
#ifdef KMEANS
  gcs->num_clusters = 18; // keep it even to simplify integer arithmetics
  cvCopy(gcs->pImgRGB, gcs->pImgRGB_kmeans, NULL);
  posterize_image(gcs->pImgRGB_kmeans);
  create_kmeans_clusters(gcs->pImgRGB_kmeans, gcs->kmeans_points, gcs->kmeans_clusters, 
                         gcs->num_clusters, gcs->num_samples);
  adjust_bodybbox_w_clusters(gcs->grabcut_mask, gcs->pImgRGB_kmeans, gcs->num_clusters, gcs->facepos);
#endif //KMEANS


  //////////////////////////////////////////////////////////////////////////////
  // run GrabCut unless a debug view >= 70 (bbox views) is requested
  if( gcs->debug < 70)
    run_graphcut_iteration( &(gcs->GC), gcs->pImgRGB, gcs->grabcut_mask, &gcs->bbox_prev);



  // get a copy of GRAY for the next iteration
  cvCopy(gcs->pImgGRAY, gcs->pImgGRAY_1, NULL);

  //////////////////////////////////////////////////////////////////////////////
  // if we want to display, just overwrite the output
  if( gcs->display ){
    int outputimage = gcs->debug;
    switch( outputimage ){
    case 1: // output the GRAY difference
      cvCvtColor( gcs->pImgGRAY_diff, gcs->pImgRGB, CV_GRAY2BGR );
      break;
    case 50:// Ghost remapped
      cvCvtColor( gcs->cvGhostBwAffined, gcs->pImgRGB, CV_GRAY2BGR );
      break;
    case 51:// Ghost applied
      cvAnd( gcs->cvGhostBwAffined, gcs->pImgGRAY, gcs->pImgGRAY, NULL );
      cvCvtColor( gcs->pImgGRAY, gcs->pImgRGB, CV_GRAY2BGR );
      break;
    case 60:// Graphcut
      cvAndS(gcs->grabcut_mask, cvScalar(1), gcs->grabcut_mask, NULL);  // get only FG
      cvConvertScale( gcs->grabcut_mask, gcs->grabcut_mask, 127.0);
      cvCvtColor( gcs->grabcut_mask, gcs->pImgRGB, CV_GRAY2BGR );
      break;
    case 61:// Graphcut applied on input/output image
      cvAndS(gcs->grabcut_mask, cvScalar(1), gcs->grabcut_mask, NULL);  // get only FG, PR_FG
      cvConvertScale( gcs->grabcut_mask, gcs->grabcut_mask, 255.0);
      cvAnd( gcs->grabcut_mask,  gcs->pImgGRAY,  gcs->pImgGRAY, NULL);
      cvCvtColor( gcs->pImgGRAY, gcs->pImgRGB, CV_GRAY2BGR );

      cvRectangle(gcs->pImgRGB, cvPoint(gcs->bbox_now.x, gcs->bbox_now.y), 
                  cvPoint(gcs->bbox_now.x + gcs->bbox_now.width, gcs->bbox_now.y+gcs->bbox_now.height),
                  cvScalar(127,0.0), 1, 8, 0 );
     break;
    case 70:// bboxes
      cvZero( gcs->pImgGRAY );
      cvMul( gcs->grabcut_mask,  gcs->grabcut_mask,  gcs->pImgGRAY, 40.0 );
      cvCvtColor( gcs->pImgGRAY, gcs->pImgRGB, CV_GRAY2BGR );
      break;
    case 71:// bboxes applied on the original image
      cvAndS(gcs->grabcut_mask, cvScalar(1), gcs->grabcut_mask, NULL);  // get only FG, PR_FG
      cvMul( gcs->grabcut_mask,  gcs->pImgGRAY,  gcs->pImgGRAY, 1.0 );
      cvCvtColor( gcs->pImgGRAY, gcs->pImgRGB, CV_GRAY2BGR );
      break;
    case 72: // input alpha channel mapped to output
      cvCvtColor( gcs->pImgChX, gcs->pImgRGB, CV_GRAY2BGR );
      break;
#ifdef KMEANS
    case 80:// k-means output
      cvCopy(gcs->pImgRGB_kmeans, gcs->pImgRGB, NULL);
      break;
    case 81:// k-means output filtered with bbox/ghost mask
      cvSplit(gcs->pImgRGB_kmeans, gcs->pImgCh1, gcs->pImgCh2, gcs->pImgCh3, NULL        );
      cvAndS(gcs->grabcut_mask, cvScalar(1), gcs->grabcut_mask, NULL);  // get FG and PR_FG
      cvConvertScale( gcs->grabcut_mask, gcs->grabcut_mask, 255.0);     // scale any to 255.

      cvAnd( gcs->grabcut_mask,  gcs->pImgCh1,  gcs->pImgCh1, NULL );
      cvAnd( gcs->grabcut_mask,  gcs->pImgCh2,  gcs->pImgCh2, NULL );
      cvAnd( gcs->grabcut_mask,  gcs->pImgCh3,  gcs->pImgCh3, NULL );

      cvMerge(              gcs->pImgCh1, gcs->pImgCh2, gcs->pImgCh3, NULL, gcs->pImgRGB);
      break;
#endif //KMEANS
    default:
      break;
    }
  }

  //////////////////////////////////////////////////////////////////////////////
  // copy anyhow the fg/bg to the alpha channel in the output image alpha ch
  cvSplit(gcs->pImgRGB, gcs->pImgCh1, gcs->pImgCh2, gcs->pImgCh3, NULL        );
  cvAndS(gcs->grabcut_mask, cvScalar(1), gcs->grabcut_mask, NULL);  // get only FG and possible FG
  cvConvertScale( gcs->grabcut_mask, gcs->grabcut_mask, 255.0);
  gcs->pImgChA->imageData = (char*)gcs->grabcut_mask->data.ptr;

  cvMerge(              gcs->pImgCh1, gcs->pImgCh2, gcs->pImgCh3, gcs->pImgChA, gcs->pImageRGBA);

  gcs->numframes++;

  GST_GCS_UNLOCK (gcs);  
  
  return GST_FLOW_OK;
}
Ejemplo n.º 22
0
// Enhance the regions of `src` likely to contain a licence plate.
// Works at half resolution: black-hat morphology -> threshold -> a sliding
// 32x16 window test that keeps blocks dense in foreground pixels -> a
// morphological clean-up. The result is upsampled back into `src` in place.
void PlateFinder::ImageRestoration(IplImage *src)
{
	const int w = src->width;
	const int h = src->height;

	// Half-resolution working images (pyramid level 1).
	IplImage *morphed   = cvCreateImage(cvSize(w/2, h/2), IPL_DEPTH_8U, 1);	// black-hat response
	IplImage *halfSrc   = cvCreateImage(cvSize(w/2, h/2), IPL_DEPTH_8U, 1);
	IplImage *scratch   = cvCreateImage(cvSize(w/2, h/2), IPL_DEPTH_8U, 1);
	IplImage *binImg    = cvCreateImage(cvSize(w/2, h/2), IPL_DEPTH_8U, 1);	// thresholded binary image
	IplImage *binCopy   = cvCreateImage(cvSize(w/2, h/2), IPL_DEPTH_8U, 1);
	IplImage *plateMask = cvCreateImage(cvSize(w/2, h/2), IPL_DEPTH_8U, 1);	// highlighted plate regions

	cvPyrDown(src, halfSrc);

	// Black-hat emphasises dark-on-bright details such as plate characters.
	cvMorphologyEx(halfSrc, morphed, scratch, S2, CV_MOP_BLACKHAT);
	cvNormalize(morphed, morphed, 0, 255, CV_MINMAX);

	// Binarize with a threshold proportional to the mean intensity.
	cvThreshold(morphed, binImg, (int)10*cvAvg(morphed).val[0], 255, CV_THRESH_BINARY);
	cvZero(plateMask);
	cvCopy(binImg, binCopy);

	// Slide a 32x16 window (step 4) over the binary image. The window is
	// split into four 16x8 quadrants; a quadrant "hits" when it holds more
	// than 15 foreground pixels, and the block is kept when at least three
	// of the four quadrants hit.
	const int quadX[4] = { 0, 16,  0, 16 };
	const int quadY[4] = { 0,  0,  8,  8 };

	for (int x = 0; x < binCopy->width - 32; x += 4)
	{
		for (int y = 0; y < binCopy->height - 16; y += 4)
		{
			int hits = 0;
			for (int q = 0; q < 4; q++)
			{
				cvSetImageROI(binCopy, cvRect(x + quadX[q], y + quadY[q], 16, 8));
				if (cvCountNonZero(binCopy) > 15)
					hits++;
				cvResetImageROI(binCopy);
			}

			if (hits > 2)
			{
				CvRect block = cvRect(x, y, 32, 16);
				cvSetImageROI(plateMask, block);
				cvSetImageROI(binCopy, block);
				cvCopy(binCopy, plateMask);
				cvResetImageROI(plateMask);
				cvResetImageROI(binCopy);
			}
		}
	}

	IplImage *maskClone = cvCloneImage(plateMask);	// kept for debugging display

	// Merge the surviving blocks into solid candidate regions.
	cvDilate(plateMask, plateMask, NULL, 2);
	cvErode(plateMask, plateMask, NULL, 2);
	cvDilate(plateMask, plateMask, S1, 9);
	cvErode(plateMask, plateMask, S1, 10);
	cvDilate(plateMask, plateMask);

	/*cvShowImage("Source", src);
	cvShowImage("morphed", morphed);
	cvShowImage("binCopy", binCopy);
	cvShowImage("maskClone", maskClone);
	cvShowImage("plateMask", plateMask);*/

	// Write the enhanced mask back into the caller's full-resolution image.
	cvPyrUp(plateMask, src);

	cvReleaseImage(&binCopy);
	cvReleaseImage(&morphed);
	cvReleaseImage(&scratch);
	cvReleaseImage(&plateMask);
	cvReleaseImage(&halfSrc);
	cvReleaseImage(&binImg);
	cvReleaseImage(&maskClone);
}
// Ratio of non-zero ("white") pixels to zero ("black") pixels in `image`.
// NOTE(review): returns +inf for an all-white image (division by zero) --
// confirm callers never pass a fully saturated frame.
double Frame::ratioBW() {
	const double white = cvCountNonZero(image);
	const double total = static_cast<double>(image->height) * image->width;
	const double black = total - white;
	return white / black;
}
Ejemplo n.º 24
0
// Configure the morphology filter engine for one erode/dilate pass.
//
// Parameters:
//   _operation     - ERODE or DILATE (anything else is rejected).
//   _max_width     - maximum row width the engine must support.
//   _src_dst_type  - pixel type, shared by source and destination.
//   _element_shape - RECT, CROSS, ELLIPSE or CUSTOM.
//   _element       - 8uC1/32sC1 structuring-element matrix, used only when
//                    _element_shape == CUSTOM.
//   _ksize/_anchor - structuring-element size and anchor point.
//   _border_mode/_border_value - how pixels outside the image are handled.
//
// Side effects: selects the row/column filter function pointers (x_func,
// y_func), (re)allocates `element` and `el_sparse`, and fills `el_sparse`
// with the channel-scaled offsets of the non-zero element entries.
void CvMorphology::init( int _operation, int _max_width, int _src_dst_type,
                         int _element_shape, CvMat* _element,
                         CvSize _ksize, CvPoint _anchor,
                         int _border_mode, CvScalar _border_value )
{
    CV_FUNCNAME( "CvMorphology::init" );

    __BEGIN__;

    int depth = CV_MAT_DEPTH(_src_dst_type);
    int el_type = 0, nz = -1;
    
    if( _operation != ERODE && _operation != DILATE )
        CV_ERROR( CV_StsBadArg, "Unknown/unsupported morphological operation" );

    if( _element_shape == CUSTOM )
    {
        if( !CV_IS_MAT(_element) )
            CV_ERROR( CV_StsBadArg,
            "structuring element should be valid matrix if CUSTOM element shape is specified" );

        el_type = CV_MAT_TYPE(_element->type);
        if( el_type != CV_8UC1 && el_type != CV_32SC1 )
            CV_ERROR( CV_StsUnsupportedFormat, "the structuring element must have 8uC1 or 32sC1 type" );

        _ksize = cvGetMatSize(_element);
        CV_CALL( nz = cvCountNonZero(_element));
        // A fully dense custom element is equivalent to a rectangle, which
        // has a much faster specialized implementation below.
        if( nz == _ksize.width*_ksize.height )
            _element_shape = RECT;
    }

    operation = _operation;
    el_shape = _element_shape;

    CV_CALL( CvBaseImageFilter::init( _max_width, _src_dst_type, _src_dst_type,
        _element_shape == RECT, _ksize, _anchor, _border_mode, _border_value ));

    if( el_shape == RECT )
    {
        // Rectangular elements use separable row+column filters, selected
        // per operation and pixel depth.
        if( operation == ERODE )
        {
            if( depth == CV_8U )
                x_func = (CvRowFilterFunc)icvErodeRectRow_8u,
                y_func = (CvColumnFilterFunc)icvErodeRectCol_8u;
            else if( depth == CV_16U )
                x_func = (CvRowFilterFunc)icvErodeRectRow_16u,
                y_func = (CvColumnFilterFunc)icvErodeRectCol_16u;
            else if( depth == CV_32F )
                x_func = (CvRowFilterFunc)icvErodeRectRow_32f,
                y_func = (CvColumnFilterFunc)icvErodeRectCol_32f;
        }
        else
        {
            assert( operation == DILATE );
            if( depth == CV_8U )
                x_func = (CvRowFilterFunc)icvDilateRectRow_8u,
                y_func = (CvColumnFilterFunc)icvDilateRectCol_8u;
            else if( depth == CV_16U )
                x_func = (CvRowFilterFunc)icvDilateRectRow_16u,
                y_func = (CvColumnFilterFunc)icvDilateRectCol_16u;
            else if( depth == CV_32F )
                x_func = (CvRowFilterFunc)icvDilateRectRow_32f,
                y_func = (CvColumnFilterFunc)icvDilateRectCol_32f;
        }
    }
    else
    {
        // Arbitrary-shaped elements: only a column filter is used; it walks
        // the sparse list of non-zero element positions built below.
        int i, j, k = 0;
        int cn = CV_MAT_CN(src_type);
        CvPoint* nz_loc;

        // Reallocate the element matrix and sparse buffer only if the
        // cached ones do not match the requested kernel size.
        if( !(element && el_sparse &&
            _ksize.width == element->cols && _ksize.height == element->rows) )
        {
            cvReleaseMat( &element );
            cvFree( &el_sparse );
            CV_CALL( element = cvCreateMat( _ksize.height, _ksize.width, CV_8UC1 ));
            CV_CALL( el_sparse = (uchar*)cvAlloc(
                ksize.width*ksize.height*(2*sizeof(int) + sizeof(uchar*))));
        }

        if( el_shape == CUSTOM )
        {
            CV_CALL( cvConvert( _element, element ));
        }
        else
        {
            CV_CALL( init_binary_element( element, el_shape, anchor ));
        }

        if( operation == ERODE )
        {
            if( depth == CV_8U )
                y_func = (CvColumnFilterFunc)icvErodeAny_8u;
            else if( depth == CV_16U )
                y_func = (CvColumnFilterFunc)icvErodeAny_16u;
            else if( depth == CV_32F )
                y_func = (CvColumnFilterFunc)icvErodeAny_32f;
        }
        else
        {
            assert( operation == DILATE );
            if( depth == CV_8U )
                y_func = (CvColumnFilterFunc)icvDilateAny_8u;
            else if( depth == CV_16U )
                y_func = (CvColumnFilterFunc)icvDilateAny_16u;
            else if( depth == CV_32F )
                y_func = (CvColumnFilterFunc)icvDilateAny_32f;
        }
        
        // Record each non-zero element position as a point whose x is
        // pre-multiplied by the channel count (byte-less pixel offset).
        nz_loc = (CvPoint*)el_sparse;

        for( i = 0; i < ksize.height; i++ )
            for( j = 0; j < ksize.width; j++ )
            {
                if( element->data.ptr[i*element->step+j] )
                    nz_loc[k++] = cvPoint(j*cn,i);
            }
        // Degenerate all-zero element: fall back to the anchor alone so the
        // filter still has at least one sample.
        if( k == 0 )
            nz_loc[k++] = cvPoint(anchor.x*cn,anchor.y);
        el_sparse_count = k;
    }

    // For float images with a constant border, pre-toggle the border values
    // with CV_TOGGLE_FLT -- presumably so the integer min/max comparisons in
    // the filter kernels order float bit patterns correctly (OpenCV-internal
    // convention; confirm against the kernel implementations).
    if( depth == CV_32F && border_mode == IPL_BORDER_CONSTANT )
    {
        int i, cn = CV_MAT_CN(src_type);
        int* bt = (int*)border_tab;
        for( i = 0; i < cn; i++ )
            bt[i] = CV_TOGGLE_FLT(bt[i]);
    }

    __END__;
}
Ejemplo n.º 25
0
// Update the motion-history image (MHI) from a new video frame and draw the
// detected motion components into `dst` as "clocks" whose hands indicate the
// motion direction.
//
// parameters:
//  img  - input video frame (BGR)
//  dst  - resultant motion picture (same size as img; blue = motion history)
//  diff_threshold - minimum per-pixel frame difference counted as motion
//
// Relies on file-scope state: the N-frame ring buffer `buf` with index
// `last`, and the images `mhi`, `orient`, `segmask`, `mask`, plus `storage`.
void  update_mhi( IplImage* img, IplImage* dst, int diff_threshold )
{
    double timestamp = (double)clock()/CLOCKS_PER_SEC; // get current time in seconds
    CvSize size = cvSize(img->width,img->height); // get current frame size
    int i, idx1 = last, idx2;
    IplImage* silh;
    CvSeq* seq;
    CvRect comp_rect;
    double count;
    double angle;
    CvPoint center;
    double magnitude;
    CvScalar color;

    // allocate images at the beginning or
    // reallocate them if the frame size is changed
    if( !mhi || mhi->width != size.width || mhi->height != size.height ) {
        if( buf == 0 ) {
            buf = (IplImage**)malloc(N*sizeof(buf[0]));
            memset( buf, 0, N*sizeof(buf[0]));
        }

        for( i = 0; i < N; i++ ) {
            cvReleaseImage( &buf[i] );
            buf[i] = cvCreateImage( size, IPL_DEPTH_8U, 1 );
            cvZero( buf[i] );
        }
        cvReleaseImage( &mhi );
        cvReleaseImage( &orient );
        cvReleaseImage( &segmask );
        cvReleaseImage( &mask );

        mhi = cvCreateImage( size, IPL_DEPTH_32F, 1 );
        cvZero( mhi ); // clear MHI at the beginning
        orient = cvCreateImage( size, IPL_DEPTH_32F, 1 );
        segmask = cvCreateImage( size, IPL_DEPTH_32F, 1 );
        mask = cvCreateImage( size, IPL_DEPTH_8U, 1 );
    }

    cvCvtColor( img, buf[last], CV_BGR2GRAY ); // convert frame to grayscale

    idx2 = (last + 1) % N; // index of (last - (N-1))th frame
    last = idx2;

    // silhouette is computed in-place into the oldest ring-buffer slot,
    // which the new grayscale frame will overwrite on the next call
    silh = buf[idx2];
    cvAbsDiff( buf[idx1], buf[idx2], silh ); // get difference between frames

    cvThreshold( silh, silh, diff_threshold, 1, CV_THRESH_BINARY ); // and threshold it
    cvUpdateMotionHistory( silh, mhi, timestamp, MHI_DURATION ); // update MHI

    // convert MHI to blue 8u image
    cvCvtScale( mhi, mask, 255./MHI_DURATION,
                (MHI_DURATION - timestamp)*255./MHI_DURATION );
    cvZero( dst );
    cvMerge( mask, 0, 0, 0, dst );

    // calculate motion gradient orientation and valid orientation mask
    cvCalcMotionGradient( mhi, mask, orient, MAX_TIME_DELTA, MIN_TIME_DELTA, 3 );

    printf("Nonzero count %d\n", cvCountNonZero(mask));

    if( !storage )
        storage = cvCreateMemStorage(0);
    else
        cvClearMemStorage(storage);

    // segment motion: get sequence of motion components
    // segmask is marked motion components map. It is not used further
    seq = cvSegmentMotion( mhi, segmask, storage, timestamp, MAX_TIME_DELTA );

    // iterate through the motion components,
    // One more iteration (i == -1) corresponds to the whole image (global motion)
    for( i = -1; i < seq->total; i++ ) {

        if( i < 0 ) { // case of the whole image
            comp_rect = cvRect( 0, 0, size.width, size.height );
            color = CV_RGB(255,255,255);
            magnitude = 100;
        }
        else { // i-th motion component
            comp_rect = ((CvConnectedComp*)cvGetSeqElem( seq, i ))->rect;
            if( comp_rect.width + comp_rect.height < 100 ) // reject very small components
                continue;
            color = CV_RGB(255,0,0);
            magnitude = 30;
        }

        // select component ROI (must be reset below before the next component)
        cvSetImageROI( silh, comp_rect );
        cvSetImageROI( mhi, comp_rect );
        cvSetImageROI( orient, comp_rect );
        cvSetImageROI( mask, comp_rect );

        // calculate orientation
        angle = cvCalcGlobalOrientation( orient, mask, mhi, timestamp, MHI_DURATION);
        angle = 360.0 - angle;  // adjust for images with top-left origin

        count = cvNorm( silh, 0, CV_L1, 0 ); // calculate number of points within silhouette ROI

        cvResetImageROI( mhi );
        cvResetImageROI( orient );
        cvResetImageROI( mask );
        cvResetImageROI( silh );

        // check for the case of little motion
        if( count < comp_rect.width*comp_rect.height * 0.05 )
            continue;

        // draw a clock with arrow indicating the direction
        center = cvPoint( (comp_rect.x + comp_rect.width/2),
                          (comp_rect.y + comp_rect.height/2) );

        cvCircle( dst, center, cvRound(magnitude*1.2), color, 3, CV_AA, 0 );
        cvLine( dst, center, cvPoint( cvRound( center.x + magnitude*cos(angle*CV_PI/180)),
                                      cvRound( center.y - magnitude*sin(angle*CV_PI/180))), color, 3, CV_AA, 0 );
    }
}
Ejemplo n.º 26
0
	  				double proj = fabs(x*cos(i*binSize+M_PI/2) + y*sin(i*binSize+M_PI/2));
	  				double value = (proj < gap) ? 0.01: 1;

	  				CV_IMAGE_ELEM(leftDisc[i],float,y+wr,x+wr) = (theta < M_PI)   ? value :0;
	  				CV_IMAGE_ELEM(rightDisc[i],float,y+wr,x+wr)= (theta >= M_PI)  ? value :0;	
	  				if (theta < M_PI){ 
	  					leftDiscTot += value;
	  				}
	  				else{
	  					rightDiscTot += value;
	  				}		  
				}	
      		}
    		}          
    		// normalize
    		int count1 = cvCountNonZero(leftDisc[i]);
    		int count2 = cvCountNonZero(rightDisc[i]);
    		if (count1 != count2){
      		printf("Discs imBalance: count1 %d count2 %d\n",count1,count2);
      		exit(1);
    		}    
    		cvConvertScale(leftDisc[i],	leftDisc[i],	1.0/leftDiscTot);
    		cvConvertScale(rightDisc[i],	rightDisc[i],	     1.0/rightDiscTot);
  	}
}


// calculate c1c2c3 based 3D color histogram
void getColorHist(IplImage** imgc1c2c3_float, 
				const int numImg, 
				double* hist_c1c2c3, 
Ejemplo n.º 27
0
/**
 * Filter a raw foreground-detection mask into plausible object blobs.
 *
 * @param detection  binary foreground mask; cleared and redrawn in place
 *                   with the filled contours of all detected blobs.
 * @param image      source frame, used only for its dimensions.
 * @param m_GSD      ground sampling distance hint (currently unused; a fixed
 *                   GSD of 20 is applied -- see the commented-out code).
 * @param bgfg_cb    optional callback invoked once per accepted blob with its
 *                   bounding box (x, y, width, height).
 * @return false when the detection covers implausibly little or much of the
 *         frame (likely noise or a global lighting change), true otherwise.
 *
 * BUG FIX: `height` was initialized from image->width, so the max_area
 * filter used width*width instead of width*height. It now uses the height.
 * The unused second memory storage was also removed.
 */
WSNATIVE_EXPORT bool ProcessFrame(IplImage * detection, IplImage * image, float m_GSD = 20, bgfg_cb_t bgfg_cb = 0)
{
    //too few or too much detection?
    float mask_area = image->width*image->height;
    int width = image->width, height = image->height;
    float det_area = cvCountNonZero(detection);
    if (det_area < mask_area*0.00001 || det_area > mask_area*0.5 )
        return false;
    
    CvMemStorage* storage = cvCreateMemStorage(0);
    CvSeq* contour;
    
    cvFindContours( detection, storage, &contour, sizeof(CvContour), CV_RETR_TREE, CV_CHAIN_APPROX_NONE);

    CvRect rectBlob[MAXBLOBCNT];
    CvBox2D rectBlob2D[MAXBLOBCNT];

    int nBlobNum = 0;

    
    //note: this line erase all detected foreground pixels
    cvZero(detection);

    //go over all the blobs
    for( ; contour != 0; contour = contour->h_next )
    {
        double fContourArea = fabs(cvContourArea(contour, CV_WHOLE_SEQ ));
        CvRect tmpRect = cvBoundingRect(contour, 1);
        float fRatio = 1.0*tmpRect.height/tmpRect.width;
        float occupy = fContourArea/(tmpRect.height*tmpRect.width);

        //TODO: make it an option
        cvDrawContours(detection,contour,CV_RGB(255,255,255),CV_RGB(255,255,255),0,CV_FILLED,8);
        
        //get the GSD(Ground Sampling Distance) for that location
        float GSD = 20;
        /*
        if (tmpRect.height/2.0 +tmpRect.y < m_Y1 )
            GSD = m_GSD1;
        else if (tmpRect.height/2.0 +tmpRect.y > m_Y2 )
            GSD = m_GSD2;
        else
            GSD = 1.0*m_GSD1*(m_Y2 - tmpRect.y - tmpRect.height/2.0 )/(m_Y2-m_Y1)+ 1.0*m_GSD2*(tmpRect.height/2.0 +tmpRect.y -m_Y1)/(m_Y2-m_Y1);
        */
        
        CvRect tmpROI = cvBoundingRect( contour, 1);
        float boundary = 3; //pixels
        float min_area = GSD*GSD*0.10 /*0.25 for regular*/, max_area = width*height;
        
        // Accept blobs that are (a) away from the left/right frame edges or
        // large enough to matter, (b) within the plausible area range, and
        // (c) dense enough within their bounding box (rejects thin streaks).
        if ( ( (tmpROI.x >= boundary && tmpROI.x+tmpROI.width <= width-boundary) || fContourArea > std::min<float>(1600.0 ,max_area/36) ) && 
             fContourArea<max_area && fContourArea >= min_area && 
             !( (occupy < 0.5 &&  fContourArea/(GSD*GSD) > 4.0 ) || occupy <0.33 ) && //TODO: make it adaptive?
            nBlobNum< MAXBLOBCNT)
        {
            rectBlob[nBlobNum] = tmpROI;

            CvBox2D box2d = cvFitEllipse2( contour);
            rectBlob2D[nBlobNum] = box2d;

            nBlobNum++;

            //draw outline
            //cvDrawContours(showimage,contour,CV_RGB(255,255,255),CV_RGB(255,255,255),1,1,8);
        }
    }

    // report each accepted blob's bounding box to the caller
    for (int j = 0; j< nBlobNum; j++)
    {
        if(bgfg_cb) {
            const CvRect& r = rectBlob[j];
            bgfg_cb(r.x, r.y, r.width, r.height);
        }
    }

    cvReleaseMemStorage( &storage); 

    return true;
}
Ejemplo n.º 28
0
/*************************************************************************
* @function:
*	blurIdentify()
* @input:
*   IplImage* input            - input grayscale image
* @return:
*   uchar                      - 1 if the image is judged sharp, 0 if blurred
* @notes:
*   Computes horizontal and vertical gradient images, analyses their
*   histograms, and combines the GMG sharpness metric with the number of
*   occupied gradient levels (NX, NY) into the BIM indicator used for the
*   blur/sharp decision.
*
*   BUG FIX: the function previously ended with `return 1;`, so callers
*   always saw "sharp" no matter what was computed. It now returns `flag`.
*************************************************************************/
uchar blurIdentify(const IplImage* input)
{
	int rows=input->height;
	int cols=input->width;
	int i=0,j=0;
	uchar flag=0;
	CvSize size1,size2;
	size1.width=cols;
	size1.height=rows;
	size2.width=2*cols;
	size2.height=rows;


	IplImage* tempr=cvCreateImage(size1,IPL_DEPTH_8U,1);
	IplImage* tempc=cvCreateImage(size1,IPL_DEPTH_8U,1);
	cvZero(tempr);
	cvZero(tempc);

	/* horizontal gradient image: |I(i,j+1) - I(i,j)| */
	for (i=0;i<rows;i++)
	{
		uchar* input_data=(uchar*)(input->imageData+i*input->widthStep);
		uchar* tempr_data=(uchar*)(tempr->imageData+i*tempr->widthStep);
		for (j=0;j<cols-1;j++)
		{
			tempr_data[j]=abs(input_data[j+1]-input_data[j]);
		}
	}
	/* vertical gradient image: |I(i+1,j) - I(i,j)| */
	for (i = 0; i<rows - 1; i++)
	{
		uchar* input_data1 = (uchar*)(input->imageData + i*input->widthStep);
		uchar* input_data2 = (uchar*)(input->imageData + (i + 1)*input->widthStep);
		uchar* tempc_data = (uchar*)(tempc->imageData + i*tempc->widthStep);
		for (j = 0; j<cols; j++)
		{

			tempc_data[j]=abs(input_data2[j]-input_data1[j]);
		}
	}
	/* merge the two gradient images side by side for combined statistics */
	IplImage* gradient=cvCreateImage(size2,IPL_DEPTH_8U,1);
	cvZero(gradient);
	cvSetImageROI(gradient,cvRect(0,0,cols,rows));
	cvCopy(tempr,gradient);							// store the horizontal gradient
	cvResetImageROI(gradient);

	cvSetImageROI(gradient,cvRect(cols,0,cols,rows));
	cvCopy(tempc,gradient);							// store the vertical gradient
	cvResetImageROI(gradient);

	int nHistSize=256;
	float fRange[]={0,255};   // gray-level range
	float* pfRanges[]={fRange};
	//CvHistogram* hist=cvCreateHist(1,&nHistSize,CV_HIST_ARRAY,pfRanges);	// CV_HIST_ARRAY: dense array
	//cvCalcHist(&gradient,hist);
	CvHistogram* hist1=cvCreateHist(1,&nHistSize,CV_HIST_ARRAY,pfRanges);	// CV_HIST_ARRAY: dense array
	CvHistogram* hist2=cvCreateHist(1,&nHistSize,CV_HIST_ARRAY,pfRanges);	// CV_HIST_ARRAY: dense array

	cvCalcHist(&tempr,hist1);
	cvCalcHist(&tempc, hist2);

	// NX/NY: number of occupied gradient levels (sharper images populate more)
	//int NGN=cvCountNonZero(hist->bins);
	int NX=cvCountNonZero(hist1->bins);
	int NY = cvCountNonZero(hist2->bins);

	double GMG=calGMG(input);
	double s = (NX + NY) / (2 * 256.0);
	double BIM=s*GMG;
	
	// empirically chosen threshold: BIM above 700 is considered sharp
	if(BIM>700)
	{
		flag=1;
	}
	// debug code: display the gradient images
	/*cvNamedWindow("gx",1);
	cvShowImage("gx",tempr);
	cvNamedWindow("gy",1);
	cvShowImage("gy",tempc);
	cvNamedWindow("g",1);
	cvShowImage("g",gradient);*/
	printf("GMG=%f,NX=%d,NY=%d,s=%f,BIM=%f\n", GMG, NX, NY,s, BIM);

	//for(i=0;i<256;i++)
	//{
	//	printf("%.f ",((CvMatND*)hist->bins)->data.fl[i]);
	//}

	cvReleaseImage(&tempr);
	cvReleaseImage(&tempc);
	cvReleaseImage(&gradient);
	cvReleaseHist(&hist1);
	cvReleaseHist(&hist2);


	return flag;
}
Ejemplo n.º 29
0
//--------------------------------------------------------------
// Poll every Kinect, tile the fresh depth frames into one grayscale image,
// clean it up (erode/dilate/blur), band-pass it between the near and far
// depth thresholds, and track activity/delta pixel counts plus blobs on the
// frame-to-frame XOR image.
void kinectTracker::update()
{
    bool anyNewFrame = false;

    // Pull a fresh depth frame from each device into its grayscale FBO.
    for (int dev = 0; dev < NUM_KINECTS; dev++)
    {
        kinectDevice[dev].update();
        if (!kinectDevice[dev].isFrameNew())
            continue;

        anyNewFrame = true;
        kinectGrayFBO[dev].begin();
        ofClear(0, 0, 0);
        kinectDevice[dev].drawDepth(kinectOffset[dev].x, kinectOffset[dev].y,
                                    kinectGrayFBO[dev].getWidth(), kinectGrayFBO[dev].getHeight());
        kinectGrayFBO[dev].end();
    }

    if (!anyNewFrame)
        return;

    // Tile each device's frame into its quadrant of the combined image.
    for (int dev = 0; dev < NUM_KINECTS; dev++)
    {
        kinectGrayFBO[dev].readToPixels(kinectPixels[dev]);
        cvi[dev].setFromPixels(kinectPixels[dev]);

        kinectGrayImage[dev] = cvi[dev];

        int tileX = kinectDevice[dev].getWidth() * (dev % 2);
        int tileY = kinectDevice[dev].getHeight() * (dev / 2);

        grayImage.setROI(tileX, tileY, kinectDevice[dev].getWidth(), kinectDevice[dev].getHeight());
        grayImage.setRoiFromPixels(kinectGrayImage[dev].getPixels(), kinectGrayImage[dev].width, kinectGrayImage[dev].height);
    }

    grayImage.setROI(0, 0, grayImage.width, grayImage.height);

    // Optional morphological clean-up and smoothing.
    if (erodeIterations > 0)
        grayImage.erode(erodeIterations);
    if (dilateIterations > 0)
        grayImage.dilate(dilateIterations);
    if (enableBlur)
        grayImage.blurHeavily();

    // Two thresholds (near and far planes) whose AND keeps only the pixels
    // lying between the two depth planes.
    grayThreshNear = grayImage;
    grayThreshFar = grayImage;
    grayThreshNear.threshold(nearThreshold, true);
    grayThreshFar.threshold(farThreshold);
    cvAnd(grayThreshNear.getCvImage(), grayThreshFar.getCvImage(), grayImage.getCvImage(), NULL);
    grayImage.flagImageChanged();

    // Activity metrics: pixels currently active, and pixels that changed
    // since the previous frame (XOR).
    activePixels = cvCountNonZero(grayImage.getCvImage());

    cvXor(grayImage.getCvImage(), prevGrayImage.getCvImage(), grayImageXorred.getCvImage(), 0);
    deltaPixels = cvCountNonZero(grayImageXorred.getCvImage());
    grayImageXorred.flagImageChanged();

    // Keep a bounded rolling history of both metrics.
    activePixelsHistory.push_front(activePixels);
    deltaPixelsHistory.push_front(deltaPixels);

    while (activePixelsHistory.size() > numAverageSamples)
        activePixelsHistory.pop_back();
    while (deltaPixelsHistory.size() > numAverageSamples)
        deltaPixelsHistory.pop_back();

    prevGrayImage = grayImage;

    // Blob tracking runs on the XOR (motion) image.
    contourFinder.findContours(grayImageXorred, 1000, (grayImage.width*grayImage.height)/2, 20, false);
    blobsManager.update(contourFinder.blobs);
}
Ejemplo n.º 30
0
/* Finds, for the points of image1 flagged visible in pntStatus1, the
   corresponding points on image2 using pyramidal Lucas-Kanade optical flow.
   Successfully tracked points are written into points2 and flagged in
   pntStatus2. If useFilter is nonzero, the matches are additionally filtered
   with a RANSAC-estimated fundamental matrix; outliers are cleared from
   pntStatus2.

   Parameters:
     image1, image2   - input images of equal size (assumed BGR; they are
                        converted with CV_BGR2GRAY — TODO confirm callers
                        always pass 3-channel images)
     points1          - 2xN matrix of point coordinates on image1
     pntStatus1       - 1xN mask: nonzero for points to track
     points2          - 2xN output matrix of tracked coordinates on image2
     pntStatus2       - 1xN output mask: nonzero for good correspondences
     useFilter        - nonzero => filter matches with the fundamental matrix
     threshold        - RANSAC distance threshold for the filter

   Returns the number of corresponding points (after filtering, if enabled),
   or 0 on failure. */
int icvFindCorrForGivenPoints( IplImage *image1,/* Image 1 */
                                IplImage *image2,/* Image 2 */
                                CvMat *points1, 
                                CvMat *pntStatus1,
                                CvMat *points2,
                                CvMat *pntStatus2,
                                int useFilter,/*Use fundamental matrix to filter points */
                                double threshold)/* Threshold for good points in filter */
{
    int resNumCorrPoints = 0;
    CvPoint2D32f* cornerPoints1 = 0;
    CvPoint2D32f* cornerPoints2 = 0;
    char*  status = 0;
    float* errors = 0;
    CvMat* tmpPoints1 = 0;
    CvMat* tmpPoints2 = 0;
    CvMat* pStatus = 0;
    IplImage *grayImage1 = 0;
    IplImage *grayImage2 = 0;
    IplImage *pyrImage1 = 0;
    IplImage *pyrImage2 = 0;

    CV_FUNCNAME( "icvFindCorrForGivenPoints" );
    __BEGIN__;

    /* Test input data for errors */

    /* Test for null pointers */
    if( image1     == 0 || image2     == 0 || 
        points1    == 0 || points2    == 0 ||
        pntStatus1 == 0 || pntStatus2 == 0)
    {
        CV_ERROR( CV_StsNullPtr, "Some of parameters is a NULL pointer" );
    }

    /* Test image size */
    /* Declarations are kept separate from assignments because CV_ERROR
       jumps (goto) past this point inside the __BEGIN__/__END__ macros. */
    int w,h;
    w = image1->width;
    h = image1->height;

    if( w <= 0 || h <= 0)
    {
        CV_ERROR( CV_StsOutOfRange, "Size of image1 must be > 0" );
    }

    if( image2->width != w || image2->height != h )
    {
        CV_ERROR( CV_StsUnmatchedSizes, "Size of images must be the same" );
    }

    /* Test for matrices */
    if( !CV_IS_MAT(points1)    || !CV_IS_MAT(points2) || 
        !CV_IS_MAT(pntStatus1) || !CV_IS_MAT(pntStatus2) )
    {
        CV_ERROR( CV_StsUnsupportedFormat, "Input parameters (points and status) must be a matrices" );
    }

    /* Test type of status matrices */
    if( !CV_IS_MASK_ARR(pntStatus1) || !CV_IS_MASK_ARR(pntStatus2) )
    {
        CV_ERROR( CV_StsUnsupportedFormat, "Statuses must be a mask arrays" );
    }

    /* Test number of points */
    int numPoints;
    numPoints = points1->cols;

    if( numPoints <= 0 )
    {
        CV_ERROR( CV_StsOutOfRange, "Number of points1 must be > 0" );
    }

    if( points2->cols != numPoints || pntStatus1->cols != numPoints || pntStatus2->cols != numPoints )
    {
        CV_ERROR( CV_StsUnmatchedSizes, "Number of points and statuses must be the same" );
    }

    if( points1->rows != 2 || points2->rows != 2 )
    {
        CV_ERROR( CV_StsOutOfRange, "Number of points coordinates must be 2" );
    }

    if( pntStatus1->rows != 1 || pntStatus2->rows != 1 )
    {
        CV_ERROR( CV_StsOutOfRange, "Status must be a matrix 1xN" );
    }
    /* ----- End test ----- */


    /* Compute number of visible points on image1 */
    int numVisPoints;
    numVisPoints = cvCountNonZero(pntStatus1);

    if( numVisPoints > 0 )
    {
        /* Create grayscale work images and LK pyramid buffers */
        CV_CALL( grayImage1 = cvCreateImage(cvSize(w,h),8,1) );
        CV_CALL( grayImage2 = cvCreateImage(cvSize(w,h),8,1) );
        CV_CALL( pyrImage1  = cvCreateImage(cvSize(w,h),8,1) );
        CV_CALL( pyrImage2  = cvCreateImage(cvSize(w,h),8,1) );

        CV_CALL( cornerPoints1 = (CvPoint2D32f*)cvAlloc( sizeof(CvPoint2D32f)*numVisPoints) );
        CV_CALL( cornerPoints2 = (CvPoint2D32f*)cvAlloc( sizeof(CvPoint2D32f)*numVisPoints) );
        CV_CALL( status = (char*)cvAlloc( sizeof(char)*numVisPoints) );
        /* LK writes one float per point; the 2x factor is deliberate slack
           kept from the original allocation. */
        CV_CALL( errors = (float*)cvAlloc( 2 * sizeof(float)*numVisPoints) );

        int i;
        for( i = 0; i < numVisPoints; i++ )
        {
            status[i] = 1;
        }

        cvCvtColor(image1,grayImage1,CV_BGR2GRAY);
        cvCvtColor(image2,grayImage2,CV_BGR2GRAY);

        /* Gather the coordinates of the visible points into a dense array */
        uchar *stat1 = pntStatus1->data.ptr;
        uchar *stat2 = pntStatus2->data.ptr;

        int curr = 0;
        for( i = 0; i < numPoints; i++ )
        {
            if( stat1[i] )
            {
                cornerPoints1[curr].x = (float)cvmGet(points1,0,i);
                cornerPoints1[curr].y = (float)cvmGet(points1,1,i);
                curr++;
            }
        }

        /* Track the points with pyramidal LK (3 pyramid levels, 10x10 window) */
        cvCalcOpticalFlowPyrLK( grayImage1, grayImage2,
                                pyrImage1, pyrImage2,
                                cornerPoints1, cornerPoints2,
                                numVisPoints, cvSize(10,10), 3,
                                status, errors, 
                                cvTermCriteria(CV_TERMCRIT_ITER|CV_TERMCRIT_EPS,20,0.03),
                                0/*CV_LKFLOW_PYR_A_READY*/ );

        
        memset(stat2,0,sizeof(uchar)*numPoints);

        int totalCorns = 0;

        /* Copy successfully tracked points back to the full-index arrays
           and set status; the dense LK arrays are indexed by currVis while
           the sparse outputs are indexed by i (stat1 and stat2 may differ) */
        int currVis = 0;
        for( i = 0; i < numPoints; i++ )
        {
            if( stat1[i] )
            {
                /* errors[] is the LK tracking error; 1000 is an empirical
                   cut-off for wildly mistracked points */
                if( status[currVis] && errors[currVis] < 1000 )
                {
                    stat2[i] = 1;
                    cvmSet(points2,0,i,cornerPoints2[currVis].x);
                    cvmSet(points2,1,i,cornerPoints2[currVis].y);
                    totalCorns++;
                }
                currVis++;
            }
        }

        resNumCorrPoints = totalCorns;

        /* Filter points using RANSAC */
        if( useFilter )
        {
            resNumCorrPoints = 0;
            /* A fundamental matrix needs at least 8 point pairs */
            if( totalCorns > 7 )
            {
                /* Create array with good points only */
                CV_CALL( tmpPoints1 = cvCreateMat(2,totalCorns,CV_64F) );
                CV_CALL( tmpPoints2 = cvCreateMat(2,totalCorns,CV_64F) );

                /* Copy just good points */
                int currPoint = 0;
                for( i = 0; i < numPoints; i++ )
                {
                    if( stat2[i] )
                    {
                        cvmSet(tmpPoints1,0,currPoint,cvmGet(points1,0,i));
                        cvmSet(tmpPoints1,1,currPoint,cvmGet(points1,1,i));

                        cvmSet(tmpPoints2,0,currPoint,cvmGet(points2,0,i));
                        cvmSet(tmpPoints2,1,currPoint,cvmGet(points2,1,i));

                        currPoint++;
                    }
                }

                /* Compute fundamental matrix */
                CvMat fundMatr;
                double fundMatr_dat[9];
                fundMatr = cvMat(3,3,CV_64F,fundMatr_dat);
        
                CV_CALL( pStatus = cvCreateMat(1,totalCorns,CV_32F) );

                /* cvFindFundamentalMat returns the number of matrices found
                   (0 or 1 for RANSAC); pStatus marks inliers per point */
                int num = cvFindFundamentalMat(tmpPoints1,tmpPoints2,&fundMatr,CV_FM_RANSAC,threshold,0.99,pStatus);
                if( num > 0 )
                {
                    int currPnt = 0;   /* index into pStatus (renamed: used to shadow outer curr) */
                    int numInliers = 0;
                    /* Clear status of RANSAC outliers and count survivors.
                       BUGFIX: previously the result was set to the total
                       number of points examined (== totalCorns), so the
                       filter never affected the returned count. */
                    for( i = 0; i < numPoints; i++ )
                    {
                        if( stat2[i] )
                        {
                            if( cvmGet(pStatus,0,currPnt) == 0 )
                            {
                                stat2[i] = 0;
                            }
                            else
                            {
                                numInliers++;
                            }
                            currPnt++;
                        }
                    }
                    resNumCorrPoints = numInliers;
                }
            }
        }
    }

    __END__;

    /* Free allocated memory */
    cvFree(&cornerPoints1);
    cvFree(&cornerPoints2);
    cvFree(&status);
    cvFree(&errors);
    cvFree(&tmpPoints1);
    cvFree(&tmpPoints2);
    cvReleaseMat( &pStatus );
    cvReleaseImage( &grayImage1 );
    cvReleaseImage( &grayImage2 );
    cvReleaseImage( &pyrImage1 );
    cvReleaseImage( &pyrImage2 );

    return resNumCorrPoints;
}