Example #1
CBlob getNearestBlob(CBlobResult blobs, coord coordinate){
	
	// Clamp to the size of the fixed distance array below to avoid writing past its end
	int tot = min( blobs.GetNumBlobs(), 10 );
	CBlob Blob;
	float distance[10]; // 10 is the maximum number of blobs handled per frame
	
	coord tempCoord;

	//This loop computes the Euclidean distance between the given coordinate and every detected blob, filling the distance array.
	for (int i=0; i<tot; i++){
		Blob = blobs.GetBlob(i);
		tempCoord.set( (int) Blob.MaxX(), (int) Blob.MinX(), (int) Blob.MaxY(), (int) Blob.MinY());
		distance[i] = sqrt((double)(tempCoord.cX - coordinate.cX)*(tempCoord.cX - coordinate.cX) + (tempCoord.cY - coordinate.cY)*(tempCoord.cY - coordinate.cY));
	}

	int minDistanceId=0;
	
	//This loop picks the index of the smallest of the computed distances
	for (int j=0; j<tot; j++){
		if ( distance[j] < distance[minDistanceId] ) minDistanceId = j;
	}

	//Once the minimum distance is found, return the corresponding blob
	Blob = blobs.GetBlob( minDistanceId );
	return Blob;

}
Example #2
/**
- FUNCTION: CBlobGetMaxYatMinX
- FUNCTIONALITY: Calculates the maximum Y on the minimum X
- PARAMETERS:
- RESULT:
- RESTRICTIONS:
- AUTHOR: Ricard Borràs
- CREATION DATE: 25-05-2005.
- MODIFICATION: Date. Author. Description.
*/
double CBlobGetMaxYatMinX::operator()(const CBlob &blob) const
{
	double MaxY_at_MinX = LONG_MIN;
	
	CvSeqReader reader;
	CvPoint edgeactual;
		
	cvStartReadSeq(blob.Edges(),&reader);
	
	for(int j=0;j<blob.Edges()->total;j++)
	{
		CV_READ_SEQ_ELEM(edgeactual,reader);
		if( (edgeactual.x == blob.MinX()) && (edgeactual.y > MaxY_at_MinX) )
		{
			MaxY_at_MinX = edgeactual.y;
		}
	}
		
	return MaxY_at_MinX;
}
Example #3
/**
- FUNCTION: CBlobGetMinXatMinY
- FUNCTIONALITY: Calculates the minimum X on the minimum Y
- PARAMETERS:
- RESULT:
- RESTRICTIONS:
- AUTHOR: Ricard Borràs
- CREATION DATE: 25-05-2005.
- MODIFICATION: Date. Author. Description.
*/
double CBlobGetMinXatMinY::operator()(const CBlob &blob) const
{
	double MinX_at_MinY = LONG_MAX;
	
	CvSeqReader reader;
	CvPoint edgeactual;
		
	cvStartReadSeq(blob.Edges(),&reader);
	
	for(int j=0;j<blob.Edges()->total;j++)
	{
		CV_READ_SEQ_ELEM(edgeactual,reader);
		if( (edgeactual.y == blob.MinY()) && (edgeactual.x < MinX_at_MinY) )
		{
			MinX_at_MinY = edgeactual.x;
		}
	}
		
	return MinX_at_MinY;
}
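Both operators above follow cvBlobsLib's blob-property convention: a small functor that takes a CBlob and returns a double. A minimal usage sketch, assuming a CBlobResult named blobs has already been extracted from a binary image, contains at least one blob, and that GetBlob returns a CBlob by value as in Example #1 (some library versions return a pointer instead):

	CBlob firstBlob = blobs.GetBlob(0);
	double maxYatMinX = CBlobGetMaxYatMinX()( firstBlob );  // maximum Y among contour points whose x equals MinX()
	double minXatMinY = CBlobGetMinXatMinY()( firstBlob );  // minimum X among contour points whose y equals MinY()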
Example #4
coord extractBlob(CBlobResult blobs, coord selectedCoord){
   
	coord coordinate;
	CBlob Blob;

	if ( blobs.GetNumBlobs()==0 ) {
		coordinate.flag=false; 
		return coordinate;
	}
	else {
		
		//!Get the blob info
		Blob = getNearestBlob( blobs, selectedCoord);
		
		//!Creating the coordinate struct
		coordinate.set( (int) Blob.MaxX(), (int) Blob.MinX(), (int) Blob.MaxY(), (int) Blob.MinY());
		
		return coordinate;
	}

}
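Together with Example #1, extractBlob acts as a small "click-to-track" helper: it returns the bounding box of the blob nearest to a user-selected point. The sketch below is only an assumed usage, not code from the project: it assumes coord is the project-specific struct used above (public cX/cY fields, a flag member, and set(maxX, minX, maxY, minY)), that binaryImg is a single-channel thresholded IplImage prepared elsewhere, and that CBlobResult's (image, mask, threshold) constructor from the later examples is available. Since extractBlob only assigns flag in the empty case, the caller checks GetNumBlobs() first:

	CBlobResult blobs( binaryImg, NULL, 0 );
	blobs.Filter( blobs, B_EXCLUDE, CBlobGetArea(), B_LESS, 50 );  // drop very small blobs

	coord clicked;            // hypothetical point chosen by the user
	clicked.cX = 160;
	clicked.cY = 120;

	if ( blobs.GetNumBlobs() > 0 ) {
		coord target = extractBlob( blobs, clicked );
		// target now describes the blob nearest to the clicked point
	}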
Example #5
/**
- FUNCTION: CBlobGetMinXatMinY
- FUNCTIONALITY: Calculates the minimum X on the minimum Y
- PARAMETERS:
- RESULT:
- RESTRICTIONS:
- AUTHOR: Ricard Borràs
- CREATION DATE: 25-05-2005.
- MODIFICATION: Date. Author. Description.
*/
double CBlobGetMinXatMinY::operator()(CBlob &blob)
{
	double result = LONG_MAX;
	
	t_PointList externContour;
	
	externContour = blob.GetExternalContour()->GetContourPoints();
	if( externContour.size()==0 ) return result;

	t_PointList::iterator it = externContour.begin(), en = externContour.end();
	for( ; it != en; ++it )
	{
		Point &actualPoint = *it;

		if( (actualPoint.y == blob.MinY()) && (actualPoint.x < result) )
		{
			result = actualPoint.x;
		}	
	}

	return result;
}
Example #6
/**
- FUNCTION: CBlobGetMinXatMinY
- FUNCTIONALITY: Calculates the minimum X on the minimum Y
- PARAMETERS:
- RESULT:
- RESTRICTIONS:
- AUTHOR: Ricard Borràs
- CREATION DATE: 25-05-2005.
- MODIFICATION: Date. Author. Description.
*/
double CBlobGetMinXatMinY::operator()(CBlob &blob)
{
	double result = LONG_MAX;
	
	CvSeqReader reader;
	CvPoint actualPoint;
	t_PointList externContour;
	
	externContour = blob.GetExternalContour()->GetContourPoints();
	if( !externContour ) return result;
	cvStartReadSeq( externContour, &reader);

	for( int i=0; i< externContour->total; i++)
	{
		CV_READ_SEQ_ELEM( actualPoint, reader);

		if( (actualPoint.y == blob.MinY()) && (actualPoint.x < result) )
		{
			result = actualPoint.x;
		}	
	}

	return result;
}
//==============================================================================
void PanTiltCameraClass::blobTracking(IplImage* hsv_mask,
                                      IplImage* pFour,
                                      IplImage* pImg)
{
   //--- Get blobs and filter them using the blob area
   CBlobResult blobs;
   CBlob *currentBlob;
   
   //--- Create a thresholded image and display image --------------------
   //--- Creates binary image
   IplImage* originalThr = cvCreateImage(cvGetSize(hsv_mask), IPL_DEPTH_8U,1);
   
   //--- Create 3-channel image
   IplImage* display = cvCreateImage(cvGetSize(hsv_mask),IPL_DEPTH_8U,3);
   
   //--- Copies the original
   cvMerge( hsv_mask, hsv_mask, hsv_mask, NULL, display );
   
   //--- Makes a copy for processing
   cvCopy(hsv_mask,originalThr);
   
   //--- Find blobs in image ---------------------------------------------
   int blobThreshold = 0;
   bool blobFindMoments = true;
   blobs = CBlobResult( originalThr, originalThr, blobThreshold, blobFindMoments);
   
   //--- filters blobs according to size and radius constraints
   blobs.Filter( blobs, B_EXCLUDE, CBlobGetArea(), B_LESS, this->minBlobSize );
   
   //--- display filtered blobs ------------------------------------------
   
   //--- copies the original in (for background)
   cvMerge( originalThr, originalThr, originalThr, NULL, display );
   
   CvPoint pts[this->NUMBER_OF_CIRCLES];  // note: never assigned in this snippet before TargetReticle draws at these points
   
   //--- This sequence marks all the blobs
   for (int i = 0; i < blobs.GetNumBlobs(); i++ )
   {
      currentBlob = blobs.GetBlob(i);
      currentBlob->FillBlob( display, CV_RGB(0,0,255));				
      
      //--- Get blobs centerpoint
      CvPoint bcg;
      bcg.x = (int)(currentBlob->MinX()+((currentBlob->MaxX()-currentBlob->MinX())/2));
      bcg.y = (int)(currentBlob->MinY()+((currentBlob->MaxY()-currentBlob->MinY())/2));
      
      //--- Print the CG on the picture
      char blobtext[40];
      for(int k=0;k<this->NUMBER_OF_CIRCLES;k++)
      {
         sprintf(blobtext,"%d",k+1);
         TargetReticle(display,&pts[k],blobtext,6,CV_RGB(255,0,0));
      }//for
   }//for each blob
   
   //--- Set the ROI in the pFour image
   cvSetImageROI(pFour,cvRect(pImg->width,pImg->height+80,pImg->width,pImg->height));
   cvCopy(display,pFour);
   
   //Reset the region of interest that was set on pFour above
   cvResetImageROI(pFour);						
   
   //Clean up
   cvReleaseImage( &originalThr );
   cvReleaseImage( &display);
}
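A note on the filtering step above: B_EXCLUDE combined with B_LESS removes every blob whose CBlobGetArea() result is below this->minBlobSize, so only sufficiently large blobs survive. Roughly the same effect can be written with the inclusive form; the lines below are a sketch, with minArea as a placeholder threshold:

   CBlobResult bigBlobs;
   blobs.Filter( bigBlobs, B_INCLUDE, CBlobGetArea(), B_GREATER, minArea );  // keep blobs strictly larger than minArea

Filtering in place, as blobTracking does, is also possible by passing the same CBlobResult as the destination.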
IplImage* blobDetection2(IplImage* imgThreshRed, IplImage* imgThreshGreen) {
    // get blobs and filter them using its area
    int i, j;
    //  int areaBlob = 100;
    float distMark = 10;
    CBlobResult blobsRed, blobsGreen, whiteRedBlobs, whiteGreenBlobs;
    CBlob *currentBlob;
    double px, py;

    // Create Image
    IplImage* displayedImage = cvCreateImage(cvGetSize(imgThreshRed), IPL_DEPTH_8U, 3);

    // find all the RED related blobs in the image
    blobsRed = CBlobResult(imgThreshRed, NULL, 0);
    // find all the GREEN related blobs in the image
    blobsGreen = CBlobResult(imgThreshGreen, NULL, 0);

    // keep only blobs with a non-trivial area (at least one pixel) and put
    // them in the whiteRedBlobs / whiteGreenBlobs variables
    blobsRed.Filter(whiteRedBlobs, B_EXCLUDE, CBlobGetArea(), B_LESS, 1.0);
    blobsGreen.Filter(whiteGreenBlobs, B_EXCLUDE, CBlobGetArea(), B_LESS, 1.0);

#ifdef DEBUG_PRINT    
    printf("White Red Blobs: %d\n", whiteRedBlobs.GetNumBlobs());
#endif

    // display filtered blobs
    cvMerge(imgThreshRed, imgThreshRed, imgThreshRed, NULL, displayedImage);

    // RED
    CvPoint2D32f redCenter[whiteRedBlobs.GetNumBlobs()];

    for (i = 0; i < whiteRedBlobs.GetNumBlobs(); i++) {
        currentBlob = whiteRedBlobs.GetBlob(i);
        px = (currentBlob->MaxX() + currentBlob->MinX()) / 2.0;
        py = (currentBlob->MaxY() + currentBlob->MinY()) / 2.0;
        redCenter[i] = cvPoint2D32f(px, py);

#ifdef DEBUG_PRINT    
        printf("%2.2f\t%2.2f\n", px, py);
#endif

        if (currentBlob->Area() > areaBlob) {
            // Add Cross to the image
            currentBlob->FillBlob(displayedImage, CV_RGB(255, 0, 0));
            cvCircle(displayedImage, cvPointFrom32f(redCenter[i]), 2, cvScalar(255, 0, 0), 10, 8, 0);
        }
    }

    // GREEN
    CvPoint2D32f greenCenter[whiteGreenBlobs.GetNumBlobs()];

    for (i = 0; i < whiteGreenBlobs.GetNumBlobs(); i++) {
        currentBlob = whiteGreenBlobs.GetBlob(i);
        px = (currentBlob->MaxX() + currentBlob->MinX()) / 2.0;
        py = (currentBlob->MaxY() + currentBlob->MinY()) / 2.0;
        greenCenter[i] = cvPoint2D32f(px, py);

#ifdef DEBUG_PRINT    
        printf("%2.2f\t%2.2f\n", px, py);
#endif

        if (currentBlob->Area() > areaBlob) {
            // Add Cross to the image
            currentBlob->FillBlob(displayedImage, CV_RGB(255, 0, 0));
            cvCircle(displayedImage, cvPointFrom32f(greenCenter[i]), 2, cvScalar(0, 255, 0), 10, 8, 0);
        }
    }

    // Populating the list of potential robots
    
    potRobList.robNum = 0;

    for (i = 0; i < robMax; i++)
        potRobList.robList[i].active = 0;

    int redUsage[whiteRedBlobs.GetNumBlobs()];
    int greenUsage[whiteGreenBlobs.GetNumBlobs()];

    for (i = 0; i < whiteRedBlobs.GetNumBlobs(); i++)
        redUsage[i] = 0;

    for (j = 0; j < whiteGreenBlobs.GetNumBlobs(); j++)
        greenUsage[j] = 0;



    // Detect Robots
    float distCenter[whiteRedBlobs.GetNumBlobs()][whiteGreenBlobs.GetNumBlobs()];
    for (i = 0; i < min(whiteRedBlobs.GetNumBlobs(), robMax); i++) {
        currentBlob = whiteRedBlobs.GetBlob(i);
        if (currentBlob->Area() > areaBlob) {
            for (j = 0; j < min(whiteGreenBlobs.GetNumBlobs(), robMax); j++) {
                currentBlob = whiteGreenBlobs.GetBlob(j);
                if (currentBlob->Area() > areaBlob) {
                    distCenter[i][j] = computeDist(redCenter[i], greenCenter[j]);
                    //printf("[%d] - [%d]: %2.2f\n", i, j, distCenter[i][j]);
                    // Print a connection line if this could be a robot
                    if (redUsage[i] == 0 && greenUsage[j] == 0 && checkDistMarker(distCenter[i][j], distMark)) {
                        cvLine(displayedImage, cvPointFrom32f(redCenter[i]), cvPointFrom32f(greenCenter[j]), cvScalar(0, 255, 255), 2, 8, 0);
                        // Check Robot
                        potRobList.robList[potRobList.robNum] = createRobot(redCenter[i], greenCenter[j]);

                        potRobList.robNum++;
                        redUsage[i] = 1;
                        greenUsage[j] = 1;
                        //                        printRobot(potRobList.robList[potRobList.robNum - 1]);


                        CvBox2D tmp;
                        tmp.angle = potRobList.robList[potRobList.robNum - 1].orientation;
                        tmp.center = potRobList.robList[potRobList.robNum - 1].center;
                        tmp.size = cvSize2D32f(30, 50);
                        cvEllipseBox(displayedImage, tmp, cvScalar(255, 255, 0), 4, 3, 0);
                        //			printRobot(potRobList.robList[potRobList.robNum-1]);

                    }

                }
            }
        }
    }


    // Matching The List of Potential Robots with previous List of Robots
    //    updateRobotListAndrea(&avRobList, potRobList);
  //  updateRobotList(&avRobList, potRobList);
    makelistRobot();

    /*
        // Print robots
        for (i = 0; i < robMax; i++) {
            if (avRobList.robList[i].active == 1) {
                CvBox2D tmp;
                tmp.angle = avRobList.robList[i].orientation;
                tmp.center = avRobList.robList[i].center;
                tmp.size = cvSize2D32f(50, 30);
                cvEllipseBox(displayedImage, tmp, cvScalar(255, 255, 0), 4, 3, 0);
                printRobot(avRobList.robList[i]);
            }
        }
     */



    /* Control Law */

    return displayedImage;
}
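blobDetection2 leans on several identifiers defined elsewhere in its project (areaBlob, robMax, potRobList, avRobList, computeDist, checkDistMarker, createRobot, makelistRobot). The snippet does not show them, so the declarations below are only a hypothetical sketch that makes those dependencies explicit; the real types and values live in the original project.

// Hypothetical supporting declarations assumed by blobDetection2 (names/types are guesses):
#define robMax 10                      // maximum number of robots tracked at once

struct Robot {
    int          active;               // 1 if this slot currently holds a robot
    float        orientation;          // heading, used as CvBox2D::angle above
    CvPoint2D32f center;               // midpoint between the red and green markers
};

struct RobotList {
    int   robNum;
    Robot robList[robMax];
};

RobotList potRobList, avRobList;       // potential and confirmed robot lists
int areaBlob = 100;                    // minimum marker blob area (commented out at the top of the snippet)

float computeDist(CvPoint2D32f a, CvPoint2D32f b);        // Euclidean distance between two centers
bool  checkDistMarker(float d, float distMark);           // true if d is close enough to distMark
Robot createRobot(CvPoint2D32f red, CvPoint2D32f green);  // build a robot from a red/green marker pair
void  printRobot(Robot r);                                // debug dump of one robot
void  makelistRobot();                                    // match potRobList against the tracked list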
Example #9
 int main() {
  CvPoint pt1,pt2;
  CvRect regt;
  CvPoint cir_center;
  CvPoint frame_center;
  CvPoint A,B,C,D;
  CvPoint temp;
  double angle,spinsize;
  int cir_radius=1; 
  int frame_width=160, frame_height=120;
   CvCapture* capture = cvCaptureFromCAM( CV_CAP_ANY );
   if ( !capture ) {
     fprintf(stderr, "ERROR: capture is NULL \n" );
     getchar();
     return -1;
   }
  cvSetCaptureProperty(capture,CV_CAP_PROP_FRAME_WIDTH,frame_width);// 120x160 
  cvSetCaptureProperty(capture,CV_CAP_PROP_FRAME_HEIGHT,frame_height);
  //cvSetCaptureProperty(capture, CV_CAP_PROP_FPS,10);
//  cvSetCaptureProperty(capture,CV_CAP_PROP_POS_FRAMES,5);  
 // Create a window in which the captured images will be presented
   cvNamedWindow( "mywindow", CV_WINDOW_AUTOSIZE );
   // Show the image captured from the camera in the window and repeat
   while ( 1 ) {
     // Get one frame
     IplImage* frame = cvQueryFrame( capture );
     if ( !frame ) {
       fprintf( stderr, "ERROR: frame is null...\n" );
       getchar();
       break;
     }
     int modfheight, modfwidth;

     modfheight = frame->height;
     modfwidth = frame->width;
     // create modified frame with 1/4th the original size
     IplImage* modframe = cvCreateImage(cvSize((int)(modfwidth/4),(int)(modfheight/4)),frame->depth,frame->nChannels); //cvCreateImage(size of frame, depth, noofchannels)
     cvResize(frame, modframe,CV_INTER_LINEAR);
     // create HSV(Hue, Saturation, Value) frame
     IplImage* hsvframe = cvCreateImage(cvGetSize(modframe),8, 3);
     cvCvtColor(modframe, hsvframe, CV_BGR2HSV); //cvCvtColor(input frame,outputframe,method)
     // create a frame within threshold.
     IplImage* threshframe = cvCreateImage(cvGetSize(hsvframe),8,1);
     cvInRangeS(hsvframe,cvScalar(15, 100, 100),cvScalar(60, 220, 220),threshframe); //cvInRangeS(input frame, cvScalar(min range),cvScalar(max range),output frame)
     // created dilated image
     IplImage* dilframe = cvCreateImage(cvGetSize(threshframe),8,1);
     cvDilate(threshframe,dilframe,NULL,2); //cvDilate(input frame, output frame, mask, number of times to dilate)

     CBlobResult blobs;
     blobs = CBlobResult(dilframe,NULL,0); // CBlobresult(inputframe, mask, threshold) Will filter all white parts of image
     blobs.Filter(blobs,B_EXCLUDE,CBlobGetArea(),B_LESS,50);//blobs.Filter(input, cond, criteria, cond, const) Filter all images whose area is less than 50 pixels
     CBlob biggestblob;
     blobs.GetNthBlob(CBlobGetArea(),0,biggestblob); //GetNthBlob(criteria, number, output) Get only the largest blob based on CblobGetArea()
     // get 4 points to define the rectangle
     pt1.x = biggestblob.MinX()*4;
     pt1.y = biggestblob.MinY()*4;
     pt2.x = biggestblob.MaxX()*4;
     pt2.y = biggestblob.MaxY()*4;
     cir_center.x=(pt1.x+pt2.x)/2;
     cir_center.y=(pt1.y+pt2.y)/2;
     frame_center.x=frame_width/2;
     frame_center.y=frame_height/2;
     A.x=frame_center.x-4;
     A.y=frame_center.y;
     B.x=frame_center.x+4;
     B.y=frame_center.y;
     C.y=frame_center.y-4;
     C.x=frame_center.x;
     D.y=frame_center.y+4;
     D.x=frame_center.x;
     cvRectangle(frame,pt1,pt2,cvScalar(255,0,0),1,8,0); // draw rectangle around the biggest blob
     cvCircle( frame, cir_center, cir_radius, cvScalar(0,255,255), 1, 8, 0 ); // center point of the rectangle
     cvLine(frame, A, B,cvScalar(255,0,255),2,8,0);
     cvLine(frame, C, D,cvScalar(255,0,255),2,8,0);
     if (cir_center.x!=0 && cir_center.y!=0){
       // draw an arrow between the blob center and the frame center
       spinsize=sqrt((cir_center.x-frame_center.x)*(cir_center.x-frame_center.x) +(cir_center.y-frame_center.y)*(cir_center.y-frame_center.y));
       angle = atan2((double)cir_center.y-frame_center.y,(double)cir_center.x-frame_center.x);
       temp.x=(int)(frame_center.x+spinsize/5*cos(angle+CV_PI/4));
       temp.y=(int)(frame_center.y+spinsize/5*sin(angle+CV_PI/4));
       cvLine(frame, temp, frame_center,cvScalar(0,255,0),1,8,0);

       temp.x=(int)(frame_center.x+spinsize/5*cos(angle-CV_PI/4));
       temp.y=(int)(frame_center.y+spinsize/5*sin(angle-CV_PI/4));
       cvLine(frame, temp, frame_center,cvScalar(0,255,0),1,8,0);

       cvLine(frame, cir_center, frame_center,cvScalar(0,255,0),1,8,0);

       //cvCircle( frame, frame_center, cir_radius, cvScalar(0,255,255), 2, 8, 0 );
     }
     cvShowImage( "mywindow", frame); // show output image

     // Release the per-frame work images (the frame returned by cvQueryFrame must NOT be released)
     cvReleaseImage( &modframe );
     cvReleaseImage( &hsvframe );
     cvReleaseImage( &threshframe );
     cvReleaseImage( &dilframe );

     //If ESC key pressed, Key=0x10001B under OpenCV 0.9.7 (Linux version),
     //remove higher bits using AND operator
     if ( (cvWaitKey(10) & 255) == 27 ) break;
   }
   // Release the capture device housekeeping
   cvReleaseCapture( &capture );
   cvDestroyWindow( "mywindow" );
   return 0;
 }
Example #10
 int main() {
  CvPoint pt1,pt2;
  CvRect regt;
   CvCapture* capture = cvCaptureFromCAM( CV_CAP_ANY );
   if ( !capture ) {
     fprintf(stderr, "ERROR: capture is NULL \n" );
     getchar();
     return -1;
   }
  cvSetCaptureProperty(capture,CV_CAP_PROP_FRAME_HEIGHT,144);
  cvSetCaptureProperty(capture,CV_CAP_PROP_FRAME_WIDTH,216);	 
  // Create a window in which the captured images will be presented
   cvNamedWindow( "mywindow", CV_WINDOW_AUTOSIZE );
   // Show the image captured from the camera in the window and repeat
   while ( 1 ) {
     // Get one frame
     IplImage* frame = cvQueryFrame( capture );
     if ( !frame ) {
       fprintf( stderr, "ERROR: frame is null...\n" );
       getchar();
       break;
     }
     int modfheight, modfwidth;

     modfheight = frame->height;
     modfwidth = frame->width;
     // create modified frame with 1/4th the original size
     IplImage* modframe = cvCreateImage(cvSize((int)(modfwidth/4),(int)(modfheight/4)),frame->depth,frame->nChannels); //cvCreateImage(size of frame, depth, noofchannels)
     cvResize(frame, modframe,CV_INTER_LINEAR);
     // create HSV(Hue, Saturation, Value) frame
     IplImage* hsvframe = cvCreateImage(cvGetSize(modframe),8, 3);
     cvCvtColor(modframe, hsvframe, CV_BGR2HSV); //cvCvtColor(input frame,outputframe,method)
     // create a frame within threshold. 
     IplImage* threshframe = cvCreateImage(cvGetSize(hsvframe),8,1);
     cvInRangeS(hsvframe,cvScalar(30, 25, 150),cvScalar(60, 60, 220),threshframe); //cvInRangeS(input frame, cvScalar(min range),cvScalar(max range),output frame)
     // created dilated image
     IplImage* dilframe = cvCreateImage(cvGetSize(threshframe),8,1);
     cvDilate(threshframe,dilframe,NULL,2); //cvDilate(input frame, output frame, mask, number of times to dilate)

     CBlobResult blobs; 
     blobs = CBlobResult(dilframe,NULL,0); // CBlobresult(inputframe, mask, threshold) Will filter all white parts of image
     blobs.Filter(blobs,B_EXCLUDE,CBlobGetArea(),B_LESS,50);//blobs.Filter(input, cond, criteria, cond, const) Filter all images whose area is less than 50 pixels
     CBlob biggestblob;
     blobs.GetNthBlob(CBlobGetArea(),0,biggestblob); //GetNthBlob(criteria, number, output) Get only  the largest blob based on CblobGetArea()
     // get 4 points to define the rectangle
     pt1.x = biggestblob.MinX()*4;
     pt1.y = biggestblob.MinY()*4;
     pt2.x = biggestblob.MaxX()*4;
     pt2.y = biggestblob.MaxY()*4;
     cvRectangle(frame,pt1,pt2,cvScalar(255,0,0),1,8,0); // draw rectangle around the biggest blob

     cvShowImage( "mywindow", frame); // show output image

     // Release the per-frame work images (the frame returned by cvQueryFrame must NOT be released)
     cvReleaseImage( &modframe );
     cvReleaseImage( &hsvframe );
     cvReleaseImage( &threshframe );
     cvReleaseImage( &dilframe );

     //If ESC key pressed, Key=0x10001B under OpenCV 0.9.7 (Linux version),
     //remove higher bits using AND operator
     if ( (cvWaitKey(10) & 255) == 27 ) break;
   }
   // Release the capture device housekeeping
   cvReleaseCapture( &capture );
   cvDestroyWindow( "mywindow" );
   return 0;
 }
Example #11
File: locator.cpp  Project: ashokzg/cpb
	void locator()
	{
		namedWindow("Tracking");
		int hMin, hMax, sMin, sMax, vMin, vMax,area_min;
		hMin = 0;
		//hMax = 124; // night values/???
		hMax = 255;
		//sMin = 95;
		sMin = 126;
		sMax = 255;
		//vMin = 139;
		vMin = 173;
		vMax = 255;
		area_min = 100;
		Mat smoothed, hsvImg, t_img;
		createTrackbar("blob min area","Tracking" ,&area_min ,1000);
		createTrackbar("Hue Min", "Tracking", &hMin, 255);
		createTrackbar("Hue Max", "Tracking", &hMax, 255);
		createTrackbar("Sat Min", "Tracking", &sMin, 255);
		createTrackbar("Sat Max", "Tracking", &sMax, 255);
		createTrackbar("Val Min", "Tracking", &vMin, 255);
		createTrackbar("Val MaX", "Tracking", &vMax, 255);
		while(ros::ok())
		{
			Mat source = imageB;
			Mat copy = imageB.clone();
			GaussianBlur(source, smoothed, Size(9,9), 4);
			cvtColor(smoothed, hsvImg, CV_BGR2HSV);
			inRange(hsvImg, Scalar(hMin, sMin, vMin), Scalar(hMax, sMax, vMax), t_img);

			CBlobResult blob;
			IplImage i_img = t_img;
			blob = CBlobResult(&i_img,NULL,0);
			int num_blobs = blob.GetNumBlobs();

			blob.Filter(blob, B_INCLUDE, CBlobGetArea(), B_INSIDE, area_min, blob_area_absolute_max_);
			num_blobs = blob.GetNumBlobs();

			std::string reference_frame = "/virtual_table"; // Table frame at ball_radius above the actual table plane

			tf::StampedTransform transform;
			tf_.waitForTransform(reference_frame, model.tfFrame(), ros::Time(0), ros::Duration(0.5));
			tf_.lookupTransform(reference_frame, model.tfFrame(), ros::Time(0), transform);

			for(int i =0;i<num_blobs;i++)
			{
				CBlob* bl = blob.GetBlob(i);
				Point2d uv(CBlobGetXCenter()(*bl), CBlobGetYCenter()(*bl));
				//Use the width as the height
				uv.y = bl->MinY() + (bl->MaxX() - bl->MinX()) * 0.5;
				circle(copy,uv,50,Scalar(255,0,0),5);

				cv::Point3d xyz;
				model.projectPixelTo3dRay(uv, xyz);
		
				// Intersect ray with plane in virtual table frame
				//Origin of camera frame wrt virtual table frame
				tf::Point P0 = transform.getOrigin();
				//Point at end of unit ray wrt virtual table frame
				tf::Point P1 = transform * tf::Point(xyz.x, xyz.y, xyz.z);
				// Origin of virtual table frame
				tf::Point V0 = tf::Point(0.0,0.0,0.0);
				// normal to the table plane
				tf::Vector3 n(0, 0, 1);
				// finding scaling value
				double scale = (n.dot(V0-P0))/(n.dot(P1-P0));
				tf::Point ball_pos = P0 + (P1-P0)*scale;
				cout <<ball_pos.x() << " " << ball_pos.y() << " " << ball_pos.z() <<endl;
			}
			imshow(WINDOW, copy);
			waitKey(3);

			imshow("edited", t_img);
			waitKey(3);

			ros::spinOnce();
		}
	}
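One caveat in locator(): the ray/plane intersection divides by n.dot(P1 - P0), which is zero whenever the projected pixel ray is parallel to the virtual table plane, and ball_pos then becomes invalid. A defensive variant of that step might look like the sketch below, reusing the same tf names and assuming <cmath> is available for fabs:

	double denom = n.dot(P1 - P0);
	if (fabs(denom) > 1e-6)
	{
		double scale = (n.dot(V0 - P0)) / denom;
		tf::Point ball_pos = P0 + (P1 - P0) * scale;
		cout << ball_pos.x() << " " << ball_pos.y() << " " << ball_pos.z() << endl;
	}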