Example no. 1
IplImage*
create_frequency_filtered_image(const IplImage *pImage, int low, int high)
{

  CvPoint2D32f  center;
  center.x = pImage->width / 2;
  center.y = pImage->height / 2;
  CvBox2D box;
  box.center = center;
  box.angle  = 0;   /* axis-aligned ellipse */

  box.size.width = high;
  box.size.height = high;

  IplImage *pFilterMask = rb_cvCreateImage( cvGetSize(pImage), IPL_DEPTH_64F, 1 );
  IplImage *pFiltered = rb_cvCreateImage( cvGetSize(pImage), IPL_DEPTH_64F, 1 );

  cvZero(pFilterMask);
  cvZero(pFiltered);

  if(high > 0)
    cvEllipseBox(pFilterMask, box, cvScalar(255, 255, 255, 255), CV_FILLED, 8, 0);

  box.size.width = low;
  box.size.height = low;
  if(low > 0)
    cvEllipseBox(pFilterMask, box, cvScalar(0, 0, 0, 0), CV_FILLED, 8, 0);

  cvAnd(pImage, pFilterMask, pFiltered, NULL);

  cvReleaseImage(&pFilterMask);

  return pFiltered;
}
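
The example above builds its band-pass mask through a project-specific allocator (rb_cvCreateImage) on 64-bit float planes. For comparison, here is a minimal sketch of the same two-ellipse ring mask using only the stock legacy C API; make_ring_mask_sketch is a hypothetical helper, not part of the original code, and the include path may differ between OpenCV installs.

#include <opencv/cv.h>

/* Build a ring ("band-pass") mask: a filled white ellipse of diameter
 * `high` with a filled black ellipse of diameter `low` punched out of
 * its center.  Both ellipses are centered on the image. */
IplImage *make_ring_mask_sketch(CvSize size, int low, int high)
{
    IplImage *mask = cvCreateImage(size, IPL_DEPTH_8U, 1);
    cvZero(mask);

    CvBox2D box;
    box.center = cvPoint2D32f(size.width / 2.f, size.height / 2.f);
    box.angle  = 0.f;

    box.size = cvSize2D32f((float)high, (float)high);
    if (high > 0)
        cvEllipseBox(mask, box, cvScalarAll(255), CV_FILLED, 8, 0);

    box.size = cvSize2D32f((float)low, (float)low);
    if (low > 0)
        cvEllipseBox(mask, box, cvScalarAll(0), CV_FILLED, 8, 0);

    return mask;                 /* caller releases with cvReleaseImage */
}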
Example no. 2
CHandPoint CTransformImage::findFinger()
{
	findCenter();

	if(!m_transImage)
		return CHandPoint();

	int width   = m_transImage->width;
	int height  = 180;
	int moveX   = 0,     moveY  = height;
	BOOL bClick = FALSE, bWheel = FALSE;
	unsigned char ch;
	for(int y = m_center.y; y < height; ++y)
	{
		for(int x = m_center.x-100; x < m_center.x+50; ++x)
		{
			if(x < 0 || x >= width || y < 0 || y >= height)
				continue;

			ch = m_transImage->imageData[y*width+x];
			if(ch == 255)
			{
				moveX = x, moveY = y;
				if(x < m_center.x-50)
					bClick = TRUE;
				break;
			}

			// 			CvBox2D box;
			// 			box.center = cvPoint2D32f(x, y);
			// 			box.size   = cvSize2D32f(2, 2);
			// 			box.angle  = 90;
			// 			cvEllipseBox(m_image, box, CV_RGB(0,255,255), 1);
		}

		if(moveY != y)
			break;
	}

	// Keep the coordinates from jittering slightly between frames
	if(abs(m_pastPt.x-moveX) < 2 || abs(m_pastPt.y-moveY) < 2)
		moveX = m_pastPt.x, moveY = m_pastPt.y;

	m_pastPt.x = moveX, m_pastPt.y = moveY;

	CvBox2D box;
	box.center = cvPoint2D32f(moveX, moveY);
	box.size   = cvSize2D32f(2, 2);
	box.angle  = 90;
	cvEllipseBox(m_image, box, CV_RGB(0,255,0), 1);

	return CHandPoint(moveX, height-moveY, bClick, bWheel);
}
Example no. 3
CvPoint CTransformImage::findCenter()
{
	IplImage* dist8u  = cvCloneImage(m_transImage);
	IplImage* dist32f = cvCreateImage(cvGetSize(m_transImage), IPL_DEPTH_32F, 1);
	IplImage* dist32s = cvCreateImage(cvGetSize(m_transImage), IPL_DEPTH_32S, 1);

	// Distance transform mask
	float mask[3] = {1.f, 1.5f, 0};

	// Apply the distance transform
	cvDistTransform(m_transImage, dist32f, CV_DIST_USER, 3, mask, NULL);

	// Scale the result so it is visible
	cvConvertScale(dist32f, dist32f, 1000, 0);
	cvPow(dist32f, dist32f, 0.5);

	cvConvertScale(dist32f, dist32s, 1.0, 0.5);
	cvAndS(dist32s, cvScalarAll(255), dist32s, 0);
	cvConvertScale(dist32s, dist8u, 1, 0);

	// Find the coordinate of the largest value
	int max;
	for(int i = max = 0; i < dist8u->height; ++i)
	{
		int index = i * dist8u->widthStep;
		for(int j = 0; j < dist8u->width; ++j)
		{
			if((unsigned char)dist8u->imageData[index+j] > max)
			{
				max = (unsigned char)dist8u->imageData[index+j];
				m_center.x = j, m_center.y = i;
			}
		}
	}

	cvReleaseImage(&dist8u);
	cvReleaseImage(&dist32f);
	cvReleaseImage(&dist32s);

	if(m_center.x < 0 || m_center.y < 0)
		m_center.x = 0, m_center.y = 0;

	CvBox2D box;
	box.center = cvPoint2D32f(m_center.x, m_center.y);
	box.size   = cvSize2D32f(3, 3);
	box.angle  = 90;
	cvEllipseBox(m_image, box, CV_RGB(255,242,0), 3);

	return m_center;
}
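
The function above finds the palm center as the pixel farthest from the background, scanning a scaled copy of the distance map by hand. As a rough sketch of the same idea, assuming the same legacy C API and OpenCV version as the example, cvMinMaxLoc can search the float distance image directly; find_center_sketch and its binary silhouette argument are hypothetical, not part of the original class.

#include <opencv/cv.h>

/* Return the point with the largest distance-transform value in an
 * 8-bit, single-channel silhouette image. */
CvPoint find_center_sketch(const IplImage *binary)
{
    IplImage *dist = cvCreateImage(cvGetSize(binary), IPL_DEPTH_32F, 1);

    /* Distance from each nonzero pixel to the nearest zero pixel. */
    cvDistTransform(binary, dist, CV_DIST_L2, 3, NULL, NULL);

    double min_val = 0, max_val = 0;
    CvPoint min_loc, max_loc;
    cvMinMaxLoc(dist, &min_val, &max_val, &min_loc, &max_loc, NULL);

    cvReleaseImage(&dist);
    return max_loc;              /* deepest interior point of the silhouette */
}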
Example no. 4
 void draw()
 {
     double scale = this->scale == 0? 1.0 : this->scale;
     CvScalar colors[5] = {
         {{cvRound(color[0].r * 255), cvRound(color[0].g * 255), cvRound(color[0].b * 255), cvRound(alpha * 255)}},
         {{cvRound(color[1].r * 255), cvRound(color[1].g * 255), cvRound(color[1].b * 255), cvRound(alpha * 255)}},
         {{cvRound(color[2].r * 255), cvRound(color[2].g * 255), cvRound(color[2].b * 255), cvRound(alpha * 255)}},
         {{cvRound(color[3].r * 255), cvRound(color[3].g * 255), cvRound(color[3].b * 255), cvRound(alpha * 255)}},
         {{cvRound(color[4].r * 255), cvRound(color[4].g * 255), cvRound(color[4].b * 255), cvRound(alpha * 255)}},
     };
     
     for (int i = 0; i < (objects ? objects->total : 0); i++)
     {
         CvRect* r = (CvRect*) cvGetSeqElem(objects, i);
         CvPoint center;
         int thickness = stroke <= 0? CV_FILLED : cvRound(stroke * 100);
         int linetype = antialias? CV_AA : 8;
         
         center.x = cvRound((r->x + r->width * 0.5) / scale);
         center.y = cvRound((r->y + r->height * 0.5) / scale);
         
         switch (shape == 1.0? (rand() % 3) : cvRound(shape * 10))
         {
         default:
         case 0:
             {
                 int radius = cvRound((r->width + r->height) * 0.25 / scale);
                 cvCircle(image, center, radius, colors[i % 5], thickness, linetype);
                 break;
             }
         case 1:
             {
                 CvBox2D box = {{center.x, center.y}, {r->width / scale, (r->height / scale) * 1.2}, 90};
                 cvEllipseBox(image, box, colors[i % 5], thickness, linetype);
                 break;
             }
         case 2:
             {
                 CvPoint pt1 = {r->x / scale, r->y / scale};
                 CvPoint pt2 = {(r->x + r->width) / scale, (r->y + r->height) / scale};
                 cvRectangle(image, pt1, pt2, colors[i % 5], thickness, linetype);
                 break;
             }
         }
     }
 }
void AdaptiveHistogramCamshift::PresentOutput(IplImage *img)
{
  // Draw track box
  if (m_tracking)
  {
    const bool trackBoxHasArea = (m_trackBox.size.width * m_trackBox.size.height) > 0;
    if (trackBoxHasArea)
    {
      cvEllipseBox(img, m_trackBox, colors[CYAN], 3, CV_AA, 0);
    }
  }
  // Show output
  cvShowImage(m_controlsGUIWndName.c_str(), img);
  // Show backproject
  if (m_showBackproject)
  {
    ShowBackproject();
  }

}
Example no. 6
void FaceBl0r::update() {

  if (!cascade) {
      cvSetNumThreads(cvRound(threads * 100));
      if (classifier.length() > 0) {
	if (classifier == old_classifier) {
	  // same as before, avoid repeating error messages
	  memcpy(out, in, size * 4); // of course assuming we are RGBA only
	  return;
	} else old_classifier = classifier;

	cascade = (CvHaarClassifierCascade*) cvLoad(classifier.c_str(), 0, 0, 0 );
	if (!cascade) {
	  fprintf(stderr, "ERROR in filter facebl0r, classifier cascade not found:\n");
	  fprintf(stderr, " %s\n", classifier.c_str());
	  memcpy(out, in, size * 4);
	  return;
	}
	storage = cvCreateMemStorage(0);
      }
      else {
	memcpy(out, in, size * 4);
	return;
      }
  }

  // sanitize parameters
  recheck = CLAMP(recheck, 0.001, 1.0);
  search_scale = CLAMP(search_scale, 0.11, 1.0);
  neighbors = CLAMP(neighbors, 0.01, 1.0);

  if( !image )
      image = cvCreateImage( cvSize(width,height), IPL_DEPTH_8U, 4 );

  memcpy(image->imageData, in, size * 4);

  /*
    no face*
     - look for (detect_face)
    yes face
     - track face
     - no more face
       no face*
   */
  if(face_notfound>0) {

      if(face_notfound % cvRound(recheck * 1000) == 0)
          face_rect = detect_face(image, cascade, storage);

      // if no face detected
      if (!face_rect) {
          face_notfound++;
      } else {
          //track detected face with camshift
          if(tracked_obj)
              destroy_tracked_object(tracked_obj);
          tracked_obj = create_tracked_object(image, face_rect);
          face_notfound = 0;
          face_found++;
      }

  }

  if(face_found>0) { 
      //track the face in the new frame
      face_box = camshift_track_face(image, tracked_obj);

      int min = cvRound(smallest * 1000);
          min = min? min : 10;
      int max = cvRound(largest * 10000);
      if( ( face_box.size.width < min )
          || (face_box.size.height < min )
          || (face_box.size.width > max )
          || (face_box.size.height > max )
          ) {
          face_found = 0;
          face_notfound++;
      }
      else {
////////////////////////////////////////////////////////////////////////
	      cvSetImageROI (image, tracked_obj->prev_rect);
//          cvSmooth (image, image, CV_BLUR, 22, 22, 0, 0);
		  cvSmooth (image, image, CV_BLUR, 23, 23, 0, 0);
//          cvSmooth (image, image, CV_GAUSSIAN, 11, 11, 0, 0);
		  cvResetImageROI (image);
////////////////////////////////////////////////////////////////////////
      
          //outline face ellipse
          if (ellipse)
              cvEllipseBox(image, face_box, CV_RGB(255,0,0), 2, CV_AA, 0);

          face_found++;
          if(face_found % cvRound(recheck * 1000) == 0)
              face_notfound = cvRound(recheck * 1000); // try recheck
      }
  }

  memcpy(out, image->imageData, size * 4);
  cvReleaseImage(&image);
}
Example no. 7
 void cvEllipseBox_wrap(CvArr * img , CvBox2D box , CvScalar color , int thickness , int line_type , int shift ){
	cvEllipseBox(/*CvArr*//***/img , /*CvBox2D*/box , /*CvScalar*/color , /*int*/thickness , /*int*/line_type , /*int*/shift);
}
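
The wrapper above only forwards its arguments, so a self-contained sketch of the underlying call may be clearer. The snippet below is not taken from any of the projects listed here; it simply draws one rotated, filled, antialiased ellipse on a blank BGR image with the legacy C API (include paths may differ between installs).

#include <opencv/cv.h>
#include <opencv/highgui.h>

int main(void)
{
    IplImage *img = cvCreateImage(cvSize(320, 240), IPL_DEPTH_8U, 3);
    cvZero(img);

    CvBox2D box;
    box.center = cvPoint2D32f(160.f, 120.f);   /* ellipse center */
    box.size   = cvSize2D32f(200.f, 100.f);    /* full width and height of the box */
    box.angle  = 30.f;                         /* rotation in degrees */

    /* CV_FILLED fills the ellipse; CV_AA draws antialiased edges. */
    cvEllipseBox(img, box, CV_RGB(0, 255, 0), CV_FILLED, CV_AA, 0);

    cvShowImage("cvEllipseBox sketch", img);
    cvWaitKey(0);

    cvReleaseImage(&img);
    return 0;
}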
Example no. 8
CvBox2D CamShiftIris::track( IplImage* image, CvRect selection, bool isIris){
	CamShiftIris camshift;
	select_object1=1;
	track_object1=-1;
	origin1=cvPoint(0,0);

///////////////////////////////

	int i, bin_w, c;
	//frame = cvQueryFrame( capture );

//
//	frame=cvCloneImage(image);
//        if( !frame )
//            return 0;
	if( image ){
		/* allocate all the buffers */
//		image = cvCreateImage( cvGetSize(frame), 8, 3 );
//		image->origin = frame->origin;
		hsv1 = cvCreateImage( cvGetSize(image), 8, 3 );
		h = cvCreateImage( cvGetSize(image), 8, 1 );
		s = cvCreateImage( cvGetSize(image), 8, 1 );
		v = cvCreateImage( cvGetSize(image), 8, 1);
		hue1 = cvCreateImage( cvGetSize(image), 8, 1 );
		mask1 = cvCreateImage( cvGetSize(image), 8, 1 );
		backproject1 = cvCreateImage( cvGetSize(image), 8, 1 );
		hist1= cvCreateHist( 1, &hdims1, CV_HIST_ARRAY, &hranges1, 1 );
		histimg1 = cvCreateImage( cvSize(320,200), 8, 3 );
		cvZero( histimg1 );
	}
	cvCvtColor( image, hsv1, CV_BGR2HSV );
	///////////////////Equalize v in hsv///////////
	cvSplit( hsv1, h, s, v, 0 );
	cvEqualizeHist(v,v);
	cvMerge(h,s,v,0,hsv1);
	///////////////////Equalize v in hsv///////////

	if( track_object1 !=0 ){
		int _vmin1 = vmin1, _vmax1 = vmax1;

		cvInRangeS( hsv1, cvScalar(0,smin1,MIN(_vmin1,_vmax1),0),
					cvScalar(180,256,MAX(_vmin1,_vmax1),0), mask1 );
		cvSplit( hsv1, hue1, 0, 0, 0 );

		if( track_object1 < 0 ){
			float max_val = 0.f;
			cvSetImageROI( hue1, selection );
			cvSetImageROI( mask1, selection );
			cvCalcHist( &hue1, hist1, 0, mask1 );
			cvGetMinMaxHistValue( hist1, 0, &max_val, 0, 0 );
			cvConvertScale( hist1->bins, hist1->bins, max_val ? 255. / max_val : 0., 0 );
			cvResetImageROI( hue1 );
			cvResetImageROI( mask1 );
			track_window1 = selection;
			track_object1 = 1;

			cvZero( histimg1 );
			bin_w = histimg1->width / hdims1;
			for( i = 0; i < hdims1; i++ )
			{
				int val = cvRound( cvGetReal1D(hist1->bins,i)*histimg1->height/255 );
				CvScalar color = camshift.hsvrgb(i*180.f/hdims1);
				cvRectangle( histimg1, cvPoint(i*bin_w,histimg1->height),
							 cvPoint((i+1)*bin_w,histimg1->height - val),
							 color, -1, 8, 0 );
			}
		}
		cvCalcBackProject( &hue1, backproject1, hist1);
		cvAnd( backproject1, mask1, backproject1, 0 );
		try{
		cvCamShift( backproject1, track_window1,
					cvTermCriteria( CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 10, 1 ),
					&track_comp1, &track_box1 );
		}catch(...){
			cvReleaseImage(&hsv1);
			cvReleaseImage(&h);
			cvReleaseImage(&s);
			cvReleaseImage(&v);
			cvReleaseImage(&hue1);
			cvReleaseImage(&mask1);
			cvReleaseImage(&backproject1);
			cvReleaseHist(&hist1);
			cvReleaseImage(&histimg1);
		}
		track_window1 = track_comp1.rect;
		if( backproject1_mode )
			cvCvtColor( backproject1, image, CV_GRAY2BGR );
		if( !image->origin )
			track_box1.angle = -track_box1.angle;
		if(isIris)
			cvEllipseBox( image, track_box1, CV_RGB(255,0,0), 3, CV_AA, 0 );
	}
	cvShowImage( "CamShift Tracking", image );
	//cvShowImage( "Histogram", histimg1 );

//	c = cvWaitKey(10);
//	if( (char) c == 27 )
//		cout<<"esc pressed";
//		//return; //break;
//	switch( (char) c ){
//	case 'b':
//		backproject1_mode ^= 1;
//		break;
//	case 'c':
//		track_object1 = 0;
//		cvZero( histimg1 );
//		break;
//	case 'h':
//		show_hist1^= 1;
//		if( !show_hist1)
//			cvDestroyWindow( "Histogram" );
//		else
//			cvNamedWindow( "Histogram", 1 );
//		break;
//	default:
//		;
//	}

	//cvReleaseImage(&image);
	cvReleaseImage(&hsv1);
	cvReleaseImage(&h);
	cvReleaseImage(&s);
	cvReleaseImage(&v);
	cvReleaseImage(&hue1);
	cvReleaseImage(&mask1);
	cvReleaseImage(&backproject1);
	cvReleaseHist(&hist1);
	cvReleaseImage(&histimg1);

    return track_box1;
}
Example no. 9
CHandPoint CTransformImage::findFingerInfo()
{
	if(!m_transImage)
		return CHandPoint();

	findCenter();

	CHandPoint handPt;
	std::vector<CvPoint> ptList;

	double pi = 3.1415;
	int width = m_transImage->width;
	int fingerCnt = 0;
	int x, y, radius = 80;
	unsigned char ch, pastCh = 0;
	for(double theta = 180; theta <= 360; ++theta)
	{
		x = (int)(m_center.x + radius*cos(theta*pi/180));
		y = (int)(m_center.y - radius*sin(theta*pi/180));

		ch = m_transImage->imageData[y*width+x];
		if(ch == 255 && pastCh == 0)		// Counting Finger
			ptList.push_back(cvPoint(x,y)), ++fingerCnt;

		pastCh = ch;

		// Draw OutLine
		CvBox2D box;
		box.center = cvPoint2D32f(x, y);
		box.size   = cvSize2D32f(1, 1);
		box.angle  = 90;
		cvEllipseBox(m_image, box, CV_RGB(255,242,0), 1);
	}

	// handPt Setting
	float dist = 0, dist2 = 0;
	switch(fingerCnt)
	{
	case 0: handPt.m_mode = CHandPoint::CLEAR;
		break;
	case 1: handPt.m_mode = CHandPoint::MOVE;	findEndPoint(&handPt.m_nX, &handPt.m_nY);
		break;
	case 2:
		{
			CvPoint a = ptList[0], b = ptList[1];
			dist = sqrt((float)(abs(a.x-b.x)*abs(a.x-b.x) + abs(a.y-b.y)*abs(a.y-b.y)));	// use the outer dist so the debug output below reports it
			if(dist < 70)		// DRAW mode
			{	handPt.m_mode = CHandPoint::CIRCLE;	handPt.m_nX = m_center.x, handPt.m_nY = m_center.y;	}
			else
			{	handPt.m_mode = CHandPoint::DRAW;	findEndPoint(&handPt.m_nX, &handPt.m_nY);	}
		}
		break;
	case 3: 
		{
			CvPoint a = ptList[0], b = ptList[1], c = ptList[2];
			dist  = sqrt((float)(abs(a.x-b.x)*abs(a.x-b.x) + abs(a.y-b.y)*abs(a.y-b.y)));
			dist2 = sqrt((float)(abs(c.x-b.x)*abs(c.x-b.x) + abs(c.y-b.y)*abs(c.y-b.y)));
			if(abs(dist-dist2) < 10)
			{	handPt.m_mode = CHandPoint::TRIANGE;	handPt.m_nX = m_center.x, handPt.m_nY = m_center.y;	}
			else
			{	handPt.m_mode = CHandPoint::SETTING;	}
		}
		break;
	case 4: handPt.m_mode = CHandPoint::RECT;	handPt.m_nX = m_center.x, handPt.m_nY = m_center.y;
		break;
	case 5: handPt.m_mode = CHandPoint::STAR;	handPt.m_nX = m_center.x, handPt.m_nY = m_center.y;
		break;
	default: handPt.m_mode = CHandPoint::NOTHING;
		break;
	}

	TCHAR buf[256] = {0,};
	swprintf(buf, sizeof(buf)/sizeof(buf[0]), _T("%d\t%f\n"), fingerCnt, dist);
	::OutputDebugString(buf);

	return handPt;
}
Example no. 10
int main222( int argc,   char** argv )
{
    CvCapture* capture = 0;

    if( argc == 1 || (argc == 2 && strlen(argv[1]) == 1 && isdigit(argv[1][0])))
        capture = cvCaptureFromCAM( argc == 2 ? argv[1][0] - '0' : 0 );
    else if( argc == 2 )
        capture = cvCaptureFromAVI( argv[1] );

    if( !capture )
    {
        fprintf(stderr,"Could not initialize capturing...\n");
        return -1;
    }

    printf( "Hot keys: \n"
        "\tESC - quit the program\n"
        "\tc - stop the tracking\n"
        "\tb - switch to/from backprojection view\n"
        "\th - show/hide object histogram\n"
        "To initialize tracking, select the object with mouse\n" );

    cvNamedWindow( "Histogram", 1 );
    cvNamedWindow( "CamShiftDemo", 1 );
    cvSetMouseCallback( "CamShiftDemo", on_mouse, 0 );
    cvCreateTrackbar( "Vmin", "CamShiftDemo", &vmin, 256, 0 );
    cvCreateTrackbar( "Vmax", "CamShiftDemo", &vmax, 256, 0 );
    cvCreateTrackbar( "Smin", "CamShiftDemo", &smin, 256, 0 );

    for(;;)
    {
        IplImage* frame = 0;
        int i, bin_w, c;

        frame = cvQueryFrame( capture );
        if( !frame )
            break;

        if( !image )
        {
            /* allocate all the buffers */
            image = cvCreateImage( cvGetSize(frame), 8, 3 );
            image->origin = frame->origin;
            hsv = cvCreateImage( cvGetSize(frame), 8, 3 );
            hue = cvCreateImage( cvGetSize(frame), 8, 1 );
            mask = cvCreateImage( cvGetSize(frame), 8, 1 );
            backproject = cvCreateImage( cvGetSize(frame), 8, 1 );
            hist = cvCreateHist( 1, &hdims, CV_HIST_ARRAY, &hranges, 1 );
            histimg = cvCreateImage( cvSize(320,200), 8, 3 );
            cvZero( histimg );
        }

        cvCopy( frame, image, 0 );
        cvCvtColor( image, hsv, CV_BGR2HSV );

        if( track_object )
        {
            int _vmin = vmin, _vmax = vmax;

            cvInRangeS( hsv, cvScalar(0,smin,MIN(_vmin,_vmax),0),
                        cvScalar(180,256,MAX(_vmin,_vmax),0), mask );
            cvSplit( hsv, hue, 0, 0, 0 );

            if( track_object < 0 )
            {
                float max_val = 0.f;
                cvSetImageROI( hue, selection );
                cvSetImageROI( mask, selection );
                cvCalcHist( &hue, hist, 0, mask );
                cvGetMinMaxHistValue( hist, 0, &max_val, 0, 0 );
                cvConvertScale( hist->bins, hist->bins, max_val ? 255. / max_val : 0., 0 );
                cvResetImageROI( hue );
                cvResetImageROI( mask );
                track_window = selection;
                track_object = 1;

                cvZero( histimg );
                bin_w = histimg->width / hdims;
                for( i = 0; i < hdims; i++ )
                {
                    int val = cvRound( cvGetReal1D(hist->bins,i)*histimg->height/255 );
                    CvScalar color = hsv2rgb(i*180.f/hdims);
                    cvRectangle( histimg, cvPoint(i*bin_w,histimg->height),
                                 cvPoint((i+1)*bin_w,histimg->height - val),
                                 color, -1, 8, 0 );
                }
            }

            cvCalcBackProject( &hue, backproject, hist );
            cvAnd( backproject, mask, backproject, 0 );
            cvCamShift( backproject, track_window,
                        cvTermCriteria( CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 10, 1 ),
                        &track_comp, &track_box );
            track_window = track_comp.rect;

            if( backproject_mode )
                cvCvtColor( backproject, image, CV_GRAY2BGR );
            if( !image->origin )
                track_box.angle = -track_box.angle;
            cvEllipseBox( image, track_box, CV_RGB(255,0,0), 3, CV_AA, 0 );
        }

        if( select_object && selection.width > 0 && selection.height > 0 )
        {
            cvSetImageROI( image, selection );
            cvXorS( image, cvScalarAll(255), image, 0 );
            cvResetImageROI( image );
        }

        cvShowImage( "CamShiftDemo", image );
        cvShowImage( "Histogram", histimg );

        c = cvWaitKey(10);
        if( (char) c == 27 )
            break;
        switch( (char) c )
        {
        case 'b':
            backproject_mode ^= 1;
            break;
        case 'c':
            track_object = 0;
            cvZero( histimg );
            break;
        case 'h':
            show_hist ^= 1;
            if( !show_hist )
                cvDestroyWindow( "Histogram" );
            else
                cvNamedWindow( "Histogram", 1 );
            break;
        default:
            ;
        }
    }

    cvReleaseCapture( &capture );
    cvDestroyWindow("CamShiftDemo");

    return 0;
}
Example no. 11
  void Gesture1::trackMarker (IplImage* destImg, CvPoint _r, CvPoint _b, CvPoint _g, CvPoint _y) {
    
    // find tissue box!
    CvPoint* objPoints = objectDetector->detect(destImg);

    // draw
    world->Step(1.0F/6.0F, 10, 10);
    cvLine(destImg, cvPoint(0,HEIGHT), cvPoint(1000,HEIGHT), CV_RGB(0,255,0), 3);
    for (b2Body* b = world->GetBodyList(); b; b = b->GetNext()) {
      //printf("**draw body\n");
      Box2DData* userData = (Box2DData*)b->GetUserData();
      if (userData != NULL) {
        if (strcmp(userData->type, "Circle") == 0) {
          //b2Vec2 v = b->GetWorldCenter();
          b2Vec2 v = b->GetPosition();
          //printf("** x=%f y=%f r=%f\n", v.x, v.y, userData->radius);
          CvPoint center = cvPoint(v.x*WORLD_SCALE, v.y*WORLD_SCALE);
          cvCircle(destImg, center, userData->radius*WORLD_SCALE, CV_RGB(255,0,0), -1);
        } else if (strcmp(userData->type, "Box") == 0) {
          world->DestroyBody(b);
        }
      }      
    }
    if (objPoints != NULL) {
      printf("construct body\n");
      b2PolygonShape cs;
      b2Vec2 vertices[4] = {
        b2Vec2((float)(objPoints[0].x)/WORLD_SCALE, (float)(objPoints[0].y)/WORLD_SCALE),
        b2Vec2((float)(objPoints[1].x)/WORLD_SCALE, (float)(objPoints[1].y)/WORLD_SCALE),
        b2Vec2((float)(objPoints[2].x)/WORLD_SCALE, (float)(objPoints[2].y)/WORLD_SCALE),
        b2Vec2((float)(objPoints[3].x)/WORLD_SCALE, (float)(objPoints[3].y)/WORLD_SCALE)
      };
      cs.Set(vertices, 4);
      b2BodyDef bd;
      //bd.type = b2_staticBody;
      Box2DData* obj = new Box2DData();
      strcpy(obj->type, "Box");
      bd.userData = obj;
      b2Body* body1 = world->CreateBody(&bd);
      body1->CreateFixture(&cs, 0.0f);
    }

    if (_r.x < 0) return;
    Point2D r = toPoint2D(_r);
    
    // if marker is not moving for a while, reset the path
    int len = path.size();
    if (len > KEEP_MAX) {
      path.erase(path.begin());
    }
    int nearCount = 0;
    int actual = min(KEEP_COUNT, len);
    
    /*
     for(int i=0; i<actual; i++){
     Point2D p = path[len-1-i];
     double d = dist(p, r);
     //printf("dist=%f\n", d);
     if (d < NEAR_THRESHOLD) ++nearCount;
     }
     if (nearCount > (double)actual * DONT_MOVE_THRESHOLD_RATE) {
     // marker is not moving, so clear the path
     printf("cleared\n");
     path.clear();
     }
     */
    
    path.push_back(r);

    // decide if we should recognize
    time_t current;
    time(&current);
    double interval = difftime(current, lastTime);
    printf("interval=%f\n", interval);
    if (interval < INTERVAL_SEC) return;

    len = path.size();
    if (len < 5) return;

    RecognitionResult res = g.recognize(path);
    printf("%s:%f\n", res.name.c_str(), res.score);
    if (res.name == "Circle" && res.score > SCORE_THRESHOLD) {
      printf("##circle detect##\n");
      // convert to vector<Point2D> to CvSeq<CvPoint>
      CvSeqWriter writer;
      CvMemStorage* storage = cvCreateMemStorage(0);
      cvStartWriteSeq( CV_32SC2, sizeof(CvSeq), sizeof(CvPoint), storage, &writer);
      for (int i=0; i<len; i++) {
        CvPoint pt = toCvPoint(path[i]);
        CV_WRITE_SEQ_ELEM(pt, writer);
      }
      CvSeq* seq = cvEndWriteSeq(&writer);
      CvBox2D ellipse = cvFitEllipse2(seq);
      float radius = std::min(ellipse.size.width, ellipse.size.height)/(4.0F*WORLD_SCALE);
      cvEllipseBox(destImg, ellipse, CV_RGB(0,255,255), -1);

      // add Box2D object
      {
        b2CircleShape cs;
        cs.m_radius = radius;
        printf(" x=%f y=%f radius:%f\n", ellipse.center.x/WORLD_SCALE, ellipse.center.y/WORLD_SCALE, radius);
        b2BodyDef bd;
        bd.type = b2_dynamicBody;
        bd.position.Set(ellipse.center.x/WORLD_SCALE, ellipse.center.y/WORLD_SCALE);
        Box2DData* obj = new Box2DData();
        strcpy(obj->type, "Circle");
        obj->radius = radius;
        bd.userData = obj;
        b2Body* body1 = world->CreateBody(&bd);
        b2FixtureDef fixtureDef;
        fixtureDef.shape = &cs;
        fixtureDef.density = 1.0f;
        fixtureDef.friction = 0.3f;
        fixtureDef.restitution = 0.6f;
        body1->CreateFixture(&fixtureDef);
      }

      time(&lastTime);

      //cvEllipseBox(destImg, ellipse, CV_RGB(125,125,255));
    }
  }
int main( int argc, char** argv )
{
	char path[1024];
	IplImage* img;
	help();
	if (argc!=2)
	{
		strcpy(path,"puzzle.png");
		img = cvLoadImage( path, CV_LOAD_IMAGE_GRAYSCALE );
		if (!img)
		{
			printf("\nUsage: mser_sample <path_to_image>\n");
			return 0;
		}
	}
	else
	{
		strcpy(path,argv[1]);
		img = cvLoadImage( path, CV_LOAD_IMAGE_GRAYSCALE );
	}
	
	if (!img)
	{
		printf("Unable to load image %s\n",path);
		return 0;
	}
	IplImage* rsp = cvLoadImage( path, CV_LOAD_IMAGE_COLOR );
	IplImage* ellipses = cvCloneImage(rsp);
	cvCvtColor(img,ellipses,CV_GRAY2BGR);
	CvSeq* contours;
	CvMemStorage* storage= cvCreateMemStorage();
	IplImage* hsv = cvCreateImage( cvGetSize( rsp ), IPL_DEPTH_8U, 3 );
	cvCvtColor( rsp, hsv, CV_BGR2YCrCb );
	CvMSERParams params = cvMSERParams();//cvMSERParams( 5, 60, cvRound(.2*img->width*img->height), .25, .2 );

	double t = (double)cvGetTickCount();
	cvExtractMSER( hsv, NULL, &contours, storage, params );
	t = cvGetTickCount() - t;
	printf( "MSER extracted %d contours in %g ms.\n", contours->total, t/((double)cvGetTickFrequency()*1000.) );
	uchar* rsptr = (uchar*)rsp->imageData;
	// draw mser with different color
	for ( int i = contours->total-1; i >= 0; i-- )
	{
		CvSeq* r = *(CvSeq**)cvGetSeqElem( contours, i );
		for ( int j = 0; j < r->total; j++ )
		{
			CvPoint* pt = CV_GET_SEQ_ELEM( CvPoint, r, j );
			rsptr[pt->x*3+pt->y*rsp->widthStep] = bcolors[i%9][2];
			rsptr[pt->x*3+1+pt->y*rsp->widthStep] = bcolors[i%9][1];
			rsptr[pt->x*3+2+pt->y*rsp->widthStep] = bcolors[i%9][0];
		}
	}
	// find ellipses (it seems cvFitEllipse2 may have an error or something?)
	for ( int i = 0; i < contours->total; i++ )
	{
		CvContour* r = *(CvContour**)cvGetSeqElem( contours, i );
		CvBox2D box = cvFitEllipse2( r );
		box.angle=(float)CV_PI/2-box.angle;
		
		if ( r->color > 0 )
			cvEllipseBox( ellipses, box, colors[9], 2 );
		else
			cvEllipseBox( ellipses, box, colors[2], 2 );
			
	}

	cvSaveImage( "rsp.png", rsp );

	cvNamedWindow( "original", 0 );
	cvShowImage( "original", img );
	
	cvNamedWindow( "response", 0 );
	cvShowImage( "response", rsp );

	cvNamedWindow( "ellipses", 0 );
	cvShowImage( "ellipses", ellipses );

	cvWaitKey(0);

	cvDestroyWindow( "original" );
	cvDestroyWindow( "response" );
	cvDestroyWindow( "ellipses" );
	cvReleaseImage(&rsp);
	cvReleaseImage(&img);
	cvReleaseImage(&ellipses);
	
}
Example no. 13
int track( IplImage* frame, int flag,int Cx,int Cy,int R )
{

    {

        int i, bin_w, c;

        LOGE("#######################Check1############################");

        if( !image )
        {
            /* allocate all the buffers */
            image = cvCreateImage( cvGetSize(frame), 8, 3 );
            image->origin = frame->origin;
            hsv = cvCreateImage( cvGetSize(frame), 8, 3 );
            hue = cvCreateImage( cvGetSize(frame), 8, 1 );
            mask = cvCreateImage( cvGetSize(frame), 8, 1 );
            backproject = cvCreateImage( cvGetSize(frame), 8, 1 );
            hist = cvCreateHist( 1, &hdims, CV_HIST_ARRAY, &hranges, 1 );
            histimg = cvCreateImage( cvSize(320,200), 8, 3 );
            cvZero( histimg );
            LOGE("######################Check2###########################");
        }

        cvCopy( frame, image, 0 );
        cvCvtColor( image, hsv, CV_BGR2HSV );


        {
            int _vmin = vmin, _vmax = vmax;

            cvInRangeS( hsv, cvScalar(0,smin,MIN(_vmin,_vmax),0),
                        cvScalar(180,256,MAX(_vmin,_vmax),0), mask );
            cvSplit( hsv, hue, 0, 0, 0 );
            LOGE("###########################Check3######################");
            if(flag==0)
            {
            	LOGE("###############Initialized#############################");
				selection.x=Cx-R;
				selection.y=Cy-R;
				selection.height=2*R;
				selection.width=2*R;
                float max_val = 0.f;
                cvSetImageROI( hue, selection );
                cvSetImageROI( mask, selection );
                cvCalcHist( &hue, hist, 0, mask );
                cvGetMinMaxHistValue( hist, 0, &max_val, 0, 0 );
                cvConvertScale( hist->bins, hist->bins, max_val ? 255. / max_val : 0., 0 );
                cvResetImageROI( hue );
                cvResetImageROI( mask );
                track_window = selection;
                track_object = 1;

                cvZero( histimg );
                bin_w = histimg->width / hdims;
                for( i = 0; i < hdims; i++ )
                {
                    int val = cvRound( cvGetReal1D(hist->bins,i)*histimg->height/255 );
                    CvScalar color = hsv2rgb(i*180.f/hdims);
                    cvRectangle( histimg, cvPoint(i*bin_w,histimg->height),
                                 cvPoint((i+1)*bin_w,histimg->height - val),
                                 color, -1, 8, 0 );
                }
                LOGE("##############Check4#########################");
            }
            LOGE("##############Check5#########################");
            cvCalcBackProject( &hue, backproject, hist );
            cvAnd( backproject, mask, backproject, 0 );
            cvCamShift( backproject, track_window,
                        cvTermCriteria( CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 10, 1 ),
                        &track_comp, &track_box );
            track_window = track_comp.rect;
            char buffer[50];
            sprintf(buffer,"vals= %d %d and %d",track_window.x,track_window.y,track_window.width);
            LOGE(buffer);
            if( backproject_mode )
                cvCvtColor( backproject, image, CV_GRAY2BGR );
            if( image->origin )
                track_box.angle = -track_box.angle;
            cvEllipseBox( image, track_box, CV_RGB(255,0,0), 3, CV_AA, 0 );
        }

        if( select_object && selection.width > 0 && selection.height > 0 )
        {
            cvSetImageROI( image, selection );
            cvXorS( image, cvScalarAll(255), image, 0 );
            cvResetImageROI( image );
        }

        LOGE("!!!!!!!!!!!!!!!!!!Done Tracking!!!!!!!!!!!!!!!!!!!!!!!!!!!!");


    }



    return 0;
}
int
main(int argc, const char *argv[])
{
  /* RUN AS DAEMON
  pid_t pid;
  if((pid = fork())) return(pid < 0);
  */
  int ret_val = EXIT_FAILURE;
  int is_tracking = 0;
  int has_face;
  //XLIB VAR Init
  Display* display = XOpenDisplay(NULL);
  assert(display);
  //int Screen_Count = XScreenCount(display);
  int Screen_Count = XScreenCount(display); //For laptop
  Window* window = (Window *)malloc(sizeof(Window)*Screen_Count);
  Window ret;
  Mouse mouse;
  unsigned int mask;
  int i;


  //Capture Init
  CvCapture*		    capture	        = cvCaptureFromCAM(-1);
  CvMemStorage*		    mem_storage	        = cvCreateMemStorage(0);
  CvHaarClassifierCascade*  haarclassifier_face = (CvHaarClassifierCascade*)cvLoad(CASCADE_XML_FILENAME_FACE, 0, 0, 0);
  CvHaarClassifierCascade*  haarclassifier_nose = (CvHaarClassifierCascade*)cvLoad(CASCADE_XML_FILENAME_NOSE, 0, 0, 0);
  CvHaarClassifierCascade*  haarclassifier_eyel = (CvHaarClassifierCascade*)cvLoad(CASCADE_XML_FILENAME_EYEL, 0, 0, 0);
  CvHaarClassifierCascade*  haarclassifier_eyer = (CvHaarClassifierCascade*)cvLoad(CASCADE_XML_FILENAME_EYER, 0, 0, 0);

  IplImage* image;
  //cvSetCaptureProperty(capture,CV_CAP_PROP_FRAME_WIDTH, 1280);
  //cvSetCaptureProperty(capture,CV_CAP_PROP_FRAME_HEIGHT, 1024);
  int res_w = cvGetCaptureProperty(capture, CV_CAP_PROP_FRAME_WIDTH);
  int res_h = cvGetCaptureProperty(capture, CV_CAP_PROP_FRAME_HEIGHT);
  //double fps = cvGetCaptureProperty(capture, CV_CAP_PROP_FPS);
  int counter = 0;

  printf("Capturing : %dx%d \n", res_w, res_h);
  cvNamedWindow("Window", CV_WINDOW_NORMAL);

  CvRect tracking_window;
  CvPoint nosetip, lefteye, righteye;
  CvRect  face, l_eye, r_eye, nose;
  TrackObject face_obj;

  //isophote_init();
  while(1)
  {
   for(i = 0; i < Screen_Count; i++)
    {
      window[i] = XRootWindow(display, i);
      if(XQueryPointer(display, window[i], &ret, &ret, 
	    &mouse.root.x, &mouse.root.y, &mouse.win.x, &mouse.win.y, &mask))
	break;
    }



    has_face = 0;
    image = cvQueryFrame(capture);
    if(is_tracking && CAMSHIFT)
    {
      //CAMSHIFT
      if(CAMSHIFT_MAX_ITER > camshift(image, &face_obj))
	continue;
      has_face = 1;
      cvEllipseBox(image, face_obj.track_box, CV_RGB(255, 0, 0), 3, CV_AA, 0);
      tracking_window = face_obj.track_window;
      tracking_window.y += tracking_window.height*0.2;
      tracking_window.height *= 0.4;
      tracking_window.width *= 0.6;
    }
    else if(!Haar_Detect(image, haarclassifier_face, mem_storage, &face))
    {
      /*
      tracking_window.x += tracking_window.width*0.1;
      tracking_window.width *= 0.8;
      tracking_window.height *= 0.8;
      */
      cvSetImageROI(image, face);
#ifdef DEBUG
      cvSaveImage("face.png", image, 0);
#endif

#if CAMSHIFT
      camshift_init(image, &face_obj);
      printf("Face Found, Start Tracking...\n");
#endif
      cvResetImageROI(image);
      is_tracking = 1;
      has_face = 1;
    }

    //Once face is detected
    if(has_face)
    {
      //Draw Face Area
      cvRectangle(image, cvPoint(face.x, face.y),
	cvPoint(face.x+face.width, face.y+face.height),
	CV_RGB(255, 255, 255), 3, 8, 0);
      //Estimate eyes and nose (NO ROI)
      nose = face; //nose
      nose.y += (1-NOSE_UPPER)*face.height;
      nose.height *= (NOSE_UPPER-NOSE_LOWER);
      nose.x += NOSE_LR*face.width;
      nose.width *= (1-2*NOSE_LR);

      l_eye = face;
      l_eye.y += (1-EYE_UPPER)*face.height;
      l_eye.height *= EYE_UPPER-EYE_LOWER;
      l_eye.x += EYE_LR*face.width;
      l_eye.width *= EYE_SIZE;

      r_eye = l_eye;
      r_eye.x += (1-2*EYE_LR)*face.width - r_eye.width;
      //detect nose
      /* NOSE AREA
      cvRectangle(image, cvPoint(tracking_window.x, tracking_window.y),
	cvPoint(tracking_window.x+tracking_window.width, tracking_window.y+tracking_window.height),
	CV_RGB(0, 255, 0), 3, 8, 0);
	*/

      cvSetImageROI(image, nose);
      if(!Haar_Detect(image, haarclassifier_nose, mem_storage, &tracking_window))
      {
	nosetip = CALC_POINT(tracking_window);
	cvRectangle(image, cvPoint(nosetip.x-3, nosetip.y-3),
	    cvPoint(nosetip.x+3, nosetip.y+3),
	    CV_RGB(255, 0, 0), 3, 8, 0);
	nosetip.x += cvGetImageROI(image).x;
	nosetip.y += cvGetImageROI(image).y;
      }
#ifdef POS_DISPLAY
      printf("Nose: %d, %d ", nosetip.x, nosetip.y);
#endif
	/* NOSE 2
	cvRectangle(image, cvPoint(tracking_window.x, tracking_window.y),
	  cvPoint(tracking_window.x+tracking_window.width, tracking_window.y+tracking_window.height),
	  CV_RGB(0, 255, 0), 3, 8, 0);
	  */
      //no nose detected, use kalman

      //find pupil using isophote curvature
      //LEFT EYE
      cvSetImageROI(image, l_eye);
#ifdef USE_HAAR_REFINE
      if(!Haar_Detect(image, haarclassifier_eyel, mem_storage, &tracking_window))
      {
	l_eye.x += tracking_window.x;
	l_eye.y += tracking_window.y;
	l_eye.width = tracking_window.width;
	l_eye.height = tracking_window.height;
	//printf("eye:%d, %d @ %d, %d\n", l_eye.x, l_eye.y, l_eye.x, l_eye.y);
	cvSetImageROI(image, l_eye);
      }
#endif
      cvRectangle(image, cvPoint(0, 0),
	cvPoint(l_eye.width, l_eye.height),
	CV_RGB(0, 0, 255), 3, 8, 0);
#ifdef DEBUG
      cvSaveImage("lefteye.png", image, 0);
#endif
#ifdef CENTERMAP
      calc_stable_ic(image, &tracking_window);
      //cvRectangle(image, cvPoint(tracking_window.x, tracking_window.y),
//	cvPoint(tracking_window.x+tracking_window.width, tracking_window.y+tracking_window.height),
//	CV_RGB(255, 0, 0), 3, 8, 0);
      cvCircle(image, CALC_POINT(tracking_window),3,
	  CV_RGB(255, 0, 0), 1, 8, 0);
      //l_eye.x += CALC_POINT(tracking_window).x - PUPIL_SIZE/2;
      //l_eye.y += CALC_POINT(tracking_window).y - PUPIL_SIZE/2;
      lefteye.x = tracking_window.x+PUPIL_SIZE/2+l_eye.x;
      lefteye.y = tracking_window.y+PUPIL_SIZE/2+l_eye.y;
#else
      cvCircle(image, lefteye = calc_heyecenter(image),3,
	  CV_RGB(255, 0, 0), 1, 8, 0);
      lefteye.x += l_eye.x;
      lefteye.y += l_eye.y;
#endif
#ifdef POS_DISPLAY
      printf("LEYE: %d, %d ", tracking_window.x+PUPIL_SIZE/2+l_eye.x, tracking_window.y+PUPIL_SIZE/2+l_eye.y);
#endif

      //RIGHT EYE
      cvSetImageROI(image, r_eye);
#ifdef USE_HAAR_REFINE
      if(!Haar_Detect(image, haarclassifier_eyer, mem_storage, &tracking_window))
      {
	r_eye.x += tracking_window.x;
	r_eye.y += tracking_window.y;
	r_eye.width = tracking_window.width;
	r_eye.height = tracking_window.height;
	//printf("right eye:%d, %d @ %d, %d\n", r_eye.x, r_eye.y, r_eye.x, r_eye.y);
	cvSetImageROI(image, r_eye);
      }
#endif
      cvRectangle(image, cvPoint(0, 0),
	cvPoint(r_eye.width, r_eye.height),
	CV_RGB(0, 0, 255), 3, 8, 0);
      /*
  counter++;
  char filename[32];
  sprintf(filename, "%d.png", counter);
  cvSaveImage(filename, image, 0);
  */
#ifdef DEBUG
      cvSaveImage("right.png", image, 0);
#endif
#ifdef CENTERMAP
      calc_stable_ic(image, &tracking_window);
      cvCircle(image, CALC_POINT(tracking_window),3,
	  CV_RGB(255, 0, 0), 1, 8, 0);
      righteye.x = tracking_window.x+PUPIL_SIZE/2+r_eye.x;
      righteye.y = tracking_window.y+PUPIL_SIZE/2+r_eye.y+300;
#else
      cvCircle(image, righteye = calc_heyecenter(image),3,
	  CV_RGB(255, 0, 0), 1, 8, 0);
      righteye.x += r_eye.x;
      righteye.y += r_eye.y;
#endif
#ifdef POS_DISPLAY
      printf("REYE: %d, %d                               \r", tracking_window.x+PUPIL_SIZE/2+r_eye.x, tracking_window.y+PUPIL_SIZE/2+r_eye.y);
#endif
      cvResetImageROI(image);
    }
    cvShowImage("Window", image);
    //printf("%d %d %d %d : %d                     \r", mouse.root.x, mouse.root.y, mouse.win.x, mouse.win.y, i);
    fflush(stdout);

    /*
    mouse.win.x = X_A0*(lefteye.x-nosetip.x+42)*LREYE_WEIGHT+X_A0*(righteye.x-nosetip.x-52)*(1-LREYE_WEIGHT) +1920*(1-LREYE_WEIGHT);
    mouse.win.y = Y_A0*(lefteye.y-nosetip.y+74)*LREYE_WEIGHT+Y_A0*(righteye.y-nosetip.y+65)*(1-LREYE_WEIGHT) +1080*(1-LREYE_WEIGHT);
    //if(abs(mouse.win.x-mouse.root.x) < 10 && abs((mouse.win.y-mouse.root.y) < 10))
    {
      mouse.root.x += mouse.win.x;
      mouse.root.y += mouse.win.y;
      mouse.root.x /= 2;
      mouse.root.y /= 2;
      XWarpPointer(display, window[i], window[i], 0, 0, 0, 0, mouse.root.x, mouse.root.y);
    }
    */
      mouse.root.x = 1920+NOSE_AX*nosetip.x;
      mouse.root.y = -540+NOSE_AY*nosetip.y;
      mouse.root.x += X_A0*((lefteye.x+righteye.x)/2-nosetip.x);
      //mouse.root.y += Y_A0*((lefteye.y+righteye.y)/2-nosetip.y-73)+800;
      XWarpPointer(display, 0, window[i], 0, 0, 0, 0, mouse.root.x, mouse.root.y);
      printf("%d  \r",X_A0*((lefteye.x+righteye.x)/2-nosetip.x)); 
      //printf("\n%d %d %d %d : %d                     \r", mouse.root.x, mouse.root.y, mouse.win.x, mouse.win.y, i);
    //Save video
    //cvCreateVideoWriter
    if(cvWaitKey(30) == 'q')
      goto RELEASE_OpenCV_RESOURCE;
      //goto RELEASE_XLib_RESOURCE;
      //
      //
      //

  }

  ret_val = EXIT_SUCCESS;

RELEASE_OpenCV_RESOURCE:
#if CAMSHIFT
  camshift_free(&face_obj);
#endif
  cvDestroyWindow("Window");
  /* Let OS Handle It !
  cvReleaseImage(&image);
  cvReleaseHaarClassifierCascade(&haarclassifier_eyer);
  cvReleaseHaarClassifierCascade(&haarclassifier_eyel);
  cvReleaseHaarClassifierCascade(&haarclassifier_nose);
  cvReleaseHaarClassifierCascade(&haarclassifier_face);
  cvReleaseMemStorage(&mem_storage);
  cvReleaseCapture(&capture);
  */
RELEASE_XLib_RESOURCE:
  free(window);
  XCloseDisplay(display);

  exit(ret_val);
}