//////////////////////////////////
// initAll()
//
int initAll()
{
	// Create the display window
	cvNamedWindow( DISPLAY_WINDOW, 1 );
	// Initialize the tracker with the first captured frame
	pframe = cvQueryFrame( capture );
	if( !createTracker(pframe) )
		return 0;

	// Load the hand-detection cascade
	initHandDet("/home/vignesh/Downloads/hand.xml");
	//initHandDet1("v.xml");

	// Set Camshift parameters
	setVmin(60);
	setSmin(50);

	// Initialise face detection
	initface();
	return 1;
}
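
This example opens a display window and starts pulling frames from a global capture, but the matching teardown is not shown. Below is a minimal cleanup sketch, assuming the same global capture, DISPLAY_WINDOW, and tracker; the function name closeAll() is hypothetical, releaseTracker() is the teardown call used in Example #3, and the cv* calls are standard legacy-OpenCV C API.

//////////////////////////////////
// closeAll() -- hypothetical counterpart to initAll(), not part of the original source
//
#include <opencv/highgui.h>   // legacy C API header (assumed; the snippets omit includes)

void closeAll()
{
	// Stop the tracker before the capture that owns its frames goes away
	releaseTracker();

	// Frames returned by cvQueryFrame() are owned by the capture, so only
	// the capture itself and the display window need releasing here
	cvReleaseCapture( &capture );
	cvDestroyWindow( DISPLAY_WINDOW );
}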
Example #2
int initAll()
{
	initConfiguration();

//	cout << "Use webcam? (Y/N)" <<endl;
//
//	char cc = fgetc(stdin);
	if( !initCapture(true, cam_id) ) //!initCapture(cc == 'Y' || cc == 'y', cam_id)
		return 0;

	if( !initFaceDet((OPENCV_ROOT + "/data/haarcascades/haarcascade_frontalface_default.xml").c_str()) )
	{
		cerr << "failed initFaceDet with " << OPENCV_ROOT << "/data/haarcascades/haarcascade_frontalface_default.xml" << endl;
		return 0;
	}
	// Startup message tells user how to begin and how to exit
	printf( "\n********************************************\n"
	        "To exit, click inside the video display,\n"
	        "then press the ESC key\n\n"
			"Press <ENTER> to begin"
			"\n********************************************\n" );
	fgetc(stdin);

	// Create the display window
	cvNamedWindow( DISPLAY_WINDOW, 1 );

	// Initialize tracker
	captureVideoFrame();
	if( !createTracker(pfd_pVideoFrameCopy) ) return 0;

	// Set Camshift parameters
	setVmin(60);
	setSmin(50);

	// Initialise face detection
	FdInit();

	return 1;
}
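
Both initAll() examples set the CamShift thresholds with setVmin(60) / setSmin(50), but their effect is hidden inside the tracker. The sketch below shows how such thresholds are typically applied before the histogram / back-projection step; buildHueMask() is a hypothetical helper, and the exact masking performed by setVmin/setSmin in this code base may differ.

#include <opencv/cv.h>   // legacy C API header (assumed; the snippets omit includes)

// Pixels with too little saturation or brightness have unreliable hue,
// so they are masked out before the CamShift histogram is used
static void buildHueMask( IplImage* frameBGR, IplImage* hsv,
                          IplImage* hue, IplImage* mask,
                          int vmin, int smin )
{
	// Convert the frame to HSV
	cvCvtColor( frameBGR, hsv, CV_BGR2HSV );

	// Keep only pixels with saturation >= smin and value >= vmin
	cvInRangeS( hsv,
	            cvScalar( 0,   smin, vmin, 0 ),
	            cvScalar( 180, 256,  256,  0 ),
	            mask );

	// Isolate the hue plane that the histogram / back-projection work on
	cvSplit( hsv, hue, NULL, NULL, NULL );
}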
Example #3
int main( int argc, char** argv )
{
	// Store the first detected face
	CvRect *pFaceRect = NULL;

	// Store the tracked face
	CvRect nextFaceRect;
	//CvBox2D faceBox; 

	// Points to draw the face rectangle
	CvPoint pt1 = cvPoint(0,0);
	CvPoint pt2 = cvPoint(0,0);
	
	char c = 0;  

	// Object faceDetection of the class "FaceDetection"
    FaceDetection faceDetection;

	// Object captureFrame of the class "CaptureFrame"
	CaptureFrame captureFrame; 

	// Create a new window 
    cvNamedWindow("tracked face", 1);

	printf("\nPress r to re-initialise tracking");

	// Capture from the camera
	captureFrame.StartCapture();

	bool finished = captureFrame.CaptureNextFrame(); // capture into frameCopy
	if (finished) // if video is finished
	{
		captureFrame.DeallocateFrames();
		releaseTracker();
		cvDestroyWindow("tracked face");
		return 0;
	}
		      
    // Create the tracker
    if(!createTracker(captureFrame.getFrameCopy())) 
	   fprintf( stderr, "ERROR: tracking initialisation\n" );

	// Set Camshift parameters
	setVmin(30);
	setSmin(20);

	// Capture video until a face is detected
	pFaceRect = waitForFaceDetect(&faceDetection, &captureFrame);
	if (pFaceRect == NULL) // no face was found before the video ended
	{
		captureFrame.DeallocateFrames();
		releaseTracker();
		// Destroy the window previously created
		cvDestroyWindow("tracked face");
		return 0;
	}
	// Start tracking
	startTracking(captureFrame.getFrameCopy(), pFaceRect);


	// Track the detected face using CamShift
	while(1)
	{
		finished = captureFrame.CaptureNextFrame(); //capture to frameCopy
		
		if (finished)
		{
			captureFrame.DeallocateFrames();
			releaseTracker();
			cvDestroyWindow("tracked face");
			return 0;
		}
			 
		// Track the face in the new video frame
		nextFaceRect = track(captureFrame.getFrameCopy());
		//faceBox = track(captureFrame.getFrameCopy());

		pt1.x = nextFaceRect.x;
		pt1.y = nextFaceRect.y;
		pt2.x = pt1.x + nextFaceRect.width;
		pt2.y = pt1.y + nextFaceRect.height;

		// Draw face rectangle
		cvRectangle(captureFrame.getFrameCopy(), pt1, pt2, CV_RGB(255,0,0), 3, 8, 0 );

		// Draw face ellipse
		//cvEllipseBox(captureFrame.getFrameCopy(), faceBox,
		             //CV_RGB(255,0,0), 3, CV_AA, 0 );

		cvShowImage("tracked face", captureFrame.getFrameCopy());
			
		c = cvWaitKey(100);
		switch(c)
		{
			case 27: // ESC: release everything and exit
				captureFrame.DeallocateFrames();
				releaseTracker();
				cvDestroyWindow("tracked face");
				return 0;

			case 'r': // re-initialise tracking on a newly detected face
				printf("\nKey pressed for re-initialisation");
				// Capture video until a face is detected
				pFaceRect = waitForFaceDetect(&faceDetection, &captureFrame);

				if (pFaceRect == NULL)
				{
					captureFrame.DeallocateFrames();
					releaseTracker();
					// Destroy the window previously created
					cvDestroyWindow("tracked face");
					return 0;
				}
				releaseTracker();
				// Restart tracking with the new face rectangle
				startTracking(captureFrame.getFrameCopy(), pFaceRect);
				break;
		}
	}

	// Release the image and tracker
	captureFrame.DeallocateFrames();
	releaseTracker();

	// Destroy the window previously created
	cvDestroyWindow("tracked face");
	return 0;
}
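
The helpers used above (createTracker, startTracking, track, releaseTracker) are not shown in these examples. Below is a minimal sketch of the per-frame CamShift step that a track()-style helper typically wraps, assuming the hue/mask images, histogram, and search window were prepared from the initial face rectangle; camshiftStep() and its parameters are hypothetical names, and only standard legacy-OpenCV C API calls are used.

#include <opencv/cv.h>   // legacy C API header (assumed; the snippets omit includes)

// One CamShift iteration; a real tracker would allocate the back-projection
// buffer once instead of per frame
CvRect camshiftStep( IplImage* hue, IplImage* mask,
                     CvHistogram* hist, CvRect* trackWindow )
{
	CvConnectedComp trackComp;
	CvBox2D trackBox;
	IplImage* backproject = cvCreateImage( cvGetSize(hue), IPL_DEPTH_8U, 1 );

	// Back-project the face hue histogram onto the current frame and
	// suppress pixels rejected by the Vmin/Smin mask
	cvCalcBackProject( &hue, backproject, hist );
	cvAnd( backproject, mask, backproject, 0 );

	// Run CamShift from the previous search window; it returns the new
	// window plus an oriented box around the tracked region
	cvCamShift( backproject, *trackWindow,
	            cvTermCriteria( CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 10, 1 ),
	            &trackComp, &trackBox );
	*trackWindow = trackComp.rect;

	// trackBox could be drawn with cvEllipseBox(), as in the commented-out
	// drawing code in Example #3
	cvReleaseImage( &backproject );
	return trackComp.rect;
}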