// Capture frames until the face detector reports a face.
// Returns the detected face rectangle (storage owned by the detector),
// or NULL if the video stream ends first (in which case all frame/tracker
// resources and the display window are released before returning).
CvRect* waitForFaceDetect(FaceDetection* fD, CaptureFrame* cF)
{
    CvRect* pFaceRect = NULL;
    bool finished = false;

    // Load face cascades
    fD->InitFaceDetection();

    while (1)
    {
        finished = cF->CaptureNextFrame();  // capture to frameCopy
        if (finished)                       // no more frames: clean up and give up
        {
            cF->DeallocateFrames();
            releaseTracker();
            cvDestroyWindow("tracked face");
            return NULL;
        }

        // Detect a face in the freshly captured frame
        pFaceRect = fD->detectFace(cF->getFrameCopy());

        // Pump the HighGUI event loop briefly before the next frame.
        // The original wrapped this in `if (cvWaitKey(10) >= 0);` — an
        // empty statement that discarded the result anyway, so the bare
        // call is equivalent and removes the dead construct.
        cvWaitKey(10);

        // When a face is found, report it and hand the rectangle to the caller
        if (pFaceRect)
        {
            printf("\nFound a face rectangle data %d, %d",
                   pFaceRect->height, pFaceRect->width);
            return pFaceRect;
        }
    }
}
////////////////////////////////// // exitProgram() // void exitProgram(int code) { // Release resources allocated in this file cvDestroyWindow( DISPLAY_WINDOW ); cvReleaseImage( &pframe ); releaseface(); // Release resources allocated in other project files cvReleaseCapture(&capture); closeHandDet(); releaseTracker(); exit(code); }
int main( int argc, char** argv ) { // Store the first detected face CvRect *pFaceRect = NULL; // Store the tracked face CvRect nextFaceRect; //CvBox2D faceBox; // Points to draw the face rectangle CvPoint pt1 = cvPoint(0,0); CvPoint pt2 = cvPoint(0,0); char c = 0; // Object faceDetection of the class "FaceDetection" FaceDetection faceDetection; // Object captureFrame of the class "CaptureFrame" CaptureFrame captureFrame; // Create a new window cvNamedWindow("tracked face", 1); printf("\nPress r to re-initialise tracking"); // Capture from the camera captureFrame.StartCapture(); bool finished = captureFrame.CaptureNextFrame(); // capture into frameCopy if (finished) // if video is finished { captureFrame.DeallocateFrames(); releaseTracker(); cvDestroyWindow("tracked face"); return 0; } // Create the tracker if(!createTracker(captureFrame.getFrameCopy())) fprintf( stderr, "ERROR: tracking initialisation\n" ); // Set Camshift parameters setVmin(30); setSmin(20); // Capture video until a face is detected pFaceRect = waitForFaceDetect(&faceDetection, &captureFrame); // Start tracking if (pFaceRect == NULL) { captureFrame.DeallocateFrames(); releaseTracker(); // Destroy the window previously created cvDestroyWindow("tracked face"); return 0; } // Start tracking startTracking(captureFrame.getFrameCopy(), pFaceRect); // Track the detected face using CamShift while(1) { finished = captureFrame.CaptureNextFrame(); //capture to frameCopy if (finished) { captureFrame.DeallocateFrames(); releaseTracker(); cvDestroyWindow("tracked face"); return 0; } // Track the face in the new video frame nextFaceRect = track(captureFrame.getFrameCopy()); //faceBox = track(captureFrame.getFrameCopy()); pt1.x = nextFaceRect.x; pt1.y = nextFaceRect.y; pt2.x = pt1.x + nextFaceRect.width; pt2.y = pt1.y + nextFaceRect.height; // Draw face rectangle cvRectangle(captureFrame.getFrameCopy(), pt1, pt2, CV_RGB(255,0,0), 3, 8, 0 ); // Draw face ellipse //cvEllipseBox(captureFrame.getFrameCopy(), 
faceBox, //CV_RGB(255,0,0), 3, CV_AA, 0 ); cvShowImage("tracked face", captureFrame.getFrameCopy()); c = cvWaitKey(100); switch(c) { case 27: break; break; case 'r': printf("\nKey pressed for re-initialisation"); // Capture video until a face is detected pFaceRect = waitForFaceDetect(&faceDetection, &captureFrame); if (pFaceRect == NULL) { captureFrame.DeallocateFrames(); releaseTracker(); // Destroy the window previously created cvDestroyWindow("tracked face"); return 0; } releaseTracker(); // Start tracking startTracking(captureFrame.getFrameCopy(), pFaceRect); break; } } // Release the image and tracker captureFrame.DeallocateFrames(); releaseTracker(); // Destroy the window previously created cvDestroyWindow("tracked face"); return 0; }