/**
 * @function detectFace
 * Detects faces in the frame, draws the first one and (optionally) its eyes,
 * mouth and nose, and returns the detected rectangles.
 */
vector<Rect> detectFace(IplImage *frame, bool detectall)
{
   std::vector<Rect> faces, rects;
   Mat frame_gray;
   Mat frame1 = cvarrToMat(frame);  // wrap the IplImage pixel data in a Mat header (no copy)
   cvtColor( frame1, frame_gray, COLOR_BGR2GRAY );

   equalizeHist( frame_gray, frame_gray );

   //-- Detect faces
   face_cascade.detectMultiScale( frame_gray, faces, 1.1, 2, 0, Size(80, 80) );
   if (faces.size()>=1)
    {
      Mat faceROI = frame_gray( faces[0] );
      //-- Draw the face
      Point center( faces[0].x + faces[0].width/2, faces[0].y + faces[0].height/2 );
//      ellipse( frame1, center, Size( faces[0].width/2, faces[0].height/2), 0, 0, 360, Scalar( 255, 0, 0 ), 2, 8, 0 );
      Point face_topleft(faces[0].x+5, faces[0].y);
      Point face_bottomright(faces[0].x+faces[0].width-5, faces[0].y+faces[0].height+25);
      rectangle(frame1, face_topleft,face_bottomright, Scalar(0,255,0),3, 8, 0);
      rects.push_back(faces[0]);
      if (detectall)
      {
          std::vector<Rect> eyes;
          //-- In each face, detect eyes
          eyes_cascade.detectMultiScale( faceROI, eyes, 1.1, 2, 0 |CV_HAAR_SCALE_IMAGE, Size(30, 30) );
          if( eyes.size() == 2)
          {
              for( size_t j = 0; j < eyes.size(); j++ )
              { //-- Draw the eyes
                  Point eye_center( faces[0].x + eyes[j].x + eyes[j].width/2, faces[0].y + eyes[j].y + eyes[j].height/2 );
                  int radius = cvRound( (eyes[j].width + eyes[j].height)*0.25 );
                  circle( frame1, eye_center, radius, Scalar( 255, 0, 0 ), 3, 8, 0 );
                  //            rectangle(frame1,Rect(faces[0].x+eyes[j].x, faces[0].y+eyes[j].y,eyes[j].width,eyes[j].height),Scalar(255,0,255),3,8,0);
                  rects.push_back(eyes[j]);
              }

          }
          std::vector<Rect> mouths;
          mouth_cascade.detectMultiScale(faceROI, mouths, 1.1, 2, 0, Size(30, 30));
          if (mouths.size()>=1)
          {
              rectangle(frame1, Rect(faces[0].x+mouths[0].x, faces[0].y+mouths[0].y,mouths[0].width,mouths[0].height),Scalar(0,255,255),3,8,0);
              rects.push_back(mouths[0]);
          }

          std::vector<Rect> noses;
          nose_cascade.detectMultiScale(faceROI, noses, 1.1, 2, 0, Size(30, 30));
          if (noses.size()==1)
          {
              Point nose_center( faces[0].x + noses[0].x + noses[0].width/2, faces[0].y + noses[0].y + noses[0].height/2 );
              int radius = cvRound( (noses[0].width + noses[0].height)*0.15 );
              circle( frame1, nose_center, radius, Scalar( 255, 0, 255), 3, 8, 0 );
              //          rectangle(frame1, Rect(faces[0].x+noses[0].x, faces[0].y+noses[0].y,noses[0].width,noses[0].height),Scalar(0,255,255),3,8,0);
          }
      }
    }
   // frame1 wraps the same pixel buffer as *frame, so the drawings above are
   // already present in the caller's image; no conversion back is needed.
   return rects;
}
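
One quirk of detectFace() worth calling out: the returned vector mixes coordinate frames, because faces[0] is pushed in full-frame coordinates while the eye and mouth rectangles are pushed exactly as detected on faceROI, i.e. relative to the face. The helper below is an addition (not part of the original) sketching how a caller could shift the part rectangles back into frame coordinates.

#include <opencv2/opencv.hpp>
#include <vector>

using namespace cv;

// Assumes the layout produced by detectFace() above: rects[0] is the face in
// full-frame coordinates and any following entries (eyes, mouth) are relative
// to the face ROI.
std::vector<Rect> partsToFrameCoords(const std::vector<Rect>& rects)
{
    std::vector<Rect> out;
    if (rects.empty())
        return out;

    const Rect face = rects[0];
    out.push_back(face);
    for (size_t i = 1; i < rects.size(); ++i)
    {
        // Shift the ROI-relative rectangle by the face's top-left corner.
        out.push_back(Rect(rects[i].x + face.x, rects[i].y + face.y,
                           rects[i].width, rects[i].height));
    }
    return out;
}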
Example #2
void detectAndDisplay(Mat frame)
{
	std::vector<Rect> faces;
	Mat frame_gray;

	cvtColor(frame, frame_gray, COLOR_BGR2GRAY);
	equalizeHist(frame_gray, frame_gray);

	//-- Detect faces
	face_cascade.detectMultiScale(frame_gray, faces, 1.1, 2, 0 | CASCADE_SCALE_IMAGE, Size(30, 30));

	for (size_t i = 0; i < faces.size(); i++)
	{
		Point center(faces[i].x + faces[i].width / 2, faces[i].y + faces[i].height / 2);
		ellipse(frame, center, Size(faces[i].width / 2, faces[i].height / 2), 0, 0, 360, Scalar(255, 0, 255), 4, 8, 0);

		Mat faceROI = frame_gray(faces[i]);
		std::vector<Rect> eyes;

		//-- In each face, detect eyes
		eyes_cascade.detectMultiScale(faceROI, eyes, 1.1, 2, 0 | CASCADE_SCALE_IMAGE, Size(30, 30));


		for (size_t j = 0; j < eyes.size(); j++)
		{
			Point eye_center(faces[i].x + eyes[j].x + eyes[j].width / 2, faces[i].y + eyes[j].y + eyes[j].height / 2);
			int radius = cvRound((eyes[j].width + eyes[j].height)*0.25);
			circle(frame, eye_center, radius, Scalar(255, 0, 0), 4, 8, 0);
		}
	}
	//-- Show what you got
	imshow(window_name, frame);
}
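
A hedged driver sketch for the tutorial-style detectAndDisplay() above (an addition, not part of the original listing): it loads the two cascades and feeds webcam frames to the function. The cascade file names and the window_name value are assumptions.

#include <opencv2/opencv.hpp>

using namespace cv;

CascadeClassifier face_cascade, eyes_cascade;
std::string window_name = "Capture - Face detection";  // assumed name

int main()
{
    // Placeholder cascade paths; substitute the files shipped with your OpenCV install.
    if (!face_cascade.load("haarcascade_frontalface_alt.xml")) return -1;
    if (!eyes_cascade.load("haarcascade_eye_tree_eyeglasses.xml")) return -1;

    VideoCapture capture(0);
    if (!capture.isOpened()) return -1;

    Mat frame;
    while (capture.read(frame) && !frame.empty())
    {
        detectAndDisplay(frame);              // draws and shows the frame
        if ((char)waitKey(10) == 27) break;   // ESC quits
    }
    return 0;
}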
  void do_work(const sensor_msgs::ImageConstPtr& msg, const std::string input_frame_from_msg)
  {
    // Work on the image.
    try
    {
      // Convert the image into something opencv can handle.
      cv::Mat frame = cv_bridge::toCvShare(msg, msg->encoding)->image;

      // Messages
      opencv_apps::FaceArrayStamped faces_msg;
      faces_msg.header = msg->header;

      // Do the work
      std::vector<cv::Rect> faces;
      cv::Mat frame_gray;

      cv::cvtColor( frame, frame_gray, cv::COLOR_BGR2GRAY );
      cv::equalizeHist( frame_gray, frame_gray );
      //-- Detect faces
#if OPENCV3
      face_cascade_.detectMultiScale( frame_gray, faces, 1.1, 2, 0, cv::Size(30, 30) );
#else
      face_cascade_.detectMultiScale( frame_gray, faces, 1.1, 2, 0 | CV_HAAR_SCALE_IMAGE, cv::Size(30, 30) );
#endif

      for( size_t i = 0; i < faces.size(); i++ )
      {
        cv::Point center( faces[i].x + faces[i].width/2, faces[i].y + faces[i].height/2 );
        cv::ellipse( frame,  center, cv::Size( faces[i].width/2, faces[i].height/2), 0, 0, 360, cv::Scalar( 255, 0, 255 ), 2, 8, 0 );
        opencv_apps::Face face_msg;
        face_msg.face.x = center.x;
        face_msg.face.y = center.y;
        face_msg.face.width = faces[i].width;
        face_msg.face.height = faces[i].height;

        cv::Mat faceROI = frame_gray( faces[i] );
        std::vector<cv::Rect> eyes;

        //-- In each face, detect eyes
#if OPENCV3
        eyes_cascade_.detectMultiScale( faceROI, eyes, 1.1, 2, 0, cv::Size(30, 30) );
#else
        eyes_cascade_.detectMultiScale( faceROI, eyes, 1.1, 2, 0 | CV_HAAR_SCALE_IMAGE, cv::Size(30, 30) );
#endif

        for( size_t j = 0; j < eyes.size(); j++ )
        {
          cv::Point eye_center( faces[i].x + eyes[j].x + eyes[j].width/2, faces[i].y + eyes[j].y + eyes[j].height/2 );
          int radius = cvRound( (eyes[j].width + eyes[j].height)*0.25 );
          cv::circle( frame, eye_center, radius, cv::Scalar( 255, 0, 0 ), 3, 8, 0 );

          opencv_apps::Rect eye_msg;
          eye_msg.x = eye_center.x;
          eye_msg.y = eye_center.y;
          eye_msg.width = eyes[j].width;
          eye_msg.height = eyes[j].height;
          face_msg.eyes.push_back(eye_msg);
        }

        faces_msg.faces.push_back(face_msg);
      }
      //-- Show what you got
      if( debug_view_) {
        cv::imshow( "Face detection", frame );
        cv::waitKey(1);
      }

      // Publish the image.
      sensor_msgs::Image::Ptr out_img = cv_bridge::CvImage(msg->header, msg->encoding,frame).toImageMsg();
      img_pub_.publish(out_img);
      msg_pub_.publish(faces_msg);
    }
    catch (cv::Exception &e)
    {
      NODELET_ERROR("Image processing error: %s %s %s %i", e.err.c_str(), e.func.c_str(), e.file.c_str(), e.line);
    }

    prev_stamp_ = msg->header.stamp;
  }
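
In the original, do_work() is a member of an opencv_apps-style nodelet (img_pub_, msg_pub_, debug_view_ and the cascades are members). The sketch below is an assumption rather than the original wiring: it shows, in simplified standalone-node form, how such a callback is typically hooked to an image topic via image_transport. The topic name "image" and the use of the header's frame_id as the second argument are assumptions.

#include <ros/ros.h>
#include <image_transport/image_transport.h>

void imageCallback(const sensor_msgs::ImageConstPtr& msg)
{
  // Forward the message plus a frame identifier, mirroring do_work's signature.
  do_work(msg, msg->header.frame_id);
}

int main(int argc, char** argv)
{
  ros::init(argc, argv, "face_detection");
  ros::NodeHandle nh;

  image_transport::ImageTransport it(nh);
  image_transport::Subscriber sub = it.subscribe("image", 1, imageCallback);

  ros::spin();
  return 0;
}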
Example #4
//////////////////////////////////////////////
// detectAndDisplay
//////////////////////////////////////////////
int detectAndDisplay()
{
    // detect faces
    //FacialRecognitionManager::FindFaces();

    //RaspiCvCam::QuickFaceCascade.detectMultiScale(imageMat, RaspiCvCam::Faces, 1.1, 3, CV_HAAR_SCALE_IMAGE, Size(80,80));
    //RaspiCvCam::QuickFaceCascade.detectMultiScale( *frame, RaspiCvCam::Faces, 1.1, 2, 0|CV_HAAR_SCALE_IMAGE, Size(50, 50) );

    //.detectMultiScale(imageMat, RaspiCvCam::Faces, 1.1, 3, CV_HAAR_SCALE_IMAGE, Size(80,80));

    // simplification: we only handle pictures containing a single face
    //DEBUG printf("(D) detectAndDisplay : nb face=%d\n",faces.size());
    if (FacialRecognitionManager::FaceCount==0)
    {
        statusText = "No faces found!";
        return 0;
    }
    else
    {
        //qDebug("- face found. looking for eyes");
        statusText = "Face found!";
        Rect rect = FacialRecognitionManager::Faces[0];

        faceMat = RaspiCvCam::ImageMat( rect );
        cv::resize(faceMat, faceResizedMat, Size(Mydest_sz), 1.0, 1.0, CV_INTER_NN);
        faceImage = QImage((uchar*)faceResizedMat.data, faceResizedMat.cols, faceResizedMat.rows, faceResizedMat.step, QImage::Format_Indexed8);
        faceImage.setColorTable(RaspiCvCam::GrayscaleColorTable);

        if (searching == false)
            return 0;

        std::vector<Rect> eyes;

        //-- In each face, detect eyes
        FacialRecognitionManager::EyesCascade.detectMultiScale( faceMat, eyes, 1.05, 3, 0 |CV_HAAR_SCALE_IMAGE, Size(20, 20) );

        // if no glasses
        if (eyes.size()==2)
        {
            statusText = "Found face without glasses!";
            //qDebug("-- face without glasses");
            // detect eyes
            for( size_t j=0; j<2; j++ )
            {
                Point eye_center( rect.x + eyes[1-j].x + eyes[1-j].width/2, rect.y + eyes[1-j].y + eyes[1-j].height/2 );

                if (j==0) // left eye
                {
                    Myeye_left.x = eye_center.x;
                    Myeye_left.y = eye_center.y;
                }
                if (j==1) // right eye
                {
                    Myeye_right.x = eye_center.x;
                    Myeye_right.y = eye_center.y;
                }
            }
        }
        else
        {
            //qDebug("-- checking for glasses");
            // tests with glasses
            FacialRecognitionManager::GlassesCascade.detectMultiScale( faceMat, eyes, 1.05, 3, 0 |CV_HAAR_SCALE_IMAGE, Size(20, 20) );
            if (eyes.size() != 2)
                return 0;
            else
            {

                //qDebug("-- face with glasses");
                statusText = "Found face with glasses!";

                for( size_t j=0; j<2; j++ )
                {
                    Point eye_center( rect.x + eyes[1-j].x + eyes[1-j].width/2, rect.y + eyes[1-j].y + eyes[1-j].height/2 );
                    if (j==0) // left eye
                    {
                        Myeye_left.x = eye_center.x;
                        Myeye_left.y = eye_center.y;
                    }
                    if (j==1) // right eye
                    {
                        Myeye_right.x = eye_center.x;
                        Myeye_right.y = eye_center.y;
                    }
                }
            }
        }
    }

    // sometimes the eyes come back in the wrong order, so swap them
    if (Myeye_right.x < Myeye_left.x)
    {
        int tmpX = Myeye_right.x;
        int tmpY = Myeye_right.y;
        Myeye_right.x = Myeye_left.x;
        Myeye_right.y = Myeye_left.y;
        Myeye_left.x = tmpX;
        Myeye_left.y = tmpY;
        qDebug("-- eyes are switched, flipping");
    }

    return 1;
}
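
The function above leaves Myeye_left and Myeye_right ordered left-to-right. A common next step, assumed here rather than shown in the original, is to rotate the image around the eye midpoint so the eye line is horizontal before cropping and feeding a recognizer. A minimal sketch, taking an image and two eye centers given in that image's coordinates:

#include <opencv2/opencv.hpp>
#include <cmath>

// Rotate 'img' about the midpoint between the eyes so the eyes end up level.
cv::Mat alignByEyes(const cv::Mat& img, cv::Point left_eye, cv::Point right_eye)
{
    // Angle of the eye line in degrees (y grows downward in image coordinates).
    double dy = static_cast<double>(right_eye.y - left_eye.y);
    double dx = static_cast<double>(right_eye.x - left_eye.x);
    double angle = std::atan2(dy, dx) * 180.0 / CV_PI;

    cv::Point2f center((left_eye.x + right_eye.x) * 0.5f,
                       (left_eye.y + right_eye.y) * 0.5f);
    cv::Mat rot = cv::getRotationMatrix2D(center, angle, 1.0);

    cv::Mat aligned;
    cv::warpAffine(img, aligned, rot, img.size());
    return aligned;
}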
Example #5
void detectAndDisplay(cv::Mat frame) {
  std::vector<cv::Rect> faces;
  cv::Mat frame_gray;

  cv::cvtColor(frame, frame_gray, cv::COLOR_BGR2GRAY);
  cv::equalizeHist(frame_gray, frame_gray);
  
  // Detect Faces
  face_cascade.detectMultiScale(frame_gray, // image
				faces, // objects
				1.1, // scale factor
				2, // min neighbors
				0|cv::CASCADE_SCALE_IMAGE, // flags
				cv::Size(30, 30)); // min size

  for (std::size_t i = 0; i < faces.size(); i++) {
    cv::Point center(faces[i].x + faces[i].width/2,
		     faces[i].y + faces[i].height/2);

    cv::ellipse(frame,
		center,
		cv::Size(faces[i].width/2, faces[i].height/2),
		0,
		0,
		360,
		cv::Scalar(255, 0, 255),
		4,
		8,
		0);

    cv::Mat faceROI = frame_gray(faces[i]);
    std::vector<cv::Rect> eyes;

    // in each face, detect eyes
    eyes_cascade.detectMultiScale(faceROI,
				  eyes,
				  1.1,
				  2,
				  0 | cv::CASCADE_SCALE_IMAGE,
				  cv::Size(30, 30));

    for (std::size_t j = 0; j < eyes.size(); j++) {
      cv::Point eye_center(faces[i].x + eyes[j].x + eyes[j].width/2,
			   faces[i].y + eyes[j].y + eyes[j].height/2);

      int radius = cvRound((eyes[j].width + eyes[j].height) * 0.25);
      cv::circle(frame, 
		 eye_center,
		 radius, 
		 cv::Scalar(255, 0, 0),
		 4,
		 8,
		 0);
    }

  }

  // Show what you got
  cv::imshow(window_name, frame);

}