Example #1
void FaceDet::DetectFaces_( // call once per image to find all the faces
    const Image& img,       // in: the image (grayscale)
    const char*  imgpath,   // in: used only for debugging
    bool         multiface, // in: if false, want only the best face
    int          minwidth,  // in: min face width as percentage of img width
    void*        user,      // in: unused (match virt func signature)
    cv::CascadeClassifier cascade)
{
    (void) imgpath;
    (void) user;
    DetectFaces(detpars_, img, minwidth, cascade);
    DiscardMissizedFaces(detpars_);

    if (multiface) // order faces on increasing distance from left margin
    {
        sort(detpars_.begin(), detpars_.end(), IncreasingLeftMargin);
    }
    else
    {
        // order faces on decreasing width, keep only the first (the largest face)
        sort(detpars_.begin(), detpars_.end(), DecreasingWidth);
        if (NSIZE(detpars_))
            detpars_.resize(1);
    }
    iface_ = 0; // next invocation of NextFace_ must get first face
}
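The two sort calls above rely on comparison predicates, IncreasingLeftMargin and DecreasingWidth, that are defined elsewhere in the detector's sources and are not shown in these examples. A minimal sketch of what they might look like, assuming the detections are stored in a struct (here called DetPar) whose x field is the face center and whose width field is the box width:

// Hypothetical DetPar layout and comparators, for illustration only;
// the real definitions belong to the face detector's own sources.
struct DetPar
{
    double x, y;          // center of the detected face box
    double width, height; // size of the detected face box
};

// order faces left-to-right (used when multiface is true)
static bool IncreasingLeftMargin(const DetPar& a, const DetPar& b)
{
    return (a.x - a.width / 2) < (b.x - b.width / 2);
}

// order faces largest-first (used when only the best face is kept)
static bool DecreasingWidth(const DetPar& a, const DetPar& b)
{
    return a.width > b.width;
}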
Example #2
void FaceDet::DetectFaces_( // call once per image to find all the faces
    const Image& img,       // in: the image (grayscale)
    const char*  imgpath,   // in: used only for debugging
    bool         multiface, // in: if false, want only the best face
    int          minwidth,  // in: min face width as percentage of img width
    void*        user)      // in: unused (match virt func signature)
{
    CV_Assert(user == NULL);
    DetectFaces(detpars_, img, minwidth);
    char tracepath[SLEN];
    sprintf(tracepath, "%s_00_unsortedfacedet.bmp", Base(imgpath));
    TraceFaces(detpars_, img, tracepath);
    DiscardMissizedFaces(detpars_);
    if (multiface) // order faces on increasing distance from left margin
    {
        sort(detpars_.begin(), detpars_.end(), IncreasingLeftMargin);
        sprintf(tracepath, "%s_05_facedet.bmp", Base(imgpath));
        TraceFaces(detpars_, img, tracepath);
    }
    else
    {
        // order faces on decreasing width, keep only the first (the largest face)
        sort(detpars_.begin(), detpars_.end(), DecreasingWidth);
        sprintf(tracepath, "%s_05_sortedfaces.bmp", Base(imgpath));
        TraceFaces(detpars_, img, tracepath);
        if (NSIZE(detpars_))
            detpars_.resize(1);
    }
    iface_ = 0; // next invocation of NextFace_ must get first face
}
Example #3
//=======================================================================
//function : Perform
//purpose  : 
//=======================================================================
void GEOMAlgo_GlueDetector::Perform()
{
  myErrorStatus=0;
  myWarningStatus=0;
  //
  CheckData();
  if (myErrorStatus) {
    return;
  }
  //
  DetectVertices();
  if (myErrorStatus) {
    return;
  }
  //
  DetectEdges();
  if (myErrorStatus) {
    return;
  }
  //
  DetectFaces();
  if (myErrorStatus) {
    return;
  }
}
Example #4
void FaceDet::DetectFaces_(  // call once per image to find all the faces
    const Image& img,        // in: the image (grayscale)
    const char*,             // in: unused (match virt func signature)
    bool         multiface,  // in: if false, want only the best face
    int          minwidth,   // in: min face width as percentage of img width
    void*        user)       // in: unused (match virt func signature)
{
    CV_Assert(user == NULL);
    CV_Assert(!facedet_g.empty()); // check that OpenFaceDetector_ was called
    DetectFaces(detpars_, img, minwidth);
    TraceFaces(detpars_, img, "facedet_BeforeDiscardMissizedFaces.bmp");
    DiscardMissizedFaces(detpars_);
    TraceFaces(detpars_, img, "facedet_AfterDiscardMissizedFaces.bmp");
    if (multiface) // order faces on increasing distance from left margin
    {
        sort(detpars_.begin(), detpars_.end(), IncreasingLeftMargin);
        TraceFaces(detpars_, img, "facedet.bmp");
    }
    else
    {
        // order faces on decreasing width, keep only the first (the largest face)
        sort(detpars_.begin(), detpars_.end(), DecreasingWidth);
        TraceFaces(detpars_, img, "facedet.bmp");
        if (NSIZE(detpars_))
            detpars_.resize(1);
    }
    iface_ = 0; // next invocation of NextFace_ must get first face
}
Example #5
//=======================================================================
//function : Perform
//purpose  :
//=======================================================================
void GEOMAlgo_GlueAnalyser::Perform()
{
  myErrorStatus=0;
  myWarningStatus=0;
  //
  mySolidsToGlue.Clear();
  mySolidsAlone.Clear();
  //
  CheckData();
  if (myErrorStatus) {
    return;
  }
  //
  // Initialize the context
  GEOMAlgo_ShapeAlgo::Perform();
  //
  InnerTolerance();
  if (myErrorStatus) {
    return;
  }
  //
  DetectVertices();
  if (myErrorStatus) {
    return;
  }
  //
  DetectEdges();
  if (myErrorStatus) {
    return;
  }
  //
  DetectFaces();
  if (myErrorStatus) {
    return;
  }
  //
  DetectSolids();
  if (myErrorStatus) {
    return;
  }
}
Example #6
void FaceDetector::classify(const ed::Entity& e, const std::string& property, const ed::perception::CategoricalDistribution& prior,
                            ed::perception::ClassificationOutput& output) const
{
    if (property != "type" && property != "name")
        return;

    // If we already know that this is not going to be a human, skip face detection altogether
    double prior_human;
    if (prior.getScore("human", prior_human) && prior_human == 0)
        return;

    // ---------- Prepare measurement ----------

    // Get the best measurement from the entity
    ed::MeasurementConstPtr msr = e.lastMeasurement();

    if (!msr)
        return;

    // get color image
    const cv::Mat& color_image = msr->image()->getRGBImage();

    // get depth image
    const cv::Mat& depth_image = msr->image()->getDepthImage();

    // Mask color image
    cv::Rect rgb_roi;
    cv::Mat color_image_masked = ed::perception::maskImage(color_image, msr->imageMask(), rgb_roi);

    // ---------- Detect faces ----------

    std::vector<cv::Rect> faces_front;
    std::vector<cv::Rect> faces_profile;

    // Detect faces in the measurement and assert the results
    if (DetectFaces(color_image_masked(rgb_roi), faces_front, faces_profile))
    {
        // write face information to config if a frontal face was found
        int face_counter = 0;
        if (faces_front.size() > 0)
        {
            output.data.writeArray("faces_front");
            writeFaceDetectionResult(*msr, rgb_roi, faces_front, face_counter, output.data);
            output.data.endArray();
        }

        // write face information to config if a profile face was found
        if (faces_profile.size() > 0)
        {
            output.data.writeArray("faces_profile");
            writeFaceDetectionResult(*msr, rgb_roi, faces_profile, face_counter, output.data);
            output.data.endArray();
        }

        if (property == "type")
        {
            output.likelihood.setScore("human", 1);
        }
        else if (property == "name")
        {
            if (!faces_front.empty())
            {
                recognizeFace(color_image_masked(rgb_roi), faces_front[0], output);
            }
        }
    }
}
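The DetectFaces helper called above receives a cropped color image and fills separate vectors with frontal and profile detections. Its implementation is not part of this example; below is a minimal sketch under the assumption that it wraps two preloaded OpenCV cascades (the cascade arguments and tuning values are placeholders, not the module's actual ones).

#include <vector>
#include <opencv2/imgproc.hpp>
#include <opencv2/objdetect.hpp>

// Sketch of a frontal + profile face detector; cascades and parameters
// are assumptions made for illustration.
static bool DetectFacesSketch(const cv::Mat& img,
                              cv::CascadeClassifier& cascade_front,
                              cv::CascadeClassifier& cascade_profile,
                              std::vector<cv::Rect>& faces_front,
                              std::vector<cv::Rect>& faces_profile)
{
    if (img.empty())
        return false;

    cv::Mat gray;
    cv::cvtColor(img, gray, cv::COLOR_BGR2GRAY);
    cv::equalizeHist(gray, gray);

    cascade_front.detectMultiScale(gray, faces_front, 1.1, 3, 0, cv::Size(20, 20));
    cascade_profile.detectMultiScale(gray, faces_profile, 1.1, 3, 0, cv::Size(20, 20));

    return !faces_front.empty() || !faces_profile.empty();
}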
Example #7
IMP_BOOL IMP_VFD_Process(IMP_HANDLE hModule, GRAY_IMAGE_S *pstImage)
{
#ifdef IMP_VFD_HAAR
	VFD_RESULT_S *pstVfdResult;
	VFD_HAAR_MODULE *hVfd = (VFD_HAAR_MODULE *)hModule;
	IMP_Seq* faces;
	int i = 0;
	Imp_Size min_size;
	min_size.height = 0;
	min_size.width = 0;
	pstVfdResult = &hVfd->m_stVfdResult;
    /* use the fastest variant */
	faces = IMP_HaarDetectObjects( hModule, pstImage, hVfd->Cascade, hVfd->storage, 1.2, 2, IMP_HAAR_DO_CANNY_PRUNING,min_size );
    /* draw all the rectangles */
	pstVfdResult->stFaceSet.s32FaceNum = faces->total;
    for( i = 0; i < faces->total; i++ )
    {
		Imp_Rect face_rect;
        /* extract the rectangles only */
		if (i >= 50)
		{
			break;
		}
        face_rect = *(Imp_Rect*)IMP_GetSeqElem( faces, i );
        pstVfdResult->stFaceSet.astFaces[i].centerX = face_rect.x + face_rect.width/2;
		pstVfdResult->stFaceSet.astFaces[i].centerY = face_rect.y + face_rect.height/2;
		pstVfdResult->stFaceSet.astFaces[i].rect.s16X1 = face_rect.x;
		pstVfdResult->stFaceSet.astFaces[i].rect.s16Y1 = face_rect.y;
		pstVfdResult->stFaceSet.astFaces[i].rect.s16X2 = face_rect.x + face_rect.width;
		pstVfdResult->stFaceSet.astFaces[i].rect.s16Y2 = face_rect.y + face_rect.height;
    }
#else
	int i = 0;
	VFD_MODULE *hVfd = (VFD_MODULE*)hModule;
    IMP_S32 nFaces = IMP_MAX_FACE_CNT;
	IMP_U8 *pYRaw8 = pstImage->pu8Data;
	IMP_U32 u32Width = pstImage->s32W;
	IMP_U32 u32Height = pstImage->s32H;
    FACEINFO *io_pFace = (FACEINFO *)&hVfd->m_stVfdResult.stFaceSet.astFaces;
    IMP_U32 nResult;
    hVfd->m_stVfdResult.stFaceSet.s32FaceNum = 0;
    hVfd->m_stVfdResult.stEventSet.s32EventNum = 0;

    nResult = DetectFaces(&hVfd->m_pFaceInstance, pYRaw8, u32Width, u32Height, &nFaces, io_pFace);
	if ( nResult != 0 )
		return IMP_FALSE;
  //  printf("nFaces = %d\n",nFaces);
    hVfd->m_stVfdResult.stFaceSet.s32FaceNum = nFaces;
	/*for (i = 0; i < nFaces; i++)
	{
        printf("centerX=%d,centerY=%d,faceSize=%d,rect.s16X1=%d,rect.s16Y1=%d,rect.s16X2=%d,rect.s16Y2=%d\n",
			hVfd->m_stVfdResult.stFaceSet.astFaces[i].centerX,
			hVfd->m_stVfdResult.stFaceSet.astFaces[i].centerY,
			hVfd->m_stVfdResult.stFaceSet.astFaces[i].faceSize,
			hVfd->m_stVfdResult.stFaceSet.astFaces[i].rect.s16X1,
			hVfd->m_stVfdResult.stFaceSet.astFaces[i].rect.s16Y1,
			hVfd->m_stVfdResult.stFaceSet.astFaces[i].rect.s16X2,
			hVfd->m_stVfdResult.stFaceSet.astFaces[i].rect.s16Y2
			);
	}*/

	//VFD_FaceCaptureAnalysis(hVfd);

	//VFD_FaceCamouflageAnalysis(hVfd);

	//VFD_PasswordPeeprAnalysis(hVfd);
#endif
    return IMP_TRUE;
}
Example #8
bool CPlanarGraph::LoadGraphFromXML(const char* fileName, bool flagDetectFaces /* = true */, bool flagIgnoreIndiv /* = true */)
{
	// Clear the current graph...
	ClearGraph();

	TiXmlDocument doc;
	bool loadFlag = doc.LoadFile(fileName);
	if ( loadFlag == false )
	{
		std::cout << "Failed to load graph from " << fileName << "!\n";
		return loadFlag;
	}
	//doc.Print();

	TiXmlNode* xmlRoot = 0;
	xmlRoot = doc.RootElement();
	assert( xmlRoot );

	TiXmlNode* xmlNode = xmlRoot->FirstChild();
	while ( xmlNode != 0 )
	{
		if ( strcmp(xmlNode->Value(), "Node") == 0 )
		{
			// Parse a node...
			CGraphNode graphNode;
			float px, py;
			int rx = xmlNode->ToElement()->QueryFloatAttribute("px", &px);
			int ry = xmlNode->ToElement()->QueryFloatAttribute("py", &py);
			if ( rx != TIXML_SUCCESS || ry != TIXML_SUCCESS )
			{
				graphNode.RandomlyInitPos();
			}
			else
			{
				graphNode.SetPos(px, py);
			}
			int type;
			int r = xmlNode->ToElement()->QueryIntAttribute("type", &type);
			if ( r == TIXML_SUCCESS )
			{
				graphNode.SetType(type);
			}
			int boundary;
			int rb = xmlNode->ToElement()->QueryIntAttribute("boundary", &boundary);
			if ( rb == TIXML_SUCCESS )
			{
				graphNode.SetBoundaryType(boundary);
			}
			int fixed;
			int rf = xmlNode->ToElement()->QueryIntAttribute("fix", &fixed);
			if ( rf == TIXML_SUCCESS && fixed != 0 )
			{
				graphNode.SetFlagFixed(true);
			}
			const char* str = xmlNode->ToElement()->Attribute("name");
			if ( str != 0 && *str != '\0' )
			{
				graphNode.SetName(str);
			}
			AddGraphNode(graphNode);
		}
		else if ( strcmp(xmlNode->Value(), "Edge") == 0 )
		{
			// Parse an edge...
			int idx0 = -1;
			int idx1 = -1;
			int r0 = xmlNode->ToElement()->QueryIntAttribute("node0", &idx0);
			int r1 = xmlNode->ToElement()->QueryIntAttribute("node1", &idx1);
			if ( r0 != TIXML_SUCCESS )
			{
				const char* str = xmlNode->ToElement()->Attribute("name0");
				idx0 = FindNodeAccordingToName(str);
			}
			if ( r1 != TIXML_SUCCESS )
			{
				const char* str = xmlNode->ToElement()->Attribute("name1");
				idx1 = FindNodeAccordingToName(str);
			}
			if ( idx0 >= 0 && idx1 >= 0 )
			{
				CGraphEdge graphEdge(idx0, idx1);
				AddGraphEdge(graphEdge);
			}
		}

		// Move to the next sibling...
		xmlNode = xmlNode->NextSibling();
	}

	SetNodeNeighbors();
	if ( flagIgnoreIndiv == true )
	{
		RemoveIndividualNodes();
	}
	if ( flagDetectFaces == false )
	{
		return loadFlag;
	}
	//PrintGraph();

	// Step 1: Detect faces of the planar graph...
	DetectFaces();
	return loadFlag;
}
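A caller of this loader might look like the sketch below; the file name and error handling are illustrative, and face detection is already triggered inside LoadGraphFromXML because flagDetectFaces defaults to true.

#include <iostream>

// Hypothetical caller of CPlanarGraph::LoadGraphFromXML.
bool LoadExampleGraph(CPlanarGraph& graph)
{
	if ( !graph.LoadGraphFromXML("graph.xml") ) // assumed file name
	{
		std::cout << "Could not load the graph description.\n";
		return false;
	}
	// DetectFaces() has already run at this point.
	return true;
}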
Example #9
  bool LocateFaceBox::Process() {
    _pFace_model = LBModel::ReadModel(_ffacemodel);
    _pNonface_model = LBModel::ReadModel(_fnonfacemodel);
    
    list<LBBox> candidates;
    
    DetectFaces(candidates);
    
    if (candidates.empty()) {
      cout << "!!! No qualified face found !!!" << endl;
      return false;
    }
    
    LBBox fa;
    float dmax=0.0;
    int i=0;
    for (list<LBBox>::iterator ci=candidates.begin();ci!=candidates.end();ci++) {
      float d = ci->deltan - ci->deltaf;

      if (i==0 || d>dmax) {
      	dmax = d;
      	fa = *ci;
      }      
      i++;
    }
    
    if (Verbose()>2)
    cout << "fa.l=" << fa.l << " "
	 << "fa.t=" << fa.t << " "
	 << "fa.w=" << fa.w << " "
	 << "fa.degree=" << fa.degree << " "
	 << "fa.deltaf=" << fa.deltaf << " "
	 << "fa.deltan=" << fa.deltan << endl;

    imagedata* pImg = getImg()->accessFrame();
    int w0 = pImg->width();
    int h0 = pImg->height();
    float xc0 = 0.5*(w0-1);
    float yc0 = 0.5*(h0-1);
    float scale = (float)w0/fa.w;
    float xc = 0.5*(fa.w-1);
    float yc = 0.5*(h0/scale-1);
    float theta = PI * fa.degree / 180;    

    scalinginfo si(theta, xc, yc);
    float ls,ts;
    si.rotate_dst_xy_c(fa.l, fa.t, ls, ts);
    int l = round(scale*ls);
    int t = round(scale*ts);
    int w = round(scale*WIDTH_BOX_FACIALPARTS);
    int h = round(scale*HEIGHT_BOX_FACIALPARTS);
    float irxc, iryc;
    si.rotate_dst_xy_c((fa.l+0.5*WIDTH_BOX_FACIALPARTS),
		       (fa.t+0.5*HEIGHT_BOX_FACIALPARTS),
		       irxc, iryc);
    irxc = scale * irxc;
    iryc = scale * iryc;

    stringstream faceinnerbox;
    faceinnerbox << l << " " << t << " "
		 << w << " " << h << " "
		 << theta << " " << irxc << " " << iryc;

    SegmentationResult faceinnerboxres;
    faceinnerboxres.name="faceinnerbox";
    faceinnerboxres.type="rotated-box";
    faceinnerboxres.value=faceinnerbox.str();
    AddResultToXML(faceinnerboxres);

    float W = 30.0; // standard width of face inner box
    float HFORHEAD = 15.0; // standard forehead height
    float WF = 46.0;
    float HF = 56.0;
    float s = w / W; // scale between real face inner box and standard
                     // face inner box
    float wf = WF * s;
    float hf = HF * s;
    float lr, tr;

    scalinginfo si0(theta, xc0, yc0);

    si0.rotate_src_xy_c(lr, tr, l, t);
    float flfr = lr - 0.5*(WF-W)*s;
    float ftfr = tr - HFORHEAD*s;

    float flf, ftf;
    si0.rotate_dst_xy_c(flfr, ftfr, flf, ftf);

    float frxc, fryc;
    si0.rotate_dst_xy_c(flfr+0.5*wf, ftfr+0.5*hf, frxc, fryc);
  	
    stringstream facebox;
    facebox << round(flf) << " " << round(ftf) << " "
	    << round(wf) << " " << round(hf) << " "
            << theta << " " << frxc << " " << fryc;
    
    SegmentationResult faceboxres;
    faceboxres.name="face";
    faceboxres.type="rotated-box";
    faceboxres.value=facebox.str();
    AddResultToXML(faceboxres);

    float wh = hf;
    float WH = HF;
    float hlfr = lr - 0.5*(WH-W)*s;
    float htfr = ftfr;

    float hlf, htf;
    si0.rotate_dst_xy_c(hlfr, htfr, hlf, htf);

    float hrxc, hryc;
    si0.rotate_dst_xy_c(hlfr+0.5*wh, htfr+0.5*wh, hrxc, hryc);

    stringstream headbox;
    headbox << round(hlf) << " " << round(htf) << " "
	    << round(wh) << " " << round(wh) << " "
            << theta << " " << hrxc << " " << hryc;
    
    SegmentationResult headboxres;
    headboxres.name="head";
    headboxres.type="rotated-box";
    headboxres.value=headbox.str();
    AddResultToXML(headboxres);
    
    if (Verbose()>2)
      ShowFaceBox(fa);

    return ProcessNextMethod();
  }
Example #10
int MakeFeatureInMem(
		IplImage* 	RGBA,
		IplImage* 	depth,
		IplImage*	mask,
		FEATURE* 	feature){
	if (RGBA==NULL){
		fprintf(stderr, "image file is required to create feature set!");
		return 1;
	}

	IplImage *hsv_img, *h, *s, *v;
	if (HUELBP_ON){
		// convert to hsv image
		hsv_img = cvCreateImage( cvGetSize(RGBA), IPL_DEPTH_8U, 3);
		cvCvtColor(RGBA, hsv_img, CV_RGB2HSV);

		h = cvCreateImage( cvGetSize(hsv_img), IPL_DEPTH_8U, 1 );
		s = cvCreateImage( cvGetSize(hsv_img), IPL_DEPTH_8U, 1 );
		v = cvCreateImage( cvGetSize(hsv_img), IPL_DEPTH_8U, 1 );

		// Split image onto the color planes
		cvSplit( hsv_img, h, s, v, NULL );
	}
	// convert to grayscale-image
	IplImage* gray_img = RGBA;
	if (RGBA->nChannels > 1) {
		gray_img = cvCreateImage(cvGetSize(RGBA), IPL_DEPTH_8U, 1 );
		cvCvtColor( RGBA, gray_img, CV_RGB2GRAY );
	}
//	cvEqualizeHist(gray_img,gray_img);

	feature->grid_x 	= GRID_X;
	feature->grid_y 	= GRID_Y;
	feature->radius 	= RADIUS;
	feature->neighbors	= NEIGHBORS;

	int numPatterns = UNIFORM_ON? (NEIGHBORS+2) : pow(2.0, NEIGHBORS);
	//detect faces
	CvSeq* faces;
	int retCode = DetectFaces(gray_img, depth, &faces, FOREGRND_ON);
	if (retCode){//no faces found
		feature->histogram 		= NULL;
		feature->hue_histogram 	= NULL;
		feature->num_faces 		= 0;
		return 0;
	}else{
		//calculate features
		feature->num_faces 		= faces->total;
		feature->histogram 		= (CvMat**) malloc(faces->total*sizeof(CvMat*));
		feature->hue_histogram 	= (CvMat**) malloc(faces->total*sizeof(CvMat*));
		for(int i = 0; i < faces->total; i++ )
		{
			// Create a new rectangle for drawing the face
			CvRect* r = (CvRect*)cvGetSeqElem( faces, i ); // Find the dimensions of the face, and scale it if necessary
			IplImage* face_img = CreateSubImg(gray_img, *r);
			IplImage* lbp_img =  CalcLBP(face_img, RADIUS, NEIGHBORS, UNIFORM_ON);

			if (lbp_img==NULL){
				fprintf(stderr, "failed to create lbp image!\n");
				return 1;
			}
			feature->histogram[i] = CalcSpatialHistogram(lbp_img, numPatterns, GRID_X, GRID_Y);
			if (feature->histogram[i]==NULL){
				fprintf(stderr, "failed to create spatial histogram!\n");
				return 2;
			}
			cvReleaseImage(&face_img);
			cvReleaseImage(&lbp_img);

			if (HUELBP_ON){
				// Create a hue face image
				IplImage* hue_face_img = CreateSubImg(h, *r);

				//Create Hue LBP
				IplImage* hue_lbp = CalcLBP(hue_face_img, RADIUS, NEIGHBORS, UNIFORM_ON);
				if (hue_lbp==NULL){
					fprintf(stderr, "failed to create hue-lbp image!\n");
					return 1;
				}
				//Create Hue Spatial Histogram
				feature->hue_histogram[i] = CalcSpatialHistogram(hue_lbp, numPatterns, GRID_X, GRID_Y);
				if (feature->hue_histogram[i]==NULL){
					fprintf(stderr, "failed to create hue spatial histogram!\n");
					return 2;
				}

				cvReleaseImage(&hue_face_img);
				cvReleaseImage(&hue_lbp);
			}


		}
	}
	if (HUELBP_ON){
		cvReleaseImage(&hsv_img);
		cvReleaseImage(&h);
		cvReleaseImage(&s);
		cvReleaseImage(&v);
	}
	if (RGBA->nChannels > 1) {
		cvReleaseImage(&gray_img);
	}
	return 0;

}
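MakeFeatureInMem allocates the histogram and hue_histogram arrays with malloc and fills them with CvMat objects, so the caller is responsible for releasing them. A possible cleanup routine, assuming only the FEATURE fields used above:

#include <stdlib.h>
#include <opencv2/core/core_c.h>

// Hypothetical cleanup for the buffers MakeFeatureInMem allocates;
// field names follow the usage in the function above.
void ReleaseFeature(FEATURE* feature)
{
	if (feature == NULL)
		return;
	for (int i = 0; i < feature->num_faces; i++)
	{
		if (feature->histogram != NULL && feature->histogram[i] != NULL)
			cvReleaseMat(&feature->histogram[i]);
		if (feature->hue_histogram != NULL && feature->hue_histogram[i] != NULL)
			cvReleaseMat(&feature->hue_histogram[i]);
	}
	free(feature->histogram);
	free(feature->hue_histogram);
	feature->histogram = NULL;
	feature->hue_histogram = NULL;
	feature->num_faces = 0;
}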