Example #1
0
//==============================================================================
int main(int argc,char** argv)
{
	//parse command line arguments
	if(argc < 2){cout << usage << endl; return 0;}
	if(parse_help(argc,argv)){cout << usage << endl; return 0;}
	
	//load detector model
	face_detector detector = load_ft<face_detector>(argv[1]);

	//open video stream
	VideoCapture cam; 
	if(argc > 2)cam.open(argv[2]); else cam.open(0);
	if(!cam.isOpened()){
		cout << "Failed opening video file." << endl
		 << usage << endl; return 0;
	}
	//detect until user quits
	namedWindow("face detector");
	while(cam.get(CV_CAP_PROP_POS_AVI_RATIO) < 0.999999){
		Mat im; cam >> im;     
		vector<Point2f> p = detector.detect(im);
		if(p.size() > 0){
			for(int i = 0; i < int(p.size()); i++)
		circle(im,p[i],1,CV_RGB(0,255,0),2,CV_AA);
		}
		imshow("face detector",im);
		if(waitKey(10) == 'q')break;
	}
	destroyWindow("face detector"); cam.release(); return 0;
}
//==============================================================================
int main(int argc,char** argv)
{
    //parse command line arguments: need at least the annotation file path
    if(parse_help(argc,argv)){cout << usage << endl; return 0;}
    if(argc < 2){cout << usage << endl; return 0;}
    //(removed dead `if(argc < 1)` guard: argc is always >= 1, and the check
    // above already guarantees argc >= 2 from here on)

    //load annotation data; its baseDir locates the model files on disk
    string fname = string(argv[1]);
    ft_data ftdata = load_ft_jzp(fname);

    //load tracker model stored relative to the annotation base directory
    face_tracker tracker = load_ft<face_tracker>(string(ftdata.baseDir+"trackermodel.yaml").c_str());
    tracker.detector.baseDir = ftdata.baseDir;

    //create tracker parameters: non-robust fit, coarse-to-fine search windows
    face_tracker_params p; p.robust = false;
    p.ssize.resize(3);
    p.ssize[0] = Size(21,21);
    p.ssize[1] = Size(11,11);
    p.ssize[2] = Size(5,5);

    //open video stream: a file if given, otherwise the default camera
    VideoCapture cam;
    if(argc > 2)cam.open(argv[2]); else cam.open(0);
    if(!cam.isOpened()){
        cout << "Failed opening video file." << endl
        << usage << endl; return 0;
    }
    //track every frame until the stream ends or the user presses 'q'
    namedWindow("face tracker");
    while(cam.get(CV_CAP_PROP_POS_AVI_RATIO) < 0.999999){
        Mat im; cam >> im;
        if(tracker.track(im,p))
            tracker.draw(im);
        imshow("face tracker",im);
        int c = waitKey(10)%256; //mask to a byte for portable key comparison
        if(c == 'q')break;
    }
    destroyWindow("face tracker"); cam.release(); return 0;
}
Example #3
0
//==============================================================================
int main(int argc,char** argv)
{
  //parse command line arguments
  if(parse_help(argc,argv)){cout << usage << endl; return 0;}
  if(argc < 2){cout << usage << endl; return 0;}
  
  //load detector model
  face_tracker tracker = load_ft<face_tracker>(argv[1]);

  //create tracker parameters
  face_tracker_params p; p.robust = false;
  p.ssize.resize(3);
  p.ssize[0] = Size(21,21);
  p.ssize[1] = Size(11,11);
  p.ssize[2] = Size(5,5);

  //open video stream
  VideoCapture cam; 
  if(argc > 2)cam.open(argv[2]); else cam.open(0);
  if(!cam.isOpened()){
    cout << "Failed opening video file." << endl
     << usage << endl; return 0;
  }
  //detect until user quits
  namedWindow("face tracker");
  while(cam.get(CV_CAP_PROP_POS_AVI_RATIO) < 0.999999){
    Mat im; cam >> im; 
    if(tracker.track(im,p))tracker.draw(im);
    draw_string(im,"d - redetection");
    tracker.timer.display_fps(im,Point(1,im.rows-1));
    imshow("face tracker",im);
    int c = waitKey(10);
    if(c == 'q')break;
    else if(c == 'd')tracker.reset();
  }
  destroyWindow("face tracker"); cam.release(); return 0;
}
Example #4
0
//==============================================================================
int main(int argc,char** argv)
{
  // Annotation tool: gather images (from a MUCT dataset or a video/camera
  // stream), hand-annotate landmark points, connectivity and symmetry, and
  // save the result to <output_dir>/annotations.yaml after each stage.
  //parse cmd line options
  if(parse_help(argc,argv)){
    cout << "usage: ./annotate [-v video] [-m muct_dir] [-d output_dir]" 
     << endl; return 0;
  }
  string odir = parse_odir(argc,argv);
  string ifile; int type = parse_ifile(argc,argv,ifile); //type 2 = MUCT dir, 1 = video file, else camera
  string fname = odir + "annotations.yaml"; //file to save annotation data to

  //get data
  namedWindow(annotation.wname);  
  if(type == 2){ //MUCT data
    //parse the MUCT landmark CSV: one sample per line after the header
    string lmfile = ifile + "muct-landmarks/muct76-opencv.csv";
    ifstream file(lmfile.c_str()); 
    if(!file.is_open()){
      cerr << "Failed opening " << lmfile << " for reading!" << endl; return 0;
    }
    string str; getline(file,str); //skip the CSV header line
    while(!file.eof()){
      getline(file,str); if(str.length() == 0)break;
      muct_data d(str,ifile); if(d.name.length() == 0)continue; //skip rows without a usable image name
      annotation.data.imnames.push_back(d.name);
      annotation.data.points.push_back(d.points);
    }
    file.close();
    annotation.data.rm_incomplete_samples();
  }else{
    //open video stream
    VideoCapture cam; 
    //NOTE(review): default camera index here is 1, not 0 as in the other
    //examples — confirm this targets the intended capture device.
    if(type == 1)cam.open(ifile); else cam.open(1);
    if(!cam.isOpened()){
      cout << "Failed opening video file." << endl
       << "usage: ./annotate [-v video] [-m muct_dir] [-d output_dir]" 
       << endl; return 0;
    }
    //get images to annotate: 's' saves the current frame, 'q' stops capture
    annotation.set_capture_instructions();
    while(cam.get(CV_CAP_PROP_POS_AVI_RATIO) < 0.999999){
      Mat im,img; cam >> im; annotation.image = im.clone(); 
      annotation.draw_instructions();
      imshow(annotation.wname,annotation.image); int c = waitKey(10);
      if(c == 'q')break;
      else if(c == 's'){
    //save the frame as a zero-padded PNG in odir and record its filename
    int idx = annotation.data.imnames.size(); char str[1024]; 
    if     (idx < 10)sprintf(str,"%s00%d.png",odir.c_str(),idx);
    else if(idx < 100)sprintf(str,"%s0%d.png",odir.c_str(),idx);
    else               sprintf(str,"%s%d.png",odir.c_str(),idx);
    imwrite(str,im); annotation.data.imnames.push_back(str);
    im = Scalar::all(255); imshow(annotation.wname,im); waitKey(10); //flash white as save feedback
      }
    }
    if(annotation.data.imnames.size() == 0)return 0; //nothing captured, nothing to annotate
    annotation.data.points.resize(annotation.data.imnames.size());

    //annotate first image: user clicks each landmark point, 'q' when done
    setMouseCallback(annotation.wname,pp_MouseCallback,0);
    annotation.set_pick_points_instructions();
    annotation.set_current_image(0);
    annotation.draw_instructions();
    annotation.idx = 0;
    while(1){ annotation.draw_points();
      imshow(annotation.wname,annotation.image); if(waitKey(0) == 'q')break;
    }
    if(annotation.data.points[0].size() == 0)return 0; //no points picked — abort
    annotation.replicate_annotations(0); //presumably seeds all other images from image 0 — verify in its definition
  }
  save_ft(fname.c_str(),annotation.data);
  
  //annotate connectivity (which landmark pairs are joined); 'q' finishes
  setMouseCallback(annotation.wname,pc_MouseCallback,0);
  annotation.set_connectivity_instructions();
  annotation.set_current_image(0);
  annotation.draw_instructions();
  annotation.idx = 0;
  while(1){ annotation.draw_connections(); 
    imshow(annotation.wname,annotation.image); if(waitKey(0) == 'q')break;
  }
  save_ft(fname.c_str(),annotation.data); 

  //annotate symmetry (mirror-point correspondences); 'q' finishes
  setMouseCallback(annotation.wname,ps_MouseCallback,0);
  annotation.initialise_symmetry(0);
  annotation.set_symmetry_instructions();
  annotation.set_current_image(0);
  annotation.draw_instructions();
  annotation.idx = 0; annotation.pidx = -1;
  while(1){ annotation.draw_symmetry(); 
    imshow(annotation.wname,annotation.image); if(waitKey(0) == 'q')break;
  }
  save_ft(fname.c_str(),annotation.data); 

  //annotate the rest (skip for MUCT data, which is already annotated):
  //'p'/'o' step to the next/previous image, clamped to the valid range
  if(type != 2){
    setMouseCallback(annotation.wname,mv_MouseCallback,0);
    annotation.set_move_points_instructions();
    annotation.idx = 1; annotation.pidx = -1;
    while(1){
      annotation.set_current_image(annotation.idx);
      annotation.draw_instructions();
      annotation.set_clean_image();
      annotation.draw_connections();
      imshow(annotation.wname,annotation.image); 
      int c = waitKey(0);
      if     (c == 'q')break;
      else if(c == 'p'){annotation.idx++; annotation.pidx = -1;}
      else if(c == 'o'){annotation.idx--; annotation.pidx = -1;}
      if(annotation.idx < 0)annotation.idx = 0;
      if(annotation.idx >= int(annotation.data.imnames.size()))
    annotation.idx = annotation.data.imnames.size()-1;
    }
  }
  //NOTE(review): the window was created as annotation.wname — confirm it
  //equals "Annotate", otherwise this destroyWindow targets the wrong name.
  save_ft(fname.c_str(),annotation.data); destroyWindow("Annotate"); return 0;
}