Example #1
extern "C" JNIEXPORT void JNICALL
Java_budny_moneykeeper_cv_Recognizer_nativeRecognize(
       JNIEnv*, jobject,
       jlong j_recognizer, jlong j_src_mat, jlong j_dst_mat) {
    Recognizer* recognizer = reinterpret_cast<Recognizer*>(j_recognizer);
    cv::Mat& src_mat = *(reinterpret_cast<cv::Mat*>(j_src_mat));
    cv::Mat& dst_mat = *(reinterpret_cast<cv::Mat*>(j_dst_mat));
    recognizer->recognize(src_mat, dst_mat);
}
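The two jlong parameters above carry native pointer values: OpenCV's Java bindings expose a cv::Mat address via Mat.getNativeObjAddr(), and j_recognizer is assumed to hold a pointer returned by an earlier native create call. The actual budny/moneykeeper Recognizer is not shown here; the following is only a minimal sketch of a class shape such a bridge could forward to, with a placeholder grayscale conversion standing in for real recognition logic.

#include <opencv2/core.hpp>
#include <opencv2/imgproc.hpp>

// Hypothetical native-side class; not the real budny/moneykeeper implementation.
class Recognizer {
public:
    // Reads src and writes a processed result into dst (placeholder logic).
    void recognize(const cv::Mat& src, cv::Mat& dst) {
        cv::cvtColor(src, dst, cv::COLOR_BGR2GRAY);
    }
};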
Example #2
int main(int argc, char *argv[])
{
    QApplication a(argc, argv);
    Recognizer w;
    w.show();

    return a.exec();
}
Example #3
File: main.cpp  Project: fxia22/facerec
int main(int argc, char** argv)
{
    printf("Recognizer began to work\n");
    Recognizer* rec = new Recognizer();
    bool flag = rec->Init(argc, argv);

    if (flag)
    {
        rec->Run();
    }

    delete rec;
    printf("Bye!\n");
    return 0;
}
Example #4
int main(int argc, const char** argv)
{
	//----------------------------------- variables initialisation ---------------------------------------------------------
	std::thread t[2];
	// Video is the class used for video capture and face detection
	Video myVideo(0); // 0 is the default camera; use -1 for any camera, or a number between 1 and 99
	Recognizer myRecognizer;

	std::vector<cv::Mat> lastFacesDetected;
	int i = 0;
	std::string namePersonRecognized, goodName;
	time_t timer;
	time(&timer); // store the current time (in seconds since the epoch)

	cv::Mat imgNull;

	//Launch video in a thread
	t[0] = std::thread([&] {myVideo.start(); });
	myVideo.startFaceDetect();

	while (true)
	{
		// only recognize when at least 1 s has passed, faces were detected, and they differ from the last batch processed
		if (abs(time(NULL) - timer) > 1 && !myVideo.getLastFacesDetected().empty() && !myRecognizer.equalTest(myVideo.getLastFacesDetected(), lastFacesDetected)) // equalTest keeps the same faces from being recognized again and again
		{
			lastFacesDetected = myVideo.getLastFacesDetected(); // get all faces currently in the video stream
			i = 0;
			for (std::vector<cv::Mat>::iterator face = lastFacesDetected.begin(); face != lastFacesDetected.end(); face++, i++)//for each face detected
			{
				myVideo.setImgToPrint(*face); // show the detected face's region of interest in a second window owned by the video thread

				myRecognizer.askForAddImageToCurrentTrainingAndSave(*face, "customDatabase/"); // ask whether to add this face to the training set

				myVideo.setImgToPrint(imgNull); // clear the second window
			}
			//std::cout << "face analyzed = " << i << std::endl;
			time(&timer);
		}
		cv::waitKey(1);
	}
	return 0;
}
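The loop above relies on Recognizer::equalTest to skip a batch of faces that is identical to the one already processed; its implementation is not part of this example. Below is a hedged sketch of what such a comparison might do, assuming a pixel-wise equality check over two lists of face crops (the helper name facesEqual is illustrative, not from the project).

#include <opencv2/core.hpp>
#include <vector>

// Hypothetical comparison of two lists of face images: true only if every
// pair has the same size, type, and identical pixel values.
static bool facesEqual(const std::vector<cv::Mat>& a, const std::vector<cv::Mat>& b) {
    if (a.size() != b.size())
        return false;
    for (size_t i = 0; i < a.size(); ++i) {
        if (a[i].size() != b[i].size() || a[i].type() != b[i].type())
            return false;
        cv::Mat diff;
        cv::absdiff(a[i], b[i], diff);              // element-wise |a - b|
        if (cv::countNonZero(diff.reshape(1)) != 0) // any non-zero pixel means "different"
            return false;
    }
    return true;
}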
Example #5
File: main.cpp  Project: AlFoX/Challenges
int main(int argc, char *argv[])
{
    QCoreApplication a(argc, argv);
    QStringList args = a.arguments();

    QString inFile;
    QString outFile;

    if (argc > 1)
        inFile = args[1];
    if (argc > 2)
        outFile = args[2];

    if (inFile.isEmpty()) {
        std::cout << "run: story_1 \"input file\" [\"output file\"]" << std::endl;
        return 0;
    }

    Recognizer r;
    r.parseFile(inFile, outFile);

}
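The example parses its positional arguments by hand from QCoreApplication::arguments(). For comparison, here is a sketch of the same handling with Qt's QCommandLineParser (available since Qt 5.2); the argument names and help text are illustrative, not taken from the AlFoX/Challenges project.

#include <QCoreApplication>
#include <QCommandLineParser>
#include <iostream>

int main(int argc, char *argv[])
{
    QCoreApplication a(argc, argv);

    QCommandLineParser parser;
    parser.addHelpOption();
    parser.addPositionalArgument("input", "input file");
    parser.addPositionalArgument("output", "output file (optional)");
    parser.process(a);

    const QStringList pos = parser.positionalArguments();
    if (pos.isEmpty()) {
        std::cout << "run: story_1 \"input file\" [\"output file\"]" << std::endl;
        return 0;
    }

    QString inFile = pos.value(0);
    QString outFile = pos.value(1); // empty if not supplied

    // Recognizer r;
    // r.parseFile(inFile, outFile);
    return 0;
}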
Example #6
void *classify(void *threadid)
{
	Detector detector;
	long tid = (long)threadid;
	char img[100];
	char buf[1024];
	sprintf(img, "data/%ld.jpg", tid); // each thread classifies its own test image
	int* ids = new int[3];
	int* sims = new int[3];
	string* paths = new string[3];
	for (int j = 0; j < 10; j++){
		Mat landmarks;
		Mat face = detector.detectNorm(img, rec.getWidth(), rec.getHeight(), rec.getPatchSize(), landmarks, rec.getNumLandmarks(), true, false);
		int num = rec.classify(face, landmarks, 3, ids, sims, paths);
		//int num = rec.classify(img, 3, ids, sims, paths);
		for (int i = 0; i < num; i++){
			sprintf(buf, "thread %ld, prediction: %d %d, similarity: %d\n", tid, i, ids[i], sims[i]);
			cout << buf; // print each prediction as it is formatted
		}
	}
	delete[] ids;
	delete[] sims;
	delete[] paths;
	pthread_exit(NULL);
}
Example #7
void recognizing(string filename, vector<uint> ids) {

	Mat image = imread(filename);

	Recognizer* rcg = new Recognizer(0.70f, 0.065f);
	Preprocessor* pr = new Preprocessor(image);
    pr->preprocess(10);

    Mat cover;

    if(!pr->warpObject()) {
    	cout << "#WARP-UNSUCCESSFUL" << endl;
    	cover = image.clone();
    	image.release();
    	Size newSize = (cover.size().width > cover.size().height) ? Size(1024, 768) : Size(768, 1024);
    	resize(cover, cover, newSize);
    	cout << "using the whole image..." << endl;
    } else {
    	cover = pr->WarpedCover();
    }

    Mat trainingImage;

    for(size_t i = 0; i < ids.size(); ++i) {
    	cout << "training " << i << endl;
    	rcg->train(ids.at(i));
	}

	if(rcg->recognize(Preprocessor::kuwaharaNagaoFilter(cover)))
	{
   		Book result = rcg->LastResult();
        cout << "#BEGIN-RESULT " << result.Id() << " #END-RESULT";
    } else {
    	cout << "#BEGIN-RESULT null #END-RESULT";
    }
    cout << endl;

    // release heap-allocated helpers
    delete pr;
    delete rcg;
}
Example #8
int main(int argc, char** argv){
	struct timeval begin, end;
	gettimeofday(&begin, NULL);
	pthread_t threads[NUM_THREADS];

	  
	//rec.cleanSHM();
	//rec.loadModel(datpath);
	rec.loadModelFromSHM();
	gettimeofday(&end, NULL);
	double elapsed = (end.tv_sec - begin.tv_sec) +
		((end.tv_usec - begin.tv_usec) / 1000000.0);
	//rec.evaluate();
	cout << "Model loaded in " << elapsed << " seconds" << endl;
	int* ids = new int[3];
	int* sims = new int[3];
	string* paths = new string[3];
	int count = 0;
	
	/*while(true){
		cout<<"please input test img"<<endl;
		string filename, simpath;
		cin>>filename;
		//filename = "data/test.jpg";
		int num = rec.classify(filename.data(), 3, ids, sims, paths);
		cout<<count<<" "<<num<<endl;
		count++;
		for (int i = 0; i < num; i++){
			cout<<"Prediction "<<i<<" "<<ids[i]<<" similarity: "<<sims[i]<<endl;;
			cout<<"Most similar image: "<<paths[i]<<endl;
		}
	}*/
	for(int i=0; i < NUM_THREADS; i++ ){
		cout << "main() : creating thread, " << i << endl;
		int rc = pthread_create(&threads[i], NULL, classify, (void *)i);
		if (rc){
			cout << "Error: unable to create thread, " << rc << endl;
			exit(-1);
		}
	}
	pthread_exit(NULL); 
	return 0;
}
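main() above ends with pthread_exit(NULL), which keeps the worker threads alive after main returns but never frees ids, sims, and paths (they are only used by the commented-out interactive block). Below is a sketch of an alternative ending that joins the workers so the buffers can be released; this fragment would replace the pthread_exit call and the unreachable return.

	// wait for every worker to finish, then release the result buffers
	for (int i = 0; i < NUM_THREADS; i++) {
		pthread_join(threads[i], NULL);
	}
	delete[] ids;
	delete[] sims;
	delete[] paths;
	return 0;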
Example #9
File: main.cpp  Project: Ipallis/Fritzing
int main (int argc, char *argv[])
{
  QCoreApplication app (argc, argv);

  bool generate_dot = false;
  bool generate_report = false;
  bool no_lines = false;
  bool debug_info = true;
  bool troll_copyright = false;
  QString file_name = 0;

  QStringList args = app.arguments ();
  args.removeFirst ();

  foreach (QString arg, args)
    {
      if (arg == QLatin1String ("-h") || arg == QLatin1String ("--help"))
        help_me ();

      else if (arg == QLatin1String ("-v") || arg == QLatin1String ("--verbose"))
        generate_report = true;

      else if (arg == QLatin1String ("--dot"))
        generate_dot = true;

      else if (arg == QLatin1String ("--no-lines"))
        no_lines = true;

      else if (arg == QLatin1String ("--no-debug"))
        debug_info = false;

      else if (arg == QLatin1String ("--troll"))
        troll_copyright = true;

      else if (file_name.isEmpty ())
        file_name = arg;

      else
        qerr << "*** Warning. Ignore argument `" << arg << "'" << endl;
    }

  if (file_name.isEmpty ())
    {
      help_me ();
      exit (EXIT_SUCCESS);
    }

  Grammar grammar;
  Recognizer p (&grammar, no_lines);

  if (! p.parse (file_name))
    exit (EXIT_FAILURE);

  if (grammar.rules.isEmpty ())
    {
      qerr << "*** Fatal. No rules!" << endl;
      exit (EXIT_FAILURE);
    }

  else if (grammar.start == grammar.names.end ())
    {
      qerr << "*** Fatal. No start symbol!" << endl;
      exit (EXIT_FAILURE);
    }

  grammar.buildExtendedGrammar ();
  grammar.buildRuleMap ();

  Automaton aut (&grammar);
  aut.build ();

  CppGenerator gen (p, grammar, aut, generate_report);
  gen.setDebugInfo (debug_info);
  gen.setTrollCopyright (troll_copyright);
  gen ();

  if (generate_dot)
    {
      DotGraph genDotFile (qout);
      genDotFile (&aut);
    }

  else if (generate_report)
    {
      ParseTable genParseTable (qout);
      genParseTable(&aut);
    }

  return EXIT_SUCCESS;
}
Example #10
void StrokesRecognizer::onRecognizeClicked() {
    Recognizer rec;
    QString recognizedChar = rec.recognize(mpCanvas->strokes());

    mpCharacterResult->setText(recognizedChar);
}
Example #11
int main(int argc,char** argv){
	if (argc < 2){
		std::cout<<"Please enter <input.pcd> file"<<std::endl;
		return 0;
	}
	if (pcl::io::loadPCDFile (argv[1], *model) < 0)
	{
    	std::cout << "Error loading model cloud." << std::endl;
    	return (-1);
  	}
  	model_name = argv[1];
  	model_name = model_name.substr(0,model_name.size()-4);
  	if(pcl::console::find_switch(argc,argv,"-vfh")){
  		vfh = true;
  	}
  	else if(pcl::console::find_switch(argc,argv,"-rv")){
  		std::cout<<"performing Resultant vector feature calculation"<<std::endl;
  		rv = true;
  	}else{
  		std::cout<<"no algorithm specified using default algorithm vfh"<<std::endl;
  		vfh = true;
  	}
  	if (pcl::console::find_switch (argc, argv, "-ds"))
  	{
    	_downsample = true;
    	std::cout<<"performing downsampling on the input file"<<std::endl;
  	}
  	if (pcl::console::find_switch (argc, argv, "-sn"))
  	{
    	show_normals = true;
    	std::cout<<"showing calclated normals"<<std::endl;
  	}
  	if(_downsample){
  		rec.setInputCloud(model,_leaf_size);
  		std::cout<<"saving downsampled file to model_down.pcd"<<std::endl;
  		pcl::io::savePCDFileASCII ("model_down.pcd", *(rec.getCloud()));
  	}
  	else{
  		rec.setInputCloud(model);
  		std::cout<<"setting input model without further downsampling"<<std::endl;
  	}
  	if(pcl::console::find_switch(argc,argv,"--showaxis")){
  		_show_axis = true;
  	}
	if(vfh){
		std::cout<<"estimating VFH features"<<std::endl;
		rec.Estimate_VFH_Features();
	}
	else if(rv){
		std::cout<<"estimating features using the resultant vector"<<std::endl;
		rec.Estimate_RV_Features();
		PointNormalCloudT::Ptr cloud (new PointNormalCloudT);
		pcl::PointCloud<pcl::Normal>::Ptr norm_cloud (new pcl::PointCloud<pcl::Normal>);
		cloud = rec.getPointNormalCloud();
		norm_cloud = rec.getNormalCloud();
		pcl::PointCloud<pcl::PointXYZ>::Ptr plaincloud (new pcl::PointCloud<pcl::PointXYZ>);
		pcl::copyPointCloud(*cloud,*plaincloud);
		//pcl::PointXYZ mass_center(rec.rv_centroid.x,rec.rv_centroid.y,rec.rv_centroid.z);
		pcl::PointXYZ mass_center(0,0,0);
		pcl::PointXYZ kinectZ(0,0,-1);
		pcl::PointXYZ res_vec (rec.rv_resultant.normal_x,rec.rv_resultant.normal_y,rec.rv_resultant.normal_z);
		//pcl::visualization::PointCloudColorHandlerRGBField<pcl::PointXYZ> rgb(plaincloud);
        //viewer.addPointCloud<pcl::PointXYZ> (plaincloud, rgb, "model_cloud");
        
        pcl::visualization::PointCloudColorHandlerCustom<pcl::PointNormal> single_color(cloud, double(0), double(255), double(0));
        viewer.addPointCloud(cloud,single_color,"sample cloud");
        if(_show_axis){
        	viewer.addLine(mass_center,res_vec,1.0f,0.0f,0.0f,"resultantvector");
        	viewer.addLine(mass_center,kinectZ,1.0f,1.0f,0.0f,"KinectZ");
        }
        std::cout<<"resultant vector :"<<res_vec.x<<" i"<<" + "<<res_vec.y<<" j"<<" + "<<res_vec.z<<" k"<<std::endl;
        if(show_normals){
        	std::cout<<"showing 1 in "<<normalsratio<<" normals"<<std::endl;
        	viewer.addPointCloudNormals<pcl::PointNormal,pcl::Normal>(cloud, norm_cloud,normalsratio, 0.05, "normalscloud");
        }
        while(!viewer.wasStopped())
        	viewer.spinOnce();
	}
	std::cout<<"feature calculation complete"<<std::endl;
	
	ofstream myfile;
	
	if (vfh){
		std::stringstream ss;
		ss<<"vfh_"<<model_name<<".txt";
		myfile.open(ss.str().c_str());
		for(size_t k =0 ;k<308;k++){
			if(k != 307)
				myfile << rec._vfh_features->points[0].histogram[k]<<",";
			else 
				myfile << rec._vfh_features->points[0].histogram[k];
		}
		std::cout<<"wrote the histogram to file :" <<ss.str()<<std::endl;
		myfile.close();
	}else if(rv){
		std::stringstream ss;
		ss<<"rv_"<<model_name<<".txt";
		myfile.open(ss.str().c_str());
		for(int k = 0;k<181;k++){
			if(k != 180)
				myfile << rec._rv_features_181[k]<<",";
			else 
				myfile << rec._rv_features_181[k];
		}
		std::cout<<"wrote the histogram to file: "<< ss.str()<<std::endl;
		//myfile.open(ss.c_str());
	}
    
}