int main(int argc, char **argv) { unsigned short *image565 = screen_init(); struct image *image = image_new(WIDTH, HEIGHT); struct image *font = image_new(BLOCK_X, BLOCK_Y*50); struct Glyph glyph[50]; int i; recognize_init(); event_init(); image_load(font, "data.raw"); memset(glyph, 0, sizeof(glyph)); printf("Press any key to start..."); getc(stdin); printf("Recognizing 1~25 ...\n"); screen_capture(image565); rgb565_to_rgb24(image->buf, image565); threshold(THRESHOLD, image->buf); recognize(image, font, glyph, 0); for (i = 0; i < 25; ++i) { send_touch(glyph[i].x, glyph[i].y); usleep(100); } printf("\n\nPress any key to continue..."); getc(stdin); printf("Recognizing 26~50 ...\n"); screen_capture(image565); rgb565_to_rgb24(image->buf, image565); threshold(THRESHOLD, image->buf); recognize(image, font, glyph, 1); for (i = 24; i < 50; ++i) { send_touch(glyph[i].x, glyph[i].y); usleep(100); } image_destroy(font); event_destroy(); image_destroy(image); screen_destroy(image565); return 0; }
Widget::Widget(QWidget *parent) : QWidget(parent)
{
    // Recognizer backend; wired to the sample list and the drawing plot.
    Potencial *potencial = new Potencial();

    connect(potencial, SIGNAL(signalNumberOfSample(int,QVector<double>)),
            &samples, SIGNAL(signalRecognizedIndex(int,QVector<double>)));
    connect(&samples, SIGNAL(signalSamples(QVector<QVector<QLabel*> >)),
            potencial, SLOT(slotSetSamples(QVector<QVector<QLabel*> >)));
    connect(&plot, SIGNAL(pattern(QImage)),
            potencial, SLOT(slotSetPattern(QImage)));
    connect(&plot, SIGNAL(recogrize()),
            potencial, SLOT(recognize()));

    // Center the plot horizontally; charset and samples stack below it.
    QHBoxLayout *plotRow = new QHBoxLayout;
    plotRow->addStretch();
    plotRow->addWidget(&plot);
    plotRow->addStretch();

    QVBoxLayout *mainColumn = new QVBoxLayout;
    mainColumn->addLayout(plotRow);
    mainColumn->addWidget(&charset);
    mainColumn->addWidget(&samples);
    setLayout(mainColumn);

    connect(&charset, SIGNAL(updateCharset(QString)),
            &samples, SLOT(slotUpdateSamples(QString)));

    setMinimumSize(800, 600);
    charset.setText("iDKfA");
}
void IsolateHMM::patchRun(vector<SLR_ST_Skeleton> vSkeletonData, vector<Mat> vDepthData, vector<IplImage*> vColorData, int rank[], double score[]) { SLR_ST_Skeleton skeletonCurrent; //The 3 current data. Mat depthCurrent; IplImage *frameCurrent; //Decide the frames to be used or not. frameSelect is the mask. int frameSize = vColorData.size(); int heightLimit = min(vSkeletonData[0]._2dPoint[7].y, vSkeletonData[0]._2dPoint[11].y) - 20; for (int i=0; i<frameSize; i++) { frameSelect_inMatch(heightLimit, vSkeletonData[i]._2dPoint[7].y, vSkeletonData[i]._2dPoint[11].y); } //Read in data and extract the hand postures in each available frame. for (int i=0; i<frameSize; i++) { skeletonCurrent = vSkeletonData[i]; depthCurrent = vDepthData[i]; frameCurrent = vColorData[i]; int framID = i; readIndata(skeletonCurrent, depthCurrent, frameCurrent, framID); } //Extract the SP and hog feature, and recognize recognize(rank, score); }
bool worldRepresentationROS::recognizeROSWrapper (recognition_srv_definitions::multiview_recognize::Request & req, recognition_srv_definitions::multiview_recognize::Response & response)
{
    // Reject empty scans up front.
    if (req.cloud.data.size() == 0) {
        ROS_ERROR("Point cloud is empty!");
        return false;
    }

    // Convert the incoming ROS message into a PCL cloud.
    pcl::PointCloud<pcl::PointXYZRGB>::Ptr pInputCloud (new pcl::PointCloud<pcl::PointXYZRGB>);
    pcl::fromROSMsg(req.cloud, *pInputCloud);

    const std::string scene_name = req.scene_name.data;
    const std::string view_name = req.view_name.data;
    const size_t timestamp = req.timestamp.data.toNSec();

    // Copy the global transform into a plain vector for recognize().
    std::vector<double> global_trans_v;
    global_trans_v.reserve(req.transform.size());
    for (size_t idx = 0; idx < req.transform.size(); idx++) {
        global_trans_v.push_back(req.transform[idx]);
    }

    // Run multiview recognition, then fill in the service response.
    std::vector<ModelTPtr> models_mv;
    std::vector<Eigen::Matrix4f, Eigen::aligned_allocator<Eigen::Matrix4f> > transforms_mv;
    bool rec_error = recognize(pInputCloud, scene_name, view_name, timestamp,
                               global_trans_v, models_mv, transforms_mv);
    respondSrvCall(req, response);
    return rec_error;
}
DiagramRecognizer::DiagramRecognizer(QWidget *parent) : QWidget(parent)
{
    // The three action buttons share one bold Times font.
    const QFont buttonFont("Times", 18, QFont::Bold);

    QPushButton *clearButton = new QPushButton(tr("Clear"));
    clearButton->setFont(buttonFont);
    QPushButton *recognizeButton = new QPushButton(tr("Recognize"));
    recognizeButton->setFont(buttonFont);
    QPushButton *recognizeImageButton = new QPushButton(tr("Recognize Image"));
    recognizeImageButton->setFont(buttonFont);

    Output *PrintedDiagram = new Output;

    connect(clearButton, SIGNAL(clicked()), this, SLOT(clear()));
    connect(recognizeButton, SIGNAL(clicked()), this, SLOT(recognize()));
    connect(recognizeImageButton, SIGNAL(clicked()), this, SLOT(recognizeImage()));
    connect(this, SIGNAL(print(PathVector, Bitmap *, FormSegmentator *)),
            PrintedDiagram, SLOT(draw(PathVector, Bitmap *, FormSegmentator *)));

    // Buttons down column 0; the output widget spans rows in column 1.
    QGridLayout *grid = new QGridLayout;
    grid->addWidget(clearButton, 0, 0);
    grid->addWidget(recognizeButton, 1, 0);
    grid->addWidget(recognizeImageButton, 2, 0);
    grid->addWidget(PrintedDiagram, 1, 1, 3, 1);
    grid->setColumnStretch(1, 10);
    setLayout(grid);

    // Initial recognizer state: nothing recognized, component point off-canvas.
    mRecognized = false;
    mComponentPoint.setX(-1000);
    mComponentPoint.setY(-1000);
    mGesturesManager = new MixedGesturesManager();
    mAbstractRecognizer = new AbstractRecognizer(mGesturesManager, SimpleFormsInitializer::initialForms());
}
/*
 * filterObjects -- keep only the objects in `test` that recognize() labels
 * as matching something in `known`: erase the pixels of rejected objects
 * from `im`, compact the surviving objects to the front of test->objs, and
 * shrink the array to the survivor count.
 */
void filterObjects(Image *im, ObjectDB *test, ObjectDB *known)
{
    int i, j, px, n = 0;

    recognize(test, known);

    /* Count the labeled (surviving) objects. */
    for (i = 0; i < test->nObjects; ++i) {
        if (test->objs[i].label) {
            n++;
        }
    }

    /* Blank every pixel of an unlabeled object.  Pixel values are 1-based
     * object indices; 0 is background. */
    for (i = 0; i < getNRows(im); ++i) {
        for (j = 0; j < getNCols(im); ++j) {
            px = getPixel(im, i, j);
            if ((px > 0) && !test->objs[px - 1].label) {
                setPixel(im, i, j, 0);
            }
        }
    }

    /* Compact the labeled objects into slots [0, n): advance i past
     * already-labeled slots and copy a labeled tail object into the gap. */
    i = 0;
    for (j = n; j < test->nObjects && i < n; ++j) {
        while (i < n && test->objs[i].label)
            i++;
        if (test->objs[j].label) {
            memcpy(test->objs + i, test->objs + j, sizeof(Object));
        }
    }

    /* BUG FIX: realloc(p, 0) may legitimately return NULL, which the old
     * code mistook for out-of-memory and aborted on.  Handle n == 0
     * explicitly, and keep the old pointer until realloc succeeds. */
    if (n == 0) {
        free(test->objs);
        test->objs = NULL;
    } else {
        Object *shrunk = (Object *)realloc(test->objs, n * sizeof(Object));
        if (shrunk == NULL) {
            fprintf(stderr, "ran out of memory while filtering objects.\n");
            exit(1);
        }
        test->objs = shrunk;
    }
    test->nObjects = n;
}
/*
 * initializeArray -- scan the AFM CharMetrics section for the "space"
 * glyph and fill all 256 slots of the width array cwi[] with its x-width
 * (falling back to 250 when no space glyph is found before the section
 * ends).  The stream position is saved on entry and restored before
 * returning, so the caller can re-parse the section.  Returns an AFM
 * status code (0 / normalEOF / AFM_earlyEOF / AFM_parseError).
 */
static int initializeArray(FILE *fp, register int *cwi) { BOOL cont = TRUE, found = FALSE; long opos = ftell(fp); int code = 0, width = 0, i = 0, error = 0; register char *keyword; while (cont) { keyword = token(fp); if (keyword == NULL) { error = AFM_earlyEOF; break; /* get out of loop */ } switch(recognize(keyword)) { case COMMENT: keyword = linetoken(fp); break; case CODE: code = atoi(token(fp)); break; case XWIDTH: width = atoi(token(fp)); break; case CHARNAME: keyword = token(fp); if (MATCH(keyword, Space)) { cont = FALSE; found = TRUE; } break; case ENDCHARMETRICS: cont = FALSE; break; case ENDFONTMETRICS: cont = FALSE; error = normalEOF; break; case NOPE: default: error = AFM_parseError; break; } /* switch */ } /* while */ if (!found) width = 250; for (i = 0; i < 256; ++i) cwi[i] = width; fseek(fp, opos, 0); return(error); } /* initializeArray */
/*
 * Batch re-encoder entry point.
 * Usage: prog [cp1251|CP1251] [-r] file...
 *   cp1251 : force encoding table 1 for every file.
 *   -r     : detect the encoding from the first file and reuse it for all.
 * Otherwise the encoding is detected per file.  Each file is rewritten
 * in place through a temporary file "tmp_encode".
 *
 * Fixes: every fopen() result is now checked (the old code crashed on a
 * missing file), and `x` is initialized.
 */
int main(int argc, char** argv){
    int x = 0, i, l;
    char name[128];
    FILE *f, *f1;

    if (argc < 2) {
        printf("\nError! Need at least 1 argument!!!\n");
        exit(1);
    }

    int global = 0; /* when set, the encoding is decided only once */

    if (strcmp(argv[1], "cp1251") == 0 || strcmp(argv[1], "CP1251") == 0) {
        --argc; ++argv;
        global = 1;
        x = 1;
    }

    if (strcmp(argv[1], "-r") == 0 && global != 1) {
        printf("Recognize first file\n");
        --argc; ++argv; /* analog of shell's "shift" */
        global = 1;
        f = fopen(argv[1], "r");
        if (f == NULL) { /* BUG FIX: don't dereference a failed fopen */
            printf("error opening file %s\n", argv[1]);
            exit(1);
        }
        x = recognize(f);
        fclose(f);
    }

    for (i = 1; i < argc; i++) {
        f = fopen(argv[i], "r");
        if (f == NULL) { /* BUG FIX: skip unreadable files instead of crashing */
            printf("error opening file %s\n", argv[i]);
            continue;
        }
        if (global == 0)
            x = recognize(f);
        sprintf(name, "enc.%d", i);
        f1 = fopen("tmp_encode", "w");
        if (f1 == NULL) {
            printf("error opening temporary file\n");
            fclose(f);
            exit(1);
        }
        l = fgetc(f);
        while (l != EOF) {
            fputc(encode(l, x), f1); /* this is the encoding itself */
            l = fgetc(f);
        }
        fclose(f1);
        fclose(f);
        unlink(argv[i]); /* rename tmp -> argv[i] */
        if (link("tmp_encode", argv[i]) < 0)
            printf("error moving file %s\n", argv[i]);
        unlink("tmp_encode");
        printf("File %s is done\n", argv[i]);
    }
    printf("\nAll files are processed!!!\n");
    return 0;
}
void InputZone::add(const CompositeImage & image)
{
    // If the most recent stored image is non-empty, advance the zone via
    // step() and discard it so the new image takes its place.
    const bool replaceLast = !m_images.empty() && !m_images.back().empty();
    if (replaceLast) {
        step();
        m_images.pop_back();
    }
    m_images.push_back(image);
    recognize();
}
/*
 * Reads (N, n) pairs until "0 0": an NxN character grid and an nxn pattern.
 * Precomputes the pattern's three 90-degree rotations and counts, for each
 * orientation, how many grid positions match (delegated to recognize()).
 */
int main(void){
    int r, c;

    while (scanf("%d %d\n", &N, &n) > 0) {
        if ((0 == N) && (0 == n))
            break;

        int sol[4] = {0, 0, 0, 0};

        /* Read the big grid, then the base orientation of the pattern. */
        for (r = 0; r < N; r++)
            for (c = 0; c < N; c++)
                scanf(" %c", &big[r][c]);
        for (r = 0; r < n; r++)
            for (c = 0; c < n; c++)
                scanf(" %c", &small[0][r][c]);

        /* Each rotation is the previous one turned 90 degrees. */
        for (r = 0; r < n; r++)
            for (c = 0; c < n; c++)
                small[1][c][n - r - 1] = small[0][r][c];
        for (r = 0; r < n; r++)
            for (c = 0; c < n; c++)
                small[2][c][n - r - 1] = small[1][r][c];
        for (r = 0; r < n; r++)
            for (c = 0; c < n; c++)
                small[3][c][n - r - 1] = small[2][r][c];

        /* Only anchor a full match check where the corner character agrees. */
        for (r = 0; r < N; r++)
            for (c = 0; c < N; c++) {
                if (big[r][c] == small[0][0][0]) sol[0] += recognize(r, c, 0);
                if (big[r][c] == small[1][0][0]) sol[1] += recognize(r, c, 1);
                if (big[r][c] == small[2][0][0]) sol[2] += recognize(r, c, 2);
                if (big[r][c] == small[3][0][0]) sol[3] += recognize(r, c, 3);
            }

        printf("%d %d %d %d\n", sol[0], sol[1], sol[2], sol[3]);
    }
    return 0;
}
int listening ( void ) { debug(); FD_COPY ( & listeningToRead, & listenersCanRead ); FD_COPY ( & listeningToWrite, & listenersCanWrite ); FD_COPY ( & listeningForFailure, & listenersHaveFailed ); //struct timeval timeout; //timeout.tv_sec = 2; int result = select ( getHighestFileDescriptor () + 1, & listenersCanRead, & listenersCanWrite, & listenersHaveFailed, NULL //& timeout ); if ( result < 0 ) { error ( __FILE__ ); stillListening = false; } for ( int listener = 0; listener < numberOfListeners; listener++ ) recognize ( listener ); return stillListening; }
/**
 * Runs handwriting recognition on the collected stroke points and returns
 * the top-10 candidate characters concatenated as a UTF-8 string (3 bytes
 * per candidate).
 *
 * Fixes over the original:
 *  - each candidate occupies bytes [2*i, 2*i+1] of `candidate`; the old
 *    code read overlapping bytes [i, i+1], corrupting every candidate
 *    after the first;
 *  - the 2-byte heap `utf8` buffer was read as 3 bytes (overflow) and
 *    both heap buffers leaked each iteration -- stack buffers are used,
 *    with room for a 3-byte UTF-8 sequence plus NUL;
 *  - the non-standard VLA is replaced with std::vector.
 */
string WritingTool::recognizeCharacter(){
    vector<cv::Point> tem_points(points_);
    const int pt_num = tem_points.size();

    // Flatten the points into (x, y) pairs, then append the recognizer's
    // end-of-stroke (-1, 0) and end-of-input (-1, -1) sentinels.
    vector<int> pt(pt_num * 2 + 4);
    for (int i = 0; i < pt_num; i++) {
        pt[2 * i] = tem_points[i].x;
        pt[2 * i + 1] = tem_points[i].y;
    }
    pt[pt_num * 2] = -1;
    pt[pt_num * 2 + 1] = 0;
    pt[pt_num * 2 + 2] = -1;
    pt[pt_num * 2 + 3] = -1;

    const int candidate_count = 10;
    float fScan[candidate_count * 2];
    char candidate[candidate_count * 2]; // two bytes per candidate character
    recognize(candidate, pt.data(), pt_num, 0x00000400, 0, fScan);

    string cdd = "";
    for (int i = 0; i < candidate_count; i++) {
        char unicode[2] = { candidate[2 * i], candidate[2 * i + 1] };
        char utf8[4] = { 0, 0, 0, 0 };
        WritingTool::UnicodeToUTF8(utf8, unicode);
        cdd += string(utf8, 3);
    }

    LOG(ERROR)<<"No.1 candidate: "<<cdd.substr(0,3)
              <<"No.2 candidate: "<<cdd.substr(3,3)
              <<"No.3 candidate: "<<cdd.substr(6,3)
              <<"No.4 candidate: "<<cdd.substr(9,3)
              <<"No.5 candidate: "<<cdd.substr(12,3)
              <<"No.6 candidate: "<<cdd.substr(15,3)
              <<"No.7 candidate: "<<cdd.substr(18,3)
              <<"No.8 candidate: "<<cdd.substr(21,3)
              <<"No.9 candidate: "<<cdd.substr(24,3)
              <<"No.10 candidate: "<<cdd.substr(27,3)
              ;
    return cdd;
}
cameraInformation camera(){
    // Start from a cleared detection record, then run one recognition pass
    // (the trailing 1 is the requested recognition count; per the original
    // note, wait ~10 s after init before stepping in front of the camera).
    cameraInformation info;
    info.flagDetect = false;
    info.flagRight = false;
    info.flagLeft = false;
    info.rangePers = 0;
    recognize(cap, classifier, kevinRER, 1, &info);
    return info;
}
void MainForm::recognizeAll() { QStringList files = sideBar->getFileNames(); if (files.empty()) recognize(); else { QProgressDialog progress(trUtf8("Recognizing pages..."), trUtf8("Abort"), 0, files.count(), this); //progress.setWindowModality(Qt::WindowModal); progress.setWindowTitle("YAGF"); progress.show(); progress.setValue(0); for (int i = 0; i < files.count(); i++) { progress.setValue(i); if (progress.wasCanceled()) break; //rotation = ((FileToolBar *) m_toolBar)->getRotation(files.at(i)); loadFile(files.at(i)); recognize(); } } }
DiagramRecognizer::DiagramRecognizer(QWidget *parent) :
    QMainWindow(parent),
    ui(new Ui::DiagramRecognizer)
{
    ui->setupUi(this);

    // Hook the designer-created buttons to their handlers.
    connect(ui->clearButton, SIGNAL(clicked()), this, SLOT(clear()));
    connect(ui->recognizeButton, SIGNAL(clicked()), this, SLOT(recognize()));
    connect(ui->recognizeImageButton, SIGNAL(clicked()), this, SLOT(recognizeImage()));

    // Reset drawing state; the component point starts far off-canvas.
    mRecognized = false;
    mLeftButtonPressed = false;
    mComponentPoint.setX(-1000);
    mComponentPoint.setY(-1000);

    mGesturesManager = new MixedGesturesManager();
    mAbstractRecognizer = new AbstractRecognizer(mGesturesManager, SimpleFormsInitializer::initialForms());
}
/* Entry point: dispatches to training ("train") or recognition ("test"). */
int main( int argc, char** argv )
{
    // Exactly one command argument is required.
    if (argc != 2) {
        printUsage();
        return 1;
    }

    if (strcmp(argv[1], "train") == 0)
        learn();
    else if (strcmp(argv[1], "test") == 0)
        recognize();
    else
        printf("Unknown command: %s\n", argv[1]);

    return 0;
}
/*
 * Entry point: adds a new face to the database ("addFace") or runs
 * recognition ("test"), dispatching on the first command-line argument.
 * BUG FIX: `void main` is not a standard signature -- main must return
 * int, and a usage error now yields a nonzero exit status.
 */
int main( int argc, char** argv )
{
    /* At least two arguments beyond the program name are required. */
    if (argc < 3) {
        printUsage();
        return 1;
    }
    /* Dispatch on the command word: learn a new face or recognize. */
    if (!strcmp(argv[1], "addFace"))
        learn();
    else if (!strcmp(argv[1], "test"))
        recognize();
    else {
        printf("Unknown command: %s\n", argv[1]);
        printUsage();
    }
    return 0;
}
/*
 * generator::identify -- identify the isomorphism type of a set.
 * Traces the set (data, sz) through the orbit tree via recognize(),
 * which yields a final node and fills `transporter` with a group element
 * mapping the set onto its canonical representative.  The orbit index at
 * level sz is final_node minus the first oracle node at that level, and
 * is returned through orbit_at_level.  The stabilizer order is fetched
 * only for logging.  verbose_level >= 1 enables the progress output.
 */
void generator::identify(INT *data, INT sz, INT *transporter, INT &orbit_at_level, INT verbose_level) { INT f_v = (verbose_level >= 1); //INT f_vv = (verbose_level >= 2); INT f_implicit_fusion = FALSE; INT final_node; if (f_v) { cout << "generator::identify" << endl; } if (f_v) { cout << "generator::identify identifying the set "; INT_vec_print(cout, data, sz); cout << endl; } if (f_v) { cout << "generator::identify before recognize" << endl; } recognize(this, data, sz, transporter, f_implicit_fusion, final_node, verbose_level); if (f_v) { cout << "generator::identify after recognize" << endl; } longinteger_object go; orbit_at_level = final_node - first_oracle_node_at_level[sz]; get_stabilizer_order(sz, orbit_at_level, go); if (f_v) { cout << "generator::identify trace returns final_node = " << final_node << " which is isomorphism type " << orbit_at_level << " with ago=" << go << endl; } if (f_v) { cout << "generator::identify transporter:" << endl; A->element_print_quick(transporter, cout); } if (f_v) { cout << "generator::identify done" << endl; } }
/**
 * Detects faces in `in`, draws their bounding boxes and a face counter on
 * `out` (a clone of `in`), runs recognition on the first detected face,
 * labels it "Simon" or "John Doe", and stores that face's bottom-right
 * corner via setCorner().
 *
 * Fix: std::exception is now caught by const reference -- catching by
 * value slices derived exception types and copies the object.
 */
void FaceDetector_Surf::process(const cv::Mat &in, cv::Mat &out) {
    // Work on a copy so the input stays untouched.
    out = in.clone();

    // Detect every face in the frame.
    std::vector<cv::Rect> faces;
    this->_classifier.detectMultiScale(out, faces, 1.2, 3, CV_HAAR_DO_CANNY_PRUNING, cv::Size(25, 25));

    // Draw a box around each detection.
    for (size_t i = 0; i < faces.size(); ++i) {
        cv::rectangle(out, faces.at(i), cv::Scalar(255, 0, 0), 3, 8, 0);
    }

    // Overlay the detection count in the top-left corner.
    cv::Point point;
    point.x = 20;
    point.y = 20;
    std::ostringstream faces_;
    faces_ << "Number of Faces : " << faces.size();
    cv::putText(out, faces_.str(), point, cv::FONT_HERSHEY_COMPLEX_SMALL, 0.8, cv::Scalar(255, 0, 0), 1, CV_AA);

    try {
        if (faces.size() > 0) {
            // Only the first face is recognized and labelled.
            bool recon = recognize(in, faces.at(0));
            std::string name = "";
            cv::Point label;
            label.x = ((faces.at(0)).br()).x + 10;
            label.y = ((faces.at(0)).br()).y + 10;
            if (recon) {
                name = "Simon";
            } else {
                name = "John Doe";
            }
            cv::putText(out, name, label, cv::FONT_HERSHEY_COMPLEX_SMALL, 0.8, cv::Scalar(255, 0, 0), 1, CV_AA);
            std::cout << "\n corner x avant :" << this->getCorner().x;
            std::cout << " | corner x apres :" << faces.at(0).br().x << std::endl;
            this->setCorner((faces.at(0)).br());
        }
    } catch (const std::exception &e) {
        // Best-effort: recognition failure must not abort detection output.
    }
    return;
}
// Arduino main loop: scans the sensor array by driving the two select lines
// (b1, b0) through the combinations 00, 01, 10 and sampling analog channels
// 1-4 after each change, collecting 11 values into readings[0..10].  If a
// command byte arrived over serial: a letter >= 'a' records the current
// readings as a new action for `peter` and echoes the stored highs over
// Bluetooth; any other byte has 32 added (lowercased) and names an action
// to remove.  Every pass ends by feeding the readings to the recognizer.
// NOTE(review): the code tests blueToothSerial.available() but reads the
// byte with Serial.read() -- confirm whether both should use the same port.
void loop() { digitalWrite(b1, LOW); digitalWrite(b0, LOW); readings[0]=analogRead(1); readings[1]=analogRead(2); readings[2]=analogRead(3); readings[3]=analogRead(4); digitalWrite(b0, HIGH); readings[4]=analogRead(1); readings[5]=analogRead(2); readings[6]=analogRead(3); readings[7]=analogRead(4); digitalWrite(b1, HIGH); digitalWrite(b0, LOW); readings[8]=analogRead(1); readings[9]=analogRead(2); readings[10]=analogRead(3); if(blueToothSerial.available ()) { char ch = Serial.read(); if (ch >='a') { peter=addAction(peter, readings, ch); int i; for(i=0;i<11;i++) { blueToothSerial.println(peter->action->high[i]); } } else { ch = ch + 32; removeAction(ch); } } p=recognize(readings, p); }
// Wires every control of the main window to its handler: color pickers,
// load/save/generate buttons, the recognize and calculate-values actions
// (also retriggered when either component spin box finishes editing), the
// zoom sliders and all display check boxes (each of which just redraws),
// and the dialogs showing the transformation and correlation matrices.
// The regret-matrix loader is intentionally left disconnected.
void MainWindow::setup_connections() { //connect buttons for choosing colors for distribution visualization connect(ui.chooseColorButton1, SIGNAL(clicked()), this, SLOT(choose_color_1())); connect(ui.chooseColorButton2, SIGNAL(clicked()), this, SLOT(choose_color_2())); //connect button for load distributions from file connect(ui.loadButton, SIGNAL(clicked()), this, SLOT(load())); //connect button for load regret matrix // connect(ui.loadRegretMatrixPushButton, SIGNAL(clicked()), this, SLOT(load_regret_matrix())); //connect button for saving selection connect(ui.buttonSaveSelection, SIGNAL(clicked()), this, SLOT(save_selection())); //connect button for generate selection connect(ui.generateButton, SIGNAL(clicked()), this, SLOT(generate())); //connect button for recognizing connect(ui.recognizeButton, SIGNAL(clicked()), this, SLOT(recognize())); //connect button for calculate values connect(ui.drawButton, SIGNAL(clicked()), this, SLOT(calculate_values())); connect(ui.component1, SIGNAL(editingFinished()), this, SLOT(calculate_values())); connect(ui.component2, SIGNAL(editingFinished()), this, SLOT(calculate_values())); //connect zoom sliders connect(ui.zoomX, SIGNAL(valueChanged(int)), this, SLOT(draw())); connect(ui.zoomY, SIGNAL(valueChanged(int)), this, SLOT(draw())); //connect checkboxes connect(ui.checkBoxDrawAxises, SIGNAL(clicked()), this, SLOT(draw())); connect(ui.checkBoxDrawReal, SIGNAL(clicked()), this, SLOT(draw())); connect(ui.checkBoxIsolines, SIGNAL(clicked()), this, SLOT(draw())); connect(ui.checkBoxMiddle, SIGNAL(clicked()), this, SLOT(draw())); connect(ui.checkBoxRecognized, SIGNAL(clicked()), this, SLOT(draw())); connect(ui.checkBoxSelection, SIGNAL(clicked()), this, SLOT(draw())); //connect button click action to transformationMatrixDialog show action connect(ui.transformationButton, SIGNAL(clicked()), this, SLOT(show_transformation_matrix())); connect(ui.showCorrelationMatrixPushButton1, SIGNAL(clicked()), this, 
SLOT(show_matrix_of_correlations1())); connect(ui.showCorrelationMatrixPushButton2, SIGNAL(clicked()), this, SLOT(show_matrix_of_correlations2())); }
FindPersonDialog::FindPersonDialog( QWidget *parent /* = NULL */ ) : QDialog( parent )
{
    findImage = NULL;

    // Face-path display: a read-only line edit with its label as buddy.
    faceLabel = new QLabel( tr( "人脸" ), this );
    faceLineEdit = new QLineEdit;
    faceLineEdit->setReadOnly( true );
    faceLabel->setBuddy( faceLineEdit );

    browsePushButton = new QPushButton( tr( "浏览..." ) );
    findPushButton = new QPushButton( tr( "识别" ) );
    cancelPushButton = new QPushButton( tr( "取消" ) );

    connect( browsePushButton, SIGNAL( clicked() ), this, SLOT( addFace() ) );
    connect( findPushButton, SIGNAL( clicked() ), this, SLOT( recognize() ) );
    connect( cancelPushButton, SIGNAL( clicked() ), this, SLOT( close() ) );

    // Top row: label + path + browse.  Bottom row: action buttons, right-aligned.
    QHBoxLayout *topRow = new QHBoxLayout;
    topRow->addWidget( faceLabel );
    topRow->addWidget( faceLineEdit );
    topRow->addWidget( browsePushButton );

    QHBoxLayout *bottomRow = new QHBoxLayout;
    bottomRow->addStretch();
    bottomRow->addWidget( findPushButton );
    bottomRow->addWidget( cancelPushButton );

    QVBoxLayout *rootLayout = new QVBoxLayout;
    rootLayout->addLayout( topRow );
    rootLayout->addLayout( bottomRow );
    setLayout( rootLayout );

    setWindowTitle( tr( "识别人脸" ) );
}
MainWindow::MainWindow(QWidget *parent) :
    QMainWindow(parent),
    ui(new Ui::MainWindow)
{
    ui->setupUi(this);

    // Button wiring; the teach button stays disconnected for now.
    connect(ui->browseSampleButton, SIGNAL(clicked()), this, SLOT(browseSampleImages()));
    connect(ui->browseTestButton, SIGNAL(clicked()), this, SLOT(browseTestImage()));
    connect(ui->recognizeButton, SIGNAL(clicked()), this, SLOT(recognize()));
    connect(ui->adjustButton, SIGNAL(clicked()), this, SLOT(adjustImage()));

    // Disabled until images have been loaded.
    ui->recognizeButton->setEnabled(false);
    ui->noiseSpinBox->setEnabled(false);
    ui->adjustButton->setEnabled(false);

    // Collect the designer-created sample labels for indexed access
    // (grid order: row-major, rows 0-2, columns 0-3).
    imageLabels[0] = ui->sample00ImageLabel;
    imageLabels[1] = ui->sample01ImageLabel;
    imageLabels[2] = ui->sample02ImageLabel;
    imageLabels[3] = ui->sample03ImageLabel;
    imageLabels[4] = ui->sample10ImageLabel;
    imageLabels[5] = ui->sample11ImageLabel;
    imageLabels[6] = ui->sample12ImageLabel;
    imageLabels[7] = ui->sample13ImageLabel;
    imageLabels[8] = ui->sample20ImageLabel;
    imageLabels[9] = ui->sample21ImageLabel;
    imageLabels[10] = ui->sample22ImageLabel;
    imageLabels[11] = ui->sample23ImageLabel;

    // One similarity readout per sample row.
    similarityLabels[0] = ui->similarity0Label;
    similarityLabels[1] = ui->similarity1Label;
    similarityLabels[2] = ui->similarity2Label;

    // Lock the window at its designed size.
    this->setFixedSize(this->size());
}
//Faz a chamada para a opção escolhida void realize_action(int in){ switch (in){ case 1: puts("Em qual diretório estão as imagens?\n"); puts("Use . para o diretório atual\n"); char dir[MAX_STRING_LENGTH]; get_input_char(dir); //DEBUG printf("READ: %s", dir); create_vector(dir); puts("Treinamento realizado com sucesso.\n"); break; case 2: puts("Qual o endereço da imagem que deseja reconhecer?\n"); char file[MAX_STRING_LENGTH]; get_input_char(file); recognize(file); break; case 3: puts("Tchau!\n"); break; default: puts("Este não é um comando válido!\n"); break; } }
/*
 * Entry point: runs the recognizer over a fixed sample string.
 * Fixes: binding a string literal to a mutable char* is a deprecated
 * conversion (ill-formed in C++11) and UB if the callee writes through
 * it -- a writable array is used instead.  The explicit "\0" suffix was
 * redundant: string literals are already NUL-terminated.
 */
int main() {
    char input1[] = "zabcbababcbabbac";
    printf("heheh ---%s \n", input1);
    recognize(input1);
    return 0;
}
int main( int argc, char** argv ) { int selected_image_num = 1; char show_ch = 's'; IplImage* images[NUM_IMAGES]; IplImage* selected_image = NULL; IplImage* temp_image = NULL; IplImage* red_point_image = NULL; IplImage* connected_reds_image = NULL; IplImage* connected_background_image = NULL; IplImage* result_image = NULL; CvSeq* red_components = NULL; CvSeq* background_components = NULL; // Load all the images. for (int file_num=1; (file_num <= NUM_IMAGES); file_num++) { if( (images[0] = cvLoadImage("./RealRoadSigns.jpg",-1)) == 0 ) return 0; if( (images[1] = cvLoadImage("./RealRoadSigns2.jpg",-1)) == 0 ) return 0; if( (images[2] = cvLoadImage("./ExampleRoadSigns.jpg",-1)) == 0 ) return 0; if( (images[3] = cvLoadImage("./Parking.jpg",-1)) == 0 ) return 0; if( (images[4] = cvLoadImage("./NoParking.jpg",-1)) == 0 ) return 0; } //load the template images and do normalization. IplImage* templates[TEMPLATES_NUM]; load_templates(templates); normalize_template(templates); // Explain the User Interface printf( "Hot keys: \n" "\tESC - quit the program\n" "\t1 - Real Road Signs (image 1)\n" "\t2 - Real Road Signs (image 2)\n" "\t3 - Synthetic Road Signs\n" "\t4 - Synthetic Parking Road Sign\n" "\t5 - Synthetic No Parking Road Sign\n" "\tr - Show red points\n" "\tc - Show connected red points\n" "\th - Show connected holes (non-red points)\n" "\ts - Show optimal signs\n" ); // Create display windows for images //cvNamedWindow( "Original", 1 ); cvNamedWindow( "Processed Image", 1 ); // Setup mouse callback on the original image so that the user can see image values as they move the // cursor over the image. cvSetMouseCallback( "Original", on_mouse_show_values, 0 ); window_name_for_on_mouse_show_values="Original"; image_for_on_mouse_show_values=selected_image; int user_clicked_key = 0; do { // Create images to do the processing in. 
if (red_point_image != NULL) { cvReleaseImage( &red_point_image ); cvReleaseImage( &temp_image ); cvReleaseImage( &connected_reds_image ); cvReleaseImage( &connected_background_image ); cvReleaseImage( &result_image ); } selected_image = images[selected_image_num-1]; red_point_image = cvCloneImage( selected_image ); result_image = cvCloneImage( selected_image ); temp_image = cvCloneImage( selected_image ); connected_reds_image = cvCloneImage( selected_image ); connected_background_image = cvCloneImage( selected_image ); // Process image image_for_on_mouse_show_values = selected_image; find_red_points( selected_image, red_point_image, temp_image ); red_components = connected_components( red_point_image, connected_reds_image ); invert_image( red_point_image, temp_image ); background_components = connected_components( temp_image, connected_background_image ); determine_optimal_sign_classification( selected_image, red_point_image, red_components, background_components, result_image ); //recognize the result_image(with white/black/red only) with the processed templates. recognize(selected_image,result_image,templates); // Show the original & result //cvShowImage( "Original", selected_image ); do { if ((user_clicked_key == 'r') || (user_clicked_key == 'c') || (user_clicked_key == 'h') || (user_clicked_key == 's')) show_ch = user_clicked_key; switch (show_ch) { case 'c': cvShowImage( "Processed Image", connected_reds_image ); break; case 'h': cvShowImage( "Processed Image", connected_background_image ); break; case 'r': cvShowImage( "Processed Image", red_point_image ); break; case 's': default: cvShowImage( "Processed Image", result_image ); break; } user_clicked_key = cvWaitKey(0); } while ((!((user_clicked_key >= '1') && (user_clicked_key <= '0'+NUM_IMAGES))) && ( user_clicked_key != ESC )); if ((user_clicked_key >= '1') && (user_clicked_key <= '0'+NUM_IMAGES)) { selected_image_num = user_clicked_key-'0'; } } while ( user_clicked_key != ESC ); return 1; }
// Boost.Python bridge: converts both Python lists to std::vector<std::string>
// and forwards them to the native recognize() overload.
std::vector<std::string> recognize(const boost::python::list& words, const boost::python::list& postags){
    std::vector<std::string> word_vec = py_list_to_std_vector<std::string>(words);
    std::vector<std::string> tag_vec = py_list_to_std_vector<std::string>(postags);
    return recognize(word_vec, tag_vec);
}
/*
 * parseCompCharData -- parse the Composite Character section of an AFM
 * file.  When fi->ccd is NULL the section is skipped token by token;
 * otherwise each CC entry's name, piece count and piece list (piece name
 * plus delta x/y) are allocated and stored into fi->ccd.  The piece count
 * of each finished entry is cross-checked against its declared
 * numOfPieces, and the total entry count against fi->numOfComps.
 * Returns an AFM status code (AFM_ok / normalEOF / AFM_earlyEOF /
 * AFM_parseError).
 */
static int parseCompCharData(FILE *fp, register Font_Info *fi) { BOOL cont = TRUE, firstTime = TRUE, save = (fi->ccd != NULL); int pos = 0, j = 0, error = AFM_ok, ccount = 0, pcount = 0; register char *keyword; while (cont) { keyword = token(fp); if (keyword == NULL) /* Have reached an early and unexpected EOF. */ /* Set flag and stop parsing */ { error = AFM_earlyEOF; break; /* get out of loop */ } if (ccount > fi->numOfComps) { error = AFM_parseError; break; /* get out of loop */ } if (!save) /* get tokens until the end of the Composite Character info */ /* section without saving any of the data */ switch(recognize(keyword)) { case ENDCOMPOSITES: cont = FALSE; break; case ENDFONTMETRICS: cont = FALSE; error = normalEOF; break; default: break; } /* switch */ else /* otherwise parse entire Composite Character info section, */ /* saving the data */ switch(recognize(keyword)) { case COMMENT: keyword = linetoken(fp); break; case COMPCHAR: if (ccount < fi->numOfComps) { keyword = token(fp); if (pcount != fi->ccd[pos].numOfPieces) error = AFM_parseError; pcount = 0; if (firstTime) firstTime = FALSE; else pos++; fi->ccd[pos].ccName = (char *) malloc(strlen(keyword) + 1); strcpy(fi->ccd[pos].ccName, keyword); keyword = token(fp); fi->ccd[pos].numOfPieces = atoi(keyword); fi->ccd[pos].pieces = (Pcc *) calloc(fi->ccd[pos].numOfPieces, sizeof(Pcc)); j = 0; ccount++; } else { error = AFM_parseError; cont = FALSE; } break; case COMPCHARPIECE: if (pcount < fi->ccd[pos].numOfPieces) { keyword = token(fp); fi->ccd[pos].pieces[j].pccName = (char *) malloc(strlen(keyword) + 1); strcpy(fi->ccd[pos].pieces[j].pccName, keyword); keyword = token(fp); fi->ccd[pos].pieces[j].deltax = atoi(keyword); keyword = token(fp); fi->ccd[pos].pieces[j++].deltay = atoi(keyword); pcount++; } else error = AFM_parseError; break; case ENDCOMPOSITES: cont = FALSE; break; case ENDFONTMETRICS: cont = FALSE; error = normalEOF; break; case NOPE: default: error = AFM_parseError; break; } /* switch */ } /* 
while */ if (error == AFM_ok && ccount != fi->numOfComps) error = AFM_parseError; return(error); } /* parseCompCharData */
/*
 * parsePairKernData -- parse the Pair Kerning Data section of an AFM
 * file.  When fi->pkd is NULL the section is skipped token by token;
 * otherwise each KP entry (both glyph names plus x/y kern amounts) or
 * KPX entry (x amount only) is allocated and stored into fi->pkd.  The
 * number of parsed pairs is checked against fi->numOfPairs.  Returns an
 * AFM status code (AFM_ok / normalEOF / AFM_earlyEOF / AFM_parseError).
 */
static int parsePairKernData(FILE *fp, register Font_Info *fi) { BOOL cont = TRUE, save = (fi->pkd != NULL); int pos = 0, error = AFM_ok, pcount = 0; register char *keyword; while (cont) { keyword = token(fp); if (keyword == NULL) { error = AFM_earlyEOF; break; /* get out of loop */ } if (!save) /* get tokens until the end of the Pair Kerning Data */ /* section without saving any of the data */ switch(recognize(keyword)) { case ENDKERNPAIRS: case ENDKERNDATA: cont = FALSE; break; case ENDFONTMETRICS: cont = FALSE; error = normalEOF; break; default: break; } /* switch */ else /* otherwise parse entire Pair Kerning Data section, */ /* saving the data */ switch(recognize(keyword)) { case COMMENT: keyword = linetoken(fp); break; case KERNPAIR: if (pcount < fi->numOfPairs) { keyword = token(fp); fi->pkd[pos].name1 = (char *) malloc(strlen(keyword) + 1); strcpy(fi->pkd[pos].name1, keyword); keyword = token(fp); fi->pkd[pos].name2 = (char *) malloc(strlen(keyword) + 1); strcpy(fi->pkd[pos].name2, keyword); keyword = token(fp); fi->pkd[pos].xamt = atoi(keyword); keyword = token(fp); fi->pkd[pos++].yamt = atoi(keyword); pcount++; } else { error = AFM_parseError; cont = FALSE; } break; case KERNPAIRXAMT: if (pcount < fi->numOfPairs) { keyword = token(fp); fi->pkd[pos].name1 = (char *) malloc(strlen(keyword) + 1); strcpy(fi->pkd[pos].name1, keyword); keyword = token(fp); fi->pkd[pos].name2 = (char *) malloc(strlen(keyword) + 1); strcpy(fi->pkd[pos].name2, keyword); keyword = token(fp); fi->pkd[pos++].xamt = atoi(keyword); pcount++; } else { error = AFM_parseError; cont = FALSE; } break; case ENDKERNPAIRS: case ENDKERNDATA: cont = FALSE; break; case ENDFONTMETRICS: cont = FALSE; error = normalEOF; break; case NOPE: default: error = AFM_parseError; break; } /* switch */ } /* while */ if (error == AFM_ok && pcount != fi->numOfPairs) error = AFM_parseError; return(error); } /* parsePairKernData */
/*
 * parseTrackKernData -- parse the Track Kerning Data section of an AFM
 * file.  When fi->tkd is NULL the section is skipped token by token;
 * otherwise each TrackKern entry (degree plus min/max point size and
 * kern amount, with atof range checking via errno) is stored into
 * fi->tkd.  The number of parsed tracks is checked against
 * fi->numOfTracks.  Returns an AFM status code (AFM_ok / normalEOF /
 * AFM_earlyEOF / AFM_parseError).
 */
static int parseTrackKernData(FILE *fp, register Font_Info *fi) { BOOL cont = TRUE, save = (fi->tkd != NULL); int pos = 0, error = AFM_ok, tcount = 0; register char *keyword; while (cont) { keyword = token(fp); if (keyword == NULL) { error = AFM_earlyEOF; break; /* get out of loop */ } if (!save) /* get tokens until the end of the Track Kerning Data */ /* section without saving any of the data */ switch(recognize(keyword)) { case ENDTRACKKERN: case ENDKERNDATA: cont = FALSE; break; case ENDFONTMETRICS: cont = FALSE; error = normalEOF; break; default: break; } /* switch */ else /* otherwise parse entire Track Kerning Data section, */ /* saving the data */ switch(recognize(keyword)) { case COMMENT: keyword = linetoken(fp); break; case TRACKKERN: if (tcount < fi->numOfTracks) { keyword = token(fp); fi->tkd[pos].degree = atoi(keyword); keyword = token(fp); fi->tkd[pos].minPtSize = (float)atof(keyword); if (errno == ERANGE) error = AFM_parseError; keyword = token(fp); fi->tkd[pos].minKernAmt = (float)atof(keyword); if (errno == ERANGE) error = AFM_parseError; keyword = token(fp); fi->tkd[pos].maxPtSize = (float)atof(keyword); if (errno == ERANGE) error = AFM_parseError; keyword = token(fp); fi->tkd[pos++].maxKernAmt = (float)atof(keyword); if (errno == ERANGE) error = AFM_parseError; tcount++; } else { error = AFM_parseError; cont = FALSE; } break; case ENDTRACKKERN: case ENDKERNDATA: cont = FALSE; break; case ENDFONTMETRICS: cont = FALSE; error = normalEOF; break; case NOPE: default: error = AFM_parseError; break; } /* switch */ } /* while */ if (error == AFM_ok && tcount != fi->numOfTracks) error = AFM_parseError; return(error); } /* parseTrackKernData */