PatternTree::PatternTree(earl::EventTrace* trace, Analyzer& analyzer)
{
  vector<Pattern*> pv;
  get_patternv(pv, trace, analyzer.get_opt());

  for (size_t i = 0; i < pv.size(); i++)
    if (pv[i]->isenabled())
      this->add_node(pv[i]->get_id(), pv[i], pv[i]->get_parent_id());
    else
      delete pv[i];

  // register callbacks
  this->regcb(analyzer);
}
void TestVariation::onPlayerConnected()
{
    Analyzer *analyzer = sender();

    team.importFromTxt(getFileContent("team1.txt").trimmed());

    TeamHolder holder;
    holder.team() = team;

    if (count == 0) {
        holder.name() = "Weavile";
    } else if (count == 1) {
        holder.name() = "daleck";
    }
    ++count;

    /* Use different IPs to make sure we can find a battle against each other */
    analyzer->notify(NetworkCli::SetIP, holder.name());
    analyzer->login(holder, true);

    /* Timeout on test */
    setTimeout(10);
}
/**
 * Test Analyzer::sentenceLength().
 * \param analyzer analyzer
 * \param is input stream
 */
void testSentenceLength(Analyzer& analyzer, istream& is)
{
    string line;
    while(getline(is, line))
    {
        const char* p = line.c_str();
        string sentStr;
        while(size_t len = analyzer.sentenceLength(p))
        {
            sentStr.assign(p, len); // get each sentence
            cout << sentStr << endl; // print each sentence
            p += len; // move to the beginning of the next sentence
        }
        cout << endl;
    }
}
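The loop above repeatedly asks the analyzer for the length of the sentence starting at the current position and advances the raw pointer by that length until 0 is returned. A minimal self-contained sketch of the same consume-by-length pattern, using a hypothetical splitter (toySentenceLength) that treats '.' as the only sentence terminator; the real Analyzer::sentenceLength() is language-aware:

#include <cstring>
#include <iostream>
#include <string>

// Hypothetical stand-in for Analyzer::sentenceLength(): returns the length of
// the sentence starting at p (terminator included), or 0 at end of input.
static size_t toySentenceLength(const char* p)
{
    if (*p == '\0') return 0;
    const char* dot = std::strchr(p, '.');
    return dot ? static_cast<size_t>(dot - p + 1) : std::strlen(p);
}

int main()
{
    std::string line = "First sentence. Second sentence. Trailing fragment";
    const char* p = line.c_str();
    while (size_t len = toySentenceLength(p))
    {
        std::cout << std::string(p, len) << '\n'; // print each sentence
        p += len;                                 // move to the next sentence
    }
    return 0;
}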
/**
 * Test Analyzer::segmentString().
 * \param analyzer analyzer
 */
void testSegmentString(Analyzer& analyzer)
{
    vector<LanguageRegion> regionVec;
    string line;
    while(getline(cin, line))
    {
        if(! analyzer.segmentString(line.c_str(), regionVec))
        {
            cerr << "error: failed to segment string " << line << endl;
            exit(1);
        }

        printStringRegion(line.c_str(), regionVec);
        cout << endl;
    }
}
/**
 * Test Analyzer::languageListFromFile().
 * \param analyzer analyzer
 * \param fileName file name
 */
void testLanguageListFromFile(Analyzer& analyzer, const char* fileName)
{
    vector<LanguageID> idVec;
    if(! analyzer.languageListFromFile(fileName, idVec))
    {
        cerr << "error: failed to get language ID list from file " << fileName << endl;
        exit(1);
    }

    cout << "raw file name: " << fileName << endl;
    cout << "multiple language types in descending order of sentence count:" << endl;
    for(unsigned int i=0; i<idVec.size(); ++i)
    {
        cout << i << ": " << Knowledge::getLanguageNameFromID(idVec[i]) << endl;
    }
    cout << endl;
}
void check_analyzer_expected(Analyzer& ana, corpus::document doc,
                             uint64_t num_unique, uint64_t length)
{
    ana.tokenize(doc);
    ASSERT_EQUAL(doc.counts().size(), num_unique);
    ASSERT_EQUAL(doc.length(), length);
    ASSERT_EQUAL(doc.id(), 47ul);

    if (doc.contains_content())
    {
        ASSERT_EQUAL(doc.path(), "/home/person/filename.txt");
        ASSERT_EQUAL(doc.name(), "filename.txt");
    }
    else
    {
        ASSERT_EQUAL(doc.path(), "../data/sample-document.txt");
        ASSERT_EQUAL(doc.name(), "sample-document.txt");
    }
}
/**
 * Test Analyzer::languageListFromString().
 * \param analyzer analyzer
 */
void testLanguageListFromString(Analyzer& analyzer)
{
    vector<LanguageID> idVec;
    string line;
    while(getline(cin, line))
    {
        if(! analyzer.languageListFromString(line.c_str(), idVec))
        {
            cerr << "error: failed to get language ID list from string " << line << endl;
            exit(1);
        }

        cout << "multiple language types in descending order of sentence count:" << endl;
        for(unsigned int i=0; i<idVec.size(); ++i)
        {
            cout << i << ": " << Knowledge::getLanguageNameFromID(idVec[i]) << endl;
        }
        cout << endl;
    }
}
void Analyze::results_json(Json::Value &root)
{
    Json::Value tests;
    uint16_t total_evilness = 0;

    for (std::vector<Analyzer*>::iterator it = _analyzers.begin(); it != _analyzers.end(); ++it)
    {
        Analyzer *analyzer = *it;
        const std::string name = analyzer->name();

        if (tests[name].isNull())
        {
            Json::Value test_info(Json::objectValue);
            test_info["description"] = analyzer->description();
            test_info["name"] = name;
            test_info["results"] = Json::Value(Json::arrayValue);
            tests[name] = test_info;
        }

        analyzer->results_json_results(tests[name]["results"]);
        tests[name]["status"] = analyzer->status_as_string();
        tests[name]["severity-score"] = tests[name]["severity-score"].asLargestUInt() + analyzer->severity_score();
        if (!pure_output())
        {
            tests[name]["evilness"] = tests[name]["severity-score"];
            tests[name]["evilness-is-deprecated"] = "Use severity-score";
        }

        total_evilness += analyzer->severity_score();
    }

    root["severity-score"] = total_evilness;
    if (!pure_output())
    {
        root["evilness"] = root["severity-score"];
        root["evilness-is-deprecated"] = "Use severity-score";
    }
    root["tests"] = tests;

    results_json_add_version(root);
    results_json_add_statistics(root);
}
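For orientation, the JSON this builds has roughly the shape sketched below. The field names come from the code above; the analyzer name, status string, and numeric values are hypothetical, the evilness aliases appear only when pure_output() is false, and results_json_add_version()/results_json_add_statistics() append further top-level fields not shown:

{
  "severity-score": 3,
  "evilness": 3,
  "evilness-is-deprecated": "Use severity-score",
  "tests": {
    "example-analyzer": {
      "name": "example-analyzer",
      "description": "hypothetical analyzer description",
      "results": [],
      "status": "ok",
      "severity-score": 3,
      "evilness": 3,
      "evilness-is-deprecated": "Use severity-score"
    }
  }
}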
int main( int argc, char * argv[] ) {
  string ltl_file;
  try {
    Timer timer;
    timer.start();

    // Command line option handling.
    po::options_description desc
      ("CodeThorn V1.2\n"
       "Written by Markus Schordan and Adrian Prantl 2012\n"
       "Supported options");
    desc.add_options()
      ("help,h", "produce this help message")
      ("rose-help", "show help for compiler frontend options")
      ("version,v", "display the version")
      ("internal-checks", "run internal consistency checks (without input program)")
      ("verify", po::value< string >(), "verify all LTL formulae in the file [arg]")
      ("ltl-verifier", po::value< int >(), "specify which ltl-verifier to use [=1|2]")
      ("debug-mode", po::value< int >(), "set debug mode [arg]")
      ("csv-ltl", po::value< string >(), "output LTL verification results into a CSV file [arg]")
      ("csv-assert", po::value< string >(), "output assert reachability results into a CSV file [arg]")
      ("csv-assert-live", po::value< string >(), "output assert reachability results during analysis into a CSV file [arg]")
      ("csv-stats", po::value< string >(), "output statistics into a CSV file [arg]")
      ("tg1-estate-address", po::value< string >(), "transition graph 1: visualize address [=yes|no]")
      ("tg1-estate-id", po::value< string >(), "transition graph 1: visualize estate-id [=yes|no]")
      ("tg1-estate-properties", po::value< string >(), "transition graph 1: visualize all estate-properties [=yes|no]")
      ("tg2-estate-address", po::value< string >(), "transition graph 2: visualize address [=yes|no]")
      ("tg2-estate-id", po::value< string >(), "transition graph 2: visualize estate-id [=yes|no]")
      ("tg2-estate-properties", po::value< string >(), "transition graph 2: visualize all estate-properties [=yes|no]")
      ("colors", po::value< string >(), "use colors in output [=yes|no]")
      ("report-stdout", po::value< string >(), "report stdout estates during analysis [=yes|no]")
      ("report-stderr", po::value< string >(), "report stderr estates during analysis [=yes|no]")
      ("report-failed-assert", po::value< string >(), "report failed assert estates during analysis [=yes|no]")
      ("precision-intbool", po::value< string >(), "use precise top with intbool-(and/or) operators (used in int-analyzer) [=yes|no]")
      ("precision-exact-constraints", po::value< string >(), "(experimental) use precise constraint extraction [=yes|no]")
      ("tg-ltl-reduced", po::value< string >(), "(experimental) compute LTL-reduced transition graph based on a subset of computed estates [=yes|no]")
      ("semantic-fold", po::value< string >(), "compute semantically folded state transition graph [=yes|no]")
      ("post-semantic-fold", po::value< string >(), "compute semantically folded state transition graph only after the complete transition graph has been computed. [=yes|no]")
      ("report-semantic-fold", po::value< string >(), "report each folding operation with the respective number of estates. [=yes|no]")
      ("semantic-fold-threshold", po::value< int >(), "Set threshold with <arg> for semantic fold operation (experimental)")
      ("post-collapse-stg", po::value< string >(), "compute collapsed state transition graph after the complete transition graph has been computed. [=yes|no]")
      ("viz", po::value< string >(), "generate visualizations (dot) outputs [=yes|no]")
      ("update-input-var", po::value< string >(), "For testing purposes only. Default is Yes. [=yes|no]")
      ("run-rose-tests", po::value< string >(), "Run ROSE AST tests. [=yes|no]")
      ("reduce-cfg", po::value< string >(), "Reduce CFG nodes which are not relevant for the analysis. [=yes|no]")
      ("threads", po::value< int >(), "Run analyzer in parallel using <arg> threads (experimental)")
      ("display-diff", po::value< int >(), "Print statistics every <arg> computed estates.")
      ("ltl-output-dot", po::value< string >(), "LTL visualization: generate dot output.")
      ("ltl-show-derivation", po::value< string >(), "LTL visualization: show derivation in dot output.")
      ("ltl-show-node-detail", po::value< string >(), "LTL visualization: show node detail in dot output.")
      ("ltl-collapsed-graph", po::value< string >(), "LTL visualization: show collapsed graph in dot output.")
      ("input-var-values", po::value< string >(), "specify a set of input values (e.g. \"{1,2,3}\")")
      ("input-var-values-as-constraints", po::value< string >(), "represent input var values as constraints (otherwise as constants in PState)")
      ("arith-top", po::value< string >(), "Arithmetic operations +,-,*,/,% always evaluate to top [=yes|no]")
      ("abstract-interpreter", po::value< string >(), "Run analyzer in abstract interpreter mode. Use [=yes|no]")
      ("rers-binary", po::value< string >(), "Call rers binary functions in analysis. Use [=yes|no]")
      ("print-all-options", po::value< string >(), "print all yes/no command line options.")
      ;

    po::store(po::command_line_parser(argc, argv).options(desc).allow_unregistered().run(), args);
    po::notify(args);

    if (args.count("help")) {
      cout << desc << "\n";
      return 0;
    }
    if (args.count("rose-help")) {
      argv[1] = strdup("--help");
    }
    if (args.count("version")) {
      cout << "CodeThorn version 1.2\n";
      cout << "Written by Markus Schordan and Adrian Prantl 2012\n";
      return 0;
    }

    boolOptions.init(argc,argv);
    boolOptions.registerOption("tg1-estate-address",false);
    boolOptions.registerOption("tg1-estate-id",false);
    boolOptions.registerOption("tg1-estate-properties",true);
    boolOptions.registerOption("tg2-estate-address",false);
    boolOptions.registerOption("tg2-estate-id",true);
    boolOptions.registerOption("tg2-estate-properties",false);
    boolOptions.registerOption("colors",true);
    boolOptions.registerOption("report-stdout",false);
    boolOptions.registerOption("report-stderr",false);
    boolOptions.registerOption("report-failed-assert",false);
    boolOptions.registerOption("precision-intbool",true);
    boolOptions.registerOption("precision-exact-constraints",false);
    boolOptions.registerOption("tg-ltl-reduced",false);
    boolOptions.registerOption("semantic-fold",false);
    boolOptions.registerOption("post-semantic-fold",false);
    boolOptions.registerOption("report-semantic-fold",false);
    boolOptions.registerOption("post-collapse-stg",true);
    boolOptions.registerOption("viz",false);
    boolOptions.registerOption("update-input-var",true);
    boolOptions.registerOption("run-rose-tests",false);
    boolOptions.registerOption("reduce-cfg",true);
    boolOptions.registerOption("print-all-options",false);
    boolOptions.registerOption("ltl-output-dot",false);
    boolOptions.registerOption("ltl-show-derivation",true);
    boolOptions.registerOption("ltl-show-node-detail",true);
    boolOptions.registerOption("ltl-collapsed-graph",false);
    boolOptions.registerOption("input-var-values-as-constraints",false);
    boolOptions.registerOption("arith-top",false);
    boolOptions.registerOption("abstract-interpreter",false);
    boolOptions.registerOption("rers-binary",false);
    boolOptions.registerOption("relop-constraints",false); // not accessible on command line yet
    boolOptions.processOptions();

    if(boolOptions["print-all-options"]) {
      cout<<boolOptions.toString(); // prints all bool options
    }

    if (args.count("internal-checks")) {
      if(CodeThorn::internalChecks(argc,argv)==false)
        return 1;
      else
        return 0;
    }
    Analyzer analyzer;

    // clean up verify and csv-ltl option in argv
    if (args.count("verify")) {
      ltl_file = args["verify"].as<string>();
      for (int i=1; i<argc; ++i) {
        if ((string(argv[i]) == "--verify") || (string(argv[i]) == "--csv-ltl")) {
          // do not confuse ROSE frontend
          argv[i] = strdup("");
          assert(i+1<argc);
          argv[i+1] = strdup("");
        }
      }
    }
    if(args.count("csv-assert-live")) {
      analyzer._csv_assert_live_file=args["csv-assert-live"].as<string>();
    }
    if(args.count("input-var-values")) {
      string setstring=args["input-var-values"].as<string>();
      cout << "STATUS: input-var-values="<<setstring<<endl;
      stringstream ss(setstring);
      if(ss.peek()=='{')
        ss.ignore();
      else
        throw "Error: option input-var-values: wrong input format (at start).";
      int i;
      while(ss>>i) {
        //cout << "DEBUG: input-var-string:i:"<<i<<" peek:"<<ss.peek()<<endl;
        analyzer.insertInputVarValue(i);
        if(ss.peek()==','||ss.peek()==' ')
          ss.ignore();
      }
#if 0
      if(ss.peek()=='}')
        ss.ignore();
      else
        throw "Error: option input-var-values: wrong input format (at end).";
#endif
    }
    int numberOfThreadsToUse=1;
    if(args.count("threads")) {
      numberOfThreadsToUse=args["threads"].as<int>();
    }
    analyzer.setNumberOfThreadsToUse(numberOfThreadsToUse);
    if(args.count("semantic-fold-threshold")) {
      int semanticFoldThreshold=args["semantic-fold-threshold"].as<int>();
      analyzer.setSemanticFoldThreshold(semanticFoldThreshold);
    }
    if(args.count("display-diff")) {
      int displayDiff=args["display-diff"].as<int>();
      analyzer.setDisplayDiff(displayDiff);
    }
    if(args.count("ltl-verifier")) {
      int ltlVerifier=args["ltl-verifier"].as<int>();
      analyzer.setLTLVerifier(ltlVerifier);
    }
    if(args.count("debug-mode")) {
      option_debug_mode=args["debug-mode"].as<int>();
    }

    // clean up string-options in argv
    for (int i=1; i<argc; ++i) {
      if (string(argv[i]) == "--csv-assert"
          || string(argv[i])=="--csv-stats"
          || string(argv[i])=="--csv-assert-live"
          || string(argv[i])=="--threads"
          || string(argv[i])=="--display-diff"
          || string(argv[i])=="--input-var-values"
          || string(argv[i])=="--ltl-verifier" ) {
        // do not confuse ROSE frontend
        argv[i] = strdup("");
        assert(i+1<argc);
        argv[i+1] = strdup("");
      }
    }

    // Build the AST used by ROSE
    cout << "INIT: Parsing and creating AST."<<endl;
    SgProject* sageProject = frontend(argc,argv);
    double frontEndRunTime=timer.getElapsedTimeInMilliSec();

    if(boolOptions["run-rose-tests"]) {
      cout << "INIT: Running ROSE AST tests."<<endl;
      // Run internal consistency tests on AST
      AstTests::runAllTests(sageProject);
    }

    SgNode* root=sageProject;
    checkProgram(root);

    timer.start();
    cout << "INIT: Running variable<->symbol mapping check."<<endl;
    //VariableIdMapping varIdMap;
    analyzer.getVariableIdMapping()->computeVariableSymbolMapping(sageProject);
    cout << "STATUS: Variable<->Symbol mapping created."<<endl;
    if(!analyzer.getVariableIdMapping()->isUniqueVariableSymbolMapping()) {
      cerr << "WARNING: Variable<->Symbol mapping not bijective."<<endl;
      //varIdMap.reportUniqueVariableSymbolMappingViolations();
    }
#if 0
    analyzer.getVariableIdMapping()->toStream(cout);
#endif
    cout << "INIT: creating solver."<<endl;
    analyzer.initializeSolver1("main",root);
    analyzer.initLabeledAssertNodes(sageProject);
    double initRunTime=timer.getElapsedTimeInMilliSec();

    timer.start();
    cout << "=============================================================="<<endl;
    if(boolOptions["semantic-fold"]) {
      analyzer.runSolver2();
    } else {
      analyzer.runSolver1();
    }
    if(boolOptions["post-semantic-fold"]) {
      cout << "Performing post semantic folding (this may take some time):"<<endl;
      analyzer.semanticFoldingOfTransitionGraph();
    }
    double analysisRunTime=timer.getElapsedTimeInMilliSec();

    // since CT1.2 the ADT TransitionGraph ensures that no duplicates can exist
#if 0
    long removed=analyzer.getTransitionGraph()->removeDuplicates();
    cout << "Transitions reduced: "<<removed<<endl;
#endif
    cout << "=============================================================="<<endl;
    // TODO: reachability in presence of semantic folding
    if(!boolOptions["semantic-fold"] && !boolOptions["post-semantic-fold"]) {
      printAsserts(analyzer,sageProject);
    }
    if (args.count("csv-assert")) {
      string filename=args["csv-assert"].as<string>().c_str();
      generateAssertsCsvFile(analyzer,sageProject,filename);
      cout << "=============================================================="<<endl;
    }
    if(boolOptions["tg-ltl-reduced"]) {
      cout << "(Experimental) Reducing transition graph ..."<<endl;
      set<const EState*> xestates=analyzer.nonLTLRelevantEStates();
      cout << "Size of transition graph before reduction: "<<analyzer.getTransitionGraph()->size()<<endl;
      cout << "Number of EStates to be reduced: "<<xestates.size()<<endl;
      analyzer.getTransitionGraph()->reduceEStates(xestates);
      cout << "Size of transition graph after reduction : "<<analyzer.getTransitionGraph()->size()<<endl;
      cout << "=============================================================="<<endl;
    }

    timer.start();
    if (ltl_file.size()) {
      generateLTLOutput(analyzer,ltl_file);
      cout << "=============================================================="<<endl;
    }
    double ltlRunTime=timer.getElapsedTimeInMilliSec();

    // TODO: reachability in presence of semantic folding
    if(boolOptions["semantic-fold"] || boolOptions["post-semantic-fold"]) {
      cout << "NOTE: no reachability results with semantic folding (not implemented yet)."<<endl;
    } else {
      printAssertStatistics(analyzer,sageProject);
    }
    cout << "=============================================================="<<endl;

    double totalRunTime=frontEndRunTime+initRunTime+analysisRunTime+ltlRunTime;

    long pstateSetSize=analyzer.getPStateSet()->size();
    long pstateSetBytes=analyzer.getPStateSet()->memorySize();
    long pstateSetMaxCollisions=analyzer.getPStateSet()->maxCollisions();
    long pstateSetLoadFactor=analyzer.getPStateSet()->loadFactor();
    long eStateSetSize=analyzer.getEStateSet()->size();
    long eStateSetBytes=analyzer.getEStateSet()->memorySize();
    long eStateSetMaxCollisions=analyzer.getEStateSet()->maxCollisions();
    double eStateSetLoadFactor=analyzer.getEStateSet()->loadFactor();
    long transitionGraphSize=analyzer.getTransitionGraph()->size();
    long transitionGraphBytes=transitionGraphSize*sizeof(Transition);
    long numOfconstraintSets=analyzer.getConstraintSetMaintainer()->numberOf();
    long constraintSetsBytes=analyzer.getConstraintSetMaintainer()->memorySize();
    long constraintSetsMaxCollisions=analyzer.getConstraintSetMaintainer()->maxCollisions();
    double constraintSetsLoadFactor=analyzer.getConstraintSetMaintainer()->loadFactor();

    cout << "Number of stdin-estates : "<<color("cyan")<<(analyzer.getEStateSet()->numberOfIoTypeEStates(InputOutput::STDIN_VAR))<<color("white")<<endl;
    cout << "Number of stdout-estates : "<<color("cyan")<<(analyzer.getEStateSet()->numberOfIoTypeEStates(InputOutput::STDOUT_VAR))<<color("white")<<endl;
    cout << "Number of stderr-estates : "<<color("cyan")<<(analyzer.getEStateSet()->numberOfIoTypeEStates(InputOutput::STDERR_VAR))<<color("white")<<endl;
    cout << "Number of failed-assert-estates: "<<color("cyan")<<(analyzer.getEStateSet()->numberOfIoTypeEStates(InputOutput::FAILED_ASSERT))<<color("white")<<endl;
"=============================================================="<<endl; cout << "Number of pstates : "<<color("magenta")<<pstateSetSize<<color("white")<<" (memory: "<<color("magenta")<<pstateSetBytes<<color("white")<<" bytes)"<<" ("<<""<<pstateSetLoadFactor<< "/"<<pstateSetMaxCollisions<<")"<<endl; cout << "Number of estates : "<<color("cyan")<<eStateSetSize<<color("white")<<" (memory: "<<color("cyan")<<eStateSetBytes<<color("white")<<" bytes)"<<" ("<<""<<eStateSetLoadFactor<< "/"<<eStateSetMaxCollisions<<")"<<endl; cout << "Number of transitions : "<<color("blue")<<transitionGraphSize<<color("white")<<" (memory: "<<color("blue")<<transitionGraphBytes<<color("white")<<" bytes)"<<endl; cout << "Number of constraint sets : "<<color("yellow")<<numOfconstraintSets<<color("white")<<" (memory: "<<color("yellow")<<constraintSetsBytes<<color("white")<<" bytes)"<<" ("<<""<<constraintSetsLoadFactor<< "/"<<constraintSetsMaxCollisions<<")"<<endl; cout << "=============================================================="<<endl; long totalMemory=pstateSetBytes+eStateSetBytes+transitionGraphBytes+constraintSetsBytes; cout << "Memory total : "<<color("green")<<totalMemory<<" bytes"<<color("normal")<<endl; cout << "Time total : "<<color("green")<<readableruntime(totalRunTime)<<color("normal")<<endl; cout << "=============================================================="<<endl; if(args.count("csv-stats")) { string filename=args["csv-stats"].as<string>().c_str(); stringstream text; text<<"Sizes,"<<pstateSetSize<<", " <<eStateSetSize<<", " <<transitionGraphSize<<", " <<numOfconstraintSets<<endl; text<<"Memory,"<<pstateSetBytes<<", " <<eStateSetBytes<<", " <<transitionGraphBytes<<", " <<constraintSetsBytes<<", " <<totalMemory<<endl; text<<"Runtime(readable)," <<readableruntime(frontEndRunTime)<<", " <<readableruntime(initRunTime)<<", " <<readableruntime(analysisRunTime)<<", " <<readableruntime(ltlRunTime)<<", " <<readableruntime(totalRunTime)<<endl; text<<"Runtime(ms)," <<frontEndRunTime<<", " <<initRunTime<<", " <<analysisRunTime<<", " <<ltlRunTime<<", " <<totalRunTime<<endl; text<<"hashset-collisions," <<pstateSetMaxCollisions<<", " <<eStateSetMaxCollisions<<", " <<constraintSetsMaxCollisions<<endl; text<<"hashset-loadfactors," <<pstateSetLoadFactor<<", " <<eStateSetLoadFactor<<", " <<constraintSetsLoadFactor<<endl; text<<"threads,"<<numberOfThreadsToUse<<endl; write_file(filename,text.str()); cout << "generated "<<filename<<endl; } if(boolOptions["viz"]) { Visualizer visualizer(analyzer.getLabeler(),analyzer.getVariableIdMapping(),analyzer.getFlow(),analyzer.getPStateSet(),analyzer.getEStateSet(),analyzer.getTransitionGraph()); cout << "generating graphviz files:"<<endl; string dotFile="digraph G {\n"; dotFile+=visualizer.transitionGraphToDot(); dotFile+="}\n"; write_file("transitiongraph1.dot", dotFile); cout << "generated transitiongraph1.dot."<<endl; string dotFile3=visualizer.foldedTransitionGraphToDot(); write_file("transitiongraph2.dot", dotFile3); cout << "generated transitiongraph2.dot."<<endl; string datFile1=(analyzer.getTransitionGraph())->toString(); write_file("transitiongraph1.dat", datFile1); cout << "generated transitiongraph1.dat."<<endl; assert(analyzer.startFunRoot); //analyzer.generateAstNodeInfo(analyzer.startFunRoot); //dotFile=astTermWithNullValuesToDot(analyzer.startFunRoot); analyzer.generateAstNodeInfo(sageProject); cout << "generated node info."<<endl; dotFile=functionAstTermsWithNullValuesToDot(sageProject); write_file("ast.dot", dotFile); cout << "generated ast.dot."<<endl; 
write_file("cfg.dot", analyzer.flow.toDot(analyzer.cfanalyzer->getLabeler())); cout << "generated cfg.dot."<<endl; } #if 0 { cout << "EStateSet:\n"<<analyzer.getEStateSet()->toString()<<endl; } #endif #if 0 { cout << "MAP:"<<endl; cout << analyzer.getLabeler()->toString(); } #endif #if 0 // check output var to be constant in transition graph TransitionGraph* tg=analyzer.getTransitionGraph(); for(TransitionGraph::iterator i=tg->begin();i!=tg->end();++i) { const EState* es1=(*i).source; InputOutput myio=es1->io; assert(myio.op==InputOutput::STDOUT_VAR && es1->pstate->varIsConst(es1->io.var) ); } #endif // reset terminal cout<<color("normal")<<"done."<<endl; } catch(char* str) {
void generateLTLOutput(Analyzer& analyzer, string ltl_file) {
  extern CodeThorn::LTL::Formula* ltl_val;

  //
  // Verification
  //
  int n = 0;
  int n_yes = 0;
  int n_no = 0;
  int n_undecided = 0;
  int n_failed = 0;

  assert(analyzer.getEStateSet());
  assert(analyzer.getTransitionGraph());

  if (ltl_file.size()) {
    CodeThorn::FixpointLTL::Checker* checker1 = 0;
    CodeThorn::UnifiedLTL::UChecker* checker2 = 0;
    switch(analyzer.getLTLVerifier()) {
    case 1:
      checker1 = new CodeThorn::FixpointLTL::Checker(*analyzer.getEStateSet(), *analyzer.getTransitionGraph());
      break;
    case 2:
      checker2 = new CodeThorn::UnifiedLTL::UChecker(*analyzer.getEStateSet(), *analyzer.getTransitionGraph());
      break;
    default:
      cerr << "Error: unknown ltl-verifier specified with ltl-verifier option."<<endl;
      exit(1);
    }

    ltl_input = fopen(ltl_file.c_str(), "r");
    if (ltl_input == NULL)
      cerr<<"Error: could not open file "<<ltl_file.c_str()<<endl;
    assert(ltl_input);

    ofstream* csv = NULL;
    if (args.count("csv-ltl")) {
      csv = new ofstream();
      // use binary and \r\n to enforce DOS line endings
      // http://tools.ietf.org/html/rfc4180
      csv->open(args["csv-ltl"].as<string>().c_str(), ios::trunc|ios::binary);
      //*csv << "Index,\"LTL formula\",Result,Confidence\r\n";
    }

    while ( !ltl_eof) {
      try {
        ltl_label = 0;
        if (ltl_parse()) {
          cerr<<color("red")<< "Syntax error" <<color("normal")<<endl;
          ++n;
          ++n_failed;
          continue;
        }
        if (ltl_val == NULL) {
          // empty line
          continue;
        }
      } catch(const char* s) {
        if (ltl_val) cout<<color("normal")<<string(*ltl_val)<<endl;
        cout<< s<<endl<<color("red")<< "Grammar Error" <<color("normal")<<endl;
        ++n;
        ++n_failed;
        continue;
      } catch(...) {
        cout<<color("red")<< "Parser exception" << endl;
        ++n;
        ++n_failed;
        continue;
      }
      ++n;
      string formula = *ltl_val;
      cout<<endl<<"Verifying formula "<<color("white")<<formula<<color("normal")<<"."<<endl;
      //if (csv) *csv << n <<";\"" <<formula<<"\";";
      if (csv) *csv << n+60 <<",";
      try {
        AType::BoolLattice result;
        if (checker1) result = checker1->verify(*ltl_val);
        if (checker2) result = checker2->verify(*ltl_val);
        if (result.isTrue()) {
          ++n_yes;
          cout<<color("green")<<"YES"<<color("normal")<<endl;
          if (csv) *csv << "yes,9\r\n";
        } else if (result.isFalse()) {
          ++n_no;
          cout<<color("cyan")<<"NO"<<color("normal")<<endl;
          if (csv) *csv << "no,9\r\n";
        } else {
          ++n_undecided;
          cout<<color("magenta")<<"UNKNOWN"<<color("normal")<<endl;
          if (csv) *csv << "unknown,0\r\n";
        }
      } catch(const char* str) {
        ++n_failed;
        cerr << "Exception raised: " << str << endl;
        cout<<color("red")<<"ERROR"<<color("normal")<<endl;
        if (csv) *csv << "error,0\r\n";
      } catch(string str) {
        ++n_failed;
        cerr << "Exception raised: " << str << endl;
        cout<<color("red")<<"ERROR"<<color("normal")<<endl;
        if (csv) *csv << "error,0\r\n";
      } catch(...) {
        ++n_failed;
        cout<<color("red")<<"ERROR"<<color("normal")<<endl;
        if (csv) *csv << "error,0\r\n";
      }
    }
    fclose(ltl_input);
    if (csv) delete csv;
    if (checker1) delete checker1;
    if (checker2) delete checker2;

    assert(n_yes+n_no+n_undecided+n_failed == n);
    cout<<"\nStatistics "<<endl
        <<"========== "<<endl
        <<n_yes      <<"/"<<n<<color("green")  <<" YES, "    <<color("normal")
        <<n_no       <<"/"<<n<<color("cyan")   <<" NO, "     <<color("normal")
        <<n_undecided<<"/"<<n<<color("magenta")<<" UNKNOWN, "<<color("normal")
        <<n_failed   <<"/"<<n<<color("red")    <<" ERROR"    <<color("normal")
        <<endl;
  }
}
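The CSV writer above opens the stream with ios::binary and writes explicit "\r\n" so the file keeps RFC 4180 (CRLF) line endings on every platform instead of the runtime's native translation. A minimal standalone sketch of that technique; the file name and row contents are made up for illustration:

#include <fstream>
#include <ios>

int main()
{
    // ios::binary prevents the runtime from translating '\n' into the
    // platform's native line ending, so the explicit "\r\n" survives as-is.
    std::ofstream csv("results.csv", std::ios::trunc | std::ios::binary);
    csv << "Index,Result,Confidence\r\n";
    csv << "61,yes,9\r\n";
    return 0;
}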
/**
 * Main function.
 */
int main(int argc, char* argv[])
{
    string inputFile, typeStr;
    bool isAnalyzeFile = false;
    bool isTestLine = false;

    try
    {
        po::options_description config("Allowed options");
        config.add_options()
            ("help,h", "print help message")
            ("file,f", po::value<string>(&inputFile), "use an input file instead of standard input")
            ("type,t", po::value<string>(&typeStr)->default_value("language"), "type of function: [encoding, language, list, segment, sentence]")
            ("line,l", "test each line in input file")
            ;

        po::options_description cmdline_options;
        cmdline_options.add(config);

        po::variables_map vm;
        store(po::command_line_parser(argc, argv).options(cmdline_options).run(), vm);
        po::notify(vm);

        if(vm.count("help"))
        {
            cout << cmdline_options << endl;
            exit(1);
        }

        if(vm.count("file"))
        {
            isAnalyzeFile = true;
            //cout << "input file: " << inputFile << endl;
        }

        if(typeStr == "encoding" || typeStr == "language" || typeStr == "list"
            || typeStr == "segment" || typeStr == "sentence")
        {
            //cout << "function type: " << typeStr << endl;
        }
        else
        {
            cerr << "unknown function type: " << typeStr << endl;
            cout << cmdline_options << endl;
            exit(1);
        }

        if(vm.count("line"))
        {
            isTestLine = true;
            //cout << "testing each line" << endl;

            if(! isAnalyzeFile)
            {
                cerr << "error: no input file is given by -f." << endl;
                exit(1);
            }
            if(typeStr != "encoding" && typeStr != "language")
            {
                cerr << "error: line test mode is only valid for \"encoding\" or \"language\" type." << endl;
                exit(1);
            }
        }
        //cout << endl;
    }
    catch(std::exception& e)
    {
        cerr << "error: " << e.what() << "\n";
        exit(1);
    }

    // create instances
    Factory* factory = Factory::instance();
    Analyzer* analyzer = factory->createAnalyzer();
    Knowledge* knowledge = factory->createKnowledge();

    // model files
    const char* encodingModel = "../../db/langid/model/encoding.bin";
    const char* languageModel = "../../db/langid/model/language.bin";

    // load encoding model for encoding identification
    if(! knowledge->loadEncodingModel(encodingModel))
    {
        cerr << "error: fail to load file " << encodingModel << endl;
        exit(1);
    }
    // load language model for language identification or sentence tokenization
    if(! knowledge->loadLanguageModel(languageModel))
    {
        cerr << "error: fail to load file " << languageModel << endl;
        exit(1);
    }

    // not to limit analyze size
    //analyzer->setOption(Analyzer::OPTION_TYPE_LIMIT_ANALYZE_SIZE, 0);
    // set minimum block size
    //analyzer->setOption(Analyzer::OPTION_TYPE_BLOCK_SIZE_THRESHOLD, 100);
    // identify Chinese Traditional text as Chinese Simplified language
    //analyzer->setOption(Analyzer::OPTION_TYPE_NO_CHINESE_TRADITIONAL, 1);

    // set knowledge
    analyzer->setKnowledge(knowledge);

    // identify character encoding
    if(typeStr == "encoding")
    {
        if(isAnalyzeFile)
        {
            if(isTestLine)
                testEncodingFromFileLine(*analyzer, inputFile.c_str());
            else
                testEncodingFromFile(*analyzer, inputFile.c_str());
        }
        else
            testEncodingFromString(*analyzer);
    }
    // identify the single primary language in UTF-8 encoding
    else if(typeStr == "language")
    {
        if(isAnalyzeFile)
        {
            if(isTestLine)
                testLanguageFromFileLine(*analyzer, inputFile.c_str());
            else
                testLanguageFromFile(*analyzer, inputFile.c_str());
        }
        else
            testLanguageFromString(*analyzer);
    }
    // identify the list of multiple languages in UTF-8 encoding
    else if(typeStr == "list")
    {
        if(isAnalyzeFile)
            testLanguageListFromFile(*analyzer, inputFile.c_str());
        else
            testLanguageListFromString(*analyzer);
    }
    // segment the UTF-8 multi-lingual text into single-language regions
    else if(typeStr == "segment")
    {
        if(isAnalyzeFile)
            testSegmentFile(*analyzer, inputFile.c_str());
        else
            testSegmentString(*analyzer);
    }
    // tokenize the UTF-8 text into sentences
    else if(typeStr == "sentence")
    {
        if(isAnalyzeFile)
        {
            ifstream ifs(inputFile.c_str());
            if(! ifs)
            {
                cerr << "error in opening file " << inputFile << endl;
                exit(1);
            }
            testSentenceLength(*analyzer, ifs);
        }
        else
            testSentenceLength(*analyzer, cin);
    }

    delete knowledge;
    delete analyzer;
    return 0;
}
int main(int argc, char * argv[])
{
  // HbondMap map;
  // map.push_pair (0,1);
  // map.push_pair (1,2);
  // map.push_pair (2,3);
  // map.push_pair (3,4);
  // map.push_pair (4,5);
  // map.push_pair (5,0);
  // map.push_pair (0,2);
  // map.push_pair (1,3);
  // map.push_pair (2,4);
  // map.push_pair (3,5);
  // map.push_pair (4,0);
  // map.push_pair (5,1);
  // // map.push_pair (0, 1);
  // // map.push_pair (0, 2);
  // // map.push_pair (0, 3);
  // // map.push_pair (1, 3);
  // // map.push_pair (2, 3);
  // // map.push_pair (1, 4);
  // // map.push_pair (2, 5);
  // // map.push_pair (4, 6);
  // // map.push_pair (5, 6);
  // // map.push_pair (101, 102);
  // // map.push_pair (101, 103);
  // // map.push_pair (102, 103);
  // // map.push_pair (202, 203);
  // Tree tree;
  // Circles cs;
  // while (!map.empty()){
  //   tree.addRoot(map, 0);
  //   tree.addGenerations(map);
  //   // tree.print();
  //   Circles tmpc;
  //   tree.buildCircles (tmpc);
  //   tmpc.uniqueCircles();
  //   // std::cout << std::endl;
  //   // std::cout << "before simplified:" << std::endl;
  //   // tmpc.print();
  //   tmpc.simplifyCircles ();
  //   // std::cout << std::endl;
  //   // std::cout << "simplified circles:" << std::endl;
  //   // tmpc.print();
  //   cs.add (tmpc);
  //   HbondMap newMap;
  //   tree.renewMap (map, newMap);
  //   map = newMap;
  // }
  // cs.uniqueCircles();
  // cs.sortCircles();
  // std::cout << std::endl;
  // std::cout << "simplified circles:" << std::endl;
  // cs.print ();

  std::vector<Hbond> bonds;
  bonds.push_back (Hbond(0,1));
  bonds.push_back (Hbond(1,2));
  bonds.push_back (Hbond(2,3));
  bonds.push_back (Hbond(3,4));
  bonds.push_back (Hbond(4,5));
  bonds.push_back (Hbond(5,0));
  bonds.push_back (Hbond(0,2));
  bonds.push_back (Hbond(1,3));
  bonds.push_back (Hbond(2,4));
  bonds.push_back (Hbond(3,5));
  bonds.push_back (Hbond(4,0));
  bonds.push_back (Hbond(5,1));
  // bonds.push_back (Hbond(0,1));
  // bonds.push_back (Hbond(0,2));
  // bonds.push_back (Hbond(1,3));
  // bonds.push_back (Hbond(2,3));
  // bonds.push_back (Hbond(2,4));
  // bonds.push_back (Hbond(1,4));

  Analyzer ana;
  ana.readData (bonds);

  std::cout << std::endl;
  std::cout << "simplified circles:" << std::endl;
  ana.getCircles().print ();

  return 0;
}
void Sonogram::demo(QPainter& p) { analyze(p, Scope(m_fht->size(), 0), new_frame_); }
/**
 * Main function.
 */
int main(int argc, char* argv[])
{
    const char* sysdict = TEST_JMA_DEFAULT_SYSTEM_DICT;
    const char* stopdict = TEST_JMA_DEFAULT_STOPWORD_DICT;

    for(int optIndex=1; optIndex+1<argc; optIndex+=2)
    {
        if(! strcmp(argv[optIndex], "--stop"))
            stopdict = argv[optIndex+1];
        else if(! strcmp(argv[optIndex], "--dict"))
            sysdict = argv[optIndex+1];
        else
        {
            cerr << "unknown option: " << argv[optIndex] << endl;
            printUsage();
            exit(1);
        }
    }
    cout << "system dictionary: " << sysdict << endl;
    cout << "stop word dictionary: " << stopdict << endl;

    // create factory
    JMA_Factory* factory = JMA_Factory::instance();

    // create analyzer and knowledge
    Analyzer* analyzer = factory->createAnalyzer();
    Knowledge* knowledge = factory->createKnowledge();

    // load dictionary files
    knowledge->setSystemDict(sysdict);
    if(knowledge->loadDict() == 0)
    {
        cerr << "error: fail to load dictionary files" << endl;
        exit(1);
    }
    cout << "encoding type of system dictionary: " << Knowledge::encodeStr(knowledge->getEncodeType()) << endl;

    // load stop word dictionary
    if(knowledge->loadStopWordDict(stopdict) == 0)
    {
        cerr << "error: fail to load stop word dictionary" << endl;
        exit(1);
    }

    // set knowledge
    if(analyzer->setKnowledge(knowledge) == 0)
    {
        cerr << "fail to set knowledge" << endl;
        exit(1);
    }

    Sentence s;
    string line;
    while(getline(cin, line))
    {
        s.setString(line.c_str());
        if(analyzer->runWithSentence(s) != 1)
        {
            cerr << "error: fail in Analyzer::runWithSentence()" << endl;
            exit(1);
        }

        // get one-best result
        int i = s.getOneBestIndex();
        if(i == -1)
            cout << "no one-best result exists." << endl;
        else
        {
            for(int j=0; j<s.getCount(i); ++j)
                cout << s.getLexicon(i, j) << "/" << s.getStrPOS(i, j) << " ";
            cout << endl;
        }
    }

    delete knowledge;
    delete analyzer;
    return 0;
}
/**
 * Main function.
 */
int main(int argc, char* argv[])
{
    if(argc < 3)
    {
        printUsage();
        exit(1);
    }

    // set default dictionary file
    const char* sysdict = TEST_JMA_DEFAULT_SYSTEM_DICT;
    if(argc > 3)
    {
        if(argc == 5 && ! strcmp(argv[3], "--dict"))
            sysdict = argv[4];
        else
        {
            cerr << "unknown command option " << argv[3] << endl;
            printUsage();
            exit(1);
        }
    }

    // create factory
    JMA_Factory* factory = JMA_Factory::instance();

    // create analyzer and knowledge
    Analyzer* analyzer = factory->createAnalyzer();
    Knowledge* knowledge = factory->createKnowledge();

    // load dictionary files
    knowledge->setSystemDict(sysdict);
    cout << "system dictionary: " << sysdict << endl;
    if(knowledge->loadDict() == 0)
    {
        cerr << "fail to load dictionary files" << endl;
        exit(1);
    }
    cout << "encoding type of system dictionary: " << Knowledge::encodeStr(knowledge->getEncodeType()) << endl;

    // set knowledge
    if(analyzer->setKnowledge(knowledge) == 0)
    {
        cerr << "fail to set knowledge" << endl;
        exit(1);
    }

    // open files
    const char* source = argv[1];
    const char* dest = argv[2];
    assert(source && dest);

    ifstream from(source);
    if(! from)
    {
        cerr << "error in opening file: " << source << endl;
        exit(1);
    }
    ofstream to(dest);
    if(! to)
    {
        cerr << "error in opening file: " << dest << endl;
        exit(1);
    }

    // split sentences
    string line;
    vector<Sentence> sentVec;
    while(getline(from, line))
    {
        sentVec.clear();
        analyzer->splitSentence(line.c_str(), sentVec);
        for(size_t i=0; i<sentVec.size(); ++i)
        {
            string str(sentVec[i].getString());
            trimString(str);
            if(!str.empty())
                to << str << endl;
        }
    }

    // destroy instances
    delete knowledge;
    delete analyzer;
    return 0;
}