void XdgMenuApplinkProcessor::step1() { fillAppFileInfoList(); createRules(); // Check Include rules & mark as allocated ............ XdgMenuAppFileInfoHashIterator i(mAppFileInfoHash); while(i.hasNext()) { i.next(); XdgDesktopFile* file = i.value()->desktopFile(); if (mRules.checkInclude(i.key(), *file)) { if (!mOnlyUnallocated) i.value()->setAllocated(true); if (!mRules.checkExclude(i.key(), *file)) { mSelected.append(i.value()); } } } // Process childs menus ............................... foreach (XdgMenuApplinkProcessor* child, mChilds) child->step1(); }
void AggregationPerfTest::setup(bool express) { packetSink = new PacketSink(); packetManager = new InstanceManager<Packet>(); Rules* rules = createRules(); int inactiveBufferTime = 5; // maximum number of seconds until non-active flows are exported int activeBufferTime = 10; // maximum number of seconds until active flows are exported // note: we do not need to specify any receiving module for the ipfixaggregator, // as deconstruction of unused instances is done with shared pointers ipfixAggregator = new IpfixAggregator(rules, inactiveBufferTime, activeBufferTime); if (express) { hookingFilter = new ExpressHookingFilter(ipfixAggregator); } else { hookingFilter = new HookingFilter(ipfixAggregator); } filter = new Filter(); filter->addProcessor(hookingFilter); filter->setReceiver(packetSink); // start all needed threads packetSink->runSink(); ipfixAggregator->runSink(); ipfixAggregator->start(); filter->startFilter(); }
/// Runs the aggregation performance measurement: pushes numPackets packets
/// through a PacketAggregator and reports the elapsed wall-clock time.
/// @return PASSED on success (REQUIRE/ASSERT abort the test otherwise)
Test::TestResult AggregationPerfTest::execTest()
{
    ConnectionQueue<Packet*> inputQueue(10);
    TestQueue<IpfixRecord*> recordQueue;

    PacketAggregator aggregator(1);
    Rules* rules = createRules();
    aggregator.buildAggregator(rules, 0, 0, 16);

    // wire up the chain: inputQueue -> aggregator -> recordQueue
    inputQueue.connectTo(&aggregator);
    aggregator.connectTo(&recordQueue);
    aggregator.start();
    inputQueue.start();

    struct timeval begin;
    REQUIRE(gettimeofday(&begin, 0) == 0);

    sendPacketsTo(&inputQueue, numPackets);

    // check that at least one record was received
    IpfixRecord* record;
    ASSERT(recordQueue.pop(1000, &record), "received timeout when should have received flow!");

    struct timeval end;
    REQUIRE(gettimeofday(&end, 0) == 0);

    struct timeval elapsed;
    REQUIRE(timeval_subtract(&elapsed, &end, &begin) == 0);
    printf("Aggregator: needed time for processing %d packets: %d.%06d seconds\n",
           numPackets, (int)elapsed.tv_sec, (int)elapsed.tv_usec);

    inputQueue.shutdown();
    aggregator.shutdown();

    return PASSED;
}
// Builds this extended language from `lang`: copies its atoms and appends one
// synthetic atom per rule (named "@<ret-synt>::<rule-name>"), copies the synts
// and rules, and gives every rule an argument list that ends with its own
// synthetic atom.  `map_atom_count` offsets the synthetic atoms' map indices
// past the original atoms' slots.  All descriptors live in `pool`.
void ExtLang::build(const Lang &lang,ulen map_atom_count)
 {
  original_atom_count=lang.getAtomCount();

  // atoms: the original atoms first, then one synthetic atom per rule
  {
   // LenAdd guards the length sum against overflow
   ulen len=LenAdd(lang.getAtomCount(),lang.getRuleCount());

   auto atoms=createAtoms(len);

   ulen index=0;

   // copy the original atoms, keeping their map indices
   for(auto &atom : lang.getAtoms() )
     {
      atoms->index=index++;
      atoms->name=pool.dup(atom.name);
      atoms->map_index=atom.map_index;

      ++atoms;
     }

   // append one synthetic atom per rule: "@<ret-synt>::<rule-name>",
   // map index shifted by map_atom_count to avoid colliding with real atoms
   for(auto &rule : lang.getRules() )
     {
      atoms->index=index++;
      atoms->name=pool.cat(StrLen("@",1),rule.ret->name,StrLen("::",2),rule.name);
      atoms->map_index=rule.map_index+map_atom_count;

      ++atoms;
     }
  }

  // synts: copied one-to-one; only rules.len is set here — rules.ptr is
  // fixed up in the "synts.rules" section below
  {
   ulen len=lang.getSyntCount();

   auto synts=createSynts(len);

   for(auto &synt : lang.getSynts() )
     {
      synts->index=synt.index;
      synts->name=pool.dup(synt.name);
      synts->is_lang=synt.is_lang;
      synts->map_index=synt.map_index;
      synts->rules.len=synt.rules.len;

      ++synts;
     }
  }

  // rules: copied one-to-one, with argument elements re-pointed into this
  // object's atom/synt tables
  {
   ulen len=lang.getRuleCount();

   auto rules=createRules(len);

   auto atoms=getAtoms();
   auto synts=getSynts();

   // synthetic atoms start right after the original atoms
   ulen delta=original_atom_count;

   for(auto &rule : lang.getRules() )
     {
      rules->index=rule.index;
      rules->name=pool.dup(rule.name);
      rules->map_index=rule.map_index;
      rules->ret=&(synts[rule.ret->index]);

      // one extra slot for the trailing synthetic atom
      auto args=createElements(*rules,LenAdd(rule.args.len,1));

      for(auto element : rule.args )
        {
         // each source element is either an atom or a synt; point the copy
         // at the corresponding descriptor in this object's tables
         element.apply( [=] (const AtomDesc *atom) { args->ptr=&(atoms[atom->index]); } ,
                        [=] (const SyntDesc *synt) { args->ptr=&(synts[synt->index]); } );

         ++args;
        }

      // terminate the argument list with this rule's synthetic atom
      args->ptr=&(atoms[rule.index+delta]);

      ++rules;
     }
  }

  // synts.rules: slice the flat rule array into per-synt sub-ranges, using
  // the rules.len values set in the synts section above
  {
   auto synts=this->synts;

   auto *ptr=rules.ptr;

   for(; +synts ;++synts)
     {
      ulen len=synts->rules.len;

      synts->rules.ptr=ptr;

      ptr+=len;
     }
  }

  // release unused pool capacity
  pool.shrink_extra();
 }
// Builds the "top" language from a conditional language: every synt with
// kinds is split into one synt per kind (named "<synt>.<kind>"), kind-less
// synts are copied as-is, and the rules produced by makeRules() are gathered
// in `collector` and re-linked against the expanded synt table.  `map` maps
// a source synt index to the index of its first expanded synt.
TopLang::TopLang(const CondLang &clang)
 {
  Collector<RuleRec> collector;

  // map[source synt index] -> index of its first expanded synt
  DynArray<ulen> map(DoRaw(clang.getSyntCount()));

  // atoms: copied one-to-one; map_index mirrors the source index
  {
   auto range=clang.getAtoms();

   auto atoms=createAtoms(range.len);

   for(; +atoms ;++atoms,++range)
     {
      atoms->index=range->index;
      atoms->name=pool.dup(range->name);
      atoms->map_index=range->index;
     }
  }

  // synts: one entry per (synt,kind) pair, or a single entry for a
  // kind-less synt
  {
   auto range=clang.getSynts();

   // total expanded count: each synt contributes max(kinds.len, 1) entries
   ulen len=0;

   for(auto &synt : range ) len=LenAdd(len, Max<ulen>(synt.kinds.len,1) );

   auto synts=createSynts(len);

   ulen index=0;
   ulen map_index=0;

   for(; +range ;++range,++map_index)
     if( +range->kinds )
       {
        // synt with kinds: emit one expanded synt per kind
        map[map_index]=index;

        StrLen name=range->name;
        bool is_lang=range->is_lang;
        ulen desc_map_index=range->index;

        for(auto &kind : range->kinds )
          {
           // makeRules() appends this variant's rules to `collector` and
           // returns how many it produced; rules.ptr is fixed up later
           synts->rules.len=makeRules(collector,*range,kind.index);

           synts->index=index++;
           synts->name=pool.cat(name,StrLen(".",1),kind.name);
           synts->is_lang=is_lang;
           synts->map_index=desc_map_index;
           synts->kind_index=kind.index;

           ++synts;
          }
       }
     else
       {
        // kind-less synt: copied as a single entry
        map[map_index]=index;

        synts->rules.len=makeRules(collector,*range);

        synts->index=index++;
        synts->name=pool.dup(range->name);
        synts->is_lang=range->is_lang;
        synts->map_index=range->index;

        ++synts;
       }
  }

  // rules: materialize the collected rule records and re-point their
  // argument elements into this object's atom/synt tables
  {
   auto range=collector.flat();

   auto rules=createRules(range.len);

   auto atoms=getAtoms();
   auto synts=getSynts();

   ulen index=0;

   for(; +rules ;++rules,++range)
     {
      rules->index=index++;
      rules->name=range->name;
      rules->map_index=range->map_index;

      auto arange=Range_const(range->args);

      auto args=createElements(*rules,arange.len);

      for(; +args ;++args,++arange)
        {
         // atoms map directly; a synt argument resolves to the expanded
         // synt for its recorded kind: first-expanded-index + kind_index
         arange->element.apply( [=] (const CondLangBase::AtomDesc *atom) { args->ptr=&(atoms[atom->index]); } ,
                                [=,&map] (const CondLangBase::SyntDesc *synt) { args->ptr=&(synts[map[synt->index]+arange->kind_index]); } );
        }
     }
  }

  // synt.rules rules.ret: slice the flat rule array into per-synt
  // sub-ranges and set each rule's result synt back-pointer
  {
   auto synts=this->synts;

   auto *ptr=rules.ptr;

   for(; +synts ;++synts)
     {
      ulen len=synts->rules.len;

      synts->rules.ptr=ptr;

      for(auto &rule : Range(ptr,len) ) rule.ret=synts.ptr;

      ptr+=len;
     }
  }

  // release unused pool capacity
  pool.shrink_extra();
 }
int main( int argc, const char** argv) { try { if (argc <= 1) { printUsage( argc, argv); return 0; } unsigned int nofThreads = 0; bool doOptimize = false; int argidx = 1; for (; argidx < argc && argv[argidx][0] == '-'; ++argidx) { if (std::strcmp( argv[argidx], "-h") == 0) { printUsage( argc, argv); return 0; } else if (std::strcmp( argv[argidx], "-o") == 0) { doOptimize = true; } } if (argc - argidx < 4) { std::cerr << "ERROR too few arguments" << std::endl; printUsage( argc, argv); return 1; } else if (argc - argidx > 5) { std::cerr << "ERROR too many arguments" << std::endl; printUsage( argc, argv); return 1; } initRand(); g_errorBuffer = strus::createErrorBuffer_standard( 0, 1+nofThreads, NULL/*debug trace interface*/); if (!g_errorBuffer) { std::cerr << "construction of error buffer failed" << std::endl; return -1; } unsigned int nofFeatures = strus::utils::getUintValue( argv[ argidx+0]); unsigned int nofDocuments = strus::utils::getUintValue( argv[ argidx+1]); unsigned int documentSize = strus::utils::getUintValue( argv[ argidx+2]); unsigned int nofPatterns = strus::utils::getUintValue( argv[ argidx+3]); const char* outputpath = (argc - argidx > 4)? 
argv[ argidx+4] : 0; strus::local_ptr<strus::PatternMatcherInterface> pt( strus::createPatternMatcher_std( g_errorBuffer)); if (!pt.get()) throw std::runtime_error("failed to create pattern matcher"); strus::local_ptr<strus::PatternMatcherInstanceInterface> ptinst( pt->createInstance()); if (!ptinst.get()) throw std::runtime_error("failed to create pattern matcher instance"); GlobalContext ctx( nofFeatures, nofPatterns); std::vector<strus::utils::Document> docs = createRandomDocuments( nofDocuments, documentSize, nofFeatures); std::vector<TreeNode*> treear = createRandomTrees( &ctx, docs); KeyTokenMap keyTokenMap; fillKeyTokens( keyTokenMap, treear); createRules( ptinst.get(), &ctx, treear); if (doOptimize) { ptinst->compile(); } if (g_errorBuffer->hasError()) { throw std::runtime_error( "error creating automaton for evaluating rules"); } #ifdef STRUS_LOWLEVEL_DEBUG std::cout << "patterns processed" << std::endl; std::vector<TreeNode*>::const_iterator ti = treear.begin(), te = treear.end(); for (; ti != te; ++ti) { std::cout << (*ti)->tostring() << std::endl; } #endif std::cerr << "starting rule evaluation ..." 
<< std::endl; std::map<std::string,double> stats; unsigned int totalNofMatches = processDocuments( ptinst.get(), keyTokenMap, treear, docs, stats, outputpath); unsigned int totalNofDocs = docs.size(); if (g_errorBuffer->hasError()) { throw std::runtime_error("uncaugth exception"); } std::cerr << "OK" << std::endl; std::cerr << "processed " << nofPatterns << " patterns on " << totalNofDocs << " documents with total " << totalNofMatches << " matches" << std::endl; std::cerr << "statistiscs:" << std::endl; std::map<std::string,double>::const_iterator gi = stats.begin(), ge = stats.end(); for (; gi != ge; ++gi) { int value; if (gi->first == "nofTriggersAvgActive") { value = (int)(gi->second/totalNofDocs + 0.5); } else { value = (int)(gi->second + 0.5); } std::cerr << "\t" << gi->first << ": " << value << std::endl; } delete g_errorBuffer; return 0; } catch (const std::runtime_error& err) { if (g_errorBuffer && g_errorBuffer->hasError()) { std::cerr << "error processing pattern matching: " << g_errorBuffer->fetchError() << " (" << err.what() << ")" << std::endl; } else { std::cerr << "error processing pattern matching: " << err.what() << std::endl; } } catch (const std::bad_alloc&) { std::cerr << "out of memory processing pattern matching" << std::endl; } delete g_errorBuffer; return -1; }
// Creates a PluralRules object from the default plural rule description.
// Returns NULL and sets `status` on failure (behavior of createRules()).
PluralRules* U_EXPORT2
PluralRules::createDefaultRules(UErrorCode& status) {
    // Wrap the NUL-terminated invariant string PLURAL_DEFAULT_RULE in a
    // read-only aliasing UnicodeString (length -1 = determined by terminator).
    UnicodeString defaultRuleDescription(TRUE, PLURAL_DEFAULT_RULE, -1);
    return createRules(defaultRuleDescription, status);
}
// Builds the "bottom" language as a structural copy of the conditional
// language `clang`: atoms, synts and rules are copied one-to-one (conditions
// dropped), then cross-links (synt -> rule ranges, rule -> result synt and
// argument elements) are re-pointed into this object's own tables.
// All descriptors live in `pool`.
BottomLang::BottomLang(const CondLang &clang)
 {
  // atoms: copied one-to-one
  {
   auto range=clang.getAtoms();

   auto atoms=createAtoms(range.len);

   for(; +atoms ;++atoms,++range)
     {
      atoms->index=range->index;
      atoms->name=pool.dup(range->name);
     }
  }

  // synts: copied one-to-one; their rule ranges are filled in below
  {
   auto range=clang.getSynts();

   auto synts=createSynts(range.len);

   for(; +synts ;++synts,++range)
     {
      synts->index=range->index;
      synts->name=pool.dup(range->name);
      synts->is_lang=range->is_lang;
     }
  }

  // rules: copied one-to-one; ret/args are linked up below
  {
   auto range=clang.getRules();

   auto rules=createRules(range.len);

   for(; +rules ;++rules,++range)
     {
      rules->index=range->index;
      rules->name=pool.dup(range->name);
     }
  }

  // synts.rules: slice the flat rule array into per-synt sub-ranges using
  // the source synts' rule counts
  {
   auto range=clang.getSynts();

   auto synts=this->synts;

   ulen off=0;

   auto rules=getRules();

   for(; +synts ;++synts,++range)
     {
      ulen len=range->rules.len;

      synts->rules=rules.part(off,len);

      off+=len;
     }
  }

  // rules.ret rules.args: set each rule's result synt and re-point its
  // argument elements into this object's atom/synt tables
  {
   auto range=clang.getRules();

   auto rules=this->rules;

   auto atoms=getAtoms();
   auto synts=getSynts();

   for(; +rules ;++rules,++range)
     {
      rules->ret=&(synts[range->ret->index]);

      auto arange=range->args;

      auto args=createElements(*rules,arange.len);

      for(; +args ;++args,++arange)
        {
         // each source element is either an atom or a synt reference
         arange->apply( [=] (const CondLangBase::AtomDesc *atom) { args->ptr=&(atoms[atom->index]); } ,
                        [=] (const CondLangBase::SyntDesc *synt) { args->ptr=&(synts[synt->index]); } );
        }
     }
  }

  // release unused pool capacity
  pool.shrink_extra();
 }