// Runs the perft test suite from "perftsuite.epd".
// Each line holds a FEN position followed by ";"-separated expected node
// counts, one field per depth (e.g. "D1 20 ;D2 400 ;...").  For every
// position the perft node count is compared against the expected value at
// increasing depth until the counts disagree or the expected count exceeds
// 100,000,000 (skipped as too slow).  Progress is printed to stdout.
CPerftSuite::CPerftSuite(void)
{
    std::ifstream params;
    params.open("perftsuite.epd", std::ifstream::in);
    if (!params.is_open())
        return;

    std::string line;
    // FIX: loop on getline() itself; the old `while (!eof())` pattern
    // processed a stale/empty line once more after the last read.
    while (std::getline(params, line)) {
        // FIX: guard against empty lines — `line[0]` on an empty
        // std::string is undefined behavior.
        if (line.empty() || line[0] == '#')
            continue;

        boost::char_separator<char> sep(";");
        boost::char_separator<char> sep2(" ");
        boost::tokenizer<boost::char_separator<char>> toks(line, sep);

        // 1st tok is fen
        auto tok = toks.begin();
        if (tok == toks.end())
            continue;
        const std::string fen = *tok++;
        CBoard b;
        b.set_fen_position(fen);
        std::cout << b.fen() << " ";

        int depth = 1;
        while (tok != toks.end()) {
            // Each remaining token looks like "D<depth> <expected-nodes>";
            // only the second field (the node count) is needed.
            boost::tokenizer<boost::char_separator<char>> toks2(*tok, sep2);
            auto tok2 = toks2.begin();
            tok2++;
            uint64_t i = std::stoull(*tok2);
            if (i > 100000000)
                break; // takes too long

            CPerft p(b);
            auto res = p.DoPerft(depth);
            if (res.nodes == i) {
                std::cout << ".";
            } else {
                std::cout << "WRONG (depth " << depth << ": " << res.nodes
                          << " != expected " << i << ")" << std::endl;
                ASSERT(res.nodes == i);
                break;
            }
            depth++;
            tok++;
        }
    }
    std::cout << "completed";
}
std::vector<int> ConfigurationDialog::getBackgroundChannels() { std::string bkgStr = m_bkgChanEntry.get_text(); boost::char_separator<char> sep2(","); boost::tokenizer< boost::char_separator<char> > tokens(bkgStr,sep2); std::vector<int> bkgChans; for(const auto& t : tokens) bkgChans.push_back(boost::lexical_cast<int>(t)); return bkgChans; }
std::vector<int> ConfigurationDialog::getContaminatedChannels() { std::string contStr = m_contChanEntry.get_text(); boost::char_separator<char> sep2(","); boost::tokenizer< boost::char_separator<char> > tokens(contStr,sep2); std::vector<int> contChans; for(const auto& t : tokens) contChans.push_back(boost::lexical_cast<int>(t)); return contChans; }
// Converts a date string of the form "dd.mm.yyyy" into the numeric value
// produced by date2double(year, month, day).
double strDate2double(const std::string &s)
{
    const size_t firstDot  = s.find('.');
    const size_t secondDot = s.find('.', firstDot + 1);

    const int day   = atoi(s.substr(0, firstDot).c_str());
    const int month = atoi(s.substr(firstDot + 1, secondDot - (firstDot + 1)).c_str());
    const int year  = atoi(s.substr(secondDot + 1, s.length() - (secondDot + 1)).c_str());

    return date2double(year, month, day);
}
// Converts a date string of the form "dd.mm.yyyy" into date2int(y, m, d).
// Input longer than 10 characters (e.g. with a trailing time component) is
// truncated to the first 10 characters before parsing.
int strDate2int(const std::string &s)
{
    std::string str(s);
    if (s.length() > 10)
        str = s.substr(0, 10);

    const std::size_t sep = str.find(".", 0);
    const int d = atoi(str.substr(0, sep).c_str());
    const std::size_t sep2 = str.find(".", sep + 1);
    const int m = atoi(str.substr(sep + 1, sep2 - (sep + 1)).c_str());
    // FIX: the year-substring length was computed from the *untruncated*
    // input (s.length()); use the truncated string so the bound is
    // consistent.  (Behavior was saved only by substr's clamping.)
    const int y = atoi(str.substr(sep2 + 1, str.length() - (sep2 + 1)).c_str());
    return date2int(y, m, d);
}
static std::vector<mm::mastermind_t::remote_t> parse_remotes(const std::string &remotes) { typedef boost::char_separator<char> separator_t; typedef boost::tokenizer<separator_t> tokenizer_t; std::vector<mm::mastermind_t::remote_t> result; separator_t sep1(","); tokenizer_t tok1(remotes, sep1); separator_t sep2(":"); for (auto it = tok1.begin(), end = tok1.end(); it != end; ++it) { tokenizer_t tok2(*it, sep2); auto jt = tok2.begin(); if (tok2.end() == jt) { throw std::runtime_error("remotes are malformed"); } auto host = *jt++; uint16_t port = 10053; if (tok2.end() != jt) { port = boost::lexical_cast<uint16_t>(*jt++); } if (tok2.end() != jt) { throw std::runtime_error("remotes are malformed"); } result.emplace_back(std::make_pair(std::move(host), port)); } return result; }
// Handler for the "add" button: builds a SynapseCollection from the current
// dialog state and registers it with the analysis toolkit and the list view.
void ConfigurationDialog::on_add_button_clicked()
{
  // Collect the indices of all channel checkboxes that are ticked.
  std::vector<int> chans;
  for(int i = 0; i < m_synapseChannels.size(); i++){
    if(m_synapseChannels.at(i)->get_active()) chans.push_back(i);
  }
  // NOTE(review): raw new — ownership appears to pass to m_toolkit via
  // addSynapseDefinition() below; confirm the toolkit deletes it.
  SynapseCollection* sc = new SynapseCollection(chans);
  // The single threshold entry is interpreted either as an integer overlap
  // threshold or as a floating-point distance threshold, depending on mode.
  if(m_overlapButton.get_active()){
    sc->setUseOverlap(true);
    sc->setOverlapThreshold((uint32_t)boost::lexical_cast<int>(m_thresholdEntry.get_text()));
  }
  else{
    sc->setUseOverlap(false);
    sc->setDistanceThreshold(boost::lexical_cast<double>(m_thresholdEntry.get_text()));
  }
  if(m_reqAllButton.get_active()) sc->setRequireAll(true);
  else{
    sc->setRequireAll(false);
    // Requirements are entered as brace-wrapped groups of comma-separated
    // channel indices (e.g. "{0,1}{1,2}"); '{' and '}' only delimit groups.
    boost::char_separator<char> sep("{}");
    std::string reqs = m_requirementsEntry.get_text();
    boost::tokenizer< boost::char_separator<char> > tokens(reqs,sep);
    for(const auto& t : tokens){
      boost::char_separator<char> sep2(",");
      boost::tokenizer< boost::char_separator<char> > tokens2(t,sep2);
      std::vector<int> reqChans;
      for(const auto& t2 : tokens2) reqChans.push_back(boost::lexical_cast<int>(t2));
      sc->addRequiredColocalization(reqChans);
    }
  }
  sc->setDescription(m_descriptionEntry.get_text());
  m_toolkit->addSynapseDefinition(sc);
  // Append a row to the tree model so the new definition shows up in the UI.
  Gtk::TreeModel::Row row = *(m_refTreeModel2->append());
  row[m_indexColumn] = m_scIndex;
  m_scIndex++;
  row[m_descriptionColumn] = sc->description();
}
void length_check::inspect( const string & library_name, const path & full_path, // ex: c:/foo/boost/filesystem/path.hpp const string & contents) // contents of file to be inspected { if (contents.find("hpxinspect:" "length") != string::npos) return; string pathname = full_path.string(); if (pathname.find("CMakeLists.txt") != string::npos) return; //Temporary, until we are ready to format documentation files in this limitation. if (library_name.find(".qbk") != string::npos) return; string total, linenum; long errors = 0, currline = 0; size_t p = 0; vector<string> someline, lineorder; char_separator<char> sep("\n", "", boost::keep_empty_tokens); tokenizer<char_separator<char>> tokens(contents, sep); for (const auto& t : tokens) { size_t rend = t.find_first_of("\r"), size = t.size(); if (rend == size - 1) { someline.push_back(t); } else { char_separator<char> sep2("\r", "", boost::keep_empty_tokens); tokenizer<char_separator<char>> tokens2(t, sep2); for (const auto& u : tokens2) { someline.push_back(u); } } } while (p < someline.size()) { currline++; size_t rend = someline[p].find_last_of("\r"); bool check_not = 0; boost::regex error_note, http_note; error_note = "\\s*#\\s*error"; http_note = "https?://"; boost::smatch m; if (boost::regex_search(someline[p], m, error_note)) //#error { if (m.position() == 0) { check_not = 1; } } else if (boost::regex_search(someline[p], m, http_note)) //http::// { check_not = 1; } size_t size = someline[p].size(); if (size > limit && check_not == 0) { errors++; linenum = to_string(currline); lineorder.push_back(linenum); } p++; } p = 0; while (p < lineorder.size()) { total += lineorder[p]; if (p < lineorder.size() - 1) { total += ", "; } p++; } if (errors > 0) { string errored = "Character Limit*: " + total; error(library_name, full_path, errored); ++m_files_with_errors; } }
// Scans 'contents' line by line and reports every line that ends in
// trailing whitespace.  Files can opt out with an
// "hpxinspect:endlinewhitespace" marker (the literal is split so this
// checker does not flag its own source).
void whitespace_check::inspect(
  const string & library_name,
  const path & full_path, // ex: c:/foo/boost/filesystem/path.hpp
  const string & contents) // contents of file to be inspected
{
  if (contents.find("hpxinspect:" "endlinewhitespace") != string::npos) return;
  string whitespace(" \t\f\v\r\n"), total, linenum;
  long errors = 0, currline = 0;
  size_t p = 0, extend = 0;
  vector<string> someline, lineorder;
  // Split into physical lines on '\n' (keeping empty tokens so blank lines
  // still advance the line counter); a '\r' anywhere but line-end is
  // treated as an additional line break.
  char_separator<char> sep("\n", "", boost::keep_empty_tokens);
  tokenizer<char_separator<char>> tokens(contents, sep);
  for (const auto& t : tokens) {
    size_t rend = t.find_first_of("\r"), size = t.size();
    if (rend == size - 1)
    {
      someline.push_back(t);
    }
    else
    {
      char_separator<char> sep2("\r", "", boost::keep_empty_tokens);
      tokenizer<char_separator<char>> tokens2(t, sep2);
      for (const auto& u : tokens2) {
        someline.push_back(u);
      }
    }
  }
  while (p < someline.size())
  {
    currline++;
    size_t rend = someline[p].find_last_of("\r");
    size_t found = someline[p].find_last_not_of(whitespace);
    // A trailing '\r' means the last "real" character may legitimately sit
    // one position further from the end, so widen the allowed tail by one.
    if (rend != string::npos)
    {
      extend = 2;
    }
    else
    {
      extend = 1;
    }
    size_t size = someline[p].size();
    // Flag when non-whitespace ends before the permitted tail, or when the
    // line is all whitespace but longer than a single character.
    // NOTE(review): unsigned arithmetic — when size < extend the
    // subtraction wraps; npos comparisons keep this from misfiring for
    // empty lines, but the ordering of these checks is load-bearing.
    if (found < size - extend || (found == someline[p].npos && size > 1))
    {
      errors++;
      linenum = to_string(currline);
      lineorder.push_back(linenum);
    }
    p++;
  }
  // Join the offending line numbers into a comma-separated list of links.
  p = 0;
  while (p < lineorder.size())
  {
    total += linelink(full_path, lineorder[p]);
    //linelink is located in function_hyper.hpp
    if (p < lineorder.size() - 1)
    {
      total += ", ";
    }
    p++;
  }
  if (errors > 0)
  {
    string errored = "*Endline Whitespace*: " + total;
    error(library_name, full_path, errored);
    ++m_files_with_errors;
  }
}
// Runs a full parameter scan: reads the scan configuration file, loads every
// input image series, then analyzes each series once per combination of the
// configured parameter values, fanning the work out to worker threads
// (throttled to m_maxThreads).  Progress text is mirrored into the UI.
void Scanner::run()
{
  // Candidate values for each scanned parameter, read from the config file.
  std::vector<double> kernelWidth;
  std::vector<double> windowSize;
  std::vector<double> peakThreshold;
  std::vector<double> floorThreshold;
  std::vector<int> signalFindingIterations;
  std::vector<double> reclusterThreshold;
  std::ostringstream dispstr; // accumulates the text shown in the UI buffer
  dispstr << "--------------------SCAN PARAMETERS:\n--------------------\n\n";
  //----- Read configuration file -----
  std::ifstream phil(m_configFile);
  if(!(phil.is_open())){
    hide();
    return;
  }
  std::string line;
  boost::char_separator<char> sep(":");
  // Each config line has the form "<parameter name>:<comma-separated values>".
  while(getline(phil,line)){
    dispstr << line << "\n";
    boost::tokenizer< boost::char_separator<char> > tokens(line,sep);
    boost::tokenizer< boost::char_separator<char> >::iterator tit = tokens.begin();
    std::string param = *tit;
    tit++;
    boost::char_separator<char> sep2(",");
    boost::tokenizer< boost::char_separator<char> > tokens2(*tit,sep2);
    if(param.compare("kernel width") == 0){
      for(const auto& t2 : tokens2) kernelWidth.push_back(boost::lexical_cast<double>(t2));
    }
    else if(param.compare("window size") == 0){
      for(const auto& t2 : tokens2) windowSize.push_back(boost::lexical_cast<double>(t2));
    }
    else if(param.compare("peak threshold") == 0){
      for(const auto& t2 : tokens2) peakThreshold.push_back(boost::lexical_cast<double>(t2));
    }
    else if(param.compare("floor threshold") == 0){
      for(const auto& t2 : tokens2) floorThreshold.push_back(boost::lexical_cast<double>(t2));
    }
    else if(param.compare("signal finding iterations") == 0){
      for(const auto& t2 : tokens2) signalFindingIterations.push_back(boost::lexical_cast<int>(t2));
    }
    else if(param.compare("recluster threshold") == 0){
      for(const auto& t2 : tokens2) reclusterThreshold.push_back(boost::lexical_cast<double>(t2));
    }
  }
  phil.close();
  //-----------------------------------
  dispstr << "\n--------------------\nINPUT FILES:\n--------------------\n\n";
  //----- Load images -----
  int tot_tasks = 0;
  std::vector<int> series_ntasks; // tasks (positions x timepoints) per series
  std::vector< std::vector<ImRecord*>* > templateRecords; // template records per input file
  m_fileManager = new FileManager();
  for(std::vector<std::string>::iterator fit = m_inputFiles.begin(); fit != m_inputFiles.end(); fit++){
    dispstr << *fit << "\n";
    ImageAnalysisToolkit tmp;
    std::vector<ImRecord*>* recs = new std::vector<ImRecord*>();
    recs->clear();
    FileManager::input_file infile = FileConverter::read(m_fileManager,&tmp,recs,*fit,nia::niaVersion);
    templateRecords.push_back(recs);
    int itasks = infile.np * infile.nt; // one task per position/timepoint pair
    series_ntasks.push_back(itasks);
    tot_tasks += itasks;
    m_fileManager->addInputFile(infile);
  }
  m_textBuffer->set_text(dispstr.str());
  show_all_children();
  // Every parameter combination re-runs every task.
  tot_tasks *= kernelWidth.size()*windowSize.size()*peakThreshold.size()*floorThreshold.size()*reclusterThreshold.size()*signalFindingIterations.size();
  m_progressWindow.launch(tot_tasks);
  m_fileManager->reset();
  ImSeries* m_data = m_fileManager->loadNext();
  int seriesID = 0;
  int scanID = 0; // unique index per (series, parameter combination)
  while(m_data){
    // One scan per point of the 6-dimensional parameter grid.
    for(std::vector<double>::iterator kwit = kernelWidth.begin(); kwit != kernelWidth.end(); kwit++){
      for(std::vector<double>::iterator wsit = windowSize.begin(); wsit != windowSize.end(); wsit++){
        for(std::vector<double>::iterator ptit = peakThreshold.begin(); ptit != peakThreshold.end(); ptit++){
          for(std::vector<double>::iterator ftit = floorThreshold.begin(); ftit != floorThreshold.end(); ftit++){
            for(std::vector<double>::iterator rtit = reclusterThreshold.begin(); rtit != reclusterThreshold.end(); rtit++){
              for(std::vector<int>::iterator sfit = signalFindingIterations.begin(); sfit != signalFindingIterations.end(); sfit++){
                // NOTE(review): raw new — the toolkit/record objects appear
                // to be owned by the analysis machinery after registration;
                // confirm who deletes them.
                ImageAnalysisToolkit* toolkit = new ImageAnalysisToolkit();
                toolkit->setSaturationThreshold(65534);
                toolkit->setKernelWidth(*kwit);
                toolkit->setLocalWindow(*wsit);
                toolkit->setPeakThreshold(*ptit);
                toolkit->setFloorThreshold(*ftit);
                toolkit->setReclusterThreshold(*rtit);
                toolkit->setMaxSignalFindingIterations(*sfit);
                // Fresh result records for this scan, cloned empty from the
                // per-series templates.
                std::vector<ImRecord*>* scanRecs = new std::vector<ImRecord*>();
                scanRecs->clear();
                for(int i = 0; i < series_ntasks[seriesID]; i++) scanRecs->push_back(templateRecords[seriesID]->at(i)->emptyCopy());
                m_records.push_back(scanRecs);
                for(int i = 0; i < scanRecs->at(0)->nSynapseCollections(); i++) toolkit->addSynapseDefinition(scanRecs->at(0)->getSynapseCollection(i)->emptyCopy());
                m_ntasks.push_back(series_ntasks[seriesID]);
                // Spawn one analysis thread per position/timepoint; stall
                // while the pool is saturated.
                for(int ps = 0; ps < m_data->npos(); ps++){
                  for(int tm = 0; tm < m_data->nt(); tm++){
                    while(m_activeThreads == m_maxThreads) boost::this_thread::sleep(boost::posix_time::millisec(60000));
                    m_mtx.lock();
                    m_threadpool.create_thread(boost::bind(&Scanner::run_analysis, this, m_data, toolkit, scanID, ps, tm, seriesID));
                    m_activeThreads++;
                    boost::this_thread::sleep(boost::posix_time::millisec(1000));
                    m_mtx.unlock();
                  }
                }
                boost::this_thread::sleep(boost::posix_time::millisec(1000));
                scanID++;
              }
            }
          }
        }
      }
    }
    // Wait for all workers of this series before moving to the next image.
    m_threadpool.join_all();
    nia::nout << "Finished parameter scan for sample " << m_fileManager->getName(seriesID) << "\n";
    delete m_data;
    m_data = m_fileManager->loadNext();
    seriesID++;
  }
  delete m_fileManager;
  hide();
  //-----------------------
}
// Parses an L-system definition file and returns the string produced after
// 'iteration' rewriting steps.  Results are cached in lsystem_, so repeated
// calls with the same file only parse once.  Handles deterministic (D0L)
// systems and stochastic systems (productions annotated with a probability).
// NOTE(review): this function reached review with comment boundaries and
// brace placement mangled by formatting loss; the commented-out lines and
// two closing braces below were reconstructed to the reading that keeps the
// function well-formed — confirm against the upstream source.
// (German comments translated to English.)
string Lsystem::parse_lsystem(char const* filename, int iteration) {
    // If the file has not been parsed yet:
    if(lsystem_.empty() == true){
        // Read the file.
        file_iterator<> first = load(filename);
        file_iterator<> last = first.make_end();
        typedef char char_t;
        typedef file_iterator <char_t> iterator_t;
        string input(first, last);
        // std::cout << "Eingabe:" << "\n" << input << "\n" << std::endl;
        lsystem_grammer lsys;
        // Parse run (Boost.Spirit classic).
        parse_info<iterator_t> pi = parse(first, last, lsys, space_p);
        if (pi.hit) {
            if (pi.full) {
                std::cout << "LSystem: file reading successfully" << std::endl;
                std::cout << "LSystem: " << pi.length << "characters read" << std::endl;
                std::cout << "LSystem: parsing ..." << std::endl;
                // std::cout << "Vektor:" << gl_lsys_file.size() << "\n" << std::endl;
                // Split the input.
                typedef boost::tokenizer<boost::char_separator<char> > tokenizer;
                // Axiom (start symbol).
                //string s1 = get_axiom();
                //boost::char_separator<char> sep1(": ");
                //tokenizer tok(s1, sep1);
                //tokenizer::iterator it = tok.begin();
                //string start = *(++it);
                boost::char_separator<char> sep1(": ");
                string start = get_axiom();
                std::string ret;
                // Production rule(s).
                vector<string> s2 = get_production();
                int last_pos = 0;
                int left = 1;
                // Random numbers up to 100.
                // srand(j * time(0));
                //int prob = rand() % 100 + 1;
                // std::cout << "ZZ:" << prob << std::endl;
                // std::cout<<s2.size()<<std::endl;
                // Maps each percent point 0..99 to the production chosen at
                // that probability (used by the stochastic path below).
                std::map<int, string> mapProbToProd;
                int zaehler=0; // running slot counter ("zaehler" = counter)
                for (int i = 0; i < s2.size(); i++) {
                    int position;
                    tokenizer tok2(s2[i], sep1);
                    tokenizer::iterator it = tok2.begin();
                    string temp = *(++it); // everything after the colon
                    boost::char_separator<char> sep2("=");
                    tokenizer tok3(temp, sep2);
                    tokenizer::iterator it2 = tok3.begin();
                    string temp1 = *(it2); // everything before the '='
                    string temp2 = *(++it2); // everything after the '='
                    if (temp2.find(",") == string::npos) { // D0L
                        stochastic_ = false;
                        // Replacement.
                        boost::replace_all(start, temp1, temp2);
                        lsystem_.push_back(start);
                        lsystem_.push_back(start);
                        // D0L where several productions exist.
                    } else if (temp2.find(",") != string::npos) { // stochastic L-systems
                        stochastic_ = true;
                        boost::char_separator<char> sep3(",");
                        tokenizer tok4(temp2, sep3);
                        tokenizer::iterator it3 = tok4.begin();
                        string sto = *(it3); // everything before the comma (probability)
                        string temp3 = *(++it3); // everything after the comma (rule)
                        int cur_pos = atof(sto.data()) * 100; // current probability, in percent
                        int right = last_pos + cur_pos; // right edge of the range
                        // Fill cur_pos percent-slots with this rule.
                        for(int k = 0; k < cur_pos ; ++k ) {
                            mapProbToProd.insert ( std::pair<int, string>(zaehler,temp3));
                            ++zaehler;
                        }
                        // }
                    }
                }
                // Apply the rewriting for the requested number of iterations
                // (stochastic path: pick a production per 'F' by random slot).
                for (int j = 1; j <= iteration * 2; j++) {
                    std::string result;
                    // result.reserve(14*start.size());
                    for(int i = 0 ; i < start.size() ; ++i) {
                        srand(i*time(0));
                        int k = rand() % 100;
                        if(start[i]=='F')
                            result += mapProbToProd[k];
                        else
                            result += start[i];
                    }
                    lsystem_.push_back(result);
                    start=result;
                }
                std::cout << "LSystem: parsing finished\n\n";
                //}
                if (stochastic_ == false) {
                    // std::cout << "Ergebnis: " << lsystem_[(s2.size()) * iteration - 1] << std::endl;
                    return lsystem_[(s2.size() * iteration - 1)];
                } else if (stochastic_ == true) {
                    // std::cout << "Ergebnis: " << lsystem_[iteration - 1] << std::endl;
                    return lsystem_[iteration - 1];
                }
            } else {
                std::cout << "LSystem: ERROR parsing data partially" << std::endl;
                std::cout << "LSystem: ERROR " << pi.length << "characters parsed \n\n" << std::endl;
            }
        }
        else
            std::cout << "LSystem: ERROR parsing failed; stopped at '" << pi.stop << "'\n\n" << std::endl;
    }
    // Prevent re-parsing: serve the request from the cache.
    else if(iteration <= lsystem_.size()) {
        if (stochastic_ == false) {
            // std::cout << "Ergebnis: " << lsystem_[(get_production().size()) * iteration - 1] << std::endl;
            return lsystem_[(get_production().size() * iteration - 1)];
        } else if (stochastic_ == true) {
            // std::cout << "Ergebnis: " << lsystem_[iteration - 1] << std::endl;
            return lsystem_[iteration - 1];
        }
    }
    // Grow the cache: drop old results and re-parse for the larger count.
    else if(iteration > lsystem_.size()){
        erase_old();
        parse_lsystem(filename, iteration);
        if (stochastic_ == false) {
            // std::cout << "Ergebnis: " << lsystem_[(get_production().size()) * iteration - 1] << std::endl;
            return lsystem_[(get_production().size() * iteration - 1)];
        } else if (stochastic_ == true) {
            // std::cout << "Ergebnis: " << lsystem_[iteration - 1] << std::endl;
            return lsystem_[iteration - 1];
        }
    }
    return std::string();
}