Example #1
0
int main(int argc, char** argv)
{
  std::string str1("good thing");
  const char* str2("bad thing");
  
  utils::piece pie1(str1);
  utils::piece pie2(str2);
  
  std::cout << "piece1: " << pie1 << " base: " << (void*) pie1.c_str() << std::endl
	    << "piece2: " << pie2 << " base: " << (void*) pie2.c_str() << std::endl;
  
  std::cout << "piece1 substr: " << pie1.substr(5) << " base: " << (void*) pie1.substr(5).c_str() << std::endl
	    << "piece2 substr: " << pie2.substr(4) << " base: " << (void*) pie2.substr(4).c_str() << std::endl
	    << "equal? " << (pie1.substr(5) == pie2.substr(4)) << std::endl;
  
  typedef boost::tokenizer<utils::space_separator, utils::piece::const_iterator, utils::piece> tokenizer_type;
  
  tokenizer_type tokens1(pie1);
  tokenizer_type tokens2(pie2);
  
  for (tokenizer_type::iterator iter = tokens1.begin(); iter != tokens1.end(); ++ iter)
    std::cout << "token1: " << *iter << " base: " << (void*) (*iter).c_str() << " cast: " << static_cast<std::string>(*iter) << std::endl;
  for (tokenizer_type::iterator iter = tokens2.begin(); iter != tokens2.end(); ++ iter)
    std::cout << "token2: " << *iter << " base: " << (void*) (*iter).c_str() << " cast: " << static_cast<std::string>(*iter) << std::endl;

  const char* str3 = "Good";
  const char* str4 = "GOOD";
  const char* str5 = "Good-bye";
  
  std::cout << "less: " << (utils::piece(str3) < str4) << std::endl
	    << "greater: " << (utils::piece(str3) > str4) << std::endl
	    << "equal: " << (utils::piece(str3) == str4) << std::endl;
  
  std::cout << "less: " << (utils::ipiece(str3) < str4) << std::endl
	    << "greater: " << (utils::ipiece(str3) > str4) << std::endl
	    << "equal: " << (utils::ipiece(str3) == str4) << std::endl;
  
  std::cout << "less: " << (utils::ipiece(str3) < str5) << std::endl
	    << "greater: " << (utils::ipiece(str3) > str5) << std::endl
	    << "equal: " << (utils::ipiece(str3) == str5) << std::endl;
  
  std::cout << "cast: " << boost::lexical_cast<int>(utils::ipiece("500")) << std::endl;
}
void ConfigurationDialog::on_add_button_clicked()
{
  // Build a SynapseCollection from the dialog's current widget state and
  // register it with the toolkit and the definition tree view.
  std::vector<int> chans;
  // Range-for avoids the signed/unsigned mismatch of `int i < size()`.
  int chanIndex = 0;
  for(const auto& btn : m_synapseChannels){
    if(btn->get_active()) chans.push_back(chanIndex);
    chanIndex++;
  }
  SynapseCollection* sc = new SynapseCollection(chans);
  if(m_overlapButton.get_active()){
    sc->setUseOverlap(true);
    // NOTE(review): lexical_cast throws bad_lexical_cast on non-numeric
    // user input; confirm callers tolerate that or validate the entry first.
    sc->setOverlapThreshold((uint32_t)boost::lexical_cast<int>(m_thresholdEntry.get_text()));
  }
  else{
    sc->setUseOverlap(false);
    sc->setDistanceThreshold(boost::lexical_cast<double>(m_thresholdEntry.get_text()));
  }
  if(m_reqAllButton.get_active()) sc->setRequireAll(true);
  else{
    sc->setRequireAll(false);
    // Requirements are entered as "{a,b}{c,d}": each braced group lists a
    // set of channels that must colocalize.
    boost::char_separator<char> sep("{}");
    std::string reqs = m_requirementsEntry.get_text();
    boost::tokenizer< boost::char_separator<char> > tokens(reqs,sep);
    for(const auto& t : tokens){
      boost::char_separator<char> sep2(",");
      boost::tokenizer< boost::char_separator<char> > tokens2(t,sep2);
      std::vector<int> reqChans;
      for(const auto& t2 : tokens2) reqChans.push_back(boost::lexical_cast<int>(t2));
      sc->addRequiredColocalization(reqChans);
    }
  }
  sc->setDescription(m_descriptionEntry.get_text());
  // NOTE(review): ownership of `sc` appears to transfer to the toolkit here;
  // verify addSynapseDefinition() eventually deletes it, else this leaks.
  m_toolkit->addSynapseDefinition(sc);
  Gtk::TreeModel::Row row = *(m_refTreeModel2->append());
  row[m_indexColumn] = m_scIndex;
  m_scIndex++;
  row[m_descriptionColumn] = sc->description();  
}
Example #3
0
    // Preprocess `code` with simplecpp, tokenize it, and run the sizeof
    // checks on the result; findings accumulate in `errout`.
    void checkP(const char code[]) {
        // Clear the error buffer..
        errout.str("");

        // Raw tokens..
        std::vector<std::string> files;
        files.push_back("test.cpp");
        std::istringstream istr(code);
        const simplecpp::TokenList tokens1(istr, files, files[0]);

        // Preprocess..
        simplecpp::TokenList tokens2(files);
        std::map<std::string, simplecpp::TokenList*> filedata;
        simplecpp::preprocess(tokens2, tokens1, files, filedata, simplecpp::DUI());
        // preprocess() heap-allocates a TokenList per file into `filedata`;
        // release them here or every test invocation leaks.
        simplecpp::cleanup(filedata);

        // Tokenize..
        Tokenizer tokenizer(&settings, this);
        tokenizer.createTokens(&tokens2);
        tokenizer.simplifyTokens1("");

        // Check...
        CheckSizeof checkSizeof(&tokenizer, &settings, this);
        checkSizeof.runChecks(&tokenizer, &settings, this);
    }
Example #4
0
void length_check::inspect(
    const string & library_name,
    const path & full_path,   // ex: c:/foo/boost/filesystem/path.hpp
    const string & contents)     // contents of file to be inspected
{
    if (contents.find("hpxinspect:" "length") != string::npos)
        return;
    string pathname = full_path.string();
    if (pathname.find("CMakeLists.txt") != string::npos)
        return;
    //Temporary, until we are ready to format documentation files in this limitation.
    if (library_name.find(".qbk") != string::npos)
        return;

    string total, linenum;
    long errors = 0, currline = 0;
    size_t p = 0;
    vector<string> someline, lineorder;

    char_separator<char> sep("\n", "", boost::keep_empty_tokens);
    tokenizer<char_separator<char>> tokens(contents, sep);
    for (const auto& t : tokens) {
        size_t rend = t.find_first_of("\r"), size = t.size();
        if (rend == size - 1)
        {
            someline.push_back(t);
        }
        else
        {
            char_separator<char> sep2("\r", "", boost::keep_empty_tokens);
            tokenizer<char_separator<char>> tokens2(t, sep2);
            for (const auto& u : tokens2) {
                someline.push_back(u);
            }
        }
    }
    while (p < someline.size())
    {
        currline++;
        size_t rend = someline[p].find_last_of("\r");
        bool check_not = 0;
        boost::regex error_note, http_note;
        error_note = "\\s*#\\s*error";
        http_note = "https?://";
        boost::smatch m;
        if (boost::regex_search(someline[p], m, error_note)) //#error
        {
            if (m.position() == 0)
            {
                check_not = 1;
            }
        }
        else if (boost::regex_search(someline[p], m, http_note)) //http:://
        {
            check_not = 1;
        }
        size_t size = someline[p].size();
        if (size > limit && check_not == 0)
        {
            errors++;
            linenum = to_string(currline);
            lineorder.push_back(linenum);
        }
        p++;
    }
    p = 0;
    while (p < lineorder.size())
    {
        total += lineorder[p];
        if (p < lineorder.size() - 1)
        {
            total += ", ";
        }
        p++;
    }
    if (errors > 0)
    {
        string errored = "Character Limit*: " + total;
        error(library_name, full_path, errored);
        ++m_files_with_errors;
    }
}
Example #5
0
        // Report source lines of `contents` that end in trailing whitespace,
        // skipping files that opt out via the "hpxinspect:endlinewhitespace"
        // marker.
        void whitespace_check::inspect(
            const string & library_name,
            const path & full_path,   // ex: c:/foo/boost/filesystem/path.hpp
            const string & contents)     // contents of file to be inspected
        {
            if (contents.find("hpxinspect:" "endlinewhitespace") != string::npos)
                return;

            // Characters treated as "blank" when scanning a line's tail.
            string whitespace(" \t\f\v\r\n"), total, linenum;
            long errors = 0, currline = 0;
            size_t p = 0, extend = 0;
            vector<string> someline, lineorder;


            // Split the file into lines on '\n'; a line with an embedded
            // '\r' (mixed line endings) is split again on '\r'.
            char_separator<char> sep("\n", "", boost::keep_empty_tokens);
            tokenizer<char_separator<char>> tokens(contents, sep);
            for (const auto& t : tokens) {
                size_t rend = t.find_first_of("\r"), size = t.size();
                if (rend == size - 1)
                {
                    someline.push_back(t);
                }
                else
                {
                    char_separator<char> sep2("\r", "", boost::keep_empty_tokens);
                    tokenizer<char_separator<char>> tokens2(t, sep2);
                    for (const auto& u : tokens2) {
                        someline.push_back(u);
                    }
                }
            }
            while (p < someline.size())
            {
                currline++;
                size_t rend = someline[p].find_last_of("\r");
                // Index of the last non-whitespace character; npos when the
                // whole line is whitespace.
                size_t found = someline[p].find_last_not_of(whitespace);
                // A retained '\r' means one extra legitimate trailing
                // character, so tolerate two tail positions instead of one.
                if (rend != string::npos)
                {
                    extend = 2;
                }
                else
                {
                    extend = 1;
                }
                size_t size = someline[p].size();
                // NOTE(review): `size - extend` is unsigned; when
                // size < extend it wraps to a huge value — confirm very
                // short lines behave as intended here.
                if (found < size - extend || (found == someline[p].npos && size > 1))
                {
                    errors++;
                    linenum = to_string(currline);
                    lineorder.push_back(linenum);
                }
                p++;
            }
            // Join the offending line numbers into "a, b, c" hyperlinks.
            p = 0;
            while (p < lineorder.size())
            {
                total += linelink(full_path, lineorder[p]);
                //linelink is located in function_hyper.hpp
                if (p < lineorder.size() - 1)
                {
                    total += ", ";
                }
                p++;
            }
            if (errors > 0)
            {
                string errored = "*Endline Whitespace*: " + total;
                error(library_name, full_path, errored);
                ++m_files_with_errors;
            }
        }
/**
 *  Parse the basis set file and generate a set of reference shells
 *  from which local and external basis set objects are constructed
 */
void BasisSet::parseGlobal(){

  std::string readString;
  std::string nameOfAtom;
  std::string shSymb;
  int         contDepth;
  // Initialized so the trailing push_back is well-defined even when the
  // basis file contains no records at all.
  int atomicNumber = 0;
  int indx = 0;
  std::vector<libint2::Shell> tmpShell;

  bool readRec = false;   // currently inside an atom record
  bool newRec  = false;   // the line in hand names a new atom
  bool firstRec = true;   // no record has been completed yet
  int nEmpty = 0;         // bookkeeping counters for the file contents
  int nComm  = 0;
  int nRec   = 0;

  // Loop on the success of getline itself: eof() only becomes true after a
  // read fails, so the previous `while(!eof())` processed one spurious line.
  while(std::getline(*this->basisFile_,readString)){
    if(readString.size() == 0)    nEmpty++;
    else if(readString[0] == '!') nComm++;
    else if(!readString.compare("****")){
      // "****" separates records; the following line names the next atom.
      if(!std::getline(*this->basisFile_,readString)) break;
      if(readString.size() == 0) { nEmpty++; readRec = false; continue;}
      nRec++;
      readRec = true;
      newRec  = true;
    }

    if(readRec){
      std::istringstream iss(readString);
      std::vector<std::string> tokens(std::istream_iterator<std::string>{iss},
        std::istream_iterator<std::string>{});
      // Guard: a blank/whitespace-only line inside a record would otherwise
      // index an empty token vector below (undefined behavior).
      if(tokens.empty()) continue;
      if(newRec){
        // First line of a record: element symbol, hashed to a table index.
        if(!firstRec) {
          this->refShells_.push_back(ReferenceShell{atomicNumber,indx,tmpShell});
        }
        indx = HashAtom(tokens[0],0);
        atomicNumber = elements[indx].atomicNumber;
        newRec = false;
        firstRec = false;
        tmpShell.clear();
      } else {
        // Shell header: "<symbol> <contraction depth> ..." followed by
        // contDepth primitive lines of exponent + coefficient(s).
        contDepth = std::stoi(tokens[1]);
        shSymb    = tokens[0];
        std::vector<double> exp;
        std::vector<double> contPrimary;
        std::vector<double> contSecondary;

        for(auto i = 0; i < contDepth; i++) {
          std::getline(*this->basisFile_,readString);
          std::istringstream iss2(readString);
          std::vector<std::string> tokens2(std::istream_iterator<std::string>{iss2},
            std::istream_iterator<std::string>{});

          exp.push_back(std::stod(tokens2[0]));
          contPrimary.push_back(std::stod(tokens2[1]));
          // "SP" shells carry a second contraction column (the P part).
          if(!shSymb.compare("SP"))
            contSecondary.push_back(std::stod(tokens2[2]));
        }

        if(!shSymb.compare("SP")) {
          // Split an SP shell into separate S (l=0) and P (l=1) shells.
          tmpShell.push_back(
            libint2::Shell{ exp, {{0,this->doSph_,contPrimary}}, {{0,0,0}} }
          );
          tmpShell.push_back(
            libint2::Shell{ exp, {{1,this->doSph_,contSecondary}}, {{0,0,0}} }
          );
        } else {
          tmpShell.push_back(
            libint2::Shell{ exp, {{HashL(shSymb),this->doSph_,contPrimary}}, {{0,0,0}} }
          );
        }
      }
    }
  }
  // Append the last record, guarded so an empty file appends nothing.
  if(!firstRec)
    this->refShells_.push_back(ReferenceShell{atomicNumber,indx,tmpShell});

} // BasisSet::parseGlobal
Example #7
0
// Run a full parameter scan: read the scan configuration, load every input
// file, then analyze each image series once per combination of parameter
// values, spawning one worker thread per position/timepoint.
void Scanner::run()
{
  std::vector<double> kernelWidth;
  std::vector<double> windowSize;
  std::vector<double> peakThreshold;
  std::vector<double> floorThreshold;
  std::vector<int> signalFindingIterations;
  std::vector<double> reclusterThreshold;
  std::ostringstream dispstr;
  dispstr << "--------------------SCAN PARAMETERS:\n--------------------\n\n";
  
  //----- Read configuration file -----
  // Each line is "<parameter name>:<comma-separated values>".
  std::ifstream phil(m_configFile);
  if(!(phil.is_open())){
    // No configuration: close the window and give up silently.
    hide();
    return;
  }
  std::string line;
  boost::char_separator<char> sep(":");
  while(getline(phil,line)){
    dispstr << line << "\n";
    boost::tokenizer< boost::char_separator<char> > tokens(line,sep);
    boost::tokenizer< boost::char_separator<char> >::iterator tit = tokens.begin();
    // NOTE(review): assumes every config line contains ':'; a line without
    // one leaves `tit` with a single token and `*tit` after the increment
    // dereferencing end() — confirm the config format guarantees this.
    std::string param = *tit;
    tit++;
    boost::char_separator<char> sep2(",");
    boost::tokenizer< boost::char_separator<char> > tokens2(*tit,sep2);
    if(param.compare("kernel width") == 0){
      for(const auto& t2 : tokens2) kernelWidth.push_back(boost::lexical_cast<double>(t2));
    }
    else if(param.compare("window size") == 0){
      for(const auto& t2 : tokens2) windowSize.push_back(boost::lexical_cast<double>(t2));
    }
    else if(param.compare("peak threshold") == 0){
      for(const auto& t2 : tokens2) peakThreshold.push_back(boost::lexical_cast<double>(t2));
    }
    else if(param.compare("floor threshold") == 0){
      for(const auto& t2 : tokens2) floorThreshold.push_back(boost::lexical_cast<double>(t2));
    }
    else if(param.compare("signal finding iterations") == 0){
      for(const auto& t2 : tokens2) signalFindingIterations.push_back(boost::lexical_cast<int>(t2));
    }
    else if(param.compare("recluster threshold") == 0){
      for(const auto& t2 : tokens2) reclusterThreshold.push_back(boost::lexical_cast<double>(t2));
    }
  }
  phil.close();
  //-----------------------------------

  dispstr << "\n--------------------\nINPUT FILES:\n--------------------\n\n";

  //----- Load images -----
  // One task = one (position, timepoint) pair of one input series.
  int tot_tasks = 0;
  std::vector<int> series_ntasks;
  std::vector< std::vector<ImRecord*>* > templateRecords;
  m_fileManager = new FileManager();
  for(std::vector<std::string>::iterator fit = m_inputFiles.begin(); fit != m_inputFiles.end(); fit++){
    dispstr << *fit << "\n";
    ImageAnalysisToolkit tmp;
    std::vector<ImRecord*>* recs = new std::vector<ImRecord*>();
    recs->clear();
    FileManager::input_file infile = FileConverter::read(m_fileManager,&tmp,recs,*fit,nia::niaVersion);
    templateRecords.push_back(recs);
    int itasks = infile.np * infile.nt;
    series_ntasks.push_back(itasks);
    tot_tasks += itasks;
    m_fileManager->addInputFile(infile);
  }

  m_textBuffer->set_text(dispstr.str());
  show_all_children();

  // Total work = tasks per series x every combination of parameter values.
  tot_tasks *= kernelWidth.size()*windowSize.size()*peakThreshold.size()*floorThreshold.size()*reclusterThreshold.size()*signalFindingIterations.size();
  m_progressWindow.launch(tot_tasks);
  m_fileManager->reset();
  ImSeries* m_data = m_fileManager->loadNext();
  int seriesID = 0;
  int scanID = 0;
  // One nested loop per scanned parameter; each innermost iteration builds
  // a toolkit configured with that parameter combination.
  while(m_data){
    for(std::vector<double>::iterator kwit = kernelWidth.begin(); kwit != kernelWidth.end(); kwit++){
      for(std::vector<double>::iterator wsit = windowSize.begin(); wsit != windowSize.end(); wsit++){
	for(std::vector<double>::iterator ptit = peakThreshold.begin(); ptit != peakThreshold.end(); ptit++){
	  for(std::vector<double>::iterator ftit = floorThreshold.begin(); ftit != floorThreshold.end(); ftit++){
	    for(std::vector<double>::iterator rtit = reclusterThreshold.begin(); rtit != reclusterThreshold.end(); rtit++){
	      for(std::vector<int>::iterator sfit = signalFindingIterations.begin(); sfit != signalFindingIterations.end(); sfit++){
		// NOTE(review): `toolkit` is never deleted in this function;
		// confirm ownership passes elsewhere or this leaks per scan.
		ImageAnalysisToolkit* toolkit = new ImageAnalysisToolkit();
		toolkit->setSaturationThreshold(65534);
		toolkit->setKernelWidth(*kwit);
		toolkit->setLocalWindow(*wsit);
		toolkit->setPeakThreshold(*ptit);
		toolkit->setFloorThreshold(*ftit);
		toolkit->setReclusterThreshold(*rtit);
		toolkit->setMaxSignalFindingIterations(*sfit);
		// Fresh result records for this parameter combination,
		// cloned empty from the series' template records.
		std::vector<ImRecord*>* scanRecs = new std::vector<ImRecord*>();
		scanRecs->clear();
		for(int i = 0; i < series_ntasks[seriesID]; i++) scanRecs->push_back(templateRecords[seriesID]->at(i)->emptyCopy());
		m_records.push_back(scanRecs);
		for(int i = 0; i < scanRecs->at(0)->nSynapseCollections(); i++) toolkit->addSynapseDefinition(scanRecs->at(0)->getSynapseCollection(i)->emptyCopy());
		m_ntasks.push_back(series_ntasks[seriesID]);
		for(int ps = 0; ps < m_data->npos(); ps++){
		  for(int tm = 0; tm < m_data->nt(); tm++){
		    // Throttle: poll every 60s until a thread slot frees up.
		    while(m_activeThreads == m_maxThreads) boost::this_thread::sleep(boost::posix_time::millisec(60000));
		    m_mtx.lock();
		    m_threadpool.create_thread(boost::bind(&Scanner::run_analysis, this, m_data, toolkit, scanID, ps, tm, seriesID));
		    m_activeThreads++;
		    // Brief pause between spawns, held under the lock.
		    boost::this_thread::sleep(boost::posix_time::millisec(1000));
		    m_mtx.unlock();
		  }
		}
		boost::this_thread::sleep(boost::posix_time::millisec(1000));
		scanID++;
	      }
	    }
	  }
	}
      }
    }
    // Wait for all analyses of this series before loading the next one.
    m_threadpool.join_all();
    nia::nout << "Finished parameter scan for sample " << m_fileManager->getName(seriesID) << "\n";
    delete m_data;
    m_data = m_fileManager->loadNext();
    seriesID++;
  }
  delete m_fileManager;
  hide();
  //-----------------------
}