/**
 * \brief Non-recursive GLOB operation.
 *
 * If \p dir is a directory, every direct entry whose file name matches
 * pattern_ is collected; otherwise \p dir itself is treated as the single
 * candidate. Matches are returned sorted (stable) by path. When nothing
 * matches, an EAGLE_WARNING is emitted and an empty vector is returned.
 */
std::vector<boost::filesystem::path> Glob::glob( const boost::filesystem::path& dir ) const
{
    std::vector<boost::filesystem::path> files;
    if (boost::filesystem::is_directory(dir))
    {
        for( boost::filesystem::directory_iterator it(dir), end; it != end; ++it)
        {
            boost::smatch match;
            // filename() replaces the deprecated leaf()
            if (boost::regex_match( it->path().filename().string(), match, pattern_ ))
            {
                EAGLE_DEBUG(4, "... " << it->path().filename() );
                files.push_back( it->path() );
            }
        }
    }
    else
    {
        boost::smatch match;
        if (boost::regex_match( dir.filename().string(), match, pattern_ ))
        {
            EAGLE_DEBUG(4, "... " << dir.filename() );
            // ... return vector with only one element
            files.push_back( dir );
        }
    }
    if (files.empty())
    {
        EAGLE_WARNING( (boost::format("Regex \"%s\" did not match any files in %s") % pattern_ % dir ).str() );
    }
    else
    {
        std::stable_sort(files.begin(),files.end());
    }
    return files;
}
/// Return the last component of \p p as a std::string, hiding the
/// Boost.Filesystem API difference: under v3, leaf() yields a path that
/// must be converted explicitly; under v2 it is already a string.
inline std::string leaf(boost::filesystem::path const& p)
{
#if BOOST_FILESYSTEM_VERSION >= 3
    boost::filesystem::path const last = p.leaf();
    return last.string();
#else
    std::string const last = p.leaf();
    return last;
#endif
}
/**
 * Recursively copy \p path_from into the directory \p path_to.
 *
 * A regular file is copied as path_to/<name>; a directory is recreated under
 * path_to and each of its entries copied recursively.
 *
 * @throws std::runtime_error if either path is empty, does not exist, or
 *         the destination is not a directory.
 * @throws boost::filesystem::filesystem_error from copy_file (e.g. if the
 *         destination file already exists).
 */
inline void copy_files(const boost::filesystem::path& path_from,
                       const boost::filesystem::path& path_to)
{
    if (path_from.empty() || path_to.empty())
        throw std::runtime_error("empty path");
    if (! boost::filesystem::exists(path_from) || ! boost::filesystem::exists(path_to))
        throw std::runtime_error("no path");
    if (! boost::filesystem::is_directory(path_to))
        throw std::runtime_error("destination is not a directory");

    // filename() replaces the deprecated leaf()
    const boost::filesystem::path destination = path_to / path_from.filename();

    if (! boost::filesystem::is_directory(path_from))
    {
        boost::filesystem::copy_file(path_from, destination);
    }
    else
    {
        boost::filesystem::create_directory(destination);
        const boost::filesystem::directory_iterator iter_end;
        for (boost::filesystem::directory_iterator iter(path_from); iter != iter_end; ++iter)
            copy_files(*iter, destination);
    }
}
/**
 * Pack all images found in \p SourceList into a single atlas image at
 * \p ImagePath, and write a lookup dictionary script to \p ScriptPath.
 *
 * @param ImagePath      Output path for the packed atlas.
 * @param ScriptPath     Output path for the dictionary script; its stem also
 *                       becomes the generated module name.
 * @param DictionaryName Name of the dictionary emitted into the script.
 * @param SourceList     Files or directories to scan for source images.
 */
void MakePackedImage(
    boost::filesystem::path const& ImagePath,
    boost::filesystem::path const& ScriptPath,
    std::string const& DictionaryName,
    std::vector<std::string> const& SourceList)
{
    std::vector<ImageEntryType> ImageList;
    // Opened (and truncated) up front so a write failure surfaces early.
    boost::filesystem::ofstream DictionaryFile(ScriptPath, std::ios::trunc);

    for (auto const& Source : SourceList)
    {
        ScanFile(ImageList, Source);
    }

    // Module name is the script's file name without its extension.
    // stem() replaces the deprecated leaf() + replace_extension() combination.
    std::string ModuleName = ScriptPath.stem().string();

    // Sort by decreasing maximum edge length so the largest images are
    // packed first.
    std::sort(ImageList.begin(), ImageList.end(),
        [](ImageEntryType const& Lhs, ImageEntryType const& Rhs) -> bool
    {
        auto&& a = *Lhs.Image;
        auto&& b = *Rhs.Image;
        return std::max(a.get_width(), a.get_height())
             > std::max(b.get_width(), b.get_height());
    });

    PackImages(ImagePath, ImageList);
    WriteDictionary(DictionaryFile, ModuleName, DictionaryName, "NinePatches", ImageList);
}
// Scan a file or directory tree for images and append them to List.
// A plain file is added directly, using its own file name as the relative
// path; a directory is walked recursively (symlinks are not descended into)
// and every regular file is added with its path expressed relative to the
// scan root.
void ScanFile(std::vector<ImageEntryType>& List, boost::filesystem::path const& Path)
{
    if (!is_directory(Path))
    {
        // NOTE(review): leaf() is the deprecated spelling of filename().
        AddFile(List, Path, Path.leaf());
        return;
    }
    using namespace boost::filesystem;
    typedef boost::filesystem::recursive_directory_iterator Iterator;
    // Number of path elements in the root; used below to strip that prefix
    // from each absolute entry path.
    std::size_t BaseOffset=std::distance(Path.begin(), Path.end());
    for (Iterator i(Path), ie; i!=ie; ++i)
    {
        // Do not recurse into symlinked directories.
        if (is_symlink(*i))
            i.no_push();
        if (!is_regular_file(*i))
            continue;
        path Absolute=*i;
        path Relative;
        // Rebuild the path, skipping the first BaseOffset components.
        auto j=Absolute.begin();
        std::size_t c=0;
        for (; j!=Absolute.end(); ++j, ++c)
            if (c >= BaseOffset)
                Relative /= *j;
        if (!Relative.empty())
            AddFile(List, Path/Relative, Relative);
    }
}
// NOTE(review): this span is a unified-diff hunk (note the leading '-'/'+'
// markers), not compilable C++. It records the migration from the deprecated
// path::leaf() to path::filename().string() inside a larger loop whose
// enclosing function is not visible here.
BOOST_FOREACH(const boost::filesystem::path& fname,filesFound) { boost::smatch what; - std::string leaf = fname.leaf(); + std::string leaf = fname.filename().string(); if (!boost::regex_match(leaf, what, re)) continue;
//! get the relative file path from the host directory static std::string get_rel_file_path(const fs::path &file){ fs::path abs_path = file.branch_path(); fs::path rel_path = file.leaf(); while (not abs_path.empty() and abs_path.leaf() != "host"){ rel_path = abs_path.leaf() / rel_path; abs_path = abs_path.branch_path(); } return rel_path.string(); }
// Ask the user whether an existing save file should be overwritten during
// import. Shows a Yes/No message box naming the conflicting file and maps
// the answer onto the importer's overwrite-prompt return codes.
CSaveImporterBase::OVERWRITE_PROMPT_RETURN MemoryCardManagerDialog::OnImportOverwrite(const boost::filesystem::path& filePath)
{
    // filename() replaces the deprecated leaf().
    std::string fileName = filePath.filename().string();
    QString msg("File %1 already exists.\n\nOverwrite?");
    QMessageBox::StandardButton resBtn =
        QMessageBox::question(this, "Overwrite?", msg.arg(fileName.c_str()),
                              QMessageBox::Yes | QMessageBox::No, QMessageBox::Yes);
    return (resBtn == QMessageBox::Yes) ? CSaveImporterBase::OVERWRITE_YES : CSaveImporterBase::OVERWRITE_NO;
}
// Load one image file and append an entry for it to List.
// Only ".png" files are accepted. Files whose name ends in ".9.png" are
// treated as Android-style 9-patch images: the 1-pixel border encodes the
// scaleable and fill regions, which are decoded here and stripped from the
// stored image.
void AddFile(std::vector<ImageEntryType>& List, boost::filesystem::path const& FilePath, boost::filesystem::path const& RelativeFilePath)
{
    // Check if this is a supported image format
    if (extension(FilePath)!=".png")
        return;
    std::cout << "Loading " << FilePath.string() << std::endl;
    auto Image=replay::pixbuf_io::load_from_file(FilePath);
    ImageEntryType Entry;
    Entry.RelativePath=RelativeFilePath;
    // Check if this is a 9-patch.
    // NOTE(review): leaf() is the deprecated spelling of filename().
    if (boost::algorithm::ends_with(FilePath.leaf().string(), ".9.png"))
    {
        auto w=Image->get_width(), h=Image->get_height();
        // The border markers live in the alpha channel, so RGBA is required,
        // and the image must be big enough to hold a border plus content.
        if (Image->get_channels() != 4)
            throw std::invalid_argument("9-patch must have alpha channel");
        if (w < 4 || h < 4)
            throw std::invalid_argument("9-patch images must be at least 4x4");
        // Analyze top and left border rows/columns for the scaleable area.
        auto ScalableX=AnalyzeLine(*Image, h-1, 0);
        auto ScalableY=AnalyzeLine(*Image, 0, 1);
        // Analyze bottom and right border rows/columns for the fill area.
        auto FillX=AnalyzeLine(*Image, 0, 0);
        auto FillY=AnalyzeLine(*Image, w-1, 1);
        // An empty fill range falls back to the scaleable range.
        if (std::get<0>(FillX)==std::get<1>(FillX))
            FillX=ScalableX;
        if (std::get<0>(FillY)==std::get<1>(FillY))
            FillY=ScalableY;
        Entry.ScaleableArea.set(std::get<0>(ScalableX), std::get<0>(ScalableY), std::get<1>(ScalableX), std::get<1>(ScalableY));
        Entry.FillArea.set(std::get<0>(FillX), std::get<0>(FillY), std::get<1>(FillX), std::get<1>(FillY));
        // Extract the actual image data (drop the 1-pixel marker border).
        Entry.Image=Image->get_sub_image(1, 1, w-2, h-2);
        Entry.IsNinePatch=true;
    }
    else
    {
        Entry.Image=Image;
    }
    List.push_back(Entry);
}
/**
 * Read an arena XML file, replace every occurrence of the file's own base
 * name inside the document text with the literal "arena", and parse the
 * result with libxml2.
 *
 * @return the parsed document, or NULL on parse failure
 *         (caller is responsible for freeing it — TODO confirm ownership).
 */
static xmlDocPtr get_arena_xml_content(const boost::filesystem::path &path)
{
    std::ifstream is(path.string());
    std::string content((std::istreambuf_iterator<char>(is)), std::istreambuf_iterator<char>());

    // filename() replaces the deprecated leaf().
    const std::string base_file_name = path.filename().string();
    const std::string replacement = "arena";

    // Resume the search just past each replacement instead of restarting at
    // position 0: the old restart-from-zero loop never terminated when the
    // base name was a substring of "arena" (e.g. a file literally named
    // "arena"). Also compare against std::string::npos rather than -1, and
    // skip the loop entirely for an empty base name (find("") always hits).
    if (!base_file_name.empty()) {
        size_t pos = 0;
        while ((pos = content.find(base_file_name, pos)) != std::string::npos) {
            content.replace(pos, base_file_name.length(), replacement);
            pos += replacement.length();
        }
    }

    xmlDocPtr doc = xmlReadMemory(content.c_str(), (int) content.length(),
                                  base_file_name.c_str(), NULL, 0);
    return doc;
}
// Portable "last path component as string" helper covering three
// Boost.Filesystem API generations:
//  - v3: filename() returns a path; convert via .string().
//  - v2 with Boost >= 1.36: take the last element by dereferencing --p.end()
//    (guarding the empty-path case, where end() may not be decrementable).
//  - older releases: leaf() already returns the string directly.
inline SAGA_EXPORT std::string leaf(boost::filesystem::path const& p) { #if BOOST_FILESYSTEM_VERSION == 3 return p.filename().string(); #else #if BOOST_VERSION >= 103600 return p.empty() ? std::string() : *--p.end(); #else return p.leaf(); #endif #endif }
/* * Load images for a given directory. * @param dir The directory of the image set. */ void load_images( const std::string& dir ) { filesystem::path directory(dir); if (not filesystem::is_directory(directory)) { std::cerr << "Error: failed to locate image directory.\n"; return; } int num = 0; for (auto it = filesystem::directory_iterator(directory); it != filesystem::directory_iterator(); ++it, ++num) { const filesystem::path imagepath = it->path(); cv::Mat image = cv::imread(imagepath.string(), cv::IMREAD_GRAYSCALE); if (image.empty()) std::cerr << "\tWarning: failed to load image " << imagepath.string() << "\n"; else retriever.addImage(image, imagepath.leaf().string()); } }
/**
 * Move \p file from the "new" job directory into the "old" one.
 * Only the last path component of \p file is used; the rename is performed
 * with std::rename.
 *
 * @return the file's new location in the old directory.
 * @throws JobDirError if path construction or the rename fails
 *         (the message carries the errno value).
 */
fs::path JobDir::set_old(fs::path const& file)
{
    fs::path new_path;
    fs::path old_path;
    try {
        // filename() replaces the deprecated leaf().
        new_path = m_impl->new_dir / file.filename();
        old_path = m_impl->old_dir / file.filename();
    } catch (boost::filesystem::filesystem_error const& e) {
        throw JobDirError(e.what());
    }
    // std::rename returns a non-zero int on failure (it is not a bool).
    int const rc = std::rename(new_path.string().c_str(), old_path.string().c_str());
    if (rc != 0) {
        std::string msg("rename failed for ");
        msg += new_path.string();
        msg += " (" + boost::lexical_cast<std::string>(errno) + ')';
        throw JobDirError(msg);
    }
    return old_path;
}
/**
 * Execute the Python script at \p p inside the globals/locals dict \p m.
 * Acts only on non-directory paths whose file name ends in ".py"; anything
 * else (including directories, despite the plural name) is ignored.
 */
void run_scripts(const boost::filesystem::path &p, PyObject *m)
{
    // Compiled once — the pattern never changes between calls.
    static const std::regex filePattern("^.+\\.py$");
    if (boost::filesystem::is_directory(p)) {
        return;
    }
    // filename() replaces the deprecated leaf().
    if (!std::regex_match(p.filename().string(), filePattern)) {
        return;
    }
    std::ifstream input(p.string().c_str());
    if (!input.is_open()) {
        return;
    }
    // Slurp the whole file and hand it to the interpreter.
    std::string str((std::istreambuf_iterator<char>(input)), std::istreambuf_iterator<char>());
    PyRun_String(str.c_str(), Py_file_input, m, m);
    input.close();
}
// Slot: a file inside a torrent was renamed. Ignores events for other
// torrents. Locates the tree node by file index, prunes now-empty parents of
// the old path, creates/locates the parent chain for the new path, renames
// and reparents the node (with the proper beginInsertRows/endInsertRows
// notifications), updates the Path2Node_ map, and refreshes the size graph.
// NOTE(review): newPath.leaf()/branch_path() are deprecated v2-era spellings
// of filename()/parent_path().
void TorrentFilesModel::handleFileRenamed (int torrent, int file, const QString& newName) { if (torrent != Index_) return; const auto filePos = std::find_if (Path2Node_.begin (), Path2Node_.end (), [file] (const Path2Node_t::value_type& pair) { return pair.second->FileIndex_ == file; }); if (filePos == Path2Node_.end ()) { qWarning () << Q_FUNC_INFO << "unknown file index" << file << "for torrent" << torrent << "was renamed to" << newName; return; } const auto node = filePos->second; ClearEmptyParents (filePos->first); const boost::filesystem::path newPath { newName.toUtf8 ().constData () }; const auto& parentNode = MkParentIfDoesntExist (newPath, true); node->Name_ = QString::fromUtf8 (newPath.leaf ().string ().c_str ()); node->Reparent (parentNode); beginInsertRows (FindIndex (newPath.branch_path ()), parentNode->GetRowCount (), parentNode->GetRowCount ()); Path2Node_ [newPath] = node; parentNode->AppendExisting (node); endInsertRows (); UpdateSizeGraph (RootNode_); }
// Locate the native (shared-library) file backing this bundle's module.
// Builds a regex for the platform-decorated library name from the module
// path, then scans the module's parent directory for the first entry whose
// file name matches. Returns that entry's path expressed relative to the
// module directory, or an empty path when no candidate is found. The #if
// blocks select between the Boost.Filesystem v3 API (filename().string())
// and the older v2 API (leaf()).
const ::boost::filesystem::path Native::getPath() const throw(RuntimeException) { // Pre-condition SLM_ASSERT("bundle not initialized", m_bundle != 0 ); ::boost::filesystem::path result; const ::boost::filesystem::path fullModulePath( m_bundle->getLocation() / m_modulePath ); #if BOOST_FILESYSTEM_VERSION > 2 const ::boost::regex nativeFileRegex( m_nameDecorator->getNativeName(fullModulePath.filename().string()) ); #else const ::boost::regex nativeFileRegex( m_nameDecorator->getNativeName(fullModulePath.leaf()) ); #endif // BOOST_FILESYSTEM_VERSION > 2 // Walk through the module directory, seeking for a matching file. ::boost::filesystem::directory_iterator curDirEntry(fullModulePath.parent_path()); ::boost::filesystem::directory_iterator endDirEntry; for(; curDirEntry != endDirEntry; ++curDirEntry) { ::boost::filesystem::path curEntryPath( *curDirEntry ); #if BOOST_FILESYSTEM_VERSION > 2 if( ::boost::regex_match( curEntryPath.filename().string(), nativeFileRegex ) ) { result = m_modulePath.parent_path() / curEntryPath.filename(); break; } #else if( ::boost::regex_match( curEntryPath.leaf(), nativeFileRegex ) ) { result = m_modulePath.parent_path() / curEntryPath.leaf(); break; } #endif // BOOST_FILESYSTEM_VERSION > 2 } return result; }
/**
 * Copy the file at \p path into the carve directory, block by block, using
 * FLAGS_carver_block_size-sized reads/writes.
 *
 * @return Status(0) on success; Status(1) if the destination cannot be
 *         created or a write fails.
 */
Status Carver::carve(const boost::filesystem::path& path) {
  PlatformFile src(path.string(), PF_OPEN_EXISTING | PF_READ);
  // filename() replaces the deprecated leaf().
  PlatformFile dst((carveDir_ / path.filename()).string(), PF_CREATE_NEW | PF_WRITE);

  if (!dst.isValid()) {
    return Status(1, "Destination tmp FS is not valid.");
  }

  auto blkCount = ceil(static_cast<double>(src.size()) /
                       static_cast<double>(FLAGS_carver_block_size));

  std::vector<char> inBuff(FLAGS_carver_block_size, 0);
  for (size_t i = 0; i < blkCount; i++) {
    // NOTE: the old code called inBuff.clear() here, which sets the vector's
    // size to 0 and makes the subsequent writes through data() undefined
    // behavior. The buffer is overwritten by read() each pass, so no reset
    // is needed.
    auto bytesRead = src.read(inBuff.data(), FLAGS_carver_block_size);
    if (bytesRead <= 0) {
      // Short read / EOF / error: nothing further to copy.
      break;
    }
    auto bytesWritten = dst.write(inBuff.data(), bytesRead);
    if (bytesWritten < 0) {
      return Status(1, "Error writing bytes to tmp fs");
    }
  }
  return Status(0, "Ok");
};
// Directory-listing predicate: reject hidden entries (names starting with
// '.'), accept everything else.
bool file_filter(boost::filesystem::path const& filename)
{
    // filename().string() replaces the deprecated leaf(), whose v3 return
    // type (path) has no operator[]; also guard the empty-name case before
    // inspecting the first character.
    std::string const name = filename.filename().string();
    if (!name.empty() && name[0] == '.')
        return false;
    return true;
}
// Return the file-name component of the module path.
// filename().string() replaces the deprecated leaf(), which under
// Boost.Filesystem v3 returns a path and does not convert implicitly to the
// declared std::string return type.
std::string get_filename() const { return module_.filename().string(); }
// Generate Boost.Config test artifacts from one boost_*.ipp feature-test
// file: scans the file for its "// MACRO: NAME" marker, derives a lowercase
// namespace name from it, writes <feature>_pass.cpp and <feature>_fail.cpp
// next to the source, and appends the matching entries to the config test
// translation units, both Jamfiles, and the build-checks test/Jamfile data.
// positive_test selects whether the macro is expected to be defined.
// NOTE(review): file.branch_path()/leaf() are the deprecated spellings of
// parent_path()/filename().
void process_ipp_file(const fs::path& file, bool positive_test) { std::cout << "Info: Scanning file: " << file.string() << std::endl; // our variables: std::string file_text; std::string macro_name; std::string namespace_name; fs::path positive_file; fs::path negative_file; // load the file into memory so we can scan it: fs::ifstream ifs(file); std::copy(std::istreambuf_iterator<char>(ifs), std::istreambuf_iterator<char>(), std::back_inserter(file_text)); ifs.close(); // scan for the macro name: boost::regex macro_regex("//\\s*MACRO\\s*:\\s*(\\w+)"); boost::smatch macro_match; if(boost::regex_search(file_text, macro_match, macro_regex)) { macro_name = macro_match[1]; macro_list.insert(macro_name); namespace_name = boost::regex_replace(file_text, macro_regex, "\\L$1", boost::format_first_only | boost::format_no_copy); } if(macro_name.empty()) { std::cout << "Error: no macro definition found in " << file.string(); } else { std::cout << "Info: Macroname: " << macro_name << std::endl; } // get the output filesnames: boost::regex file_regex("boost_([^.]+)\\.ipp"); positive_file = file.branch_path() / boost::regex_replace(file.leaf().string(), file_regex, "$1_pass.cpp"); negative_file = file.branch_path() / boost::regex_replace(file.leaf().string(), file_regex, "$1_fail.cpp"); write_test_file(positive_file, macro_name, namespace_name, file.leaf().string(), positive_test, true); write_test_file(negative_file, macro_name, namespace_name, file.leaf().string(), positive_test, false); // always create config_test data, // positive and negative tests go to separate streams, because for some // reason some compilers choke unless we put them in a particular order... std::ostream* pout = positive_test ? 
&config_test1a : &config_test1; *pout << "#if"; if(!positive_test) *pout << "n"; *pout << "def " << macro_name << "\n#include \"" << file.leaf().string() << "\"\n#else\nnamespace " << namespace_name << " = empty_boost;\n#endif\n"; config_test2 << " if(0 != " << namespace_name << "::test())\n" " {\n" " std::cerr << \"Failed test for " << macro_name << " at: \" << __FILE__ << \":\" << __LINE__ << std::endl;\n" " ++error_count;\n" " }\n"; // always generate the jamfile data: jamfile << "test-suite \"" << macro_name << "\" : \n" "[ run " << positive_file.leaf().string() << " <template>config_options ]\n" "[ compile-fail " << negative_file.leaf().string() << " <template>config_options ] ;\n"; jamfile_v2 << "test-suite \"" << macro_name << "\" : \n" "[ run ../" << positive_file.leaf().string() << " ]\n" "[ compile-fail ../" << negative_file.leaf().string() << " ] ;\n"; // Generate data for the Build-checks test file: build_config_test << "#ifdef TEST_" << macro_name << std::endl; build_config_test << "# include \"../test/" << file.leaf().string() << "\"\n"; build_config_test << "namespace test = " << namespace_name << ";\n#endif\n"; // Generate data for the build-checks Jamfile: static const boost::regex feature_regex("boost_(?:no|has)_(.*)"); std::string feature_name = boost::regex_replace(namespace_name, feature_regex, "\\1"); build_config_jamfile << "run-simple test_case.cpp : : : <define>TEST_" << macro_name << " : " << feature_name << " ;\n"; build_config_jamfile << "alias " << feature_name << " : " << feature_name << ".output ;\n"; build_config_jamfile << "explicit " << feature_name << " ;\n"; }
// mongorestore driver: recursively restore a dump directory or a single
// .bson/.bin file. Directories are walked entry by entry (hidden names are
// skipped; top-level oplog.bson is skipped unless --db was given;
// system.indexes.bson is deferred until last). Files are restored into a
// namespace derived from the directory layout or from the --db/--collection
// overrides, with optional --drop handling (including preserving the
// existing user set for system.users), options/index application from the
// matching .metadata.json, and an error if --oplogLimit is combined with
// normal collections.
// NOTE(review): root.leaf()/branch_path() are the deprecated v2 spellings of
// filename()/parent_path(); leaf() here is used as a string, which only
// compiles against the v2 API.
void drillDown( boost::filesystem::path root, bool use_db, bool use_coll, bool oplogReplayLimit, bool top_level=false) { bool json_metadata = false; LOG(2) << "drillDown: " << root.string() << endl; // skip hidden files and directories if (root.leaf()[0] == '.' && root.leaf() != ".") return; if ( is_directory( root ) ) { boost::filesystem::directory_iterator end; boost::filesystem::directory_iterator i(root); boost::filesystem::path indexes; while ( i != end ) { boost::filesystem::path p = *i; i++; if (use_db) { if (boost::filesystem::is_directory(p)) { error() << "ERROR: root directory must be a dump of a single database" << endl; error() << " when specifying a db name with --db" << endl; printHelp(cout); return; } } if (use_coll) { if (boost::filesystem::is_directory(p) || i != end) { error() << "ERROR: root directory must be a dump of a single collection" << endl; error() << " when specifying a collection name with --collection" << endl; printHelp(cout); return; } } // don't insert oplog if (top_level && !use_db && p.leaf() == "oplog.bson") continue; if ( p.leaf() == "system.indexes.bson" ) { indexes = p; } else { drillDown(p, use_db, use_coll, oplogReplayLimit); } } if (!indexes.empty() && !json_metadata) { drillDown(indexes, use_db, use_coll, oplogReplayLimit); } return; } if ( endsWith( root.string().c_str() , ".metadata.json" ) ) { // Metadata files are handled when the corresponding .bson file is handled return; } if ( ! 
( endsWith( root.string().c_str() , ".bson" ) || endsWith( root.string().c_str() , ".bin" ) ) ) { error() << "don't know what to do with file [" << root.string() << "]" << endl; return; } log() << root.string() << endl; if ( root.leaf() == "system.profile.bson" ) { log() << "\t skipping" << endl; return; } string ns; if (use_db) { ns += _db; } else { string dir = root.branch_path().string(); if ( dir.find( "/" ) == string::npos ) ns += dir; else ns += dir.substr( dir.find_last_of( "/" ) + 1 ); if ( ns.size() == 0 ) ns = "test"; } verify( ns.size() ); string oldCollName = root.leaf(); // Name of the collection that was dumped from oldCollName = oldCollName.substr( 0 , oldCollName.find_last_of( "." ) ); if (use_coll) { ns += "." + _coll; } else { ns += "." + oldCollName; } if (oplogReplayLimit) { error() << "The oplogLimit option cannot be used if " << "normal databases/collections exist in the dump directory." << endl; exit(EXIT_FAILURE); } log() << "\tgoing into namespace [" << ns << "]" << endl; if ( _drop ) { if (root.leaf() != "system.users.bson" ) { log() << "\t dropping" << endl; conn().dropCollection( ns ); } else { // Create map of the users currently in the DB BSONObj fields = BSON("user" << 1); scoped_ptr<DBClientCursor> cursor(conn().query(ns, Query(), 0, 0, &fields)); while (cursor->more()) { BSONObj user = cursor->next(); _users.insert(user["user"].String()); } } } BSONObj metadataObject; if (_restoreOptions || _restoreIndexes) { boost::filesystem::path metadataFile = (root.branch_path() / (oldCollName + ".metadata.json")); if (!boost::filesystem::exists(metadataFile.string())) { // This is fine because dumps from before 2.1 won't have a metadata file, just print a warning. // System collections shouldn't have metadata so don't warn if that file is missing. if (!startsWith(metadataFile.leaf(), "system.")) { log() << metadataFile.string() << " not found. Skipping." 
<< endl; } } else { metadataObject = parseMetadataFile(metadataFile.string()); } } _curns = ns.c_str(); _curdb = NamespaceString(_curns).db; _curcoll = NamespaceString(_curns).coll; // If drop is not used, warn if the collection exists. if (!_drop) { scoped_ptr<DBClientCursor> cursor(conn().query(_curdb + ".system.namespaces", Query(BSON("name" << ns)))); if (cursor->more()) { // collection already exists show warning warning() << "Restoring to " << ns << " without dropping. Restored data " "will be inserted without raising errors; check your server log" << endl; } } if (_restoreOptions && metadataObject.hasField("options")) { // Try to create collection with given options createCollectionWithOptions(metadataObject["options"].Obj()); } processFile( root ); if (_drop && root.leaf() == "system.users.bson") { // Delete any users that used to exist but weren't in the dump file for (set<string>::iterator it = _users.begin(); it != _users.end(); ++it) { BSONObj userMatch = BSON("user" << *it); conn().remove(ns, Query(userMatch)); } _users.clear(); } if (_restoreIndexes && metadataObject.hasField("indexes")) { vector<BSONElement> indexes = metadataObject["indexes"].Array(); for (vector<BSONElement>::iterator it = indexes.begin(); it != indexes.end(); ++it) { createIndex((*it).Obj(), false); } } }
// One HMM re-estimation pass over a gesture repository directory.
// For every observation file under repository_path: reads the feature
// sequence, runs the Viterbi decoder (using the given distribution and
// transition probability files), writes the decoded state sequence to
// localOptimisedData/<repo>/<file>, and accumulates per-state feature counts
// plus state-occupation counts. Afterwards it re-estimates and writes the
// distribution probabilities (<repo>_dis.txt, zero counts smoothed to
// 1/(80*numOfZero)) and the transition probabilities (<repo>_tran.txt, a
// banded self/forward-jump model normalised per row, with forced transition
// out of the last state of each stroke).
// NOTE(review): repository_path.leaf() / sub_itr->leaf() are the deprecated
// Boost.Filesystem v2 spellings (string-returning) of filename(); the
// fixed-size arrays (150 states, 300 features, 100x100 matrix) bound the
// supported model size and are not range-checked here.
void parseFile(fs::path repository_path, string disProbFilePath, string tranProbFilePath){ rh::State state[150]; int feature[300]; int numOfState = 0; int numOfFeature = 0; int tranState[150]; //used to calculate transition probabilityy int trainingTimes = 0; //used to calculate transition probability double optimisedTranMatrixSource[100]; double optimisedTransitionMatrix[100][100];// = new double[100][100]; // for(int i=0; i<15; i++){ // for(int j=0; j<15; j++){ // cout<<optimisedTransitionMatrix[i][j]<<"\t"; // } // cout<<endl; // } try{ //trversal the subdirecotry fs::directory_iterator end_sub_itr; for(fs::directory_iterator sub_itr(repository_path); sub_itr!=end_sub_itr; ++sub_itr){ if(is_directory(*sub_itr)){ //do nothing }else{ string observationPath = "./data/trainingData/localInitialData/"+repository_path.leaf()+"/"+sub_itr->leaf(); string line; fs::ifstream observationFile(observationPath); if(!observationFile){ cout<<"Cannot open file.\n"; }else{ while(!observationFile.eof()){ getline(observationFile, line); if(line.compare("")==0){//do nothing }else{ feature[numOfFeature]=rh::convertToInt(line); numOfFeature++; } } } observationFile.close(); rh::ViterbiResult result; try{ result = rh::Viterbi::Calculate_path_and_probability(disProbFilePath, observationPath, tranProbFilePath); // cout<<"finish processing: "<<observationPath<<endl; }catch(...){ cout<<"Viterbi Exception when processing file "+observationPath+".\n"; } //intermedia value: the state sequence -- start string stateSequanceDirectoryPath = "./data/trainingData/localOptimisedData/"+repository_path.leaf(); fs::create_directory(stateSequanceDirectoryPath); string stateSequencePath = stateSequanceDirectoryPath+"/"+sub_itr->leaf(); fs::ofstream stateSequence(stateSequencePath); if(!stateSequence){ cout<<"Cannot open file!"<<endl; } for(int i=0; i<result.path.size(); i++){ stateSequence<<result.path.at(i)<<endl; tranState[result.path.at(i)]++;//calculate the total number of the feature in each state } 
stateSequence.close(); //intermedia value -- end for(int i=0; i<result.path.size(); i++){ int stateIndex = result.path.at(i); if(stateIndex > numOfState){ numOfState = stateIndex; } state[stateIndex].vector[feature[stateIndex]]++; } } // trainingTimes++;//should not be used anymore // //tst // cout<<trainingTimes<<endl; // for(int i=0; i<15; i++){ // for(int j=0; j<15; j++){ // cout<<optimisedTransitionMatrix[i][j]<<"\t"; // } // cout<<endl; // } // //tst end } }catch(...){ cout<<"Exception when calculating viterbi for "+disProbFilePath+"\n"; } try{ fs::ofstream optimisedDistributionFile("./data/trainingData/localOptimisedData/"+repository_path.leaf()+"_dis.txt"); //calculate distribution probability for(int i=0; i<=numOfState; i++){ double sum = 0; double sumIncludeZero = 0; int numOfZero = 0;//used to change all the zero to 1/(80*numberOfZero) for(int k=0; k<16; k++){ sum += state[i].vector[k]; if(state[i].vector[k]==0){ numOfZero++; } } // cout<<sum<<endl; for(int k=0; k<16; k++){ // cout<<state[i].vector[k]<<" "; if(state[i].vector[k]==0){ state[i].vector[k]=1.0/(80*numOfZero); // cout<<numOfZero<<" "<<state[i].vector[k]<<"\t"; }else{ state[i].vector[k] = state[i].vector[k]/sum; } // cout<<sum<<" "<<state[i].vector[k]<<"\t"; // sumIncludeZero += state[i].vector[k]; optimisedDistributionFile<<state[i].vector[k]<<endl; // cout<<state[i].vector[k]<<" "; } // for(int k=0; k<16; k++){ // state[i].vector[k] /= sumIncludeZero; // optimisedDistributionFile<<state[i].vector[k]<<endl; // } // cout<<endl; } optimisedDistributionFile.close(); }catch(...){ cout<<"Exception when outputting to distribution file\n"; } try{ //calculate the transition probability for(int i=0; i<=numOfState; i++){ optimisedTranMatrixSource[i] = tranState[i]; // cout<<"feature: "<<tranState[i]<<endl; // cout<<"training times: "<<trainingTimes<<endl; // cout<<"average :"<<optimisedTranMatrixSource[i]<<endl; } // double optimisedTransitionNormalisation[100];//used to normalise the transition matrix 
//initilised optimisedTransitionMatrix for(int i=0; i<=numOfState; i++){ for(int j=0; j<=numOfState; j++){ optimisedTransitionMatrix[i][j]=0; } } for(int i=0; i<=numOfState-1; i++){ int actualJumpNo; double optimisedTransitionNormalisation=0;//used to normalise the transition matrix actualJumpNo = rh::STATENO - (i%rh::STATENO) - 1; if(actualJumpNo>rh::JUMPNO){ actualJumpNo=rh::JUMPNO; } if(optimisedTranMatrixSource[i]==0){ optimisedTransitionMatrix[i][i]=0; }else if(optimisedTranMatrixSource[i]==1){ optimisedTransitionMatrix[i][i]=0; optimisedTransitionMatrix[i][i+1]=1; optimisedTransitionNormalisation+=optimisedTransitionMatrix[i][i+1]; for(int j=2; j<=actualJumpNo; j++){ optimisedTransitionMatrix[i][i+j]=0.5*optimisedTransitionMatrix[i][i+j-1]; optimisedTransitionNormalisation+=optimisedTransitionMatrix[i][i+j]; } }else{ optimisedTransitionMatrix[i][i]=(optimisedTranMatrixSource[i]-1)/optimisedTranMatrixSource[i]; optimisedTransitionNormalisation+=optimisedTransitionMatrix[i][i]; for(int j=1; j<=actualJumpNo; j++){ optimisedTransitionMatrix[i][i+j]=0.5*optimisedTransitionMatrix[i][i+j-1]; optimisedTransitionNormalisation+=optimisedTransitionMatrix[i][i+j]; } } //normalise the transition matrix for(int j=0; j<=actualJumpNo; j++){ if(optimisedTransitionNormalisation!=0){ optimisedTransitionMatrix[i][i+j]*=(1.0/optimisedTransitionNormalisation); } } //handle the last state transition in each stroke if(i%rh::STATENO==(rh::STATENO-1)){ optimisedTransitionMatrix[i][i+1]=1; } } /* fix later --need remove start for(int i=0; i<=numOfState-1; i++){ optimisedTransitionMatrix[i][i]=(optimisedTranMatrixSource[i]-1)/optimisedTranMatrixSource[i]; optimisedTransitionMatrix[i][i+1]=1/optimisedTranMatrixSource[i]; }*///need remove end //normalise the transition matrix // for(int i=0; i<=numOfState-1; i++){ // for(int j=0; j<=rh::JUMPNO; j++){ // if(optimisedTransitionNormalisation[i]!=0){ //// cout<<optimisedTransitionNormalisation[i]<<endl; //// 
cout<<optimisedTransitionMatrix[i][i+j]<<endl; // optimisedTransitionMatrix[i][i+j]*=(1.0/optimisedTransitionNormalisation[i]); //// cout<<optimisedTransitionMatrix[i][i+j]<<endl<<endl; // } // } // } optimisedTransitionMatrix[numOfState][numOfState]=1; }catch(...){ cout<<"Exception when calculating the transition probability\n"; } //fix later try{ //output to file fs::ofstream optimisedTransitionFile("./data/trainingData/localOptimisedData/"+repository_path.leaf()+"_tran.txt"); for(int i=0; i<=numOfState; i++){ for(int j=0; j<=numOfState; j++){ optimisedTransitionFile<<optimisedTransitionMatrix[i][j]<<endl; } if(i!=numOfState){ optimisedTransitionFile<<"newRow"<<endl; } } optimisedTransitionFile.close(); }catch(...){ cout<<"Exception when outputting to transition file\n"; } }
// Build and run a modal alert dialog from generated Adam/Eve sources.
// Assembles a sheet (carrying an optional checkbox value) and a layout with
// the message text, an optional icon, an optional checkbox, and up to three
// buttons. Buttons are appended in reverse order so button 0 ends up
// rightmost; the terminating action is expected to be named "bN" where N is
// the button index. Returns the pressed button's name and the checkbox
// state; returns {0,false} immediately if no button name was supplied.
// NOTE(review): icon_path.leaf()/branch_path() are the deprecated v2
// spellings of filename()/parent_path().
std::pair<const char*, bool> alert(const char* message_text, const char* window_name, const char* button_0_name, const char* button_1_name, const char* button_2_name, const char* checkbox_name, const boost::filesystem::path& icon_path, std::size_t default_button_index, std::size_t cancel_button_index) { if ((button_0_name || button_1_name || button_2_name) == false) return std::make_pair<const char*, bool>(0, false); std::stringstream sheet; std::stringstream layout; boost::filesystem::path icon_directory_path; // The sheet for the alert dialog sheet << "sheet alert_sheet\n" "{\n" "interface:\n" " checkbox_value: false;\n" "output:\n" " result <== { checkbox_value: checkbox_value };\n" "}\n" ; // Start by filling out the header for the layout of the alert layout << "layout alert_layout\n" "{\n" " view dialog(name: '" << window_name << "', placement: place_row, spacing: 10)\n" " {\n"; if (icon_path != boost::filesystem::path()) { icon_directory_path = icon_path.branch_path(); layout << " label_t(image: '" << icon_path.leaf() << "')\n" ; } layout << " column()\n" " {\n" " static_text(name:'" << message_text << "', horizontal: align_fill, characters: 25);\n" ; // add the checkbox if we have a name for one if (checkbox_name) { layout << "checkbox(name: '" << checkbox_name << "', bind: @checkbox_value);\n" ; } // add the button set layout << "row(horizontal: align_right)\n" "{\n" ; // add the buttons in *reverse* order, so the first is rightmost append_button_to_layout(layout, button_2_name, 2, default_button_index == 2, cancel_button_index == 2); append_button_to_layout(layout, button_1_name, 1, default_button_index == 1, cancel_button_index == 1); append_button_to_layout(layout, button_0_name, 0, default_button_index == 0, cancel_button_index == 0); // close out the rest of the layout layout << " }\n" // row " }\n" // column " }\n" // dialog "}\n" // layout ; // finally set up the params for the modal dialog interface call dialog_result_t 
result(handle_dialog(dictionary_t(), dictionary_t(), dictionary_t(), dialog_display_s, layout, sheet, &always_break, icon_directory_path)); bool is_checked(get_value(result.command_m, static_name_t("checkbox_value")).cast<bool>()); // NOTE (fbrereto) : Here is why we require the name of the action to be "bN", where N is the index std::size_t index(std::atoi(&result.terminating_action_m.c_str()[1])); if (index == 0) return std::make_pair(button_0_name, is_checked); else if (index == 1) return std::make_pair(button_1_name, is_checked); return std::make_pair(button_2_name, is_checked); }
// mongorestore driver (bulk-load capable variant): recursively restore a
// dump directory or a single .bson/.bin file. Directories are walked entry
// by entry (hidden names skipped; top-level oplog.bson skipped unless --db;
// system.indexes.bson ignored — indexes come only from .metadata.json).
// Files are restored into a namespace derived from the directory layout or
// the --db/--collection overrides, with optional --drop handling (including
// preserving the existing user set for system.users) and options/indexes
// taken from the matching .metadata.json. With _doBulkLoad, a RemoteLoader
// performs the load and commit; otherwise collection/options/indexes are
// created manually, indexes last.
// NOTE(review): root.leaf()/branch_path() are the deprecated spellings of
// filename()/parent_path().
void drillDown( boost::filesystem::path root, bool use_db, bool use_coll, bool top_level=false ) { LOG(2) << "drillDown: " << root.string() << endl; // skip hidden files and directories if (root.leaf().string()[0] == '.' && root.leaf().string() != ".") return; if ( is_directory( root ) ) { boost::filesystem::directory_iterator end; boost::filesystem::directory_iterator i(root); while ( i != end ) { boost::filesystem::path p = *i; i++; if (use_db) { if (boost::filesystem::is_directory(p)) { error() << "ERROR: root directory must be a dump of a single database" << endl; error() << " when specifying a db name with --db" << endl; printHelp(cout); return; } } if (use_coll) { if (boost::filesystem::is_directory(p) || i != end) { error() << "ERROR: root directory must be a dump of a single collection" << endl; error() << " when specifying a collection name with --collection" << endl; printHelp(cout); return; } } // don't insert oplog if (top_level && !use_db && p.leaf() == "oplog.bson") continue; // Only restore indexes from a corresponding .metadata.json file. if ( p.leaf() != "system.indexes.bson" ) { drillDown(p, use_db, use_coll); } } return; } if ( endsWith( root.string().c_str() , ".metadata.json" ) ) { // Metadata files are handled when the corresponding .bson file is handled return; } if ( ! ( endsWith( root.string().c_str() , ".bson" ) || endsWith( root.string().c_str() , ".bin" ) ) ) { error() << "don't know what to do with file [" << root.string() << "]" << endl; return; } log() << root.string() << endl; if ( root.leaf() == "system.profile.bson" ) { log() << "\t skipping" << endl; return; } string ns; if (use_db) { ns += _db; } else { ns = root.parent_path().filename().string(); if (ns.empty()) ns = "test"; } verify( ns.size() ); string oldCollName = root.leaf().string(); // Name of the collection that was dumped from oldCollName = oldCollName.substr( 0 , oldCollName.find_last_of( "." ) ); if (use_coll) { ns += "." + _coll; } else { ns += "." 
+ oldCollName; } log() << "\tgoing into namespace [" << ns << "]" << endl; if ( _drop ) { if (root.leaf() != "system.users.bson" ) { log() << "\t dropping" << endl; conn().dropCollection( ns ); } else { // Create map of the users currently in the DB BSONObj fields = BSON("user" << 1); scoped_ptr<DBClientCursor> cursor(conn().query(ns, Query(), 0, 0, &fields)); while (cursor->more()) { BSONObj user = cursor->next(); _users.insert(user["user"].String()); } } } BSONObj metadataObject; if (_restoreOptions || _restoreIndexes) { boost::filesystem::path metadataFile = (root.branch_path() / (oldCollName + ".metadata.json")); if (!boost::filesystem::exists(metadataFile.string())) { // This is fine because dumps from before 2.1 won't have a metadata file, just print a warning. // System collections shouldn't have metadata so don't warn if that file is missing. if (!startsWith(metadataFile.leaf().string(), "system.")) { log() << metadataFile.string() << " not found. Skipping." << endl; } } else { metadataObject = parseMetadataFile(metadataFile.string()); } } _curns = ns.c_str(); NamespaceString nss(_curns); _curdb = nss.db; _curcoll = nss.coll; // If drop is not used, warn if the collection exists. if (!_drop) { scoped_ptr<DBClientCursor> cursor(conn().query(_curdb + ".system.namespaces", Query(BSON("name" << ns)))); if (cursor->more()) { // collection already exists show warning warning() << "Restoring to " << ns << " without dropping. Restored data " "will be inserted without raising errors; check your server log" << endl; } } vector<BSONObj> indexes; if (_restoreIndexes && metadataObject.hasField("indexes")) { const vector<BSONElement> indexElements = metadataObject["indexes"].Array(); for (vector<BSONElement>::const_iterator it = indexElements.begin(); it != indexElements.end(); ++it) { // Need to make sure the ns field gets updated to // the proper _curdb + _curns value, if we're // restoring to a different database. 
const BSONObj indexObj = renameIndexNs(it->Obj()); indexes.push_back(indexObj); } } const BSONObj options = _restoreOptions && metadataObject.hasField("options") ? metadataObject["options"].Obj() : BSONObj(); if (_doBulkLoad) { RemoteLoader loader(conn(), _curdb, _curcoll, indexes, options); processFile( root ); loader.commit(); } else { // No bulk load. Create collection and indexes manually. if (!options.isEmpty()) { createCollectionWithOptions(options); } // Build indexes last - it's a little faster. processFile( root ); for (vector<BSONObj>::iterator it = indexes.begin(); it != indexes.end(); ++it) { createIndex(*it); } } if (_drop && root.leaf() == "system.users.bson") { // Delete any users that used to exist but weren't in the dump file for (set<string>::iterator it = _users.begin(); it != _users.end(); ++it) { BSONObj userMatch = BSON("user" << *it); conn().remove(ns, Query(userMatch)); } _users.clear(); } }
// Directory-listing predicate: reject hidden entries (names starting with
// '.'); every accepted path is also echoed to stderr for tracing.
bool file_filter(boost::filesystem::path const& filename)
{
    // filename().string() replaces the deprecated leaf(), whose v3 return
    // type (path) has no operator[]; also guard the empty-name case before
    // inspecting the first character.
    std::string const name = filename.filename().string();
    if (!name.empty() && name[0] == '.')
        return false;
    std::cerr << filename << std::endl;
    return true;
}