TEST_F( Folders, ListFolders )
{
    auto f = std::static_pointer_cast<Folder>( ml->folder( mock::FileSystemFactory::Root ) );
    ASSERT_NE( f, nullptr );
    auto subFolders = f->folders();
    ASSERT_EQ( 1u, subFolders.size() );

    auto subFolder = subFolders[0];
    auto subFiles = subFolder->files();
    ASSERT_EQ( 1u, subFiles.size() );
    ASSERT_EQ( mock::FileSystemFactory::SubFolder + "subfile.mp4", subFiles[0]->mrl() );

    // Now again, without cache. No need to wait for fs discovery reload here
    Reload();

    f = std::static_pointer_cast<Folder>( ml->folder( f->mrl() ) );
    subFolders = f->folders();
    ASSERT_EQ( 1u, subFolders.size() );

    subFolder = subFolders[0];
    subFiles = subFolder->files();
    ASSERT_EQ( 1u, subFiles.size() );
    ASSERT_EQ( mock::FileSystemFactory::SubFolder + "subfile.mp4", subFiles[0]->mrl() );
}
const NXLFSFolder* NXLFSFolder::findfolder(const wchar_t* path) const
{
    if(path == NULL) return NULL; // ERR
    //NXMutexLocker ml(m_mutex, "findfolder", "n/a");
    //if(path[0] != L'/' && path[0] != L'\\') return NULL; // ERR
    while(path[0] == L'/' || path[0] == L'\\') path++; // ERR

    // split off the first path component (either slash style)
    const wchar_t* s = wcschr(path, L'/');
    if(s == NULL) s = wcschr(path, L'\\');
    if(s == NULL) {
        if(path[0] == L'\0') return this;
        s = path + wcslen(path);
    }

    // case-insensitive match against the direct sub-folders, recursing on the remainder
    const NXLFSFolder* fd = folders();
    while(fd) {
        if(_wcsnicmp(fd->name(), path, s - path) == 0) {
            if(*s) return fd->findfolder(s + 1);
            return fd;
        }
        fd = fd->next();
    }
    return NULL;
}
AIInventoryFetchDescendentsObserver::AIInventoryFetchDescendentsObserver(AIStateMachine* statemachine, LLUUID const& folder) :
    mStateMachine(statemachine)
{
    mStateMachine->idle();
    folder_ref_t folders(1, folder);
    fetchDescendents(folders);
    gInventory.addObserver(this);
    if (isEverythingComplete())
        done();
}
QStringList BookmarkManager::bookmarkFolders() const
{
    QStringList folders(tr("Bookmarks"));
    QList<QStandardItem*> list = treeModel->findItems(QLatin1String("*"),
        Qt::MatchWildcard | Qt::MatchRecursive, 0);

    QString data;
    foreach (const QStandardItem *item, list) {
        data = item->data(Qt::UserRole + 10).toString();
        if (data == QLatin1String("Folder"))
            folders << item->data(Qt::DisplayRole).toString();
    }
    return folders;
}
Folder* Folder::folderBelow()
{
    QList<Folder*> lst = folders();
    if (!lst.isEmpty())
        return lst.first();

    Folder *parentFolder = (Folder *)parent();
    Folder *childFolder = this;
    while (parentFolder && childFolder) {
        lst = parentFolder->folders();
        int pos = lst.indexOf(childFolder) + 1;
        if (pos < lst.size())
            return lst.at(pos);

        childFolder = parentFolder;
        parentFolder = (Folder *)parentFolder->parent();
    }
    return NULL;
}
const NXLFSFile* NXLFSFolder::findfile(const wchar_t* path) const
{
    if(path == NULL) return NULL; // ERR
    //NXMutexLocker ml(m_mutex, "findfile", "n/a");
    while(path[0] == L'.' && (path[1] == L'/' || path[1] == L'\\')) path += 2; // note: there for nxpar2
    while(path[0] == L'/' || path[0] == L'\\') path++;

    const NXLFSFolder* fd = folders();
    const wchar_t* s = wcschr(path, L'/');
    if(s == NULL) s = wcschr(path, L'\\');
    if(s) {
        // path still contains a folder component: descend into the matching sub-folder
        while(fd) {
            if(_wcsnicmp(fd->name(), path, s - path) == 0) {
                return fd->findfile(s + 1);
            }
            fd = fd->next();
        }
        return NULL;
    }

    // leaf component: look it up among this folder's files (case-insensitive)
    const NXLFSFile* ff = files();
    while(ff) {
        if(_wcsicmp(ff->name(), path) == 0) {
            break;
        }
        ff = ff->next();
    }
    return ff;
}
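// Usage sketch for the two NXLFS lookups above. This caller is hypothetical (not part of
// NXLFS) and the folder layout in it is invented; it only illustrates that findfolder()
// and findfile() accept either slash style, tolerate leading separators, and match names
// case-insensitively.
static const NXLFSFile* nxlfs_lookup_example(const NXLFSFolder* root)
{
    if(root == NULL) return NULL;
    // the same file should be reachable through the folder chain or directly by full path
    const NXLFSFolder* sub = root->findfolder(L"/Movies\\2016");          // hypothetical layout
    const NXLFSFile* a = sub ? sub->findfile(L"clip.mkv") : NULL;
    const NXLFSFile* b = root->findfile(L".\\movies/2016/CLIP.MKV");      // same file as 'a'
    return (a == b) ? a : NULL;
}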
//ag_merge [-n _start_N_value_] [-a _start_alpha_value_] -d _directory1_ _directory2_ [_directory3_]
//[_directory4_] ...
int main(int argc, char* argv[])
{
    try {
        //0. Set log file
        LogStream clog;
        LogStream::init(true);
        clog << "\n-----\nag_merge ";
        for(int argNo = 1; argNo < argc; argNo++)
            clog << argv[argNo] << " ";
        clog << "\n\n";

        //1. Set input parameters from command line
        int startTiGN = 1;
        double startAlpha = 0.5;
        int firstDirNo = 0;

        stringv args(argc);
        for(int argNo = 0; argNo < argc; argNo++)
            args[argNo] = string(argv[argNo]);

        //parse and save input parameters
        for(int argNo = 1; argNo < argc; argNo += 2) {
            if(!args[argNo].compare("-n"))
                startTiGN = atoiExt(argv[argNo + 1]);
            else if(!args[argNo].compare("-a"))
                startAlpha = atofExt(argv[argNo + 1]);
            else if(!args[argNo].compare("-d")) {
                firstDirNo = argNo + 1;
                break;
            }
            else
                throw INPUT_ERR;
        }

        //check that there are at least two directories
        if(argc < (firstDirNo + 2))
            throw INPUT_ERR;

        //convert names of input directories to strings and check that they exist
        int folderN = argc - firstDirNo;
        stringv folders(folderN);
        for(int argNo = firstDirNo; argNo < argc; argNo++) {
            folders[argNo - firstDirNo] = string(argv[argNo]);
            struct stat status;
            if((stat(argv[argNo], &status) != 0) || !(status.st_mode & S_IFDIR))
                throw DIR_ERR;
        }

        //1.a) delete all temp files from the previous run and create a directory AGTemp
#ifdef _WIN32 //in windows
        CreateDirectory("AGTemp", NULL);
#else // in linux
        system("rm -rf ./AGTemp/");
        system("mkdir ./AGTemp/");
#endif

        //2. Set parameters from AGTemp/params.txt from the first directory
        TrainInfo ti;    //set of model parameters in the current directory
        double prevBest; //best value of performance achieved on the previous run

        fstream fparam;
        string paramPathName = folders[0] + "/AGTemp/params.txt";
        fparam.open(paramPathName.c_str(), ios_base::in);
        string modeStr, metric;
        fparam >> ti.seed >> ti.trainFName >> ti.validFName >> ti.attrFName >> ti.minAlpha
            >> ti.maxTiGN >> ti.bagN >> modeStr >> metric;

        //modeStr should be "fast" or "slow" or "layered"
        if(modeStr.compare("fast") == 0)
            ti.mode = FAST;
        else if(modeStr.compare("slow") == 0)
            ti.mode = SLOW;
        else if(modeStr.compare("layered") == 0)
            ti.mode = LAYERED;
        else
            throw TEMP_ERR;

        //metric should be "roc" or "rms"
        if(metric.compare("rms") == 0)
            ti.rms = true;
        else if(metric.compare("roc") == 0)
            ti.rms = false;
        else
            throw TEMP_ERR;

        if(fparam.fail())
            throw TEMP_ERR;
        fparam.close();
        fparam.clear();

        //read best value of performance on previous run
        fstream fbest;
        double stub;
        int itemN; // number of data points in the train set, need to calculate possible values of alpha
        string fbestPathName = folders[0] + "/AGTemp/best.txt";
        fbest.open(fbestPathName.c_str(), ios_base::in);
        fbest >> prevBest >> stub >> stub >> stub >> itemN;
        if(fbest.fail())
            throw TEMP_ERR;
        fbest.close();

        int alphaN = getAlphaN(ti.minAlpha, itemN); //number of different alpha values
        int tigNN = getTiGNN(ti.maxTiGN);

        //direction of initialization (1 - up, 0 - right), used in fast mode only
        doublevv dir(tigNN, doublev(alphaN, 0));
        //outer array: column (by TiGN)
        //middle array: row (by alpha)

        //direction of initialization (1 - up, 0 - right), collects average in the slow mode
        doublevv dirStat(tigNN, doublev(alphaN, 0));

        if(ti.mode == FAST) {
            //read part of the directions table from file
            fstream fdir;
            string fdirPathName = folders[0] + "/AGTemp/dir.txt";
            fdir.open(fdirPathName.c_str(), ios_base::in);
            for(int tigNNo = 0; tigNNo < tigNN; tigNNo++)
                for(int alphaNo = 0; alphaNo < alphaN; alphaNo++)
                    fdir >> dir[tigNNo][alphaNo];
            if(fdir.fail())
                throw TEMP_ERR;
            fdir.close();
        }

        //3. Read main parameters from all other directories and check that they match
        int allBagN = ti.bagN;
        intv bagNs(folderN, 0);
        bagNs[0] = ti.bagN;
        intv prevBagNs(folderN + 1, 0); //sums of bagNs of all previous directories
        prevBagNs[1] = ti.bagN;
        int lastSeed = ti.seed;
        for(int folderNo = 1; folderNo < folderN; folderNo++) {
            TrainInfo extraTI; //set of model parameters in the additional directory
            string fparamPathName = folders[folderNo] + "/AGTemp/params.txt";
            fparam.open(fparamPathName.c_str(), ios_base::in);
            fparam >> extraTI.seed >> extraTI.trainFName >> extraTI.validFName >> extraTI.attrFName
                >> extraTI.minAlpha >> extraTI.maxTiGN >> extraTI.bagN;
            if(fparam.fail()) {
                clog << fparamPathName << '\n';
                throw TEMP_ERR;
            }
            fparam.close();

            if((ti.minAlpha != extraTI.minAlpha) || (ti.maxTiGN != extraTI.maxTiGN)) {
                clog << fparamPathName << '\n';
                throw MERGE_MISMATCH_ERR;
            }
            if(extraTI.seed == ti.seed)
                throw SAME_SEED_ERR;
            if(folderNo == (folderN - 1))
                lastSeed = extraTI.seed;

            allBagN += extraTI.bagN;
            bagNs[folderNo] = extraTI.bagN;
            prevBagNs[folderNo + 1] = allBagN;

            string fdirStatPathName = folders[folderNo] + "/AGTemp/dirstat.txt";
            fstream fdirStat;
            fdirStat.open(fdirStatPathName.c_str(), ios_base::in);
            for(int alphaNo = 0; alphaNo < alphaN; alphaNo++)
                for(int tigNNo = 0; tigNNo < tigNN; tigNNo++) {
                    double ds;
                    fdirStat >> ds;
                    dirStat[tigNNo][alphaNo] += ds * extraTI.bagN;
                }
        }

        //4. Load data
        INDdata data("", ti.validFName.c_str(), "", ti.attrFName.c_str());
        CGrove::setData(data);
        CTreeNode::setData(data);

        doublev validTar;
        int validN = data.getTargets(validTar, VALID);

        clog << "Alpha = " << ti.minAlpha << "\nN = " << ti.maxTiGN << "\n"
            << allBagN << " bagging iterations\n";
        if(ti.mode == FAST)
            clog << "fast mode\n\n";
        else if(ti.mode == SLOW)
            clog << "slow mode\n\n";
        else //if(ti.mode == LAYERED)
            clog << "layered mode\n\n";

        //5. Initialize some internal process variables

        //surfaces of performance values for validation set.
        //Always calculate rms (for convergence analysis), if needed, calculate roc
        doublevvv rmsV(tigNN, doublevv(alphaN, doublev(allBagN, 0)));
        doublevvv rocV;
        if(!ti.rms)
            rocV.resize(tigNN, doublevv(alphaN, doublev(allBagN, 0)));
        //outer array: column (by TiGN)
        //middle array: row (by alpha)
        //inner array: bagging iterations. Performance is kept for all iterations to create bagging curves

        //sums of predictions for each data point (raw material to calculate performance)
        doublevvv predsumsV(tigNN, doublevv(alphaN, doublev(validN, 0)));
        //outer array: column (by TiGN)
        //middle array: row (by alpha)
        //inner array: data points in the validation set

        //6. Read and merge models from the directories
        int startAlphaNo = getAlphaN(startAlpha, itemN) - 1;
        int startTiGNNo = getTiGNN(startTiGN) - 1;

        for(int alphaNo = startAlphaNo; alphaNo < alphaN; alphaNo++) {
            double alpha;
            if(alphaNo < alphaN - 1)
                alpha = alphaVal(alphaNo);
            else //this is a special case because minAlpha can be zero
                alpha = ti.minAlpha;

            cout << "Merging models with alpha = " << alpha << endl;

            for(int tigNNo = startTiGNNo; tigNNo < tigNN; tigNNo++) {
                int tigN = tigVal(tigNNo); //number of trees in the current grove

                //temp file in the extra directory that keeps models corresponding to alpha and tigN
                string prefix = string("/AGTemp/ag.a.") + alphaToStr(alpha) + ".n." + itoa(tigN, 10);
                string tempFName = prefix + ".tmp";

                //this will kill the pre-existing file in the output directory
                fstream fsave((string(".") + tempFName).c_str(), ios_base::binary | ios_base::out);

                for(int folderNo = 0; folderNo < folderN; folderNo++) {
                    string inTempFName = folders[folderNo] + tempFName;
                    fstream ftemp((inTempFName).c_str(), ios_base::binary | ios_base::in);
                    if(ftemp.fail()) {
                        clog << inTempFName << '\n';
                        throw TEMP_ERR;
                    }

                    //merge all extra models with the same (alpha, tigN) parameter values into existing models
                    for(int bagNo = prevBagNs[folderNo]; bagNo < prevBagNs[folderNo + 1]; bagNo++) {
                        //retrieve next grove
                        CGrove extraGrove(alpha, tigN);
                        try {
                            extraGrove.load(ftemp);
                        } catch(TE_ERROR err) {
                            clog << inTempFName << '\n';
                            throw err;
                        }

                        //add the loaded grove to a model file with alpha and tigN values in the name
                        extraGrove.save((string(".") + tempFName).c_str());

                        //generate predictions and performance for validation set
                        doublev predictions(validN);
                        for(int itemNo = 0; itemNo < validN; itemNo++) {
                            predsumsV[tigNNo][alphaNo][itemNo] += extraGrove.predict(itemNo, VALID);
                            predictions[itemNo] = predsumsV[tigNNo][alphaNo][itemNo] / (bagNo + 1);
                        }
                        if(bagNo == allBagN - 1) {
                            string predsFName = prefix + ".preds.txt";
                            fstream fpreds((string(".") + predsFName).c_str(), ios_base::out);
                            for(int itemNo = 0; itemNo < validN; itemNo++)
                                fpreds << predictions[itemNo] << endl;
                            fpreds.close();
                        }
                        rmsV[tigNNo][alphaNo][bagNo] = rmse(predictions, validTar);
                        if(!ti.rms)
                            rocV[tigNNo][alphaNo][bagNo] = roc(predictions, validTar);
                    }// end for(int bagNo = ti.bagN; bagNo < ti.bagN + extraTI.bagN; bagNo++)
                    ftemp.close();
                }//end for(int folderNo = 0; folderNo < folderN; folderNo++)
            }//end for(int tigNNo = 0; tigNNo < tigNN; tigNNo++)
        }//end for(int alphaNo = 0; alphaNo < alphaN; alphaNo++)

        //7. Output
        ti.bagN = allBagN;
        ti.seed = lastSeed;
        if(ti.rms)
            trainOut(ti, dir, rmsV, rmsV, predsumsV, itemN, dirStat, startAlphaNo, startTiGNNo);
        else
            trainOut(ti, dir, rmsV, rocV, predsumsV, itemN, dirStat, startAlphaNo, startTiGNNo);

    } catch(TE_ERROR err) {
// GenerateVCXProjFilters
//------------------------------------------------------------------------------
const AString & VSProjectGenerator::GenerateVCXProjFilters( const AString & projectFile )
{
    // preallocate to avoid re-allocations
    m_Tmp.SetReserved( MEGABYTE );
    m_Tmp.SetLength( 0 );

    // determine folder for project
    const char * lastProjSlash = projectFile.FindLast( NATIVE_SLASH );
    AStackString<> projectBasePath( projectFile.Get(), lastProjSlash ? lastProjSlash + 1 : projectFile.Get() );

    // header
    Write( "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n" );
    Write( "<Project ToolsVersion=\"4.0\" xmlns=\"http://schemas.microsoft.com/developer/msbuild/2003\">\n" );

    // list of all folders
    Array< AString > folders( 1024, true );

    // files
    {
        Write( " <ItemGroup>\n" );
        const AString * const fEnd = m_Files.End();
        for ( const AString * fIt = m_Files.Begin(); fIt != fEnd; ++fIt ) {
            // get folder part, relative to base dir
            AStackString<> folder;
            GetFolderPath( *fIt, folder );
            const char * fileName = fIt->BeginsWithI( projectBasePath ) ? fIt->Get() + projectBasePath.GetLength() : fIt->Get();
            Write( " <CustomBuild Include=\"%s\">\n", fileName );
            if ( !folder.IsEmpty() ) {
                Write( " <Filter>%s</Filter>\n", folder.Get() );
            }
            Write( " </CustomBuild>\n" );

            // add new folders
            if ( !folder.IsEmpty() ) {
                for (;;) {
                    // add this folder if unique
                    bool found = false;
                    for ( const AString * it = folders.Begin(); it != folders.End(); ++it ) {
                        if ( it->CompareI( folder ) == 0 ) {
                            found = true;
                            break;
                        }
                    }
                    if ( !found ) {
                        folders.Append( folder );
                    }

                    // handle intermediate folders
                    const char * lastSlash = folder.FindLast( BACK_SLASH );
                    if ( lastSlash == nullptr ) {
                        break;
                    }
                    folder.SetLength( (uint32_t)( lastSlash - folder.Get() ) );
                }
            }
        }
        Write( " </ItemGroup>\n" );
    }

    // folders
    {
        const AString * const fEnd = folders.End();
        for ( const AString * fIt = folders.Begin(); fIt != fEnd; ++fIt ) {
            Write( " <ItemGroup>\n" );
            Write( " <Filter Include=\"%s\">\n", fIt->Get() );
            Write( " <UniqueIdentifier>{%08x-6c94-4f93-bc2a-7f5284b7d434}</UniqueIdentifier>\n", CRC32::Calc( *fIt ) );
            Write( " </Filter>\n" );
            Write( " </ItemGroup>\n" );
        }
    }

    // footer
    Write( "</Project>" ); // no carriage return

    m_OutputVCXProjFilters = m_Tmp;
    return m_OutputVCXProjFilters;
}
QList<BookmarksModel::Bookmark> HtmlImporter::exportBookmarks()
{
    QList<BookmarksModel::Bookmark> list;

    QString bookmarks = QString::fromUtf8(m_file.readAll());
    m_file.close();

    // Converting tags to lower case -,-
    // For some reason Qt::CaseInsensitive is not everytime insensitive :-D
    bookmarks.replace("<DL", "<dl");
    bookmarks.replace("</DL", "</dl");
    bookmarks.replace("<DT", "<dt");
    bookmarks.replace("</DT", "</dt");
    bookmarks.replace("<P", "<p");
    bookmarks.replace("</P", "</p");
    bookmarks.replace("<A", "<a");
    bookmarks.replace("</A", "</a");
    bookmarks.replace("HREF=", "href=");
    bookmarks.replace("<H3", "<h3");
    bookmarks.replace("</H3", "</h3");

    bookmarks = bookmarks.left(bookmarks.lastIndexOf("</dl><p>"));

    int start = bookmarks.indexOf("<dl><p>");
    QStringList folders("Html Import");

    while (start > 0) {
        QString string = bookmarks.mid(start);

        int posOfFolder = string.indexOf("<dt><h3");
        int posOfEndFolder = string.indexOf("</dl><p>");
        int posOfLink = string.indexOf("<dt><a");

        int nearest = qzMin(posOfLink, qzMin(posOfFolder, posOfEndFolder));
        if (nearest == -1) {
            break;
        }

        if (nearest == posOfFolder) {
            // Next is folder
            QRegExp rx("<dt><h3(.*)>(.*)</h3>");
            rx.setMinimal(true);
            rx.indexIn(string);

            // QString arguments = rx.cap(1);
            QString folderName = rx.cap(2);

            folders.append(folderName);
            start += posOfFolder + rx.cap(0).size();
        }
        else if (nearest == posOfEndFolder) {
            // Next is end of folder
            if (!folders.isEmpty()) {
                folders.removeLast();
            }
            start += posOfEndFolder + 8;
        }
        else {
            // Next is link
            QRegExp rx("<dt><a(.*)>(.*)</a>");
            rx.setMinimal(true);
            rx.indexIn(string);

            QString arguments = rx.cap(1);
            QString linkName = rx.cap(2);

            QRegExp rx2("href=\"(.*)\"");
            rx2.setMinimal(true);
            rx2.indexIn(arguments);
            QUrl url = QUrl::fromEncoded(rx2.cap(1).toUtf8());

            start += posOfLink + rx.cap(0).size();

            if (linkName.isEmpty() || url.isEmpty() || url.scheme() == "place" || url.scheme() == "about") {
                continue;
            }

            BookmarksModel::Bookmark b;
            b.folder = folders.last();
            b.title = linkName;
            b.url = url;

            list.append(b);
        }
    }

    return list;
}
// Commit
//------------------------------------------------------------------------------
/*virtual*/ bool FunctionSLN::Commit( const BFFIterator & funcStartIter ) const
{
    AStackString<> solutionOutput;
    Array< AString > solutionProjects( 8, true );
    if ( !GetString( funcStartIter, solutionOutput, ".SolutionOutput", true ) ||
         !GetStrings( funcStartIter, solutionProjects, ".SolutionProjects", false ) ) {
        return false;
    }

    // optional inputs
    AString solutionBuildProject;
    AString solutionVisualStudioVersion;
    AString solutionMinimumVisualStudioVersion;
    if ( !GetString( funcStartIter, solutionBuildProject, ".SolutionBuildProject", false ) ||
         !GetString( funcStartIter, solutionVisualStudioVersion, ".SolutionVisualStudioVersion", false ) ||
         !GetString( funcStartIter, solutionMinimumVisualStudioVersion, ".SolutionMinimumVisualStudioVersion", false ) ) {
        return false;
    }

    // base config
    VSProjectConfig baseConfig;

    // create configs
    Array< VSProjectConfig > configs( 16, true );
    const BFFVariable * solutionConfigs = BFFStackFrame::GetVar( ".SolutionConfigs" );
    if ( solutionConfigs ) {
        if ( solutionConfigs->IsArrayOfStructs() == false ) {
            Error::Error_1050_PropertyMustBeOfType( funcStartIter, this, ".SolutionConfigs", solutionConfigs->GetType(), BFFVariable::VAR_ARRAY_OF_STRUCTS );
            return false;
        }

        const Array< const BFFVariable * > & structs = solutionConfigs->GetArrayOfStructs();
        const BFFVariable * const * end = structs.End();
        for ( const BFFVariable ** it = structs.Begin(); it != end; ++it ) {
            const BFFVariable * s = *it;

            // start with the base configuration
            VSProjectConfig newConfig( baseConfig );

            // .Platform must be provided
            if ( !GetStringFromStruct( s, ".Platform", newConfig.m_Platform ) ) {
                // TODO:B custom error
                Error::Error_1101_MissingProperty( funcStartIter, this, AStackString<>( ".Platform" ) );
                return false;
            }

            // .Config must be provided
            if ( !GetStringFromStruct( s, ".Config", newConfig.m_Config ) ) {
                // TODO:B custom error
                Error::Error_1101_MissingProperty( funcStartIter, this, AStackString<>( ".Config" ) );
                return false;
            }

            configs.Append( newConfig );
        }
    } else {
        // no user specified configs, make some defaults

        // start from the default
        VSProjectConfig config( baseConfig );

        // make the configs
        config.m_Platform = "Win32";
        config.m_Config = "Debug";
        configs.Append( config );
        config.m_Config = "Release";
        configs.Append( config );
        config.m_Platform = "x64";
        configs.Append( config );
        config.m_Config = "Debug";
        configs.Append( config );
    }

    // sort project configs by config and by platform (like visual)
    configs.Sort( VSProjectConfigComp() );

    // create solution folders
    Array< SLNSolutionFolder > folders( 16, true );
    const BFFVariable * solutionFolders = BFFStackFrame::GetVar( ".SolutionFolders" );
    if ( solutionFolders ) {
        if ( solutionFolders->IsArrayOfStructs() == false ) {
            Error::Error_1050_PropertyMustBeOfType( funcStartIter, this, ".SolutionFolders", solutionFolders->GetType(), BFFVariable::VAR_ARRAY_OF_STRUCTS );
            return false;
        }

        const Array< const BFFVariable * > & structs = solutionFolders->GetArrayOfStructs();
        const BFFVariable * const * end = structs.End();
        for ( const BFFVariable ** it = structs.Begin(); it != end; ++it ) {
            const BFFVariable * s = *it;

            // start with the base configuration
            SLNSolutionFolder newFolder;

            // .Path must be provided
            if ( !GetStringFromStruct( s, ".Path", newFolder.m_Path ) ) {
                // TODO:B custom error
                Error::Error_1101_MissingProperty( funcStartIter, this, AStackString<>( ".Path" ) );
                return false;
            }

            newFolder.m_Path.Replace( OTHER_SLASH, NATIVE_SLASH );

            // check if this path was already defined
            {
                const SLNSolutionFolder * const end2 = folders.End();
                for ( const SLNSolutionFolder * it2 = folders.Begin() ; it2 != end2 ; ++it2 ) {
                    if ( it2->m_Path == newFolder.m_Path ) {
                        // TODO:B custom error
                        Error::Error_1100_AlreadyDefined( funcStartIter, this, it2->m_Path );
                        return false;
                    }
                }
            }

            // .Projects must be provided
            if ( !GetStringOrArrayOfStringsFromStruct( funcStartIter, s, ".Projects", newFolder.m_ProjectNames ) ) {
                return false; // GetStringOrArrayOfStringsFromStruct has emitted an error
            }

            // check if this project is included in the solution
            for ( const AString & projectName : newFolder.m_ProjectNames ) {
                if ( solutionProjects.Find( projectName ) == nullptr ) {
                    solutionProjects.Append( projectName );
                }
            }

            folders.Append( newFolder );
        }
    }

    NodeGraph & ng = FBuild::Get().GetDependencyGraph();

    // Check for existing node
    if ( ng.FindNode( solutionOutput ) ) {
        Error::Error_1100_AlreadyDefined( funcStartIter, this, solutionOutput );
        return false;
    }

    // resolves VCXProject nodes associated to solutionProjects
    Array< VCXProjectNode * > projects( solutionProjects.GetSize(), false );
    {
        const AString * const end = solutionProjects.End();
        for ( const AString * it = solutionProjects.Begin(); it != end; ++it ) {
            VCXProjectNode * project = ResolveVCXProject( funcStartIter, *it );
            if ( project == nullptr ) {
                return false; // ResolveVCXProject will have emitted error
            }

            // check that this project contains all .SolutionConfigs
            const Array< VSProjectConfig > & projectConfigs = project->GetConfigs();
            const size_t configsSize = configs.GetSize();
            for ( size_t i = 0 ; i < configsSize ; ++i ) {
                bool containsConfig = false;
                const VSProjectConfig * const config = &configs[i];
                const VSProjectConfig * const end2 = projectConfigs.End();
                for ( const VSProjectConfig * it2 = projectConfigs.Begin(); it2 != end2; ++it2 ) {
                    if ( it2->m_Platform == config->m_Platform && it2->m_Config == config->m_Config ) {
                        containsConfig = true;
                        break;
                    }
                }

                if ( containsConfig == false ) {
                    // TODO: specific error message "ProjectConfigNotFound"
                    AStackString<> configName;
                    configName.Format( "%s|%s", config->m_Platform.Get(), config->m_Config.Get() );
                    Error::Error_1104_TargetNotDefined( funcStartIter, this, configName.Get(), project->GetName() );
                    return false;
                }
            }

            // append vcxproject node to solution
            projects.Append( project );
        }
    }

    // sort projects by name (like visual)
    projects.Sort( VCXProjectNodeComp() );

    // resolves VCXProject nodes associated to solutionFolders
    {
        SLNSolutionFolder * const end = folders.End();
        for ( SLNSolutionFolder * it = folders.Begin(); it != end; ++it ) {
            // retrieves full path of contained vcxprojects
            AString * const end2 = it->m_ProjectNames.End();
            for ( AString * it2 = it->m_ProjectNames.Begin(); it2 != end2; ++it2 ) {
                // Get associate project file
                VCXProjectNode * project = ResolveVCXProject( funcStartIter, *it2 );
                if ( project == nullptr ) {
                    return false; // ResolveVCXProjectRecurse will have emitted error
                }

                ASSERT( projects.Find( project ) ); // Sanity check in global list

                // fixup name to be to final project
                *it2 = project->GetName();
            }
        }
    }

    // resolves VCXProject node referenced by solutionBuildProject
    if ( solutionBuildProject.GetLength() > 0 ) {
        // Get associate project file
        const VCXProjectNode * project = ResolveVCXProject( funcStartIter, solutionBuildProject );
        if ( project == nullptr ) {
            return false; // ResolveVCXProject will have emitted error
        }

        if ( projects.Find( project ) == nullptr ) {
            // project referenced in .SolutionBuildProject is not referenced in .SolutionProjects
            Error::Error_1104_TargetNotDefined( funcStartIter, this, ".SolutionBuildProject", project->GetName() );
            return false;
        }

        solutionBuildProject = project->GetName();
    }

    // Project Dependencies
    Array< SLNDependency > slnDeps( 0, true );
    const BFFVariable * projectDepsVar = BFFStackFrame::GetVar( ".SolutionDependencies" );
    if ( projectDepsVar ) {
        if ( projectDepsVar->IsArrayOfStructs() == false ) {
            Error::Error_1050_PropertyMustBeOfType( funcStartIter, this, ".SolutionDependencies", projectDepsVar->GetType(), BFFVariable::VAR_ARRAY_OF_STRUCTS );
            return false;
        }

        slnDeps.SetCapacity( projectDepsVar->GetArrayOfStructs().GetSize() );
        for ( const BFFVariable * s : projectDepsVar->GetArrayOfStructs() ) {
            // .Projects must be provided
            // .Dependencies must be provided
            SLNDependency deps;
            if ( !GetStringOrArrayOfStringsFromStruct( funcStartIter, s, ".Projects", deps.m_Projects ) ||
                 !GetStringOrArrayOfStringsFromStruct( funcStartIter, s, ".Dependencies", deps.m_Dependencies ) ) {
                return false; // GetStringOrArrayOfStringsFromStruct has emitted an error
            }

            // fixup
            for ( AString & projectName : deps.m_Projects ) {
                // Get associated project file
                const VCXProjectNode * project = ResolveVCXProject( funcStartIter, projectName );
                if ( project == nullptr ) {
                    return false; // ResolveVCXProject will have emitted error
                }
                projectName = project->GetName();
            }
            for ( AString & projectName : deps.m_Dependencies ) {
                // Get associated project file
                const VCXProjectNode * project = ResolveVCXProject( funcStartIter, projectName );
                if ( project == nullptr ) {
                    return false; // ResolveVCXProject will have emitted error
                }
                projectName = project->GetName();
            }

            slnDeps.Append( deps );
        }
    }

    SLNNode * sln = ng.CreateSLNNode( solutionOutput,
                                      solutionBuildProject,
                                      solutionVisualStudioVersion,
                                      solutionMinimumVisualStudioVersion,
                                      configs,
                                      projects,
                                      slnDeps,
                                      folders );
    ASSERT( sln );

    return ProcessAlias( funcStartIter, sln );
}
bool TreeviewSampleApp::InitializeDatabase()
{
    bool ok = true;
    const char* sqlCommands[] = {
        "pragma foreign_keys=1;",
        "CREATE TABLE IF NOT EXISTS projects (\
           pid integer not null,\
           prjtitle varchar(72),\
           primary key (pid));",
        "CREATE TABLE IF NOT EXISTS folders (\
           fid int not null,\
           fname varchar(64) not null,\
           fparent int not null,\
           primary key (fid),\
           foreign key (fparent) references folders(fid) on delete cascade,\
           unique (fname, fparent));",
        "CREATE TABLE IF NOT EXISTS folderprojects (\
           fid int not null,\
           pid int not null,\
           foreign key (fid) references folders(fid) on delete restrict,\
           unique (fid,pid));",
        "CREATE TABLE IF NOT EXISTS folderclosure (\
           ancestor int not null,\
           descendant int not null,\
           distance int not null,\
           primary key (ancestor, descendant),\
           foreign key (ancestor) references folders(fid),\
           foreign key (descendant) references folders(fid));",
        "CREATE TRIGGER IF NOT EXISTS ait_folders AFTER INSERT ON folders FOR EACH ROW\
         BEGIN\
           INSERT INTO folderclosure (ancestor, descendant, distance)\
           SELECT ancestor, NEW.fid, distance+1 FROM folderclosure\
           WHERE descendant = NEW.fparent\
           UNION ALL SELECT NEW.fid, NEW.fid, 0;\
         END;",
        "CREATE TRIGGER IF NOT EXISTS but_folders BEFORE UPDATE ON folders FOR EACH ROW\
         WHEN OLD.fparent != NEW.fparent\
         BEGIN\
           DELETE FROM folderclosure\
           WHERE descendant IN (SELECT descendant FROM folderclosure WHERE ancestor = OLD.fid)\
             AND ancestor NOT IN (SELECT descendant FROM folderclosure WHERE ancestor = OLD.fid);\
           INSERT INTO folderclosure (ancestor, descendant, distance)\
           SELECT supertree.ancestor, subtree.descendant, supertree.distance+subtree.distance+1\
           FROM folderclosure AS supertree JOIN folderclosure AS subtree\
           WHERE subtree.ancestor = OLD.fid\
             AND supertree.descendant = NEW.fparent;\
         END;",
        "CREATE TRIGGER IF NOT EXISTS bdt_folders BEFORE DELETE ON folders FOR EACH ROW\
         BEGIN\
           DELETE FROM folderclosure\
           WHERE descendant IN (SELECT descendant FROM folderclosure WHERE ancestor = OLD.fid);\
         END;",
        "INSERT OR IGNORE INTO folders (fid, fname, fparent) VALUES (1, 'root', 1);",
        NULL
    };
    int state = 0;
    int count;
    int j = 0;
    try {
        m_db.Open(wxT("tvtest.db3"));
        while (sqlCommands[j] != NULL) {
            m_db.ExecuteUpdate(sqlCommands[j]);
            ++j;
        }
        state = 1;
        count = m_db.ExecuteScalar("SELECT COUNT(*) FROM folderprojects;");
        if (count == 0) {
            m_db.ExecuteUpdate("INSERT OR IGNORE INTO projects VALUES (1,'Test project');");
            m_db.ExecuteUpdate("INSERT INTO folderprojects (fid, pid) SELECT 1, pid FROM projects;");
        }
    }
    catch (wxSQLite3Exception& e) {
        wxString msg;
        if (state == 0)
            msg = wxString(wxT("Error on creating the Treeview Sample database\n"));
        if (state == 1)
            msg = wxString(wxT("Error on initializing the Treeview Sample database\n"));
        msg += e.GetMessage();
        wxMessageBox(msg, wxT("Treeview Sample Database Error"), wxOK | wxCENTRE | wxICON_ERROR);
        ok = false;
    }
    return ok;
}
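// Illustrative follow-up to InitializeDatabase(): the folderclosure table maintained by
// the triggers above lets a whole folder subtree be fetched with a single query. This
// helper is hypothetical (not part of the sample); it assumes the same m_db handle and
// the wxSQLite3 prepared-statement API, and is only a sketch of how the closure-table
// schema is meant to be queried.
void TreeviewSampleApp::ListSubtree(int rootFid)
{
    wxSQLite3Statement stmt = m_db.PrepareStatement(
        "SELECT f.fid, f.fname, c.distance "
        "FROM folderclosure c JOIN folders f ON f.fid = c.descendant "
        "WHERE c.ancestor = ? ORDER BY c.distance, f.fname;");
    stmt.Bind(1, rootFid);
    wxSQLite3ResultSet rs = stmt.ExecuteQuery();
    while (rs.NextRow()) {
        // distance 0 is the root folder itself, 1 its direct children, and so on
        wxString line(wxT(' '), (size_t)rs.GetInt(2) * 2); // indent by depth
        line += rs.GetString(1);
        wxLogMessage(wxT("%s"), line.c_str());
    }
    rs.Finalize();
}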
std::vector<std::string> CTemplateLoader::FindPlaceableTemplates(const std::string& path, bool includeSubdirectories, ETemplatesType templatesType, ScriptInterface& scriptInterface)
{
    JSContext* cx = scriptInterface.GetContext();
    JSAutoRequest rq(cx);

    std::vector<std::string> templates;
    Status ok;
    VfsPath templatePath;

    if (templatesType == SIMULATION_TEMPLATES || templatesType == ALL_TEMPLATES) {
        JS::RootedValue placeablesFilter(cx);
        scriptInterface.ReadJSONFile("simulation/data/placeablesFilter.json", &placeablesFilter);

        JS::RootedObject folders(cx);
        if (scriptInterface.GetProperty(placeablesFilter, "templates", &folders)) {
            if (!(JS_IsArrayObject(cx, folders))) {
                LOGERROR("FindPlaceableTemplates: Argument must be an array!");
                return templates;
            }

            u32 length;
            if (!JS_GetArrayLength(cx, folders, &length)) {
                LOGERROR("FindPlaceableTemplates: Failed to get array length!");
                return templates;
            }

            templatePath = VfsPath(TEMPLATE_ROOT) / path;

            // iterate over every filter entry in the array
            for (u32 i = 0; i < length; ++i) {
                JS::RootedValue val(cx);
                if (!JS_GetElement(cx, folders, i, &val)) {
                    LOGERROR("FindPlaceableTemplates: Failed to read array element!");
                    return templates;
                }

                std::string directoryPath;
                std::wstring fileFilter;
                scriptInterface.GetProperty(val, "directory", directoryPath);
                scriptInterface.GetProperty(val, "file", fileFilter);

                VfsPaths filenames;
                if (vfs::GetPathnames(g_VFS, templatePath / (directoryPath + "/"), fileFilter.c_str(), filenames) != INFO::OK)
                    continue;

                for (const VfsPath& filename : filenames) {
                    // Strip the .xml extension
                    VfsPath pathstem = filename.ChangeExtension(L"");
                    // Strip the root from the path
                    std::wstring name = pathstem.string().substr(ARRAY_SIZE(TEMPLATE_ROOT) - 1);

                    templates.emplace_back(name.begin(), name.end());
                }
            }
        }
    }

    if (templatesType == ACTOR_TEMPLATES || templatesType == ALL_TEMPLATES) {
        templatePath = VfsPath(ACTOR_ROOT) / path;

        if (includeSubdirectories)
            ok = vfs::ForEachFile(g_VFS, templatePath, AddActorToTemplates, (uintptr_t)&templates, L"*.xml", vfs::DIR_RECURSIVE);
        else
            ok = vfs::ForEachFile(g_VFS, templatePath, AddActorToTemplates, (uintptr_t)&templates, L"*.xml");
        WARN_IF_ERR(ok);
    }

    if (templatesType != SIMULATION_TEMPLATES && templatesType != ACTOR_TEMPLATES && templatesType != ALL_TEMPLATES)
        LOGERROR("Undefined template type (valid: all, simulation, actor)");

    return templates;
}