// Reset this object to its pristine state: abort any open MDB transaction,
// drop the reference count, and discard all pending deletes/inserts.
void Clear()
{
	if (txn != NULL) {
		mdb_txn_abort(txn);
		txn = NULL;
	}
	ref = 0;
	dels.clear();
	inserts.clear();
}
// Fill setOut with the names of all registered loggers.
// The logger list is read under the engine mutex; setOut is cleared first.
MojErr MojLogEngine::loggerNames(StringSet& setOut)
{
	setOut.clear();
	MojThreadGuard guard(m_mutex);
	LoggerList::ConstIterator iter = m_loggers.begin();
	LoggerList::ConstIterator end = m_loggers.end();
	for (; iter != end; ++iter) {
		MojString loggerName;
		MojErr err = loggerName.assign((*iter)->name());
		MojErrCheck(err);
		err = setOut.put(loggerName);
		MojErrCheck(err);
	}
	return MojErrNone;
}
void CDwdsThesaurus::GetAllLemmasWhosePathesStartFromThisSequence (const vector<int>& HyperTermNos, StringSet& Lemmas) const { DwordVector Result; GetAllPathesWhichStartFromThisSequence(HyperTermNos, Result); sort(Result.begin(),Result.end()); Lemmas.clear(); for (size_t i=0; i<LeavesPathes.size(); i++) if (binary_search(Result.begin(),Result.end(), LeavesPathes[i])) { const char* s = LeavesLemmas[i].GetString(); Lemmas.insert(s); }; };
//////////////////////////////////////////////////////////////////////////////// // TODO: Duplicate code with reducer-shingle-threshold.cpp. Refactor into class // with virtual method call to call the emit method int main() { char value[1000], key[1000]; StringSet shingleSet; std::string prevKey; while (scanf("%s\t%s\n", key, value) != EOF) { if (prevKey != key) { emitShinglesWithDocSizes(shingleSet, prevKey); shingleSet.clear(); } shingleSet.insert(value); prevKey = key; } emitShinglesWithDocSizes(shingleSet, prevKey); return 0; }
/** * categories is the selected categories for one image, members may be Las Vegas, Chicago, and Los Angeles if the * category in question is Places. * This function then increases _groupCount with 1 for each of the groups the relavant items belongs to * Las Vegas might increase the _groupCount[Nevada] by one. * The tricky part is to avoid increasing it by more than 1 per image, that is what the countedGroupDict is * used for. */ void GroupCounter::count( const StringSet& categories ) { static StringSet countedGroupDict; countedGroupDict.clear(); for( StringSet::const_iterator categoryIt = categories.begin(); categoryIt != categories.end(); ++categoryIt ) { if ( _memberToGroup.contains(*categoryIt)) { const QStringList groups = _memberToGroup[*categoryIt]; for ( const QString& group : groups ) { if ( !countedGroupDict.contains( group ) ) { countedGroupDict.insert( group ); (_groupCount[group])++; } } } // The item Nevada should itself go into the group Nevada. if ( !countedGroupDict.contains( *categoryIt ) && _groupCount.contains( *categoryIt ) ) { countedGroupDict.insert( *categoryIt); (_groupCount[*categoryIt])++; } } }
/** * Entry point. Arguably the most common function in all applications. * @param argc the number of arguments. * @param argv the actual arguments. * @return return value for the caller to tell we succeed or not. */ int main(int argc, char *argv[]) { bool ignorenext = true; char *filename = NULL; char *ext = NULL; char *delimiter = NULL; bool append = false; bool verbose = false; for (int i = 0; i < argc; i++) { if (ignorenext) { ignorenext = false; continue; } if (argv[i][0] == '-') { /* Append */ if (strncmp(argv[i], "-a", 2) == 0) append = true; /* Include dir */ if (strncmp(argv[i], "-I", 2) == 0) { if (argv[i][2] == '\0') { i++; _include_dirs.insert(strdup(argv[i])); } else { _include_dirs.insert(strdup(&argv[i][2])); } continue; } /* Define */ if (strncmp(argv[i], "-D", 2) == 0) { char *p = strchr(argv[i], '='); if (p != NULL) *p = '\0'; _defines.insert(strdup(&argv[i][2])); continue; } /* Output file */ if (strncmp(argv[i], "-f", 2) == 0) { if (filename != NULL) continue; filename = strdup(&argv[i][2]); continue; } /* Object file extension */ if (strncmp(argv[i], "-o", 2) == 0) { if (ext != NULL) continue; ext = strdup(&argv[i][2]); continue; } /* Starting string delimiter */ if (strncmp(argv[i], "-s", 2) == 0) { if (delimiter != NULL) continue; delimiter = strdup(&argv[i][2]); continue; } /* Verbose */ if (strncmp(argv[i], "-v", 2) == 0) verbose = true; continue; } ScanFile(argv[i], ext, false, verbose); } /* Default output file is Makefile */ if (filename == NULL) filename = strdup("Makefile"); /* Default delimiter string */ if (delimiter == NULL) delimiter = strdup("# DO NOT DELETE"); char backup[PATH_MAX]; strcpy(backup, filename); strcat(backup, ".bak"); char *content = NULL; long size = 0; /* Read in the current file; so we can overwrite everything from the * end of non-depend data marker down till the end. 
*/ FILE *src = fopen(filename, "rb"); if (src != NULL) { fseek(src, 0, SEEK_END); size = ftell(src); rewind(src); content = (char*)malloc(size * sizeof(*content)); if (fread(content, 1, size, src) != (size_t)size) { fprintf(stderr, "Could not read %s\n", filename); exit(-2); } fclose(src); } FILE *dst = fopen(filename, "w"); bool found_delimiter = false; if (size != 0) { src = fopen(backup, "wb"); if (fwrite(content, 1, size, src) != (size_t)size) { fprintf(stderr, "Could not write %s\n", filename); exit(-2); } fclose(src); /* Then append it to the real file. */ src = fopen(backup, "rb"); while (fgets(content, size, src) != NULL) { fputs(content, dst); if (!strncmp(content, delimiter, strlen(delimiter))) found_delimiter = true; if (!append && found_delimiter) break; } fclose(src); } if (!found_delimiter) fprintf(dst, "\n%s\n", delimiter); for (StringMap::iterator it = _files.begin(); it != _files.end(); it++) { for (StringSet::iterator h = it->second->begin(); h != it->second->end(); h++) { fprintf(dst, "%s: %s\n", it->first, *h); } } /* Clean up our mess. */ fclose(dst); free(delimiter); free(filename); free(ext); free(content); for (StringMap::iterator it = _files.begin(); it != _files.end(); it++) { for (StringSet::iterator h = it->second->begin(); h != it->second->end(); h++) { free(*h); } it->second->clear(); delete it->second; free(it->first); } _files.clear(); for (StringMap::iterator it = _headers.begin(); it != _headers.end(); it++) { for (StringSet::iterator h = it->second->begin(); h != it->second->end(); h++) { free(*h); } it->second->clear(); delete it->second; free(it->first); } _headers.clear(); for (StringSet::iterator it = _defines.begin(); it != _defines.end(); it++) { free(*it); } _defines.clear(); for (StringSet::iterator it = _include_dirs.begin(); it != _include_dirs.end(); it++) { free(*it); } _include_dirs.clear(); return 0; }
/*
 * NOTE(review): ScanFile keeps two function-local statics, 'defines' and
 * 'ignore', that are deliberately shared with the recursive calls made for
 * included headers (see ScanFile(h, ext, true, verbose) below). They are
 * only torn down at the end of a top-level (!header) call, so the function
 * is not reentrant and not thread-safe.
 *
 * 'ignore' is a stack with one entry per open #if/#ifdef/#ifndef block:
 * NOT_IGNORE (lines active), IGNORE_UNTIL_ELSE (skip until a matching
 * #else/#elif may re-activate), IGNORE_UNTIL_ENDIF (skip to #endif).
 * #define/#undef/#include directives inside an ignored region are skipped.
 *
 * Dependencies are recorded into _headers (for header files) or _files
 * (for sources, keyed by the filename with its extension replaced by
 * 'ext', defaulting to ".o"); each include also pulls in the transitive
 * set already recorded for that header.
 *
 * NOTE(review): the extension-replace path assumes filename contains a
 * '.' — strrchr returning NULL would be dereferenced; confirm callers
 * only pass names with extensions.
 */
/** * Scan a file for includes, defines and the lot. * @param filename the name of the file to scan. * @param ext the extension of the filename. * @param header whether the file is a header or not. * @param verbose whether to give verbose debugging information. */ void ScanFile(const char *filename, const char *ext, bool header, bool verbose) { static StringSet defines; static std::stack<Ignore> ignore; /* Copy in the default defines (parameters of depend) */ if (!header) { for (StringSet::iterator it = _defines.begin(); it != _defines.end(); it++) { defines.insert(strdup(*it)); } } File file(filename); Lexer lexer(&file); /* Start the lexing! */ lexer.Lex(); while (lexer.GetToken() != TOKEN_END) { switch (lexer.GetToken()) { /* We reached the end of the file... yay, we're done! */ case TOKEN_END: break; /* The line started with a # (minus whitespace) */ case TOKEN_SHARP: lexer.Lex(); switch (lexer.GetToken()) { case TOKEN_INCLUDE: if (verbose) fprintf(stderr, "%s #include ", filename); lexer.Lex(); switch (lexer.GetToken()) { case TOKEN_LOCAL: case TOKEN_GLOBAL: { if (verbose) fprintf(stderr, "%s", lexer.GetString()); if (!ignore.empty() && ignore.top() != NOT_IGNORE) { if (verbose) fprintf(stderr, " (ignored)"); break; } const char *h = GeneratePath(file.GetDirname(), lexer.GetString(), lexer.GetToken() == TOKEN_LOCAL); if (h != NULL) { StringMap::iterator it = _headers.find(h); if (it == _headers.end()) { it = (_headers.insert(StringMapItem(strdup(h), new StringSet()))).first; if (verbose) fprintf(stderr, "\n"); ScanFile(h, ext, true, verbose); } StringMap::iterator curfile; if (header) { curfile = _headers.find(filename); } else { /* Replace the extension with the provided extension of '.o'. */ char path[PATH_MAX]; strcpy(path, filename); *(strrchr(path, '.')) = '\0'; strcat(path, ext != NULL ? 
ext : ".o"); curfile = _files.find(path); if (curfile == _files.end()) { curfile = (_files.insert(StringMapItem(strdup(path), new StringSet()))).first; } } if (it != _headers.end()) { for (StringSet::iterator header = it->second->begin(); header != it->second->end(); header++) { if (curfile->second->find(*header) == curfile->second->end()) curfile->second->insert(strdup(*header)); } } if (curfile->second->find(h) == curfile->second->end()) curfile->second->insert(strdup(h)); free(h); } } /* FALL THROUGH */ default: break; } break; case TOKEN_DEFINE: if (verbose) fprintf(stderr, "%s #define ", filename); lexer.Lex(); if (lexer.GetToken() == TOKEN_IDENTIFIER) { if (verbose) fprintf(stderr, "%s", lexer.GetString()); if (!ignore.empty() && ignore.top() != NOT_IGNORE) { if (verbose) fprintf(stderr, " (ignored)"); break; } if (defines.find(lexer.GetString()) == defines.end()) defines.insert(strdup(lexer.GetString())); lexer.Lex(); } break; case TOKEN_UNDEF: if (verbose) fprintf(stderr, "%s #undef ", filename); lexer.Lex(); if (lexer.GetToken() == TOKEN_IDENTIFIER) { if (verbose) fprintf(stderr, "%s", lexer.GetString()); if (!ignore.empty() && ignore.top() != NOT_IGNORE) { if (verbose) fprintf(stderr, " (ignored)"); break; } StringSet::iterator it = defines.find(lexer.GetString()); if (it != defines.end()) { free(*it); defines.erase(it); } lexer.Lex(); } break; case TOKEN_ENDIF: if (verbose) fprintf(stderr, "%s #endif", filename); lexer.Lex(); if (!ignore.empty()) ignore.pop(); if (verbose) fprintf(stderr, " -> %signore", (!ignore.empty() && ignore.top() != NOT_IGNORE) ? "" : "not "); break; case TOKEN_ELSE: { if (verbose) fprintf(stderr, "%s #else", filename); lexer.Lex(); Ignore last = ignore.empty() ? NOT_IGNORE : ignore.top(); if (!ignore.empty()) ignore.pop(); if (ignore.empty() || ignore.top() == NOT_IGNORE) { ignore.push(last == IGNORE_UNTIL_ELSE ? 
NOT_IGNORE : IGNORE_UNTIL_ENDIF); } else { ignore.push(IGNORE_UNTIL_ENDIF); } if (verbose) fprintf(stderr, " -> %signore", (!ignore.empty() && ignore.top() != NOT_IGNORE) ? "" : "not "); break; } case TOKEN_ELIF: { if (verbose) fprintf(stderr, "%s #elif ", filename); lexer.Lex(); Ignore last = ignore.empty() ? NOT_IGNORE : ignore.top(); if (!ignore.empty()) ignore.pop(); if (ignore.empty() || ignore.top() == NOT_IGNORE) { bool value = ExpressionOr(&lexer, &defines, verbose); ignore.push(last == IGNORE_UNTIL_ELSE ? (value ? NOT_IGNORE : IGNORE_UNTIL_ELSE) : IGNORE_UNTIL_ENDIF); } else { ignore.push(IGNORE_UNTIL_ENDIF); } if (verbose) fprintf(stderr, " -> %signore", (!ignore.empty() && ignore.top() != NOT_IGNORE) ? "" : "not "); break; } case TOKEN_IF: { if (verbose) fprintf(stderr, "%s #if ", filename); lexer.Lex(); if (ignore.empty() || ignore.top() == NOT_IGNORE) { bool value = ExpressionOr(&lexer, &defines, verbose); ignore.push(value ? NOT_IGNORE : IGNORE_UNTIL_ELSE); } else { ignore.push(IGNORE_UNTIL_ENDIF); } if (verbose) fprintf(stderr, " -> %signore", (!ignore.empty() && ignore.top() != NOT_IGNORE) ? "" : "not "); break; } case TOKEN_IFDEF: if (verbose) fprintf(stderr, "%s #ifdef ", filename); lexer.Lex(); if (lexer.GetToken() == TOKEN_IDENTIFIER) { bool value = defines.find(lexer.GetString()) != defines.end(); if (verbose) fprintf(stderr, "%s[%d]", lexer.GetString(), value); if (ignore.empty() || ignore.top() == NOT_IGNORE) { ignore.push(value ? NOT_IGNORE : IGNORE_UNTIL_ELSE); } else { ignore.push(IGNORE_UNTIL_ENDIF); } } if (verbose) fprintf(stderr, " -> %signore", (!ignore.empty() && ignore.top() != NOT_IGNORE) ? 
"" : "not "); break; case TOKEN_IFNDEF: if (verbose) fprintf(stderr, "%s #ifndef ", filename); lexer.Lex(); if (lexer.GetToken() == TOKEN_IDENTIFIER) { bool value = defines.find(lexer.GetString()) != defines.end(); if (verbose) fprintf(stderr, "%s[%d]", lexer.GetString(), value); if (ignore.empty() || ignore.top() == NOT_IGNORE) { ignore.push(!value ? NOT_IGNORE : IGNORE_UNTIL_ELSE); } else { ignore.push(IGNORE_UNTIL_ENDIF); } } if (verbose) fprintf(stderr, " -> %signore", (!ignore.empty() && ignore.top() != NOT_IGNORE) ? "" : "not "); break; default: if (verbose) fprintf(stderr, "%s #<unknown>", filename); lexer.Lex(); break; } if (verbose) fprintf(stderr, "\n"); /* FALL THROUGH */ default: /* Ignore the rest of the garbage on this line */ while (lexer.GetToken() != TOKEN_EOL && lexer.GetToken() != TOKEN_END) lexer.Lex(); lexer.Lex(); break; } } if (!header) { for (StringSet::iterator it = defines.begin(); it != defines.end(); it++) { free(*it); } defines.clear(); while (!ignore.empty()) ignore.pop(); } }
/*
 * NOTE(review): Regenerates the deathmatch mission list file
 * (MISSION_DM_FILE in the current working directory). Steps, as the code
 * below shows:
 *  1. Delete any existing mission file.
 *  2. Enumerate every multiplayer world path from g_pClientButeMgr and
 *     fetch the file list for each.
 *  3. Collect entries whose base name starts with "DM_" or "DD_" and whose
 *     extension is "dat" into a StringSet (so names come out sorted and
 *     de-duplicated) as "path\basename" without the extension.
 *  4. For each collected level, write a "Mission%d" section: Level0, and —
 *     if a matching <level>.cfg parses via CDMButeMgr — NameId, NameStr,
 *     Photo and DefaultWeapons; otherwise fall back to the weapon manager's
 *     MP default weapons.
 *  5. Flush the profile file if anything was written.
 *
 * NOTE(review): strtok is used on a stack copy (strBaseName), so only the
 * first '.' split is taken as the extension boundary — a base name
 * containing an extra '.' would truncate there; confirm level names never
 * contain dots. Also assumes _getcwd succeeds; a NULL return would crash
 * the std::string construction.
 */
void CScreenMulti::CreateDMMissionFile() { char path[256]; std::string sFN = _getcwd(path,sizeof(path)); sFN += "\\"; sFN += MISSION_DM_FILE; if (CWinUtil::FileExist(sFN.c_str())) { remove(MISSION_DM_FILE); } // Get a list of world names and sort them alphabetically uint8 nNumPaths = g_pClientButeMgr->GetNumMultiWorldPaths(); char pathBuf[128]; FileEntry** pFilesArray = debug_newa(FileEntry*, nNumPaths); if (pFilesArray) { for (int i=0; i < nNumPaths; ++i) { pathBuf[0] = '\0'; g_pClientButeMgr->GetWorldPath(i, pathBuf, ARRAY_LEN(pathBuf),LTFALSE); if (pathBuf[0]) { pFilesArray[i] = g_pLTClient->GetFileList(pathBuf); } else { pFilesArray[i] = LTNULL; } } } char strBaseName[256]; char* pBaseName = NULL; char* pBaseExt = NULL; StringSet filenames; for (int i=0; i < nNumPaths; ++i) { pathBuf[0] = '\0'; g_pClientButeMgr->GetWorldPath(i, pathBuf, ARRAY_LEN(pathBuf),LTFALSE); if (pathBuf[0] && pFilesArray[i]) { sprintf(path, "%s\\", pathBuf); FileEntry* ptr = pFilesArray[i]; while (ptr) { if (ptr->m_Type == TYPE_FILE) { if (strnicmp(ptr->m_pBaseFilename,"DM_",3) == 0 || strnicmp(ptr->m_pBaseFilename,"DD_",3) == 0) { SAFE_STRCPY(strBaseName, ptr->m_pBaseFilename); pBaseName = strtok (strBaseName, "."); pBaseExt = strtok (NULL, "\0"); if (pBaseExt && stricmp (pBaseExt, "dat") == 0) { char szString[512]; sprintf(szString, "%s%s", path, pBaseName); // add this to the array filenames.insert(szString); } } } ptr = ptr->m_pNext; } g_pLTClient->FreeFileList(pFilesArray[i]); } } debug_deletea(pFilesArray); int index = 0; char szLabel[256]; StringSet::iterator iter = filenames.begin(); CDMButeMgr buteMgr; char szTmp[16]; char szString[512]; while (iter != filenames.end()) { bool bDefaultWeapons = false; sprintf(szLabel,"Mission%d",index); sprintf(szString, "\"%s\"", (*iter).c_str()); CWinUtil::WinWritePrivateProfileString( szLabel, "Level0", szString, sFN.c_str()); std::string sCfg = (*iter); sCfg += ".cfg"; if (buteMgr.Init(sCfg.c_str())) { MISSION mission; 
mission.Init(*buteMgr.GetButeMgr(),"Mission"); if (mission.nNameId > 0) { sprintf(szTmp,"%d",mission.nNameId); CWinUtil::WinWritePrivateProfileString( szLabel, "NameId", szTmp, sFN.c_str()); } if (!mission.sName.empty()) { sprintf(szString, "\"%s\"", mission.sName.c_str()); CWinUtil::WinWritePrivateProfileString( szLabel, "NameStr", szString, sFN.c_str()); } if (!mission.sPhoto.empty()) { sprintf(szString, "\"%s\"", mission.sPhoto.c_str()); CWinUtil::WinWritePrivateProfileString( szLabel, "Photo", szString, sFN.c_str()); } if (mission.nNumDefaultWeapons) { bDefaultWeapons = true; std::string sDef = "\""; for (int w = 0; w < mission.nNumDefaultWeapons; w++) { if (w > 0) sDef += ","; sDef += g_pWeaponMgr->GetWeapon(mission.aDefaultWeapons[w])->szName; } sDef += "\""; CWinUtil::WinWritePrivateProfileString( szLabel, "DefaultWeapons", sDef.c_str(), sFN.c_str()); } buteMgr.Term(); } if (!bDefaultWeapons) { sprintf(szString, "\"%s\"", g_pWeaponMgr->GetMPDefaultWeapons()); CWinUtil::WinWritePrivateProfileString( szLabel, "DefaultWeapons", szString, sFN.c_str()); } ++index; iter++; } // Flush the file. (if anything was added) if (index > 0) { CWinUtil::WinWritePrivateProfileString( NULL, NULL, NULL, sFN.c_str()); } filenames.clear(); }