// Build m_namemapp: ask every registered module callback to declare its
// signals, then ensure every signal lives under some module scope.
void VerilatedVcd::makeNameMap() {
    // Take signal information from each module and build m_namemapp
    deleteNameMap();
    m_nextCode = 1;
    m_namemapp = new NameMap;
    for (vluint32_t ent = 0; ent< m_callbacks.size(); ent++) {
        VerilatedVcdCallInfo *cip = m_callbacks[ent];
        cip->m_code = nextCode();
        // Each init callback registers its signals into m_namemapp.
        (cip->m_initcb) (this, cip->m_userthis, cip->m_code);
    }
    // Though not speced, it's illegal to generate a vcd with signals
    // not under any module - it crashes at least two viewers.
    // If no scope was specified, prefix everything with a "top"
    // This comes from user instantiations with no name - IE Vtop("").
    bool nullScope = false;
    for (NameMap::iterator it=m_namemapp->begin(); it!=m_namemapp->end(); ++it) {
        const char* hiername = (*it).first.c_str();
        // A leading '\t' apparently marks a name with no enclosing scope.
        if (hiername[0] == '\t') nullScope=true;
    }
    if (nullScope) {
        // Rebuild the whole map with "top" prepended to every name.
        NameMap* newmapp = new NameMap;
        for (NameMap::iterator it=m_namemapp->begin(); it!=m_namemapp->end(); ++it) {
            const string& hiername = it->first;
            const string& decl = it->second;
            string newname = string("top");
            // Scoped names get "top " (space-separated hierarchy level);
            // scope-less ('\t'-prefixed) names are joined directly.
            if (hiername[0] != '\t') newname += ' ';
            newname += hiername;
            newmapp->insert(make_pair(newname,decl));
        }
        deleteNameMap();
        m_namemapp = newmapp;
    }
}
// Create a new archive containing every file/directory named on the
// command line. Returns false on the first stream or archive error.
bool ArcApp::Create() {
    // turn m_args into a list of files, i.e. expand wildcards, recurse
    // directories and remove duplicates
    NameMap files;
    MakeFileList(files);

    wxOutputStreamPtr out(MakeOutputStream());
    if (!out.get()) return false;

    wxArchiveOutputStreamPtr arc(m_factory->NewStream(*out));

    for (NameMap::iterator it = files.begin(); it != files.end(); ++it) {
        wxString name = it->second;
        wxDateTime dt(wxFileModificationTime(name));
        *m_info << "adding " << name.mb_str() << std::endl;

        if (wxDirExists(name)) {
            // Directories become explicit directory entries.
            if (!arc->PutNextDirEntry(name, dt)) return false;
        } else {
            wxFFileInputStream in(name);
            // NOTE(review): a file that fails to open is silently skipped
            // (no error reported, no entry written) -- confirm intended.
            if (in.Ok()) {
                if (!arc->PutNextEntry(name, dt, in.GetSize()) ||
                    !arc->Write(in) || !in.Eof()) return false;
            }
        }
    }

    // Close order matters: archive footer first, then the raw stream.
    return arc->Close() && out->Close();
}
// Fail the current test if any unexpected leftover ("junk") files were
// found, naming the first offender in the failure message.
void bitch_about_junk_files(const NameMap &junk) {
    const size_t count = junk.size();
    if (count == 0)
        return;

    if (count == 1) {
        FAIL() << "Junk file \"" << junk.begin()->first << '"';
    } else {
        FAIL() << "Found " << count << " junk files, including "
               << '"' << junk.begin()->first << '"';
    }
}
void user_list(void) { NameMap users; ServerPrx server; server = meta->getServer(serverId, ctx); users = server->getRegisteredUsers("", ctx); for (NameMap::iterator ii=users.begin(); ii != users.end(); ii++) cout << setw(8) << right << (*ii).first << " " << left << (*ii).second << endl; }
virtual void lookupHash(const URI &namedUri, const Callback &cb) { { boost::shared_lock<boost::shared_mutex> lookuplock(mMut); NameMap::const_iterator iter = mLookupCache.find(namedUri); if (iter != mLookupCache.end()) { RemoteFileId rfid ((*iter).second); // copy, because the map could change. lookuplock.unlock(); cb(namedUri, &rfid); return; } } NameLookupManager::lookupHash(namedUri, cb); }
// Expand m_args into `files`: wildcards are expanded (Windows only,
// since Unix shells do it for us), directories are recursed, and the
// NameMap keying removes duplicates.
// NOTE: the loop body is shared between the two preprocessor branches;
// on Windows it sits inside an extra while-loop opened in the #else arm.
void ArcApp::MakeFileList(NameMap& files) {
    DirTraverser traverser(files, *m_factory);

    for (size_t i = 0; i < m_args.size(); ++i) {
#ifndef __WXMSW__
        wxString name = m_args[i];
#else
        // on Windows expand wildcards
        wxString name = wxFindFirstFile(m_args[i], 0);
        if (name.empty())
            wxLogError(_T("file not found '%s'"), m_args[i].c_str());
        while (!name.empty()) {
#endif
            // if the name is a directory recursively add all its contents
            if (wxDirExists(name)) {
                if (traverser.OnDir(name) == wxDIR_CONTINUE) {
                    wxDir dir(name);
                    if (dir.IsOpened())
                        dir.Traverse(traverser);
                }
            } else {
                traverser.OnFile(name);
            }
#ifdef __WXMSW__
            name = wxFindNextFile();
        }
#endif
    }

    if (files.empty())
        wxLogWarning(_T("nothing to do"));
}
// Resolve each search path against the actual (case-preserving) entries
// of rootPath, matching case-insensitively. Paths with no match are
// silently dropped; matches are returned joined onto rootPath.
StringList AbstractFileManager::resolveSearchpaths(const String& rootPath, const StringList& searchPaths) {
    // Index directory entries by lowercased name for case-insensitive lookup.
    typedef std::map<String, String> NameMap;
    NameMap lowerToActual;
    StringList entries = directoryContents(rootPath, "", true, false);
    for (size_t idx = 0; idx < entries.size(); ++idx)
        lowerToActual[Utility::toLower(entries[idx])] = entries[idx];

    StringList resolved;
    for (size_t idx = 0; idx < searchPaths.size(); ++idx) {
        NameMap::iterator match = lowerToActual.find(Utility::toLower(searchPaths[idx]));
        if (match != lowerToActual.end())
            resolved.push_back(appendPath(rootPath, match->second));
    }
    return resolved;
}
void printContributors(const std::string& changeLog, bool printNumEntries) { NameMap names; buildContributors(names); if (!changeLog.empty()) { readContributors(names, changeLog); } typedef multimap<unsigned int, NamePair> SortedNameMap; SortedNameMap sortedNames; for (NameMap::iterator itr = names.begin(); itr != names.end(); ++itr) { sortedNames.insert(SortedNameMap::value_type(itr->second, itr->first)); } cout << names.size() << " Contributors:" << endl << endl; if (printNumEntries) { cout << "Entries Firstname Surname" << endl; cout << "-------------------------" << endl; for (SortedNameMap::reverse_iterator sitr = sortedNames.rbegin(); sitr != sortedNames.rend(); ++sitr) { cout << sitr->first << "\t" << sitr->second.first; if (!sitr->second.second.empty()) cout << " " << sitr->second.second; cout << endl; } } else { cout << "Firstname Surname" << endl; cout << "-----------------" << endl; for (SortedNameMap::reverse_iterator sitr = sortedNames.rbegin(); sitr != sortedNames.rend(); ++sitr) { cout << sitr->second.first; if (!sitr->second.second.empty()) cout << " " << sitr->second.second; cout << endl; } } }
// Compute and emit (tab-separated) the mean absolute error and root
// mean squared error of the current strong classifier on pData,
// treating class names as numeric values (converted via atof).
void OutputInfo::outputMAE(InputData* pData) {
    const int numExamples = pData->getNumExamples();
    // Per-example discriminant table accumulated for this data set.
    table& g = _gTableMap[pData];

    vector<Label>::const_iterator lIt,maxlIt,truelIt;
    float maxDiscriminant,mae = 0.0,mse = 0.0,tmpVal;
    char maxLabel;

    // Get label values: they must be convertible to float
    vector<float> labelValues;
    NameMap classMap = pData->getClassMap();
    for (int l = 0;l < classMap.getNumNames(); ++l)
        labelValues.push_back(atof(classMap.getNameFromIdx(l).c_str()));

    // Building the strong learner (discriminant function)
    for (int i = 0; i < numExamples; ++i){
        const vector<Label>& labels = pData->getLabels(i);
        maxDiscriminant = -numeric_limits<float>::max();
        maxLabel = -100; // sentinel below any expected label value

        // NOTE(review): maxlIt/truelIt remain uninitialized if `labels`
        // is empty -- assumes every example has at least one label;
        // verify against the data loader.
        for (lIt = labels.begin(); lIt != labels.end(); ++lIt ) {
            // Predicted class: label with the largest discriminant value.
            if ( g[i][lIt->idx] > maxDiscriminant ) {
                maxDiscriminant = g[i][lIt->idx];
                maxlIt = lIt;
            }
            // True class: label with the largest y value.
            if ( lIt->y > maxLabel ) {
                maxLabel = lIt->y;
                truelIt = lIt;
            }
        }

        // Error of this example: difference of the numeric label values.
        tmpVal = labelValues[truelIt->idx] - labelValues[maxlIt->idx];
        mae += fabs(tmpVal);
        mse += tmpVal * tmpVal;
    }

    _outStream << '\t' << mae/(float)(numExamples)
               << '\t' << sqrt(mse/(float)(numExamples));
}
// Scan a ChangeLog-style text file for contributor names and accumulate
// per-contributor entry counts into `names`. Tokenizes the whole file
// into whitespace-separated words, then pattern-matches name sequences.
void readContributors(NameMap& names, const string& file) {
    osgDB::ifstream fin(file.c_str());

    // Slurp the file as whitespace-separated words.
    Words words;
    while(fin) {
        string keyword;
        fin >> keyword;
        words.push_back(keyword);
    }

    string blank_string;

    for(unsigned int i = 0; i < words.size(); ++i) {
        if (submissionsSequence(words, i)) {
            // A submission marker: the next one or two words are the
            // contributor's first (and possibly last) name.
            if (i + 2 < words.size() && validName(words[i + 1])) {
                NamePair name = createName(words[i + 1], words[i + 2]);
                nameCorrection(name);
                if (!name.first.empty()) ++names[name];
                i += 2;
            } else if (i + 1 < words.size() && validName(words[i + 1])) {
                NamePair name = createName(words[i + 1], blank_string);
                nameCorrection(name);
                if (!name.first.empty()) ++names[name];
                i += 1;
            }
        } else {
            // Special-case bare first names of the two project leads.
            if (words[i] == "robert") {
                ++names[NameRobertOsfield];
            } else if (words[i] == "don") {
                ++names[NameDonBurns];
            }
        }
    }

    // reassign first-name-only entries to their full-name entries.
    // Relies on map ordering: "X" (empty surname) sorts immediately
    // before "X <surname>", so only the next entry needs checking.
    if (names.size() > 1) {
        for (NameMap::iterator itr = names.begin(); itr != names.end(); ) {
            if (itr->first.second.empty()) {
                NameMap::iterator next_itr = itr;
                ++next_itr;
                if (next_itr != names.end() && itr->first.first == next_itr->first.first) {
                    // Merge the count, then erase the first-name-only entry.
                    // (map::erase only invalidates the erased iterator.)
                    next_itr->second += itr->second;
                    names.erase(itr);
                    itr = next_itr;
                } else {
                    ++itr;
                }
            } else {
                ++itr;
            }
        }
    }

    // remove the double entries from Robert's contributions
    // (every other contributor's entries were also counted for Robert).
    if (names.size() > 1) {
        for (NameMap::iterator itr = names.begin(); itr != names.end(); ++itr) {
            if (itr->first != NameRobertOsfield && itr->first != NameDonBurns) {
                names[NameRobertOsfield] -= itr->second;
            }
        }
    }
}
// Parse the relation-graph description in `inputFile` and build a
// RelationGraphR from it. Expected line-oriented layout:
//   line 1: number of domains; line 2: number of contexts (= domains-1);
//   per domain: "name;elementCount", then the path to its name file;
//   per context: "domainA--domainB", then the path to its fimi file.
// NOTE(review): nothing is returned on the open-failure path -- this
// assumes Error() never returns (throws or exits); confirm.
RelationGraphR* MakeRelationGraphR(string &inputFile){
    ifstream myfile(inputFile.c_str());
    if (myfile.is_open()){
        string line;
        getline (myfile,line);
        int numDomains = atoi(line.c_str()); //get the number of domains
        getline (myfile,line);
        int numContexts = atoi(line.c_str()); //get the number of contexts
        if(numContexts + 1 != numDomains) {
            string errMsg = "Number of domains and number of contexts are not consistent";
            Error(errMsg);
        }
        vector<string> domainNames;
        map<string,int> domainName_id_map;
        map<int,int> domainId_size_map; //map domain number to number of elements
        vector<NameMap*> nameMaps;
        //get set names and their name files
        for(int i=0; i < numDomains; i++){
            getline(myfile,line); //this is the domain name
            //the line is split into name and number of elements
            vector<string> tkns;
            Tokenize(line, tkns, ";");
            //first token is name of domain
            domainNames.push_back(tkns[0]);
            //second token is number of elements; domain ids are 1-based
            domainId_size_map[i+1] = atoi(tkns[1].c_str());
            domainName_id_map[tkns[0]]=i+1;
            getline(myfile,line); //this is the path to the namefile
            NameMap *nmp = new NameMap(line,atoi(tkns[1].c_str()));
            nmp->SetId(i+1);
            nameMaps.push_back(nmp);
        }
        //now get contexts and relation graph
        RelationGraphR *grph = new RelationGraphR();
        for(int i=0; i < numContexts; i++){
            getline(myfile,line); //this line specifies the two domains
            vector<string> currDomainNames;
            string ctxName;
            Tokenize(line,currDomainNames,"--");
            // operator[] default-inserts id 0 for unknown names; the size
            // check below detects that insertion as an error.
            // NOTE(review): signed/unsigned comparison (size_t vs int).
            int dId1 = domainName_id_map[currDomainNames[0]];
            //check if a new element was inserted indicating an error
            if(domainName_id_map.size() > numDomains) {
                string errMsg = "Error specifying context..."+currDomainNames[0]+" does not match any previoulsy defined domain";
                Error(errMsg);
            }
            int dId2 = domainName_id_map[currDomainNames[1]];
            //check if a new element was inserted indicating an error
            if(domainName_id_map.size() > numDomains) {
                string errMsg = "Error specifying context..."+currDomainNames[1]+" does not match any previoulsy defined domain";
                Error(errMsg);
            }
            ctxName = currDomainNames[0]+"__"+currDomainNames[1];
            getline(myfile,line); //this line specifies the fimi file
            grph->AddRContext(MakeRContext(line,dId1,dId2,ctxName,i+1,nameMaps[dId1-1],nameMaps[dId2-1], domainId_size_map[dId1],domainId_size_map[dId2]));
        }
        myfile.close();
        return grph;
    }
    else{
        string errMsg="Could not open the input file: "+inputFile;
        Error(errMsg);
    }
}
// Fill `mission` in as a "destroy" mission generated from the mission
// data linked to `terminal`: picks a random destroy-type mission entry,
// sets title/description/creator STF strings, a random nearby
// destination, difficulty, reward and target. Returns NULL (and leaves
// `mission` partially modified) when the terminal has no destroy data.
// NOTE(review): mt/md/mo buffers are not declared in this function --
// presumably members or globals; verify their size vs the sprintf use.
MissionObject* MissionManager::generateDestroyMission(MissionObject* mission, uint64 terminal) {
    mission->setMissionType(destroy); //crc = destroy

    //find the missiondata for the respective terminal
    Terminal_Mission_Link* link = NULL;
    TerminalMap::iterator terminalMapIt = mTerminalMap.find(terminal);
    if(terminalMapIt != mTerminalMap.end()) {
        gLogger->logMsgF("MissionManager : found the terminal",MSG_HIGH);
        // NOTE(review): this local shadows the uint64 `terminal` parameter.
        Terminal_Type* terminal = (*terminalMapIt).second;

        //now get the amount of stfs and get one per chance
        // NOTE(review): amount == 0 would make this a modulo by zero.
        uint32 amount = terminal->list.size();
        uint32 chosen = gRandom->getRand() % amount;
        gLogger->logMsgF("MissionManager : random : %u",MSG_HIGH,chosen);

        // Scan from the chosen position onward (counter keeps growing
        // across passes) until a destroy-type entry is hit.
        // NOTE(review): if the list holds no MissionTypeDestroy entry
        // this loop never terminates -- confirm the data guarantees one.
        bool found = false;
        uint32 counter = 0;
        while(!found) {
            gLogger->logMsgF("MissionManager : != found ",MSG_HIGH);
            MissionLinkList::iterator it = terminal->list.begin();
            while(it != terminal->list.end()) {
                if(counter >= chosen) {
                    if((*it)->type == MissionTypeDestroy) {
                        found = true;
                        link = (*it);
                        break;
                    }
                }
                counter++;
                it++;
            }
            it = terminal->list.begin();
        }
    }

    int mission_num;
    if(link) {
        gLogger->logMsgF("MissionManager : found db destroy missions",MSG_HIGH);
        //now set the stf
        char s[255];
        sprintf(s,"mission/%s",link->missiontype->stf.getAnsi());
        mission->setTitleFile(s);
        //the mission within the stf (1-based index into the entry count)
        mission_num = (gRandom->getRand() % link->content)+1;
        mission->setNum(mission_num);
        //Mission Title ("m<N>t" key inside the stf)
        sprintf(mt,"m%dt",mission_num);
        mission->setTitle(mt);
        //Mission Description ("m<N>d" key inside the stf)
        sprintf(md,"m%dd",mission_num);
        mission->setDetailFile(s);
        mission->setDetail(md);
        //still have to sort out the names of the mission givers
    }
    else {
        gLogger->logMsgF("MissionManager : No mission file associated :(",MSG_HIGH);
        return NULL;
    }

    // Hard-coded placeholder target table (see "END TEMP" below).
    missionTargets missionTarget[4] = {
        {0xB9BA5440, "@lair_n:naboo_otolla_gungan_camp_neutral_large_theater"},
        {0x6D4C33E5, "@lair_n:naboo_capper_spineflap_nest_neutral_large"},
        {0xA0057DAE, "@lair_n:naboo_kaadu_lair_neutral_medium_boss_01"},
        {0xFA6FD53A, "@lair_n:lair_base"},
    };

    //Randomly choose a target
    int target = gRandom->getRand() % 4;
    //END TEMP

    //Position: random offset 50..500 from the owner, flattened to y=0.
    //int radius = 500; //500m radius
    Location destination;
    glm::vec3 new_vector = glm::gtx::random::vecRand3(50.0f, 500.0f);
    new_vector.y = 0;
    destination.Coordinates = mission->getOwner()->mPosition + new_vector;
    destination.CellID = 0;
    destination.PlanetCRC = BString(gWorldManager->getPlanetNameThis()).getCrc();
    mission->setDestination(destination);

    //Creator: look up the "m<N>o" name; fall back to a random pooled name.
    sprintf(mo,"m%do",mission_num);
    string moS(mo);
    gLogger->logMsgF("MissionManager : creator :%s",MSG_HIGH,moS.getAnsi());
    NameMap nameMap = link->missiontype->names;
    NameMap::iterator NameMapIt = nameMap.find(moS.getCrc());
    if(NameMapIt != nameMap.end()) {
        sprintf(mo,"%s",(*NameMapIt).second->name.getAnsi());
        mission->setCreator(mo);
        //mo = (*NameMapIt).second->name.getAnsi();
    }
    else {
        // NOTE(review): assumes mNameMap is non-empty and keyed densely
        // 0..size-1; find() result is dereferenced unchecked.
        uint32 selected = gRandom->getRand() % this->mNameMap.size();
        NameMap::iterator it = mNameMap.find(selected);
        mission->setCreator((*it).second->name.getAnsi());
    }

    //Difficulty: 5..94
    mission->setDifficulty((gRandom->getRand() % 90)+5);
    //Payment: 1000..10999
    mission->setReward((gRandom->getRand() % 10000)+1000);
    //Target
    mission->setTargetModel(missionTarget[target].crc);
    mission->setTarget(missionTarget[target].name);

    return mission;
}
// Unwind each scope stack back to the depths recorded in `offset`
// (a snapshot previously taken with top()).
void pop(const State& offset) {
    variables.pop(offset[0]);
    typedefs.pop(offset[1]);
    generics.pop(offset[2]);
}
// Snapshot the current depth of every scope stack, suitable for a
// later matching pop().
State top() const {
    State snapshot = { variables.top(), typedefs.top(), generics.top() };
    return snapshot;
}
// Parse the header section of an ARFF file up to the @DATA marker:
// collects class labels into classMap, attribute names/types into
// attributeNameMap/attributeTypes, and enumerated-attribute labels into
// a local NameMap per attribute.
// NOTE(review): the tail of this function (closing braces of the
// switch/while, the `break` for TT_ATTRIBUTE, and any push into
// `enumMaps`) appears truncated in this excerpt -- code below is kept
// exactly as found.
void ArffParser::readHeader( ifstream& in, NameMap& classMap,
                             vector<NameMap>& enumMaps, NameMap& attributeNameMap,
                             vector<RawData::eAttributeType>& attributeTypes )
{
    bool isData = false;
    string tmpStr;
    string tmpAttrType;

    // Treat '{', ',' and '}' as whitespace so brace-wrapped label lists
    // can be streamed token by token.
    locale labelsLocale = locale(locale(), new nor_utils::white_spaces("{,}"));

    while ( !isData )
    {
        switch ( getNextTokenType(in) )
        {
        case TT_DATA:
            isData = true;
            break;

        case TT_COMMENT:
            getline(in, tmpStr); // ignore line
            break;

        case TT_RELATION:
            in >> _headerFileName;
            break;

        case TT_ATTRIBUTE:
            in >> tmpStr;

            if ( nor_utils::cmp_nocase(tmpStr, "class") )
            {
                // It's a class!!
                // Skip leading whitespace, then put the first
                // non-space character back for getline.
                char firstChar = 0;
                while ( isspace(firstChar = in.get()) && !in.eof() );
                in.putback(firstChar);

                getline(in, tmpStr);
                stringstream ss(tmpStr);
                ss.imbue(labelsLocale);

                // read the classes
                for (;;)
                {
                    ss >> tmpStr;
                    if ( ss.eof() )
                        break;
                    tmpStr = nor_utils::trim(tmpStr);
                    if (!tmpStr.empty())
                        classMap.addName(tmpStr);
                }
                in.putback( '\n' );
            }
            else if ( nor_utils::cmp_nocase(tmpStr.substr(0,5), "class") )
            {
                // "classXYZ" form: the suffix is the class name.
                classMap.addName(tmpStr.substr(5));
                _hasAttributeClassForm = true;
            }
            else
            {
                // Ordinary attribute: read its declared type.
                NameMap enumMap;
                in >> tmpAttrType;
                if ( nor_utils::cmp_nocase(tmpAttrType, "numeric") ||
                     nor_utils::cmp_nocase(tmpAttrType, "real") ||
                     nor_utils::cmp_nocase(tmpAttrType, "integer") )
                {
                    attributeNameMap.addName(tmpStr);
                    attributeTypes.push_back(RawData::ATTRIBUTE_NUMERIC);
                }
                else if ( nor_utils::cmp_nocase(tmpAttrType, "string") )
                {
                    // A string attribute is only allowed first, as the
                    // example-name column.
                    if (attributeNameMap.getNumNames() == 0)
                        _hasName = true;
                    else {
                        cerr << "ERROR: One can specify the name of an example only as the first attribute, otherwise string types are not supported!!" << endl;
                        exit(1);
                    }
                }
                else
                {
                    // enum attributeTypes
                    // For the time being the enumeration cannot contain spaces, we should
                    // correct it.
                    if (tmpAttrType[0] == '{')
                    {
                        attributeNameMap.addName(tmpStr);
                        attributeTypes.push_back(RawData::ATTRIBUTE_ENUM);
                        stringstream ss(tmpAttrType);
                        ss.imbue(labelsLocale);
                        for (;;)
                        {
                            ss >> tmpAttrType;
                            if ( ss.eof() )
                                break;
                            tmpAttrType = nor_utils::trim(tmpAttrType);
                            if (!tmpAttrType.empty())
                                enumMap.addName(tmpAttrType);
                        }
                    }
                    else {
                        cerr << "ERROR: Unknown attribute type " << tmpAttrType[0] << endl;
                        exit(1);
                    }
                }
// Add/replace files in an existing archive: streams the old archive to
// a new one, substituting entries whose names match the file list and
// appending list entries not present in the old archive.
bool ArcApp::Add() {
    // turn m_args into a list of files, i.e. expand wildcards, recurse
    // directories and remove duplicates
    NameMap files;
    MakeFileList(files);

    wxInputStreamPtr in(MakeInputStream());
    if (!in.get()) return false;
    wxOutputStreamPtr out(MakeOutputStream());
    if (!out.get()) return false;

    wxArchiveInputStreamPtr arc(m_factory->NewStream(*in));
    wxArchiveOutputStreamPtr outarc(m_factory->NewStream(*out));

    wxArchiveEntryPtr entry(arc->GetNextEntry());

    outarc->CopyArchiveMetaData(*arc);

    for (;;) {
        // `keep` == copy the old entry through unchanged (set false once
        // a replacement has been written instead).
        bool keep;
        NameMap::iterator it;

        if (!entry.get()) {
            // Old archive exhausted: append the remaining new files.
            if (files.empty()) break;
            it = files.begin();
            keep = false;
        } else {
            // Old entry present: replace it if it's in the file list.
            it = files.find(entry->GetInternalName());
            keep = true;
        }

        if (it != files.end()) {
            wxString name = it->second;
            files.erase(it);   // each file is written at most once

            if (wxDirExists(name)) {
                *m_info << "adding " << name.mb_str() << std::endl;
                wxDateTime dt(wxFileModificationTime(name));
                if (!outarc->PutNextDirEntry(name, dt)) return false;
                keep = false;
            } else {
                wxFFileInputStream file(name);
                // NOTE(review): an unreadable file keeps the OLD entry
                // (keep stays true) rather than reporting an error.
                if (file.Ok()) {
                    *m_info << "adding " << name.mb_str() << std::endl;
                    wxDateTime dt(wxFileModificationTime(name));
                    if (!outarc->PutNextEntry(name, dt, file.GetSize()) ||
                        !outarc->Write(file) || !file.Eof()) return false;
                    keep = false;
                }
            }
        }

        if (keep)
            if (!outarc->CopyEntry(entry.release(), *arc)) return false;

        // Advance the old archive unless we're in append-only mode
        // (entry already null and nothing was kept).
        if (entry.get() || keep)
            entry.reset(arc->GetNextEntry());
    }

    // Close the input before closing the outputs.
    in.reset();
    return arc->Eof() && outarc->Close() && out->Close();
}
virtual void addToCache(const URI &origNamedUri, const RemoteFileId &toFetch) { boost::unique_lock<boost::shared_mutex> updatecache(mMut); mLookupCache.insert(NameMap::value_type(origNamedUri, toFetch)); }
// Create an Archive from the specified File List.
// Writes header, filter records, directory records, per-filter config
// data, the (filter-encoded) file payloads, and finally the file
// records. Uses the module-global g_FromBuffer/g_ToBuffer/g_FromPos
// that the Reader/Writer callbacks operate on.
// Returns VFS_FALSE (with SetLastError) on any failure.
VFS_BOOL VFS_Archive_CreateFromFileList( const VFS_String& strArchiveFileName, const VFS_FileNameMap& Files, const VFS_FilterNameList& UsedFilters )
{
    static VFS_BYTE Chunk[ FILE_COPY_CHUNK_SIZE ];

    // If there's already an Archive with the same File Name and it's open...
    VFS_EntityInfo Info;
    if( VFS_Archive_GetInfo( ToLower( strArchiveFileName ), Info ) )
    {
        // Check if the Archive is open.
        if( GetOpenArchives().find( ToLower( Info.strPath ) ) != GetOpenArchives().end() )
        {
            // Check if the Reference Count is != 0.
            if( GetOpenArchives()[ ToLower( Info.strPath ) ]->GetRefCount() > 0 )
            {
                // We don't want to manipulate an open Archive, do we?
                SetLastError( VFS_ERROR_IN_USE );
                return VFS_FALSE;
            }
            else
            {
                // Free the Archive.
                delete GetOpenArchives()[ ToLower( Info.strPath ) ];
                GetOpenArchives().erase( ToLower( Info.strPath ) );
            }
        }
    }
    else
    {
        Info.strPath = ToLower( strArchiveFileName );
        SetLastError( VFS_ERROR_NONE );
    }

    // Check the Filter Names and make a List of all Filter Pointers.
    VFS_FilterList Filters;
    for( VFS_FilterNameList::const_iterator iter = UsedFilters.begin(); iter != UsedFilters.end(); iter++ )
    {
        // Bad Filter Name?
        if( !VFS_ExistsFilter( *iter ) )
        {
            SetLastError( VFS_ERROR_INVALID_PARAMETER );
            return VFS_FALSE;
        }

        // Add the Filter.
        Filters.push_back( VFS_GetFilter( *iter ) );
    }

    // Check all Files (source must exist, archived name must fit).
    for( VFS_FileNameMap::const_iterator iter2 = Files.begin(); iter2 != Files.end(); iter2++ )
    {
        if( !VFS_File_Exists( ( *iter2 ).first ) )
        {
            SetLastError( VFS_ERROR_NOT_FOUND );
            return VFS_FALSE;
        }
        VFS_String strName;
        VFS_Util_GetName( ( *iter2 ).second, strName );
        if( strName.size() > VFS_MAX_NAME_LENGTH )
        {
            SetLastError( VFS_ERROR_INVALID_PARAMETER );
            return VFS_FALSE;
        }
    }

    // Make a list of the Directories to create.
    // NOTE(review): parent directories are appended AFTER their children
    // here; the parent-index search during the write phase asserts the
    // parent is present, not that it precedes the child.
    typedef vector< VFS_String > NameMap;
    NameMap Dirs;
    for( VFS_FileNameMap::const_iterator iter3 = Files.begin(); iter3 != Files.end(); iter3++ )
    {
        VFS_String strDir;
        VFS_Util_GetPath( ( *iter3 ).second, strDir );
        strDir = ToLower( strDir );
        if( strDir != VFS_TEXT( "" ) && find( Dirs.begin(), Dirs.end(), strDir ) == Dirs.end() )
        {
            // Add the top-level Dirs.
            while( strDir.rfind( VFS_PATH_SEPARATOR ) != VFS_String::npos )
            {
                if( find( Dirs.begin(), Dirs.end(), strDir ) != Dirs.end() )
                    break;
                Dirs.push_back( ToLower( strDir ) );
                if( strDir.size() > VFS_MAX_NAME_LENGTH )
                {
                    SetLastError( VFS_ERROR_INVALID_PARAMETER );
                    return VFS_FALSE;
                }
                strDir = strDir.substr( 0, strDir.rfind( VFS_PATH_SEPARATOR ) );
            }
            if( find( Dirs.begin(), Dirs.end(), strDir ) == Dirs.end() )
            {
                Dirs.push_back( ToLower( strDir ) );
                if( strDir.size() > VFS_MAX_NAME_LENGTH )
                {
                    SetLastError( VFS_ERROR_INVALID_PARAMETER );
                    return VFS_FALSE;
                }
            }
        }
    }

    // (Re)create the Target File.
    VFS_Handle hFile = VFS_File_Create( Info.strPath + VFS_TEXT( "." ) + VFS_ARCHIVE_FILE_EXTENSION, VFS_READ | VFS_WRITE );
    if( hFile == VFS_INVALID_HANDLE_VALUE )
        return VFS_FALSE;

    // Write the Header.
    ARCHIVE_HEADER Header;
    memcpy( Header.ID, ARCHIVE_ID, sizeof( ARCHIVE_ID ) );
    Header.wVersion = VFS_VERSION;
    Header.dwNumFilters = ( VFS_DWORD )Filters.size();
    Header.dwNumDirs = ( VFS_DWORD )Dirs.size();
    Header.dwNumFiles = ( VFS_DWORD )Files.size();
    VFS_DWORD dwWritten;
    if( !VFS_File_Write( hFile, ( const VFS_BYTE* ) &Header, sizeof( ARCHIVE_HEADER ), &dwWritten ) )
    {
        VFS_File_Close( hFile );
        return VFS_FALSE;
    }

    // Write the Filters.
    // NOTE(review): filter names are strcpy'd into a fixed szName field
    // without a length check -- confirm GetName() is bounded.
    for( VFS_FilterList::iterator iter4 = Filters.begin(); iter4 != Filters.end(); iter4++ )
    {
        ARCHIVE_FILTER Filter;
        strcpy( Filter.szName, ToLower( ( *iter4 )->GetName() ).c_str() );
        if( !VFS_File_Write( hFile, ( const VFS_BYTE* ) &Filter, sizeof( ARCHIVE_FILTER ) ) )
        {
            VFS_File_Close( hFile );
            return VFS_FALSE;
        }
    }

    // Write the Directories.
    for( NameMap::iterator iter5 = Dirs.begin(); iter5 != Dirs.end(); iter5++ )
    {
        ARCHIVE_DIR Dir;

        // Get the Name of the Dir and add it.
        VFS_String strName;
        VFS_Util_GetName( *iter5, strName );
        strcpy( Dir.szName, ToLower( strName ).c_str() );

        // Remove the <pathsep> and the Name from the path; the rest should be the Parent Directory.
        if( ( *iter5 ).find( VFS_PATH_SEPARATOR ) != VFS_String::npos )
        {
            // Get the Name of the Parent Directory.
            VFS_String strParentDir = ( *iter5 ).substr( 0, ( *iter5 ).rfind( VFS_PATH_SEPARATOR ) );

            // Get the Index of the Parent Directory.
            assert( find( Dirs.begin(), Dirs.end(), ToLower( strParentDir ) ) != Dirs.end() );
            Dir.dwParentIndex = ( VFS_DWORD )( find( Dirs.begin(), Dirs.end(), ToLower( strParentDir ) ) - Dirs.begin() );
        }
        else
            Dir.dwParentIndex = DIR_INDEX_ROOT;

        if( !VFS_File_Write( hFile, ( const VFS_BYTE* ) &Dir, sizeof( ARCHIVE_DIR ) ) )
        {
            VFS_File_Close( hFile );
            return VFS_FALSE;
        }
    }

    // Get the starting offset for the file data (payloads live past the
    // fixed-size header/filter/dir/file record tables).
    VFS_DWORD dwOffset = sizeof( ARCHIVE_HEADER ) +
                         Header.dwNumFilters * sizeof( ARCHIVE_FILTER ) +
                         Header.dwNumDirs * sizeof( ARCHIVE_DIR ) +
                         Header.dwNumFiles * sizeof( ARCHIVE_FILE );

    // Let the Filters store the configuration Data.
    for( VFS_FilterList::iterator iter6 = Filters.begin(); iter6 != Filters.end(); iter6++ )
    {
        // Setup diverse global Variables.
        g_ToBuffer.clear();

        // Call the Saver Proc.
        if( !( *iter6 )->SaveConfigData( Writer ) )
        {
            VFS_File_Close( hFile );
            return VFS_FALSE;
        }

        // Save it.
        // NOTE(review): &*g_ToBuffer.begin() is UB when the buffer is
        // empty -- confirm SaveConfigData always writes something.
        VFS_DWORD dwPos = VFS_File_Tell( hFile );
        VFS_File_Seek( hFile, dwOffset, VFS_SET );
        VFS_File_Write( hFile, &*g_ToBuffer.begin(), ( VFS_DWORD )g_ToBuffer.size() );
        VFS_File_Seek( hFile, dwPos, VFS_SET );
        dwOffset += ( VFS_DWORD )g_ToBuffer.size();
    }

    // Write the Files.
    for( VFS_FileNameMap::const_iterator iter7 = Files.begin(); iter7 != Files.end(); iter7++ )
    {
        // Prepare the record.
        ARCHIVE_FILE File;

        // Get the Name of the File and add it.
        VFS_String strName;
        VFS_Util_GetName( ( *iter7 ).second, strName );
        strcpy( File.szName, ToLower( strName ).c_str() );

        // Get the Parent Dir ID.
        if( ( *iter7 ).second.find( VFS_PATH_SEPARATOR ) != VFS_String::npos )
        {
            // Get the Name of the Parent Directory.
            VFS_String strParentDir = ( *iter7 ).second.substr( 0, ( *iter7 ).second.rfind( VFS_PATH_SEPARATOR ) );

            // Get the Index of the Parent Directory.
            assert( find( Dirs.begin(), Dirs.end(), ToLower( strParentDir ) ) != Dirs.end() );
            File.dwDirIndex = ( VFS_DWORD )( find( Dirs.begin(), Dirs.end(), ToLower( strParentDir ) ) - Dirs.begin() );
        }
        else
            File.dwDirIndex = DIR_INDEX_ROOT;

        // Open the Source File.
        VFS_Handle hSrc = VFS_File_Open( ( *iter7 ).first, VFS_READ );
        if( hSrc == VFS_INVALID_HANDLE_VALUE )
        {
            VFS_File_Close( hFile );
            return VFS_FALSE;
        }

        // Store the uncompressed size.
        File.dwUncompressedSize = VFS_File_GetSize( hSrc );

        // Setup diverse global Variables.
        g_FromBuffer.clear();
        g_ToBuffer.clear();

        // Read in the File (whole file buffered in g_FromBuffer).
        VFS_DWORD dwRead;
        g_FromPos = 0;
        do
        {
            if( !VFS_File_Read( hSrc, Chunk, FILE_COPY_CHUNK_SIZE, &dwRead ) )
            {
                VFS_File_Close( hSrc );
                VFS_File_Close( hFile );
                return VFS_FALSE;
            }
            g_FromBuffer.reserve( g_FromBuffer.size() + dwRead );
            for( VFS_DWORD dwIndex = 0; dwIndex < dwRead; dwIndex++ )
                g_FromBuffer.push_back( Chunk[ dwIndex ] );
        }
        while( dwRead > 0 );

        // Close the File.
        VFS_File_Close( hSrc );

        // Call the Filters (each pass encodes g_FromBuffer -> g_ToBuffer,
        // then the result becomes the next pass's input).
        VFS_EntityInfo Info;
        VFS_File_GetInfo( ( *iter7 ).first, Info );
        for( VFS_FilterList::iterator iter8 = Filters.begin(); iter8 != Filters.end(); iter8++ )
        {
            g_FromPos = 0;
            if( !( *iter8 )->Encode( Reader, Writer, Info ) )
            {
                VFS_ErrorCode eError = VFS_GetLastError();
                if( eError == VFS_ERROR_NONE )
                    eError = VFS_ERROR_GENERIC;
                SetLastError( eError );
                VFS_File_Close( hFile );
                return VFS_FALSE;
            }
            g_FromBuffer = g_ToBuffer;
            g_ToBuffer.clear();
        }

        // Store the final Result: payload at dwOffset, then the file
        // record back at the current record-table position.
        VFS_DWORD dwPos = VFS_File_Tell( hFile );
        VFS_File_Seek( hFile, dwOffset, VFS_SET );
        VFS_File_Write( hFile, &*g_FromBuffer.begin(), ( VFS_DWORD )g_FromBuffer.size() );
        File.dwCompressedSize = ( VFS_DWORD )g_FromBuffer.size();
        VFS_File_Seek( hFile, dwPos, VFS_SET );
        dwOffset += File.dwCompressedSize;
        if( !VFS_File_Write( hFile, ( const VFS_BYTE* ) &File, sizeof( ARCHIVE_FILE ) ) )
        {
            VFS_File_Close( hFile );
            return VFS_FALSE;
        }
    }

    // Close the File.
    if( !VFS_File_Close( hFile ) )
        return VFS_FALSE;

    return VFS_TRUE;
}