int * Plan_File::Path (int *num_path) { int offset = 0; *num_path = 0; //---- mode specific adjustments ---- switch (Mode ()) { case AUTO_MODE: //---- auto ---- if (Driver_Flag ()) { //---- driver ---- offset = 2; //*num_path = Tokens () - offset - plan->data [1]; *num_path = Tokens () - offset; } break; case TRANSIT_MODE: //---- transit ---- if (Driver_Flag ()) { //---- driver ---- offset = 3; *num_path = Tokens () - offset - 2 * plan->data [0]; } break; case BIKE_MODE: //---- bike ---- case WALK_MODE: //---- walk ---- *num_path = Tokens (); break; default: //---- other ---- offset = 1; *num_path = Tokens () - offset; break; } return (plan->data + offset); }
// Load the player's archaeology state from a character DB query result.
// Takes ownership of 'result' and deletes it on every path.
//   fields[0] - space-separated research site ids
//   fields[1] - space-separated dig-site counters (one per slot)
//   fields[2] - space-separated research project ids
// Missing or malformed data falls back to freshly generated sites/projects.
void Player::_LoadArchaeology(QueryResult* result)
{
    // Reset all dig-site counters before loading
    for (uint8 i = 0; i < MAX_RESEARCH_SITES; ++i)
        _digSites[i].count = 0;

    // Archaeology disabled: nothing to load, but still free the result
    if (!sWorld.getConfig(CONFIG_BOOL_ARCHAEOLOGY_ENABLED))
    {
        delete result;
        return;
    }

    // No stored row (e.g. first load): start with generated data
    if (!result)
    {
        GenerateResearchSites();
        GenerateResearchProjects();
        return;
    }

    Field* fields = result->Fetch();

    // Loading current zones (accepted only when non-empty and within capacity)
    Tokens tokens = Tokens(fields[0].GetCppString(), ' ');
    if (tokens.size() != 0 && tokens.size() <= MAX_RESEARCH_SITES)
    {
        _researchSites.clear();
        for (uint8 i = 0; i < tokens.size(); ++i)
            _researchSites.insert(uint32(atoi(tokens[i])));
    }
    else
        GenerateResearchSites();

    // Loading current zone info — one counter per dig-site slot; accepted
    // only when the stored list matches the slot count exactly
    tokens = Tokens(fields[1].GetCppString(), ' ');
    if (tokens.size() == MAX_RESEARCH_SITES)
    {
        for (uint8 i = 0; i < MAX_RESEARCH_SITES; ++i)
            _digSites[i].count = uint32(atoi(tokens[i]));
    }

    // Loading current projects; unknown or invalid entries are skipped
    // (sic: "IsVaid" is the project-declared method name)
    tokens = Tokens(fields[2].GetCppString(), ' ');
    if (tokens.size() == MAX_RESEARCH_PROJECTS)
    {
        for (uint8 i = 0; i < MAX_RESEARCH_PROJECTS; ++i)
            if (ResearchProjectEntry const* entry = sResearchProjectStore.LookupEntry(atoi(tokens[i])))
                if (entry->IsVaid())
                    ReplaceResearchProject(0, entry->ID);
    }
    else
        GenerateResearchProjects();

    delete result;
}
// FileReaderUtils -- registers the known map names together with their
// 40-character hex hashes (presumably SHA-1 of the map files — confirm).
FileReaderUtils::FileReaderUtils()
{
	static const char* const MapHashes[][2] =
	{
		{ "(2)Destination",       "4e24f217d2fe4dbfa6799bc57f74d8dc939d425b" },
		{ "(2)Benzene",           "af618ea3ed8a8926ca7b17619eebcb9126f0d8b1" },
		{ "(2)Heartbreak Ridge",  "6f8da3c3cc8d08d9cf882700efa049280aedca8c" },
		{ "(3)Aztec",             "ba2fc0ed637e4ec91cc70424335b3c13e131b75a" },
		{ "(3)Tau Cross",         "9bfc271360fa5bab3707a29e1326b84d0ff58911" },
		{ "(4)Andromeda",         "1e983eb6bcfa02ef7d75bd572cb59ad3aab49285" },
		{ "(4)Circuit Breaker",   "450a792de0e544b51af5de578061cb8a2f020f32" },
		{ "(4)Empire of the Sun", "a220d93efdf05a439b83546a579953c63c863ca7" },
		{ "(4)Fortress",          "83320e505f35c65324e93510ce2eafbaa71c9aa1" },
		{ "(4)Python",            "de2ada75fbc741cfa261ee467bf6416b10f9e301" },
	};

	for (size_t i = 0; i < sizeof(MapHashes) / sizeof(MapHashes[0]); ++i)
		nameHash.push_back(Tokens(MapHashes[i][0], MapHashes[i][1]));
}
// Return the tree item for a dotted category path (e.g. "a.b.c"),
// creating any missing nodes on the way and caching every prefix in
// m_CategoryMap (keys are lower-cased full paths).
wxTreeItemId ProjectConfigurationPanel::CategoryId(const wxString& Category)
{
    // Fast path: full path already mapped to a tree item
    if ( m_CategoryMap.find(Category.Lower()) != m_CategoryMap.end() )
    {
        return m_CategoryMap[Category.Lower()];
    }

    wxStringTokenizer Tokens(Category,_T("."),wxTOKEN_STRTOK);
    wxString PathSoFar = _T("");
    wxTreeItemId IdSoFar = m_KnownLibrariesTree->GetRootItem();
    bool FirstElem = true;
    while ( Tokens.HasMoreTokens() )
    {
        // Iterate through items already added to map
        wxString Part = Tokens.GetNextToken();
        PathSoFar += Part.Lower();
        if ( m_CategoryMap.find(PathSoFar) == m_CategoryMap.end() )
        {
            // Ok, found first node which is not yet added, this mean
            // that all subnodes are also not yet added
            int SkipLast = FirstElem ? (m_IsOtherCategory?1:0) + (m_IsPkgConfig?1:0) : 0;
            // First elem of the path must take into consideration
            // that some categories must remain at the end
            if ( SkipLast )
            {
                IdSoFar = m_CategoryMap[PathSoFar] = m_KnownLibrariesTree->InsertItem(
                    IdSoFar,
                    m_KnownLibrariesTree->GetChildrenCount(IdSoFar,false)-SkipLast,
                    Part);
                FirstElem = false;
            }
            else
            {
                IdSoFar = m_CategoryMap[PathSoFar] = m_KnownLibrariesTree->AppendItem(IdSoFar,Part);
            }
            // Next items are always added at the end
            while ( Tokens.HasMoreTokens() )
            {
                Part = Tokens.GetNextToken();
                PathSoFar += _T(".");
                // BUG FIX: this previously assigned (PathSoFar = Part.Lower()),
                // discarding the accumulated prefix, so nested categories were
                // cached under just their last segment — wrong keys that later
                // lookups could collide with or miss.
                PathSoFar += Part.Lower();
                IdSoFar = m_CategoryMap[PathSoFar] = m_KnownLibrariesTree->AppendItem(IdSoFar,Part);
            }
            // If we're here, all remaining path has been added, so we
            // finished here
            break;
        }
        FirstElem = false;
        PathSoFar += _T(".");
    }
    // Just for sure if there are multiple dots
    m_CategoryMap[Category.Lower()] = IdSoFar;
    return IdSoFar;
}
// Program entry point: tokenize the grammar file named on the command line
// and parse it.  Errors are thrown as TError* and reported to stdout here.
int main(int ArgCount, char** Args)
{
	string GrammarFilename;
//	TToken NullToken = {nullptr, 0, 0};
//	_CrtSetDbgFlag(_CRTDBG_ALLOC_MEM_DF | _CRTDBG_LEAK_CHECK_DF);
	unique_ptr<TTokens> Tokens(new TTokens);

	try
	{
		// parse command-line so we can get an input file
		Global::ParseArgs(ArgCount, Args, true);

		if(Global::Dump)
			printf("# of threads = %u\n", HardwareConcurrency());

		GrammarFilename = Global::InputFilename();
		if(GrammarFilename.empty())
			Error("MISSING_INPUT_FILE");

		TokenizeAll(Tokens, GrammarFilename);
		Parse(Tokens);
	}
	catch(TError* ErrorParms)
	{
		// Take ownership of the thrown pointer so it is freed on all paths
		unique_ptr<TError> Errors(ErrorParms);

		DUMPVERBOSE("Caught error: '%s'\n", ErrorParms->GetName().c_str());

		// Freeze() presumably finalizes the token stream before the error
		// writer reads it — confirm against TTokens
		Tokens->Freeze();
		ErrorParms->WriteToFile(Tokens, stdout);

		// Disabled micro-benchmark comparing token-iteration strategies
#if 0
		system("time /t");
#define LOOPS (1024*1024*16)
		for(int i=0; i < LOOPS; ++i)
			for(int j=0; j < Tokens->End(); ++j)
				if(Tokens->Get(j).Type == 999)
					printf("!!\n");
		system("time /t");

		auto Slice = Tokens->Slice(0,-1);
		for(int i=0; i < LOOPS; ++i)
		{
//			for(auto Token: Tokens->Slice(0, -1))
			for(auto Token: Slice)
				if(Token.Type == 999)
					printf("!!\n");
		}
		system("time /t");

		for(int i=0; i < LOOPS; ++i)
		{
//			for(auto Token: Tokens->Slice(0, -1))
			auto End = Slice.end();
			for(auto Iter=Slice.begin(); Iter < End; ++Iter)
				if(Iter->Type == 999)
					printf("!!\n");
		}
		system("time /t");
#endif
	}
}
void Plan_File::Num_Path (int num) { //---- mode specific adjustments ---- switch (Mode ()) { case AUTO_MODE: //---- auto ---- if (Driver_Flag ()) { //---- driver ---- Tokens (num + 2); } else { //---- passenger ---- Tokens (1); } break; case TRANSIT_MODE: //---- transit ---- if (Driver_Flag ()) { //---- driver ---- Tokens (num + 3); } else { Tokens (1); } break; case BIKE_MODE: //---- bike ---- case WALK_MODE: //---- walk ---- Tokens (num); break; default: //---- other ---- Tokens (num + 1); break; } }
void wxWidgetsGUIConfigPanel::OnApply() { wxStringTokenizer Tokens(AutoLoad->GetValue(),_T("\n")); m_GUI->m_LoadedResources.Clear(); while ( Tokens.HasMoreTokens() ) { m_GUI->m_LoadedResources.Add(Tokens.GetNextToken()); } m_GUI->m_MainResource = MainRes->GetValue(); m_GUI->m_CallInitAll = InitAll->GetValue(); m_GUI->m_CallInitAllNecessary = m_GUI->m_CallInitAll && InitAllNecessary->GetValue(); m_GUI->NotifyChange(); m_GUI->OnRebuildApplicationCode(); }
bool PostgreSQLConnection::Initialize(const char* infoString) { Tokens tokens = Tokens(infoString, ";"); Tokens::iterator iter; std::string host, port_or_socket_dir, user, password, database; iter = tokens.begin(); if (iter != tokens.end()) host = *iter++; if (iter != tokens.end()) port_or_socket_dir = *iter++; if (iter != tokens.end()) user = *iter++; if (iter != tokens.end()) password = *iter++; if (iter != tokens.end()) database = *iter++; if (host == ".") mPGconn = PQsetdbLogin(NULL, port_or_socket_dir == "." ? NULL : port_or_socket_dir.c_str(), NULL, NULL, database.c_str(), user.c_str(), password.c_str()); else mPGconn = PQsetdbLogin(host.c_str(), port_or_socket_dir.c_str(), NULL, NULL, database.c_str(), user.c_str(), password.c_str()); /* check to see that the backend connection was successfully made */ if (PQstatus(mPGconn) != CONNECTION_OK) { sLog.outError("Could not connect to Postgre database at %s: %s", host.c_str(), PQerrorMessage(mPGconn)); PQfinish(mPGconn); mPGconn = NULL; return false; } DETAIL_LOG("Connected to Postgre database %s@%s:%s/%s", user.c_str(), host.c_str(), port_or_socket_dir.c_str(), database.c_str()); sLog.outString("PostgreSQL server ver: %d", PQserverVersion(mPGconn)); return true; }
// Read a *MESH_TFACELIST block and emit each texture face to the mender.
void clASELoader::ASE_ReadTFaceList( iIStream* FStream, clVAMender* Mender )
{
	guard();

#ifdef ASE_HEAVY_DEBUG
	Env->Logger->Log( L_DEBUG, "Reading Tface list..." );
#endif

	// Scratch buffer reused for the fields of every *MESH_TFACE line
	LStr::clStringsVector Fields( 6 );

	while ( !FStream->Eof() )
	{
		LString Line = FStream->ReadLineTrimLeadSpaces();

		// Closing brace terminates the block
		if ( LStr::ContainsSubStr( Line, "}" ) )
		{
			break;
		}

		if ( LStr::StartsWith( Line, ASE_MeshTFace ) )
		{
			LStr::FastSplitLine( 1, 5, Line, Fields, true );

			// Field layout: <keyword> <face index> <A> <B> <C>
			int FaceIdx = LStr::ToInt( Fields[2] );
			int VertA   = LStr::ToInt( Fields[3] );
			int VertB   = LStr::ToInt( Fields[4] );
			int VertC   = LStr::ToInt( Fields[5] );

			Mender->EmitTextureFace( FaceIdx, VertA, VertB, VertC );
		}
		else
		{
			FATAL_MSG( "Unexpected token in " + ASE_MeshTFaceList + " : " + Line );
		}
	}

	unguard();
}
//---- Read -- read the next plan record (binary or text format) ----
//
// offset >= 0 seeks to that file position first; offset < 0 reads
// sequentially.  Updates num_record / num_plan / num_trip / num_traveler
// counters as a side effect.  Returns false at end-of-file or on error
// (with Status() set for hard errors).

bool Plan_File::Read (long offset)
{
	//---- check the file status ----

	if (!Check_File ()) return (false);
	if (plan == NULL) return (Status (RECORD_SIZE));
	if (File_Access () != READ) return (Status (ERROR));

	//---- move to a specified location in the file ----

	if (offset >= 0) {
		if (!Offset (offset)) return (false);
	}

	//---- allocate space ----

	if (allocate_memory) {
		if (!Setup_Record ()) return (false);
	}

	//---- read the next plan ----

	if (Record_Format () == BINARY) {
		int num_token;

		// Fixed-size header first; the trailing int of Plan_Data is the
		// start of the variable-length token array, hence "- sizeof (int)"
		if (!Db_File::Read (plan, (sizeof (Plan_Data) - sizeof (int)))) return (false);

		// time_sort files store the two sort keys swapped — NOTE(review):
		// presumed traveler/time key order; confirm against the writer
		if (time_sort) {
			int temp = plan->key1;
			plan->key1 = plan->key2;
			plan->key2 = temp;
		}
		num_record++;
		num_plan++;

		if (Leg () == 2) {
			num_trip++;
		} else if (Leg () == 1 && Trip () == 1) {
			num_traveler++;
		}

		//---- read the variable-length token data ----

		num_token = Tokens ();

		if (num_token > 0) {
			if (!Check_Size (num_token)) return (false);
			if (!Db_File::Read (&(plan->data [0]), num_token * sizeof (int))) return (Status (PLAN_FIELDS));
			num_record++;
		}
		return (true);

	} else {

		//---- text format: fields arrive in a fixed order across lines ----

		int field, max_field, value;
		char buffer [40], *ptr;

		field = max_field = 0;

		while (Db_File::Read ()) {
			num_record++;
			ptr = Clean_Record ();

			//---- check for a blank record ----

			if (ptr == NULL || *ptr == '\0') continue;

			//---- process the plan record ----

			while (ptr != NULL) {
				ptr = Get_Token (ptr, buffer, sizeof (buffer));
				if (buffer [0] == '\0') break;
				field++;
				value = atol (buffer);

				// 'field' is the 1-based position within the whole plan,
				// not within the current line
				switch (field) {
					case 1:		//---- traveler id ----
						Traveler (value);
						num_plan++;
						break;
					case 2:		//---- user field (ignored) ----
						break;
					case 3:		//---- trip id ----
						Trip (value);
						break;
					case 4:		//---- leg id ----
						Leg (value);
						if (value == 2) {
							num_trip++;
						} else if (value == 1 && Trip () == 1) {
							num_traveler++;
						}
						break;
					case 5:		//---- time ----
						Time (value);
						break;
					case 6:		//---- start id ----
						Start_ID (value);
						break;
					case 7:		//---- start type ----
						Start_Type (value);
						break;
					case 8:		//---- end id ----
						End_ID (value);
						break;
					case 9:		//---- end type ----
						End_Type (value);
						break;
					case 10:	//---- duration ----
						Duration (value);
						break;
					case 11:	//---- stop time ----
						Stop_Time (value);
						break;
					case 12:	//---- max time flag (ignored) ----
						break;
					case 13:	//---- cost ----
						Cost (value);
						break;
					case 14:	//---- gcf ----
						GCF (value);
						break;
					case 15:	//---- driver flag ----
						Driver_Flag (value);
						break;
					case 16:	//---- mode ----
						Mode (value);
						break;
					case 17:	//---- number of tokens ----
						if (value < 0) {
							Status (PLAN_FIELDS);
							return (false);
						}
						Tokens (value);
						// All remaining fields are token values
						max_field = value + 17;
						if (value == 0) return (true);
						if (!Check_Size (value)) return (false);
						break;
					default:	//---- token value ----
						if (field > max_field) {
							Status (PLAN_FIELDS);
							return (false);
						}
						plan->data [field - 18] = value;
						if (field == max_field) return (true);
						break;
				}
			}
		}

		// A partially-read plan at end-of-file is an error
		if (field != 0) {
			return (Status (PLAN_FIELDS));
		}
		return (false);
	}
}
//---- Write -- write a plan record in binary or text format ----
//
// data == NULL writes the internal plan record; otherwise 'data' is
// written and the internal pointer is restored before returning (on
// both success and failure, via the 'reset' label).
// NOTE(review): in the binary time_sort path 'backup' is NULL when
// data == NULL, so memcpy (backup, ...) would dereference NULL there —
// presumably callers always pass 'data' when time_sort is set; confirm.

bool Plan_File::Write (Plan_Data *data)
{
	int num_token;
	FILE *file;

	//---- check the file status ----

	if (!Check_File ()) return (false);
	if (File_Access () == READ) return (Status (ERROR));

	Plan_Data *backup = NULL;

	// Temporarily swap in the caller's record if one was provided
	if (data != NULL) {
		backup = plan;
		plan = data;
	} else {
		if (plan == NULL) return (Status (RECORD_SIZE));
	}

	//---- write the plan data ----

	file = File ();
	num_token = Tokens ();

	if (Record_Format () == BINARY) {
		if (time_sort) {
			// Write a key-swapped copy so the file sorts by the other key
			int size, temp;
			size = sizeof (Plan_Data) - sizeof (int);
			memcpy (backup, plan, size);
			temp = backup->key1;
			backup->key1 = backup->key2;
			backup->key2 = temp;
			if (!Db_File::Write (backup, size)) goto reset;
		} else {
			if (!Db_File::Write (plan, (sizeof (Plan_Data) - sizeof (int)))) goto reset;
		}
		num_record++;
		num_plan++;

		if (Leg () == 2) {
			num_trip++;
		} else if (Leg () == 1 && Trip () == 1) {
			num_traveler++;
		}

		//---- variable-length token data follows the fixed header ----

		if (num_token > 0) {
			if (!Db_File::Write (&(plan->data [0]), num_token * sizeof (int))) goto reset;
			num_record++;
		}
	} else {

		//---- text format: five lines of header fields ----

		if (fprintf (file, "%d 0 %d %d\n%d %d %d %d %d\n%d %d %d %d %d\n%d %d\n%d\n",
			Traveler (), Trip (), Leg (),
			Time (), Start_ID (), Start_Type (), End_ID (), End_Type (),
			Duration (), Stop_Time (), 1, Cost (), GCF (),
			Driver_Flag (), Mode (), num_token) < 0) goto reset;

		num_record += 5;
		num_plan++;

		if (Leg () == 2) {
			num_trip++;
		} else if (Leg () == 1 && Trip () == 1) {
			num_traveler++;
		}

		//---- write tokens ----

		if (num_token > 0) {
			int field;
			int i = 0;

			// Mode-specific header values come first on their own line;
			// 'i' skips them in the groups-of-10 loop below
			switch (Mode ()) {
				case AUTO_MODE:		//---- auto ----
					if (Driver_Flag ()) {
						//---- vehicle ID and number of passengers ----
						i = 2;
						if (fprintf (file, "%d %d\n", plan->data [0], plan->data [1]) < 0) goto reset;
						num_record++;
					}
					break;
				case TRANSIT_MODE:	//---- transit ----
					if (Driver_Flag ()) {
						//---- schedule pairs, vehicle ID, and route ID ----
						i = 3;
						if (fprintf (file, "%d %d %d\n", plan->data [0], plan->data [1], plan->data [2]) < 0) goto reset;
						num_record++;
					}
					break;
				default:
					break;
			}

			//---- print the rest of the fields in groups of 10 ----

			for (field=0; i < num_token; i++, field++) {
				if (!field) {
					if (fprintf (file, "%d", plan->data [i]) < 0) goto reset;
					num_record++;
				} else if (!(field % 10)) {
					if (fprintf (file, "\n%d", plan->data [i]) < 0) goto reset;
					num_record++;
				} else {
					if (fprintf (file, " %d", plan->data [i]) < 0) goto reset;
				}
			}
			if (field) {
				if (fprintf (file, "\n") < 0) goto reset;
			}
		}

		//---- add a blank line at the end of the plan ----

		if (fprintf (file, "\n") < 0) goto reset;
		num_record++;

		Flush ();
	}

	// Restore the internal record pointer before returning
	if (data != NULL) {
		plan = backup;
	}
	return (true);

reset:
	if (data != NULL) {
		plan = backup;
	}
	return (false);
}
// Read a *MESH_FACELIST block and emit each face (vertex indices,
// sub-material id, smoothing group) to the mender.
void clASELoader::ASE_ReadFaceList( iIStream* FStream, clVAMender* Mender )
{
	guard();
#ifdef ASE_HEAVY_DEBUG
	Env->Logger->Log( L_DEBUG, "Reading face list..." );
#endif
	// Scratch buffer reused for the fields of every *MESH_FACE line
	LStr::clStringsVector Tokens( 19 );

	while ( !FStream->Eof() )
	{
		LString Line = FStream->ReadLineTrimLeadSpaces();

		// Closing brace terminates the block
		if ( LStr::ContainsSubStr( Line, "}" ) )
		{
			break;
		}
		else if ( LStr::StartsWith( Line, ASE_MeshFace ) )
		{
			LStr::FastSplitLine( 1, 18, Line, Tokens, true );

			// Face index token carries a trailing ':' that must be stripped
			int Index = LStr::ToInt( Tokens[2].substr( 0, Tokens[2].length() - 1 ) );

			int A = LStr::ToInt( Tokens[4] );
			int B = LStr::ToInt( Tokens[6] );
			int C = LStr::ToInt( Tokens[8] );

			// The *MESH_MTLID keyword can land at position 16 or 17 —
			// NOTE(review): presumably depends on whether the smoothing
			// field before it is empty; confirm against sample .ASE files
			LString Token = Tokens[16];

			int SubMTLTokenIndex = ( Token == ASE_MeshMaterialID ) ? 17 : 18;

			LString SubMTL_ID = Tokens[SubMTLTokenIndex];

			// Missing material id maps to -1
			int SubMTL = SubMTL_ID.empty() ? -1 : LStr::ToInt( SubMTL_ID );

			int SmoothingGroup = LStr::ToInt( Tokens[SubMTLTokenIndex-2] );

			// Earlier, slower GetToken-based variant kept for reference:
			/*
			         LString IndexS = LStr::GetToken(Line, 2);

			         int Index = LStr::ToInt( IndexS.substr(0, IndexS.length()-1) );

			         int A = LStr::ToInt( LStr::GetToken(Line, 4) );
			         int B = LStr::ToInt( LStr::GetToken(Line, 6) );
			         int C = LStr::ToInt( LStr::GetToken(Line, 8) );

			         LString Token = LStr::GetToken(Line, 16);

			         int SubMTLTokenIndex = ( Token == ASE_MeshMaterialID ) ? 17 : 18;

			         LString SubMTL_ID = LStr::GetToken( Line, SubMTLTokenIndex );

			         int SubMTL = SubMTL_ID.empty() ? -1 : LStr::ToInt( SubMTL_ID );

			         int SmoothingGroup = LStr::ToInt( LStr::GetToken( Line, SubMTLTokenIndex-2 ) );
			*/
			// sscanf-based variant kept for reference:
			/*
			         char Keyword[32];
			         int A, B, C;
			         int Index;
			         int AB, BC, CA;
			         int SmoothingGroup;
			         int SubMTL;

			         sscanf( Line.c_str(), "%s %d: A: %d B: %d C: %d AB: %d BC: %d CA: %d *MESH_SMOOTHING %d", Keyword, &Index, &A, &B, &C, &AB, &BC, &CA, &SmoothingGroup);

			         char* pBuf = strrchr(Keyword, '*');

			         sscanf(pBuf,"*MESH_MTLID %d", &SubMTL);
			*/
#ifdef ASE_HEAVY_DEBUG
			Env->Logger->Log( L_DEBUG, "Smoothing group:" + LStr::ToStr( SmoothingGroup ) );
#endif
			Mender->EmitFace( Index, A, B, C, SubMTL, SmoothingGroup );
		}
		else
		{
			FATAL_MSG( "Unexpected token in " + ASE_MeshFaceList + " : " + Line );
		}
	}

	unguard();
}
// Read a *MESH_NORMALS block: each *MESH_FACENORMAL line is followed by
// the vertex-normal lines of that face.  A face's collected normals are
// emitted when the next face line (or the end of the block) is reached.
void clASELoader::ASE_ReadNormals( iIStream* FStream, clVAMender* Mender )
{
	guard();
#ifdef ASE_HEAVY_DEBUG
	Env->Logger->Log( L_DEBUG, "Reading normals..." );
#endif
	int CurrentFace = -1;   // face whose vertex normals are being collected (-1 = none yet)
	int VertexIndex = 0;    // next slot in FaceNormal.FVertexNormal
	sFaceNormal FaceNormal;

	LStr::clStringsVector Tokens( 6 );

	while ( !FStream->Eof() )
	{
		LString Line = FStream->ReadLineTrimLeadSpaces();

		// Closing brace terminates the block
		if ( LStr::ContainsSubStr( Line, "}" ) )
		{
			break;
		}
		else if ( LStr::StartsWith( Line, ASE_MeshFaceNormal ) )
		{
			// A new face line: flush the previously collected face first
			if ( CurrentFace > -1 )
			{
				Mender->EmitFaceNormal( CurrentFace, FaceNormal );
			}

			// Fields: <keyword> <face index> <x> <y> <z>
			LStr::FastSplitLine( 1, 5, Line, Tokens, true );

			CurrentFace = LStr::ToInt( Tokens[2] );

			float X = LStr::ToFloat( Tokens[3] );
			float Y = LStr::ToFloat( Tokens[4] );
			float Z = LStr::ToFloat( Tokens[5] );

			VertexIndex = 0;

			FaceNormal.FFaceNormal = LVector3( X, Y, Z );
		}
		else if ( LStr::StartsWith( Line, ASE_MeshVertexNormal ) )
		{
			// Fields: <keyword> <vertex index> <x> <y> <z>
			LStr::FastSplitLine( 1, 5, Line, Tokens, true );

			int Index = LStr::ToInt( Tokens[2] );

			float X = LStr::ToFloat( Tokens[3] );
			float Y = LStr::ToFloat( Tokens[4] );
			float Z = LStr::ToFloat( Tokens[5] );

			LVector3 Vec( X, Y, Z );

			Mender->EmitNormal( Index, Vec );

			// NOTE(review): no bounds check on VertexIndex — more vertex-normal
			// lines than FVertexNormal holds would overrun the array; confirm
			// the format guarantees the per-face count
			FaceNormal.FVertexNormal[ VertexIndex++ ] = Vec;
		}
		else
		{
			FATAL_MSG( "Unexpected token in " + ASE_MeshNormals + " : " + Line );
		}
	}

	// Flush the final face of the block
	if ( CurrentFace > -1 )
	{
		Mender->EmitFaceNormal( CurrentFace, FaceNormal );
	}

	unguard();
}
// Construct a glyph from its CXF definition lines.  Each line begins with
// a graphics-element code ('L' = line, 'A' = arc, "AR" = arc with reversed
// direction) followed by whitespace/comma-separated numeric fields.
// Position fields are converted from points to mm; collected elements are
// appended to m_graphics_list and merged into m_bounding_box.
// Throws std::runtime_error on malformed input.
VectorFont::Glyph::Glyph( const std::list<std::string> &cxf_glyph_definition, const double word_space_percentage, const double character_space_percentage )
{
	m_word_space_percentage = word_space_percentage;
	m_character_space_percentage = character_space_percentage;

	for (std::list<std::string>::const_iterator l_itLine = cxf_glyph_definition.begin(); l_itLine != cxf_glyph_definition.end(); l_itLine++)
	{
		wxString line( Ctt(l_itLine->c_str()) );
		wxString delimiters( _T(" \t\r\n,") );

		std::vector<wxString> tokens = Tokens( line, delimiters );
		if (tokens.size() == 0)
		{
			std::ostringstream l_ossError;
			l_ossError << "Expected tokens in glyph definition";
			throw(std::runtime_error(l_ossError.str().c_str()));
		}

		// Replace dot (.) for comma (,) if the locale settings require it.
		for (std::vector<wxString>::iterator token = tokens.begin(); token != tokens.end(); token++)
		{
			*token = PrepareStringForConversion( *token );
		}

		const wxChar * c_str = tokens[0].c_str();
		switch (c_str[0])
		{
		case 'L':
			// Straight line: code plus four coordinates
			if (tokens.size() != 5)
			{
				std::ostringstream l_ossError;
				l_ossError << "Expected 5 tokens when defining a line. We got "
					<< tokens.size() << " tokens from '" << l_itLine->c_str() << "\n";
				throw(std::runtime_error(l_ossError.str().c_str()));
			}
			else
			{
				GlyphLine *line = new GlyphLine(	PointToMM(strtod( Ttc(tokens[1].c_str()), NULL )),
									PointToMM(strtod( Ttc(tokens[2].c_str()), NULL )),
									PointToMM(strtod( Ttc(tokens[3].c_str()), NULL )),
									PointToMM(strtod( Ttc(tokens[4].c_str()), NULL )) );
				m_graphics_list.push_back( line );
				m_bounding_box.Insert( line->BoundingBox() );
			}
			break;

		case 'A':
			// Arc: code plus five values — tokens 1-3 are converted to mm,
			// tokens 4-5 are passed through raw (presumably start/end
			// angles — confirm against the CXF font specification)
			if (tokens.size() != 6)
			{
				std::ostringstream l_ossError;
				l_ossError << "Expected 6 tokens when defining an arc";
				throw(std::runtime_error(l_ossError.str().c_str()));
			}
			else
			{
				if ((tokens[0].size() == 2) && (c_str[1] == 'R'))
				{
					// Reverse the starting and ending points.
					GlyphArc *arc = new GlyphArc(	PointToMM(strtod( Ttc(tokens[1].c_str()), NULL )),
									PointToMM(strtod( Ttc(tokens[2].c_str()), NULL )),
									PointToMM(strtod( Ttc(tokens[3].c_str()), NULL )),
									strtod( Ttc(tokens[5].c_str()), NULL) ,
									strtod( Ttc(tokens[4].c_str()), NULL ));
					m_graphics_list.push_back( arc );
					m_bounding_box.Insert( arc->BoundingBox() );
				} // End if - then
				else
				{
					GlyphArc *arc = new GlyphArc(	PointToMM(strtod( Ttc(tokens[1].c_str()), NULL )),
									PointToMM(strtod( Ttc(tokens[2].c_str()), NULL )),
									PointToMM(strtod( Ttc(tokens[3].c_str()), NULL )),
									strtod( Ttc(tokens[4].c_str()), NULL ),
									strtod( Ttc(tokens[5].c_str()), NULL ) );
					m_graphics_list.push_back( arc );
					m_bounding_box.Insert( arc->BoundingBox() );
				}
			}
			break;

		default:
			// Unknown element code — reject the whole glyph
			std::ostringstream l_ossError;
			l_ossError << "Unexpected graphics element type '" << c_str[0] << "'";
			throw(std::runtime_error(l_ossError.str().c_str()));
		} // End switch
	} // End for
} // End constructor