void UMPFile::parseTile (Tokeniser &tokeniser) throw (UMPException)
{
	std::string name = tokeniser.getToken();
	if (name.empty())
		throw UMPException("Unexpected tile format, expected tile name");

	std::string token = tokeniser.getToken();
	if (token != "{")
		throw UMPException("Unexpected tile format, expected {, found " + token);

	std::string width = tokeniser.getToken();
	if (width.empty())
		throw UMPException("Unexpected tile format, expected tile width");

	std::string height = tokeniser.getToken();
	if (height.empty())
		throw UMPException("Unexpected tile format, expected tile height");

	UMPTile tile(name, string::toInt(width), string::toInt(height));

	// skip the tile body until the closing brace
	for (;;) {
		token = tokeniser.getToken();
		if (token.empty() || token == "}")
			break;
		// TODO: fill the data
	}

	addTile(tile);
}
static NodeSmartReference Entity_parseTokens (Tokeniser& tokeniser, EntityCreator& entityTable, const PrimitiveParser& parser, int index)
{
	NodeSmartReference entity(g_nullNode);
	KeyValues keyValues;
	std::string classname = "";
	int count_primitives = 0;

	while (1) {
		std::string token = tokeniser.getToken();
		if (token.empty()) {
			Tokeniser_unexpectedError(tokeniser, token, "#entity-token");
			return g_nullNode;
		}
		if (token == "}") { // end entity
			if (entity == g_nullNode) {
				// entity does not have brushes
				entity = Entity_create(entityTable, GlobalEntityClassManager().findOrInsert(classname, false), keyValues);
			}
			return entity;
		} else if (token == "{") { // begin primitive
			if (entity == g_nullNode) {
				// entity has brushes
				entity = Entity_create(entityTable, GlobalEntityClassManager().findOrInsert(classname, true), keyValues);
			}

			NodeSmartReference primitive(parser.parsePrimitive(tokeniser));
			if (primitive == g_nullNode || !Node_getMapImporter(primitive)->importTokens(tokeniser)) {
				globalErrorStream() << "brush " << count_primitives << ": parse error\n";
				return g_nullNode;
			}

			scene::Traversable* traversable = Node_getTraversable(entity);
			if (Node_getEntity(entity)->isContainer() && traversable != 0) {
				traversable->insert(primitive);
			} else {
				globalErrorStream() << "entity " << index << ": type " << classname << ": discarding brush " << count_primitives << "\n";
			}
			++count_primitives;
		} else { // epair
			const std::string key = token;
			token = tokeniser.getToken();
			if (token.empty()) {
				Tokeniser_unexpectedError(tokeniser, token, "#epair-value");
				return g_nullNode;
			}
			keyValues.push_back(KeyValues::value_type(key, token));
			if (key == "classname")
				classname = keyValues.back().second;
		}
	}
	// unreachable code
	return g_nullNode;
}
/**
 * Parse the optional content flags, surface flags and value of a face.
 * @param flags The flags container that receives the three parsed integers
 * @param tokeniser The tokeniser to read the tokens from
 */
void UFOFaceTokenImporter::importContentAndSurfaceFlags (ContentsFlagsValue& flags, Tokeniser& tokeniser)
{
	// content flags
	std::string token = tokeniser.getToken();
	flags.setContentFlags(string::toInt(token));

	// surface flags
	token = tokeniser.getToken();
	flags.setSurfaceFlags(string::toInt(token));

	// value
	token = tokeniser.getToken();
	flags.setValue(string::toInt(token));
}
void Map_Read (scene::Node& root, Tokeniser& tokeniser, EntityCreator& entityTable, const PrimitiveParser& parser)
{
	// Create an info display panel to track load progress
	gtkutil::ModalProgressDialog dialog(GlobalRadiant().getMainWindow(), _("Loading map"));

	// Read each entity in the map, until EOF is reached
	for (int entCount = 0; ; entCount++) {
		// Update the dialog text
		dialog.setText("Loading entity " + string::toString(entCount));

		// Check for end of file
		if (tokeniser.getToken().empty())
			break;

		// Create an entity node by parsing from the stream
		NodeSmartReference entity(Entity_parseTokens(tokeniser, entityTable, parser, entCount));

		if (entity == g_nullNode) {
			globalErrorStream() << "entity " << entCount << ": parse error\n";
			return;
		}

		// Insert the new entity into the scene graph
		Node_getTraversable(root)->insert(entity);
	}
}
bool BrushTokenImporter::importTokens (Tokeniser& tokeniser)
{
	while (1) {
		// check for end of brush
		const std::string token = tokeniser.getToken();
		if (token == "}")
			break;

		tokeniser.ungetToken();

		m_brush.push_back(FaceSmartPointer(new Face(&m_brush)));

		Face& face = *m_brush.back();
		UFOFaceTokenImporter importer(face);
		if (!importer.importTokens(tokeniser))
			return false;
		face.planeChanged();
	}

	m_brush.planeChanged();
	m_brush.shaderChanged();

	return true;
}
void UMPFile::parse (Tokeniser &tokeniser)
{
	std::string token = tokeniser.getToken();
	while (token.length()) {
		if (token == "base") {
			_base = tokeniser.getToken();
			if (_base.empty()) {
				globalErrorStream() << _fileName << ": base without parameter given\n";
				return;
			}
		} else if (token == "tile") {
			try {
				parseTile(tokeniser);
			} catch (UMPException &e) {
				globalErrorStream() << _fileName << ": " << e.getMessage() << "\n";
				return;
			}
		}
		token = tokeniser.getToken();
	}
}
void LicenseParser::parseLicensesFile (Tokeniser& tokeniser, const std::string& filename)
{
	// skip all tokens on the first line
	for (;;) {
		std::string token = tokeniser.getToken();
		if (token.empty())
			break;
		if (tokeniser.getLine() > 1) {
			tokeniser.ungetToken();
			break;
		}
	}

	// record at most one texture path per line
	std::size_t lastLine = 1;
	for (;;) {
		std::string token = tokeniser.getToken();
		if (token.empty())
			break;
		if (string::contains(token, "base/textures/") && lastLine != tokeniser.getLine()) {
			// strip the "base/" prefix and the file extension
			_licensesMap[os::stripExtension(token.substr(5))] = true;
			lastLine = tokeniser.getLine();
		}
	}
}
bool UFOFaceTokenImporter::importTextureName (FaceShader& faceShader, Tokeniser& tokeniser)
{
	const std::string texture = tokeniser.getToken();
	if (texture.empty()) {
		Tokeniser_unexpectedError(tokeniser, texture, "#texture-name");
		return false;
	}

	if (texture == "NULL") {
		// no texture assigned to this face
		faceShader.setShader("");
	} else {
		faceShader.setShader(GlobalTexturePrefix_get() + texture);
	}
	return true;
}
void Map_Read(scene::Node& root, Tokeniser& tokeniser, EntityCreator& entityTable, const PrimitiveParser& parser)
{
	int count_entities = 0;
	for (;;) {
		tokeniser.nextLine();
		if (!tokeniser.getToken()) // "{" or 0 (end of file)
			break;

		NodeSmartReference entity(Entity_parseTokens(tokeniser, entityTable, parser, count_entities));

		if (entity == g_nullNode) {
			globalErrorStream() << "entity " << count_entities << ": parse error\n";
			return;
		}

		Node_getTraversable(root)->insert(entity);

		++count_entities;
	}
}
bool EntityClassDoom3_parseToken( Tokeniser& tokeniser, const char* string ){
	const char* token = tokeniser.getToken();
	PARSE_RETURN_FALSE_IF_FAIL( token != 0 );
	return string_equal( token, string );
}

bool EntityClassDoom3_parseToken( Tokeniser& tokeniser ){
	const char* token = tokeniser.getToken();
	PARSE_RETURN_FALSE_IF_FAIL( token != 0 );
	return true;
}

bool EntityClassDoom3_parseString( Tokeniser& tokeniser, StringOutputStream& s ){
	const char* token = tokeniser.getToken();
	PARSE_RETURN_FALSE_IF_FAIL( token != 0 );
	s << token;
	return true;
}

bool EntityClassDoom3_parseString( Tokeniser& tokeniser, CopiedString& s ){
	const char* token = tokeniser.getToken();
	PARSE_RETURN_FALSE_IF_FAIL( token != 0 );
	s = token;
	return true;
}
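A minimal sketch of how these helpers could compose when consuming a Doom 3 style block of the form { "key" "value" ... }; the driver function EntityClassDoom3_parseBlockBody_sketch and the omitted key/value storage are assumptions for illustration, not part of the source.

// Hypothetical composition of the helpers above (not in the source).
bool EntityClassDoom3_parseBlockBody_sketch( Tokeniser& tokeniser ){
	// expect the opening brace
	PARSE_RETURN_FALSE_IF_FAIL( EntityClassDoom3_parseToken( tokeniser, "{" ) );
	for (;;) {
		const char* token = tokeniser.getToken();
		PARSE_RETURN_FALSE_IF_FAIL( token != 0 );
		if ( string_equal( token, "}" ) ) {
			return true; // end of block
		}
		// copy the key: the next getToken() call may reuse the tokeniser buffer (assumption)
		CopiedString key( token );
		CopiedString value;
		PARSE_RETURN_FALSE_IF_FAIL( EntityClassDoom3_parseString( tokeniser, value ) );
		// ... store the key/value pair (omitted) ...
	}
}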