size_t MapWriter::writeEntity(Model::Entity& entity, const size_t lineNumber, FILE* stream) {
    // Write the entity's property block and remember how many lines it occupied.
    size_t lineCount = writeEntityHeader(entity, stream);

    // Write each brush, passing the running line number so brushes can record their own file positions.
    const Model::BrushList& brushes = entity.brushes();
    for (size_t i = 0; i < brushes.size(); i++)
        lineCount += writeBrush(*brushes[i], lineNumber + lineCount, stream);

    lineCount += writeEntityFooter(stream);

    // Record where this entity ended up in the file so it can be located later.
    entity.setFilePosition(lineNumber, lineCount);
    return lineCount;
}
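A minimal sketch of how a caller might drive writeEntity over a whole map, assuming a writeMap entry point and a map.entities() accessor returning a Model::EntityList; these names are illustrative and may not match the actual API.

// Hypothetical driver (sketch only): writeMap and map.entities() are assumptions.
size_t MapWriter::writeMap(Model::Map& map, FILE* stream) {
    size_t lineNumber = 1; // line numbers in the map file start at 1
    const Model::EntityList& entities = map.entities();
    for (size_t i = 0; i < entities.size(); i++)
        lineNumber += writeEntity(*entities[i], lineNumber, stream);
    return lineNumber - 1; // total number of lines written
}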
Model::Entity* MapParser::parseEntity(const BBox& worldBounds, Utility::ProgressIndicator* indicator) {
    Token token = m_tokenizer.nextToken();
    if (token.type() == TokenType::Eof)
        return NULL;

    // An entity starts with an opening brace; a closing brace here means there is nothing left to parse.
    expect(TokenType::OBrace | TokenType::CBrace, token);
    if (token.type() == TokenType::CBrace)
        return NULL;

    Model::Entity* entity = new Model::Entity(worldBounds);
    size_t firstLine = token.line();

    while ((token = m_tokenizer.nextToken()).type() != TokenType::Eof) {
        switch (token.type()) {
            case TokenType::String: {
                // Key/value property pair: "key" "value"
                String key = token.data();
                expect(TokenType::String, token = m_tokenizer.nextToken());
                String value = token.data();
                entity->setProperty(key, value);
                break;
            }
            case TokenType::OBrace: {
                // The brace opens the first brush; push it back so parseBrush sees it.
                m_tokenizer.pushToken(token);
                bool moreBrushes = true;
                while (moreBrushes) {
                    Model::Brush* brush = parseBrush(worldBounds, indicator);
                    if (brush != NULL)
                        entity->addBrush(*brush);
                    // Peek at the next token: another opening brace starts the next brush,
                    // a closing brace ends the entity and is handled by the outer loop.
                    expect(TokenType::OBrace | TokenType::CBrace, token = m_tokenizer.nextToken());
                    moreBrushes = (token.type() == TokenType::OBrace);
                    m_tokenizer.pushToken(token);
                }
                break;
            }
            case TokenType::CBrace: {
                if (indicator != NULL)
                    indicator->update(static_cast<int>(token.position()));
                // Remember the line range this entity occupies in the map file.
                entity->setFilePosition(firstLine, token.line() - firstLine);
                return entity;
            }
            default:
                delete entity;
                throw MapParserException(token, TokenType::String | TokenType::OBrace | TokenType::CBrace);
        }
    }

    return entity;
}
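Since parseEntity returns NULL once the end of the input is reached, a caller can simply loop until that happens. A minimal sketch, assuming a parseEntities entry point and a Model::EntityList container; both names are illustrative and may not match the actual API.

// Hypothetical driver (sketch only): parseEntities and Model::EntityList are assumptions.
void MapParser::parseEntities(const BBox& worldBounds, Model::EntityList& entities,
                              Utility::ProgressIndicator* indicator) {
    Model::Entity* entity = NULL;
    // parseEntity returns NULL at end of input, so keep pulling entities until then.
    while ((entity = parseEntity(worldBounds, indicator)) != NULL)
        entities.push_back(entity);
}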