// Parses a "using namespace <id>;" directive and records the namespace
// id as active so later unqualified names can be resolved against it.
bool VdfParser::ProcessUsing(Tokens& tokens) {
    assert(_variables);

    tokens.AssertToken(TokenKeywordUsing, true);
    tokens.AssertToken(TokenKeywordNamespace, true);

    const auto namespace_id = tokens.GetNamespaceId();
    _namespace_in_use.insert(namespace_id);

    tokens.AssertToken(TokenSemiColon, true);
    return true;
}
/** Process enum declarations, for example:
    enum Dimension
    {
        DimensionUnknown,
        DimensionRead,
        DimensionPhase,
        DimensionSlice,
    };

    NOTE: enumerator-list parsing is not implemented yet; this function
    consumes only the declaration header and then asserts.
*/
bool VdfParser::ProcessEnumDeclaration(Tokens& tokens) {
    tokens.AssertToken(TokenKeywordEnum, true);
    auto enum_id = GetFqId(tokens.GetId().c_str());
    // The documented grammar puts a '{' after the enum name, so expect
    // TokenLeftBrace here (the previous TokenLeftParenthesis was wrong).
    tokens.AssertToken(TokenLeftBrace, true);

    (void)enum_id;  // Will be used once enumerator parsing is implemented.
    assert(0 && "not finished yet.");
    return false;
}
// Parses "namespace <id> { <statements> }": pushes <id> onto the current
// fully-qualified namespace path, processes the enclosed statements, then
// restores the enclosing path on exit.
bool VdfParser::ProcessNamespace(Tokens& tokens) {
    assert(_variables);

    tokens.AssertToken(TokenKeywordNamespace, true);
    const wstring namespace_id = tokens.GetId();

    // Enter the namespace: append to the fully-qualified path.
    if (_current_namespace.empty()) {
        _current_namespace = namespace_id;
    } else {
        _current_namespace += L"::";
        _current_namespace += namespace_id;
    }

    tokens.AssertToken(TokenLeftBrace, true);
    ProcessStatement(tokens);
    tokens.AssertToken(TokenRightBrace, true);

    // Leave the namespace: strip the innermost component again.
    _current_namespace = OuterNamespace(_current_namespace);
    return true;
}
// Parses a simple (non-array) variable declaration:
//   <type> <id>;             — a single variable, or
//   <type> <id> # <a, b>;    — a batch expanded to <id>a .. <id>b.
// The type may be float/int/bool/string or a user-defined (TokenId) type.
bool VdfParser::ProcessSimpleDeclaration(Tokens& tokens) {
    assert(_variables);

    auto type = tokens.GetCurrentToken().type;
    auto type_id = tokens.GetCurrentToken().text;
    assert(type == TokenKeywordFloat || type == TokenKeywordInt ||
           type == TokenKeywordBool || type == TokenKeywordString ||
           type == TokenId);
    tokens.Next();

    auto id = tokens.GetId();
    auto fq_type_id = ResolveFqId(type_id.c_str(), true, tokens);

    if (tokens.IsTokenOfType(TokenSharp, true)) {
        // Batch form: '#' is followed by <first, last>; declare one
        // variable per index with the index appended to the id.
        tokens.AssertToken(TokenLessThan, true);
        auto first = boost::lexical_cast<int>(tokens.GetLiteralValue());
        tokens.AssertToken(TokenComma, true);
        auto last = boost::lexical_cast<int>(tokens.GetLiteralValue());
        tokens.AssertToken(TokenGreaterThan, true);

        for (int index = first; index <= last; ++index) {
            wostringstream name;
            name << id << index;
            const wstring variable_id = GetFqId(name.str().c_str());
            _variables->Add(fq_type_id.c_str(), variable_id.c_str(), L"");
        }
    } else {
        // Single-variable form.
        _variables->Add(fq_type_id.c_str(), GetFqId(id.c_str()).c_str(), L"");
    }

    tokens.AssertToken(TokenSemiColon, true);
    return true;
}
bool VdfParser::ProcessArrayDeclaration(Tokens& tokens, VariableSpace& variables) { static map<TokenType, int> token_to_array_property{ {TokenKeywordFloat, VariableFloatArray}, {TokenKeywordInt, VariableIntArray}, {TokenKeywordString, VariableStringArray}, {TokenKeywordBool, VariableBoolArray}, {TokenId, VariableStructArray}, }; tokens.AssertToken(TokenKeywordArray, true); tokens.AssertToken(TokenLessThan, true); auto type = tokens.GetCurrentToken().type; auto element_type_id = tokens.GetCurrentToken().text; auto fq_type_id = ResolveFqId(element_type_id.c_str(), true, tokens); assert(type == TokenKeywordFloat || type == TokenKeywordInt || type == TokenKeywordBool || type == TokenKeywordString || type == TokenId); tokens.Next(); tokens.AssertToken(TokenGreaterThan, true); auto id = (&variables == _variables.get()) ? GetFqId(tokens.GetId().c_str()) : tokens.GetId(); if (!tokens.IsTokenOfType(TokenSharp, true)) { variables.AddArray(fq_type_id.c_str(), id.c_str(), L""); } else { tokens.AssertToken(TokenLessThan, true); auto start_index = _wtoi(tokens.GetLiteralValue().c_str()); tokens.AssertToken(TokenComma, true); auto end_index = _wtoi(tokens.GetLiteralValue().c_str()); tokens.AssertToken(TokenGreaterThan, true); for (int i = start_index; i <= end_index; ++i) { wostringstream output; output << id << i; auto variable_id = (&variables == _variables.get()) ? GetFqId(output.str().c_str()) : output.str(); variables.AddArray(fq_type_id.c_str(), variable_id.c_str(), L""); } } tokens.AssertToken(TokenSemiColon, true); return true; }
// Parses a struct definition:
//   struct <id> { <member declarations...> };
// Members may be float/int/string/bool, arrays, or previously defined
// struct types. Members are collected into a local VariableSpace which is
// then registered as a new named type on _variables.
bool VdfParser::ProcessStructDeclaration(Tokens& tokens) {
    tokens.AssertToken(TokenKeywordStruct, true);
    auto struct_id = GetFqId(tokens.GetId().c_str());
    tokens.AssertToken(TokenLeftBrace, true);
    // Accumulates the struct's members until the closing brace.
    VariableSpace struct_variables;
    // Token types that may legally start a member declaration.
    static std::set<TokenType> s_allowed_types = { TokenKeywordBool, TokenKeywordFloat, TokenKeywordInt, TokenKeywordString, TokenKeywordArray, TokenId };
    do {
        auto& type_token = tokens.GetCurrentToken();
        if (s_allowed_types.find(type_token.type) == s_allowed_types.end()) {
            throw(CompileError(type_token, CompileErrorTypeExpected, L"Expected either one of float, int, string, bool, array, or a struct name."));
        }
        if (type_token.type == TokenKeywordArray) {
            // Array members are declared directly into the struct's own space.
            ProcessArrayDeclaration(tokens, struct_variables);
        } else {
            auto type_id = ResolveFqId(type_token.text.c_str(), true, tokens);
            if (type_token.type == TokenId) {
                // Struct-typed member: the member's type must already exist.
                if (!_variables->TypeExists(type_id.c_str()))
                    throw CompileError(type_token, CompileErrorTypeNotFound, L"Type not found.");
                // NOTE(review): this compares the raw token text against the
                // fully-qualified struct_id; inside a namespace they would
                // presumably never match — should this compare type_id
                // instead? TODO confirm intended self-nesting detection.
                if (type_token.text == struct_id)
                    throw CompileError(type_token, CompileErrorNestStruct, L"Nested struct definition not allowed.");
            }
            tokens.Next();
            auto& member_token = tokens.GetCurrentToken();
            // NOTE(review): the duplicate-member error is reported at
            // type_token rather than member_token — looks unintended; confirm.
            if (struct_variables.VariableExists(member_token.text.c_str()))
                throw CompileError(type_token, CompileErrorMemeberExists, L"Duplicated struct member ids.");
            // Clone the prototype variable registered for this type and
            // rename the clone to the member id.
            auto prototype = _variables->GetType(type_id.c_str());
            if (prototype == nullptr)
                throw CompileError(type_token, CompileErrorTypeNotFound, L"Type not found.");
            auto member = dynamic_cast<IVariable*>(prototype->Clone());
            assert(member != nullptr);
            member->SetId(member_token.text.c_str());
            struct_variables.Add(member);
            tokens.Next();
            tokens.AssertToken(TokenSemiColon, true);
        }
    } while (!tokens.IsTokenOfType(TokenRightBrace, false));
    tokens.Next();
    tokens.AssertToken(TokenSemiColon, true);
    // Register the completed member set as a new named type.
    _variables->AddType(struct_id.c_str(), struct_variables.Variables());
    return true;
}