// Substitute every occurrence of the placeholder string 'macro' inside 'data'
// with 'newName'. The scan restarts from the beginning of the buffer after each
// substitution.
// NOTE(review): assumes newName does not itself contain macro — if it did, the
// restart-from-zero search would never terminate. Confirm callers guarantee this.
void dParserCompiler::ReplaceAllMacros (dString& data, const dString& newName, const dString& macro) const
{
	const int macroLength = int (macro.Size());
	int location = data.Find (macro);
	while (location != -1) {
		data.Replace (location, macroLength, newName);
		location = data.Find (macro);
	}
}
// Substitute exactly one (the first) occurrence of 'macro' inside 'data' with
// 'newName'. Unlike ReplaceAllMacros, the macro is required to be present:
// a missing macro trips the debug assert (and in release indexes with -1).
void dParserCompiler::ReplaceMacro (dString& data, const dString& newName, const dString& macro) const
{
	const int macroLength = int (macro.Size());
	const int location = int (data.Find (macro));
	dAssert (location != -1);
	data.Replace (location, macroLength, newName);
}
// Parse a yacc/bison-style grammar description ('inputRules') in three phases:
//   1) the declaration segment (%start, %token, %left/%right, %union, %{...%}, %expect),
//   2) the production-rules segment (one entry per nonterminal, delegated to ScanGrammarRule),
//   3) the trailing user-code segment after the second segment separator.
// Outputs (by reference):
//   ruleList          - the production rules, prepended with a synthetic start rule "<start>__"
//   symbolList        - map of dCRC64(name) -> dTokenInfo for every terminal and nonterminal
//   operatorPrecedence- one dOperatorsAssociation per %left/%right line, in priority order
//   userCodeBlock     - accumulated %{...%} code blocks from the declaration segment
//   userVariableClass - %union body if present, otherwise the template file contents
//   endUserCode       - verbatim tail of the grammar file (phase 3)
//   lastTokenEnum     - first enum value after the user-declared terminals
// Terminal token ids start at 256 so that single-character tokens can keep their
// ASCII value; nonterminals continue the same numbering after the terminals.
void dParserCompiler::ScanGrammarFile(
	const dString& inputRules, dProductionRule& ruleList, dTree<dTokenInfo, dCRCTYPE>& symbolList,
	dOperatorsPrecedence& operatorPrecedence, dString& userCodeBlock, dString& userVariableClass,
	dString& endUserCode, int& lastTokenEnum)
{
	dString startSymbol ("");
	// user terminals are numbered from 256 upward (0-255 are reserved for literal characters)
	int tokenEnumeration = 256;
	// each %left/%right line gets the next priority; later lines bind tighter (m_prioprity ascends)
	int operatorPrecedencePriority = 0;

	dParserLexical lexical (inputRules.GetStr());
	// default %union replacement; overwritten below if the grammar declares its own %union
	LoadTemplateFile("dParserUserVariableTemplate_cpp.txt", userVariableClass);

	// scan the definition segment
	// Phase 1: consume declarations until the first segment separator (GRAMMAR_SEGMENT).
	// Each case is responsible for leaving 'token' holding the next unconsumed token.
	for (dToken token = dToken(lexical.NextToken()); token != GRAMMAR_SEGMENT; ) {
		switch (int (token))
		{
			case START:
			{
				// %start <symbol>: remember which rule becomes the grammar's entry point
				token = dToken(lexical.NextToken());
				startSymbol = lexical.GetTokenString();
				token = dToken(lexical.NextToken());
				break;
			}

			case TOKEN:
			{
				// %token a b c ...: register every following LITERAL as a terminal
				for (token = dToken(lexical.NextToken()); token == LITERAL; token = dToken(lexical.NextToken())) {
					const char* const name = lexical.GetTokenString();
					symbolList.Insert(dTokenInfo (tokenEnumeration, TERMINAL, name), dCRC64 (name));
					tokenEnumeration ++;
				}
				break;
			}

			case LEFT:
			case RIGHT:
			{
				// %left / %right: open a new associativity group with the next priority level
				dOperatorsAssociation& association = operatorPrecedence.Append()->GetInfo();
				association.m_prioprity = operatorPrecedencePriority;
				operatorPrecedencePriority ++;

				switch (int (token))
				{
					case LEFT:
						association.m_associativity = dOperatorsAssociation::m_left;
						break;
					case RIGHT:
						association.m_associativity = dOperatorsAssociation::m_right;
						break;
				}

				// accept named tokens (LITERAL) and single-character punctuation operators
				// (raw ASCII token < 256 that is not alphanumeric, e.g. '+', '*')
				for (token = dToken(lexical.NextToken()); (token == LITERAL) || ((token < 256) && !isalnum (token)); token = dToken(lexical.NextToken())) {
					association.Append(dCRC64 (lexical.GetTokenString()));
				}
				break;
			}

			case UNION:
			{
				// %union { ... }: the brace-delimited body arrives as one SEMANTIC_ACTION token;
				// strip the leading '{' (pointer + 1) and the trailing '}' (Replace of last char)
				token = dToken(lexical.NextToken());
				dAssert (token == SEMANTIC_ACTION);
				userVariableClass = lexical.GetTokenString() + 1;
				userVariableClass.Replace(userVariableClass.Size() - 1, 1, "");
				token = dToken(lexical.NextToken());
				break;
			}

			case CODE_BLOCK:
			{
				// %{ ... %}: accumulate verbatim user code for the output's prologue
				userCodeBlock += lexical.GetTokenString();
				token = dToken(lexical.NextToken());
				break;
			}

			case EXPECT:
			{
				// %expect <n>: suppress up to n shift/reduce conflict warnings
				token = dToken(lexical.NextToken());
				dAssert (token == INTEGER);
				m_shiftReduceExpectedWarnings = atoi (lexical.GetTokenString());
				token = dToken(lexical.NextToken());
				break;
			}

			default:;
			{
				// unknown declaration: assert in debug, skip the token in release
				dAssert (0);
				token = dToken(lexical.NextToken());
			}
		}
	}

	// rule 0 is reserved for the synthetic start rule appended at the end
	int ruleNumber = 1;
	lastTokenEnum = tokenEnumeration;

	// scan the production rules segment
	// Phase 2: each LITERAL starts a production "name : ..."; the bodies are
	// consumed by ScanGrammarRule, which returns the token that follows the rule.
	dToken token1 = dToken(lexical.NextToken());
	for (; (token1 != GRAMMAR_SEGMENT) && (token1 != -1); token1 = dToken(lexical.NextToken())) {
		//dTrace (("%s\n", lexical.GetTokenString()));
		switch (int (token1))
		{
			case LITERAL:
			{
				// add the first Rule;
				dRuleInfo& rule = ruleList.Append()->GetInfo();
				rule.m_token = token1;
				rule.m_type = NONTERMINAL;
				rule.m_name = lexical.GetTokenString();
				rule.m_nameCRC = dCRC64 (lexical.GetTokenString());

				// a nonterminal may have been referenced before it was defined;
				// only allocate a new id if it is not in the symbol table yet
				dTree<dTokenInfo, dCRCTYPE>::dTreeNode* nonTerminalIdNode = symbolList.Find(rule.m_nameCRC);
				if (!nonTerminalIdNode) {
					nonTerminalIdNode = symbolList.Insert(dTokenInfo (tokenEnumeration, NONTERMINAL, rule.m_name), rule.m_nameCRC);
					tokenEnumeration ++;
				}
				rule.m_ruleId = nonTerminalIdNode->GetInfo().m_tokenId;
				rule.m_ruleNumber = ruleNumber;
				ruleNumber ++;

				token1 = ScanGrammarRule(lexical, ruleList, symbolList, ruleNumber, tokenEnumeration, operatorPrecedence);
				break;
			}
			default:
				dAssert (0);
		}
	}

	// the start rule defaults to the first production unless %start named one
	dProductionRule::dListNode* firtRuleNode = ruleList.GetFirst();
	if (startSymbol != "") {
		firtRuleNode = ruleList.Find (dCRC64 (startSymbol.GetStr()));
	}

	//Expand the Grammar Rule by adding an empty start Rule;
	// Augment the grammar: prepend rule 0, "<start>__ : <start>", the standard
	// S' -> S augmentation used to give the LR automaton a unique accept state.
	const dRuleInfo& firstRule = firtRuleNode->GetInfo();

	dRuleInfo& rule = ruleList.Addtop()->GetInfo();
	rule.m_ruleNumber = 0;
	rule.m_ruleId = tokenEnumeration;
	rule.m_token = firstRule.m_token;
	rule.m_type = NONTERMINAL;
	rule.m_name = firstRule.m_name + dString("__");
	rule.m_nameCRC = dCRC64 (rule.m_name.GetStr());
	symbolList.Insert(dTokenInfo (tokenEnumeration, rule.m_type, rule.m_name), rule.m_nameCRC);
	tokenEnumeration ++;

	// the augmented rule's single right-hand-side symbol is the original start symbol
	dSymbol& symbol = rule.Append()->GetInfo();
	symbol.m_token = firstRule.m_token;
	symbol.m_type = firstRule.m_type;
	symbol.m_name = firstRule.m_name;
	symbol.m_nameCRC = firstRule.m_nameCRC;

	// scan literal use code
	// Phase 3: everything after the second separator is copied verbatim
	if (token1 == GRAMMAR_SEGMENT) {
		endUserCode = lexical.GetNextBuffer();
		//endUserCode += "\n";
	}
}