std::string IntervalSet::elementName(const dfa::Vocabulary &vocabulary, ssize_t a) const {
  // The two sentinel token values have fixed canonical display strings;
  // every real token type is resolved through the vocabulary.
  if (a == Token::EOF) {
    return "<EOF>";
  }
  if (a == Token::EPSILON) {
    return "<EPSILON>";
  }
  return vocabulary.getDisplayName(a);
}
/// Creates a parser interpreter that can run any parse described by the given ATN.
///
/// @param grammarFileName Name of the grammar file the ATN was built from (diagnostics only).
/// @param vocabulary      Token vocabulary used to populate the display-name cache.
/// @param ruleNames       Rule names, indexed by rule number.
/// @param atn             The parser ATN to interpret; must outlive this object.
/// @param input           Token stream to parse (ownership is not taken).
ParserInterpreter::ParserInterpreter(const std::string &grammarFileName, const dfa::Vocabulary &vocabulary,
  const std::vector<std::string> &ruleNames, const atn::ATN &atn, TokenStream *input)
  : Parser(input), _grammarFileName(grammarFileName), _atn(atn), _ruleNames(ruleNames), _vocabulary(vocabulary) {

  // Cache a display name per token type; final size is known, so allocate once.
  _tokenNames.reserve(atn.maxTokenType);
  for (size_t i = 0; i < atn.maxTokenType; ++i) {
    _tokenNames.push_back(vocabulary.getDisplayName(i));
  }

  // init decision DFA — one DFA per decision state in the ATN.
  // Hoist the count out of the loop and reserve up front to avoid reallocations.
  const size_t decisionCount = atn.getNumberOfDecisions();
  _decisionToDFA.reserve(decisionCount);
  for (size_t i = 0; i < decisionCount; ++i) {
    atn::DecisionState *decisionState = atn.getDecisionState(i);
    _decisionToDFA.push_back(dfa::DFA(decisionState, i));
  }

  // get atn simulator that knows how to do predictions
  _interpreter = new atn::ParserATNSimulator(this, atn, _decisionToDFA, _sharedContextCache); /* mem-check: deleted in d-tor */
}
/// Creates a lexer interpreter that can tokenize input using the given lexer ATN.
///
/// @param grammarFileName Name of the grammar file the ATN was built from (diagnostics only).
/// @param vocabulary      Token vocabulary used to populate the display-name cache.
/// @param ruleNames       Lexer rule names, indexed by rule number.
/// @param modeNames       Lexer mode names, indexed by mode number.
/// @param atn             The lexer ATN to interpret; must outlive this object.
/// @param input           Character stream to tokenize (ownership is not taken).
/// @throws IllegalArgumentException if the supplied ATN is not a lexer ATN.
LexerInterpreter::LexerInterpreter(const std::string &grammarFileName, const dfa::Vocabulary &vocabulary,
  const std::vector<std::string> &ruleNames, const std::vector<std::string> &modeNames, const atn::ATN &atn,
  CharStream *input)
  : Lexer(input), _grammarFileName(grammarFileName), _atn(atn), _ruleNames(ruleNames), _modeNames(modeNames),
    _vocabulary(vocabulary) {

  // Guard against being handed a parser ATN by mistake.
  if (_atn.grammarType != atn::ATNType::LEXER) {
    throw IllegalArgumentException("The ATN must be a lexer ATN.");
  }

  // Cache a display name per token type; final size is known, so allocate once.
  _tokenNames.reserve(atn.maxTokenType);
  for (size_t i = 0; i < atn.maxTokenType; ++i) {
    _tokenNames.push_back(vocabulary.getDisplayName(i));
  }

  // One DFA per decision state in the ATN; hoist the count and reserve up front
  // to avoid reallocations.
  const size_t decisionCount = atn.getNumberOfDecisions();
  _decisionToDFA.reserve(decisionCount);
  for (size_t i = 0; i < decisionCount; ++i) {
    _decisionToDFA.push_back(dfa::DFA(_atn.getDecisionState(i), i));
  }

  _interpreter = new atn::LexerATNSimulator(_atn, _decisionToDFA, _sharedContextCache); /* mem-check: deleted in d-tor */
}