// Attempt to match and consume the given keyword at the current input position.
// Returns true (and advances the context) only when the keyword's entire text
// matches and the match ends cleanly; otherwise leaves the context untouched.
bool try_to_consume_keyword(TokenizeContext& context, int keyword)
{
    const char* text = get_token_text(keyword);
    int len = (int) strlen(text);

    // Every character of the keyword must match the upcoming input.
    for (int pos = 0; pos < len; pos++) {
        if (context.next(pos) != text[pos])
            return false;
    }

    // Reject when the word continues past the keyword's length — it's a
    // longer identifier, not this keyword.
    if (is_acceptable_inside_identifier(context.next(len)))
        return false;

    // Don't match as a keyword if the next character is '('. (Original note:
    // this might be a bad idea.)
    if (context.next(len) == '(')
        return false;

    // Full match; consume it as a keyword token.
    context.consume(keyword, len);
    return true;
}
// Write every not-yet-consumed token to 'out' as "NAME(text)" pairs,
// separated by single spaces. Does not advance the stream.
void print_remaining_tokens(std::ostream& out, TokenStream& tokens)
{
    for (int index = 0; index < tokens.remaining(); index++) {
        if (index != 0)
            out << " ";
        out << get_token_text(tokens.next(index).match)
            << "(" << tokens.nextStr(index) << ")";
    }
}
// Consume one token, verifying its type when 'match' is not -1.
// Throws std::runtime_error on EOF or when the next token's type differs.
void TokenStream::consume(int match)
{
    if (finished()) {
        throw std::runtime_error(
            std::string("Unexpected EOF while looking for ") + get_token_text(match));
    }

    bool typeChecked = (match != -1);
    if (typeChecked && next().match != match) {
        std::stringstream msg;
        msg << "Unexpected token (expected " << get_token_text(match)
            << ", found " << get_token_text(next().match)
            << " '" << nextStr() << "')";
        throw std::runtime_error(msg.str());
    }

    _position++;
}
void TokenStream::consume(int match) { if (finished()) internal_error(std::string("Unexpected EOF while looking for ") + get_token_text(match)); if ((match != -1) && next().match != match) { std::stringstream msg; Value nextStr; getNextStr(&nextStr); msg << "Unexpected token (expected " << get_token_text(match) << ", found " << get_token_text(next().match) << " '" << as_cstring(&nextStr) << "')"; internal_error(msg.str()); } _position++; }
// Print each remaining token to stdout as "NAME(text)", space separated,
// ending with a newline. Does not advance the stream.
void dump_remaining_tokens(TokenStream& tokens)
{
    for (int index = 0; index < tokens.remaining(); index++) {
        if (index != 0)
            printf(" ");

        Value text;
        tokens.getNextStr(&text, index);
        printf("%s(%s)", get_token_text(tokens.next(index).match), as_cstring(&text));
    }
    printf("\n");
}
// Debug aid: print a window of tokens around the current position
// (up to 5 behind and 15 ahead), one per line, labeled by relative offset.
void TokenStream::dump()
{
    const int lookbehind = 5;
    const int lookahead = 15;

    for (int offset = -lookbehind; offset < lookahead; offset++) {
        int index = position() + offset;

        // Skip offsets that fall outside the stream.
        if (index < 0 || index >= length())
            continue;

        std::cout << "[" << offset << "] " << get_token_text(next(offset).match)
                  << " '" << nextStr(offset) << "'" << std::endl;
    }
}
void TokenStream::dump() { int lookbehind = 5; int lookahead = 15; for (int i=-lookbehind; i < lookahead; i++) { int index = position() + i; if (index < 0) continue; if (index >= length()) continue; Value nextStr; getNextStr(&nextStr, i); printf("[%d] %s '%s'\n", i, get_token_text(next(i).match), as_cstring(&nextStr)); } }
// Return the human-readable name of this token's type.
std::string Token::toString() const
{
    // get_token_text already yields the complete text; routing it through a
    // std::stringstream added an allocation and indirection for no benefit.
    return get_token_text(match);
}