// Parse an erased/impl-trait style type: a '+'-separated list of trait paths
// (each optionally prefixed by `for<...>` higher-ranked bounds) and lifetimes.
// Consumes tokens up to, but not including, the first token after the list.
// NOTE(review): `allow_trait_list` is accepted but never read here — confirm
// whether it is intentionally ignored for erased types.
TypeRef Parse_Type_ErasedType(TokenStream& lex, bool allow_trait_list) {
    Token tok;
    auto ps = lex.start_span();  // open a span covering the whole type
    ::std::vector<Type_TraitPath> traits;
    ::std::vector<AST::LifetimeRef> lifetimes;
    do {
        if( LOOK_AHEAD(lex) == TOK_LIFETIME ) {
            // Lifetime bound, e.g. `+ 'a`
            GET_TOK(tok, lex);
            lifetimes.push_back(AST::LifetimeRef( /*lex.point_span(),*/ lex.get_ident(mv$(tok)) ));
        }
        else {
            // Trait bound, optionally higher-ranked: `for<'a> Trait<...>`
            AST::HigherRankedBounds hrbs;
            if( lex.lookahead(0) == TOK_RWORD_FOR ) {
                hrbs = Parse_HRB(lex);
            }
            traits.push_back({ mv$(hrbs), Parse_Path(lex, PATH_GENERIC_TYPE) });
        }
    } while( GET_TOK(tok, lex) == TOK_PLUS );  // keep going while joined by '+'
    PUTBACK(tok, lex);  // the non-'+' token belongs to the caller
    return TypeRef(lex.end_span(mv$(ps)), TypeData::make_ErasedType({ mv$(traits), mv$(lifetimes) }));
}
// Parse a run of ASCII digits from `stream` into an IntegerType.
//
// Throws std::runtime_error on end-of-stream, when the stream is not
// positioned at a digit, or when the accumulated value overflows.
//
// Fix: the previous overflow test (`toret < oldtoret`) only detects wrap in
// the addition; `toret * 10` can wrap to a value still >= `oldtoret` (e.g.
// 8-bit: 200*10 -> 208) and go unnoticed. Undoing the step and comparing
// catches both the multiply and the add wrapping. Assumes IntegerType is an
// unsigned (modular) type, as the original wrap-based check already did.
IntegerType parse_ascii_integer(TokenStream & stream) {
    if (!stream.peek())
        throw std::runtime_error("eof");
    if (!ascii_digit_value(*stream.peek())) {
        throw std::runtime_error("not at a number!");
    }
    IntegerType toret = 0;
    while (true) {
        auto inb = stream.peek();
        if (!inb) break;                         // end of stream
        auto digit_val = ascii_digit_value(*inb);
        if (!digit_val) break;                   // first non-digit ends the number
        auto oldtoret = toret;
        toret = toret * 10 + *digit_val;
        // Undo the step; any wrap in `*10` or `+digit` makes this mismatch.
        if ((toret - *digit_val) / 10 != oldtoret) {
            throw std::runtime_error("overflow occured");
        }
        stream.skip();                           // consume the digit
    }
    return toret;
}
// Macro-expand a macro argument 'arg' to create 'expandedArg'. // Does not replace 'arg'. // Returns nullptr if no expanded argument is created. TPpContext::TokenStream* TPpContext::PrescanMacroArg(TokenStream& arg, TPpToken* ppToken, bool newLineOkay) { // expand the argument TokenStream* expandedArg = new TokenStream; pushInput(new tMarkerInput(this)); pushTokenStreamInput(arg); int token; while ((token = scanToken(ppToken)) != tMarkerInput::marker && token != EndOfInput) { token = tokenPaste(token, *ppToken); if (token == tMarkerInput::marker || token == EndOfInput) break; if (token == PpAtomIdentifier && MacroExpand(ppToken, false, newLineOkay) != 0) continue; expandedArg->putToken(token, ppToken); } if (token == EndOfInput) { // MacroExpand ate the marker, so had bad input, recover delete expandedArg; expandedArg = nullptr; } else { // remove the marker popInput(); } return expandedArg; }
void calculate() { const char quit = 'q'; // t.kind==quit means that t is a quit Token const char print = ';'; // t.kind==print means that t is a print Token const std::string prompt = "> "; const std::string equals = "= "; // used to indicate that what follows is an evaluation TokenStream tokenStream; std::string enterToClose = "~~"; while (std::cin) try { std::cout << prompt; Token token = tokenStream.getToken(); if (token.kind == print) token = tokenStream.getToken(); // eat ';' if (token.kind == quit) break; tokenStream.setToken(token); // reset token into tokenStream std::cout << equals << expression(tokenStream) << '\n'; } catch (std::exception& e) { std::cerr << e.what() << '\n'; // write error message tokenStream.ignore(print); } }
// Parse a single (non-application) term: a lambda, a parenthesised term,
// a variable, or an integer literal. Produces an error term otherwise.
Option<Term> singleTerm(TokenStream& tokens) {
    switch (tokens->type) {
    case Token::LAMBDA:
        return lambdaTerm(tokens);
    case Token::OPEN_BRACKET: {
        tokens.advance();
        Option<Term> inner = term(tokens);
        if (tokens->type != Token::CLOSE_BRACKET)
            return errorTerm("expected closing bracket");
        tokens.advance();
        return inner;
    }
    case Token::IDENTIFIER: {
        const string name = tokens->identifierValue;
        tokens.advance();
        return Option<Term>(variableTerm(name));
    }
    case Token::INTEGER: {
        const int literal = tokens->intValue;
        tokens.advance();
        return Option<Term>(integerTerm(literal));
    }
    case Token::CLOSE_BRACKET:
        return errorTerm("unexpected closing bracket");
    default:
        // Distinguish exhausted input from a genuinely unknown token.
        if (!tokens.good())
            return errorTerm("expected a term");
        return errorTerm("unknown token");
    }
}
// Evaluate an expression: a term followed by any number of '+'/'-' terms.
// Any token that is not '+' or '-' is pushed back for the caller.
double expression(TokenStream& tokenStream)
{
    double value = term(tokenStream);  // leading term
    for (;;) {
        Token next = tokenStream.getToken();
        switch (next.kind) {
        case '+':
            value += term(tokenStream);  // add the following term
            break;
        case '-':
            value -= term(tokenStream);  // subtract the following term
            break;
        default:
            tokenStream.setToken(next);  // not ours: push it back
            return value;
        }
    }
}
// Evaluate a "@name" function call at the current token position.
// Looks up the declared function, gathers its parenthesised arguments
// (unevaluated for macros, evaluated otherwise), validates them, and
// returns the function's result. Throws SyntaxError for undeclared names.
SAWYER_EXPORT std::string Grammar::evalFunction(TokenStream &tokens, ErrorLocation &eloc) const {
    ASSERT_require(tokens.isa(TOK_FUNCTION));
    std::string funcName = tokens.lexeme();
    // Function lexemes look like "@name"; strip the leading '@'.
    ASSERT_require(funcName.size() >= 2 && '@' == funcName[0]);
    funcName = funcName.substr(1);
    tokens.consume();

    // Get the function declaration
    const Function::Ptr func = functions_.getOrDefault(funcName);
    if (!func)
        throw SyntaxError("function \"" + funcName + "\" is not declared");

    // Parse the actual arguments; each TOK_LEFT opens one argument.
    // Macros receive raw (unexpanded) text, regular functions receive
    // evaluated text.
    std::vector<std::string> actuals;
    while (tokens.isa(TOK_LEFT)) {
        tokens.consume();
        if (func->isMacro()) {
            actuals.push_back(readArgument(tokens, eloc, CONSUME));
        } else {
            actuals.push_back(evalArgument(tokens, eloc, CONSUME));
        }
    }
    func->validateArgs(actuals, tokens);
    // Trap records "in function ..." context for any error raised by eval.
    ErrorLocation::Trap t(eloc, tokens, "in function \"" + funcName + "\"");
    std::string retval = func->eval(*this, actuals);
    t.passed();
    return retval;
}
// Demo driver: tokenize the file named on the command line with CLucene's
// StandardAnalyzer and print each token's text and type.
int main(int argc, char *argv[]){
    if (argc != 2){
        cout << "Usage: " << argv[0] << " <filename>" << endl;
        exit (1);
    } else {
        try {
            // NOTE(review): reader/analyzer/tokenStream are raw owning
            // pointers; if anything below throws, they leak. Consider RAII.
            FileReader *reader = _CLNEW FileReader(argv[1],"UTF-8");
            Analyzer *analyzer = new StandardAnalyzer();
            // Field name "iets" is arbitrary for analysis purposes.
            TokenStream *tokenStream = analyzer->tokenStream(_T("iets"), reader);
            Token token;
            while (tokenStream->next(&token)){
                // Convert TCHAR token text/type to narrow chars for printing.
                char buffer[1000];
                STRCPY_TtoA(buffer, token.termText(), 1000);
                cout << "\t token: " << buffer;
                STRCPY_TtoA(buffer, token.type(), 1000);
                cout << " type: " << buffer << endl;
            }
            tokenStream->close();
            delete tokenStream;
            // presumably the analyzer closed/owns the reader's stream already —
            // verify against CLucene docs before re-enabling reader->close().
            //reader->close();
            delete reader;
            delete analyzer;
        } catch (CLuceneError &e){
            cerr << e.what() << endl;
        }
    }
}
// Write every not-yet-consumed token to `out` as "text(str)", space-separated.
// Does not consume any tokens.
void print_remaining_tokens(std::ostream& out, TokenStream& tokens)
{
    const char* sep = "";
    for (int idx = 0; idx < tokens.remaining(); ++idx) {
        out << sep;
        sep = " ";
        out << get_token_text(tokens.next(idx).match)
            << "(" << tokens.nextStr(idx) << ")";
    }
}
// Consume every consecutive occurrence of `token` at the front of `stream`.
// Stops at the first non-matching token or at end of stream.
void skip_token(TokenStream & stream, Token token)
{
    for (;;) {
        auto head = stream.peek();
        if (!head) return;          // end of stream
        if (*head != token) return; // different token: leave it
        stream.skip();
    }
}
// Require that the next token is exactly `expected` and consume it.
// Throws std::runtime_error if the stream is empty or the token differs.
void expect(TokenStream & stream, Token expected)
{
    const auto head = stream.peek();
    const bool matches = head && *head == expected;
    if (!matches)
        throw std::runtime_error("Failed expect");
    stream.skip();
}
// Copy the tokenizer's displayURL (if any) onto the script source.
// Returns false only if setting it fails.
static bool SetDisplayURL(ExclusiveContext *cx, TokenStream &tokenStream, ScriptSource *ss)
{
    if (!tokenStream.hasDisplayURL())
        return true;
    return ss->setDisplayURL(cx, tokenStream.displayURL());
}
// Transfer the tokenizer's source map (if any) to the script source,
// tagging it with the script's filename. Returns false only on failure.
static bool SetSourceMap(JSContext *cx, TokenStream &tokenStream, ScriptSource *ss, JSScript *script)
{
    if (!tokenStream.hasSourceMap())
        return true;
    return ss->setSourceMap(cx, tokenStream.releaseSourceMap(), script->filename());
}
// Transfer the tokenizer's source map (if any) to the script source.
// Returns false only on failure.
static bool SetSourceMap(ExclusiveContext *cx, TokenStream &tokenStream, ScriptSource *ss)
{
    if (!tokenStream.hasSourceMap())
        return true;
    return ss->setSourceMap(cx, tokenStream.releaseSourceMap());
}
// Copy the tokenizer's sourceMapURL (if any) onto the script source.
// The source must not already carry one. Returns false only on failure.
static bool SetSourceMap(ExclusiveContext* cx, TokenStream& tokenStream, ScriptSource* ss)
{
    if (!tokenStream.hasSourceMapURL())
        return true;
    MOZ_ASSERT(!ss->hasSourceMapURL());
    return ss->setSourceMapURL(cx, tokenStream.sourceMapURL());
}
// Evaluate the whole token stream, concatenating each argument's result.
// A stray end-of-argument token (TOK_RIGHT) is a syntax error.
SAWYER_EXPORT std::string Grammar::eval(TokenStream &tokens, ErrorLocation &eloc) const {
    std::string out;
    for (; !tokens.atEof(); /*evalArgument advances the stream*/) {
        out += evalArgument(tokens, eloc, LEAVE);
        if (tokens.isa(TOK_RIGHT))
            throw SyntaxError("unexpected end-of-argument");
    }
    return out;
}
// Print every not-yet-consumed token as "text(str)", space-separated,
// followed by a newline. Does not consume any tokens.
void dump_remaining_tokens(TokenStream& tokens)
{
    const char* sep = "";
    for (int idx = 0; idx < tokens.remaining(); ++idx) {
        printf("%s", sep);
        sep = " ";
        Value text;
        tokens.getNextStr(&text, idx);
        printf("%s(%s)", get_token_text(tokens.next(idx).match), as_cstring(&text));
    }
    printf("\n");
}
/**
 * Convenience helper for token matching: consume the next element and
 * report whether it is a plain token whose text equals @p token.
 *
 * @param ts The token stream to draw from.
 * @param token The token as string to match.
 * @return true if the token was matched, false otherwise (including when
 *         the stream has no tokens left).
 */
bool Case_::MatchToken_(
    TokenStream& ts
  , std::string const& token) const
{
    if (!ts.HasTokens())
        return false;
    Element head = ts.NextToken();
    return head.Type() == Types::TOKEN && head.ToString() == token;
}
// Parse a lambda abstraction: LAMBDA IDENTIFIER <term>.
// Caller guarantees the stream is positioned at a LAMBDA token.
Option<Term> lambdaTerm(TokenStream& tokens) {
    assert(tokens->type == Token::LAMBDA);
    tokens.advance();
    if (tokens->type != Token::IDENTIFIER)
        return errorTerm("expected identifier");
    const string bound = tokens->identifierValue;
    tokens.advance();
    Option<Term> body = term(tokens);
    if (!body.exists())
        return body;  // propagate the body's error
    return Option<Term>(lambdaTerm(bound, *body));
}
// Report a no-viable-alternative error, quoting the offending input span
// (or "<EOF>"/"<unknown input>" when that span is unavailable).
void DefaultErrorStrategy::reportNoViableAlternative(Parser *recognizer, const NoViableAltException &e) {
  TokenStream *stream = recognizer->getTokenStream();
  std::string input = "<unknown input>";
  if (stream != nullptr) {
    input = (e.getStartToken()->getType() == Token::EOF)
      ? "<EOF>"
      : stream->getText(e.getStartToken(), e.getOffendingToken());
  }
  std::string msg = "no viable alternative at input " + escapeWSAndQuote(input);
  recognizer->notifyErrorListeners(e.getOffendingToken(), msg, std::make_exception_ptr(e));
}
/**
 * Parse the end of the token stream.
 *
 * @param stream -- Stream that has been prepped.
 */
bool Builtin_::ParseEnd_(TokenStream& stream) const
{
    bool is_end = false;
    if (stream.HasTokens())
    {
        // Checkpoint, peek at the next element, then roll back so the
        // stream is left exactly where it was.
        stream.Push();
        Element current_element = stream.NextToken();
        // End is marked by a plain ")" token.
        is_end =
            (current_element.Type() == Types::TOKEN &&
             current_element.ToString() == ")"
            );
        stream.Rollback();
    }
    return is_end;
}
/**
 * Parse out the token stream.
 *
 * Builds a Case_ from pre-parsed elements: elements[0] is the condition,
 * the rest are (match, body) pairs; a trailing unpaired element becomes
 * the fallthrough (default) case.
 *
 * @param in -- The input token stream.
 * @return the element that was parsed.
 */
Element Case_::Parse_(
    Parser const&
  , TokenStream& in
  , std::vector<strine::Element> const& elements) const
{
    in.Consume();
    Case_* s = new Case_();
    s->SetCondition(elements[0]);
    size_t const ELEMENTS_SIZE = elements.size();
    // Walk the remaining elements two at a time: (match, body) pairs.
    for(size_t i=1; i<ELEMENTS_SIZE; i += 2)
    {
        size_t nextIndex = i + 1;
        // Check to see if you have a fallthrough case.
        if (nextIndex >= ELEMENTS_SIZE)
        {
            s->SetFallthrough(elements[i]);
        }
        // Just a regular ol' element pairing.
        else
        {
            s->Add(elements[i], elements[nextIndex]);
        }
    }
    return Case(s, SourceLocation());
}
// === CODE === TypeRef Parse_Type(TokenStream& lex, bool allow_trait_list) { ProtoSpan ps = lex.start_span(); TypeRef rv = Parse_Type_Int(lex, allow_trait_list); //rv.set_span(lex.end_span(ps)); return rv; }
/**
 * Parse out the token stream.
 *
 * Builds an Error_ from pre-parsed elements: elements[0] is the message
 * (a String_), and, when present, elements[1]/elements[2] are the row and
 * column (Number_). Non-matching element types are silently ignored.
 *
 * @param in -- The input token stream.
 * @return the element that was parsed.
 */
Element Error_::Parse_(
    Parser const&
  , TokenStream& in
  , std::vector<strine::Element> const& elements) const
{
    in.Consume();
    std::shared_ptr<Error_> err(new Error_());
    if (auto ident = std::dynamic_pointer_cast<String_>(elements[0].ElementHandle()))
    {
        err->SetIdentifier(ident->Value());
    }
    if (elements.size() >= 3)
    {
        if (auto row = std::dynamic_pointer_cast<Number_>(elements[1].ElementHandle()))
        {
            err->SetRow(row->IntValue());
        }
        if (auto col = std::dynamic_pointer_cast<Number_>(elements[2].ElementHandle()))
        {
            err->SetColumn(col->IntValue());
        }
    }
    return Error(err, SourceLocation());
}
/**
 * Parse out the token stream.
 *
 * Builds a Function_ from pre-parsed elements: every element except the
 * last is a formal argument (kept only if it casts to a Variable); the
 * last element is the body. With no elements at all, the body is Nil.
 *
 * @param parser Not used.
 * @param in The input token stream.
 * @param elements The elements.
 * @return the element that was parsed.
 */
Element Function_::Parse_(
    Parser const& /* parser */
  , TokenStream& in
  , std::vector<Element> const& elements) const
{
    in.Consume();
    std::shared_ptr<Function_> fn(new Function_());

    std::vector<Variable> params;
    if (!elements.empty())
    {
        size_t const argCount = elements.size() - 1;
        bool castOk = false;
        for (size_t idx = 0; idx < argCount; ++idx)
        {
            Variable var = CastToVariable(elements[idx], castOk);
            if (castOk)
            {
                params.push_back(var);
            }
        }
    }
    fn->SetArguments(params);

    if (elements.empty())
    {
        fn->SetBody(Nil());
    }
    else
    {
        fn->SetBody(elements.back());
    }
    return Function(fn, SourceLocation());
}
// Pre-prediction synchronization hook: if the upcoming token cannot start
// any viable alternative at the current ATN state, try to repair the
// stream (delete one token, or consume until a resync set) before the
// parser predicts and fails.
void DefaultErrorStrategy::sync(Parser *recognizer) {
  atn::ATNState *s = recognizer->getInterpreter<atn::ATNSimulator>()->atn.states[recognizer->getState()];
  // If already recovering, don't try to sync
  if (inErrorRecoveryMode(recognizer)) {
    return;
  }

  TokenStream *tokens = recognizer->getTokenStream();
  size_t la = tokens->LA(1);

  // try cheaper subset first; might get lucky. seems to shave a wee bit off
  if (recognizer->getATN().nextTokens(s).contains(la) || la == Token::EOF) {
    return;
  }

  // Return but don't end recovery. only do that upon valid token match
  if (recognizer->isExpectedToken((int)la)) {
    return;
  }

  switch (s->getStateType()) {
    // At a decision point inside a block/loop entry: a single spurious
    // token can sometimes be deleted to repair the stream.
    case atn::ATNState::BLOCK_START:
    case atn::ATNState::STAR_BLOCK_START:
    case atn::ATNState::PLUS_BLOCK_START:
    case atn::ATNState::STAR_LOOP_ENTRY:
      // report error and recover if possible
      if (singleTokenDeletion(recognizer) != nullptr) {
        return;
      }
      throw InputMismatchException(recognizer);

    // At a loop back-edge: report the unwanted token and consume until
    // something that can follow the loop iteration or enclosing rule.
    case atn::ATNState::PLUS_LOOP_BACK:
    case atn::ATNState::STAR_LOOP_BACK: {
      reportUnwantedToken(recognizer);
      misc::IntervalSet expecting = recognizer->getExpectedTokens();
      misc::IntervalSet whatFollowsLoopIterationOrRule = expecting.Or(getErrorRecoverySet(recognizer));
      consumeUntil(recognizer, whatFollowsLoopIterationOrRule);
    }
      break;

    default:
      // do nothing if we can't identify the exact kind of ATN state
      break;
  }
}
std::vector<typename std::decay<decltype(*std::declval<TokenStream>().peek())>::type> read_token_vector(TokenStream & stream, opt::optional<Token> term, opt::optional<size_t> max_amt, bool drop_term = false) { std::vector<typename std::decay<decltype(*std::declval<TokenStream>().peek())>::type> toret; for (size_t i = 0; !max_amt || i < *max_amt; ++i) { auto mB = stream.peek(); if (!mB || (term && *mB == *term)) { if (drop_term) stream.skip(); break; } toret.push_back(*mB); stream.skip(); } return toret; }
// Parse the statements of a compound statement up to its closing '}'.
// Appends each parsed statement to statements_. Returns true when the
// closing brace is found; false on an unterminated block (either a
// statement failed to parse or input ran out before '}').
bool CompoundStatement::make(TokenStream& t)
{
    while (t.fetch()) {
        if (t.token() == Token(Token::PUNCTUATION, '}')) {
            return true;  // end of the compound statement
        } else {
            // The fetched token starts a statement: push it back so
            // make_statement sees it from the beginning.
            t.didNotConsume();
            if (Statement s = make_statement(t)) {
                statements_.push_back(move(s));
            } else {
                t.errorMessage("Unfinished compound statement");
                t.stopLooking();
                return false;
            }
        }
    }
    // Input exhausted without seeing '}'.
    return false;
}
/**
 * Parse out the token stream.
 *
 * Wraps the single pre-parsed element in a Quoted node.
 *
 * @param in -- The input token stream.
 * @return the element that was parsed.
 */
Element Quoted_::Parse_(
    Parser const&
  , TokenStream& in
  , std::vector<Element> const& elements) const
{
    in.Consume();
    Quoted result;
    result.SetElement(elements[0]);
    return result;
}
// Drain `ts` into a stack whose TOP is the FIRST token of the stream.
// Pushing the stream in order puts the last token on top, so the tokens
// pass through an intermediate stack to flip them back.
std::stack<std::unique_ptr<Token> > exprTree::getStack(TokenStream &ts)
{
    std::stack<std::unique_ptr<Token> > flipped;
    while (!ts.empty()) {
        flipped.push(ts.get());  // last token ends up on top here
    }
    std::stack<std::unique_ptr<Token> > result;
    while (!flipped.empty()) {
        result.push(std::move(flipped.top()));  // second flip restores order
        flipped.pop();
    }
    return result;
}