// T := 'T' FilePath Comment // FilePath := AnyString // AnyString := .*^('\t' Comment) bool MetaParser::isTCommand(MetaSema::ActionResult& actionResult) { bool result = false; if (getCurTok().is(tok::ident) && getCurTok().getIdent().equals("T")) { consumeAnyStringToken(tok::comment); if (getCurTok().is(tok::raw_ident)) { result = true; actionResult = m_Actions->actOnTCommand(getCurTok().getIdent()); consumeToken(); if (getCurTok().is(tok::comment)) { consumeAnyStringToken(tok::eof); m_Actions->actOnComment(getCurTok().getIdent()); } } } // TODO: Some fine grained diagnostics return result; }
// Term' → '&&' Unary Term' | ε bool AnimExpression::parseTermPrime(const QString& str, QString::const_iterator& iter) { auto token = consumeToken(str, iter); if (token.type == Token::And) { if (!parseUnary(str, iter)) { unconsumeToken(token); return false; } if (!parseTermPrime(str, iter)) { unconsumeToken(token); return false; } _opCodes.push_back(OpCode {OpCode::And}); return true; } else { unconsumeToken(token); return true; } }
// Parses a time value such as "12 us", "13 ms" or "2 s" from `input`
// starting at *index and converts it to microseconds in *result.
// The micro prefix is accepted in three encodings: ASCII "u",
// Latin-1 micro sign (0xB5), UTF-8 micro sign (C2 B5) and UTF-8 Greek mu
// (CE BC).  Returns false if no number or no recognized unit follows;
// on success *index is advanced past the unit token.
static bool consumeTime(const char* input, int* index, Microseconds* result) {
    const char* expectedToken;
    double val;
    if (!consumeDouble(input, index, &val)) {
        return false;
    }
    consumeWhitespace(input, index);
    // Dispatch on the first byte of the unit, then require consumeToken()
    // to match the full token below.
    switch (input[*index]) {
        case 'u':
            expectedToken = "us";
            break;
        case '\xB5': // Latin-1 micro
            expectedToken = "\xB5s";
            break;
        case '\xC2': // micro, utf-8
            expectedToken = "\xC2\xB5s";
            break;
        case '\xCE': // mu, utf-8
            expectedToken = "\xCE\xBCs";
            break;
        case 'm':
            expectedToken = "ms";
            val *= 1000; // milliseconds -> microseconds
            break;
        case 's':
            expectedToken = "s";
            val *= 1000000; // seconds -> microseconds
            break;
        default:
            return false;
    }
    *result = Microseconds(val);
    return consumeToken(expectedToken, input, index);
}
// Converts an additive infix expression ("factor (+|-) factor ...") to
// postfix: operands go to outputVector via factor(), pending '+'/'-'
// operators are held on operatorStack (a shunting-yard restricted to one
// precedence level).  Returns false as soon as a factor fails to parse.
// NOTE(review): right operands are handled by recursion, so at most one
// operator is flushed per step — confirm interplay with factor()'s own
// bracket handling.
bool ASTBuilder::toPostFix() //CONSTANT or VAR_NAME or OPEN_BRACKET or CLOSE_BRACKET expected as current token upon call to expr()
{
    bool leftOp, rightOp;
    string Operator;
    leftOp = factor();
    if (!leftOp)
        return false;
    if (nextToken() != ";") //Parse only if end-of-statement is not reached
    {
        if (nextToken() == "+" || nextToken() == "-")
        {
            Operator = nextToken();
            consumeToken();
            // Left associativity: flush the previous pending operator
            // first, unless it is an open bracket which must stay put.
            if ( !operatorStack->empty() )
            {
                if ( operatorStack->back() != "(" )
                {
                    outputVector->push_back( operatorStack->back() );
                    operatorStack->pop_back();
                }
            }
            operatorStack->push_back(Operator);
            rightOp = toPostFix(); // parse the right-hand side recursively
            return rightOp;
        }
    }
    // End of input: drain the remaining operators into the output.
    if (nextToken() == "")
    {
        while ( !operatorStack->empty() )
        {
            Operator = operatorStack->back();
            operatorStack->pop_back();
            outputVector->push_back(Operator);
        }
    }
    return leftOp;
}
bool Parser::Impl::parseNumber() {
  // The lexer returns the number including the quantifier as a
  // single token value. Here, we split it and check that the number
  // is not out of range:
  if ( !obtainToken() || atEnd() )
    return false;
  if ( token() != Lexer::Number )
    return false;

  // number: accumulate the leading digit run, rejecting values that
  // would overflow an unsigned long.
  unsigned long result = 0;
  int i = 0;
  const QByteArray s = tokenValue().toLatin1();
  for ( const int len = s.length() ; i < len && isdigit( s[i] ) ; ++i ) {
    const unsigned long digitValue = s[i] - '0' ;
    if ( willOverflowULong( result, digitValue ) ) {
      makeError( Error::NumberOutOfRange );
      return false;
    } else {
      result *= 10 ;
      result += digitValue ;
    }
  }

  // optional quantifier: a single trailing character that scales the
  // number; the multiplication is overflow-checked in floating point
  // before it is performed.
  char quantifier = '\0';
  if ( i < s.length() ) {
    assert( i + 1 == s.length() ); // lexer guarantees at most one char
    quantifier = s[i];
    const unsigned long factor = factorForQuantifier( quantifier );
    if ( result > double(ULONG_MAX) / double(factor) ) {
      makeError( Error::NumberOutOfRange );
      return false;
    }
    result *= factor;
  }
  if ( scriptBuilder() )
    scriptBuilder()->numberArgument( result, quantifier );
  consumeToken();
  return true;
}
/*!
 * expression ::= term ( '|' term )* ( '|' | )
 *
 * Collects '|'-separated alternatives into an OrNode.  A trailing '|'
 * (nothing that can start a term after it) contributes an EmptyNode as
 * the final alternative.
 */
Node* Parser::parseExpression() {
    std::vector<Node*> alternatives;
    alternatives.push_back(parseTerm());
    for (;;) {
        if (token_ != Lexer::Token::bar)
            break;
        consumeToken();
        // An identifier starts a term only when it is not the head of a
        // new rule ('::=') or an action ('->').
        const bool startsTerm =
            token_ == Lexer::Token::text ||
            token_ == Lexer::Token::l_paren ||
            (token_ == Lexer::Token::identifier &&
             peek_token_ != Lexer::Token::coloncolonequal &&
             peek_token_ != Lexer::Token::minusgreater);
        if (!startsTerm) {
            alternatives.push_back(new EmptyNode());
            break;
        }
        alternatives.push_back(parseTerm());
    }
    return new OrNode(alternatives);
}
// XCommand := 'x' FilePath[ArgList] | 'X' FilePath[ArgList] // FilePath := AnyString // ArgList := (ExtraArgList) ' ' [ArgList] // ExtraArgList := AnyString [, ExtraArgList] bool MetaParser::isXCommand(MetaSema::ActionResult& actionResult, Value* resultValue) { if (resultValue) *resultValue = Value(); const Token& Tok = getCurTok(); if (Tok.is(tok::ident) && (Tok.getIdent().equals("x") || Tok.getIdent().equals("X"))) { // There might be ArgList consumeAnyStringToken(tok::l_paren); llvm::StringRef file(getCurTok().getIdent()); consumeToken(); // '(' to end of string: std::string args = getCurTok().getBufStart(); if (args.empty()) args = "()"; actionResult = m_Actions->actOnxCommand(file, args, resultValue); return true; } return false; }
// Parses the generic-argument list following a class type reference and
// stores the resolved types in this->genericArguments, seeded with the
// superclass's generic arguments.  Each own argument is introduced by a
// spiral-shell token.  Raises a compiler error if the count does not
// match the class declaration.
void Type::parseGenericArguments(Type contextType, EmojicodeChar theNamespace,
                                 TypeDynamism dynamism, const Token *errorToken) {
    if (this->type == TT_CLASS) {
        // Start from the arguments inherited from the superclass.
        this->genericArguments = std::vector<Type>(this->eclass->superGenericArguments);
        if (this->eclass->ownGenericArgumentCount){
            int count = 0;
            while(nextToken()->value[0] == E_SPIRAL_SHELL){
                const Token *token = consumeToken(); // the 🐚 itself
                Type ta = parseAndFetchType(contextType, theNamespace, dynamism, nullptr);
                validateGenericArgument(ta, count, contextType, token);
                genericArguments.push_back(ta);
                count++;
            }
            if(count != this->eclass->ownGenericArgumentCount){
                auto str = this->toString(typeNothingness, false);
                compilerError(errorToken, "Type %s requires %d generic arguments, but %d were given.", str.c_str(), this->eclass->ownGenericArgumentCount, count);
            }
        }
    }
}
// for := 'for' '(' IDENT 'in' expression ')' block
// Parses a for-in loop.  The loop variable must already be declared; it
// is resolved in the body's scope after the block has been parsed.
ForNode* Parser::parseFor() {
    const uint32_t forTokenIndex = _currentTokenIndex;
    ensureKeyword("for");
    ensureToken(tLPAREN);

    if (currentToken() != tIDENT) {
        error("identifier expected");
    }
    const string& loopVarName = currentTokenValue();
    consumeToken();

    ensureKeyword("in");
    AstNode* rangeExpr = parseExpression();
    ensureToken(tRPAREN);

    BlockNode* body = parseBlock(true);
    AstVar* loopVar = body->scope()->lookupVariable(loopVarName);
    return new ForNode(forTokenIndex, loopVar, rangeExpr, body);
}
// Consumes one token while scanning for the identifier `end`, keeping a
// nesting count in *deep that `deeper` tokens increase.  Returns nullptr
// exactly when the matching (depth-0) `end` was consumed; otherwise
// returns the consumed token.  Aborts compilation at end of input.
static const Token* until(EmojicodeChar end, EmojicodeChar deeper, int *deep) {
    const Token *tok = consumeToken();
    if (tok == nullptr) {
        compilerError(nullptr, "Unexpected end of program.");
    }
    if (tok->type != IDENTIFIER) {
        return tok; // only identifiers affect nesting
    }
    const EmojicodeChar c = tok->value[0];
    if (c == deeper) {
        ++*deep;
    } else if (c == end) {
        if (*deep == 0) {
            return nullptr; // found the matching terminator
        }
        --*deep;
    }
    return tok;
}
// Parses a verbatim block in a documentation comment: the opening
// command, a sequence of verbatim lines (empty lines included), and the
// closing command.  An unterminated block is finished with an invalid
// source location and an empty closing name.
VerbatimBlockComment *Parser::parseVerbatimBlock() {
  assert(Tok.is(tok::verbatim_block_begin));

  VerbatimBlockComment *VB =
      S.actOnVerbatimBlockStart(Tok.getLocation(),
                                Tok.getVerbatimBlockName());
  consumeToken();

  // Don't create an empty line if verbatim opening command is followed
  // by a newline.
  if (Tok.is(tok::newline))
    consumeToken();

  SmallVector<VerbatimBlockLineComment *, 8> Lines;
  while (Tok.is(tok::verbatim_block_line) || Tok.is(tok::newline)) {
    VerbatimBlockLineComment *Line;
    if (Tok.is(tok::verbatim_block_line)) {
      Line = S.actOnVerbatimBlockLine(Tok.getLocation(),
                                      Tok.getVerbatimBlockText());
      consumeToken();
      // Swallow the newline terminating the verbatim line, if any.
      if (Tok.is(tok::newline)) {
        consumeToken();
      }
    } else {
      // Empty line, just a tok::newline.
      Line = S.actOnVerbatimBlockLine(Tok.getLocation(), "");
      consumeToken();
    }
    Lines.push_back(Line);
  }

  if (Tok.is(tok::verbatim_block_end)) {
    VB = S.actOnVerbatimBlockFinish(VB, Tok.getLocation(),
                                    Tok.getVerbatimBlockName(),
                                    copyArray(llvm::makeArrayRef(Lines)));
    consumeToken();
  } else {
    // Unterminated \\verbatim block
    VB = S.actOnVerbatimBlockFinish(VB, SourceLocation(), "",
                                    copyArray(llvm::makeArrayRef(Lines)));
  }

  return VB;
}
// unary := unaryOp unary | call | IDENT | DOUBLE | INT | STRING
//        | '(' expression ')'
// Parses a unary expression or primary.  Reports an error (and returns
// 0) for an undeclared variable or an unexpected token.
AstNode* Parser::parseUnary() {
  if (isUnaryOp(currentToken())) {
    TokenKind op = currentToken();
    consumeToken();
    return new UnaryOpNode(_currentTokenIndex, op, parseUnary());
  } else if (currentToken() == tIDENT && lookaheadToken(1) == tLPAREN) {
    // identifier followed by '(' is a function call
    AstNode* expr = parseCall();
    return expr;
  } else if (currentToken() == tIDENT) {
    AstVar* var = _currentScope->lookupVariable(currentTokenValue());
    if (var == 0) {
      error("undeclared variable: %s", currentTokenValue().c_str());
    }
    LoadNode* result = new LoadNode(_currentTokenIndex, var);
    consumeToken();
    return result;
  } else if (currentToken() == tDOUBLE) {
    DoubleLiteralNode* result =
        new DoubleLiteralNode(_currentTokenIndex,
                              parseDouble(currentTokenValue()));
    consumeToken();
    return result;
  } else if (currentToken() == tINT) {
    IntLiteralNode* result =
        new IntLiteralNode(_currentTokenIndex, parseInt(currentTokenValue()));
    consumeToken();
    return result;
  } else if (currentToken() == tSTRING) {
    StringLiteralNode* result =
        new StringLiteralNode(_currentTokenIndex, currentTokenValue());
    consumeToken();
    return result;
  } else if (currentToken() == tLPAREN) {
    consumeToken();
    AstNode* expr = parseExpression();
    ensureToken(tRPAREN);
    return expr;
  } else {
    error("Unexpected token: %s", tokenStr(currentToken()));
    return 0;
  }
}
// Parses a statement block.  With needBraces the block must be wrapped
// in '{' '}'; otherwise it runs until EOF.  With scoped (the default) a
// fresh variable scope surrounds the block.  Stray semicolons are
// skipped.
BlockNode* Parser::parseBlock(bool needBraces, const bool scoped/* = true */) {
  if (needBraces) {
    ensureToken(tLBRACE);
  }
  if (scoped) {
    pushScope();
  }

  BlockNode* block = new BlockNode(_currentTokenIndex, _currentScope);
  TokenKind sentinel = needBraces ? tRBRACE : tEOF;
  while (currentToken() != sentinel) {
    if (currentToken() == tSEMICOLON) {
      consumeToken();
      continue;
    }
    AstNode* statement = parseStatement();
    // Ignore statements that doesn't result in AST nodes, such
    // as variable or function declaration.
    if (statement != 0) {
      block->add(statement);
    }
  }

  if (scoped) {
    popScope();
  }
  if (needBraces) {
    ensureToken(tRBRACE);
  }
  return block;
}
// fullelem := beginelem elems endelem
// Parses one complete element: opening tag (with attributes), children,
// and the matching closing tag.  Returns a null NodePtr when the
// lookahead is not an element start.
NodePtr fullelem() {
    if (tryConsumeToken(XTT_beginelem)) {
        Node_Element *e = new Node_Element();
        NodePtr r(e); // r owns e from here on
        {
            // Opening tag: strip '<' and '>' then parse name/attributes.
            const std::string& s = getPreviewToken().value;
            ElemTagSyntax::Parser(s.substr(1, s.size() - 2), e->tag, e->attris);
        }
        elems(e->children);
        consumeToken(XTT_endelem);
        {
            // Closing tag: strip "</" and '>'; it must repeat the tag
            // name and carry no attributes.
            const std::string& s = getPreviewToken().value;
            std::string tag;
            AttriMap m;
            ElemTagSyntax::Parser(s.substr(2, s.size() - 3), tag, m);
            PARSE_ASSERT(tag == e->tag && m.empty());
        }
        return r;
    }
    return NodePtr();
}
/*!
 * factor ::= ( 'id' | 'text' | '(' expression ')' ) ( '+' | '*' | )
 *
 * Parses one factor: a symbol, a quoted terminal (surrounding quotes
 * stripped), or a parenthesized expression; optionally followed by a
 * '+' (one-or-more) or '*' (zero-or-more, built as Star over Plus).
 */
Node* Parser::parseFactor() {
    Node* n = 0;
    switch (token_) {
        case Lexer::Token::identifier:
            n = new SymbolNode(token_value_);
            consumeToken();
            break;
        case Lexer::Token::text:
            // Drop the leading and trailing quote characters.
            n = new TerminalNode(token_value_.substr(1, token_value_.size() - 2));
            consumeToken();
            break;
        case Lexer::Token::l_paren:
            consumeToken();
            n = new ParenNode(parseExpression());
            if (token_ != Lexer::Token::r_paren) {
                return errorNode("missing terminating character \")\"");
            }
            consumeToken();
            break;
        default:
            assert(false && "unexpected token");
            break;
    }
    // Optional repetition suffix.
    switch (token_) {
        case Lexer::Token::plus:
            consumeToken();
            n = new PlusNode(n);
            break;
        case Lexer::Token::star:
            consumeToken();
            n = new StarNode(new PlusNode(n));
            break;
        default:
            break;
    }
    return n;
}
// Parses and resolves one type expression.  Tried in order:
//  - a generic type variable (optionally candy-prefixed for "optional")
//    when dynamism allows generic type variables in a class context,
//  - the dynamic "self" class type token when dynamism allows it,
//  - a callable type "grapes args... [arrow return] watermelon",
//  - otherwise a plain named type, including its generic arguments.
// When dynamicType is non-null it is set true for the two dynamic cases
// and false otherwise.
Type Type::parseAndFetchType(Type contextType, EmojicodeChar theNamespace, TypeDynamism dynamism, bool *dynamicType){
    if (dynamicType) {
        *dynamicType = false;
    }
    if (dynamism & AllowGenericTypeVariables && contextType.type == TT_CLASS &&
        (nextToken()->type == VARIABLE || (nextToken()->value[0] == E_CANDY && nextToken()->nextToken->type == VARIABLE))) {
        if (dynamicType) {
            *dynamicType = true;
        }

        bool optional = false;
        const Token *variableToken = consumeToken();
        if (variableToken->value[0] == E_CANDY) {
            // Candy prefix marks the variable's type as optional.
            variableToken = consumeToken();
            optional = true;
        }

        auto it = contextType.eclass->ownGenericArgumentVariables.find(variableToken->value);
        if (it != contextType.eclass->ownGenericArgumentVariables.end()){
            Type type = it->second;
            type.optional = optional;
            return type;
        }
        else {
            compilerError(variableToken, "No such generic type variable \"%s\".", variableToken->value.utf8CString());
        }
    }
    else if (nextToken()->value[0] == E_RAT) {
        const Token *token = consumeToken();

        if(!(dynamism & AllowDynamicClassType)){
            compilerError(token, "🐀 not allowed here.");
        }

        if (dynamicType) {
            *dynamicType = true;
        }
        // The dynamic class type resolves to the surrounding context type.
        return contextType;
    }
    else if (nextToken()->value[0] == E_GRAPES ||
             (nextToken()->value[0] == E_CANDY && nextToken()->nextToken->type == VARIABLE)) {
        bool optional = false;
        if (nextToken()->value[0] == E_CANDY) {
            consumeToken();
            optional = true;
        }
        consumeToken(); // the grapes token itself

        Type t(TT_CALLABLE, optional);
        t.arguments = 0;
        // Slot 0 holds the return type; default is nothingness.
        t.genericArguments.push_back(typeNothingness);

        // Argument types until the arrow or the closing watermelon.
        while (!(nextToken()->type == IDENTIFIER &&
                 (nextToken()->value[0] == E_WATERMELON || nextToken()->value[0] == E_RIGHTWARDS_ARROW))) {
            t.arguments++;
            t.genericArguments.push_back(parseAndFetchType(contextType, theNamespace, dynamism, nullptr));
        }

        // Optional explicit return type after the rightwards arrow.
        if(nextToken()->type == IDENTIFIER && nextToken()->value[0] == E_RIGHTWARDS_ARROW){
            consumeToken();
            t.genericArguments[0] = parseAndFetchType(contextType, theNamespace, dynamism, nullptr);
        }

        const Token *token = consumeToken(IDENTIFIER);
        if (token->value[0] != E_WATERMELON) {
            compilerError(token, "Expected 🍉.");
        }

        return t;
    }
    else {
        // Plain named type: resolve the name, then its generic arguments.
        EmojicodeChar typeName, typeNamespace;
        bool optional, existent;
        const Token *token = parseTypeName(&typeName, &typeNamespace, &optional, theNamespace);

        Type type = fetchRawType(typeName, typeNamespace, optional, token, &existent);

        if (!existent) {
            ecCharToCharStack(typeName, nameString);
            ecCharToCharStack(typeNamespace, namespaceString);
            compilerError(token, "Could not find type %s in enamespace %s.", nameString, namespaceString);
        }

        type.parseGenericArguments(contextType, theNamespace, dynamism, token);
        return type;
    }
}
bool Parser::Impl::parseStringList() {
  // string-list := "[" string *("," string) "]" / string
  //  ;; if there is only a single string, the brackets are optional
  //
  // However, since strings are already handled separately from
  // string lists in parseArgument(), our ABNF is modified to:
  // string-list := "[" string *("," string) "]"

  if ( !obtainToken() || atEnd() )
    return false;
  if ( token() != Lexer::Special || tokenValue() != "[" )
    return false;
  if ( scriptBuilder() )
    scriptBuilder()->stringListArgumentStart();
  consumeToken();

  // generic while/switch construct for comma-separated lists. See
  // parseTestList() for another one. Any fix here is likely to apply
  // there, too.
  // lastWasComma starts true so "[]" and a leading comma are rejected.
  bool lastWasComma = true;
  while ( !atEnd() ) {
    if ( !obtainToken() )
      return false;
    switch ( token() ) {
    case Lexer::None:
      break;
    case Lexer::Special:
      assert( tokenValue().length() == 1 );
      switch ( tokenValue()[0].toLatin1() ) {
      case ']':
        consumeToken();
        if ( lastWasComma ) {
          // covers both an empty list and a trailing comma before ']'
          makeError( Error::ConsecutiveCommasInStringList );
          return false;
        }
        if ( scriptBuilder() )
          scriptBuilder()->stringListArgumentEnd();
        return true;
      case ',':
        consumeToken();
        if ( lastWasComma ) {
          makeError( Error::ConsecutiveCommasInStringList );
          return false;
        }
        lastWasComma = true;
        break;
      default:
        makeError( Error::NonStringInStringList );
        return false;
      }
      break;
    case Lexer::QuotedString:
    case Lexer::MultiLineString:
      if ( !lastWasComma ) {
        makeError( Error::MissingCommaInStringList );
        return false;
      }
      lastWasComma = false;
      if ( scriptBuilder() )
        scriptBuilder()->stringListEntry( tokenValue(),
                                          token() == Lexer::MultiLineString,
                                          QString() );
      consumeToken();
      break;
    default:
      makeError( Error::NonStringInStringList );
      return false;
    }
  }
  // Input ended before the closing ']'.
  makeError( Error::PrematureEndOfStringList );
  return false;
}
bool Parser::Impl::parseTestList() {
  // test-list := "(" test *("," test) ")"

  if ( !obtainToken() || atEnd() )
    return false;
  if ( token() != Lexer::Special || tokenValue() != "(" )
    return false;
  if ( scriptBuilder() )
    scriptBuilder()->testListStart();
  consumeToken();

  // generic while/switch construct for comma-separated lists. See
  // parseStringList() for another one. Any fix here is likely to apply
  // there, too.
  // lastWasComma starts true so "()" and a leading comma are rejected.
  bool lastWasComma = true;
  while ( !atEnd() ) {
    if ( !obtainToken() )
      return false;
    switch ( token() ) {
    case Lexer::None:
      break;
    case Lexer::Special:
      assert( tokenValue().length() == 1 );
      assert( tokenValue()[0].toLatin1() );
      switch ( tokenValue()[0].toLatin1() ) {
      case ')':
        consumeToken();
        if ( lastWasComma ) {
          // covers both an empty list and a trailing comma before ')'
          makeError( Error::ConsecutiveCommasInTestList );
          return false;
        }
        if ( scriptBuilder() )
          scriptBuilder()->testListEnd();
        return true;
      case ',':
        consumeToken();
        if( lastWasComma ) {
          makeError( Error::ConsecutiveCommasInTestList );
          return false;
        }
        lastWasComma = true;
        break;
      default:
        // NOTE(review): this error constant looks copy-pasted from
        // parseStringList(); a test-list-specific error would be
        // clearer — confirm before changing.
        makeError( Error::NonStringInStringList );
        return false;
      }
      break;
    case Lexer::Identifier:
      if ( !lastWasComma ) {
        makeError( Error::MissingCommaInTestList );
        return false;
      } else {
        lastWasComma = false;
        if ( !parseTest() ) {
          assert( error() );
          return false;
        }
      }
      break;
    default:
      makeUnexpectedTokenError( Error::NonTestInTestList );
      return false;
    }
  }
  // Input ended before the closing ')'.
  makeError( Error::PrematureEndOfTestList );
  return false;
}
bool Parser::Impl::parseCommand() {
  // command   := identifier arguments ( ";" / block )
  // arguments := *argument [ test / test-list ]
  // block     := "{" *command "}"
  // our ABNF:
  // block     := "{" [ command-list ] "}"

  if ( atEnd() )
    return false;

  //
  // identifier
  //
  if ( !obtainToken() || token() != Lexer::Identifier )
    return false;
  if ( scriptBuilder() )
    scriptBuilder()->commandStart( tokenValue() );
  consumeToken();

  //
  // *argument
  //
  if ( !obtainToken() )
    return false;
  if ( atEnd() ) {
    makeError( Error::MissingSemicolonOrBlock );
    return false;
  }
  if ( isArgumentToken() && !parseArgumentList() ) {
    assert( error() );
    return false;
  }

  //
  // test / test-list
  //
  if ( !obtainToken() )
    return false;
  if ( atEnd() ) {
    makeError( Error::MissingSemicolonOrBlock );
    return false;
  }
  if ( token() == Lexer::Special && tokenValue() == "(" ) {
    // test-list
    if ( !parseTestList() ) {
      assert( error() );
      return false;
    }
  } else if ( token() == Lexer::Identifier ) {
    // should be test:
    if ( !parseTest() ) {
      assert( error() );
      return false;
    }
  }

  //
  // ";" / block
  //
  if ( !obtainToken() )
    return false;
  if ( atEnd() ) {
    makeError( Error::MissingSemicolonOrBlock );
    return false;
  }
  if ( token() != Lexer::Special ) {
    makeUnexpectedTokenError( Error::ExpectedBlockOrSemicolon );
    return false;
  }
  if ( tokenValue() == ";" )
    consumeToken();
  else if ( tokenValue() == "{" ) {
    // block
    if ( !parseBlock() )
      return false; // it's an error since we saw '{'
  } else {
    makeError( Error::MissingSemicolonOrBlock );
    return false;
  }

  if ( scriptBuilder() )
    scriptBuilder()->commandEnd();
  return true;
}
// Firmware entry point for an 8051-family MCU: programs clock-related
// flash config bytes, initializes the UART (MIDI baud rate), timers,
// the PCA/PWM hardware and port state, enables interrupts, then spins
// forever consuming buffered tokens (or running the software PWM).
main()
{
    // In-system flash programming sequence for the clock config byte.
    IFD = XTAL;
    IFADRH = 0;
    IFADRL = CKCON2;
    IFMT = PageP;
    ISPCR = ISPEN;
    SCMD = 0x46;
    SCMD = 0xb9;
    Delay_ms(30);
#ifdef BUFFER
    produceCount = 0;
    consumeCount = 0;
#endif
#ifdef PARSER
    note = velocity = keyboard = 0;
    action = IGNORE;
#endif
    TMOD = 0;
    UART_init(31250);                 // configure serial port and baud rate
#ifdef TIMER2
    //PCON2=5;                        // Fosc=Fosc/32, period = 31250us*32 = 1 s (no effect in software simulation)
    T2CON = 0x00;                     /* 0000 1000: falling edge on T2EX reloads;
                                         bit3 EXEN2=1 use external T2EX pin;
                                         bit1 C/T=0 internal timing;
                                         bit0 CP/RL2=0 auto-reload */
    T2MOD = 0x00;
    RCAP2=T2R=65536-TT;               // set Timer2 and the T2 auto-reload register
#endif
    EA=1;
    //AUXIE |= ES2;
#ifdef HARDRAYPWM
    CCAPM0=CCAPM1=CCAPM2=CCAPM3=CCAPM4=ECOM+PWM; // enable CEXn comparators and PWM output
    CMOD=0x00;                        // CPS1-0=00, Fpwm=Fosc/12/256=22.1184MHz/12/256=7.2KHz
    //PCAPWM0=PCAPWM1=PCAPWM2=PCAPWM3=PCAPWM4=PCAPWM5=ECAPH;
    CCAP0H=CCAP1H=CCAP2H=CCAP3H=CCAP4H=~0x00;//0x00; // set (P12/CEX0); average voltage 0V
#ifdef PCATIMER
    //CMOD = 0;                       // PCA clock source CPS1-0:00 = Fosc/12
    CCAPM5=ECOM+MAT+ECCF;             // MAT=1: set CCF when PCA count matches CCAP;
                                      // ECOM=1: enable comparator;
                                      // ECCF=1: interrupt on match (CCFn=1)
    CCAP5L=TTT;
    CCAP5H=TTT>>8;                    // set module 5 compare registers
    AUXIE = EPCA;                     // enable PCA interrupt
    CCF5=0;                           // clear compare flags of modules 0-5
    //CR = 1;
#else
    CCAPM5=ECOM+PWM;
    CCAP5H=~0x00;
#endif
    CR = 1;
    P00VAR=P01VAR=P02VAR=P03VAR=P04VAR=P05VAR=P06VAR=P07VAR=P11VAR=P14VAR=P15VAR=P16VAR=P17VAR=P20VAR=P21VAR=P22VAR=P23VAR=P24VAR=P25VAR=P26VAR=P27VAR=P32VAR=P34VAR=P35VAR=P36VAR=P40VAR=P41VAR=P42VAR=P43VAR=P46VAR=0;
#ifndef LEDRay
    P50VAR=P51VAR=P52VAR=P53VAR=P54VAR=P55VAR=P56VAR=P57VAR=0;
#endif
#else
    P00=P01=P02=P03=P04=P05=P06=P07=P11=P14=P15=P16=P17=P20=P21=P22=P23=P24=P25=P26=P27=P32=P34=P35=P36=P37=P40=P41=P42=P43=P46=P50=P51=P52=P53=P54=P55=P56=P57=0;
#endif
#ifdef TIMER2
    i00=i01=i02=i03=i04=i05=i06=i07=i11=i14=i15=i16=i17=i20=i21=i22=i23=i24=i25=i26=i27=i32=i34=i35=i36=i37=i40=i41=i42=i43=i46=i10000=0;
#ifndef LEDRay
    i50=i51=i52=i53=i54=i55=i56=i57=0;
#endif
#endif
    ES=1;                             // enable serial interrupt
#ifdef TIMER2
    ET2=1;                            // enable Timer2 interrupt
    TR2=1;
#endif
#ifdef TIMER0
    TMOD |= T0_M1;                    // set Timer0 to mode 1, internal timing
    TL0=0;                            //TL0=65536 - TT;
    TH0=0;                            // Timer0 counts from 0
    //TH0=65536 - TT >> 8;            // set the timer value
    ET0=1;                            // enable Timer0 interrupt
    TR0=1;                            // start Timer0 counting
#endif
    go_crazy();
    while(1)
    {
#ifdef BUFFER
        // Busy-wait until the producer has queued at least one byte.
        while (abs(produceCount - consumeCount) == 0)
        {
            softPWM();
        }
        consumeToken( buffer[consumeCount++]);
        if( consumeCount >= BUFFER_SIZE )
            consumeCount = 0;         // wrap the ring-buffer index
#else
        softPWM();                    // idle spin; other work could be done here
#endif
    }
}
// Parses one textual pulse-program line into this command object.
// Recognized commands: "end program" / "end repeat", "repeat N times:",
// "set channel N to <time> pulses at <freq>|every <time>",
// "turn on|off channel N", "wait <time>".  On failure *error points at
// a static message and parsing stops; on success *error is NULL.
// *repeatDepth tracks repeat/end-repeat nesting across lines.
void PulseStateCommand::parseFromString(const char* input, const char** error,
                                        unsigned* repeatDepth) {
    int index = 0;
    uint32_t val;

    consumeWhitespace(input, &index);
    if (input[index] == 0) {
        // empty line, comment, etc.
        type = noOp;
    } else if (input[index] == 'e') {
        // e.g. "end program" or "end repeat"
        if (!consumeToken("end", input, &index)) {
            *error = "unrecognized command";
            return;
        }
        consumeWhitespace(input, &index);
        if (input[index] == 'p') {
            if (!consumeToken("program", input, &index)) {
                *error = "unrecognized command";
                return;
            }
            if (*repeatDepth != 0) {
                *error = "found \"end program\" while still expecting an \"end repeat\"";
                return;
            }
            type = endProgram;
        } else if (input[index] == 'r') {
            if (!consumeToken("repeat", input, &index)) {
                *error = "unrecognized command";
                return;
            }
            if (*repeatDepth == 0) {
                *error = "found \"end repeat\" without matching \"repeat\"";
                return;
            }
            type = endRepeat;
            *repeatDepth -= 1;
        } else {
            *error = "expected \"repeat\" or \"program\"";
            return;
        }
    } else if (input[index] == 'r') {
        // e.g. "repeat 12 times:"
        if (!consumeToken("repeat", input, &index)) {
            *error = "unrecognized command";
            return;
        }
        type = repeat;
        consumeWhitespace(input, &index);
        if (!consumeUInt32(input, &index, &repeatCount)) {
            *error = "expected repeat count";
            return;
        }
        consumeWhitespace(input, &index);
        if (!consumeToken("times", input, &index)) {
            *error = "expected \"times\"";
            return;
        }
        consumeWhitespace(input, &index);
        if (!consumeToken(":", input, &index)) {
            *error = "expected \":\"";
            return;
        }
        if (*repeatDepth == maxRepeatNesting) {
            *error = "repeats nested too deeply";
            return;
        }
        *repeatDepth += 1;
    } else if (input[index] == 's') {
        // e.g. "set channel 3 to 213 us pulses at 15.1 Hz"
        if (!consumeToken("set", input, &index)) {
            *error = "unrecognized command";
            return;
        }
        type = setChannel;
        consumeWhitespace(input, &index);
        if (!consumeToken("channel", input, &index)) {
            *error = "expected \"channel\"";
            return;
        }
        consumeWhitespace(input, &index);
        if (!consumeUInt32(input, &index, &val)) {
            *error = "expected channel number";
            return;
        }
        if (val > numChannels || val == 0) {
            *error = "channel number must be between 1 and 8";
            return;
        }
        channel = val;
        consumeWhitespace(input, &index);
        if (!consumeToken("to", input, &index)) {
            *error = "expected \"to\"";
            return;
        }
        consumeWhitespace(input, &index);
        if (!consumeTime(input, &index, &onTime)) {
            *error = "expected time, e.g. \"2 s\", "
                     "\"13 ms\", \"12 us\", or \"15 \u00B5s\"";
            return;
        }
        consumeWhitespace(input, &index);
        if (!consumeToken("pulses", input, &index)) {
            *error = "expected \"pulses\"";
            return;
        }
        consumeWhitespace(input, &index);
        // The period may be given as a frequency ("at ... Hz") or as a
        // direct time interval ("every ...").
        Microseconds period;
        if (input[index] == 'a') {
            // e.g. "at 12 Hz"
            if (!consumeToken("at", input, &index)) {
                *error = "expected \"at\" or \"every\"";
                return;
            }
            consumeWhitespace(input, &index);
            if (!consumeFrequency(input, &index, &period)) {
                *error = "expected frequency, e.g. \"2.3 Hz\" or \"15 kHz\"";
                return;
            }
        } else {
            // e.g. "every 2 s"
            if (!consumeToken("every", input, &index)) {
                *error = "expected \"at\" or \"every\"";
                return;
            }
            consumeWhitespace(input, &index);
            if (!consumeTime(input, &index, &period)) {
                *error = "expected time, e.g. \"2 s\", "
                         "\"13 ms\", \"12 us\", or \"15 \u00B5s\"";
                return;
            }
        }
        if (period < onTime) {
            *error = "pulse duration longer than total period";
            return;
        }
        offTime = period - onTime;
    } else if (input[index] == 't') {
        // e.g. "turn off channel 4" or "turn on channel 1"
        if (!consumeToken("turn", input, &index)) {
            *error = "unrecognized command";
            return;
        }
        type = setChannel;
        consumeWhitespace(input, &index);
        // Peek at "on"/"off" to pick the constant-on or constant-off
        // duty cycle before requiring the full token.
        const char* expectedToken;
        if (input[index] == 'o' && input[index + 1] == 'n') {
            expectedToken = "on";
            onTime = forever;
            offTime = 0;
        } else {
            expectedToken = "off";
            onTime = 0;
            offTime = forever;
        }
        if (!consumeToken(expectedToken, input, &index)) {
            *error = "expected \"on\" or \"off\"";
            return;
        }
        consumeWhitespace(input, &index);
        if (!consumeToken("channel", input, &index)) {
            *error = "expected \"channel\"";
            return;
        }
        consumeWhitespace(input, &index);
        if (!consumeUInt32(input, &index, &val)) {
            *error = "expected channel number";
            return;
        }
        if (val > numChannels || val == 0) {
            *error = "channel number must be between 1 and 8";
            return;
        }
        channel = val;
    } else if (input[index] == 'w') {
        // e.g. "wait 182 us"
        if (!consumeToken("wait", input, &index)) {
            *error = "unrecognized command";
            return;
        }
        type = wait;
        consumeWhitespace(input, &index);
        if (!consumeTime(input, &index, &waitTime)) {
            *error = "expected time, e.g. \"2 s\", "
                     "\"13 ms\", \"12 us\", or \"15 \u00B5s\"";
            return;
        }
    } else {
        *error = "unrecognized command";
        return;
    }
    // Nothing but whitespace may follow a complete command.
    consumeWhitespace(input, &index);
    if (input[index] != 0) {
        *error = "unexpected text found after end of command";
        return;
    }
    *error = NULL;
}
// Require the current token to be `token`; report an error otherwise.
// The token is consumed either way.
void Parser::ensureToken(TokenKind token) {
    const TokenKind actual = currentToken();
    if (actual != token) {
        error("'%s' expected, seen %s", tokenStr(token), tokenStr(actual));
    }
    consumeToken();
}
// Accumulates inline content into a paragraph until something that ends
// it: two consecutive newlines, EOF, verbatim content, or a block
// command.  A block command at the very start is parsed and returned
// directly instead of a paragraph.
BlockContentComment *Parser::parseParagraphOrBlockCommand() {
  SmallVector<InlineContentComment *, 8> Content;

  while (true) {
    switch (Tok.getKind()) {
    case tok::verbatim_block_begin:
    case tok::verbatim_line_name:
    case tok::eof:
      assert(Content.size() != 0);
      break; // Block content or EOF ahead, finish this paragraph.

    case tok::command:
      if (S.isBlockCommand(Tok.getCommandName())) {
        if (Content.size() == 0)
          return parseBlockCommand();
        break; // Block command ahead, finish this paragraph.
      }
      if (S.isInlineCommand(Tok.getCommandName())) {
        Content.push_back(parseInlineCommand());
        continue;
      }

      // Not a block command, not an inline command ==> an unknown command.
      Content.push_back(S.actOnUnknownCommand(Tok.getLocation(),
                                              Tok.getEndLocation(),
                                              Tok.getCommandName()));
      consumeToken();
      continue;

    case tok::newline: {
      consumeToken();
      if (Tok.is(tok::newline) || Tok.is(tok::eof)) {
        consumeToken();
        break; // Two newlines -- end of paragraph.
      }
      // Single newline inside a paragraph: record it on the previous
      // piece of content, if any.
      if (Content.size() > 0)
        Content.back()->addTrailingNewline();
      continue;
    }

    // Don't deal with HTML tag soup now.
    case tok::html_start_tag:
      Content.push_back(parseHTMLStartTag());
      continue;

    case tok::html_end_tag:
      Content.push_back(parseHTMLEndTag());
      continue;

    case tok::text:
      Content.push_back(S.actOnText(Tok.getLocation(),
                                    Tok.getEndLocation(),
                                    Tok.getText()));
      consumeToken();
      continue;

    case tok::verbatim_block_line:
    case tok::verbatim_block_end:
    case tok::verbatim_line_text:
    case tok::html_ident:
    case tok::html_equals:
    case tok::html_quoted_string:
    case tok::html_greater:
    case tok::html_slash_greater:
      llvm_unreachable("should not see this token");
    }
    break;
  }

  return S.actOnParagraphComment(copyArray(llvm::makeArrayRef(Content)));
}
// Constructor: stores the collaborators and primes the token stream so
// Tok holds the first token before any parse method runs.
Parser::Parser(Lexer &L, Sema &S, llvm::BumpPtrAllocator &Allocator,
               const SourceManager &SourceMgr, DiagnosticsEngine &Diags):
    L(L), S(S), Allocator(Allocator), SourceMgr(SourceMgr), Diags(Diags) {
  consumeToken();
}
// Parses an HTML start tag inside a documentation comment: the tag
// name, a list of attributes (bare, or ident="value"), and the closing
// '>' or '/>'.  Malformed input produces diagnostics and a best-effort
// finished tag with an invalid close location.
HTMLStartTagComment *Parser::parseHTMLStartTag() {
  assert(Tok.is(tok::html_start_tag));
  HTMLStartTagComment *HST =
      S.actOnHTMLStartTagStart(Tok.getLocation(),
                               Tok.getHTMLTagStartName());
  consumeToken();

  SmallVector<HTMLStartTagComment::Attribute, 2> Attrs;
  while (true) {
    switch (Tok.getKind()) {
    case tok::html_ident: {
      Token Ident = Tok;
      consumeToken();
      if (Tok.isNot(tok::html_equals)) {
        // Attribute without a value.
        Attrs.push_back(HTMLStartTagComment::Attribute(Ident.getLocation(),
                                                       Ident.getHTMLIdent()));
        continue;
      }
      Token Equals = Tok;
      consumeToken();
      if (Tok.isNot(tok::html_quoted_string)) {
        Diag(Tok.getLocation(),
             diag::warn_doc_html_start_tag_expected_quoted_string)
          << SourceRange(Equals.getLocation());
        // Keep the attribute name, then skip the malformed value tokens.
        Attrs.push_back(HTMLStartTagComment::Attribute(Ident.getLocation(),
                                                       Ident.getHTMLIdent()));
        while (Tok.is(tok::html_equals) ||
               Tok.is(tok::html_quoted_string))
          consumeToken();
        continue;
      }
      Attrs.push_back(HTMLStartTagComment::Attribute(
                              Ident.getLocation(),
                              Ident.getHTMLIdent(),
                              Equals.getLocation(),
                              SourceRange(Tok.getLocation(),
                                          Tok.getEndLocation()),
                              Tok.getHTMLQuotedString()));
      consumeToken();
      continue;
    }

    case tok::html_greater:
      HST = S.actOnHTMLStartTagFinish(HST,
                                      copyArray(llvm::makeArrayRef(Attrs)),
                                      Tok.getLocation(),
                                      /* IsSelfClosing = */ false);
      consumeToken();
      return HST;

    case tok::html_slash_greater:
      HST = S.actOnHTMLStartTagFinish(HST,
                                      copyArray(llvm::makeArrayRef(Attrs)),
                                      Tok.getLocation(),
                                      /* IsSelfClosing = */ true);
      consumeToken();
      return HST;

    case tok::html_equals:
    case tok::html_quoted_string:
      // '=' or a string without a preceding attribute name.
      Diag(Tok.getLocation(),
           diag::warn_doc_html_start_tag_expected_ident_or_greater);
      while (Tok.is(tok::html_equals) ||
             Tok.is(tok::html_quoted_string))
        consumeToken();
      if (Tok.is(tok::html_ident) ||
          Tok.is(tok::html_greater) ||
          Tok.is(tok::html_slash_greater))
        continue;

      return S.actOnHTMLStartTagFinish(HST,
                                       copyArray(llvm::makeArrayRef(Attrs)),
                                       SourceLocation(),
                                       /* IsSelfClosing = */ false);

    default:
      // Not a token from an HTML start tag.  Thus HTML tag prematurely ended.
      HST = S.actOnHTMLStartTagFinish(HST,
                                      copyArray(llvm::makeArrayRef(Attrs)),
                                      SourceLocation(),
                                      /* IsSelfClosing = */ false);
      // Suppress the "started here" note when the tag spans a single
      // line (or line numbers are unavailable) to keep output short.
      bool StartLineInvalid;
      const unsigned StartLine = SourceMgr.getPresumedLineNumber(
                                                  HST->getLocation(),
                                                  &StartLineInvalid);
      bool EndLineInvalid;
      const unsigned EndLine = SourceMgr.getPresumedLineNumber(
                                                  Tok.getLocation(),
                                                  &EndLineInvalid);
      if (StartLineInvalid || EndLineInvalid || StartLine == EndLine)
        Diag(Tok.getLocation(),
             diag::warn_doc_html_start_tag_expected_ident_or_greater)
          << HST->getSourceRange();
      else {
        Diag(Tok.getLocation(),
             diag::warn_doc_html_start_tag_expected_ident_or_greater);
        Diag(HST->getLocation(), diag::note_doc_html_tag_started_here)
          << HST->getSourceRange();
      }
      return HST;
    }
  }
}
// function := 'function' TYPE IDENT '(' params ')' ( 'native' STRING ';'
//           | block )
// Parses a function declaration, builds its signature (slot 0 is the
// return type), declares the formals in a fresh scope, and registers
// the function in the current scope.  Returns 0: function nodes are
// registered, not inserted into the statement AST.
FunctionNode* Parser::parseFunction() {
    uint32_t tokenIndex = _currentTokenIndex;
    ensureKeyword("function");

    if (currentToken() != tIDENT) {
        error("identifier expected");
    }
    const string& returnTypeName = currentTokenValue();
    VarType returnType = nameToType(returnTypeName);
    if (returnType == VT_INVALID) {
      error("wrong return type");
    }
    consumeToken();

    if (currentToken() != tIDENT) {
        error("name expected");
    }
    const string& name = currentTokenValue();
    consumeToken();

    Signature signature;
    signature.push_back(SignatureElement(returnType, "return"));

    ensureToken(tLPAREN);
    while (currentToken() != tRPAREN) {
        const string& parameterTypeName = currentTokenValue();
        VarType parameterType = nameToType(parameterTypeName);
        if (parameterType == VT_INVALID) {
            error("wrong parameter type");
        }
        consumeToken();
        const string& parameterName = currentTokenValue();
        // NOTE(review): the name is read before the tIDENT check below;
        // error() presumably aborts, so the order is harmless — confirm.
        if (currentToken() != tIDENT) {
            error("identifier expected");
        }
        consumeToken();
        signature.push_back(SignatureElement(parameterType, parameterName));

        if (currentToken() == tCOMMA) {
            consumeToken();
        }
    }
    ensureToken(tRPAREN);

    BlockNode* body = 0;
    // Declare the formals (signature slots 1..N) in the function scope.
    pushScope();
    for (uint32_t i = 1; i < signature.size(); i++) {
      const string& name = signature[i].second;
      VarType type = signature[i].first;
      if (!_currentScope->declareVariable(name, type)) {
        error("Formal \"%s\" already declared", name.c_str());
      }
    }
    if ((currentToken() == tIDENT) && currentTokenValue() == "native") {
        // Native function: body is a single NativeCallNode + return.
        consumeToken();
        if (currentToken() != tSTRING) {
            error("Native name expected, got %s", tokenStr(currentToken()));
        }
        pushScope();
        body = new BlockNode(_currentTokenIndex, _currentScope);
        body->add(new NativeCallNode(tokenIndex, currentTokenValue(), signature));
        consumeToken();
        ensureToken(tSEMICOLON);
        body->add(new ReturnNode(0, 0));
        popScope();
    } else {
        body = parseBlock(true, false);
        // Guarantee a trailing return with the type's default value.
        if (body->nodes() == 0 ||
            !(body->nodeAt(body->nodes() - 1)->isReturnNode())) {
            body->add(new ReturnNode(0, defaultReturnExpr(returnType)));
        }
    }
    popScope();

    if (_currentScope->lookupFunction(name) != 0) {
        error("Function %s already defined", name.c_str());
    }
    FunctionNode* result = new FunctionNode(tokenIndex, name, signature, body);
    _currentScope->declareFunction(result);

    // We don't add function node into AST.
    return 0;
}
bool Parser::Impl::parseTest() {
  // test := identifier arguments
  // arguments := *argument [ test / test-list ]

  //
  // identifier
  //
  if(!obtainToken() || atEnd())
    return false;
  if(token() != Lexer::Identifier)
    return false;
  if(scriptBuilder())
    scriptBuilder()->testStart(tokenValue());
  consumeToken();

  //
  // *argument
  //
  if(!obtainToken())
    return false;
  if(atEnd()) // a test w/o args
    goto TestEnd;
  if(isArgumentToken() && !parseArgumentList())
  {
    assert(error());
    return false;
  }

  //
  // test / test-list
  //
  if(!obtainToken())
    return false;
  if(atEnd()) // a test w/o nested tests
    goto TestEnd;
  if(token() == Lexer::Special && tokenValue() == "(") // test-list
  {
    if(!parseTestList())
    {
      assert(error());
      return false;
    }
  }
  else if(token() == Lexer::Identifier) // should be test:
  {
    if(!parseTest())
    {
      assert(error());
      return false;
    }
  }

TestEnd:
  if(scriptBuilder())
    scriptBuilder()->testEnd();
  return true;
}
void Parser::ensureKeyword(const string& keyword) { if (currentToken() != tIDENT || currentTokenValue() != keyword) { error("keyword '%s' expected", currentTokenValue().c_str()); } consumeToken(); }
// Parses one module-definition-file directive into `ret`.  Recognizes
// EXPORTS, HEAPSIZE, LIBRARY, NAME, STACKSIZE and VERSION.  Returns
// true on success or at EOF (ret untouched at EOF); false with a
// diagnostic on an unknown directive or malformed operands.
bool Parser::parseOne(Directive *&ret) {
  consumeToken();
  switch (_tok._kind) {
  case Kind::eof:
    return true;
  case Kind::kw_exports: {
    // EXPORTS: collect export descriptors until one fails to parse.
    std::vector<PECOFFLinkingContext::ExportDesc> exports;
    for (;;) {
      PECOFFLinkingContext::ExportDesc desc;
      if (!parseExport(desc))
        break;
      exports.push_back(desc);
    }
    ret = new (_alloc) Exports(exports);
    return true;
  }
  case Kind::kw_heapsize: {
    // HEAPSIZE
    uint64_t reserve, commit;
    if (!parseMemorySize(reserve, commit))
      return false;
    ret = new (_alloc) Heapsize(reserve, commit);
    return true;
  }
  case Kind::kw_library: {
    // LIBRARY: ensure the output name carries a .dll extension.
    std::string name;
    uint64_t baseaddr;
    if (!parseName(name, baseaddr))
      return false;
    if (!StringRef(name).endswith_lower(".dll"))
      name.append(".dll");
    ret = new (_alloc) Library(name, baseaddr);
    return true;
  }
  case Kind::kw_stacksize: {
    // STACKSIZE
    uint64_t reserve, commit;
    if (!parseMemorySize(reserve, commit))
      return false;
    ret = new (_alloc) Stacksize(reserve, commit);
    return true;
  }
  case Kind::kw_name: {
    // NAME
    std::string outputPath;
    uint64_t baseaddr;
    if (!parseName(outputPath, baseaddr))
      return false;
    ret = new (_alloc) Name(outputPath, baseaddr);
    return true;
  }
  case Kind::kw_version: {
    // VERSION
    int major, minor;
    if (!parseVersion(major, minor))
      return false;
    ret = new (_alloc) Version(major, minor);
    return true;
  }
  default:
    error(_tok, Twine("Unknown directive: ") + _tok._range);
    return false;
  }
}
// Advance the token cursor past any run of whitespace tokens.
void MetaParser::skipWhitespace() {
  for (; getCurTok().is(tok::space); consumeToken())
    ;
}