void LineParser::parse( char* pszLine ) { DEBUG(("LineParser::prase(%s)\n", pszLine)); if (NULL == pszLine) return; int flagIsInQuotes = 0; int flagIsEscaped = 0; char* pszLast = pszLine; char* pszCursor = pszLine; // parser loop while (*pszCursor != '\0') { if (flagIsEscaped) flagIsEscaped = 0; else { switch(*pszCursor) { case ' ': if (!flagIsInQuotes) { // extract this token createToken(pszLast, pszCursor); // store the new last-position marker pszLast = pszCursor + 1; } break; case '\"': flagIsInQuotes = !flagIsInQuotes; break; case '\\': flagIsEscaped = !flagIsEscaped; break; } } // advance to the next character pszCursor++; } // add the last token of the string to the list createToken(pszLast, pszCursor); }
int checkNumber(FILE *fp, int tokenStart){ if(isdigit(tokenStart)){ int i; char lexeme[20] = {'\0'}; lexeme[0] = tokenStart; //Loop through the next 1-4 digits to build lexeme for(i = 1; i < 5; i++){ char workingDigit = fpeek(fp); if(isdigit(workingDigit)){ lexeme[i] = fgetc(fp); } else if(isalpha(workingDigit) || workingDigit == '_'){ return 1; //Error, Variable does not start with letter } else{ break; } } if(i == 5 && isdigit(fpeek(fp))){ return 2; //Error, number is too long } createToken(lexeme, numbersym); return 0; } }
/// Returns the shared "dot" (match-any) token, constructing it lazily
/// on the first call and caching it in fDot thereafter.
Token* TokenFactory::getDot() {
    if (fDot == 0) {
        fDot = createToken(Token::T_DOT);
    }
    return fDot;
}
/** Appends a token whose content string is supplied by the caller. */
void pushInitializedToken(Lexer *self, int type, char *content) {
    Token *token = createToken(self->lineNumber, self->charNumber, self->fileName);

    token->type = type;
    token->content = content;

    verboseModeMessage("Lexed token: %-15s, type %s", token->content,
                       getTokenTypeName(token->type));
    pushBackItem(self->tokenStream, token);
}
/** Appends a token whose content is sliced out of the lexer's input,
 *  from startPos up to (but not including) the current position. */
void pushToken(Lexer *self, int type) {
    Token *token = createToken(self->lineNumber, self->charNumber, self->fileName);

    token->type = type;
    token->content = extractToken(self, self->startPos, self->pos - self->startPos);

    verboseModeMessage("Lexed token: %-15s, type %s", token->content,
                       getTokenTypeName(token->type));
    pushBackItem(self->tokenStream, token);
}
/**
 * Adds all subjects in a PKCS12 file and notifies the frontend of them.
 *
 * Certificates that fail the key-usage or subject-filter checks, or for
 * which token creation fails, are released; created tokens keep their
 * references into the shared PKCS12 data.
 */
static TokenError _backend_addFile(Backend *backend, const char *data,
                                   size_t length, void *tag) {
    SharedPKCS12 *p12 = pkcs12_parse(data, length);
    if (!p12) return TokenError_BadFile;

    STACK_OF(X509) *certList = pkcs12_listCerts(p12->data);
    if (!certList) {
        /* BUG FIX: the parsed PKCS12 previously leaked on this path. */
        pkcs12_release(p12);
        return TokenError_Unknown;
    }

    int certCount = sk_X509_num(certList);
    for (int i = 0; i < certCount; i++) {
        X509 *x = sk_X509_value(certList, i);

        if (!certutil_hasKeyUsage(x, backend->notifier->keyUsage))
            goto dontAddCert;

        X509_NAME *id = X509_get_subject_name(x);
        if (!certutil_matchSubjectFilter(backend->notifier->subjectFilter, id))
            goto dontAddCert;

        PKCS12Token *token = createToken(backend, p12, id, tag);
        if (token) {
            backend->notifier->notifyFunction((Token*)token, TokenChange_Added);
            continue;
        }

      dontAddCert:
        X509_free(x);
    }

    /* BUG FIX: free the stack container itself (previously leaked).
     * sk_X509_free does not touch the elements, so certificates still
     * referenced by created tokens remain allocated. */
    sk_X509_free(certList);

    pkcs12_release(p12);
    return TokenError_Success;
}
int checkVariable(FILE *fp, int tokenStart){ if(isalpha(tokenStart) || tokenStart == '_'){ int i; char lexeme[20] = {'\0'}; lexeme[0] = tokenStart; //Loop through the next 1-10 characters to build lexeme for(i = 1; i < 11; i++){ char workingChar = fpeek(fp); if(isalnum(workingChar) || workingChar == '_'){ lexeme[i] = fgetc(fp); } else{ break; } } //If there's a 12th alphanumeric character after the 11th, name is too long if(i == 11 && (isalnum(fpeek(fp)) || fpeek(fp) == '_')){ return 3; //Error, name is too long } createToken(lexeme, identsym); return 0; } else{ return -1; } }
/** Appends a token with caller-supplied content.  The token records the
 *  span [charNumber - tokenLength, charNumber) of the current lexeme. */
void pushInitializedToken(Lexer *self, int type, char *content) {
    // Length (in characters) of the lexeme just scanned.
    int tokenLength = self->pos - self->startPos;

    Token *token = createToken(self->lineNumber, self->startPos,
                               self->charNumber - tokenLength,
                               self->charNumber, self->fileName);
    token->type = type;
    token->content = content;

    verboseModeMessage("Lexed token: %-15s, type %s", token->content,
                       getTokenTypeName(token->type));
    pushBackItem(self->tokenStream, token);
}
/* Builds a positionless number token holding an enumerator's value as a
 * decimal unsigned constant. */
pToken createEnumNumToken(unsigned long num)
{
    pToken token = createToken(createTokData(), NULL);

    token->data->code = Y_NUMBER;
    token->data->repr.constant.type = CONSTT_UINT_CONST;
    token->data->repr.constant.radix = RADT_DECIMAL;
    token->data->repr.constant.repr.lIntConst = num;

    return token;
}
/* Builds an identifier token positioned one space after `pos` on the same
 * line.  The identifier text is duplicated and owned by the token. */
pToken createIDTokenAfter(char *str, pTokPos pos)
{
    pToken token = createToken(createTokData(), dupTokPos(pos, NULL));

    token->pos->spacesBefore = 1;
    token->pos->linesBefore = 0;

    token->data->code = Y_ID;
    token->data->repr.string = registerString(wicStrdup(str), FREE_STRING);

    return token;
}
/// Produces the next token from the input, or NULL at end of input.
Token *ScannerImp::nextToken() {
    // Nothing but whitespace left: signal end of input.
    if (skip_spaces()) {
        return NULL;
    }

    runMachines();
    TType tokenType = manager->getType();
    skip_comment(&tokenType);

    // Push back the characters the automata read past the lexeme's end.
    int lexemeLength = manager->getLexemLength();
    int surplusChars = manager->ungetCtr();
    buffer->ungetChar(surplusChars);

    return createToken(tokenType, lexemeLength, x, y);
}
/// Lazily builds (and caches in fGrapheme) a pattern matching one grapheme
/// cluster: an optional base character followed by any number of combining
/// marks or virama+letter pairs.
Token* TokenFactory::getGraphemePattern() {
    if (fGrapheme == 0) {
        // Base character: any assigned code point that is neither a
        // combining mark (M) nor a control (C).
        Token* base_char = createRange();  // [{ASSIGNED}]-[{M},{C}]
        base_char->mergeRanges(getRange(fgUniAssigned));
        base_char->subtractRanges(getRange(fgUniMark));
        base_char->subtractRanges(getRange(fgUniControl));

        // NOTE(review): these code points appear to be the virama signs of
        // the Indic scripts plus Thai/Tibetan equivalents -- confirm
        // against the Unicode data this table was derived from.
        Token* virama = createRange();
        virama->addRange(0x094D, 0x094D);
        virama->addRange(0x09CD, 0x09CD);
        virama->addRange(0x0A4D, 0x0A4D);
        virama->addRange(0x0ACD, 0x0ACD);
        virama->addRange(0x0B4D, 0x0B4D);
        virama->addRange(0x0BCD, 0x0BCD);
        virama->addRange(0x0C4D, 0x0C4D);
        virama->addRange(0x0CCD, 0x0CCD);
        virama->addRange(0x0D4D, 0x0D4D);
        virama->addRange(0x0E3A, 0x0E3A);
        virama->addRange(0x0F84, 0x0F84);

        // Combining characters other than virama, plus Hangul jamo and one
        // extra that can extend a cluster.
        Token* combiner_wo_virama = createRange();
        combiner_wo_virama->mergeRanges(getRange(fgUniMark));
        combiner_wo_virama->addRange(0x1160, 0x11FF);  // hangul_medial and hangul_final
        combiner_wo_virama->addRange(0xFF9F, 0xFF9F);  // extras

        // left = base_char?  (the base character is optional)
        Token* left = TokenFactory::createUnion();
        left->addChild(base_char, this);
        left->addChild(createToken(Token::T_EMPTY), this);

        // foo = ((virama letter) | combiner_wo_virama)*  appended to left.
        Token* foo = createUnion();
        foo->addChild(TokenFactory::createConcat(virama,getRange(fgUniLetter)), this);
        foo->addChild(combiner_wo_virama, this);
        foo = createClosure(foo);
        foo = createConcat(left, foo);

        fGrapheme = foo;
    }
    return fGrapheme;
}
/* One post-order evaluation step for an arithmetic parse tree: number
 * nodes are pushed onto the operand stack; operator nodes pop two
 * operands, apply the operator, and push (and enqueue) the result node.
 * Exits the process on division by zero, unknown operator, or OOM. */
void evaluatorHelper(ParseTreeNode *p_node){
    if(0 == strcmp("num", p_node->m_token->m_type)){
        push(p_node);
    }
    else if(0 == strcmp("op", p_node->m_token->m_type)){
        char *op;
        int *int_val = malloc(sizeof(int));
        Token *new_token;
        ParseTreeNode *container;

        /* BUG FIX: malloc result was previously used unchecked. */
        if(NULL == int_val){
            printf("Error: Out of memory.");
            exit(1);
        }

        /* The top of the stack is the most recently pushed node, i.e. the
         * RIGHT operand; the second pop yields the LEFT operand. */
        ParseTreeNode *operand_one_container = pop();
        ParseTreeNode *operand_two_container = pop();
        int operand_two = *(operand_one_container->m_token->m_data_container->m_int);
        int operand_one = *(operand_two_container->m_token->m_data_container->m_int);

        op = p_node->m_token->m_data_container->m_char;
        if(0 == strcmp("*", op))
            *int_val = operand_one * operand_two;
        else if(0 == strcmp("/", op)){
            if(0 == operand_two){
                printf("Error: Division by zero.");
                exit(1);
            }
            *int_val = operand_one / operand_two;
        }
        else if(0 == strcmp("+", op))
            *int_val = operand_one + operand_two;
        else if(0 == strcmp("-", op))
            *int_val = operand_one - operand_two;
        else{
            /* BUG FIX: an unrecognized operator previously left *int_val
             * uninitialized yet still pushed the result.  Fail loudly,
             * mirroring the division-by-zero path. */
            printf("Error: Unknown operator.");
            exit(1);
        }

        new_token = createToken("num", "0");
        new_token->m_data_container->m_int = int_val;
        container = createParseTreeNode(new_token);
        push(container);
        enqueue(container);
    }
}
// Function that gets VK token either from settings file or user (~/.vkp) int getToken(char * token) { int rc; // get absolute settings file path FILE *frc = NULL; char *fname = pntHomeCat("/.vkp", 5); check(fname != NULL, "Failed to get path to settings"); if (access( fname, F_OK ) != -1) { // exists - try to open settings and get the token frc = fopen(fname, "r"); check(frc, "Failed to open: %s", fname); fscanf(frc, "%s", token); /* printf("Token retrieved successfully!\n"); */ } else { // not exists - ask user to validate with OAuth rc = createToken(token); check(rc == OK, "Failed to generate token"); frc = fopen(fname, "w+"); check(frc, "Failed to open: %s", fname); fprintf(frc, "%s", token); printf("\nNew token has been saved successfully!\n"); } if (frc) fclose(frc); return OK; error: if (frc) fclose(frc); return -1; }
/*!
 * \brief Get the next token in the stream
 *
 * Tries, in order: end-of-input, literal symbols, identifiers/keywords,
 * numbers, string literals, character literals.  On a lexical error the
 * tokenizer's error state is set (via setError) and a default-constructed
 * token is returned.
 */
HllTokenizer::Token HllTokenizer::getNext()
{
    Token next;

    // Start out by moving past any whitespace
    skipCharacters(whitespace);

    // Check if we've reached the end of the file
    if(!fillBuffer(1)) {
        next = createToken(Token::TypeEnd, "");
        return next;
    }

    // Scan through the list of literals and see if any match
    if(scanLiteral(literals, next)) {
        return next;
    }

    // If no literals matched, see if an identifier can be constructed
    if(std::isalpha(buffer()[0]) || buffer()[0] == '_') {
        size_t len = 0;
        // NOTE(review): this scan accepts only letters and '_', so digits
        // terminate an identifier -- confirm that is intended for this
        // language (many languages allow digits after the first char).
        while(std::isalpha(buffer()[len]) || buffer()[len] == '_') {
            len++;
            if(!fillBuffer(len + 1)) {
                break;
            }
        }

        TokenType type = TypeIdentifier;
        std::string string = buffer().substr(0, len);

        // Check if the string is a keyword
        for(std::string &keyword : keywords) {
            if(string == keyword) {
                type = TypeLiteral;
                break;
            }
        }

        // Construct a token out of the characters found
        next = createToken(type, string);
        emptyBuffer(len);
        return next;
    }

    // If an identifier couldn't be found, check for a number
    if(std::isdigit(buffer()[0])) {
        size_t len = 0;
        while(std::isdigit(buffer()[len])) {
            len++;
            if(!fillBuffer(len + 1)) {
                break;
            }
        }

        // Construct a token out of the characters found
        next = createToken(TypeNumber, buffer().substr(0, len));
        emptyBuffer(len);
        return next;
    }

    // Double-quoted string literal: scan to the closing quote
    if(buffer()[0] == '\"') {
        size_t len = 1;
        while(true) {
            if(!fillBuffer(len + 1)) {
                setError("Unterminated string literal");
                return next;
            }

            if(buffer()[len] == '\"') {
                break;
            }
            len++;
        }

        // Construct a token out of the characters found (quotes stripped)
        std::string text = buffer().substr(1, len - 1);
        emptyBuffer(len + 1);
        // Only emit a token if escape sequences evaluate cleanly;
        // otherwise evaluateEscapes has set the error state.
        if(evaluateEscapes(text)) {
            next = createToken(TypeString, text);
        }
        return next;
    }

    // Single-quoted character literal: scan to the closing quote
    if(buffer()[0] == '\'') {
        size_t len = 1;
        while(true) {
            if(!fillBuffer(len + 1)) {
                setError("Unterminated character literal");
                return next;
            }

            if(buffer()[len] == '\'') {
                break;
            }
            len++;
        }

        // Construct a token out of the characters found (quotes stripped)
        std::string text = buffer().substr(1, len - 1);
        emptyBuffer(len + 1);
        // A character literal must reduce to exactly one char after
        // escape evaluation.
        if(evaluateEscapes(text)) {
            if(text.size() == 1) {
                next = createToken(TypeChar, text);
            } else {
                setError("Invalid character literal");
            }
        }
        return next;
    }

    // Nothing matched, log an error
    std::stringstream ss;
    ss << "Illegal symbol '" << buffer()[0] << "'";
    setError(ss.str());

    return next;
}
/* Wraps a pre-scanned comment string in a token at `pos`; the token takes
 * ownership of the string (registered with FREE_STRING). */
pToken createCommentToken(char *str, pTokPos pos)
{
    pToken token = createToken(createTokData(), pos);

    token->data->code = Y_PRE_COMMENT;
    token->data->repr.string = registerString(str, FREE_STRING);

    return token;
}
/* Builds a positionless "=" token.  The literal is static, so it is
 * registered without the FREE_STRING ownership flag. */
pToken createEQUALToken(void)
{
    pToken token = createToken(createTokData(), NULL);

    token->data->code = Y_EQUAL;
    token->data->repr.string = registerString("=", !FREE_STRING);

    return token;
}
/* Builds a positionless "+" token.  The literal is static, so it is
 * registered without the FREE_STRING ownership flag. */
pToken createPLUSToken(void)
{
    pToken token = createToken(createTokData(), NULL);

    token->data->code = Y_PLUS;
    token->data->repr.string = registerString("+", !FREE_STRING);

    return token;
}
// Modulo node: parses its two children in order -- the value first,
// then the modulus (createToken consumes input, so order matters).
Token_MOD2::Token_MOD2(int depth) : Token(depth) {
    const int childDepth = depth + 1;
    val = createToken(childDepth);
    mod = createToken(childDepth);
}
/// Handles an "#pragma omp ..." line: classifies the directive, records its
/// constructs and clauses in a PragmaDirective keyed by source location, and
/// (except for threadprivate) replaces the pragma with a synthetic compound
/// statement referencing each private variable so Sema can see them.
void OMPPragmaHandler::HandlePragma(Preprocessor &PP, PragmaIntroducerKind Introducer, SourceRange IntroducerRange, Token &FirstTok) {

  Diags.Report(IntroducerRange.getBegin(), DiagFoundPragmaStmt);

  // TODO: Clean this up because I'm too lazy to now
  // NOTE(review): DirectivePointer is heap-allocated but only stored in
  // Directives on the success path -- the early returns below leak it.
  PragmaDirective * DirectivePointer = new PragmaDirective;
  PragmaDirective &Directive = *DirectivePointer;

  // First lex the pragma statement extracting the variable names
  SourceLocation Loc = IntroducerRange.getBegin();
  Token Tok = FirstTok;
  StringRef ident = getIdentifier(Tok);

  // Not an OpenMP pragma at all: drain the line and bail out.
  if (ident != "omp") {
    LexUntil(PP, Tok, clang::tok::eod);
    return;
  }

  PP.Lex(Tok);
  ident = getIdentifier(Tok);

  bool isParallel = false;
  bool isThreadPrivate = false;

  // Classify the first directive keyword after "omp".
  if (ident == "parallel") {
    PragmaConstruct C;
    C.Type = ParallelConstruct;
    C.Range = getTokenRange(Tok, PP);
    Directive.insertConstruct(C);
    isParallel = true;
  } else if (ident == "sections"
             || ident == "section"
             || ident == "task"
             || ident == "taskyield"
             || ident == "taskwait"
             || ident == "atomic"
             || ident == "ordered") {
    // Constructs this handler does not support: report and drain the line.
    Diags.Report(Tok.getLocation(), DiagUnsupportedConstruct);
    LexUntil(PP, Tok, clang::tok::eod);
    return;
  } else if (ident == "for") {
    PragmaConstruct C;
    C.Type = ForConstruct;
    C.Range = getTokenRange(Tok, PP);
    Directive.insertConstruct(C);
  } else if (ident == "threadprivate") {
    isThreadPrivate = true;
    PragmaConstruct C;
    C.Type = ThreadprivateConstruct;
    C.Range = getTokenRange(Tok, PP);
    Directive.insertConstruct(C);
  } else if (ident == "single") {
    PragmaConstruct C;
    C.Type = SingleConstruct;
    C.Range = getTokenRange(Tok, PP);
    Directive.insertConstruct(C);
  } else if (ident == "master") {
    PragmaConstruct C;
    C.Type = MasterConstruct;
    C.Range = getTokenRange(Tok, PP);
    Directive.insertConstruct(C);
  } else if (ident == "critical" || ident == "flush") {
    // Ignored Directive
    // (Critical, Flush)
    LexUntil(PP, Tok, clang::tok::eod);
    return;
  } else if (ident == "barrier") {
    PragmaConstruct C;
    C.Type = BarrierConstruct;
    C.Range = getTokenRange(Tok, PP);
    Directive.insertConstruct(C);
  } else {
    Diags.Report(Tok.getLocation(), DiagUnknownDirective);
    return;
  }

  // threadprivate keeps Tok on the directive so the clause loop below can
  // treat its variable list like a clause; everything else advances.
  if (!isThreadPrivate) {
    PP.Lex(Tok);
  }

  // "parallel" may be combined with a second keyword ("parallel for").
  if (isParallel) {
    ident = getIdentifier(Tok);
    if (ident == "sections") {
      Diags.Report(Tok.getLocation(), DiagUnsupportedConstruct);
      LexUntil(PP, Tok, clang::tok::eod);
      return;
    } else if (ident == "for") {
      PragmaConstruct C;
      C.Type = ForConstruct;
      C.Range = getTokenRange(Tok, PP);
      Directive.insertConstruct(C);
      PP.Lex(Tok);
    } else {
      // Just a standard "#pragma omp parallel" clause
      if (Tok.isNot(clang::tok::eod)
          && PragmaDirective::getClauseType(ident) == UnknownClause) {
        Diags.Report(Tok.getLocation(), DiagUnknownClause);
        return;
      }
    }
  }

  // If we've made it this far then we either have:
  // "#pragma omp parallel",
  // "#pragma omp parallel for",
  // "#pragma omp for",
  // "#pragma omp threadprivate

  // Need to read in the options, if they exists
  // Don't really care about them unless there exists a private(...) list
  // In which case, get the variables inside that list
  // But we read them all in anyway.

  // There's also threadprivate, which won't have any clauses, but will have
  // a list of private variables just after the threadprivate directive
  // Treating threadprivate as a clause and directive at the same time.

  // Clause loop: one iteration per clause until end-of-directive.
  while(Tok.isNot(clang::tok::eod)) {
    PragmaClause C;

    ident = getIdentifier(Tok);
    C.Type = PragmaDirective::getClauseType(ident);

    if (C.Type == UnknownClause) {
      Diags.Report(Tok.getLocation(), DiagUnknownClause);
      return;
    }

    SourceLocation clauseStart = Tok.getLocation();
    SourceLocation clauseEnd = PP.getLocForEndOfToken(clauseStart);

    PP.Lex(Tok);

    // Optional parenthesized variable list, e.g. private(i, j).
    if (Tok.is(clang::tok::l_paren)) {
      if (!handleList(Tok, PP, C)) {
        Diags.Report(clauseStart, DiagMalformedStatement);
        LexUntil(PP, Tok, clang::tok::eod);
        return;
      }
      clauseEnd = PP.getLocForEndOfToken(Tok.getLocation());

      // Eat the clang::tok::r_paren
      PP.Lex(Tok);
    }

    C.Range = SourceRange(clauseStart, clauseEnd);
    Directive.insertClause(C);
  }

  SourceLocation EndLoc = PP.getLocForEndOfToken(Tok.getLocation());
  Directive.setRange(SourceRange(Loc, EndLoc));
  Directives.insert(std::make_pair(Loc.getRawEncoding(), DirectivePointer));

  // Then replace with parseable compound statement to catch in Sema, and
  // references to private variables;
  // {
  //   i;
  //   j;
  //   k;
  // }

  // If it's a threadprivate directive, then we skip this completely
  if (isThreadPrivate) {
    return;
  }

  // Synthesize "{ var1; var2; ... }" so Sema sees a reference to every
  // private variable at the pragma's location.
  set<IdentifierInfo *> PrivateVars = Directive.getPrivateIdentifiers();
  int tokenCount = 2 + 2 * PrivateVars.size();  // braces + (ident, semi) pairs
  int currentToken = 0;

  Token * Toks = new Token[tokenCount];
  Toks[currentToken++] = createToken(Loc, clang::tok::l_brace);

  set<IdentifierInfo *>::iterator PrivIt;
  for (PrivIt = PrivateVars.begin(); PrivIt != PrivateVars.end(); PrivIt++) {
    Toks[currentToken++] = createToken(Loc, clang::tok::identifier, *PrivIt);
    Toks[currentToken++] = createToken(Loc, clang::tok::semi);
  }

  Toks[currentToken++] = createToken(EndLoc, clang::tok::r_brace);

  assert(currentToken == tokenCount);

  // Suppress -Wunused-value over the synthetic statements only.
  Diags.setDiagnosticGroupMapping("unused-value",
                                  clang::diag::MAP_IGNORE,
                                  Loc);
  Diags.setDiagnosticGroupMapping("unused-value",
                                  clang::diag::MAP_WARNING,
                                  EndLoc);

  PP.EnterTokenStream(Toks, tokenCount, true, true);
}
// Conditional node: parses the predicate, then the "then" branch, then the
// "else" branch (createToken consumes input, so order matters).
Token_IF::Token_IF(int depth) : Token(depth) {
    const int childDepth = depth + 1;
    pred_val = createToken(childDepth);
    then_val = createToken(childDepth);
    else_val = createToken(childDepth);
}
// Sine node: parses its single operand one level deeper.
Token_SIN::Token_SIN(int depth) : Token(depth) {
    val = createToken(depth + 1);
}
// Cosine node: parses its single operand one level deeper.
Token_COS::Token_COS(int depth) : Token(depth) {
    val = createToken(depth + 1);
}
// Exponential node: parses its single operand one level deeper.
Token_EXP::Token_EXP(int depth) : Token(depth) {
    val = createToken(depth + 1);
}
// Multiplication node: parses the left factor, then the right factor
// (createToken consumes input, so order matters).
Token_MULT::Token_MULT(int depth) : Token(depth) {
    const int childDepth = depth + 1;
    v1 = createToken(childDepth);
    v2 = createToken(childDepth);
}
// Builds the organize-collection dialog's token pool and schema line.
// The order of addToken/insertWidget calls below defines the on-screen
// order, so it must be preserved.
OrganizeCollectionWidget::OrganizeCollectionWidget( QWidget *parent )
    : FilenameLayoutWidget( parent )
{
    m_configCategory = "OrganizeCollectionDialog";

    // TODO: also supported by TrackOrganizer:
    // folder theartist thealbumartist rating filesize length

    // Draggable metadata tokens...
    m_tokenPool->addToken( createToken( Title ) );
    m_tokenPool->addToken( createToken( Artist ) );
    m_tokenPool->addToken( createToken( AlbumArtist ) );
    m_tokenPool->addToken( createToken( Album ) );
    m_tokenPool->addToken( createToken( Genre ) );
    m_tokenPool->addToken( createToken( Composer ) );
    m_tokenPool->addToken( createToken( Comment ) );
    m_tokenPool->addToken( createToken( Year ) );
    m_tokenPool->addToken( createToken( TrackNumber ) );
    m_tokenPool->addToken( createToken( DiscNumber ) );
    m_tokenPool->addToken( createToken( Folder ) );
    m_tokenPool->addToken( createToken( FileType ) );
    m_tokenPool->addToken( createToken( Initial ) );

    // ...followed by separator/punctuation tokens.
    m_tokenPool->addToken( createToken( Slash ) );
    m_tokenPool->addToken( createToken( Underscore ) );
    m_tokenPool->addToken( createToken( Dash ) );
    m_tokenPool->addToken( createToken( Dot ) );
    m_tokenPool->addToken( createToken( Space ) );

    // show some non-editable tags before and after
    // but only if screen size is large enough (BR: 283361)
    const QRect screenRect = QApplication::desktop()->screenGeometry();
    if( screenRect.width() >= 1024 )
    {
        // Fixed prefix: "<collection root>/" ...
        m_schemaLineLayout->insertWidget( 0,
                                          createStaticToken( CollectionRoot ), 0 );
        m_schemaLineLayout->insertWidget( 1,
                                          createStaticToken( Slash ), 0 );

        // ...and fixed suffix: ".<file type>".
        m_schemaLineLayout->insertWidget( m_schemaLineLayout->count(),
                                          createStaticToken( Dot ) );
        m_schemaLineLayout->insertWidget( m_schemaLineLayout->count(),
                                          createStaticToken( FileType ) );
    }

    m_syntaxLabel->setText( buildFormatTip() );

    populateConfiguration();
}
// RGB node: parses the red, green, and blue channels in that order
// (createToken consumes input, so order matters).
Token_RGB::Token_RGB(int depth) : Token(depth) {
    const int childDepth = depth + 1;
    r = createToken(childDepth);
    g = createToken(childDepth);
    b = createToken(childDepth);
}
// Cosine-RGB node: parses its single operand one level deeper.
Token_COSRGB::Token_COSRGB(int depth) : Token(depth) {
    val = createToken(depth + 1);
}
// Blend node: parses the two values to blend, then the blend ratio
// (createToken consumes input, so order matters).
Token_BLEND::Token_BLEND(int depth) : Token(depth) {
    const int childDepth = depth + 1;
    val1 = createToken(childDepth);
    val2 = createToken(childDepth);
    ratio = createToken(childDepth);
}
// Addition node: parses the left addend, then the right addend
// (createToken consumes input, so order matters).
Token_PLUS::Token_PLUS(int depth) : Token(depth) {
    const int childDepth = depth + 1;
    v1 = createToken(childDepth);
    v2 = createToken(childDepth);
}