/* Parses a "function NAME(args) {body}" declaration and emits a function
 * (or class, if the body looks class-like) tag for NAME.  `token` is the
 * shared lookahead token; a scratch token holds the function name. */
static void parseFunction (tokenInfo *const token)
{
	tokenInfo *const name = newToken ();
	vString *const signature = vStringNew ();
	boolean is_class = FALSE;

	/*
	 * This deals with these formats
	 *	   function validFunctionTwo(a,b) {}
	 */

	readToken (name);
	if (!isType (name, TOKEN_IDENTIFIER))
		goto cleanUp;  /* anonymous or malformed: nothing to tag */

	/* Add scope in case this is an INNER function */
	addToScope(name, token->scope);

	readToken (token);
	/* consume dotted qualifiers ("function a.b.c(...)"); each non-keyword
	 * segment is appended to the name's context */
	while (isType (token, TOKEN_PERIOD))
	{
		readToken (token);
		if ( isKeyword(token, KEYWORD_NONE) )
		{
			addContext (name, token);
			readToken (token);
		}
	}

	if ( isType (token, TOKEN_OPEN_PAREN) )
		skipArgumentList(token, FALSE, signature);  /* captures "(a,b)" into signature */

	if ( isType (token, TOKEN_OPEN_CURLY) )
	{
		/* parseBlock() reports whether the body resembles a class
		 * (e.g. method assignments) rather than a plain function */
		is_class = parseBlock (token, name);
		if ( is_class )
			makeClassTag (name, signature);
		else
			makeFunctionTag (name, signature);
	}

	findCmdTerm (token, FALSE, FALSE);

 cleanUp:
	vStringDelete (signature);
	deleteToken (name);
}
// scans for a c-style comment bool opScanner::CComment(const inputtype& Input, int& current) { int size = Input.Size(); if (current + 1 < size) { int one = current; int two = current + 1; if (Input[one] == '/' && Input[two] == '*') { opToken newToken(T_CCOMMENT, "/*", CurrentLine); bool bFoundEnd = false; current += 2; while (current + 1 < size) { one = current; two = current + 1; if (Input[one] == '*' && Input[two] == '/') { newToken.Value += "*/"; current += 2; bFoundEnd = true; break; } else { if (IsNewline(Input[one])) ++CurrentLine; newToken.Value += Input[one]; ++current; } } // check for unbounded comments if (bFoundEnd) { Tokens.PushBack(newToken); return true; } else { opError::UnboundedCommentError(Root, newToken.Line); ScanError = true; } } } return false; }
/* Parses statements inside the current scope until the matching '}' or EOF.
 * When `extraScope` is non-NULL it is appended to the scratch token's scope
 * and `parentKind` recorded as its parent kind (e.g. a class/trait kind).
 * On return, parentToken reflects where scanning stopped, with its original
 * parentKind restored. */
static void enterScope (tokenInfo *const parentToken, const vString *const extraScope, const int parentKind)
{
	tokenInfo *token = newToken ();
	int origParentKind = parentToken->parentKind;

	copyToken (token, parentToken, true);

	if (extraScope)
	{
		addToScope (token, extraScope);
		token->parentKind = parentKind;
	}

	readToken (token);
	while (token->type != TOKEN_EOF && token->type != TOKEN_CLOSE_CURLY)
	{
		/* sub-parsers return false when they already consumed the
		 * following token themselves */
		bool readNext = true;

		switch (token->type)
		{
			case TOKEN_OPEN_CURLY:
				/* anonymous block: recurse without adding scope */
				enterScope (token, NULL, -1);
				break;

			case TOKEN_KEYWORD:
				readNext = parseFunction (token);
				break;

			case TOKEN_VARIABLE:
				readNext = parseVariable (token);
				break;

			default:
				break;
		}

		if (readNext)
			readToken (token);
	}

	copyToken (parentToken, token, false);
	parentToken->parentKind = origParentKind;  /* restore caller's kind */
	deleteToken (token);
}
void SyntaxAnalyzer::primary(bool isNegative) { if (currentToken.type() == "identifier") { cout << "<Primary> -> <Identifier> <Primary>'" << endl; checkIdExistence(currentToken.lexeme()); int address = table.getAddress(currentToken.lexeme()); if (isNegative) { instTable.genInstr("PUSHI", 0); instTable.genInstr("PUSHM", address); instTable.genInstr("SUB", NIL); } else { instTable.genInstr("PUSHM", address); } newToken(); primaryPrime(); } else if (currentToken.type() == "integer") { cout << "<Primary> -> <Integer>" << endl; int address = table.getAddress(currentToken.lexeme()); if (isNegative) { instTable.genInstr("PUSHI", -std::stoi(currentToken.lexeme())); } else { instTable.genInstr("PUSHI", std::stoi(currentToken.lexeme())); } newToken(); } else if (currentToken.lexeme() == "(") { cout << "<Primary> -> ( <Expression> )" << endl; newToken(); expression(); if (currentToken.lexeme() == ")") { cout << "<Primary> -> ( <Expression> )" << endl; newToken(); } else { errorMessage(")"); } } else if (currentToken.lexeme() == "true") { cout << "<Primary> -> true" << endl; instTable.genInstr("PUSHI", 1); newToken(); } else if (currentToken.lexeme() == "false") { cout << "<Primary> -> false" << endl; instTable.genInstr("PUSHI", 0); newToken(); } else { errorMessage("<Identifier>, <Integer>, \")\", \"true\", or \"false\""); } }
// <IDs> -> <Identifier> <IDs>'
// When `type` is non-empty the identifier is a declaration and is added to
// the symbol table; when `fromRead` is set the identifier is a `read`
// target, so STDIN/POPM instructions are generated for it.
void SyntaxAnalyzer::ids(const string& type, bool fromRead)
{
	if (currentToken.type() == "identifier") {
		cout << "<IDs> -> <Identifier> <IDs>'" << endl;
		if (type != "") {
			addToTable(currentToken.lexeme(), type);
		}
		if (fromRead) {
			checkIdExistence(currentToken.lexeme());
			// read a value from stdin and store it at the id's address
			instTable.genInstr("STDIN", NIL);
			int addr = table.getAddress(currentToken.lexeme());
			instTable.genInstr("POPM", addr);
		}
		newToken();
	}
	else {
		errorMessage("<Identifier>");
	}
	// NOTE(review): idsPrime runs even after errorMessage — presumably
	// errorMessage aborts or recovers; confirm before relying on it.
	idsPrime(type);
}
/* Allocates a raw ANTLR3 common token and presets its type field.
 * Returns NULL if the underlying allocation failed. */
ANTLR3_API pANTLR3_COMMON_TOKEN
antlr3CommonTokenNew(ANTLR3_UINT32 ttype)
{
	/* Create a raw token with the interface installed */
	pANTLR3_COMMON_TOKEN token = newToken();

	/* Only touch the structure when allocation succeeded */
	if (token == NULL)
	{
		return NULL;
	}

	token->type = ttype;

	/* All good */
	return token;
}
/* Appends a new token of the given type, spanning [l->start, l->pos), to
 * the lexer's doubly-linked token list and makes it the new tail.
 *
 * Fix: the function is declared to return char but had no return statement,
 * which is undefined behavior if a caller ever reads the result.  The
 * signature is kept for compatibility; 0 is now returned explicitly. */
static char addToken(Lexer *l, TokenType type) {
	Token *tok = newToken(type, l->start, l->pos);

	/* link the new token at the tail */
	tok->prev = l->tail;
	tok->next = NULL;
	if (l->tail == NULL) {
		l->head = tok;  /* first token becomes the head too */
	} else {
		l->tail->next = tok;
	}
	l->tail = tok;

	return 0;
}
/* Allocates a raw ANTLR3 common token and sets its type via the installed
 * interface.
 *
 * Note: newToken() signals allocation failure by returning the ANTLR3
 * "no memory" function-pointer sentinel rather than NULL, so that sentinel
 * — not NULL — is what propagates to the caller on failure. */
ANTLR3_API pANTLR3_COMMON_TOKEN
antlr3CommonTokenNew(ANTLR3_UINT32 ttype)
{
	pANTLR3_COMMON_TOKEN token;

	/* Create a raw token with the interface installed */
	token = newToken();

	/* only set the type when we really got a token back */
	if (token != (pANTLR3_COMMON_TOKEN)ANTLR3_FUNC_PTR(ANTLR3_ERR_NOMEM))
	{
		token->setType(token, ttype);
	}

	/* All good */
	return token;
}
// operator - parses an operator bool opScanner::Operator(const inputtype& Input, int& current) { char c = Input[current]; // if this character is an operator character, // try to parse it as an operator if (IsOperatorChar(c)) { opString op = c; int start = current + 1; int end = Input.Size(); Token id; int length; // create a string with all consecutive // operator characters while (start != end) { c = Input[start]; if (!IsOperatorChar(c)) break; op += c; ++start; } // find the largest string that is a // valid operator while ((id = opTokenMap::GetToken(op)) == T_UNKNOWN) op.PopBack(); length = op.Length(); if (length == 0) return false; opToken newToken(id, op, CurrentLine); Tokens.PushBack(newToken); for (int i = 0; i < length; i++) { ++current; } return true; } return false; }
/* Scans a (System)Verilog function declaration.  The last identifier read
 * before a '(' or ';' is taken as the function name; for SystemVerilog a
 * "class::func" prefix creates a class context first. */
static void processFunction (tokenInfo *const token)
{
	int c;
	tokenInfo *classType;

	/* Search for function name
	 * Last identifier found before a '(' or a ';' is the function name */
	c = skipWhite (vGetc ());
	do
	{
		readIdentifier (token, c);
		c = skipWhite (vGetc ());
		/* Identify class type prefixes and create respective context*/
		if (isLanguage (Lang_systemverilog) && c == ':')
		{
			c = vGetc ();
			if (c == ':')
			{
				verbose ("Found function declaration with class type %s\n", vStringValue (token->name));
				classType = newToken ();
				vStringCopy (classType->name, token->name);
				classType->kind = K_CLASS;
				/* NOTE(review): createContext appears to take ownership of
				 * classType — confirm it is freed when the context closes. */
				createContext (classType);
				currentContext->classScope = TRUE;
			}
			else
			{
				/* a lone ':' is not a scope operator; push it back */
				vUngetc (c);
			}
		}
	} while (c != '(' && c != ';' && c != EOF);

	if ( vStringLength (token->name) > 0 )
	{
		verbose ("Found function: %s\n", vStringValue (token->name));

		/* Create tag */
		createTag (token);

		/* Get port list from function */
		processPortList (c);
	}
}
Token* scannerGetToken() // FUNCTION, WHICH RETURNS POINTER ON TOKEN STRUCTURE { Token *token = newToken(); scannerFillToken(token); if (getError()) { freeToken(&token); fclose(source); source = NULL; return NULL; } if (token->type == STT_EOF) { fclose(source); source = NULL; } return token; }
/* Reads the content of an ES6 template string (`...`) into `string`,
 * assuming the opening backtick has already been consumed.  Handles
 * backslash escapes and recurses into ${...} substitutions using the full
 * token machinery so nested strings, comments and regexes are skipped
 * correctly. */
static void parseTemplateString (vString *const string)
{
	int c;
	do
	{
		c = fileGetc ();
		if (c == '`')
			break;  /* closing backtick: done */
		vStringPut (string, c);
		if (c == '\\')
		{
			/* keep the escaped character verbatim (even '`') */
			c = fileGetc();
			vStringPut(string, c);
		}
		else if (c == '$')
		{
			c = fileGetc ();
			if (c != '{')
				fileUngetc (c);  /* plain '$', not a substitution */
			else
			{
				int depth = 1;
				/* we need to use the real token machinery to handle strings,
				 * comments, regexes and whatnot */
				tokenInfo *token = newToken ();
				LastTokenType = TOKEN_UNDEFINED;
				vStringPut(string, c);
				/* consume tokens until the matching '}' closes the
				 * substitution (or the file ends) */
				do
				{
					readTokenFull (token, FALSE, string);
					if (isType (token, TOKEN_OPEN_CURLY))
						depth++;
					else if (isType (token, TOKEN_CLOSE_CURLY))
						depth--;
				}
				while (! isType (token, TOKEN_EOF) && depth > 0);
				deleteToken (token);
			}
		}
	}
	while (c != EOF);
	vStringTerminate (string);
}
// parses the next token // returns false if done void opScanner::ScanTokens(const inputtype& Input) { // if we've reached the end of the Input stream, // add an EOF token and return false int size = Input.Size(); int current = 0; // TODO: the input list of chars is really a bad idea // Input should be a vector, and we should not alter it, // instead we should iterate over it (maybe w/ an iterator we pass // around. while (current != size) { // scan for the next token // (with the correct precedence) if (current != size && Newline(Input, current)) ; else if (current != size && CComment(Input, current)) ; else if (current != size && Comment(Input, current)) ; else if (current != size && String(Input, current)) ; else if (current != size && WhiteSpace(Input, current)) ; else if (current != size && Operator(Input, current)) ; else if (current != size && Hexadecimals(Input, current)) ; else if (current != size && Number(Input, current)) ; else if (current != size && GetId(Input, current)) ; else if (current != size) { opToken newToken(T_ANYCHAR, Input[current], CurrentLine); Tokens.PushBack(newToken); ++current; } } Tokens.PushBack(opToken(T_EOF, "", CurrentLine)); }
// Parses a { ... } block: collects statements into a linked list, warns
// about unreachable code after a return, records whether the block ends
// with a return, and for function bodies enforces (or synthesizes) the
// trailing return statement.
static PSmmAstBlockNode parseBlock(PSmmParser parser, PSmmTypeInfo curFuncReturnType, bool isFuncBlock) {
	assert(parser->curToken->kind == '{');
	getNextToken(parser); // Skip '{'
	PSmmAstBlockNode block = smmNewAstNode(nkSmmBlock, parser->a);
	block->scope = newScopeNode(parser);
	block->scope->returnType = curFuncReturnType;
	PSmmAstNode* nextStmt = &block->stmts;
	PSmmAstNode curStmt = NULL;
	while (parser->curToken->kind != tkSmmEof && parser->curToken->kind != '}') {
		// anything after a return in the same block can never execute
		if (curStmt && curStmt->kind == nkSmmReturn) {
			smmPostMessage(parser->msgs, errSmmUnreachableCode, parser->curToken->filePos);
		}
		curStmt = parseStatement(parser);
		if (curStmt != NULL && curStmt != &errorNode) {
			*nextStmt = curStmt;  // append to the statement list
			nextStmt = &curStmt->next;
		}
	}
	if (curStmt) {
		// a block "ends with return" if its last statement returns directly
		// or is itself a block that ends with a return
		bool isLastStmtReturn = curStmt->kind == nkSmmReturn;
		bool isLastStmtReturningBlock = curStmt->kind == nkSmmBlock && curStmt->asBlock.endsWithReturn;
		block->endsWithReturn = isLastStmtReturn || isLastStmtReturningBlock;
	}
	if (isFuncBlock) {
		bool funcHasReturnType = curFuncReturnType->kind != tiSmmUnknown && curFuncReturnType->kind != tiSmmVoid;
		if (funcHasReturnType && !block->endsWithReturn && curStmt != &errorNode) {
			// a value-returning function must end with a return
			smmPostMessage(parser->msgs, errSmmFuncMustReturnValue, parser->curToken->filePos);
		} else if (!funcHasReturnType && !block->endsWithReturn) {
			// We add empty return statement
			PSmmAstNode retNode = smmNewAstNode(nkSmmReturn, parser->a);
			retNode->token = newToken(tkSmmReturn, "return", parser->curToken->filePos, parser->a);
			retNode->type = curFuncReturnType;
			*nextStmt = retNode;
		}
	}
	expect(parser, '}');
	removeScopeVars(parser);
	return block;
}
/* parses declarations of the form
 * const NAME = VALUE */
static boolean parseConstant (tokenInfo *const token)
{
	tokenInfo *name;
	boolean sawEqualSign;

	readToken (token); /* skip const keyword */

	/* only an identifier (or keyword used as a name) can be defined */
	if (token->type != TOKEN_IDENTIFIER && token->type != TOKEN_KEYWORD)
		return FALSE;

	name = newToken ();
	copyToken (name, token, TRUE);

	readToken (token);
	sawEqualSign = (boolean) (token->type == TOKEN_EQUAL_SIGN);
	if (sawEqualSign)
		makeSimplePhpTag (name, K_DEFINE, ACCESS_UNDEFINED);

	deleteToken (name);
	return sawEqualSign;
}
/**
 * Lex the string into tokens, each of which has a given offset into the string.
 * Lexing is done by the following algorithm:
 * (1) If the current character is a space, and if it is then check the next:
 *     (a) If it is another space, then the token is a tab.
 *     (b) If it is some other character, the token is a space.
 * (2) If the current character is a character (either upper or lower case), or a digit,
 *     then continue until the first non-matching character and that is an ident.
 * (3) If the current character is a #, then ignore everything until the end of the line.
 * (4) If the current character is a newline, then the token is a newline.
 * (5) If the current character is a colon, then the token is just a colon.
 * (6) If the current character is a quote, then read until the endquote and
 *     declare the string as the contents of the string.
 */
Token* lex(char* input, int len) {
	/* `first` is a dummy head node; callers receive first->next.
	 * NOTE(review): if newToken heap-allocates, the head node itself is
	 * never freed — confirm the allocation scheme. */
	Token* first = newToken(0, 0, 0);
	Token* last = first;
	int index = 0;
	/* NOTE(review): the loop stops at len-1, so the final character is only
	 * ever seen as lookahead — presumably the input ends with a terminator
	 * (newline/NUL); confirm with the caller. */
	while (index < len-1) {
		//printf("*");
		int start = index;
		char cur = input[index];
		if (isSpace(cur)) {
			/* two consecutive spaces collapse into a TAB token */
			if (isSpace(input[index+1])) {
				index++;
				addNewToken(last, TAB, start, index);
			} else {
				addNewToken(last, SPACE, index, index);
			}
			index++;
		} else if (isTab(cur)) {
			index++;
			addNewToken(last, TAB, start, index);
		} else if (isChar(cur)) {
			/* run of ident characters */
			while (isChar(input[++index]));
			addNewToken(last, IDENT, start, index);
		} else if (isComment(cur)) {
			/* skip to end of line; no token emitted.
			 * NOTE(review): no bounds check — a comment (or quote below)
			 * without a terminator reads past `len`; confirm inputs are
			 * always newline/quote terminated. */
			while (!isNewLine(input[++index]));
		} else if (isNewLine(cur)) {
			index++;
			addNewToken(last, NEWLINE, index, index);
		} else if (isColon(cur)) {
			index++;
			addNewToken(last, COLON, index, index);
		} else if (isQuote(cur)) {
			/* string token spans the characters between the quotes */
			while (!isQuote(input[++index]));
			addNewToken(last, STRING, start+1, index);
			index++; /* Pass by the end quote. */
		}
		/* advance `last` only when a token was actually appended */
		if (last->next != NULL)
			last = last->next;
	}
	/* every token stream ends with a NEWLINE */
	addNewToken(last, NEWLINE, index, index);
	return first->next;
}
static HSPToken *nextToken(char *p, char **out) { char *r = p; HSPToken *token = NULL; // skip separators r += strspn(r, HSP_SEPARATORS); if(*r != '\0') { // found token, but watch out for a contiguous '{' or '}' token. uint32_t len = strcspn(r, "{}" HSP_SEPARATORS); if(len == 0) len = 1; // started with '{' or '}' token = newToken(r, len); r += len; } // tell the caller how far we got *out = r; // return token or NULL return token; }
// Parses a whole module: wraps all top-level statements in a program node
// holding a single block, and guarantees the block ends with a return
// (synthesizing a zero-valued return when the source omits it).
PSmmAstNode smmParse(PSmmParser parser) {
	if (parser->curToken->kind == tkSmmEof) return NULL;
	PSmmAstNode program = smmNewAstNode(nkSmmProgram, parser->a);
	PSmmAstBlockNode block = smmNewAstNode(nkSmmBlock, parser->a);
	parser->curScope = smmNewAstNode(nkSmmScope, parser->a);
	// an empty declaration list points back at the scope node itself
	parser->curScope->lastDecl = (PSmmAstDeclNode)parser->curScope;
	parser->curScope->returnType = &builtInTypes[tiSmmInt32];
	block->scope = parser->curScope;
	program->next = (PSmmAstNode)block;
	PSmmAstNode* nextStmt = &block->stmts;
	PSmmAstNode curStmt = NULL;

	while (parser->curToken->kind != tkSmmEof) {
		curStmt = parseStatement(parser);
		if (curStmt != NULL && curStmt != &errorNode) {
			*nextStmt = curStmt;  // append to the statement list
			nextStmt = &curStmt->next;
		}
	}

	// decide whether the last statement already returns
	bool isReturnMissing = true;
	if (curStmt) {
		if (curStmt->kind == nkSmmBlock) isReturnMissing = !curStmt->asBlock.endsWithReturn;
		else isReturnMissing = curStmt->kind != nkSmmReturn;
	}

	// Add return stmt if missing
	if (isReturnMissing) {
		curStmt = smmNewAstNode(nkSmmReturn, parser->a);
		struct SmmFilePos fp = parser->curToken->filePos;
		fp.lineNumber++;  // place the synthetic return just past the input
		fp.lineOffset = 0;
		curStmt->token = newToken(tkSmmReturn, "return", fp, parser->a);
		curStmt->type = parser->curScope->returnType;
		curStmt->left = smmGetZeroValNode(parser->curToken->filePos, curStmt->type, parser->a);
		*nextStmt = curStmt;
	}

	// the program node carries the source filename as its representation
	program->token = ibsAlloc(parser->a, sizeof(struct SmmToken));
	program->token->repr = parser->lex->filePos.filename;
	return program;
}
/* Parser entry point: resets the global parsing state and walks the whole
 * file, emitting tags as scopes are entered.
 * (Note: "CurrentNamesapce" is the pre-existing spelling of the global and
 * must be kept as-is.) */
static void findTags (boolean startsInPhpMode)
{
	tokenInfo *const token = newToken ();

	InPhp = startsInPhpMode;
	CurrentStatement.access = ACCESS_UNDEFINED;
	CurrentStatement.impl = IMPL_UNDEFINED;
	CurrentNamesapce = vStringNew ();
	FullScope = vStringNew ();
	AnonymousID = 0;

	do
	{
		enterScope (token, NULL, -1);
	}
	while (token->type != TOKEN_EOF); /* keep going even with unmatched braces */

	vStringDelete (FullScope);
	vStringDelete (CurrentNamesapce);
	deleteToken (token);
}
/* Parses an Eiffel type mark.  On entry `token` holds the first token of
 * the type; on return it holds the token following the type.  Handles
 * anchored types ("like ..."), attachment marks (attached/detachable/
 * expanded), generic parameter lists and "BIT n" types.
 * Always returns TRUE. */
static boolean parseType (tokenInfo *const token)
{
	tokenInfo* const id = newToken ();
	copyToken (id, token);
	readToken (token);
	if (isType (token, TOKEN_COLON))  /* check for "{entity: TYPE}" */
	{
		readToken (id);
		readToken (token);
	}
	if (isKeyword (id, KEYWORD_like))
	{
		/* anchored type: "like Current" or "like some_entity" */
		if (isType (token, TOKEN_IDENTIFIER) ||
				isKeyword (token, KEYWORD_Current))
			readToken (token);
	}
	else
	{
		if (isKeyword (id, KEYWORD_attached) ||
			isKeyword (id, KEYWORD_detachable) ||
			isKeyword (id, KEYWORD_expanded))
		{
			/* skip the attachment mark; the real type name follows */
			copyToken (id, token);
			readToken (token);
		}
		if (isType (id, TOKEN_IDENTIFIER))
		{
#ifdef TYPE_REFERENCE_TOOL
			reportType (id);
#endif
			if (isType (token, TOKEN_OPEN_BRACKET))
				parseGeneric (token, FALSE);  /* generic parameters, e.g. [G] */
			else if ((strcmp ("BIT", vStringValue (id->string)) == 0))
				readToken (token);	/* read token after number of bits */
		}
	}
	deleteToken (id);
	return TRUE;
}
void CAuxParse::StringTokenize(const char* in_str, const char* sepChars, vector<string>& vTokens) { vector<pair<int, int> > vIndTokens; if((sepChars == 0) || (strlen(sepChars) <= 0)) { if((in_str != 0) && (strlen(in_str) > 0)) { vTokens.push_back(string(in_str)); } return; } StringFindTokens(in_str, sepChars, vIndTokens); int numTokens = (int)vIndTokens.size(); if(numTokens <= 0) return; for(int i=0; i<numTokens; i++) { pair<int, int>& curTokenInd = vIndTokens[i]; string newToken(in_str + curTokenInd.first, curTokenInd.second); //removing eventual spaces from beginning of string: const char *OrigStr = newToken.c_str(); if(*OrigStr == ' ') { int lenOrigStr = (int)strlen(OrigStr); char *NewStr = new char[lenOrigStr + 1]; //CAuxParse::StringSymbolsRemoveAtBegin(OrigStr, " \0", NewStr); char strAux[] = " \0"; CAuxParse::StringSymbolsRemoveAtBegin(OrigStr, strAux, NewStr); string cleanedNewToken(NewStr); newToken = cleanedNewToken; delete[] NewStr; } vTokens.push_back(newToken); } }
/* parses namespace declarations
 * 	namespace Foo {}
 * 	namespace Foo\Bar {}
 * 	namespace Foo;
 * 	namespace Foo\Bar;
 * 	namespace;
 * 	napespace {} */
static boolean parseNamespace (tokenInfo *const token)
{
	tokenInfo *nsToken = newToken ();

	/* a new namespace replaces the previous current one */
	vStringClear (CurrentNamesapce);
	copyToken (nsToken, token, FALSE);

	/* collect the (possibly \-separated) namespace path */
	do
	{
		readToken (token);
		if (token->type == TOKEN_IDENTIFIER)
		{
			if (vStringLength (CurrentNamesapce) > 0)
			{
				const char *sep;

				sep = phpScopeSeparatorFor(K_NAMESPACE, K_NAMESPACE);
				vStringCatS (CurrentNamesapce, sep);
			}
			vStringCat (CurrentNamesapce, token->string);
		}
	}
	while (token->type != TOKEN_EOF &&
		   token->type != TOKEN_SEMICOLON &&
		   token->type != TOKEN_OPEN_CURLY);

	vStringTerminate (CurrentNamesapce);
	/* the anonymous forms "namespace;" / "namespace {}" get no tag */
	if (vStringLength (CurrentNamesapce) > 0)
		makeNamespacePhpTag (nsToken, CurrentNamesapce);

	/* braced form: parse the namespace body */
	if (token->type == TOKEN_OPEN_CURLY)
		enterScope (token, NULL, -1);

	deleteToken (nsToken);

	return TRUE;
}
// Reads everything up to (but not including) `delimiter` into the current
// token, keeping the line counter in sync, then steps the iterator past the
// delimiter.  A missing delimiter records an error and emits an empty token.
void SlkToken::getBlockString(const char* delimiter)
{
	newToken();
	const char* pLeft = mIterator.GetLeft();
	const char* pFind = strstr(pLeft, delimiter);
	if (!pFind) {
		// delimiter never appears in the remaining input
		char* pInfo = mErrorAndStringBuffer->NewErrorInfo();
		if (pInfo)
			tsnprintf(pInfo, MAX_ERROR_INFO_CAPACITY, "[line %d ]:Block can't finish, delimiter: %s!", mLineNumber, delimiter);
		endToken();
		return;
	}
	int len = (int)strlen(delimiter);
	const char* p = pLeft;
	// copy characters into the token, advancing the iterator in lock-step
	while (p != pFind){
		if (*p == '\n')
			++mLineNumber;
		pushTokenChar(*p++);
		++mIterator;
	}
	endToken();
	mIterator = mIterator + len;  // skip over the delimiter itself
	return removeFirstAndLastEmptyLine();
}
/* parses a trait:
 * trait Foo {} */
static boolean parseTrait (tokenInfo *const token)
{
	tokenInfo *name;
	boolean readNext;

	readToken (token);

	/* the trait keyword must be followed by the trait's name */
	if (token->type != TOKEN_IDENTIFIER)
		return FALSE;

	name = newToken ();
	copyToken (name, token, TRUE);
	makeSimplePhpTag (name, K_TRAIT, ACCESS_UNDEFINED);

	readToken (token);
	readNext = (boolean) (token->type == TOKEN_OPEN_CURLY);
	if (readNext)
		enterScope (token, name->string, K_TRAIT);

	deleteToken (name);
	return readNext;
}
// parses a number (an integer) bool opScanner::Number(const inputtype& Input, int& current) { char c = Input[current]; int size = Input.Size(); if (IsDigit(c)) { opToken newToken(T_NUMBER, c, CurrentLine); ++current; while (current + 1 < size) { c = Input[current]; if (!IsDigit(c)) break; newToken.Value += c; ++current; } Tokens.PushBack(newToken); return true; } return false; }
// scans for whitespace bool opScanner::WhiteSpace(const inputtype& Input, int& current) { char c = Input[current]; int size = Input.Size(); if (IsWhiteSpace(c)) { opToken newToken(T_WHITESPACE, c, CurrentLine); ++current; while (current < size) { c = Input[current]; if (!IsWhiteSpace(c)) break; newToken.Value += c; ++current; } Tokens.PushBack(newToken); return true; } return false; }
/* Skips a `-prefixed Verilog compiler directive.  For conditional
 * directives (`ifdef/`ifndef/`elsif) the tested symbol is consumed too;
 * for function-like macros the parenthesized argument list is skipped.
 * Returns the first character following the skipped construct.
 * Fix: removed a stray second semicolon after the token declaration. */
static int skipMacro (int c)
{
	tokenInfo *token = newToken ();
	if (c == '`')
	{
		/* Skip keyword */
		if (isIdentifierCharacter (c = vGetc ()))
		{
			readIdentifier (token, c);
			c = vGetc ();
			/* Skip next keyword if macro is `ifdef or `ifndef or `elsif*/
			if (strcmp (vStringValue (token->name), "ifdef") == 0 ||
				strcmp (vStringValue (token->name), "ifndef") == 0 ||
				strcmp (vStringValue (token->name), "elsif") == 0)
			{
				verbose ("%c\n", c);
				c = skipWhite (c);
				readIdentifier (token, c);
				c = vGetc ();
				verbose ("Skipping conditional macro %s\n", vStringValue (token->name));
			}
			/* Skip macro functions */
			else
			{
				c = skipWhite (c);
				if (c == '(')
				{
					c = skipPastMatch ("()");
				}
			}
		}
	}
	deleteToken (token);
	return c;
}
/* Scans the body of a VHDL subprogram until the matching
 * "end [function|procedure] [name]" (or EOF), dispatching nested
 * constructs through parseKeywords().  Extracted because the function and
 * procedure branches of parseSubProgram() duplicated this loop verbatim;
 * `endKeyword` is KEYWORD_FUNCTION or KEYWORD_PROCEDURE. */
static void skipSubProgramBody (tokenInfo * const token, tokenInfo * const name, const int endKeyword)
{
	boolean endSubProgram = FALSE;
	do
	{
		readToken (token);
		if (isKeyword (token, KEYWORD_END))
		{
			readToken (token);
			endSubProgram = isKeywordOrIdent (token, endKeyword, name->string);
			fileSkipToCharacter (';');
		}
		else if (isType (token, TOKEN_EOF))
		{
			endSubProgram = TRUE;  /* never spin forever at EOF */
		}
		else
		{
			parseKeywords (token, TRUE);
		}
	} while (!endSubProgram);
}

/* Parses a VHDL subprogram (function or procedure): emits a prototype tag
 * for a bare declaration (terminated by ';'), or a function/procedure tag
 * followed by a body scan when an "is" body follows. */
static void parseSubProgram (tokenInfo * const token)
{
	tokenInfo *const name = newToken ();
	const vhdlKind kind = isKeyword (token, KEYWORD_FUNCTION) ?
		VHDLTAG_FUNCTION : VHDLTAG_PROCEDURE;
	const int endKeyword = (kind == VHDLTAG_FUNCTION) ?
		KEYWORD_FUNCTION : KEYWORD_PROCEDURE;
	Assert (isKeyword (token, KEYWORD_FUNCTION) ||
		isKeyword (token, KEYWORD_PROCEDURE));
	readToken (name);	/* the name of the function or procedure */
	readToken (token);
	if (isType (token, TOKEN_OPEN_PAREN))
	{
		skipToMatched (token);	/* parameter list */
	}

	if (kind == VHDLTAG_FUNCTION)
	{
		if (isKeyword (token, KEYWORD_RETURN))
		{
			/* Read datatype */
			readToken (token);
			while (! isKeyword (token, KEYWORD_IS) &&
					! isType (token, TOKEN_SEMICOLON) &&
					! isType (token, TOKEN_EOF))
			{
				readToken (token);
			}
		}
	}

	if (isType (token, TOKEN_SEMICOLON))
	{
		/* declaration only: no body follows */
		makeVhdlTag (name, VHDLTAG_PROTOTYPE);
	}
	else if (isKeyword (token, KEYWORD_IS))
	{
		makeVhdlTag (name, kind);
		skipSubProgramBody (token, name, endKeyword);
	}
	deleteToken (name);
}
// Executes one control token of the CSE machine: applies operators,
// resolves identifiers through the environment chain, performs gamma
// (closure application, built-ins, tuple indexing), environment restore,
// beta (conditionals), tau (tuple construction), aug, and closure capture.
// NOTE(review): the stack discipline (what sits where on controlStack /
// executionStack) is established by the caller; comments below describe
// only what this function itself does.
void CSEMachine::processCurrentToken(Token &currToken, stack<Token> &controlStack, stack<Token> &executionStack)
{
	if (currToken.type == RecursiveParser::OPT) {
		// binary operator: pop two operands, push the result
		Token firstToken = executionStack.top();
		executionStack.pop();
		Token secondToken = executionStack.top();
		executionStack.pop();
		Token resultToken = applyOperator(firstToken, secondToken, currToken);
		executionStack.push(resultToken);
	}
	else if (currToken.type == "neg") {
		// unary arithmetic negation of the top integer
		Token firstToken = executionStack.top();
		executionStack.pop();
		int paramVal = atoi(firstToken.value.c_str());
		paramVal = -paramVal;
		Token newToken(intToString(paramVal), RecursiveParser::INT);
		executionStack.push(newToken);
	}
	else if (currToken.type == "not") {
		// boolean negation of the top truth value
		Token firstToken = executionStack.top();
		executionStack.pop();
		if (firstToken.value != "true") {
			executionStack.push(Token("true", "true"));
		}
		else {
			executionStack.push(Token("false", "false"));
		}
	}
	else if (currToken.type == RecursiveParser::ID && isParamter(currToken)) {
		// identifier: look up its bound value along the environment chain
		string varName = currToken.value;
		int temp = currEnv;
		pair<int, string> keyPair(temp, varName);
		map<key_pair, Token>::iterator it = paramMap.find(keyPair);
		while (paramMap.end() == it && temp >= 0) {
			temp = envMap[temp];  // climb to the parent environment
			keyPair.first = temp;
			it = paramMap.find(keyPair);
		}
		if (paramMap.end() != it) {
			Token paramValToken = it->second;
			executionStack.push(paramValToken);
		}
	}
	else if (currToken.type == "gamma") {
		// application: dispatch on what sits atop the execution stack
		Token topExeToken = executionStack.top();
		executionStack.pop();
		if (topExeToken.type == "lambdaClosure") {
			// enter a fresh environment chained to the closure's one
			Token env("env", ++envCounter);
			envMap[envCounter] = topExeToken.lambdaEnv;
			envStack.push(envCounter);
			currEnv = envCounter;
			if (topExeToken.isTuple == false) {
				// single parameter: bind the top of the stack to it
				string paramName = topExeToken.lambdaParam;
				Token paramToken = executionStack.top();
				executionStack.pop();
				pair<int, string> keyPair(envCounter, paramName);
				paramMap[keyPair] = paramToken;
			}
			else {
				// comma-separated parameter list bound from a tuple value
				string tuple = topExeToken.lambdaParam;
				vector<string> params = split(tuple, ',');
				Token valueTuple = executionStack.top();
				executionStack.pop();
				vector<Token> tupleVector = valueTuple.tuple;
				unsigned int i = 0;
				while (i < params.size()) {
					if (params[i] != "") {
						pair<int, string> keyPair(envCounter, params[i].c_str());
						paramMap[keyPair] = tupleVector[i];
					}
					i++;
				}
			}
			controlStack.push(env);
			executionStack.push(env);
			// queue the closure body (its delta) on the control stack
			int lambdaNum = topExeToken.lambdaNum;
			vector<Token> delta = deltaMap[lambdaNum];
			int i = 0;
			while (i < delta.size()) {
				controlStack.push(delta[i]);
				i++;
			}
		}
		else if (topExeToken.type == "YSTAR") {
			// Y*: turn the following closure into an eta (recursion marker)
			Token nextToken = executionStack.top();
			executionStack.pop();
			nextToken.type = "eta";
			executionStack.push(nextToken);
		}
		else if (topExeToken.type == "eta") {
			// recursion: re-apply the closure to itself via two gammas
			Token lambdaToken = topExeToken;
			lambdaToken.type = "lambdaClosure";
			executionStack.push(topExeToken);
			executionStack.push(lambdaToken);
			Token gammaToken("gamma", "gamma");
			controlStack.push(gammaToken);
			controlStack.push(gammaToken);
		}
		else if (topExeToken.value == "Stern" || topExeToken.value == "stern") {
			// Stern: drop the first character of a quoted string literal
			Token stringToken = executionStack.top();
			executionStack.pop();
			string tokenValue = stringToken.value;
			tokenValue = tokenValue.substr(2, tokenValue.size() - 3);
			tokenValue = "'" + tokenValue + "'";
			stringToken.value = tokenValue;
			executionStack.push(stringToken);
		}
		else if (topExeToken.value == "Stem" || topExeToken.value == "stem") {
			// Stem: keep only the first character of a quoted string literal
			Token stringToken = executionStack.top();
			executionStack.pop();
			string tokenValue = stringToken.value;
			tokenValue = tokenValue.substr(1, 1);
			tokenValue = "'" + tokenValue + "'";
			stringToken.value = tokenValue;
			executionStack.push(stringToken);
		}
		else if (topExeToken.value == "Conc" || topExeToken.value == "conc") {
			// Conc: concatenate two string literals (strip/re-add quotes)
			Token firstToken = executionStack.top();
			executionStack.pop();
			Token secondToken = executionStack.top();
			executionStack.pop();
			string concatValue = firstToken.value.substr(1, firstToken.value.size() - 2) + secondToken.value.substr(1, secondToken.value.size() - 2);
			concatValue = "'" + concatValue + "'";
			Token newToken(concatValue, RecursiveParser::STR);
			executionStack.push(newToken);
			// Conc consumes an extra gamma from the control stack
			controlStack.pop();
		}
		else if (topExeToken.value == "ItoS" || topExeToken.value == "itos") {
			// ItoS: integer to quoted string
			Token firstToken = executionStack.top();
			executionStack.pop();
			firstToken.type = RecursiveParser::STR;
			firstToken.value = "'" + firstToken.value + "'";
			executionStack.push(firstToken);
		}
		else if (topExeToken.value == "Print" || topExeToken.value == "print") {
			printCalled = true;
			Token t = executionStack.top();
			executionStack.pop();
			if (t.isTuple != true) {
				// scalar: print unquoted/unescaped, closures specially
				if (t.type == RecursiveParser::STR) {
					string tempStr = unescape(t.value.substr(1, t.value.size() - 2));
					cout << tempStr;
					if (tempStr[tempStr.size() - 1] == '\n')
						cout << endl;
				}
				else if (t.type == "lambdaClosure") {
					cout << "[lambda closure: " << t.lambdaParam << ": " << t.lambdaNum << "]";
				}
				else {
					cout << t.value;
				}
				Token dummyToken("dummy", "dummy");
				executionStack.push(dummyToken);
			}
			else {
				// tuple: print as (a, b, ...)
				vector<Token> tupleVector = t.tuple;
				int i = 0;
				while (i < tupleVector.size()) {
					if (i != 0) {
						cout << ", ";
					}
					else {
						cout << "(";
					}
					if (tupleVector[i].type == RecursiveParser::STR) {
						cout << unescape(tupleVector[i].value.substr(1, tupleVector[i].value.size() - 2));
					}
					else if (tupleVector[i].isTuple == true) {
						// NOTE(review): the two plain couts below look like
						// leftover debug output — confirm before shipping
						cout << "Inside else if" << endl;
						vector<Token> innerTuple = tupleVector[i].tuple;
						cout << "Size" << innerTuple.size() << endl;
						if (innerTuple.size() == 1) {
							if (innerTuple[0].type == RecursiveParser::STR)
								cout << unescape(innerTuple[0].value.substr(1, innerTuple[0].value.size() - 2));
						}
					}
					else {
						cout << tupleVector[i].value;
					}
					if (i == tupleVector.size() - 1) {
						cout << ")";
					}
					i++;
				}
			}
		}
		else if (topExeToken.value == "Isinteger") {
			Token t = executionStack.top();
			executionStack.pop();
			if (t.type == RecursiveParser::INT)
				executionStack.push(Token("true", "true"));
			else
				executionStack.push(Token("false", "false"));
		}
		else if (topExeToken.value == "Istruthvalue") {
			Token t = executionStack.top();
			executionStack.pop();
			if (t.value == "true" || t.value == "false")
				executionStack.push(Token("true", "true"));
			else
				executionStack.push(Token("false", "false"));
		}
		else if (topExeToken.value == "Isstring") {
			Token t = executionStack.top();
			executionStack.pop();
			if (t.type == RecursiveParser::STR)
				executionStack.push(Token("true", "true"));
			else
				executionStack.push(Token("false", "false"));
		}
		else if (topExeToken.value == "Istuple") {
			Token t = executionStack.top();
			executionStack.pop();
			if (t.isTuple == true)
				executionStack.push(Token("true", "true"));
			else
				executionStack.push(Token("false", "false"));
		}
		else if (topExeToken.value == "Isdummy") {
			Token t = executionStack.top();
			executionStack.pop();
			if (t.value == "dummy")
				executionStack.push(Token("true", "true"));
			else
				executionStack.push(Token("false", "false"));
		}
		else if (topExeToken.value == "Isfunction") {
			Token t = executionStack.top();
			executionStack.pop();
			if (t.type == "lambdaClosure")
				executionStack.push(Token("true", "true"));
			else
				executionStack.push(Token("false", "false"));
		}
		else if (topExeToken.value == "Order") {
			// Order: number of elements in a tuple
			Token t = executionStack.top();
			executionStack.pop();
			executionStack.push(Token(intToString(t.tuple.size()), RecursiveParser::INT));
		}
		else if (topExeToken.value == "Null") {
			Token t = executionStack.top();
			executionStack.pop();
			if (t.value == "nil")
				executionStack.push(Token("true", "true"));
			else
				executionStack.push(Token("false", "false"));
		}
		else if (topExeToken.isTuple == true) {
			// tuple applied to an integer: 1-based element selection
			Token t = executionStack.top();
			executionStack.pop();
			if (t.type == RecursiveParser::INT) {
				int indx = atoi(t.value.c_str());
				indx -= 1;
				executionStack.push(topExeToken.tuple[indx]);
			}
		}
	}
	else if (currToken.type == "env") {
		// leaving an environment: keep the result, drop the env marker
		Token topToken = executionStack.top();
		executionStack.pop();
		executionStack.pop();
		executionStack.push(topToken);
		envStack.pop();
		currEnv = envStack.top();
	}
	else if (currToken.type == "beta") {
		// conditional: queue the then- or else-delta based on the top value
		Token topToken = executionStack.top();
		executionStack.pop();
		vector<Token> delta;
		unsigned int i = 0;
		if (topToken.value != "true") {
			delta = deltaMap[currToken.betaElseDeltaNum];
		}
		else {
			delta = deltaMap[currToken.betaIfDeltaNum];
		}
		while (i < delta.size()) {
			controlStack.push(delta[i]);
			i++;
		}
	}
	else if (currToken.value == "tau") {
		// build an n-element tuple from the top n stack values
		int tauCount = currToken.tauCount;
		string tuple = "(";
		vector<Token> tupleVector;
		int i = 0;
		while (i < tauCount) {
			Token t = executionStack.top();
			tupleVector.push_back(t);
			executionStack.pop();
			if (i != tauCount - 1)
				tuple += t.value + ", ";
			else
				tuple += t.value;
			i++;
		}
		tuple += ")";
		Token newToken(tuple, "tuple");
		newToken.tuple = tupleVector;
		newToken.isTuple = true;
		executionStack.push(newToken);
	}
	else if (currToken.value == "nil") {
		currToken.isTuple = true;  // nil is the empty tuple
		executionStack.push(currToken);
	}
	else if (currToken.value == "aug") {
		// append a value to a tuple (nil starts a one-element tuple)
		Token tuple = executionStack.top();
		executionStack.pop();
		Token toAdd = executionStack.top();
		executionStack.pop();
		if (tuple.value == "nil") {
			Token newToken(toAdd.value, "tuple");
			newToken.isTuple = true;
			newToken.tuple = vector<Token>();
			newToken.tuple.push_back(toAdd);
			executionStack.push(newToken);
		}
		else {
			tuple.tuple.push_back(toAdd);
			executionStack.push(tuple);
		}
	}
	else if (currToken.type == "lambdaClosure") {
		currToken.lambdaEnv = currEnv;  // capture the defining environment
		executionStack.push(currToken);
	}
	else {
		// literals and everything else are simply pushed
		executionStack.push(currToken);
	}
}
/**
 * checkForNewPCSCToken looks into a specific slot for a token.
 *
 * @param slot Pointer to slot structure.
 *
 * @return
 *   <P><TABLE>
 *   <TR><TD>Code</TD><TD>Meaning</TD></TR>
 *   <TR>
 *   <TD>CKR_OK </TD>
 *   <TD>Success </TD>
 *   </TR>
 *   <TR>
 *   <TD>CKR_HOST_MEMORY </TD>
 *   <TD>Error getting memory (malloc) </TD>
 *   </TR>
 *   <TR>
 *   <TD>CKR_GENERAL_ERROR </TD>
 *   <TD>Error opening slot directory </TD>
 *   </TR>
 *   </TABLE></P>
 */
static int checkForNewPCSCToken(struct p11Slot_t *slot)
{
	struct p11Token_t *ptoken;
	int rc, i;
	LONG rv;
	DWORD dwActiveProtocol;
	WORD feature;
	DWORD featurecode, lenr, atrlen,readernamelen,state,protocol;
	unsigned char buf[256];
	unsigned char atr[36];
	char *po;

	FUNC_CALLED();

	if (slot->closed) {
		FUNC_RETURNS(CKR_TOKEN_NOT_PRESENT);
	}

	/* connect to the card in this reader (shared mode, T=1) */
	rv = SCardConnect(slot->context, slot->readername, SCARD_SHARE_SHARED, SCARD_PROTOCOL_T1, &(slot->card), &dwActiveProtocol);

#ifdef DEBUG
	debug("SCardConnect (%i, %s): %s\n", slot->id, slot->readername, pcsc_error_to_string(rv));
#endif

	/* no card / card removed / reader busy: simply no token present */
	if (rv == SCARD_E_NO_SMARTCARD || rv == SCARD_W_REMOVED_CARD || rv == SCARD_E_SHARING_VIOLATION) {
		FUNC_RETURNS(CKR_TOKEN_NOT_PRESENT);
	}

	if (rv != SCARD_S_SUCCESS) {
		closeSlot(slot);
		FUNC_FAILS(CKR_DEVICE_ERROR, pcsc_error_to_string(rv));
	}

	/* probe the reader's PC/SC part-10 features once, looking for a
	 * pin pad (FEATURE_VERIFY_PIN_DIRECT) */
	if (!slot->hasFeatureVerifyPINDirect) {
		rv = SCardControl(slot->card, SCARD_CTL_CODE(3400), NULL,0, buf, sizeof(buf), &lenr);

#ifdef DEBUG
		debug("SCardControl (CM_IOCTL_GET_FEATURE_REQUEST): %s\n", pcsc_error_to_string(rv));
#endif

		/* Ignore the feature codes if an error occured */
		if (rv == SCARD_S_SUCCESS) {
			/* response is a list of 6-byte TLV entries:
			 * tag, length, 4-byte big-endian control code */
			for (i = 0; i < lenr; i += 6) {
				feature = buf[i];
				featurecode = (buf[i + 2] << 24) + (buf[i + 3] << 16) + (buf[i + 4] << 8) + buf[i + 5];
#ifdef DEBUG
				debug("%s - 0x%08X\n", pcsc_feature_to_string(feature), featurecode);
#endif
				if (feature == FEATURE_VERIFY_PIN_DIRECT) {
					/* the pin pad can be disabled via environment */
					po = getenv("PKCS11_IGNORE_PINPAD");
#ifdef DEBUG
					if (po) {
						debug("PKCS11_IGNORE_PINPAD=%s\n", po);
					} else {
						debug("PKCS11_IGNORE_PINPAD not found\n");
					}
#endif
					if (!po || (*po == '0')) {
#ifdef DEBUG
						debug("Slot supports feature VERIFY_PIN_DIRECT - setting CKF_PROTECTED_AUTHENTICATION_PATH for token\n");
#endif
						slot->hasFeatureVerifyPINDirect = featurecode;
					}
				}
			}
		}
	}

	/* query the ATR; the reader name itself is not needed */
	readernamelen = 0;
	atrlen = sizeof(atr);

	rc = SCardStatus(slot->card, NULL, &readernamelen, &state, &protocol, atr, &atrlen);

	if (rc != SCARD_S_SUCCESS) {
		closeSlot(slot);
		FUNC_FAILS(CKR_DEVICE_ERROR, pcsc_error_to_string(rc));
	}

	/* NOTE(review): newToken presumably registers the token with the slot;
	 * the local ptoken is not used further here — confirm ownership. */
	rc = newToken(slot, atr, atrlen, &ptoken);

	if (rc != CKR_OK) {
		FUNC_FAILS(rc, "newToken() failed");
	}

	FUNC_RETURNS(rc);
}