void Foam::primitiveEntry::append
(
    const token& currToken,
    const dictionary& dict,
    Istream& is
)
{
    // Assume the token is stored verbatim unless an expansion consumes it.
    bool keep = true;

    if (currToken.isWord())
    {
        const word& w = currToken.wordToken();

        // Single-character words ('$', '#') are never expanded.
        if (w.size() > 1)
        {
            if (w[0] == '$' && expandVariable(w, dict))
            {
                keep = false;   // variable expanded in place
            }
            else if (w[0] == '#' && expandFunction(w, dict, is))
            {
                keep = false;   // function entry executed
            }
        }
    }

    if (keep)
    {
        newElmt(tokenIndex()++) = currToken;
    }
}
/*
 * Copies the lexer's current text and type into tok.
 * Always reports success.
 */
bool lexer::get_token(token &tok)
{
    tok.set_type(type);
    tok.set_text(text);
    return true;
}
// Intersects this charset's token with rhs_'s, leaving the common part in
// overlap_.  When an overlap exists, overlap_'s index set receives the
// union of both operands' index sets, and either operand whose token was
// fully consumed has its index set cleared.
void intersect (basic_charset &rhs_, basic_charset &overlap_)
{
    _token.intersect (rhs_._token, overlap_._token);

    if (!overlap_._token.empty ())
    {
        // Range insert replaces the two hand-rolled iterator loops.
        // (index_set supports single-element insert, so it is set-like
        // and provides the iterator-range overload.)
        overlap_._index_set.insert (_index_set.begin (),
            _index_set.end ());
        overlap_._index_set.insert (rhs_._index_set.begin (),
            rhs_._index_set.end ());

        // An empty token no longer contributes any indexes.
        if (_token.empty ())
        {
            _index_set.clear ();
        }

        if (rhs_._token.empty ())
        {
            rhs_._index_set.clear ();
        }
    }
}
// Returns the binary-operator precedence of t, or -1 when t is not a
// binary operator (this stops OpPrecedence parsing once a non-op is hit
// while evaluating an InfixExpr).
int opPriority(token t)
{
    // Single lookup instead of find() + operator[]: operator[] performs a
    // second search and is non-const (it would insert a default entry if
    // the key were ever missing).
    auto it = binOP_Table.find(t.Tok());
    if (it != binOP_Table.end())
        return it->second;

    return -1;
}
bool token::operator == (const token& rhs) const
{
    // A token always equals itself.
    if (this == &rhs)
        return true;

    // Two distinct tokens are equal only when both are valid and their
    // string forms match; any invalid operand makes them unequal.
    return valid() && rhs.valid() && to_string() == rhs.to_string();
}
// Parses a literal introduced by '&' or '+' followed by a run of
// alphanumerics, '.' or '-'.  Returns failed without consuming input when
// the first character does not match (or the range is empty).
virtual parse_result parse( string::const_iterator& i,
                            string::const_iterator e,
                            token& tok ) const
{
    string::const_iterator j = i;

    // Guard against an empty range: the original dereferenced *j before
    // checking j != e.
    if ( j == e || ( *j != '&' && *j != '+' ) )
        return failed;
    ++j;

    // Cast to unsigned char: calling isalnum() with a negative char value
    // is undefined behaviour.
    while ( j < e && ( *j == '.' || *j == '-'
                       || isalnum( static_cast<unsigned char>(*j) ) ) )
        ++j;

    tok.assign(i, j);
    tok.type( tok_types::pn_lit );
    i = j;
    return done;
}
/*
 * Copies the root token's text and type into tok.
 * Returns false when the tree has no root.
 */
bool syn_tree::get_root_contents(token &tok)
{
    // no root -> nothing to report
    if (!root)
        return false;

    tok.set_type(root->get_type());
    tok.set_text(root->get_text());
    return true;
}
// Constructs a zstring viewing the characters of tok; an empty token maps
// to the shared nullstring sentinel.  No buffer or pool is attached.
zstring::zstring(const token& tok)
    : _buf(0)
    , _pool(0)
{
    if (tok.len() != 0)
    {
        _zptr = tok.ptr();
        // presumably ptre() is one-past-the-end, so back up one to the
        // last character — TODO(review): confirm against token's API
        _zend = tok.ptre() - 1;
    }
    else
    {
        _zptr = nullstring;
        _zend = nullstring;
    }
}
/*
 * Copies tok's text and type into the current token.
 * Returns false when there is no current token.
 */
bool syn_tree::set_contents(token &tok)
{
    // no current token -> nothing to update
    if (!cur)
        return false;

    cur->set_type(tok.get_type());
    cur->set_text(tok.get_text());
    return true;
}
/*
 * Copies the current token's text and type into tok.
 * Returns false when there is no current token.
 */
bool syn_tree::get_contents(token &tok)
{
    // no current token -> nothing to report
    if (!cur)
        return false;

    tok.set_type(cur->get_type());
    tok.set_text(cur->get_text());
    return true;
}
static bool is_greater_precedence(const token &one, const token &two) { size_t one_id = one.first_matching_sym_op_fn_id(); size_t two_id = two.first_matching_sym_op_fn_id(); typename Fn_Table::fn_info one_info = Fn_Table::get_info(one_id); typename Fn_Table::fn_info two_info = Fn_Table::get_info(two_id); bool one_is_left_assoc = one_info.is_left_assoc(); size_t one_precedence = one_info.precedence(); size_t two_precedence = two_info.precedence(); return ((one_is_left_assoc && (one_precedence >= two_precedence)) || (!one_is_left_assoc && (one_precedence > two_precedence))); }
// Emits a Graphviz node definition for `node`, labelled with its name,
// token-type name and token text.  Does nothing while a connection target
// is pending.
void dotfile_visitor::format_node(abstract_node *node, const char *name, const token &tok)
{
    if (m_connectTo)
        return;

    if (tok.type() == token_types::STRING_LITERAL)
    {
        // Strip the surrounding quote characters from the literal's text.
        fprintf(m_file, "\tptr%p [label=\"[%s]\\n[%s]\\n\\\"%.*s\\\"\"];\n",
                node, name, token_types::names[tok.type()],
                tok.length() - 2, tok.text() + 1);
    }
    else if (tok.text())
    {
        fprintf(m_file, "\tptr%p [label=\"[%s]\\n[%s]\\n%.*s\"];\n",
                node, name, token_types::names[tok.type()],
                tok.length(), tok.text());
    }
    else
    {
        // Token carries no text: fall back to the name-only overload.
        format_node(node, name);
    }
}
// Returns the storage place of the constant described by tkconst,
// creating and registering a new descriptor on first sight.
string symbol_table::new_const(token tkconst)
{
    // Key constants by "<type><text>" so identical literals of different
    // types do not collide.  Build the key once instead of concatenating
    // it separately for the lookup and the insertion.
    const string key = type_of(tkconst) + tkconst.text();

    // Reuse the existing descriptor if this constant was seen before.
    auto ent = consts_.find(key);
    if (ent != consts_.end()) {
        return ent->second.place();
    }

    const_desc desc(tkconst);
    consts_[key] = desc;
    return desc.place();
}
// Derives an scrypt hash of the plaintext password `pw` using `salt`.
// Throws std::runtime_error when libscrypt reports failure.
static inline token hash(const std::string &pw, token const& salt)
{
    token result;

    const int rc = ::libscrypt_scrypt(
        reinterpret_cast<const uint8_t*>(pw.data()), pw.size(),
        salt.data(), salt.size(),
        SCRYPT_N, SCRYPT_r, SCRYPT_p,
        result.data(), result.size());

    if (rc == -1)
        throw std::runtime_error("Incapable of hashing plaintext");

    return result;
}
// Derives an scrypt hash of the token `pw` (already raw bytes) using
// `salt`.  Throws std::runtime_error when libscrypt reports failure.
static inline token hash(token const& pw, token const& salt)
{
    token result;

    const int rc = ::libscrypt_scrypt(
        pw.data(), pw.size(),
        salt.data(), salt.size(),
        SCRYPT_N, SCRYPT_r, SCRYPT_p,
        result.data(), result.size());

    if (rc == -1)
        throw std::runtime_error("Incapable of hashing token");

    return result;
}
// Strict weak ordering: invalid tokens sort before valid ones; two
// invalid tokens are equivalent; valid tokens order by string form.
bool token::operator < (const token& rhs) const
{
    const bool lhs_ok = valid();
    const bool rhs_ok = rhs.valid();

    if (lhs_ok != rhs_ok)
        return rhs_ok;          // only the invalid side is "less"

    if (!lhs_ok)
        return false;           // both invalid: equivalent

    return to_string() < rhs.to_string();
}
/*
 * Copies tok's text and type into the index-th child of the current
 * token.  Returns false when there is no current token or the index is
 * out of range (which also covers a childless current token).
 */
bool syn_tree::set_child_contents(token &tok, unsigned int index)
{
    if (!cur || index >= get_size())
        return false;

    token *child = cur->get_child(index);
    child->set_type(tok.get_type());
    child->set_text(tok.get_text());
    return true;
}
/*
 * Recognises a single operator at i_iter and stores it in output.
 * Returns the number of characters consumed (1, or 2 for "!="), or -1
 * when the position holds no recognised operator.
 */
int parse_operator(string &str, string::iterator i_iter, token &output)
{
    bool is_error = false;
    output.clean();

    if(i_iter != str.end()) {
        switch(*i_iter) {
        case '+': output._typeid = OPER_ADD;  break;
        case '-': output._typeid = OPER_SUB;  break;
        case '*': output._typeid = OPER_MULT; break;
        case '/': output._typeid = OPER_DIV;  break;
        case '=': output._typeid = OPER_EQU;  break;
        case '(':                               // fall through
        case '{': output._typeid = OPER_LPAR; break;
        case ')':                               // fall through
        case '}': output._typeid = OPER_RPAR; break;
        case '!':
            // '!' is only meaningful as part of "!=".  The original code
            // left _typeid unset for a lone '!' yet still reported
            // success; treat it as an error instead.
            if((i_iter + 1) != str.end() && *(i_iter + 1) == '=')
                output._typeid = OPER_NOTEQ;
            else
                is_error = true;
            break;
        default:
            is_error = true;
            break;
        }
    }else{
        is_error = true;
    }

    if(is_error)
        return -1;

    if(output._typeid == OPER_NOTEQ){
        output.elemt = "!=";
        return 2;
    }

    output.elemt = *i_iter;
    return 1;
}
/*
 * Parses a double-quoted string literal starting at i_iter.  Supported
 * escapes: \" \n \t.  Stores the unescaped text in output and returns the
 * number of characters consumed (the closing quote is NOT consumed, as
 * before), or -1 on error.
 *
 * Fix: the original never advanced `iter` in the s_note state for an
 * unrecognised escape or a dangling trailing backslash, spinning forever.
 */
int parse_string(string &str, string::iterator i_iter, token& output)
{
    const int s_input = 0;
    const int s_exit  = 1;
    const int s_note  = 2;

    output.clean();

    bool flag = false;
    bool is_error = false;
    vector<char> o;
    string::iterator iter = i_iter;
    int state = s_input;

    // A literal must open with a quote.
    if(*iter != '"'){
        state = s_exit;
    }else{
        ++iter;
    }

    while(flag == false)
    {
        switch(state)
        {
        case s_input:
            if(iter == str.end()){
                state = s_exit;
            }else if(*iter == '\\'){
                state = s_note;
            }else if(*iter == '"'){
                state = s_exit;     // closing quote: stop (not consumed)
            }else{
                o.push_back(*iter++);
            }
            break;
        case s_exit:
            // No progress at all means the opening quote was missing.
            if(iter == i_iter)
                is_error = true;
            flag = true;
            break;
        case s_note:
            if((iter + 1) == str.end()){
                // Dangling backslash at end of input.
                is_error = true;
                state = s_exit;
            }else if(*(iter + 1) == '"'){
                o.push_back('"');
                iter += 2;
                state = s_input;
            }else if(*(iter + 1) == 'n'){
                o.push_back('\n');
                iter += 2;
                state = s_input;
            }else if(*(iter + 1) == 't'){
                o.push_back('\t');
                iter += 2;
                state = s_input;
            }else{
                // Unknown escape: keep both characters verbatim and
                // advance (the original looped forever here).
                o.push_back(*iter);
                o.push_back(*(iter + 1));
                iter += 2;
                state = s_input;
            }
            break;
        }
    }

    if(is_error)
    {
        return -1;
    }

    output.elemt = string(o.begin(), o.end());
    output._typeid = TYPE_STR;
    return iter - i_iter;
}
void Foam::primitiveEntry::append ( const token& currToken, const dictionary& dict, Istream& is ) { if (currToken.isWord()) { const word& w = currToken.wordToken(); if ( disableFunctionEntries || w.size() == 1 || ( !(w[0] == '$' && expandVariable(w, dict)) && !(w[0] == '#' && expandFunction(w, dict, is)) ) ) { newElmt(tokenIndex()++) = currToken; } } else if (currToken.isVariable()) { const string& w = currToken.stringToken(); if ( disableFunctionEntries || w.size() <= 3 || !( w[0] == '$' && w[1] == token::BEGIN_BLOCK && expandVariable(w, dict) ) ) { newElmt(tokenIndex()++) = currToken; } } else { newElmt(tokenIndex()++) = currToken; } }
// Construct a leaf parse tree wrapping a single token. {{{
parsetree::parsetree(const token &t)
: myToken(t)
, myType(t.Name())
, myCase("_TOKEN")
, is_token(true)
{
    // A token node carries no children.
    myContent.clear();
} // }}}
// Builds a Condition from a comparison parse node.  "OP" nodes become a
// single left-op-right condition; every other node kind delegates to
// condition_interpret on the first child.
Condition DB_Set::comparison_interpret(token& t, vector<Table*> r)
{
    if(t.get_value() == "OP")
    {
        // Operand text lives either directly in an identifier node or one
        // level deeper for other node kinds.
        string left, right;

        if(t[0][0].get_type() == identifier)
            left = t[0][0].get_value();
        else
            left = t[0][0][0].get_value();

        if(t[2][0].get_type() == identifier)
            right = t[2][0].get_value();
        else
            right = t[2][0][0].get_value();

        Condition_Sing op(left, t[1].get_value(), right);
        return Condition(op);
    }

    // The original had a redundant `else if (value == "CONDITION")`
    // branch that performed exactly this same call.
    return condition_interpret(t[0], r);
}
// Consumes everything up to (but not including) the next newline and
// produces a comment token.  Never fails: an empty remainder yields an
// empty comment.
virtual parse_result parse( string::const_iterator& i,
                            string::const_iterator e,
                            token& tok ) const
{
    string::const_iterator j = i;

    while ( j < e && *j != '\n' )
        ++j;

    // assign() matches the sibling parsers and avoids constructing the
    // temporary std::string that `tok = string(i, j)` created.
    tok.assign(i, j);
    tok.type( tok_types::comment );
    i = j;
    return done;
}
/*
 * Parses a numeric literal (integer or real with one decimal point)
 * starting at i_iter.  Stores the digits in output with TYPE_NUM or
 * TYPE_REAL, and returns the number of characters consumed, or -1 when
 * no digits are present or a second decimal point appears.
 */
int parse_digit(string &str, string::iterator i_iter, token& output)
{
    output.clean();

    string::iterator iter = i_iter;
    bool seen_point = false;
    bool bad = false;
    vector<char> collected;

    // Scan digits, allowing at most one '.', stopping at anything else.
    while(iter != str.end())
    {
        const char c = *iter;

        if(c == '.')
        {
            if(seen_point)
            {
                bad = true;     // second decimal point
                break;
            }
            seen_point = true;
            collected.push_back(c);
            ++iter;
        }
        else if(is_digit(c))
        {
            collected.push_back(c);
            ++iter;
        }
        else
        {
            break;
        }
    }

    // Error on a malformed literal or when nothing was consumed.
    if(bad || iter == i_iter)
        return -1;

    output.elemt = string(collected.begin(), collected.end());
    output._typeid = seen_point ? TYPE_REAL : TYPE_NUM;
    return iter - i_iter;
}
Foam::token Foam::functionEntries::ifeqEntry::expand
(
    const dictionary& dict,
    const string& keyword,
    const token& t
)
{
    if (keyword[0] != '$')
    {
        if (t.isString())
        {
            return t;
        }

        // Re-form as a string token so we can compare to string
        return token(keyword, t.lineNumber());
    }

    // '$'-prefixed keyword: strip the sigil to get the variable name
    const word varName = keyword(1, keyword.size()-1);

    // Lookup the variable name in the given dictionary
    const entry* ePtr = dict.lookupScopedEntryPtr
    (
        varName,
        true,
        true
    );

    if (ePtr)
    {
        return token(ePtr->stream());
    }

    // String expansion. Allow unset variables
    string expanded(keyword);
    stringOps::inplaceExpand(expanded, dict, true, true);

    // Re-form as a string token so we can compare to string
    return token(expanded, t.lineNumber());
}
// Two tokens compare equal when their type ids match (text is ignored).
// NOTE(review): the parameter is taken by value, copying the whole token
// on every comparison; changing it to const& would require touching the
// out-of-line declaration, which is outside this file's view.
bool token::operator==(token compareToken)
{
    // Return the comparison directly instead of if/return true/false.
    return tokenType == compareToken.type();
}
/*
 * Recognises a statement terminator at i_iter.
 * Returns 1 and fills output on success, -1 otherwise.
 */
int parse_end(string &str, string::iterator i_iter, token &output)
{
    output.clean();

    const bool terminator = (i_iter != str.end()) && is_end(*i_iter);
    if(!terminator)
        return -1;

    output.elemt = ";";
    output._typeid = END;
    return 1;
}
// Intersects this charset's token with rhs_'s, leaving the common part in
// overlap_.  When an overlap exists, overlap_'s index set receives the
// merge of both operands' index sets (std::merge assumes both ranges are
// already sorted), and either operand whose token became empty has its
// index set cleared — an empty token no longer owns any indexes.
void intersect (basic_charset &rhs_, basic_charset &overlap_)
{
    _token.intersect (rhs_._token, overlap_._token);

    if (!overlap_._token.empty ())
    {
        // Single-pass merge of the two sorted index sets into overlap_.
        std::merge (_index_set.begin (), _index_set.end (),
            rhs_._index_set.begin (), rhs_._index_set.end (),
            std::inserter (overlap_._index_set,
                overlap_._index_set.end ()));

        if (_token.empty ())
        {
            _index_set.clear ();
        }

        if (rhs_._token.empty ())
        {
            rhs_._index_set.clear ();
        }
    }
}
//function that finds if the chars following match a target symbol bool followingChars(string target, char currentChar, token curToken) { for(int x = 0; x < target.length(); x++) { if(currentChar == target[x]) { curToken.appendChar(currentChar); currentChar = filestream.get(); } else { curToken.clearData(); while(x != 0) { filestream.unget(); x--; } return false; } } filestream.unget(); return true; }
void Foam::functionEntries::ifeqEntry::readToken(token& t, Istream& is)
{
    // Skip dummy tokens - avoids entry::getKeyword consuming #else, #endif
    for (;;)
    {
        // Stop on read failure, end of stream, or a bad token.
        if (is.read(t).bad() || is.eof() || !t.good())
        {
            return;
        }

        // Anything other than ';' is a real token: hand it back.
        if (!(t == token::END_STATEMENT))
        {
            return;
        }
    }
}