//! Repeatedly substitutes known macro names inside this token's `replacement`
//! string (up to 5 passes), expanding function-like macros with their argument
//! lists and stripping the `##` paste operator from the final result.
void PPToken::squeeze()
{
    // Words that were substituted once; never substitute them again,
    // otherwise mutually-referential macros would recurse forever.
    std::set<wxString> alreadyReplacedMacros;

    // perform the squeeze 5 times max
    for(size_t count=0; count < 5; count++) {
        bool modified(false); // set whenever this pass changed `replacement`

        // get list of possible macros in the replacement
        wxArrayString tmpWords = TokenizeWords(replacement);
        wxArrayString words;

        // make sure that a word is not been replaced more than once
        // this will avoid recursion
        // an example (taken from qglobal.h of the Qt library):
        //
        // #define qDebug QT_NO_QDEBUG_MACRO
        // #define QT_NO_QDEBUG_MACRO if(1); else qDebug
        //
        for(size_t i=0; i<tmpWords.size(); i++) {
            if(alreadyReplacedMacros.find(tmpWords.Item(i)) == alreadyReplacedMacros.end()){
                alreadyReplacedMacros.insert(tmpWords[i]);
                words.Add(tmpWords[i]);
            }
        }

        for(size_t i=0; i<words.size(); i++) {
            // Look the word up in the global macro table.
            PPToken tok = PPTable::Instance()->Token(words.Item(i));
            if(tok.flags & IsValid) {
                if(tok.flags & IsFunctionLike) {
                    // NOTE(review): Find() locates the first occurrence as a raw
                    // substring, so a macro name that is a prefix/infix of a longer
                    // word could match there instead of at a word boundary.
                    int where = replacement.Find(words.Item(i));
                    if(where != wxNOT_FOUND) {
                        wxString initList;
                        wxArrayString initListArr;
                        // Parse the parenthesized argument list that follows the name.
                        if(readInitList( replacement, where + words.Item(i).Length(), initList, initListArr )) {
                            tok.expandOnce(initListArr);

                            // Splice the expansion in place of "name(args...)".
                            replacement.Remove(where, words.Item(i).Length() + initList.Length());
                            tok.replacement.Replace(wxT("##"), wxT(""));
                            replacement.insert(where, tok.replacement);
                            modified = true;
                        }
                    }
                } else {
                    // Object-like macro: plain textual substitution of every occurrence.
                    if(replacement.Replace(words.Item(i), tok.replacement)) {
                        modified = true;
                    }
                }
            }
        }

        // Fixed point reached: nothing changed this pass, stop early.
        if(!modified)
            break;
    }

    // Token pasting is not evaluated here; just drop the operator.
    replacement.Replace(wxT("##"), wxT(""));
}
//! Extracts the underlying type name for this tag from its `typeref` field, or
//! — for typedef tags lacking a typeref — by re-parsing the tag's pattern,
//! expanding a leading macro via the tags database when necessary.
//! @param templateInitList receives the template initialization list, if any
//! @param nameIncludeTemplate whether the returned name keeps its template part
//! @return the resolved name, or wxEmptyString if none could be determined
wxString TagEntry::NameFromTyperef(wxString& templateInitList, bool nameIncludeTemplate)
{
    wxString typeref = GetTyperef();
    if(typeref.IsEmpty() == false) {
        // typeref is of the form "kind:name" — keep everything after the first ':'
        wxString name = typeref.AfterFirst(wxT(':'));
        return name;
    }

    // incase our entry is a typedef, and it is not marked as typeref,
    // try to get the real name from the pattern
    if(GetKind() == wxT("typedef")) {

        wxString pat(GetPattern());
        if(!GetPattern().Contains(wxT("typedef"))) {
            // The pattern does not contain 'typedef' however this *is* a typedef
            // try to see if this is a macro
            pat.StartsWith(wxT("/^"), &pat); // strip the ctags "/^" prefix in place
            pat.Trim().Trim(false);

            // we take the first token
            CppScanner scanner;
            scanner.SetText(pat.To8BitData());
            int type = scanner.yylex();
            if(type == IDENTIFIER) {
                wxString token = wxString::From8BitData(scanner.YYText());
                // Ask the tags database whether the first token is a known macro.
                PPToken tok = TagsManagerST::Get()->GetDatabase()->GetMacro(token);
                if(tok.flags & PPToken::IsValid) {
                    // we found a match!
                    if(tok.flags & PPToken::IsFunctionLike) {
                        wxArrayString argList;
                        if(GetMacroArgList(scanner, argList)) {
                            tok.expandOnce(argList);
                        }
                    }
                    // Replace the pattern with the macro's expansion so the
                    // typedef parser below sees real code, not the macro name.
                    pat = tok.replacement;
                    pat << wxT(";");

                    // Remove double spaces
                    while(pat.Replace(wxT("  "), wxT(" "))) {}
                }
            }
        }

        wxString name;
        if(TypedefFromPattern(pat, GetName(), name, templateInitList, nameIncludeTemplate))
            return name;
    }

    return wxEmptyString;
}
//! Handles an `#include` directive: resolves the header name to a path
//! (system headers under /usr/include, quoted headers relative to the current
//! working directory), opens a nested Preprocessor over that file as `cache`,
//! and returns its first token.
//! @return the included file's first token, or an empty OTHER token when the
//!         header name could not be matched or the included file is empty
//! @throws IOException when the nested preprocessor cannot be created
PPToken Preprocessor :: include()
{
    PPToken current = lexer->get();
    bufLexSource->trim(1);
    bufLexSource->reset();

    // Skip whitespace tokens (but stop at end of line).
    // NOTE(review): unlike define()/undef(), this loop resets without
    // trim(1) — looks inconsistent with the rest of the file; confirm the
    // intended buffer semantics before "fixing".
    while( current.getKey() == WHITESPACE && \
           current.getName() != "\n") {
        current = lexer->get();
        bufLexSource->reset();
    }

    PPToken headerfileToken = lexer->matchHeaderName();
    string headerfile = headerfileToken.getName();
    string filename;

    //If headername matched, it has a name of length > 0
    if (headerfile.length() > 0) {
        if (headerfile.substr(0,1) == "<") {
            // <system> header: search the system include directory.
            filename += "/usr/include/"; //OS X 10.9
        } else {
            // "quoted" header: resolve relative to the current working directory.
            // BUGFIX: getcwd(NULL, 0) (glibc/BSD extension: allocates) can fail
            // and return NULL; appending NULL to a std::string is undefined
            // behavior, so fail loudly instead.
            char* cwdBuf = getcwd(NULL, 0);
            if (cwdBuf == NULL) {
                throw IOException("Could not determine working directory while "
                                  "including " + headerfile);
            }
            filename += cwdBuf;
            filename += "/";
            free(cwdBuf);
        }
        // Strip the surrounding delimiters (<...> or "...").
        headerfile.pop_back();
        headerfile.erase(0,1); //Remove first char
        filename += headerfile;
        bufLexSource->clear();

        //Now, set up a new Preprocessor for the included file as 'cache'
        //in the current preprocessor.
        // NOTE(review): a previous cache, if any, is leaked here — cannot
        // safely delete without knowing the pointer's initialization.
        try {
            this->cache = new Preprocessor(filename);
        // BUGFIX: catch by const reference, not by value (avoids a copy and
        // potential slicing of derived exception types).
        } catch (const IOException&) {
            throw IOException("Could not create preprocessor for " + \
                              filename + " while in " + this->filename);
        }
        this->usingCache = true;
        if (!this->cache->empty()) {
            return this->cache->get();
        }
    }
    // No header name matched (or the included file was empty): return a
    // placeholder token at the current position.
    return PPToken(this->getPosition(), "", OTHER);
}
//! Returns the next PPToken without expanding macros PPToken Preprocessor :: unexpandedGet() { if (this->usingCache) { if (!this->cache->empty()) { return this->cache->get(); } else { //Done with this cache, continue in normal file this->usingCache = false; //delete cache; } } PPToken first = lexer->get(); bufLexSource->trim(1); bufLexSource->reset(); if (first.getName() == "#") { PPToken current = lexer->get(); bufLexSource->trim(1); bufLexSource->reset(); if (current.getName() == "include") { return this->include(); } else if (current.getName() == "define") { return this->define(); } else if (current.getName() == "undef") { return this->undef(); } } this->bufLexSource->trim(1); this->bufLexSource->reset(); return first; }
//! Handles an `#undef` directive: skips whitespace, validates that an
//! identifier follows, removes that macro from the macro map, and returns
//! the next token from the lexer.
//! @throws SyntaxException when a newline or a non-identifier follows #undef
PPToken Preprocessor :: undef()
{
    PPToken tok = lexer->get();

    // Advance past horizontal whitespace; a newline terminates the directive.
    for (;;) {
        const bool skippable =
            tok.getKey() == WHITESPACE && tok.getName() != "\n";
        if (!skippable) {
            break;
        }
        tok = lexer->get();
        bufLexSource->trim(1);
        bufLexSource->reset();
    }

    // Guard: the directive must name an identifier before the line ends.
    if (tok.getName() == "\n") {
        string err = "Expected macro name to undef before new line";
        throw SyntaxException(err);
    }
    if (tok.getKey() != IDENTIFIER) {
        string err = "Expected identifier to apply undef to";
        throw SyntaxException(err);
    }

    // Forget the macro; erase() is a no-op if it was never defined.
    macroMap->erase(tok.getName());

    bufLexSource->trim(1);
    bufLexSource->reset();
    return lexer->get();
}
//! Returns the next fully macro-expanded PPToken.
//! Drains a pending macro expansion first; otherwise reads an unexpanded
//! token and, when it names a known macro, collects arguments (for
//! function-like macros), expands it, and starts replaying the expansion.
PPToken Preprocessor :: get()
{
    // Replay a previously expanded macro body, one token per call.
    if (this->expandingMacro) {
        if (!this->macroCache->empty()) {
            PPToken ret = this->macroCache->front();
            this->macroCache->pop_front();
            if (this->macroCache->empty() ){
                this->expandingMacro = false;
            }
            return ret;
        } else {
            //Done with this macro
            this->expandingMacro = false;
        }
    }
    PPToken token = this->unexpandedGet();
    if (token.getKey() == IDENTIFIER) {
        auto search = this->macroMap->find(token.getName());
        if (search != this->macroMap->end()) {
            //Do dynamic down-cast to FunctionMacro if possible and expect function-like
            //macro whenever it works
            if (FunctionMacro* fm = dynamic_cast<FunctionMacro*>(search->second)) {
                token = this->unexpandedGet();
                if (token.getName() != "(") {
                    string err = "Expected '(' after invocation of function-like macro ";
                    err += search->first;
                    throw SyntaxException(err);
                } else {
                    unsigned int parenDepth = 1; //Keep track of how many layers of
                                                 //parenthesis deep we curently are
                    // Tokens of the argument currently being collected.
                    // NOTE(review): these lists are handed to fm->bind() and never
                    // deleted here — leaked unless bind() takes ownership; confirm.
                    list<PPToken>* currentList = new list<PPToken>();
                    token = this->unexpandedGet();
                    // Collect argument tokens until the matching ')' at depth 1.
                    while (parenDepth > 0) {
                        if (token.getName() == ")") {
                            --parenDepth;
                            if (parenDepth == 0) {
                                break;
                            }
                        } else if (token.getName() == "(") {
                            ++parenDepth;
                        } else if (token.getName() == "," && parenDepth == 1) {
                            // Top-level comma: finish the current argument.
                            if (!fm->bind(currentList)) {
                                string err = "Could not bind argument to function-like"\
                                             " macro";
                                throw SyntaxException(err);
                            }
                            currentList = new list<PPToken>();
                        } else if (token.getName() == "\n" && parenDepth != 0) {
                            string err = "Expected ')' before new line";
                            throw SyntaxException(err);
                        } else {
                            // Ordinary token (incl. nested parens' contents):
                            // part of the current argument.
                            currentList->push_back(token);
                        }
                        token = this->unexpandedGet();
                    }
                    //Do not check for errors here, as it might be
                    //a 0 argument macro
                    fm->bind(currentList);
                    // NOTE(review): previous macroCache (if any) is leaked here.
                    this->macroCache = new list<PPToken>(fm->expand());
                    this->expandingMacro = true;
                    this->bufLexSource->trim(1);
                    this->bufLexSource->reset();
                    // Return a neutral whitespace token; the expansion is
                    // delivered by subsequent get() calls.
                    return PPToken(this->getPosition(), "", WHITESPACE);
                }
            }
            //Found something, start returning expanded version
            // (object-like macro: expand with no arguments)
            this->macroCache = new list<PPToken>(search->second->expand());
            this->expandingMacro = true;
            return this->get();
        }
    }
    return token;
}
//! Handles a `#define` directive: reads the macro name, then parses either a
//! function-like macro (name immediately followed by '(') with its parameter
//! list and body, or an object-like macro with just a body, and stores it in
//! the macro map.
//! @return the token that terminated the definition (normally the newline),
//!         or the offending token if no identifier followed `define`
//! @throws SyntaxException on malformed parameter lists
PPToken Preprocessor :: define()
{
    PPToken token = lexer->get();
    bufLexSource->trim(1);
    bufLexSource->reset();

    // Skip horizontal whitespace between "define" and the macro name.
    while (token.getKey() == WHITESPACE && token.getName() != "\n") {
        token = lexer->get();
        bufLexSource->trim(1);
        bufLexSource->reset();
    }

    if (token.getKey() != IDENTIFIER) {
        // No macro name — hand the token back unchanged.
        return token;
    }

    //We have a macro, start setting up for adding it to the macroMap
    string macroName = token.getName();
    PPToken current = lexer->get();

    if (current.getName() == "(") { //Function macro
        bufLexSource->trim(1);
        bufLexSource->reset();

        // Parameter names, collected up to the closing ')'.
        // (Stack-allocated: fixes the original's leaked `new list<PPToken>`.)
        list<PPToken> args;
        current = lexer->get();
        bufLexSource->trim(1);
        bufLexSource->reset();

        bool delimited = true; // true => the next token must be a parameter name
        // BUGFIX: the original loop condition
        //   (name != "\n" || name != ")")
        // was a tautology; the loop now exits only via break/throw below.
        for (;;) {
            if (current.getKey() == IDENTIFIER) {
                args.push_back(current);
                delimited = false;
            } else if (current.getName() == ")") {
                // BUGFIX: allow zero-argument macros, e.g. "#define F()".
                // get() explicitly supports invoking them; only a trailing
                // comma (delimited && non-empty args) is an error.
                if (delimited && !args.empty()) {
                    string err = "Expected name of an argument before closing "\
                                 "parenthesis in definition of function-like macro";
                    throw SyntaxException(err);
                } else {
                    current = lexer->get();
                    // BUGFIX: preserve the one-token trim/reset invariant that
                    // every other lexer->get() in this class follows.
                    bufLexSource->trim(1);
                    bufLexSource->reset();
                    break;
                }
            } else if (current.getName() == ",") {
                if (delimited) {
                    string err = "Expected name of an argument in definition of "\
                                 "function-like macro";
                    throw SyntaxException(err);
                } else {
                    delimited = true;
                }
            } else if (current.getName() == "\n") {
                // BUGFIX: the original built this message but never threw it,
                // letting the scan run past the end of the line.
                string err = "Expected end of list of arguments before new line in "\
                             "declaration of function-like macro";
                throw SyntaxException(err);
            }
            current = lexer->get();
            bufLexSource->trim(1);
            bufLexSource->reset();
        }

        // Replacement body: everything up to the end of the line.
        // (BUGFIX: the original re-declared `body` here, shadowing and leaking
        // an earlier allocation.)
        list<PPToken> body;
        while (current.getName() != "\n") {
            body.push_back(current);
            current = lexer->get();
            bufLexSource->trim(1);
            bufLexSource->reset();
        }

        // NOTE(review): overwriting an existing entry leaks the old Macro*;
        // undef() also erases without deleting — ownership policy unclear.
        (*this->macroMap)[macroName] = new FunctionMacro(macroName, body, args);
        return current;
    } else { //Object macro
        // The token just read (typically the whitespace after the name) is
        // intentionally discarded, matching the original behavior.
        list<PPToken> body;
        current = lexer->get();
        bufLexSource->trim(1);
        bufLexSource->reset();
        while (current.getName() != "\n") {
            body.push_back(current);
            current = lexer->get();
            bufLexSource->trim(1);
            bufLexSource->reset();
        }
        (*this->macroMap)[macroName] = new ObjectMacro(macroName, body);
        return current;
    }
}