// Return the next token without consuming it.
// On the first call after a GetToken(), the tokenizer is run forward to
// produce the token, the post-peek position is cached in the m_Peek* members,
// and the stream position is rolled back. Subsequent calls reuse the cache.
wxString Tokenizer::PeekToken()
{
    if (!m_PeekAvailable)
    {
        m_PeekAvailable = true;

        // Remember where we are so the peek does not advance the stream.
        const unsigned int rollbackTokenIndex = m_TokenIndex;
        const unsigned int rollbackLineNumber = m_LineNumber;
        const unsigned int rollbackNestLevel  = m_NestLevel;

        if (SkipUnwanted())
            m_PeekToken = DoGetToken();
        else
            m_PeekToken.Clear();

        // Cache the position reached by the peek so a later GetToken() can
        // jump straight there instead of scanning again.
        m_PeekTokenIndex = m_TokenIndex;
        m_PeekLineNumber = m_LineNumber;
        m_PeekNestLevel  = m_NestLevel;

        // Roll the stream back to where it was before peeking.
        m_TokenIndex = rollbackTokenIndex;
        m_LineNumber = rollbackLineNumber;
        m_NestLevel  = rollbackNestLevel;
    }

    return m_PeekToken;
}
// Advance the tokenizer and return the next token.
// If a PeekToken() result is cached, adopt the position recorded during the
// peek instead of scanning again. The pre-call position is stored in the
// m_Undo* members so it can be restored later.
wxString Tokenizer::GetToken()
{
    // Record the undo position before anything moves.
    m_UndoTokenIndex = m_TokenIndex;
    m_UndoLineNumber = m_LineNumber;
    m_UndoNestLevel  = m_NestLevel;

    if (m_PeekAvailable)
    {
        // Reuse the cached peek: jump to where the peek left the stream.
        m_TokenIndex = m_PeekTokenIndex;
        m_LineNumber = m_PeekLineNumber;
        m_NestLevel  = m_PeekNestLevel;
        m_Token      = m_PeekToken;
    }
    else if (SkipUnwanted())
        m_Token = DoGetToken();
    else
        m_Token.Clear();

    m_PeekAvailable = false;

    return m_Token;
}
// Read the identifier at the current position and report whether it names a
// preprocessor token in the tokens tree. The rest of the line is consumed.
bool Tokenizer::IsMacroDefined()
{
    // Advance to the identifier itself.
    while (SkipWhiteSpace() || SkipComment())
        ;

    const int macroId = m_TokensTree->TokenExists(DoGetToken(), -1, tkPreprocessor);

    // Discard whatever else is on this line.
    SkipToEOL(false);

    return macroId != -1;
}
//----------------------------------------------------------------------------- // Purpose: Fetches the next token from the file. // Input : tr - The token reader object with which to fetch the token. // pszStore - Buffer in which to place the token, NULL to discard the token. // ttexpecting - The token type that we are expecting. If this is not TOKENNONE // and token type read is different, the operation will fail. // pszExpecting - The token string that we are expecting. If this string // is not NULL and the token string read is different, the operation will fail. // Output : Returns TRUE if the operation succeeded, FALSE if there was an error. // If there was an error, the error will be reported in the message window. //----------------------------------------------------------------------------- bool GDSkipToken(TokenReader &tr, trtoken_t ttexpecting, const char *pszExpecting) { // // Read the next token into a buffer and discard it. // char szDiscardBuf[MAX_TOKEN]; char *pszDiscardBuf = szDiscardBuf; return DoGetToken(tr, &pszDiscardBuf, sizeof(szDiscardBuf), ttexpecting, pszExpecting); }
//----------------------------------------------------------------------------- // Purpose: Fetches the next token from the file, allocating a buffer exactly // large enough to hold the token. // Input : tr - // ppszStore - // ttexpecting - // pszExpecting - // Output : //----------------------------------------------------------------------------- bool GDGetTokenDynamic(TokenReader &tr, char **ppszStore, trtoken_t ttexpecting, const char *pszExpecting) { if (ppszStore == NULL) { return false; } *ppszStore = NULL; return DoGetToken(tr, ppszStore, -1, ttexpecting, pszExpecting); }
//----------------------------------------------------------------------------- // Purpose: Fetches the next token from the file. // Input : tr - The token reader object with which to fetch the token. // pszStore - Buffer in which to place the token, NULL to discard the token. // ttexpecting - The token type that we are expecting. If this is not TOKENNONE // and token type read is different, the operation will fail. // pszExpecting - The token string that we are expecting. If this string // is not NULL and the token string read is different, the operation will fail. // Output : Returns TRUE if the operation succeeded, FALSE if there was an error. // If there was an error, the error will be reported in the message window. //----------------------------------------------------------------------------- bool GDGetToken(TokenReader &tr, char *pszStore, int nSize, trtoken_t ttexpecting, const char *pszExpecting) { Assert(pszStore != NULL); if (pszStore != NULL) { return DoGetToken(tr, &pszStore, nSize, ttexpecting, pszExpecting); } return false; }
// Classify the preprocessor directive that follows the current position.
// On a recognized conditional directive the stream is left positioned after
// the keyword; otherwise the position is rewound and ptOthers is returned.
PreprocessorType Tokenizer::GetPreprocessorType()
{
    const unsigned int savedIndex = m_TokenIndex;
    const unsigned int savedLine  = m_LineNumber;

    MoveToNextChar();
    while (SkipWhiteSpace() || SkipComment())
        ;

    const wxString directive = DoGetToken();

    // Dispatch on the keyword length first so at most two string
    // comparisons are needed per call.
    switch (directive.Len())
    {
        case 2:
            if (directive == TokenizerConsts::kw_if)       return ptIf;
            break;
        case 4:
            if (directive == TokenizerConsts::kw_else)     return ptElse;
            if (directive == TokenizerConsts::kw_elif)     return ptElif;
            break;
        case 5:
            if (directive == TokenizerConsts::kw_ifdef)    return ptIfdef;
            if (directive == TokenizerConsts::kw_endif)    return ptEndif;
            break;
        case 6:
            if (directive == TokenizerConsts::kw_ifndef)   return ptIfndef;
            break;
        case 7:
            if (directive == TokenizerConsts::kw_elifdef)  return ptElifdef;
            break;
        case 8:
            if (directive == TokenizerConsts::kw_elifndef) return ptElifndef;
            break;
    }

    // Not a conditional directive: rewind so the caller sees it untouched.
    m_TokenIndex = savedIndex;
    m_LineNumber = savedLine;
    return ptOthers;
}
//vfc add bGetValue wxString Tokenizer::GetToken(bool bGetValue, bool bTemplate) { m_UndoTokenIndex = m_TokenIndex; m_UndoLineNumber = m_LineNumber; m_UndoNestLevel = m_NestLevel; if (bGetValue) { m_curtoken = DoGetToken(bGetValue, bTemplate); } else if(m_peekavailable) { m_TokenIndex = m_PeekTokenIndex; m_LineNumber = m_PeekLineNumber; m_NestLevel = m_PeekNestLevel; m_curtoken = m_peek; } else m_curtoken = DoGetToken(bGetValue, bTemplate); m_peekavailable = false; return ThisOrReplacement(m_curtoken); }
// Split the parenthesized argument list at the current position into its
// comma-separated pieces, adding one trimmed string per argument to
// 'results'. Does nothing if the next non-space character is not '('.
// Nested parentheses are kept intact inside their argument.
void Tokenizer::SpliteArguments(wxArrayString& results)
{
    while (SkipWhiteSpace() || SkipComment())
        ;
    if (CurrentChar() != _T('('))
        return;

    MoveToNextChar(); // Skip the '('

    int level = 1; // include '('
    wxString piece;
    while (NotEOF())
    {
        wxString token = DoGetToken();
        if (token.IsEmpty())
            break;

        if (token == _T("("))
            ++level;
        else if (token == _T(")"))
            --level;

        // FIX: only a comma at the top level (level == 1) separates
        // arguments; previously ANY comma split the piece, so a nested
        // call such as "f(a, b)" was wrongly broken into two arguments.
        if (token == _T(",") && level == 1)
        {
            results.Add(piece);
            piece.Clear();
        }
        else if (level != 0)
        {
            // Join tokens with a single space, but never start with one.
            if (!piece.IsEmpty() && piece.Last() > _T(' '))
                piece << _T(" ");
            piece << token;
        }

        if (level == 0)
        {
            // Matching ')' reached: flush the final argument and stop.
            if (!piece.IsEmpty())
                results.Add(piece);
            break;
        }

        while (SkipWhiteSpace() || SkipComment())
            ;
    }
}
// Return the next token without consuming it, honoring the value/template
// modes. The first call computes the token, caches the post-peek position
// in the m_Peek* members, and rolls the stream back; later calls reuse the
// cached token until GetToken() clears it.
wxString Tokenizer::PeekToken(bool bGetValue, bool bTemplate)
{
    if (!m_peekavailable)
    {
        m_peekavailable = true;

        // Remember where we are so the peek does not advance the stream.
        const unsigned int rollbackTokenIndex = m_TokenIndex;
        const unsigned int rollbackLineNumber = m_LineNumber;
        const unsigned int rollbackNestLevel  = m_NestLevel;

        m_peek = DoGetToken(bGetValue, bTemplate);

        // Cache the position reached by the peek for GetToken() to adopt.
        m_PeekTokenIndex = m_TokenIndex;
        m_PeekLineNumber = m_LineNumber;
        m_PeekNestLevel  = m_NestLevel;

        // Roll the stream back.
        m_TokenIndex = rollbackTokenIndex;
        m_LineNumber = rollbackLineNumber;
        m_NestLevel  = rollbackNestLevel;
    }

    return m_peek;
}
// Collect the tokens remaining on the current logical line into 'tokens'.
// A parenthesized run "( ... )" is concatenated and added as one entry;
// other tokens are added individually. The tokenizer is temporarily put in
// raw-expression mode so nothing is expanded while scanning.
void Tokenizer::ReadToEOL(wxArrayString& tokens)
{
    // need to force the tokenizer skip raw expression
    const TokenizerState oldState = m_State;
    m_State = tsReadRawExpression;

    const unsigned int undoIndex = m_TokenIndex;
    const unsigned int undoLine = m_LineNumber;

    // Find where the line ends, record the remaining-buffer length there,
    // then rewind; that length serves as the loop's stop condition below.
    SkipToEOL(false);
    const unsigned int lastBufferLen = m_BufferLen - m_TokenIndex;
    m_TokenIndex = undoIndex;
    m_LineNumber = undoLine;

    int level = 0;     // parenthesis nesting depth
    wxArrayString tmp; // tokens of the currently open "(...)" group

    while (m_BufferLen - m_TokenIndex > lastBufferLen)
    {
        while (SkipComment())
            ;
        wxString token = DoGetToken();

        // Skip whitespace-like tokens and line continuations.
        // NOTE(review): token[0] on an empty token would be invalid; this
        // relies on DoGetToken() never returning empty inside the line
        // bounds -- confirm against DoGetToken().
        if (token[0] <= _T(' ') || token == _T("\\"))
            continue;

        if (token[0] == _T('('))
            ++level;

        if (level == 0)
        {
            if (tmp.IsEmpty())
            {
                // Plain token outside any parentheses.
                if (!token.Trim().IsEmpty())
                    tokens.Add(token);
            }
            else
            {
                // The previously collected "(...)" group is complete
                // (level dropped back to 0 on the prior token): emit it
                // as a single concatenated entry.
                wxString blockStr;
                for (size_t i = 0; i < tmp.GetCount(); ++i)
                    blockStr << tmp[i];
                tokens.Add(blockStr.Trim());
                tmp.Clear();
            }
        }
        else
            tmp.Add(token);

        // Decrement AFTER processing so the ')' itself lands in tmp.
        if (token[0] == _T(')'))
            --level;
    }

    // Flush anything still pending when the line ended.
    if (!tmp.IsEmpty())
    {
        if (level == 0)
        {
            // Balanced group pending: emit it as one entry.
            wxString blockStr;
            for (size_t i = 0; i < tmp.GetCount(); ++i)
                blockStr << tmp[i];
            tokens.Add(blockStr.Trim());
        }
        else
        {
            // Unbalanced parentheses: fall back to individual tokens.
            for (size_t i = 0; i < tmp.GetCount(); ++i)
            {
                if (!tmp[i].Trim().IsEmpty())
                    tokens.Add(tmp[i]);
            }
        }
    }

    m_State = oldState;
}
// Evaluate the conditional expression on the current line (e.g. the text of
// an #if/#elif) and return its boolean result. Known preprocessor macros are
// expanded (possibly by re-parsing replacement text in the buffer); unknown
// identifiers evaluate to 0. If postfix evaluation fails, returns true.
bool Tokenizer::CalcConditionExpression()
{
    // need to force the tokenizer skip raw expression
    const TokenizerState oldState = m_State;
    m_State = tsReadRawExpression;

    const unsigned int undoIndex = m_TokenIndex;
    const unsigned int undoLine = m_LineNumber;

    // Measure the remaining buffer length at EOL, then rewind; that length
    // is the loop's stop condition below.
    SkipToEOL(false);
    const unsigned int lastBufferLen = m_BufferLen - m_TokenIndex;
    m_TokenIndex = undoIndex;
    m_LineNumber = undoLine;

    Expression exp;
    while (m_BufferLen - m_TokenIndex > lastBufferLen)
    {
        while (SkipComment())
            ;
        wxString token = DoGetToken();

        // Ignore whitespace-like tokens, the "defined" keyword and line
        // continuations.
        // NOTE(review): token[0] on an empty token would be invalid --
        // relies on DoGetToken() returning non-empty inside the line.
        if (token[0] <= _T(' ') || token == _T("defined") || token == _T("\\"))
            continue;

        if (token.Len() > 1 && !wxIsdigit(token[0])) // handle macro
        {
            const int id = m_TokensTree->TokenExists(token, -1, tkPreprocessor);
            if (id != -1)
            {
                Token* tk = m_TokensTree->at(id);
                if (tk)
                {
                    if (tk->m_Type.IsEmpty() || tk->m_Type == token)
                    {
                        if (tk->m_Args.IsEmpty())
                        {
                            // Defined with no value/args: counts as true.
                            exp.AddToInfixExpression(_T("1"));
                            continue;
                        }
                        else
                        {
                            // Re-parse the macro's argument text in place.
                            if (ReplaceBufferForReparse(tk->m_Args, false))
                                continue;
                        }
                    }
                    else if (!tk->m_Args.IsEmpty())
                    {
                        // Function-like macro: substitute its expansion.
                        if (ReplaceMacroActualContext(tk, false))
                            continue;
                    }
                    else if (wxIsdigit(tk->m_Type[0]))
                        // Macro expands directly to a number: use the value.
                        token = tk->m_Type;
                    else if (tk->m_Type != tk->m_Name)
                    {
                        // Macro aliases another identifier: re-parse it.
                        if (ReplaceBufferForReparse(tk->m_Type, false))
                            continue;
                    }
                }
            }
            else
            {
                // Unknown identifier: the preprocessor treats it as 0.
                exp.AddToInfixExpression(_T("0"));
                continue;
            }
        }

        // only remaining number now
        if (!token.StartsWith(_T("0x")))
            exp.AddToInfixExpression(token);
        else
        {
            // Convert hexadecimal literals to decimal for the evaluator;
            // unparsable hex degrades to 0.
            long value;
            if (token.ToLong(&value, 16))
                exp.AddToInfixExpression(wxString::Format(_T("%ld"), value));
            else
                exp.AddToInfixExpression(_T("0"));
        }
    }

    // reset tokenizer's functionality
    m_State = oldState;

    exp.ConvertInfixToPostfix();
    if (exp.CalcPostfix())
    {
        TRACE(_T("CalcConditionExpression() : exp.GetStatus() : %d, exp.GetResult() : %d"),
              exp.GetStatus(), exp.GetResult());
        return exp.GetStatus() && exp.GetResult();
    }

    // NOTE(review): a failed postfix evaluation yields true -- presumably so
    // that an unevaluable #if block is still parsed; confirm intent.
    return true;
}
// Replace the token 'str' in place according to (a) the tokens tree when a
// replacement parse is active, and (b) the static s_Replacements table.
// Table entries starting with '+' or '-' are directives that additionally
// consume input: '+' eats the following "(...)" and substitutes the rest of
// the entry, '-' skips forward until the rest of the entry is matched.
void Tokenizer::MacroReplace(wxString& str)
{
    if (m_IsReplaceParsing)
    {
        // During a replacement parse, expand preprocessor macros found in
        // the tokens tree first.
        const int id = m_TokensTree->TokenExists(str, -1, tkPreprocessor);
        if (id != -1)
        {
            Token* tk = m_TokensTree->at(id);
            if (tk)
            {
                bool replaced = false;
                if (!tk->m_Args.IsEmpty())
                    replaced = ReplaceMacroActualContext(tk, false);
                else if (tk->m_Type != tk->m_Name)
                    replaced = ReplaceBufferForReparse(tk->m_Type, false);
                if (replaced || tk->m_Type.IsEmpty())
                {
                    // The buffer changed (or the macro is empty): re-read
                    // the next real token as the replacement result.
                    SkipUnwanted();
                    str = DoGetToken();
                }
            }
        }
    }

    wxStringHashMap::const_iterator it = s_Replacements.find(str);
    if (it == s_Replacements.end())
        return;

    TRACE(_T("MacroReplace() : Replacing '%s' with '%s' (file='%s', line='%d')."),
          it->first.wx_str(), it->second.wx_str(), m_Filename.wx_str(), m_LineNumber);

    if (it->second.IsEmpty())
    {
        // Empty replacement: drop the token and take the next one.
        SkipUnwanted();
        str = DoGetToken();
    }
    else if (it->second[0] == _T('+'))
    {
        // '+' directive: consume the following parenthesized part, then
        // substitute the remainder of the table entry.
        while (SkipWhiteSpace() || SkipComment())
            ;
        DoGetToken(); // eat (...)
        wxString target = (const wxChar*)it->second + 1; // skip the '+'
        if (target.IsEmpty())
        {
            while (SkipWhiteSpace() || SkipComment())
                ;
            str = DoGetToken();
        }
        else if (target != str && ReplaceBufferForReparse(target, false))
            str = DoGetToken();
    }
    else if (it->second[0] == _T('-'))
    {
        // '-' directive: skip input until the token given after the '-' is
        // found, then continue with the next real token.
        wxString end((const wxChar*)it->second + 1);
        if (end.IsEmpty())
            return;

        while (NotEOF())
        {
            // NOTE(review): uses && here (stops unless both skips succeed),
            // unlike the || pattern used elsewhere -- possibly intentional,
            // verify.
            while (SkipComment() && SkipWhiteSpace())
                ;
            if (CurrentChar() == end[0])
            {
                if (DoGetToken() == end)
                    break;
            }
            else
                MoveToNextChar();
        }

        // eat ()
        SkipUnwanted();
        str = DoGetToken();
        if (str[0] == _T('('))
        {
            SkipUnwanted();
            str = DoGetToken();
        }
    }
    else
    {
        // Plain replacement: re-parse the substituted text unless it is the
        // token itself (which would loop).
        if (it->second != str && ReplaceBufferForReparse(it->second, false))
            str = DoGetToken();
    }
}