// caches the inheritance info for each token (recursive function) void TokensTree::RecalcFullInheritance(int parentIdx, TokenIdxSet& result) { // no parent token? no ancestors... if (parentIdx == -1) return; // no parent token? no ancestors... Token* ancestor = at(parentIdx); if (!ancestor) return; // only classes take part in inheritance if (!(ancestor->m_TokenKind & (tkClass | tkTypedef))) return; TRACE(cc_text("RecalcFullInheritance() : Anc: '%s'"), ancestor->m_Name.wx_str()); // for all its ancestors for (TokenIdxSet::iterator it = ancestor->m_Ancestors.begin(); it != ancestor->m_Ancestors.end(); it++) { if (*it != -1 && // not global scope *it != parentIdx && // not the same token (avoid infinite loop) result.find(*it) == result.end()) // not already in the set (avoid infinite loop) { // add it to the set result.insert(*it); // and recurse for its ancestors RecalcFullInheritance(*it, result); } } }
// Collect into 'result' the indices of all tokens whose name matches 'query'
// (optionally case-sensitive / prefix match), filtered by 'kindMask'.
// Returns the number of collected indices.
size_t TokenTree::FindMatches(const wxString& query, TokenIdxSet& result, bool caseSensitive, bool is_prefix, TokenKind kindMask)
{
    result.clear();

    // positions of all matching keywords in the search tree
    std::set<size_t> lists;
    if (!m_Tree.FindMatches(query, lists, caseSensitive, is_prefix))
        return 0;

    // every keyword position maps to a set of token indices sharing that
    // name, e.g. ClassA::foo and ClassB::foo live under the same keyword
    for (std::set<size_t>::const_iterator lit = lists.begin(); lit != lists.end(); ++lit)
    {
        const TokenIdxSet* idxSet = &(m_Tree.GetItemAtPos(*lit));
        if (!idxSet)
            continue;

        for (TokenIdxSet::const_iterator tit = idxSet->begin(); tit != idxSet->end(); ++tit)
        {
            const Token* token = at(*tit);
            if (!token)
                continue;
            // tkUndefined acts as "any kind"
            if ((kindMask == tkUndefined) || (token->m_TokenKind & kindMask))
                result.insert(*tit);
        }
    }

    return result.size();
}
int TokenTree::TokenExists(const wxString& name, const TokenIdxSet& parents, short int kindMask) { int idx = m_Tree.GetItemNo(name); if (!idx) return -1; TokenIdxSet::const_iterator it; TokenIdxSet& curList = m_Tree.GetItemAtPos(idx); int result = -1; for (it = curList.begin(); it != curList.end(); ++it) { result = *it; if (result < 0 || (size_t)result >= m_Tokens.size()) continue; const Token* curToken = m_Tokens[result]; if (!curToken) continue; if (curToken->m_TokenKind & kindMask) { for ( TokenIdxSet::const_iterator pIt = parents.begin(); pIt != parents.end(); ++pIt ) { if (curToken->m_ParentIndex == *pIt) return result; } } } return -1; }
// Serialize a TokenIdxSet to stream 'f': the element count first, then each
// index in the set's (sorted) iteration order.
inline void SaveTokenIdxSetToFile(std::ostream& f, const TokenIdxSet& data)
{
    SaveIntToFile(f, (int)(data.size()));
    // const_iterator: 'data' is a const reference (the original non-const
    // iterator only compiled because std::set's iterators are both const);
    // pre-increment avoids a needless iterator copy
    for (TokenIdxSet::const_iterator it = data.begin(); it != data.end(); ++it)
    {
        int num = *it;
        SaveIntToFile(f, num);
    }
}
// Find all tokens declared/implemented in 'filename' (filtered by kindMask)
// and store their indices in 'result'. Thread-safe: takes the token-tree
// mutex for the duration of the lookup. Returns the number of tokens found.
size_t ParserBase::FindTokensInFile(const wxString& filename, TokenIdxSet& result, short int kindMask)
{
    result.clear();

    TRACE(_T("Parser::FindTokensInFile() : Searching for file '%s' in tokens tree..."), filename.wx_str());

    size_t tokens_found = 0;

    CC_LOCKER_TRACK_TT_MTX_LOCK(s_TokenTreeMutex)

    TokenIdxSet fileTokens;
    if ( m_TokenTree->FindTokensInFile(filename, fileTokens, kindMask) )
    {
        // keep only indices that still resolve to a live token
        for (TokenIdxSet::const_iterator it = fileTokens.begin(); it != fileTokens.end(); ++it)
        {
            if (m_TokenTree->at(*it))
                result.insert(*it);
        }
        tokens_found = result.size();
    }

    CC_LOCKER_TRACK_TT_MTX_UNLOCK(s_TokenTreeMutex)

    return tokens_found;
}
// Collect into 'result' the indices of all tokens whose name matches 's'
// (optionally case-sensitive / prefix match), filtered by 'kindMask'
// (0xffff means "any kind"). Returns the number of collected indices.
size_t TokensTree::FindMatches(const cc_string& s, TokenIdxSet& result, bool caseSensitive, bool is_prefix, int kindMask)
{
    result.clear();

    set<size_t> lists;
    int numitems = m_Tree.FindMatches(s, lists, caseSensitive, is_prefix);
    if (!numitems)
        return 0;

    // now 'lists' contains positions of all matching keywords in the search
    // tree; every position maps to the set of token indices sharing that
    // name, e.g. ClassA::foo and ClassB::foo
    for (set<size_t>::iterator it = lists.begin(); it != lists.end(); ++it)
    {
        TokenIdxSet* curset = &(m_Tree.GetItemAtPos(*it));
        for (TokenIdxSet::iterator it2 = curset->begin(); it2 != curset->end(); ++it2)
        {
            // BUGFIX: test the kind of the candidate token (*it2) - the old
            // code looked up at(*it), which is a keyword position, not a
            // token index; also guard against a null token entry
            Token* token = at(*it2);
            if (!token)
                continue;
            if (kindMask == 0xffff || (token->m_TokenKind & kindMask))
                result.insert(*it2);
        }
    }
    return result.size();
}
// Collect into 'result' the indices of all tokens belonging to 'filename',
// filtered by 'kindMask'. Returns the number of collected indices.
size_t TokenTree::FindTokensInFile(const wxString& filename, TokenIdxSet& result, short int kindMask)
{
    result.clear();

    // normalise path separators so the lookup matches the stored file names
    wxString f(filename);
    while (f.Replace(_T("\\"),_T("/")))
        ;

    if ( !m_FilenameMap.HasItem(f) )
    {
        TRACE(_T("TokenTree::FindTokensInFile() : File '%s' not found in file names map."), f.wx_str());
        return 0;
    }

    const int idx = m_FilenameMap.GetItemNo(f);

    // now get the tokens set matching this file index
    TokenFileMap::iterator itf = m_FileMap.find(idx);
    if (itf == m_FileMap.end())
    {
        TRACE(_T("TokenTree::FindTokensInFile() : No tokens found for file '%s' (index %d)."), f.wx_str(), idx);
        return 0;
    }

    // copy the surviving, kind-matching indices into the final result set
    const TokenIdxSet& tokens = itf->second;
    for (TokenIdxSet::const_iterator it = tokens.begin(); it != tokens.end(); ++it)
    {
        const Token* token = at(*it);
        if (token && (kindMask & token->m_TokenKind))
            result.insert(*it);
    }

    TRACE(_T("TokenTree::FindTokensInFile() : Found %lu results for file '%s'."), static_cast<unsigned long>(result.size()), f.wx_str());
    return result.size();
}
// Return the index of a token named 'name' of exactly kind 'kind', with
// matching 'baseArgs' (ignored for container kinds, see below), whose parent
// is one of 'parents'; -1 if no such token exists.
int TokenTree::TokenExists(const wxString& name, const wxString& baseArgs, const TokenIdxSet& parents, TokenKind kind)
{
    int idx = m_Tree.GetItemNo(name);
    if (!idx)
        return -1;

    TokenIdxSet::const_iterator it;
    TokenIdxSet& curList = m_Tree.GetItemAtPos(idx);
    int result = -1;
    for (it = curList.begin(); it != curList.end(); ++it)
    {
        result = *it;
        // skip stale or out-of-range indices
        if (result < 0 || (size_t)result >= m_Tokens.size())
            continue;
        const Token* curToken = m_Tokens[result];
        if (!curToken)
            continue;
        // for a container token, their args member variable is used to store inheritance information
        // so, don't compare args for tkAnyContainer
        if ( curToken->m_TokenKind == kind && ( curToken->m_BaseArgs == baseArgs || kind & tkAnyContainer ))
        {
            // accept the first candidate parented by one of the requested scopes
            for ( TokenIdxSet::const_iterator pIt = parents.begin(); pIt != parents.end(); ++pIt )
            {
                if (curToken->m_ParentIndex == *pIt)
                    return result;
            }
        }
    }
    return -1;
}
// Collect into 'result' the indices of all tokens belonging to 'file',
// filtered by 'kindMask'. Returns the number of collected indices.
size_t TokensTree::FindTokensInFile(const cc_string& file, TokenIdxSet& result, short int kindMask)
{
    result.clear();

    // get file idx
    if (!m_FilenamesMap.HasItem(file))
        return 0;

    int idx = m_FilenamesMap.GetItemIdx(file);

    // now get the tokens set matching this file idx
    TokenFilesMap::iterator itf = m_FilesMap.find(idx);
    if (itf == m_FilesMap.end())
        return 0;

    TokenIdxSet& tokens = itf->second;

    // loop all results and add to final result set after filtering on token kind
    for (TokenIdxSet::iterator it = tokens.begin(); it != tokens.end(); ++it)
    {
        Token* token = at(*it);
        // BUGFIX: at() can return 0 for a stale index; the old code
        // dereferenced it unconditionally and could crash
        if (token && (kindMask & token->m_TokenKind))
            result.insert(*it);
    }
    return result.size();
}
// Handler for the "Find" control: locates a token either by numeric ID or by
// a wildcard name mask (asking the user to disambiguate multiple matches),
// then shows its details via DisplayTokenInfo().
void CCDebugInfo::OnFindClick(wxCommandEvent& /*event*/)
{
    TokensTree* tokens = m_Parser->GetTokensTree();
    wxString search = txtFilter->GetValue();

    m_Token = 0;

    // first determine if the user entered an ID or a search mask
    long unsigned id;
    if (search.ToULong(&id, 10))
    {
        // easy; ID
        m_Token = tokens->at(id);
    }
    else
    {
        // find all matching tokens
        TokenIdxSet result;
        for (size_t i = 0; i < tokens->size(); ++i)
        {
            Token* token = tokens->at(i);
            // Matches() treats 'search' as a wildcard pattern
            if (token && token->m_Name.Matches(search))
                result.insert(i);
        }

        // a single result?
        if (result.size() == 1)
            m_Token = tokens->at(*(result.begin()));
        else
        {
            // fill a list and ask the user which token to display
            // (indices in 'result' were inserted with non-null tokens above)
            wxArrayString arr;
            wxArrayInt intarr;
            for (TokenIdxSet::iterator it = result.begin(); it != result.end(); ++it)
            {
                Token* token = tokens->at(*it);
                arr.Add(token->DisplayName());
                intarr.Add(*it);
            }
            int sel = wxGetSingleChoiceIndex(_("Please make a selection:"), _("Multiple matches"), arr, this);
            if (sel == -1)
                return; // user cancelled the choice dialog
            m_Token = tokens->at(intarr[sel]);
        }
    }

    DisplayTokenInfo();
}
// Collect into 'result' the indices of all tokens whose name matches 's'
// (optionally case-sensitive / prefix match), filtered by 'kindMask'
// (0xffff means "any kind"). Returns the number of collected indices.
size_t TokensTree::FindMatches(const wxString& s, TokenIdxSet& result, bool caseSensitive, bool is_prefix, int kindMask)
{
    result.clear();

    set<size_t> lists;
    int numitems = m_Tree.FindMatches(s, lists, caseSensitive, is_prefix);
    if (!numitems)
        return 0;

    // each entry in 'lists' is a keyword position in the search tree; every
    // keyword maps to the set of token indices that share that name
    for (set<size_t>::iterator it = lists.begin(); it != lists.end(); ++it)
    {
        TokenIdxSet* curset = &(m_Tree.GetItemAtPos(*it));
        for (TokenIdxSet::iterator it2 = curset->begin(); it2 != curset->end(); ++it2)
        {
            // BUGFIX: test the kind of the candidate token (*it2) - the old
            // code looked up at(*it), which is a keyword position, not a
            // token index; also guard against a null token entry
            Token* token = at(*it2);
            if (!token)
                continue;
            if (kindMask == 0xffff || (token->m_TokenKind & kindMask))
                result.insert(*it2);
        }
    }
    return result.size();
}
// Find all tokens declared/implemented in 'fileName' (filtered by kindMask)
// and store their indices in 'result'. Takes the token-tree critical section
// for the lookup. Returns the number of tokens found.
size_t ParserBase::FindTokensInFile(const wxString& fileName, TokenIdxSet& result, short int kindMask)
{
    result.clear();

    wxString file = UnixFilename(fileName);
    TRACE(_T("Parser::FindTokensInFile() : Searching for file '%s' in tokens tree..."), file.wx_str());

    TRACK_THREAD_LOCKER(s_TokensTreeCritical);
    wxCriticalSectionLocker locker(s_TokensTreeCritical);
    THREAD_LOCKER_SUCCESS(s_TokensTreeCritical);

    TokenIdxSet found;
    if ( !m_TokensTree->FindTokensInFile(file, found, kindMask) )
        return 0;

    // keep only indices that still resolve to a live token
    for (TokenIdxSet::const_iterator it = found.begin(); it != found.end(); ++it)
    {
        if (m_TokensTree->at(*it))
            result.insert(*it);
    }

    return result.size();
}
// Entry point for a refactoring action: resolves the symbol under the cursor,
// decides the search scope (current file only for local variables or when no
// project is active, otherwise user-selected open/project files), then
// searches and verifies all occurrences. Returns true if at least one
// occurrence was confirmed.
bool CodeRefactoring::Parse()
{
    cbEditor* editor = Manager::Get()->GetEditorManager()->GetBuiltinActiveEditor();
    if (!editor)
        return false;

    const wxString targetText = GetSymbolUnderCursor();
    if (targetText.IsEmpty())
        return false;

    // resolve the symbol under the cursor to token indices
    TokenIdxSet targetResult;
    const int endOfWord = editor->GetControl()->WordEndPosition(editor->GetControl()->GetCurrentPos(), true);
    m_NativeParser.MarkItemsByAI(targetResult, true, false, true, endOfWord);
    if (targetResult.empty())
    {
        cbMessageBox(_("Symbol not found under cursor!"), _("Code Refactoring"), wxOK | wxICON_WARNING);
        return false;
    }

    // handle local variables: a token whose parent is a function is local to
    // that function, so only the current file needs to be searched
    bool isLocalVariable = false;

    TokenTree* tree = m_NativeParser.GetParser().GetTokenTree();

    CC_LOCKER_TRACK_TT_MTX_LOCK(s_TokenTreeMutex)

    const Token* token = tree->at(*targetResult.begin());
    if (token)
    {
        const Token* parent = tree->at(token->m_ParentIndex);
        if (parent && parent->m_TokenKind == tkFunction)
            isLocalVariable = true;
    }

    CC_LOCKER_TRACK_TT_MTX_UNLOCK(s_TokenTreeMutex)

    wxArrayString files;
    cbProject* project = m_NativeParser.GetProjectByEditor(editor);
    if (isLocalVariable || !project)
        files.Add(editor->GetFilename());
    else
    {
        // let the user choose between the opened files and all project files
        ScopeDialog scopeDlg(Manager::Get()->GetAppWindow(), _("Code Refactoring"));
        const int ret = scopeDlg.ShowModal();
        if (ret == ScopeDialog::ID_OPEN_FILES)
            GetOpenedFiles(files);
        else if (ret == ScopeDialog::ID_PROJECT_FILES)
            GetAllProjectFiles(files, project);
        else
            return false; // dialog cancelled
    }

    if (files.IsEmpty())
        return false;

    // textual search first, then semantic verification of the hits
    size_t count = SearchInFiles(files, targetText);
    if (count)
        count = VerifyResult(targetResult, targetText, isLocalVariable);

    return count != 0;
}
// Remove 'oldToken' (and, recursively, its children and descendants) from the
// tree, detaching it from its parent, ancestors and the search trees first.
void TokensTree::RemoveToken(Token* oldToken)
{
    if(!oldToken)
        return;

    int idx = oldToken->m_Self;
    // sanity: the token must still occupy the slot it claims
    if(m_Tokens[idx]!=oldToken)
        return;

    // Step 1: Detach token from its parent

    Token* parentToken = 0;
    // repair an out-of-range parent index before using it
    if((size_t)(oldToken->m_ParentIndex) >= m_Tokens.size())
        oldToken->m_ParentIndex = -1;
    if(oldToken->m_ParentIndex >= 0)
        parentToken = m_Tokens[oldToken->m_ParentIndex];
    if(parentToken)
        parentToken->m_Children.erase(idx);

    TokenIdxSet nodes;
    TokenIdxSet::iterator it;

    // Step 2: Detach token from its ancestors

    nodes = (oldToken->m_DirectAncestors);
    for(it = nodes.begin();it!=nodes.end(); it++)
    {
        int ancestoridx = *it;
        if(ancestoridx < 0 || (size_t)ancestoridx >= m_Tokens.size())
            continue;
        Token* ancestor = m_Tokens[ancestoridx];
        if(ancestor)
            ancestor->m_Descendants.erase(idx);
    }
    oldToken->m_Ancestors.clear();
    oldToken->m_DirectAncestors.clear();

    // Step 3: Remove children

    nodes = (oldToken->m_Children); // Copy the list to avoid interference
    for(it = nodes.begin();it!=nodes.end(); it++)
        RemoveToken(*it);
    // m_Children SHOULD be empty by now - but clear anyway.
    oldToken->m_Children.clear();

    // Step 4: Remove descendants

    nodes = oldToken->m_Descendants; // Copy the list to avoid interference
    for(it = nodes.begin();it!=nodes.end(); it++)
    {
        // BUGFIX: a token must never be its own descendant, yet this can
        // happen in practice (e.g. with some boost code); recursing into
        // ourselves would never terminate - same guard as in eraseToken()
        if(*it == idx)
            break;
        RemoveToken(*it);
    }
    // m_Descendants SHOULD be empty by now - but clear anyway.
    oldToken->m_Descendants.clear();

    // Step 5: Detach token from the SearchTrees

    int idx2 = m_Tree.GetItemNo(oldToken->m_Name);
    if(idx2)
    {
        TokenIdxSet& curlist = m_Tree.GetItemAtPos(idx2);
        curlist.erase(idx);
    }

    // Now, from the global namespace (if applicable)
    if(oldToken->m_ParentIndex == -1)
    {
        m_GlobalNameSpace.erase(idx);
        m_TopNameSpaces.erase(idx);
    }

    // Step 6: Finally, remove it from the list.
    RemoveTokenFromList(idx);
}
// Rebuild inheritance information (ancestors/descendants) for every container
// token in the tree: the first pass resolves each token's textual ancestor
// list (m_AncestorsString) to token indices, the second pass expands the
// direct ancestors to the full (transitive) inheritance set.
void TokensTree::RecalcData()
{
    TRACE(cc_text("RecalcData() : Calculating full inheritance tree."));

    // first loop to convert ancestors string to token indices for each token
    for (size_t i = 0; i < size(); ++i)
    {
        Token* token = at(i);
        if (!token)
            continue;
        if (!(token->m_TokenKind & (tkClass | tkTypedef | tkEnum)))
            continue;
        if (token->m_AncestorsString.empty())
            continue;

        // only local symbols might change inheritance
        // if (!token->m_IsLocal)
        //     continue;

        token->m_DirectAncestors.clear();
        token->m_Ancestors.clear();

        TRACE(cc_text("RecalcData() : Token %s, Ancestors %s"), token->m_Name.wx_str(), token->m_AncestorsString.wx_str());

        // the ancestor string is a comma-separated list of base names
        StringTokenizer tkz(token->m_AncestorsString, cc_text(","));
        while (tkz.HasMoreTokens())
        {
            cc_string ancestor = tkz.GetNextToken();
            if (ancestor.empty() || ancestor == token->m_Name)
                continue;

            TRACE(cc_text("RecalcData() : Ancestor %s"), ancestor.wx_str());

            // ancestors might contain namespaces, e.g. NS::Ancestor
            if (ancestor.find(cc_text("::")) != cc_string::npos)
            {
                // resolve the qualified name component by component, walking
                // down the scope chain
                Token* ancestorToken = 0;
                StringTokenizer anctkz(ancestor, cc_text("::"));
                while (anctkz.HasMoreTokens())
                {
                    cc_string ns = anctkz.GetNextToken();
                    if (!ns.empty())
                    {
                        int ancestorIdx = TokenExists(ns, ancestorToken ? ancestorToken->GetSelf() : -1, tkNamespace | tkClass | tkTypedef);
                        ancestorToken = at(ancestorIdx);
                        // ancestorToken = token->HasChildToken(ns, tkNamespace | tkClass);
                        if (!ancestorToken) // unresolved
                            break;
                    }
                }
                if (ancestorToken && ancestorToken != token && ancestorToken->m_TokenKind == tkClass)// && !ancestorToken->m_IsTypedef)
                {
                    TRACE(cc_text("RecalcData() : Resolved to %s"), ancestorToken->m_Name.wx_str());
                    // link both directions: we gain an ancestor, it gains a descendant
                    token->m_Ancestors.insert(ancestorToken->GetSelf());
                    ancestorToken->m_Descendants.insert(i);
                    TRACE(cc_text("RecalcData() : + '%s'"), ancestorToken->m_Name.wx_str());
                }
                else
                    TRACE(cc_text("RecalcData() : ! '%s' (unresolved)"), ancestor.wx_str());
            }
            else // no namespaces in ancestor
            {
                // accept multiple matches for inheritance
                TokenIdxSet result;
                FindMatches(ancestor, result, true, false);
                for (TokenIdxSet::iterator it = result.begin(); it != result.end(); it++)
                {
                    Token* ancestorToken = at(*it);
                    // only classes take part in inheritance
                    if (   ancestorToken
                        && (ancestorToken != token)
                        && (   (ancestorToken->m_TokenKind == tkClass)
                            || (ancestorToken->m_TokenKind == tkEnum)
                            || (ancestorToken->m_TokenKind == tkTypedef) ) ) // && !ancestorToken->m_IsTypedef)
                    {
                        token->m_Ancestors.insert(*it);
                        ancestorToken->m_Descendants.insert(i);
                        TRACE(cc_text("RecalcData() : + '%s'"), ancestorToken->m_Name.wx_str());
                    }
                }
#if TOKEN_DEBUG_OUTPUT
                if (result.empty())
                    TRACE(_T("RecalcData() : ! '%s' (unresolved)"), ancestor.wx_str());
#endif
            }
        }

        // remember the direct ancestors before the transitive expansion below
        token->m_DirectAncestors = token->m_Ancestors;

        if (!token->m_IsLocal) // global symbols are linked once
        {
            TRACE(cc_text("RecalcData() : Removing ancestor string from %s"), token->m_Name.wx_str(), token->m_Name.wx_str());
            token->m_AncestorsString.clear();
        }
    }

    // second loop to calculate full inheritance for each token
    for (size_t i = 0; i < size(); ++i)
    {
        Token* token = at(i);
        if (!token)
            continue;
        if (!(token->m_TokenKind & (tkClass | tkTypedef | tkEnum)))
            continue;

        // recalc: collect the transitive closure of the direct ancestors
        TokenIdxSet result;
        for (TokenIdxSet::iterator it = token->m_Ancestors.begin(); it != token->m_Ancestors.end(); it++)
            RecalcFullInheritance(*it, result);

        // now, add the resulting set to ancestors set
        for (TokenIdxSet::iterator it = result.begin(); it != result.end(); it++)
        {
            Token* ancestor = at(*it);
            if (ancestor)
            {
                token->m_Ancestors.insert(*it);
                ancestor->m_Descendants.insert(i);
            }
        }

#if TOKEN_DEBUG_OUTPUT
        // debug loop
        TRACE(_T("RecalcData() : Ancestors for %s:"),token->m_Name.wx_str());
        for (TokenIdxSet::iterator it = token->m_Ancestors.begin(); it != token->m_Ancestors.end(); it++)
            TRACE(_T("RecalcData() : + %s"), at(*it)->m_Name.wx_str());
#endif
    }

    TRACE(cc_text("RecalcData() : Full inheritance calculated."));
}
// Rebuild inheritance information (ancestors/descendants) for every container
// token in the tree: the first pass resolves each token's textual ancestor
// list (m_AncestorsString) to token indices, the second pass expands the
// direct ancestors to the full (transitive) inheritance set.
void TokensTree::RecalcData()
{
//    Manager::Get()->GetMessageManager()->DebugLog(_T("Calculating full inheritance tree"));

    // first loop to convert ancestors string to token indices for each token
    for (size_t i = 0; i < size(); ++i)
    {
        Token* token = at(i);
        if (!token)
            continue;
        if (!(token->m_TokenKind & (tkClass | tkTypedef | tkEnum)))
            continue;
        if (token->m_AncestorsString.IsEmpty())
            continue;

        // only local symbols might change inheritance
        // if (!token->m_IsLocal)
        //     continue;

        token->m_DirectAncestors.clear();
        token->m_Ancestors.clear();

//        Manager::Get()->GetMessageManager()->DebugLog(_T(" : '%s'"), token->m_Name.c_str());
        //Manager::Get()->GetMessageManager()->DebugLog("Token %s, Ancestors %s", token->m_Name.c_str(), token->m_AncestorsString.c_str());

        // the ancestor string is a comma-separated list of base names
        wxStringTokenizer tkz(token->m_AncestorsString, _T(","));
        while (tkz.HasMoreTokens())
        {
            wxString ancestor = tkz.GetNextToken();
            if (ancestor.IsEmpty() || ancestor == token->m_Name)
                continue;
//            Manager::Get()->GetMessageManager()->DebugLog(_T("Ancestor %s"), ancestor.c_str());

            // ancestors might contain namespaces, e.g. NS::Ancestor
            if (ancestor.Find(_T("::")) != wxNOT_FOUND)
            {
                // resolve the qualified name component by component, walking
                // down the scope chain
                Token* ancestorToken = 0;
                wxStringTokenizer anctkz(ancestor, _T("::"));
                while (anctkz.HasMoreTokens())
                {
                    wxString ns = anctkz.GetNextToken();
                    if (!ns.IsEmpty())
                    {
                        int ancestorIdx = TokenExists(ns, ancestorToken ? ancestorToken->GetSelf() : -1, tkNamespace | tkClass | tkTypedef);
                        ancestorToken = at(ancestorIdx);
                        // ancestorToken = token->HasChildToken(ns, tkNamespace | tkClass);
                        if (!ancestorToken) // unresolved
                            break;
                    }
                }
                if (ancestorToken && ancestorToken != token && ancestorToken->m_TokenKind == tkClass)// && !ancestorToken->m_IsTypedef)
                {
//                    Manager::Get()->GetMessageManager()->DebugLog(_T("Resolved to %s"), ancestorToken->m_Name.c_str());
                    // link both directions: we gain an ancestor, it gains a descendant
                    token->m_Ancestors.insert(ancestorToken->GetSelf());
                    ancestorToken->m_Descendants.insert(i);
//                    Manager::Get()->GetMessageManager()->DebugLog(_T(" + '%s'"), ancestorToken->m_Name.c_str());
                }
//                else
//                    Manager::Get()->GetMessageManager()->DebugLog(_T(" ! '%s' (unresolved)"), ancestor.c_str());
            }
            else // no namespaces in ancestor
            {
                // accept multiple matches for inheritance
                TokenIdxSet result;
                FindMatches(ancestor, result, true, false);
                for (TokenIdxSet::iterator it = result.begin(); it != result.end(); it++)
                {
                    Token* ancestorToken = at(*it);
                    // only classes take part in inheritance
                    if (ancestorToken && ancestorToken != token && (ancestorToken->m_TokenKind == tkClass || ancestorToken->m_TokenKind == tkEnum))// && !ancestorToken->m_IsTypedef)
                    {
                        token->m_Ancestors.insert(*it);
                        ancestorToken->m_Descendants.insert(i);
//                        Manager::Get()->GetMessageManager()->DebugLog(_T(" + '%s'"), ancestorToken->m_Name.c_str());
                    }
                }
//                if (result.empty())
//                    Manager::Get()->GetMessageManager()->DebugLog(_T(" ! '%s' (unresolved)"), ancestor.c_str());
            }
        }

        // remember the direct ancestors before the transitive expansion below
        token->m_DirectAncestors = token->m_Ancestors;

        if (!token->m_IsLocal) // global symbols are linked once
        {
            //Manager::Get()->GetMessageManager()->DebugLog("Removing ancestor string from %s", token->m_Name.c_str(), token->m_Name.c_str());
            token->m_AncestorsString.Clear();
        }
    }

    // second loop to calculate full inheritance for each token
    for (size_t i = 0; i < size(); ++i)
    {
        Token* token = at(i);
        if (!token)
            continue;
        if (!(token->m_TokenKind & (tkClass | tkTypedef | tkEnum)))
            continue;

        // recalc: collect the transitive closure of the direct ancestors
        TokenIdxSet result;
        for (TokenIdxSet::iterator it = token->m_Ancestors.begin(); it != token->m_Ancestors.end(); it++)
            RecalcFullInheritance(*it, result);

        // now, add the resulting set to ancestors set
        for (TokenIdxSet::iterator it = result.begin(); it != result.end(); it++)
        {
            Token* ancestor = at(*it);
            if (ancestor)
            {
                token->m_Ancestors.insert(*it);
                ancestor->m_Descendants.insert(i);
            }
        }

//        // debug loop
//        Manager::Get()->GetMessageManager()->DebugLog(_T("Ancestors for %s:"),token->m_Name.c_str());
//        for (TokenIdxSet::iterator it = token->m_Ancestors.begin(); it != token->m_Ancestors.end(); it++)
//            Manager::Get()->GetMessageManager()->DebugLog(_T(" + %s"), at(*it)->m_Name.c_str());
    }
//    Manager::Get()->GetMessageManager()->DebugLog(_T("Full inheritance calculated."));
}
// Erase 'oldToken' (and, recursively, its children and descendants) from the
// tree, detaching it from its parent, ancestors and the search trees first.
void TokensTree::eraseToken(Token* oldToken)
{
    if(!oldToken)
        return;

    int idx = oldToken->m_Self;
    // sanity: the token must still occupy the slot it claims
    if(m_Tokens[idx]!=oldToken)
        return;

    // Step 1: Detach token from its parent

    Token* parentToken = 0;
    // repair an out-of-range parent index before using it
    if((size_t)(oldToken->m_ParentIndex) >= m_Tokens.size())
        oldToken->m_ParentIndex = -1;
    if(oldToken->m_ParentIndex >= 0)
        parentToken = m_Tokens[oldToken->m_ParentIndex];
    if(parentToken)
        parentToken->m_Children.erase(idx);

    TokenIdxSet nodes;
    TokenIdxSet::iterator it;

    // Step 2: Detach token from its ancestors

    nodes = (oldToken->m_DirectAncestors);
    for(it = nodes.begin();it!=nodes.end(); it++)
    {
        int ancestoridx = *it;
        if(ancestoridx < 0 || (size_t)ancestoridx >= m_Tokens.size())
            continue;
        Token* ancestor = m_Tokens[ancestoridx];
        if(ancestor)
            ancestor->m_Descendants.erase(idx);
    }
    oldToken->m_Ancestors.clear();
    oldToken->m_DirectAncestors.clear();

    // Step 3: erase children

    nodes = (oldToken->m_Children); // Copy the list to avoid interference
    for(it = nodes.begin();it!=nodes.end(); it++)
        eraseToken(*it);
    // m_Children SHOULD be empty by now - but clear anyway.
    oldToken->m_Children.clear();

    // Step 4: erase descendants

    nodes = oldToken->m_Descendants; // Copy the list to avoid interference
    for(it = nodes.begin();it!=nodes.end(); it++)
    {
        if(*it == idx) // that should not happen, we can not be our own descendant, but in fact that can happen with boost
        {
            DebugLog(cc_text("Break out the loop to erase descendants, to avoid a crash. We can not be our own descendant !!"));
            break;
        }
        eraseToken(*it);
    }
    // m_Descendants SHOULD be empty by now - but clear anyway.
    oldToken->m_Descendants.clear();

    // Step 5: Detach token from the SearchTrees

    int idx2 = m_Tree.GetItemIdx(oldToken->m_Name);
    if(idx2)
    {
        TokenIdxSet& curlist = m_Tree.GetItemAtPos(idx2);
        curlist.erase(idx);
    }

    // Now, from the global namespace (if applicable)
    if(oldToken->m_ParentIndex == -1)
    {
        m_GlobalNameSpace.erase(idx);
        m_TopNameSpaces.erase(idx);
    }

    // Step 6: Finally, erase it from the list.
    eraseTokenFromList(idx);
}
// Resolve a code-completion 'expression' against 'searchScope' and store the
// matching token indices in 'result'. Always returns true; 'result' may be
// empty when nothing matched.
bool NativeParserTest::TestExpression(wxString& expression, const TokenIdxSet& searchScope, TokenIdxSet& result)
{
    // find all other matches
    std::queue<ParserComponent> components;
    BreakUpComponents(expression, components);

    // prefix matching is enabled so partial identifiers also get suggestions
    ResolveExpression(m_Parser.GetTokenTree(), components, searchScope, result, true, true /*isPrefix*/);

    if (s_DebugSmartSense)
        CCLogger::Get()->DebugLog(F(_T("NativeParserTest::TestExpression, returned %lu results"), static_cast<unsigned long>(result.size())));

    return true;
}
void CCDebugInfo::OnSave(wxCommandEvent& /*event*/) { TokensTree* tokens = m_Parser->GetTokensTree(); wxArrayString saveWhat; saveWhat.Add(_("Dump the tokens tree")); saveWhat.Add(_("Dump the file list")); saveWhat.Add(_("Dump the list of include directories")); saveWhat.Add(_("Dump the token list of files")); int sel = wxGetSingleChoiceIndex(_("What do you want to save?"), _("CC Debug Info"), saveWhat, this); switch (sel) { case -1: // cancelled return; case 0: { wxString tt; { // life time of wxWindowDisabler/wxBusyInfo wxWindowDisabler disableAll; wxBusyInfo running(_("Obtaining tokens tree... please wait (this may take several seconds)..."), Manager::Get()->GetAppWindow()); tt = tokens->m_Tree.dump(); } SaveCCDebugInfo(_("Save tokens tree"), tt); } break; case 1: { wxString files; for (size_t i = 0; i < tokens->m_FilenamesMap.size(); ++i) { wxString file = tokens->m_FilenamesMap.GetString(i); if (!file.IsEmpty()) files += file + _T("\r\n"); } SaveCCDebugInfo(_("Save file list"), files); } break; case 2: { wxString dirs; const wxArrayString& dirsArray = m_Parser->GetIncludeDirs(); for (size_t i = 0; i < dirsArray.GetCount(); ++i) { const wxString& dir = dirsArray[i]; if (!dir.IsEmpty()) dirs += dir + _T("\r\n"); } SaveCCDebugInfo(_("Save list of include directories"), dirs); } break; case 3: { wxString fileTokens; { wxWindowDisabler disableAll; wxBusyInfo running(_("Obtaining tokens tree... 
please wait (this may take several seconds)..."), Manager::Get()->GetAppWindow()); for (size_t i = 0; i < tokens->m_FilenamesMap.size(); ++i) { const wxString file = tokens->m_FilenamesMap.GetString(i); if (!file.IsEmpty()) { fileTokens += file + _T("\r\n"); TokenIdxSet result; tokens->FindTokensInFile(file, result, tkUndefined); for (TokenIdxSet::iterator it = result.begin(); it != result.end(); ++it) { Token* token = tokens->at(*it); fileTokens << token->GetTokenKindString() << _T(" "); if (token->m_TokenKind == tkFunction) fileTokens << token->m_Name << token->GetFormattedArgs() << _T("\t"); else fileTokens << token->DisplayName() << _T("\t"); fileTokens << _T("[") << token->m_Line << _T(",") << token->m_ImplLine << _T("]"); fileTokens << _T("\r\n"); } } fileTokens += _T("\r\n"); } } SaveCCDebugInfo(_("Save token list of files"), fileTokens); } break; default: cbMessageBox(_("Invalid selection."), _("CC Debug Info")); } }
// Parse 'filename' (a file path, or the buffer contents when !isLocalFile),
// then run the code-completion test cases embedded as trailing "//" comment
// lines in the input. Logs a PASS/FAIL line per expectation and a summary.
// Returns false if parsing failed, true otherwise.
bool NativeParserTest::ParseAndCodeCompletion(wxString filename, bool isLocalFile)
{
    Clear(); //clear the tree

    bool parseResult = false;
    parseResult = Parse(filename, isLocalFile);
    if (!parseResult)
        return false;

    int passCount = 0;
    int failCount = 0;

    wxString testResult;
    wxString message;
    if (isLocalFile)
        message = wxString::Format(_T("********************************************************\n  Testing in file: %s\n********************************************************"),filename.wx_str());
    else
        message = wxString::Format(_T("********************************************************\n  Testing file in edit control\n********************************************************"));
    wxLogMessage(message);
    testResult << message << wxT("\n");

    // reading the test cases, first we read all the lines of the file
    // handling local files and wxScintilla control differently
    std::vector<wxString> allLines;
    if (isLocalFile)
    {
        // read the test cases of CodeCompletion test
        wxTextFile source;
        source.Open(filename);
        wxString str;
        for ( str = source.GetFirstLine();
              source.GetCurrentLine() < source.GetLineCount();
              str = source.GetNextLine() )
        {
            allLines.push_back(str);
        }
    }
    else
    {
        wxStringTokenizer tokenizer(filename, wxT("\n"), wxTOKEN_RET_EMPTY);
        while ( tokenizer.HasMoreTokens() )
        {
            wxString token = tokenizer.GetNextToken();
            allLines.push_back(token);
        }
    }

    // the test cases are listed as the last lines of the file, so we loop
    // backwards and stop at the first line that is not a "//" comment.
    // BUGFIX: the old loop was "size_t i = allLines.size() - 1; i >= 0; i--"
    // - 'i' is unsigned so "i >= 0" is always true; it read out of bounds
    // for an empty input and wrapped around when every line is a test case.
    for (size_t i = allLines.size(); i-- > 0; )
    {
        wxString str = allLines[i];
        // a test case should be put in a line, and start with the double slash
        if (str.StartsWith(_T("//")))
        {
            // do tests here, example of line is below
            // tc.St  //StaticVoid
            // remove the beginning "//"
            str.Remove(0, 2);

            int pos;
            wxString expression;
            wxString match;
            wxString match_doc;

            // find the optional "///<" for Doxygen comment tests
            pos = str.Find(_T("///<"));
            if (pos != wxNOT_FOUND)
            {
                match_doc = str.Mid(pos + 4);
                str = str.Mid(0, pos);
            }

            // find the second "//", the string after the second double slash are the
            // the result should be listed
            pos = str.Find(_T("//"));
            if (pos != wxNOT_FOUND)
            {
                expression = str.Mid(0, pos);
                match = str.Mid(pos + 2);// the remaining string
            }
            else
            {
                expression = str;
                if (!match_doc.IsEmpty())
                    match = _T("* @doxygen");
            }

            expression.Trim(true).Trim(false);
            match.Trim(true).Trim(false);
            match_doc.Trim(true).Trim(false);

            wxArrayString suggestList;
            // the match can have many items, like: AAA,BBBB
            wxStringTokenizer tkz(match, wxT(","));
            while ( tkz.HasMoreTokens() )
            {
                wxString token = tkz.GetNextToken().Trim(true).Trim(false);
                suggestList.Add(token);
            }

            TokenIdxSet searchScope;
            searchScope.insert(-1);
            TokenIdxSet result;
            TestExpression(expression,searchScope,result);

            // loop the suggestList to see it is in the result Tokens
            for (size_t s = 0; s < suggestList.GetCount(); s++)
            {
                wxString element = suggestList[s];
                bool pass = false; // pass the test?
                for (TokenIdxSet::const_iterator it = result.begin(); it != result.end(); ++it)
                {
                    const Token* token = m_Parser.GetTokenTree()->at(*it);
                    if (!token || token->m_Name.IsEmpty())
                        continue;

                    if (element.IsSameAs(token->m_Name) || element[0] == '*')
                    {
                        // no doxygen documents, only matches the suggestion list
                        if (match_doc.IsEmpty())
                        {
                            message = wxString::Format(_T("+ PASS: %s  %s"), expression.wx_str(), element.wx_str());
                            testResult << message << wxT("\n");
                            wxLogMessage(message);
                            pass = true;
                            ++passCount;
                        }
                        else
                        {
                            // check whether doxygen documents are matched
                            if (   token->m_Doc.Contains(match_doc)
                                || (match_doc[0] == '*' && match_doc.Len() == 1 && !token->m_Doc.IsEmpty())
                                || (match_doc[0] == '-' && match_doc.Len() == 1 && token->m_Doc.IsEmpty()))
                            {
                                message = wxString::Format(_T("+ PASS: %s  %s  \"%s\""), expression.wx_str(), token->m_Name.wx_str(), match_doc.wx_str());
                                testResult << message << wxT("\n");
                                wxLogMessage(message);
                                if (!pass)
                                {
                                    pass = true;
                                    ++passCount;
                                }
                            }
                            else
                            {
                                if (pass)
                                    --passCount;
                                pass = false;
                                element = wxString::Format(_T("%s  \"%s\""), token->m_Name.wx_str(), match_doc.wx_str());
                                break;
                            }
                        }
                        if (element[0] != '*')
                            break;
                    }
                }
                if (pass == false)
                {
                    message = wxString::Format(_T("- FAIL: %s  %s"), expression.wx_str(), element.wx_str());
                    testResult << message << wxT("\n");
                    wxLogMessage(message);
                    failCount++;
                }
            }
            // wxLogMessage(_T("Result have %lu matches"), static_cast<unsigned long>(result.size()));
        }
        else
            break; // if the line is not started with //, then we just stop testing
    }

    // report the test result here again in the last stage, further more, we can show this in another text control
    wxLogMessage(wxT("--------------------------------------------------------\nTotal %d tests, %d PASS, %d FAIL\n--------------------------------------------------------"), passCount+failCount, passCount, failCount);

    return true;
}
// Kicks off a complete test run driven by the UI state:
//  1) collects include directories and (optionally) priority headers,
//  2) saves the editor buffer to a temporary ".h" file and queues it,
//  3) parses every queued file one-by-one,
//  4) dumps tree/list logs and runs one sample expression-resolution test.
// Side effects: resets the CCTestAppGlobal queues, clears the log control and
// the CCTest singleton, and shows a modal progress dialog for the duration.
void CCTestFrame::Start()
{
    if (m_ParserCtrl)
        m_ParserCtrl->SetSelection(1); // make sure "Output" tab is selected

    // Reset global state from any previous run.
    CCTestAppGlobal::s_includeDirs.Clear();
    CCTestAppGlobal::s_fileQueue.Clear();
    CCTestAppGlobal::s_filesParsed.Clear();

    // Obtain all include directories (one per line in the include control).
    wxStringTokenizer tkz_inc(m_IncludeCtrl->GetValue(), _T("\r\n"));
    while ( tkz_inc.HasMoreTokens() )
    {
        wxString include = tkz_inc.GetNextToken().Trim(true).Trim(false);
        if (!include.IsEmpty())
            CCTestAppGlobal::s_includeDirs.Add(include);
    }

    if (m_DoHeadersCtrl->IsChecked())
    {
        // Obtain all priority header files (comma-separated list).
        wxStringTokenizer tkz_hdr(m_HeadersCtrl->GetValue(), _T(","));
        while (tkz_hdr.HasMoreTokens())
        {
            wxString header = tkz_hdr.GetNextToken().Trim(false).Trim(true);

            // Remove <> (if any)
            int lt = header.Find(wxT('<'));
            int gt = header.Find(wxT('>'),true);
            if (lt!=wxNOT_FOUND && gt!=wxNOT_FOUND && gt>lt)
                header = header.AfterFirst(wxT('<')).BeforeLast(wxT('>'));

            // Remove "" (if any)
            int oq = header.Find(wxT('"'));
            int cq = header.Find(wxT('"'),true);
            if (oq!=wxNOT_FOUND && cq!=wxNOT_FOUND && cq>oq)
                header = header.AfterFirst(wxT('"')).BeforeLast(wxT('"'));

            header = header.Trim(false).Trim(true);

            // Find the header files in include path's as provided
            // (practically the same as ParserBase::FindFileInIncludeDirs())
            for (size_t i=0; i<CCTestAppGlobal::s_includeDirs.GetCount(); ++i)
            {
                // Normalize the path (as in C::B's "NormalizePath()");
                // note: a header may resolve in more than one include dir,
                // in which case it is queued once per matching dir.
                wxFileName f_header(header);
                wxString base_path(CCTestAppGlobal::s_includeDirs[i]);
                if (f_header.Normalize(wxPATH_NORM_ALL & ~wxPATH_NORM_CASE, base_path))
                {
                    wxString this_header = f_header.GetFullPath();
                    if ( ::wxFileExists(this_header) )
                        CCTestAppGlobal::s_fileQueue.Add(this_header);
                }
            }
        }
    }

    // Nothing queued and nothing typed in the buffer -> nothing to parse.
    if (CCTestAppGlobal::s_fileQueue.IsEmpty() && !m_Control->GetLength())
    {
        wxMessageBox(wxT("Main file not found and buffer empty. Nothing to do."),
                     _("Information"), wxOK | wxICON_INFORMATION, this);
        return;
    }

    if (m_DoHideCtrl && m_DoHideCtrl->IsChecked())
        Hide();

    // Modal progress dialog lives until the end of this method (deleted below).
    m_ProgDlg = new wxProgressDialog(_T("Please wait, operating..."),
                                     _("Preparing...\nPlease wait..."),
                                     0, this, wxPD_APP_MODAL);
    m_ProgDlg->SetSize(640,100);
    m_ProgDlg->Layout();
    m_ProgDlg->CenterOnParent();

    m_LogCount = 0;
    m_LogCtrl->Clear();
    CCTest::Get()->Clear(); // initial clearance

    // make sure not to over-write an existing file (in case content had changed)
    wxString tf(wxFileName::CreateTempFileName(wxT("cc")));
    // make the parser recognise it as header file: swap the extension to ".h"
    wxFileName fn(tf);
    fn.SetExt(wxT("h"));
    wxRemoveFile(tf); // no longer needed

    if (m_Control->SaveFile(fn.GetFullPath()))
        CCTestAppGlobal::s_fileQueue.Add(fn.GetFullPath());
    else
        AppendToLog(_T("Unable to parse buffer (could not convert to file)."));

    AppendToLog(_T("--------------M-a-i-n--L-o-g--------------\r\n\r\n"));

    // parse file from the queue one-by-one
    while (!CCTestAppGlobal::s_fileQueue.IsEmpty())
    {
        wxString file = CCTestAppGlobal::s_fileQueue.Item(0);
        CCTestAppGlobal::s_fileQueue.Remove(file);
        if (file.IsEmpty())
            continue;

        AppendToLog(_T("-----------I-n-t-e-r-i-m--L-o-g-----------"));
        m_CurrentFile = file;
        m_ProgDlg->Update(-1, m_CurrentFile);
        m_StatuBar->SetStatusText(m_CurrentFile);

        // This is the core parse stage for files; parsing a file may push
        // further files onto s_fileQueue, which keeps this loop going.
        CCTest::Get()->Start(m_CurrentFile);
        CCTestAppGlobal::s_filesParsed.Add(m_CurrentFile); // done
    }

    // don't forget to remove the temporary file (w/ ".h" extension)
    wxRemoveFile(fn.GetFullPath());

    m_ProgDlg->Update(-1, wxT("Creating tree log..."));
    AppendToLog(_T("--------------T-r-e-e--L-o-g--------------\r\n"));
    CCTest::Get()->PrintTree();

    m_ProgDlg->Update(-1, wxT("Creating list log..."));
    AppendToLog(_T("--------------L-i-s-t--L-o-g--------------\r\n"));
    CCTest::Get()->PrintList();

    if (m_DoTreeCtrl->IsChecked())
    {
        m_ProgDlg->Update(-1, wxT("Serializing tree..."));
        // Freeze/Thaw avoids flicker while replacing the tree text wholesale.
        Freeze();
        m_TreeCtrl->SetValue( CCTest::Get()->SerializeTree() );
        Thaw();
    }

    // Here we are going to test the expression solving algorithm with a
    // hard-coded sample expression against the freshly built token tree.
    NativeParserTest nativeParserTest;
    wxString exp = _T("obj.m_Member1");
    TokenIdxSet searchScope;
    searchScope.insert(-1); // -1 = global scope
    TokenIdxSet result;
    TokenTree *tree = CCTest::Get()->GetTokenTree();
    nativeParserTest.TestExpression(exp, tree, searchScope, result );
    wxLogMessage(_T("Result have %lu matches"), static_cast<unsigned long>(result.size()));

    // Log every matched token: kind, display name and [decl,impl] lines.
    for (TokenIdxSet::iterator it=result.begin(); it!=result.end(); ++it)
    {
        Token* token = tree->at(*it);
        if (token)
        {
            wxString log;
            log << token->GetTokenKindString() << _T(" ")
                << token->DisplayName()        << _T("\t[")
                << token->m_Line               << _T(",")
                << token->m_ImplLine           << _T("]");
            CCLogger::Get()->Log(log);
        }
    }

    if (m_ProgDlg)
    {
        delete m_ProgDlg;
        m_ProgDlg = 0;
    }

    if ( !IsShown() )
        Show();

    TokenTree* tt = CCTest::Get()->GetTokenTree();
    if (tt)
    {
        AppendToLog((wxString::Format(_("The parser contains %lu tokens, found in %lu files."),
                                      static_cast<unsigned long>(tt->size()),
                                      static_cast<unsigned long>(tt->m_FileMap.size()))));
    }
}
// Removes a token from the tree, unlinking it from parent, ancestors,
// children and descendants (children/descendants are removed recursively),
// then detaching it from the search tree, the global/top namespace sets and
// the documentation map before releasing its slot via RemoveTokenFromList().
// @param oldToken token to remove; no-op if null or not the registered
//                 occupant of its own index slot.
void TokenTree::RemoveToken(Token* oldToken)
{
    if (!oldToken)
        return;

    int idx = oldToken->m_Index;

    // Fix: bounds-check idx before dereferencing m_Tokens[idx]. The original
    // indexed unchecked, which is undefined behaviour for a stale/corrupt
    // m_Index, while the rest of this function defensively range-checks
    // every other index it uses.
    if (idx < 0 || (size_t)idx >= m_Tokens.size())
        return;

    if (m_Tokens[idx] != oldToken)
        return;

    // Step 1: Detach token from its parent

    Token* parentToken = 0;
    // A negative parent index becomes huge after the size_t cast, so this
    // single check also normalises "no parent" to -1.
    if ((size_t)(oldToken->m_ParentIndex) >= m_Tokens.size())
        oldToken->m_ParentIndex = -1;
    if (oldToken->m_ParentIndex >= 0)
        parentToken = m_Tokens[oldToken->m_ParentIndex];
    if (parentToken)
        parentToken->m_Children.erase(idx);

    TokenIdxSet nodes;
    TokenIdxSet::const_iterator it;

    // Step 2: Detach token from its ancestors
    nodes = (oldToken->m_DirectAncestors);
    for (it = nodes.begin(); it != nodes.end(); ++it)
    {
        int ancestoridx = *it;
        if (ancestoridx < 0 || (size_t)ancestoridx >= m_Tokens.size())
            continue;
        Token* ancestor = m_Tokens[ancestoridx];
        if (ancestor)
            ancestor->m_Descendants.erase(idx);
    }
    oldToken->m_Ancestors.clear();
    oldToken->m_DirectAncestors.clear();

    // Step 3: Remove children
    nodes = (oldToken->m_Children); // Copy the list to avoid interference
    for (it = nodes.begin(); it != nodes.end(); ++it)
        RemoveToken(*it);
    // m_Children SHOULD be empty by now - but clear anyway.
    oldToken->m_Children.clear();

    // Step 4: Remove descendants
    nodes = oldToken->m_Descendants; // Copy the list to avoid interference
    for (it = nodes.begin(); it != nodes.end(); ++it)
    {
        if (*it == idx) // that should not happen, we can not be our own descendant, but in fact that can happen with boost
        {
            CCLogger::Get()->DebugLog(_T("Break out the loop to remove descendants, to avoid a crash. We can not be our own descendant!"));
            break;
        }
        RemoveToken(*it);
    }
    // m_Descendants SHOULD be empty by now - but clear anyway.
    oldToken->m_Descendants.clear();

    // Step 5: Detach token from the SearchTrees
    int idx2 = m_Tree.GetItemNo(oldToken->m_Name);
    if (idx2)
    {
        TokenIdxSet& curList = m_Tree.GetItemAtPos(idx2);
        curList.erase(idx);
    }

    // Now, from the global namespace (if applicable)
    if (oldToken->m_ParentIndex == -1)
    {
        m_GlobalNameSpaces.erase(idx);
        m_TopNameSpaces.erase(idx);
    }

    // Step 6: Delete documentation associated with removed token
    m_TokenDocumentationMap.erase(oldToken->m_Index);

    // Step 7: Finally, remove it from the list.
    RemoveTokenFromList(idx);
}
// Verifies the occurrences collected in m_SearchDataMap against the target
// token set: each candidate position is re-resolved via MarkItemsByAI and
// dropped if it does not map onto targetResult (and, for local variables,
// if its parent scope differs from the target's parent). Shows a cancellable
// progress dialog while working.
// @param targetResult    set of token indices the rename/refactor targets
// @param targetText      the identifier text being searched for
// @param isLocalVariable true if the target is a local variable (extra
//                        parent-scope filtering is applied)
// @return number of files left in m_SearchDataMap after filtering
size_t CodeRefactoring::VerifyResult(const TokenIdxSet& targetResult, const wxString& targetText,
                                     bool isLocalVariable)
{
    EditorManager* edMan = Manager::Get()->GetEditorManager();
    cbEditor* editor = edMan->GetBuiltinActiveEditor();
    if (!editor)
        return 0;

    const Token* parentOfLocalVariable = nullptr;
    if (isLocalVariable)
    {
        TokenTree* tree = m_NativeParser.GetParser().GetTokenTree();

        CC_LOCKER_TRACK_TT_MTX_LOCK(s_TokenTreeMutex)

        // Fix: guard against an empty targetResult (dereferencing begin() of
        // an empty set is UB) and against tree->at() returning null before
        // reading token->m_ParentIndex; at() is null-checked everywhere else.
        if (!targetResult.empty())
        {
            const Token* token = tree->at(*targetResult.begin());
            if (token)
                parentOfLocalVariable = tree->at(token->m_ParentIndex);
        }

        CC_LOCKER_TRACK_TT_MTX_UNLOCK(s_TokenTreeMutex)
    }

    // now that list is filled, we'll search
    cbStyledTextCtrl* control = new cbStyledTextCtrl(editor->GetParent(), wxID_ANY,
                                                     wxDefaultPosition, wxSize(0, 0));
    control->Show(false);

    // styled the text to support control->GetStyleAt()
    cbEditor::ApplyStyles(control);
    EditorColourSet edColSet;

    size_t totalCount = 0;
    for (SearchDataMap::const_iterator it = m_SearchDataMap.begin(); it != m_SearchDataMap.end(); ++it)
        totalCount += it->second.size();

    // let's create a progress dialog because it might take some time depending on the files count
    wxProgressDialog* progress = new wxProgressDialog(_("Code Refactoring"),
                                                      _("Please wait while verifying result..."),
                                                      totalCount,
                                                      Manager::Get()->GetAppWindow(),
                                                      wxPD_AUTO_HIDE | wxPD_APP_MODAL | wxPD_CAN_ABORT);
    PlaceWindow(progress);

    size_t task = totalCount;
    TokenIdxSet result;
    bool userBreak = false;

    for (SearchDataMap::iterator it = m_SearchDataMap.begin(); it != m_SearchDataMap.end();)
    {
        // check if the file is already opened in built-in editor and do search in it
        cbEditor* ed = edMan->IsBuiltinOpen(it->first);
        if (ed)
            control->SetText(ed->GetControl()->GetText());
        else // else load the file in the control
        {
            EncodingDetector detector(it->first);
            if (!detector.IsOK())
            {
                task -= it->second.size();
                m_SearchDataMap.erase(it++);
                continue; // failed
            }
            control->SetText(detector.GetWxStr());
        }

        // apply the colour setting
        edColSet.Apply(editor->GetLanguage(), control);

        ccSearchData searchData = { control, it->first };
        for (SearchDataList::iterator itList = it->second.begin(); itList != it->second.end();)
        {
            // update the progress bar; Update() returns false on "Cancel"
            if (!progress->Update(totalCount - (--task)))
            {
                userBreak = true;
                break; // user pressed "Cancel"
            }

            // skip string or comment
            const int style = control->GetStyleAt(itList->pos);
            if (control->IsString(style) || control->IsComment(style))
            {
                it->second.erase(itList++);
                continue;
            }

            // do cc search
            const int endOfWord = itList->pos + targetText.Len();
            control->GotoPos(endOfWord);
            m_NativeParser.MarkItemsByAI(&searchData, result, true, false, true, endOfWord);
            if (result.empty())
            {
                it->second.erase(itList++);
                continue;
            }

            // verify result: accept the occurrence if ANY target token resolves here
            TokenIdxSet::const_iterator findIter = targetResult.begin();
            for (; findIter != targetResult.end(); ++findIter)
            {
                if (result.find(*findIter) != result.end())
                    break;
            }

            if (findIter == targetResult.end()) // not found
                it->second.erase(itList++);
            else
            {
                // handle for local variable: same name in a different scope
                // must not be touched, so compare parent tokens
                if (isLocalVariable)
                {
                    bool do_continue = false;

                    TokenTree* tree = m_NativeParser.GetParser().GetTokenTree();

                    CC_LOCKER_TRACK_TT_MTX_LOCK(s_TokenTreeMutex)

                    const Token* token = tree->at(*findIter);
                    if (token)
                    {
                        const Token* parent = tree->at(token->m_ParentIndex);
                        if (parent != parentOfLocalVariable)
                        {
                            it->second.erase(itList++);
                            do_continue = true;
                        }
                    }

                    CC_LOCKER_TRACK_TT_MTX_UNLOCK(s_TokenTreeMutex)

                    if (do_continue)
                        continue;
                }

                ++itList;
            }
        }

        if (it->second.empty())
            m_SearchDataMap.erase(it++);
        else
            ++it;

        if (userBreak)
            break;
    }

    delete control;  // done with it
    delete progress; // done here too

    return m_SearchDataMap.size();
}
// Resolves the textual ancestor list (m_AncestorsString, comma-separated) of
// a class-like token into actual token indices: fills m_DirectAncestors and
// m_Ancestors, registers the token as a descendant of each resolved ancestor,
// and recursively expands the full (transitive) inheritance chain via
// RecalcFullInheritance(). m_AncestorsString is consumed (cleared) so the
// work is done at most once per token.
void TokenTree::RecalcInheritanceChain(Token* token)
{
    if (!token)
        return;
    // only class-like kinds can take part in inheritance
    if (!(token->m_TokenKind & (tkClass | tkTypedef | tkEnum | tkNamespace)))
        return;
    if (token->m_AncestorsString.IsEmpty())
        return;

    // Start from a clean slate; both sets are rebuilt below.
    token->m_DirectAncestors.clear();
    token->m_Ancestors.clear();

    wxStringTokenizer tkz(token->m_AncestorsString, _T(","));
    TRACE(_T("RecalcInheritanceChain() : Token %s, Ancestors %s"), token->m_Name.wx_str(),
          token->m_AncestorsString.wx_str());
    TRACE(_T("RecalcInheritanceChain() : Removing ancestor string from %s"), token->m_Name.wx_str());
    // Clear the string BEFORE recursing: recursive calls check IsEmpty() and
    // this prevents re-processing (and infinite recursion) on cycles.
    token->m_AncestorsString.Clear();

    while (tkz.HasMoreTokens())
    {
        wxString ancestor = tkz.GetNextToken();
        if (ancestor.IsEmpty() || ancestor == token->m_Name)
            continue; // skip self-inheritance

        TRACE(_T("RecalcInheritanceChain() : Ancestor %s"), ancestor.wx_str());

        // ancestors might contain namespaces, e.g. NS::Ancestor
        if (ancestor.Find(_T("::")) != wxNOT_FOUND)
        {
            // Resolve the qualified name one component at a time, each lookup
            // scoped to the previously resolved component (or -1 = global).
            Token* ancestorToken = 0;
            wxStringTokenizer anctkz(ancestor, _T("::"));
            while (anctkz.HasMoreTokens())
            {
                wxString ns = anctkz.GetNextToken();
                if (!ns.IsEmpty())
                {
                    int ancestorIdx = TokenExists(ns, ancestorToken ? ancestorToken->m_Index : -1,
                                                  tkNamespace | tkClass | tkTypedef);
                    ancestorToken = at(ancestorIdx);
                    if (!ancestorToken) // unresolved
                        break;
                }
            }
            // NOTE(review): the qualified path accepts only tkClass/tkNamespace
            // here, while the unqualified branch below also accepts tkEnum and
            // tkTypedef — presumably intentional, but worth confirming.
            if (   ancestorToken
                && ancestorToken != token
                && (ancestorToken->m_TokenKind == tkClass || ancestorToken->m_TokenKind == tkNamespace) )
            {
                TRACE(_T("RecalcInheritanceChain() : Resolved to %s"), ancestorToken->m_Name.wx_str());
                // Make sure the ancestor's own chain is resolved first.
                RecalcInheritanceChain(ancestorToken);
                token->m_Ancestors.insert(ancestorToken->m_Index);
                ancestorToken->m_Descendants.insert(token->m_Index);
                TRACE(_T("RecalcInheritanceChain() : + '%s'"), ancestorToken->m_Name.wx_str());
            }
            else
            {
                TRACE(_T("RecalcInheritanceChain() : ! '%s' (unresolved)"), ancestor.wx_str());
            }
        }
        else // no namespaces in ancestor
        {
            // accept multiple matches for inheritance (e.g. ClassA defined in
            // several scopes) — every acceptable match becomes an ancestor
            TokenIdxSet result;
            FindMatches(ancestor, result, true, false);
            for (TokenIdxSet::const_iterator it = result.begin(); it != result.end(); ++it)
            {
                Token* ancestorToken = at(*it);
                // only classes take part in inheritance
                if (   ancestorToken
                    && (ancestorToken != token)
                    && (   (ancestorToken->m_TokenKind == tkClass)
                        || (ancestorToken->m_TokenKind == tkEnum)
                        || (ancestorToken->m_TokenKind == tkTypedef)
                        || (ancestorToken->m_TokenKind == tkNamespace) ) )
                {
                    // Resolve the ancestor's chain before linking to it.
                    RecalcInheritanceChain(ancestorToken);
                    token->m_Ancestors.insert(*it);
                    ancestorToken->m_Descendants.insert(token->m_Index);
                    TRACE(_T("RecalcInheritanceChain() : + '%s'"), ancestorToken->m_Name.wx_str());
                }
            }
#if defined(CC_TOKEN_DEBUG_OUTPUT)
#if CC_TOKEN_DEBUG_OUTPUT
            if (result.empty())
                TRACE(_T("RecalcInheritanceChain() : ! '%s' (unresolved)"), ancestor.wx_str());
#endif
#endif
        }

        // Snapshot the direct ancestors resolved so far (before the transitive
        // expansion below widens m_Ancestors).
        token->m_DirectAncestors = token->m_Ancestors;
    }

#if defined(CC_TOKEN_DEBUG_OUTPUT)
#if CC_TOKEN_DEBUG_OUTPUT
    wxStopWatch sw;
    TRACE(_T("RecalcInheritanceChain() : First iteration took : %ld ms"), sw.Time());
    sw.Start();
#endif
#endif

    // recalc: collect the full transitive ancestor set of every direct ancestor
    TokenIdxSet result;
    for (TokenIdxSet::const_iterator it = token->m_Ancestors.begin();
         it != token->m_Ancestors.end(); ++it)
        RecalcFullInheritance(*it, result);

    // now, add the resulting set to ancestors set (and register the reverse
    // descendant links)
    for (TokenIdxSet::const_iterator it = result.begin(); it != result.end(); ++it)
    {
        Token* ancestor = at(*it);
        if (ancestor)
        {
            token->m_Ancestors.insert(*it);
            ancestor->m_Descendants.insert(token->m_Index);
        }
    }

#if defined(CC_TOKEN_DEBUG_OUTPUT)
#if CC_TOKEN_DEBUG_OUTPUT
    if (token)
    {
        // debug loop
        TRACE(_T("RecalcInheritanceChain() : Ancestors for %s:"), token->m_Name.wx_str());
        for (TokenIdxSet::const_iterator it = token->m_Ancestors.begin();
             it != token->m_Ancestors.end(); ++it)
        {
            const Token* anc_token = at(*it);
            if (anc_token)
                TRACE(_T("RecalcInheritanceChain() : + %s"), anc_token->m_Name.wx_str());
            else
                TRACE(_T("RecalcInheritanceChain() : + NULL?!"));
        }
    }
#endif
#endif

#if defined(CC_TOKEN_DEBUG_OUTPUT)
#if CC_TOKEN_DEBUG_OUTPUT
    TRACE(_T("RecalcInheritanceChain() : Second iteration took : %ld ms"), sw.Time());
#endif
#endif

    TRACE(_T("RecalcInheritanceChain() : Full inheritance calculated."));
}