bool AnalysisConsumer::HandleTopLevelDecl(DeclGroupRef DG) {
  storeTopLevelDecls(DG);
  for (DeclGroupRef::iterator i = DG.begin(), e = DG.end(); i != e; ++i) {
    Decl *D = *i;
    // Debug aid: record the start location of each top-level declaration.
    std::string ssStart =
        D->getLocStart().printToString(Mgr->getASTContext().getSourceManager());
    // std::cout << "HandleTopLevelDecl: " << ssStart << std::endl;
  }
  return true;
}
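For context, the same enumeration of top-level declarations can be observed from outside the analyzer through libclang's stable C API. The sketch below is an illustration under assumptions, not part of AnalysisConsumer: the input file name test.c is a placeholder, and the visitor simply prints each top-level declaration with its start location, mirroring the commented-out debug output above.

#include <clang-c/Index.h>
#include <cstdio>

// Print each top-level declaration with its spelling location, similar to
// the debug output in HandleTopLevelDecl above.
static enum CXChildVisitResult printTopLevelDecl(CXCursor Cursor, CXCursor,
                                                 CXClientData) {
  if (clang_isDeclaration(clang_getCursorKind(Cursor))) {
    CXFile File = nullptr;
    unsigned Line = 0, Col = 0;
    clang_getSpellingLocation(clang_getCursorLocation(Cursor), &File, &Line,
                              &Col, /*offset=*/nullptr);
    if (File) { // built-in declarations have no file; skip them
      CXString Name = clang_getCursorSpelling(Cursor);
      CXString FileName = clang_getFileName(File);
      std::printf("HandleTopLevelDecl: %s at %s:%u:%u\n",
                  clang_getCString(Name), clang_getCString(FileName),
                  Line, Col);
      clang_disposeString(FileName);
      clang_disposeString(Name);
    }
  }
  return CXChildVisit_Continue; // stay at the top level; do not recurse
}

int main() {
  CXIndex Idx = clang_createIndex(/*excludeDeclarationsFromPCH=*/0,
                                  /*displayDiagnostics=*/1);
  CXTranslationUnit TU = clang_parseTranslationUnit(
      Idx, "test.c", /*command_line_args=*/nullptr, 0,
      /*unsaved_files=*/nullptr, 0, CXTranslationUnit_None);
  if (TU) {
    clang_visitChildren(clang_getTranslationUnitCursor(TU), printTopLevelDecl,
                        /*client_data=*/nullptr);
    clang_disposeTranslationUnit(TU);
  }
  clang_disposeIndex(Idx);
  return 0;
}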
static CXString getDeclCursorUSR(const CXCursor &C) {
  Decl *D = cxcursor::getCursorDecl(C);

  // Don't generate USRs for things with invalid locations.
  if (!D || D->getLocStart().isInvalid())
    return createCXString("");

  // Check if the cursor has 'NoLinkage'.
  if (const NamedDecl *ND = dyn_cast<NamedDecl>(D))
    switch (ND->getLinkage()) {
      case ExternalLinkage:
        // Generate USRs for all entities with external linkage.
        break;
      case NoLinkage:
      case UniqueExternalLinkage:
        // We allow enums, typedefs, and structs that have no linkage to
        // have USRs that are anchored to the file they were defined in
        // (e.g., the header). This is a little gross, but in principle
        // enums/anonymous structs/etc. defined in a common header file
        // are referred to across multiple translation units.
        if (isa<TagDecl>(ND) || isa<TypedefDecl>(ND) ||
            isa<EnumConstantDecl>(ND) || isa<FieldDecl>(ND) ||
            isa<VarDecl>(ND) || isa<NamespaceDecl>(ND))
          break;
        // Fall-through.
      case InternalLinkage:
        if (isa<FunctionDecl>(ND))
          break;
    }

  CXTranslationUnit TU = cxcursor::getCursorTU(C);
  if (!TU)
    return createCXString("");

  CXStringBuf *buf = cxstring::getCXStringBuf(TU);
  if (!buf)
    return createCXString("");

  {
    USRGenerator UG(&C, &buf->Data);
    UG->Visit(D);

    if (UG->ignoreResults()) {
      disposeCXStringBuf(buf);
      return createCXString("");
    }
  }

  // Return the C-string, but don't make a copy since it is already in
  // the string buffer.
  buf->Data.push_back('\0');
  return createCXString(buf);
}
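getDeclCursorUSR is the worker behind libclang's public clang_getCursorUSR() entry point. As a hedged usage sketch (again with a placeholder test.c), the program below prints the USR for every declaration it can reach; the empty strings it filters out correspond to the createCXString("") early returns and the ignoreResults() path above.

#include <clang-c/Index.h>
#include <cstdio>

// Print "<name> -> <USR>" for every declaration in the translation unit.
static enum CXChildVisitResult printUSR(CXCursor Cursor, CXCursor,
                                        CXClientData) {
  CXString USR = clang_getCursorUSR(Cursor);
  const char *Str = clang_getCString(USR);
  if (Str && *Str) { // empty when USR generation was declined
    CXString Name = clang_getCursorSpelling(Cursor);
    std::printf("%s -> %s\n", clang_getCString(Name), Str);
    clang_disposeString(Name);
  }
  clang_disposeString(USR);
  return CXChildVisit_Recurse; // also visit nested declarations
}

int main() {
  CXIndex Idx = clang_createIndex(0, 0);
  CXTranslationUnit TU = clang_parseTranslationUnit(
      Idx, "test.c", nullptr, 0, nullptr, 0, CXTranslationUnit_None);
  if (TU) {
    clang_visitChildren(clang_getTranslationUnitCursor(TU), printUSR, nullptr);
    clang_disposeTranslationUnit(TU);
  }
  clang_disposeIndex(Idx);
  return 0;
}

Because a USR is stable across translation units, two cursors naming the same entity in different files yield the same string, which is why the switch above bothers to file-anchor USRs for no-linkage entities defined in common headers.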
void AnalysisConsumer::HandleDeclsCallGraph(const unsigned LocalTUDeclsSize) {
  // Build the Call Graph by adding all the top level declarations to the graph.
  // Note: CallGraph can trigger deserialization of more items from a pch
  // (through HandleInterestingDecl), triggering additions to LocalTUDecls.
  // We rely on random access to add the initially processed Decls to CG.
  CallGraph CG;
  for (unsigned i = 0; i < LocalTUDeclsSize; ++i) {
    CG.addToCallGraph(LocalTUDecls[i]);
  }

  // Walk over all of the call graph nodes in topological order, so that we
  // analyze parents before the children. Skip the functions inlined into
  // the previously processed functions. Use an external Visited set to
  // identify inlined functions. The topological order allows the "do not
  // reanalyze previously inlined function" performance heuristic to be
  // triggered more often.
  SetOfConstDecls Visited;
  SetOfConstDecls VisitedAsTopLevel;
  llvm::ReversePostOrderTraversal<clang::CallGraph*> RPOT(&CG);
  for (llvm::ReversePostOrderTraversal<clang::CallGraph*>::rpo_iterator
         I = RPOT.begin(), E = RPOT.end(); I != E; ++I) {
    NumFunctionTopLevel++;
    CallGraphNode *N = *I;
    Decl *D = N->getDecl();

    // Skip the abstract root node, which has no Decl attached.
    if (!D)
      continue;

    // Debug aid: record the start location of the function being analyzed.
    std::string ssStart =
        D->getLocStart().printToString(Mgr->getASTContext().getSourceManager());

    // Skip the functions which have been processed already or previously
    // inlined.
    if (shouldSkipFunction(D, Visited, VisitedAsTopLevel))
      continue;

    // Analyze the function.
    SetOfConstDecls VisitedCallees;
    HandleCode(D, AM_Path,
               (Mgr->options.InliningMode == All ? nullptr : &VisitedCallees));

    // Add the visited callees to the global visited set.
    for (SetOfConstDecls::iterator I = VisitedCallees.begin(),
                                   E = VisitedCallees.end(); I != E; ++I) {
      Visited.insert(*I);
    }
    VisitedAsTopLevel.insert(D);
  }
}
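The reverse post-order traversal is what makes the "analyze parents before the children" claim concrete: a depth-first search emits each node only after all of its callees, and reversing that order puts every caller ahead of its callees. The self-contained toy sketch below (function names and call edges are invented for illustration, with a plain std::map standing in for clang::CallGraph) shows the mechanism.

#include <iostream>
#include <map>
#include <set>
#include <string>
#include <vector>

// Depth-first search that emits a node after all of its callees (post-order).
static void postOrder(
    const std::string &Node,
    const std::map<std::string, std::vector<std::string>> &Calls,
    std::set<std::string> &Seen, std::vector<std::string> &Order) {
  if (!Seen.insert(Node).second)
    return; // already visited
  auto It = Calls.find(Node);
  if (It != Calls.end())
    for (const std::string &Callee : It->second)
      postOrder(Callee, Calls, Seen, Order);
  Order.push_back(Node); // emitted only after every callee
}

int main() {
  // Toy call graph: main -> {helper, log}, helper -> {log}.
  std::map<std::string, std::vector<std::string>> Calls = {
      {"main", {"helper", "log"}}, {"helper", {"log"}}, {"log", {}}};

  std::set<std::string> Seen;
  std::vector<std::string> Order;
  postOrder("main", Calls, Seen, Order); // "main" plays the role of the root

  // Reverse post-order prints: main, helper, log. Callers come first, so by
  // the time "log" is reached it may already have been inlined into a caller
  // and can be skipped, which is exactly the heuristic HandleDeclsCallGraph
  // relies on.
  for (auto I = Order.rbegin(), E = Order.rend(); I != E; ++I)
    std::cout << *I << "\n";
  return 0;
}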