void ExplodedNode::NodeGroup::addNode(ExplodedNode *N, ExplodedGraph &G) {
  assert(!getFlag());

  GroupStorage &Storage = reinterpret_cast<GroupStorage&>(P);
  if (Storage.isNull()) {
    Storage = N;
    assert(Storage.is<ExplodedNode *>());
    return;
  }

  ExplodedNodeVector *V = Storage.dyn_cast<ExplodedNodeVector *>();

  if (!V) {
    // Switch from single-node to multi-node representation.
    ExplodedNode *Old = Storage.get<ExplodedNode *>();

    BumpVectorContext &Ctx = G.getNodeAllocator();
    V = G.getAllocator().Allocate<ExplodedNodeVector>();
    new (V) ExplodedNodeVector(Ctx, 4);
    V->push_back(Old, Ctx);

    Storage = V;
    assert(!getFlag());
    assert(Storage.is<ExplodedNodeVector *>());
  }

  V->push_back(N, G.getNodeAllocator());
}
void ExplodedNode::NodeGroup::addNode(ExplodedNode *N, ExplodedGraph &G) {
  assert((reinterpret_cast<uintptr_t>(N) & Mask) == 0x0);
  assert(!getFlag());

  if (getKind() == Size1) {
    if (ExplodedNode *NOld = getNode()) {
      BumpVectorContext &Ctx = G.getNodeAllocator();
      BumpVector<ExplodedNode*> *V =
        G.getAllocator().Allocate<BumpVector<ExplodedNode*> >();
      new (V) BumpVector<ExplodedNode*>(Ctx, 4);

      assert((reinterpret_cast<uintptr_t>(V) & Mask) == 0x0);
      V->push_back(NOld, Ctx);
      V->push_back(N, Ctx);
      P = reinterpret_cast<uintptr_t>(V) | SizeOther;
      assert(getPtr() == (void*) V);
      assert(getKind() == SizeOther);
    }
    else {
      P = reinterpret_cast<uintptr_t>(N);
      assert(getKind() == Size1);
    }
  }
  else {
    assert(getKind() == SizeOther);
    getVector(getPtr()).push_back(N, G.getNodeAllocator());
  }
}
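// A minimal, standalone sketch of the small-size trick both addNode variants
// above implement: keep a single element inline in one tagged pointer-sized
// word and switch to a heap-allocated vector on the second insertion. The
// names here (Node, TinyNodeGroup, TagMask) are illustrative only; the real
// code tags the low bits of a bump-allocated BumpVector (older version) or
// uses a PointerUnion over a GroupStorage word (newer version) rather than a
// std::vector.
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <vector>

struct alignas(4) Node { int id; };

class TinyNodeGroup {
  // Low bit of P distinguishes the two representations:
  //   0 -> P holds a single Node* (null means "empty")
  //   1 -> P holds a std::vector<Node*>* with the tag bit masked off
  static constexpr uintptr_t TagMask = 0x1;
  uintptr_t P = 0;

  bool isVector() const { return P & TagMask; }
  std::vector<Node*> *getVector() const {
    return reinterpret_cast<std::vector<Node*>*>(P & ~TagMask);
  }

public:
  void addNode(Node *N) {
    assert((reinterpret_cast<uintptr_t>(N) & TagMask) == 0 &&
           "pointer must be at least 2-byte aligned for tagging");
    if (!isVector()) {
      Node *Old = reinterpret_cast<Node*>(P);
      if (!Old) {            // First insertion: store the node inline.
        P = reinterpret_cast<uintptr_t>(N);
        return;
      }
      // Second insertion: migrate to the vector representation.
      auto *V = new std::vector<Node*>{Old};
      P = reinterpret_cast<uintptr_t>(V) | TagMask;
    }
    getVector()->push_back(N);
  }

  std::size_t size() const {
    if (isVector()) return getVector()->size();
    return P ? 1 : 0;
  }

  ~TinyNodeGroup() { if (isVector()) delete getVector(); }
};
// The point of the optimization is that the common case (a node with exactly
// one predecessor or successor) needs no separate allocation at all.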
void UnreachableCodeChecker::checkEndAnalysis(ExplodedGraph &G,
                                              BugReporter &B,
                                              ExprEngine &Eng) const {
  CFGBlocksSet reachable, visited;

  if (Eng.hasWorkRemaining())
    return;

  const Decl *D = nullptr;
  CFG *C = nullptr;
  ParentMap *PM = nullptr;
  const LocationContext *LC = nullptr;
  // Iterate over ExplodedGraph
  for (ExplodedGraph::node_iterator I = G.nodes_begin(), E = G.nodes_end();
       I != E; ++I) {
    const ProgramPoint &P = I->getLocation();
    LC = P.getLocationContext();
    if (!LC->inTopFrame())
      continue;

    if (!D)
      D = LC->getAnalysisDeclContext()->getDecl();

    // Save the CFG if we don't have it already
    if (!C)
      C = LC->getAnalysisDeclContext()->getUnoptimizedCFG();
    if (!PM)
      PM = &LC->getParentMap();

    if (Optional<BlockEntrance> BE = P.getAs<BlockEntrance>()) {
      const CFGBlock *CB = BE->getBlock();
      reachable.insert(CB->getBlockID());
    }
  }

  // Bail out if we didn't get the CFG or the ParentMap.
  if (!D || !C || !PM)
    return;

  // Don't do anything for template instantiations.  Proving that code
  // in a template instantiation is unreachable means proving that it is
  // unreachable in all instantiations.
  if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D))
    if (FD->isTemplateInstantiation())
      return;

  // Find CFGBlocks that were not covered by any node
  for (CFG::const_iterator I = C->begin(), E = C->end(); I != E; ++I) {
    const CFGBlock *CB = *I;
    // Check if the block is unreachable
    if (reachable.count(CB->getBlockID()))
      continue;

    // Check if the block is empty (an artificial block)
    if (isEmptyCFGBlock(CB))
      continue;

    // Find the entry points for this block
    if (!visited.count(CB->getBlockID()))
      FindUnreachableEntryPoints(CB, reachable, visited);

    // This block may have been pruned; check if we still want to report it
    if (reachable.count(CB->getBlockID()))
      continue;

    // Check for false positives
    if (isInvalidPath(CB, *PM))
      continue;

    // It is good practice to always have a "default" label in a "switch", even
    // if we should never get there. It can be used to detect errors, for
    // instance. Unreachable code directly under a "default" label is therefore
    // likely to be a false positive.
    if (const Stmt *label = CB->getLabel())
      if (label->getStmtClass() == Stmt::DefaultStmtClass)
        continue;

    // Special case for __builtin_unreachable.
    // FIXME: This should be extended to include other unreachable markers,
    // such as llvm_unreachable.
    if (!CB->empty()) {
      bool foundUnreachable = false;
      for (CFGBlock::const_iterator ci = CB->begin(), ce = CB->end();
           ci != ce; ++ci) {
        if (Optional<CFGStmt> S = (*ci).getAs<CFGStmt>())
          if (const CallExpr *CE = dyn_cast<CallExpr>(S->getStmt())) {
            if (CE->getBuiltinCallee() == Builtin::BI__builtin_unreachable ||
                CE->isBuiltinAssumeFalse(Eng.getContext())) {
              foundUnreachable = true;
              break;
            }
          }
      }
      if (foundUnreachable)
        continue;
    }

    // We found a block that wasn't covered - find the statement to report
    SourceRange SR;
    PathDiagnosticLocation DL;
    SourceLocation SL;
    if (const Stmt *S = getUnreachableStmt(CB)) {
      // In macros, 'do {...} while (0)' is often used. Don't warn about the
      // condition 0 when it is unreachable.
      if (S->getBeginLoc().isMacroID())
        if (const auto *I = dyn_cast<IntegerLiteral>(S))
          if (I->getValue() == 0ULL)
            if (const Stmt *Parent = PM->getParent(S))
              if (isa<DoStmt>(Parent))
                continue;
      SR = S->getSourceRange();
      DL = PathDiagnosticLocation::createBegin(S, B.getSourceManager(), LC);
      SL = DL.asLocation();
      if (SR.isInvalid() || !SL.isValid())
        continue;
    }
    else
      continue;

    // Check if the SourceLocation is in a system header
    const SourceManager &SM = B.getSourceManager();
    if (SM.isInSystemHeader(SL) || SM.isInExternCSystemHeader(SL))
      continue;

    B.EmitBasicReport(D, this, "Unreachable code", "Dead code",
                      "This statement is never executed", DL, SR);
  }
}
void UnreachableCodeChecker::checkEndAnalysis(ExplodedGraph &G,
                                              BugReporter &B,
                                              ExprEngine &Eng) const {
  CFGBlocksSet reachable, visited;

  if (Eng.hasWorkRemaining())
    return;

  CFG *C = 0;
  ParentMap *PM = 0;
  // Iterate over ExplodedGraph
  for (ExplodedGraph::node_iterator I = G.nodes_begin(), E = G.nodes_end();
       I != E; ++I) {
    const ProgramPoint &P = I->getLocation();
    const LocationContext *LC = P.getLocationContext();

    // Save the CFG if we don't have it already
    if (!C)
      C = LC->getAnalysisContext()->getUnoptimizedCFG();
    if (!PM)
      PM = &LC->getParentMap();

    if (const BlockEntrance *BE = dyn_cast<BlockEntrance>(&P)) {
      const CFGBlock *CB = BE->getBlock();
      reachable.insert(CB->getBlockID());
    }
  }

  // Bail out if we didn't get the CFG or the ParentMap.
  if (!C || !PM)
    return;

  ASTContext &Ctx = B.getContext();

  // Find CFGBlocks that were not covered by any node
  for (CFG::const_iterator I = C->begin(), E = C->end(); I != E; ++I) {
    const CFGBlock *CB = *I;
    // Check if the block is unreachable
    if (reachable.count(CB->getBlockID()))
      continue;

    // Check if the block is empty (an artificial block)
    if (isEmptyCFGBlock(CB))
      continue;

    // Find the entry points for this block
    if (!visited.count(CB->getBlockID()))
      FindUnreachableEntryPoints(CB, reachable, visited);

    // This block may have been pruned; check if we still want to report it
    if (reachable.count(CB->getBlockID()))
      continue;

    // Check for false positives
    if (CB->size() > 0 && isInvalidPath(CB, *PM))
      continue;

    // Special case for __builtin_unreachable.
    // FIXME: This should be extended to include other unreachable markers,
    // such as llvm_unreachable.
    if (!CB->empty()) {
      CFGElement First = CB->front();
      if (const CFGStmt *S = First.getAs<CFGStmt>()) {
        if (const CallExpr *CE = dyn_cast<CallExpr>(S->getStmt())) {
          if (CE->isBuiltinCall(Ctx) == Builtin::BI__builtin_unreachable)
            continue;
        }
      }
    }

    // We found a block that wasn't covered - find the statement to report
    SourceRange SR;
    SourceLocation SL;
    if (const Stmt *S = getUnreachableStmt(CB)) {
      SR = S->getSourceRange();
      SL = S->getLocStart();
      if (SR.isInvalid() || SL.isInvalid())
        continue;
    }
    else
      continue;

    // Check if the SourceLocation is in a system header
    const SourceManager &SM = B.getSourceManager();
    if (SM.isInSystemHeader(SL) || SM.isInExternCSystemHeader(SL))
      continue;

    B.EmitBasicReport("Unreachable code", "Dead code",
                      "This statement is never executed", SL, SR);
  }
}
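// A minimal, standalone sketch of the core coverage check shared by both
// versions of the checker above: record the IDs of the blocks the engine
// actually entered (one per BlockEntrance program point), then walk every
// block of the CFG and flag the ones never entered. ToyBlock and the sample
// data are illustrative stand-ins for CFGBlock/CFG; the real checker also
// prunes artificial blocks, 'default' labels, __builtin_unreachable calls,
// and system headers before reporting.
#include <cstdio>
#include <set>
#include <vector>

struct ToyBlock { unsigned BlockID; const char *Label; };

int main() {
  std::vector<ToyBlock> CFGBlocks = {
      {0, "entry"}, {1, "then"}, {2, "else"}, {3, "after-return"}, {4, "exit"}};

  // IDs inserted while iterating the exploded graph.
  std::set<unsigned> reachable = {0, 1, 2, 4};

  // Find CFG blocks that were not covered by any exploded-graph node.
  for (const ToyBlock &CB : CFGBlocks) {
    if (reachable.count(CB.BlockID))
      continue;
    std::printf("warning: block %u (%s) is never executed\n",
                CB.BlockID, CB.Label);
  }
  return 0;
}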
ExplodedGraph*
ExplodedGraph::TrimInternal(const ExplodedNode* const* BeginSources,
                            const ExplodedNode* const* EndSources,
                            InterExplodedGraphMap* M,
                   llvm::DenseMap<const void*, const void*> *InverseMap) const {

  typedef llvm::DenseSet<const ExplodedNode*> Pass1Ty;
  Pass1Ty Pass1;

  typedef llvm::DenseMap<const ExplodedNode*, ExplodedNode*> Pass2Ty;
  Pass2Ty& Pass2 = M->M;

  SmallVector<const ExplodedNode*, 10> WL1, WL2;

  // ===- Pass 1 (reverse DFS) -===
  for (const ExplodedNode* const* I = BeginSources; I != EndSources; ++I) {
    assert(*I);
    WL1.push_back(*I);
  }

  // Process the first worklist until it is empty.  Nodes are pushed onto and
  // popped off the back of the SmallVector, so it behaves as a LIFO stack.
  while (!WL1.empty()) {
    const ExplodedNode *N = WL1.back();
    WL1.pop_back();

    // Have we already visited this node?  If so, continue to the next one.
    if (Pass1.count(N))
      continue;

    // Otherwise, mark this node as visited.
    Pass1.insert(N);

    // If this is a root enqueue it to the second worklist.
    if (N->Preds.empty()) {
      WL2.push_back(N);
      continue;
    }

    // Visit our predecessors and enqueue them.
    for (ExplodedNode::pred_iterator I = N->Preds.begin(), E = N->Preds.end();
         I != E; ++I)
      WL1.push_back(*I);
  }

  // We didn't hit a root? Return with a null pointer for the new graph.
  if (WL2.empty())
    return 0;

  // Create an empty graph.
  ExplodedGraph* G = MakeEmptyGraph();

  // ===- Pass 2 (forward DFS to construct the new graph) -===
  while (!WL2.empty()) {
    const ExplodedNode *N = WL2.back();
    WL2.pop_back();

    // Skip this node if we have already processed it.
    if (Pass2.find(N) != Pass2.end())
      continue;

    // Create the corresponding node in the new graph and record the mapping
    // from the old node to the new node.
    ExplodedNode *NewN = G->getNode(N->getLocation(), N->State, N->isSink(), 0);
    Pass2[N] = NewN;

    // Also record the reverse mapping from the new node to the old node.
    if (InverseMap) (*InverseMap)[NewN] = N;

    // If this node is a root, designate it as such in the graph.
    if (N->Preds.empty())
      G->addRoot(NewN);

    // In the case that some of the intended predecessors of NewN have already
    // been created, we should hook them up as predecessors.

    // Walk through the predecessors of 'N' and hook up their corresponding
    // nodes in the new graph (if any) to the freshly created node.
    for (ExplodedNode::pred_iterator I = N->Preds.begin(), E = N->Preds.end();
         I != E; ++I) {
      Pass2Ty::iterator PI = Pass2.find(*I);
      if (PI == Pass2.end())
        continue;

      NewN->addPredecessor(PI->second, *G);
    }

    // In the case that some of the intended successors of NewN have already
    // been created, we should hook them up as successors.  Otherwise, enqueue
    // the new nodes from the original graph that should have nodes created
    // in the new graph.
    for (ExplodedNode::succ_iterator I = N->Succs.begin(), E = N->Succs.end();
         I != E; ++I) {
      Pass2Ty::iterator PI = Pass2.find(*I);
      if (PI != Pass2.end()) {
        PI->second->addPredecessor(NewN, *G);
        continue;
      }

      // Enqueue nodes to the worklist that were marked during pass 1.
      if (Pass1.count(*I))
        WL2.push_back(*I);
    }
  }

  return G;
}
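// A minimal, standalone sketch of the two-pass trim above: pass 1 walks
// predecessor edges backwards from the requested sink nodes and marks every
// ancestor; pass 2 walks forward from the roots found in pass 1, cloning only
// marked nodes and rewiring edges among the clones. GNode and trimGraph are
// illustrative stand-ins for ExplodedNode / TrimInternal, using standard
// containers instead of DenseMap/DenseSet and the graph's allocator.
#include <map>
#include <memory>
#include <set>
#include <vector>

struct GNode {
  int Id = 0;
  std::vector<GNode*> Preds, Succs;
};

// Returns clones (old -> new) of every node lying on a path from a root
// (a node with no predecessors) to one of the sinks.
std::map<const GNode*, std::unique_ptr<GNode>>
trimGraph(const std::vector<const GNode*> &Sinks) {
  // Pass 1: reverse walk from the sinks, collecting ancestors and roots.
  std::set<const GNode*> Pass1;
  std::vector<const GNode*> WL1(Sinks.begin(), Sinks.end()), WL2;
  while (!WL1.empty()) {
    const GNode *N = WL1.back();
    WL1.pop_back();
    if (!Pass1.insert(N).second)
      continue;                       // already visited
    if (N->Preds.empty()) {           // reached a root: seed pass 2
      WL2.push_back(N);
      continue;
    }
    for (const GNode *P : N->Preds)
      WL1.push_back(P);
  }

  // Pass 2: forward walk from the roots, cloning only nodes marked in pass 1.
  std::map<const GNode*, std::unique_ptr<GNode>> Pass2;
  while (!WL2.empty()) {
    const GNode *N = WL2.back();
    WL2.pop_back();
    if (Pass2.count(N))
      continue;
    auto NewN = std::make_unique<GNode>();
    NewN->Id = N->Id;
    GNode *NewPtr = NewN.get();
    Pass2[N] = std::move(NewN);

    // Hook up edges to clones that already exist; enqueue marked successors.
    for (const GNode *P : N->Preds)
      if (auto PI = Pass2.find(P); PI != Pass2.end()) {
        NewPtr->Preds.push_back(PI->second.get());
        PI->second->Succs.push_back(NewPtr);
      }
    for (const GNode *S : N->Succs) {
      if (auto SI = Pass2.find(S); SI != Pass2.end()) {
        SI->second->Preds.push_back(NewPtr);
        NewPtr->Succs.push_back(SI->second.get());
      } else if (Pass1.count(S)) {
        WL2.push_back(S);
      }
    }
  }
  return Pass2;
}
// Each edge is wired exactly once: whichever endpoint is cloned later finds
// the earlier clone in the map and connects to it, mirroring addPredecessor.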
void AnalyzerStatsChecker::checkEndAnalysis(ExplodedGraph &G,
                                            BugReporter &B,
                                            ExprEngine &Eng) const {
  const CFG *C = 0;
  const SourceManager &SM = B.getSourceManager();
  llvm::SmallPtrSet<const CFGBlock*, 256> reachable;

  // Root node should have the location context of the top most function.
  const ExplodedNode *GraphRoot = *G.roots_begin();
  const LocationContext *LC = GraphRoot->getLocation().getLocationContext();

  const Decl *D = LC->getDecl();

  // Iterate over the exploded graph.
  for (ExplodedGraph::node_iterator I = G.nodes_begin();
       I != G.nodes_end(); ++I) {
    const ProgramPoint &P = I->getLocation();

    // Only check the coverage in the top level function (optimization).
    if (D != P.getLocationContext()->getDecl())
      continue;

    if (const BlockEntrance *BE = dyn_cast<BlockEntrance>(&P)) {
      const CFGBlock *CB = BE->getBlock();
      reachable.insert(CB);
    }
  }

  // Get the CFG and the Decl of this block.
  C = LC->getCFG();

  unsigned total = 0, unreachable = 0;

  // Find CFGBlocks that were not covered by any node
  for (CFG::const_iterator I = C->begin(); I != C->end(); ++I) {
    const CFGBlock *CB = *I;
    ++total;
    // Check if the block is unreachable
    if (!reachable.count(CB)) {
      ++unreachable;
    }
  }

  // We never 'reach' the entry block, so correct the unreachable count
  unreachable--;
  // There is no BlockEntrance corresponding to the exit block as well, so
  // assume it is reached as well.
  unreachable--;

  // Generate the warning string
  SmallString<128> buf;
  llvm::raw_svector_ostream output(buf);
  PresumedLoc Loc = SM.getPresumedLoc(D->getLocation());
  if (!Loc.isValid())
    return;

  if (isa<FunctionDecl>(D) || isa<ObjCMethodDecl>(D)) {
    const NamedDecl *ND = cast<NamedDecl>(D);
    output << *ND;
  }
  else if (isa<BlockDecl>(D)) {
    output << "block(line:" << Loc.getLine() << ":col:" << Loc.getColumn();
  }

  NumBlocksUnreachable += unreachable;
  NumBlocks += total;
  std::string NameOfRootFunction = output.str();

  output << " -> Total CFGBlocks: " << total << " | Unreachable CFGBlocks: "
         << unreachable << " | Exhausted Block: "
         << (Eng.wasBlocksExhausted() ? "yes" : "no")
         << " | Empty WorkList: "
         << (Eng.hasEmptyWorkList() ? "yes" : "no");

  B.EmitBasicReport(D, "Analyzer Statistics", "Internal Statistics",
                    output.str(), PathDiagnosticLocation(D, SM));

  // Emit warning for each block we bailed out on.
  typedef CoreEngine::BlocksExhausted::const_iterator ExhaustedIterator;
  const CoreEngine &CE = Eng.getCoreEngine();
  for (ExhaustedIterator I = CE.blocks_exhausted_begin(),
       E = CE.blocks_exhausted_end(); I != E; ++I) {
    const BlockEdge &BE = I->first;
    const CFGBlock *Exit = BE.getDst();
    const CFGElement &CE = Exit->front();
    if (const CFGStmt *CS = dyn_cast<CFGStmt>(&CE)) {
      SmallString<128> bufI;
      llvm::raw_svector_ostream outputI(bufI);
      outputI << "(" << NameOfRootFunction << ")"
              << ": The analyzer generated a sink at this point";
      B.EmitBasicReport(D, "Sink Point", "Internal Statistics", outputI.str(),
                        PathDiagnosticLocation::createBegin(CS->getStmt(),
                                                            SM, LC));
    }
  }
}
void AnalyzerStatsChecker::checkEndAnalysis(ExplodedGraph &G,
                                            BugReporter &B,
                                            ExprEngine &Eng) const {
  const CFG *C = 0;
  const Decl *D = 0;
  const LocationContext *LC = 0;
  const SourceManager &SM = B.getSourceManager();
  llvm::SmallPtrSet<const CFGBlock*, 256> reachable;

  // Iterate over explodedgraph
  for (ExplodedGraph::node_iterator I = G.nodes_begin();
       I != G.nodes_end(); ++I) {
    const ProgramPoint &P = I->getLocation();
    // Save the LocationContext if we don't have it already
    if (!LC)
      LC = P.getLocationContext();

    if (const BlockEntrance *BE = dyn_cast<BlockEntrance>(&P)) {
      const CFGBlock *CB = BE->getBlock();
      reachable.insert(CB);
    }
  }

  // Get the CFG and the Decl of this block
  C = LC->getCFG();
  D = LC->getAnalysisContext()->getDecl();

  unsigned total = 0, unreachable = 0;

  // Find CFGBlocks that were not covered by any node
  for (CFG::const_iterator I = C->begin(); I != C->end(); ++I) {
    const CFGBlock *CB = *I;
    ++total;
    // Check if the block is unreachable
    if (!reachable.count(CB)) {
      ++unreachable;
    }
  }

  // We never 'reach' the entry block, so correct the unreachable count
  unreachable--;

  // Generate the warning string
  llvm::SmallString<128> buf;
  llvm::raw_svector_ostream output(buf);
  PresumedLoc Loc = SM.getPresumedLoc(D->getLocation());
  if (Loc.isValid()) {
    output << Loc.getFilename() << " : ";

    if (isa<FunctionDecl>(D) || isa<ObjCMethodDecl>(D)) {
      const NamedDecl *ND = cast<NamedDecl>(D);
      output << ND;
    }
    else if (isa<BlockDecl>(D)) {
      output << "block(line:" << Loc.getLine() << ":col:" << Loc.getColumn();
    }
  }

  output << " -> Total CFGBlocks: " << total << " | Unreachable CFGBlocks: "
         << unreachable << " | Exhausted Block: "
         << (Eng.wasBlocksExhausted() ? "yes" : "no")
         << " | Empty WorkList: "
         << (Eng.hasEmptyWorkList() ? "yes" : "no");

  B.EmitBasicReport("Analyzer Statistics", "Internal Statistics", output.str(),
                    PathDiagnosticLocation(D, SM));

  // Emit warning for each block we bailed out on
  typedef CoreEngine::BlocksExhausted::const_iterator ExhaustedIterator;
  const CoreEngine &CE = Eng.getCoreEngine();
  for (ExhaustedIterator I = CE.blocks_exhausted_begin(),
       E = CE.blocks_exhausted_end(); I != E; ++I) {
    const BlockEdge &BE = I->first;
    const CFGBlock *Exit = BE.getDst();
    const CFGElement &CE = Exit->front();
    if (const CFGStmt *CS = dyn_cast<CFGStmt>(&CE))
      B.EmitBasicReport("Bailout Point", "Internal Statistics",
                        "The analyzer stopped analyzing at this point",
                        PathDiagnosticLocation::createBegin(CS->getStmt(),
                                                            SM, LC));
  }
}
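// A minimal, standalone sketch of the statistics both versions above compute:
// count total CFG blocks, count the ones never entered, correct for the entry
// block (and, in the newer version, the exit block) that never produces a
// BlockEntrance, and format one summary line. The block count, reachable set,
// and function name here are made-up illustrative data, not analyzer output.
#include <cstdio>
#include <set>
#include <string>

int main() {
  const unsigned TotalBlocks = 12;
  // Block IDs observed at BlockEntrance program points; entry (0) and exit
  // (11) never appear there even when analysis covers the whole function.
  std::set<unsigned> reachable = {1, 2, 3, 4, 5, 7, 8, 9};

  unsigned total = 0, unreachable = 0;
  for (unsigned id = 0; id < TotalBlocks; ++id) {
    ++total;
    if (!reachable.count(id))
      ++unreachable;
  }

  // We never 'reach' the entry block, and there is no BlockEntrance for the
  // exit block either, so treat both as covered.
  unreachable -= 2;

  bool blocksExhausted = false, emptyWorkList = true;
  std::string report = "exampleFunction -> Total CFGBlocks: " +
                       std::to_string(total) +
                       " | Unreachable CFGBlocks: " +
                       std::to_string(unreachable) +
                       " | Exhausted Block: " +
                       (blocksExhausted ? "yes" : "no") +
                       " | Empty WorkList: " + (emptyWorkList ? "yes" : "no");
  std::printf("%s\n", report.c_str());
  return 0;
}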