void ExprEngine::examineStackFrames(const Decl *D, const LocationContext *LCtx, bool &IsRecursive, unsigned &StackDepth) { IsRecursive = false; StackDepth = 0; while (LCtx) { if (const StackFrameContext *SFC = dyn_cast<StackFrameContext>(LCtx)) { const Decl *DI = SFC->getDecl(); // Mark recursive (and mutually recursive) functions and always count // them when measuring the stack depth. if (DI == D) { IsRecursive = true; ++StackDepth; LCtx = LCtx->getParent(); continue; } // Do not count the small functions when determining the stack depth. AnalysisDeclContext *CalleeADC = AMgr.getAnalysisDeclContext(DI); const CFG *CalleeCFG = CalleeADC->getCFG(); if (CalleeCFG->getNumBlockIDs() > AMgr.options.getAlwaysInlineSize()) ++StackDepth; } LCtx = LCtx->getParent(); } }
// Determine if we should inline the call. bool ExprEngine::shouldInlineDecl(const Decl *D, ExplodedNode *Pred) { AnalysisDeclContext *CalleeADC = AMgr.getAnalysisDeclContext(D); const CFG *CalleeCFG = CalleeADC->getCFG(); // It is possible that the CFG cannot be constructed. // Be safe, and check if the CalleeCFG is valid. if (!CalleeCFG) return false; bool IsRecursive = false; unsigned StackDepth = 0; examineStackFrames(D, Pred->getLocationContext(), IsRecursive, StackDepth); if ((StackDepth >= AMgr.options.InlineMaxStackDepth) && ((CalleeCFG->getNumBlockIDs() > AMgr.options.getAlwaysInlineSize()) || IsRecursive)) return false; if (Engine.FunctionSummaries->hasReachedMaxBlockCount(D)) return false; if (CalleeCFG->getNumBlockIDs() > AMgr.options.InlineMaxFunctionSize) return false; // Do not inline variadic calls (for now). if (const BlockDecl *BD = dyn_cast<BlockDecl>(D)) { if (BD->isVariadic()) return false; } else if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) { if (FD->isVariadic()) return false; } if (getContext().getLangOpts().CPlusPlus) { if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) { // Conditionally allow the inlining of template functions. if (!getAnalysisManager().options.mayInlineTemplateFunctions()) if (FD->getTemplatedKind() != FunctionDecl::TK_NonTemplate) return false; // Conditionally allow the inlining of C++ standard library functions. if (!getAnalysisManager().options.mayInlineCXXStandardLibrary()) if (getContext().getSourceManager().isInSystemHeader(FD->getLocation())) if (IsInStdNamespace(FD)) return false; } } // It is possible that the live variables analysis cannot be // run. If so, bail out. if (!CalleeADC->getAnalysis<RelaxedLiveVariables>()) return false; return true; }
// Determine if we should inline the call. bool ExprEngine::shouldInlineDecl(const FunctionDecl *FD, ExplodedNode *Pred) { AnalysisDeclContext *CalleeADC = AMgr.getAnalysisDeclContext(FD); const CFG *CalleeCFG = CalleeADC->getCFG(); if (getNumberStackFrames(Pred->getLocationContext()) == AMgr.InlineMaxStackDepth) return false; if (Engine.FunctionSummaries->hasReachedMaxBlockCount(FD)) return false; if (CalleeCFG->getNumBlockIDs() > AMgr.InlineMaxFunctionSize) return false; return true; }
void FindUnreachableCode(AnalysisDeclContext &AC, Preprocessor &PP, Callback &CB) { CFG *cfg = AC.getCFG(); if (!cfg) return; // Scan for reachable blocks from the entrance of the CFG. // If there are no unreachable blocks, we're done. llvm::BitVector reachable(cfg->getNumBlockIDs()); unsigned numReachable = scanMaybeReachableFromBlock(&cfg->getEntry(), PP, reachable); if (numReachable == cfg->getNumBlockIDs()) return; // If there aren't explicit EH edges, we should include the 'try' dispatch // blocks as roots. if (!AC.getCFGBuildOptions().AddEHEdges) { for (CFG::try_block_iterator I = cfg->try_blocks_begin(), E = cfg->try_blocks_end() ; I != E; ++I) { numReachable += scanMaybeReachableFromBlock(*I, PP, reachable); } if (numReachable == cfg->getNumBlockIDs()) return; } // There are some unreachable blocks. We need to find the root blocks that // contain code that should be considered unreachable. for (CFG::iterator I = cfg->begin(), E = cfg->end(); I != E; ++I) { const CFGBlock *block = *I; // A block may have been marked reachable during this loop. if (reachable[block->getBlockID()]) continue; DeadCodeScan DS(reachable, PP); numReachable += DS.scanBackwards(block, CB); if (numReachable == cfg->getNumBlockIDs()) return; } }
// Determine if we should inline the call. bool ExprEngine::shouldInlineDecl(const FunctionDecl *FD, ExplodedNode *Pred) { AnalysisDeclContext *CalleeADC = AMgr.getAnalysisDeclContext(FD); const CFG *CalleeCFG = CalleeADC->getCFG(); // It is possible that the CFG cannot be constructed. // Be safe, and check if the CalleeCFG is valid. if (!CalleeCFG) return false; if (getNumberStackFrames(Pred->getLocationContext()) == AMgr.InlineMaxStackDepth) return false; if (Engine.FunctionSummaries->hasReachedMaxBlockCount(FD)) return false; if (CalleeCFG->getNumBlockIDs() > AMgr.InlineMaxFunctionSize) return false; return true; }
// Determine if we should inline the call. bool ExprEngine::shouldInlineDecl(const Decl *D, ExplodedNode *Pred) { // FIXME: default constructors don't have bodies. if (!D->hasBody()) return false; AnalysisDeclContext *CalleeADC = AMgr.getAnalysisDeclContext(D); const CFG *CalleeCFG = CalleeADC->getCFG(); // It is possible that the CFG cannot be constructed. // Be safe, and check if the CalleeCFG is valid. if (!CalleeCFG) return false; if (getNumberStackFrames(Pred->getLocationContext()) == AMgr.InlineMaxStackDepth) return false; if (Engine.FunctionSummaries->hasReachedMaxBlockCount(D)) return false; if (CalleeCFG->getNumBlockIDs() > AMgr.InlineMaxFunctionSize) return false; // Do not inline variadic calls (for now). if (const BlockDecl *BD = dyn_cast<BlockDecl>(D)) { if (BD->isVariadic()) return false; } else if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) { if (FD->isVariadic()) return false; } // It is possible that the live variables analysis cannot be // run. If so, bail out. if (!CalleeADC->getAnalysis<RelaxedLiveVariables>()) return false; return true; }
// Decide whether the given call should be inlined into the current analysis.
// Combines cached per-function summary data with per-call-site checks; as a
// side effect it records inlinability decisions in FunctionSummaries and, on
// success, bumps the callee's inline count.
bool ExprEngine::shouldInlineCall(const CallEvent &Call, const Decl *D,
                                  const ExplodedNode *Pred) {
  // No declaration means nothing to inline.
  if (!D)
    return false;

  AnalysisManager &AMgr = getAnalysisManager();
  AnalyzerOptions &Opts = AMgr.options;
  AnalysisDeclContextManager &ADCMgr = AMgr.getAnalysisDeclContextManager();
  AnalysisDeclContext *CalleeADC = ADCMgr.getContext(D);

  // Temporary object destructor processing is currently broken, so we never
  // inline them.
  // FIXME: Remove this once temp destructors are working.
  if (isa<CXXDestructorCall>(Call)) {
    // Only skip when the current CFG element is a temporary-destructor entry.
    if ((*currBldrCtx->getBlock())[currStmtIdx].getAs<CFGTemporaryDtor>())
      return false;
  }

  // The auto-synthesized bodies are essential to inline as they are
  // usually small and commonly used. Note: we should do this check early on to
  // ensure we always inline these calls.
  if (CalleeADC->isBodyAutosynthesized())
    return true;

  // Inlining may be globally disabled via the analyzer configuration.
  if (!AMgr.shouldInlineCall())
    return false;

  // Check if this function has been marked as non-inlinable.
  Optional<bool> MayInline = Engine.FunctionSummaries->mayInline(D);
  if (MayInline.hasValue()) {
    if (!MayInline.getValue())
      return false;
  } else {
    // We haven't actually checked the static properties of this function yet.
    // Do that now, and record our decision in the function summaries.
    if (mayInlineDecl(CalleeADC, Opts)) {
      Engine.FunctionSummaries->markMayInline(D);
    } else {
      Engine.FunctionSummaries->markShouldNotInline(D);
      return false;
    }
  }

  // Check if we should inline a call based on its kind.
  // FIXME: this checks both static and dynamic properties of the call, which
  // means we're redoing a bit of work that could be cached in the function
  // summary.
  CallInlinePolicy CIP = mayInlineCallKind(Call, Pred, Opts);
  if (CIP != CIP_Allowed) {
    if (CIP == CIP_DisallowedAlways) {
      // A permanent rejection here must not contradict an earlier
      // "may inline" summary entry.
      assert(!MayInline.hasValue() || MayInline.getValue());
      Engine.FunctionSummaries->markShouldNotInline(D);
    }
    return false;
  }

  const CFG *CalleeCFG = CalleeADC->getCFG();

  // Do not inline if recursive or we've reached max stack frame count.
  bool IsRecursive = false;
  unsigned StackDepth = 0;
  examineStackFrames(D, Pred->getLocationContext(), IsRecursive, StackDepth);
  if ((StackDepth >= Opts.InlineMaxStackDepth) &&
      ((CalleeCFG->getNumBlockIDs() > Opts.getAlwaysInlineSize()) ||
       IsRecursive))
    return false;

  // Do not inline large functions too many times.
  // NOTE(review): 13 is a hard-coded "large function" block-count threshold;
  // presumably it should come from AnalyzerOptions — confirm before changing.
  if ((Engine.FunctionSummaries->getNumTimesInlined(D) >
       Opts.getMaxTimesInlineLarge()) &&
      CalleeCFG->getNumBlockIDs() > 13) {
    NumReachedInlineCountMax++;
    return false;
  }

  // In minimal-inlining mode only small, non-recursive callees qualify.
  if (HowToInline == Inline_Minimal &&
      (CalleeCFG->getNumBlockIDs() > Opts.getAlwaysInlineSize() ||
       IsRecursive))
    return false;

  // The call will be inlined; account for it in the callee's summary.
  Engine.FunctionSummaries->bumpNumTimesInlined(D);

  return true;
}