void MPIChecker::checkDoubleNonblocking(const CallEvent &PreCallEvent, CheckerContext &Ctx) const { if (!FuncClassifier->isNonBlockingType(PreCallEvent.getCalleeIdentifier())) { return; } const MemRegion *const MR = PreCallEvent.getArgSVal(PreCallEvent.getNumArgs() - 1).getAsRegion(); if (!MR) return; const ElementRegion *const ER = dyn_cast<ElementRegion>(MR); // The region must be typed, in order to reason about it. if (!isa<TypedRegion>(MR) || (ER && !isa<TypedRegion>(ER->getSuperRegion()))) return; ProgramStateRef State = Ctx.getState(); const Request *const Req = State->get<RequestMap>(MR); // double nonblocking detected if (Req && Req->CurrentState == Request::State::Nonblocking) { ExplodedNode *ErrorNode = Ctx.generateNonFatalErrorNode(); BReporter.reportDoubleNonblocking(PreCallEvent, *Req, MR, ErrorNode, Ctx.getBugReporter()); Ctx.addTransition(ErrorNode->getState(), ErrorNode); } // no error else { State = State->set<RequestMap>(MR, Request::State::Nonblocking); Ctx.addTransition(State); } }
/// Bind the result of \p Call to its origin expression in \p State.
/// Known method families bind their receiver; constructors bind the
/// constructed object; anything else gets a freshly conjured symbol.
ProgramStateRef ExprEngine::bindReturnValue(const CallEvent &Call,
                                            const LocationContext *LCtx,
                                            ProgramStateRef State) {
  // With no origin expression there is nothing to bind the value to.
  const Expr *CallExpr = Call.getOriginExpr();
  if (!CallExpr)
    return State;

  if (const ObjCMethodCall *Msg = dyn_cast<ObjCMethodCall>(&Call)) {
    // These method families are known to return their receivers.
    auto Family = Msg->getMethodFamily();
    if (Family == OMF_autorelease || Family == OMF_retain ||
        Family == OMF_self)
      return State->BindExpr(CallExpr, LCtx, Msg->getReceiverSVal());
  } else if (const CXXConstructorCall *Ctor =
                 dyn_cast<CXXConstructorCall>(&Call)) {
    // Constructors "return" the object under construction.
    return State->BindExpr(CallExpr, LCtx, Ctor->getCXXThisVal());
  }

  // The return value is unknown: conjure a fresh symbol for it.
  SValBuilder &Builder = getSValBuilder();
  SVal RetVal = Builder.conjureSymbolVal(0, CallExpr, LCtx,
                                         Call.getResultType(),
                                         currBldrCtx->blockCount());
  return State->BindExpr(CallExpr, LCtx, RetVal);
}
/// Default evaluation of a call: try to inline it; if that fails, evaluate it
/// conservatively by invalidating reachable regions and binding a conjured
/// return value.
void ExprEngine::defaultEvalCall(NodeBuilder &Bldr, ExplodedNode *Pred,
                                 const CallEvent &Call) {
  // State stays null while inlining is still a possibility.
  ProgramStateRef State = 0;
  const Expr *E = Call.getOriginExpr();

  // Try to inline the call.
  // The origin expression here is just used as a kind of checksum;
  // for CallEvents that do not have origin expressions, this should still be
  // safe.
  // NOTE(review): Objective-C message sends are never inlined here —
  // presumably handled elsewhere; confirm against the surrounding file.
  if (!isa<ObjCMethodCall>(Call)) {
    State = getInlineFailedState(Pred->getState(), E);
    if (State == 0 && inlineCall(Call, Pred)) {
      // If we inlined the call, the successor has been manually added onto
      // the work list and we should not consider it for subsequent call
      // handling steps.
      Bldr.takeNodes(Pred);
      return;
    }
  }

  // If we can't inline it, handle the return value and invalidate the regions.
  if (State == 0)
    State = Pred->getState();

  // Invalidate any regions touched by the call.
  unsigned Count = currentBuilderContext->getCurrentBlockCount();
  State = Call.invalidateRegions(Count, State);

  // Construct and bind the return value.
  State = bindReturnValue(Call, Pred->getLocationContext(), State);

  // And make the result node.
  Bldr.generateNode(Call.getProgramPoint(), State, Pred);
}
/// Default evaluation of a call (set-based variant): attempt inlining; on
/// failure invalidate regions, conjure a symbol for the result if there is an
/// origin expression, and generate the post-call node.
void ExprEngine::defaultEvalCall(ExplodedNodeSet &Dst, ExplodedNode *Pred,
                                 const CallEvent &Call) {
  // Try to inline the call.
  // The origin expression here is just used as a kind of checksum;
  // for CallEvents that do not have origin expressions, this should still be
  // safe.
  const Expr *E = Call.getOriginExpr();
  ProgramStateRef state = getInlineFailedState(Pred, E);
  if (state == 0 && inlineCall(Dst, Call, Pred))
    return;

  // If we can't inline it, handle the return value and invalidate the regions.
  NodeBuilder Bldr(Pred, Dst, *currentBuilderContext);

  // Invalidate any regions touched by the call.
  unsigned Count = currentBuilderContext->getCurrentBlockCount();
  if (state == 0)
    state = Pred->getState();
  state = Call.invalidateRegions(Count, state);

  // Conjure a symbol value to use as the result.
  // Skipped entirely when there is no origin expression to bind it to.
  if (E) {
    QualType ResultTy = Call.getResultType();
    SValBuilder &SVB = getSValBuilder();
    const LocationContext *LCtx = Pred->getLocationContext();
    SVal RetVal = SVB.getConjuredSymbolVal(0, E, LCtx, ResultTy, Count);
    state = state->BindExpr(E, LCtx, RetVal);
  }

  // And make the result node.
  Bldr.generateNode(Call.getProgramPoint(), state, Pred);
}
void SimpleStreamChecker::checkPreCall(const CallEvent &Call, CheckerContext &C) const { if (!Call.isGlobalCFunction()) return; if (!Call.isCalled(CloseFn)) return; // Get the symbolic value corresponding to the file handle. SymbolRef FileDesc = Call.getArgSVal(0).getAsSymbol(); if (!FileDesc) return; // Check if the stream has already been closed. ProgramStateRef State = C.getState(); const StreamState *SS = State->get<StreamMap>(FileDesc); if (SS && SS->isClosed()) { reportDoubleClose(FileDesc, Call, C); return; } // Generate the next transition, in which the stream is closed. State = State->set<StreamMap>(FileDesc, StreamState::getClosed()); C.addTransition(State); }
void ObjCSelfInitChecker::checkPreCall(const CallEvent &CE, CheckerContext &C) const { // FIXME: A callback should disable checkers at the start of functions. if (!shouldRunOnFunctionOrMethod(dyn_cast<NamedDecl>( C.getCurrentAnalysisDeclContext()->getDecl()))) return; ProgramStateRef state = C.getState(); unsigned NumArgs = CE.getNumArgs(); // If we passed 'self' as and argument to the call, record it in the state // to be propagated after the call. // Note, we could have just given up, but try to be more optimistic here and // assume that the functions are going to continue initialization or will not // modify self. for (unsigned i = 0; i < NumArgs; ++i) { SVal argV = CE.getArgSVal(i); if (isSelfVar(argV, C)) { unsigned selfFlags = getSelfFlags(state->getSVal(cast<Loc>(argV)), C); C.addTransition(state->set<PreCallSelfFlags>(selfFlags)); return; } else if (hasSelfFlag(argV, SelfFlag_Self, C)) { unsigned selfFlags = getSelfFlags(argV, C); C.addTransition(state->set<PreCallSelfFlags>(selfFlags)); return; } } }
/// After a call: restore the 'self' flags that checkPreCall stashed in
/// PreCallSelfFlags, propagating them either through the pointed-to variable
/// (when &self was passed) or onto the return value (when self was passed by
/// value).
void ObjCSelfInitChecker::checkPostCall(const CallEvent &CE,
                                        CheckerContext &C) const {
  // FIXME: A callback should disable checkers at the start of functions.
  if (!shouldRunOnFunctionOrMethod(dyn_cast<NamedDecl>(
          C.getCurrentAnalysisDeclContext()->getDecl())))
    return;

  ProgramStateRef state = C.getState();
  SelfFlagEnum prevFlags = (SelfFlagEnum)state->get<PreCallSelfFlags>();
  // No flags were recorded before the call; nothing to propagate.
  if (!prevFlags)
    return;
  // Consume the stashed flags so they don't leak into later calls.
  state = state->remove<PreCallSelfFlags>();

  unsigned NumArgs = CE.getNumArgs();
  for (unsigned i = 0; i < NumArgs; ++i) {
    SVal argV = CE.getArgSVal(i);
    if (isSelfVar(argV, C)) {
      // If the address of 'self' is being passed to the call, assume that the
      // 'self' after the call will have the same flags.
      // EX: log(&self)
      addSelfFlag(state, state->getSVal(cast<Loc>(argV)), prevFlags, C);
      return;
    } else if (hasSelfFlag(argV, SelfFlag_Self, C)) {
      // If 'self' is passed to the call by value, assume that the function
      // returns 'self'. So assign the flags, which were set on 'self' to the
      // return value.
      // EX: self = performMoreInitialization(self)
      addSelfFlag(state, CE.getReturnValue(), prevFlags, C);
      return;
    }
  }

  // No argument matched: commit the state with the flags removed.
  C.addTransition(state);
}
static void addParameterValuesToBindings(const StackFrameContext *CalleeCtx, CallEvent::BindingsTy &Bindings, SValBuilder &SVB, const CallEvent &Call, CallEvent::param_iterator I, CallEvent::param_iterator E) { MemRegionManager &MRMgr = SVB.getRegionManager(); // If the function has fewer parameters than the call has arguments, we simply // do not bind any values to them. unsigned NumArgs = Call.getNumArgs(); unsigned Idx = 0; for (; I != E && Idx < NumArgs; ++I, ++Idx) { const ParmVarDecl *ParamDecl = *I; assert(ParamDecl && "Formal parameter has no decl?"); SVal ArgVal = Call.getArgSVal(Idx); if (!ArgVal.isUnknown()) { Loc ParamLoc = SVB.makeLoc(MRMgr.getVarRegion(ParamDecl, CalleeCtx)); Bindings.push_back(std::make_pair(ParamLoc, ArgVal)); } } // FIXME: Variadic arguments are not handled at all right now. }
void DoubleFetchChecker::checkPostCall(const CallEvent &Call,CheckerContext &Ctx) const { const IdentifierInfo *ID = Call.getCalleeIdentifier(); std::cout<<"[checkPostCall]------call function:"<<ID->getName().str()<<std::endl; ProgramStateRef state = Ctx.getState(); if(ID == NULL) { return; } if (ID->getName() == "malloc") { SVal arg = Call.getArgSVal(0); SVal ret = Call.getReturnValue(); if (this->isTaintedByTime(state, arg)){ std::cout<<"[checkPostCall] arg of malloc is tainted."<<"\targ is:"<<toStr(arg)<<std::endl; //pass current taint tag to return value ProgramStateRef newstate = passTaints(state, arg, ret); if (newstate!=state && newstate != NULL){ Ctx.addTransition(newstate); std::cout<<"[checkPostCall][add ret Taint finish] ret is "<<toStr(ret)<<std::endl; showValTaintTags(newstate, ret); } else std::cout<<"[checkPostCall][add ret Taint failed] ret is "<<toStr(ret)<<std::endl; } else{ std::cout<<"[checkPostCall] arg of malloc not tainted."<<"\targ is:"<<toStr(arg)<<std::endl; } } }
// Conservatively evaluate call by invalidating regions and binding // a conjured return value. void ExprEngine::conservativeEvalCall(const CallEvent &Call, NodeBuilder &Bldr, ExplodedNode *Pred, ProgramStateRef State) { State = Call.invalidateRegions(currBldrCtx->blockCount(), State); State = bindReturnValue(Call, Pred->getLocationContext(), State); // And make the result node. Bldr.generateNode(Call.getProgramPoint(), State, Pred); }
// FIXME: This is the sort of code that should eventually live in a Core // checker rather than as a special case in ExprEngine. void ExprEngine::performTrivialCopy(NodeBuilder &Bldr, ExplodedNode *Pred, const CallEvent &Call) { SVal ThisVal; bool AlwaysReturnsLValue; const CXXRecordDecl *ThisRD = nullptr; if (const CXXConstructorCall *Ctor = dyn_cast<CXXConstructorCall>(&Call)) { assert(Ctor->getDecl()->isTrivial()); assert(Ctor->getDecl()->isCopyOrMoveConstructor()); ThisVal = Ctor->getCXXThisVal(); ThisRD = Ctor->getDecl()->getParent(); AlwaysReturnsLValue = false; } else { assert(cast<CXXMethodDecl>(Call.getDecl())->isTrivial()); assert(cast<CXXMethodDecl>(Call.getDecl())->getOverloadedOperator() == OO_Equal); ThisVal = cast<CXXInstanceCall>(Call).getCXXThisVal(); ThisRD = cast<CXXMethodDecl>(Call.getDecl())->getParent(); AlwaysReturnsLValue = true; } assert(ThisRD); if (ThisRD->isEmpty()) { // Do nothing for empty classes. Otherwise it'd retrieve an UnknownVal // and bind it and RegionStore would think that the actual value // in this region at this offset is unknown. return; } const LocationContext *LCtx = Pred->getLocationContext(); ExplodedNodeSet Dst; Bldr.takeNodes(Pred); SVal V = Call.getArgSVal(0); // If the value being copied is not unknown, load from its location to get // an aggregate rvalue. if (Optional<Loc> L = V.getAs<Loc>()) V = Pred->getState()->getSVal(*L); else assert(V.isUnknownOrUndef()); const Expr *CallExpr = Call.getOriginExpr(); evalBind(Dst, CallExpr, Pred, ThisVal, V, true); PostStmt PS(CallExpr, LCtx); for (ExplodedNodeSet::iterator I = Dst.begin(), E = Dst.end(); I != E; ++I) { ProgramStateRef State = (*I)->getState(); if (AlwaysReturnsLValue) State = State->BindExpr(CallExpr, LCtx, ThisVal); else State = bindReturnValue(Call, LCtx, State); Bldr.generateNode(PS, State, *I); } }
// Try to retrieve the function declaration and find the function parameter // types which are pointers/references to a non-pointer const. // We will not invalidate the corresponding argument regions. static void findPtrToConstParams(llvm::SmallSet<unsigned, 1> &PreserveArgs, const CallEvent &Call) { unsigned Idx = 0; for (CallEvent::param_type_iterator I = Call.param_type_begin(), E = Call.param_type_end(); I != E; ++I, ++Idx) { if (isPointerToConst(*I)) PreserveArgs.insert(Idx); } }
/// Returns true if \p Call is one of the known functions that may block the
/// calling thread (sleep, getc, fgets, read, recv).
bool BlockInCriticalSectionChecker::isBlockingFunction(const CallEvent &Call) const {
  return Call.isCalled(SleepFn) || Call.isCalled(GetcFn) ||
         Call.isCalled(FgetsFn) || Call.isCalled(ReadFn) ||
         Call.isCalled(RecvFn);
}
/// Inline the callee \p D for \p Call: build a callee stack frame, enter it in
/// the state (binding actuals to formals), enqueue the CallEnter node on the
/// work list, and remove \p Pred from the node builder. Always returns true.
bool ExprEngine::inlineCall(const CallEvent &Call, const Decl *D,
                            NodeBuilder &Bldr, ExplodedNode *Pred,
                            ProgramStateRef State) {
  assert(D);

  const LocationContext *CurLC = Pred->getLocationContext();
  const StackFrameContext *CallerSFC = CurLC->getCurrentStackFrame();
  const LocationContext *ParentOfCallee = CallerSFC;
  // Blocks (other than lambda conversions) need a block invocation context
  // as the parent so captures resolve against the right block region.
  if (Call.getKind() == CE_Block &&
      !cast<BlockCall>(Call).isConversionFromLambda()) {
    const BlockDataRegion *BR = cast<BlockCall>(Call).getBlockRegion();
    assert(BR && "If we have the block definition we should have its region");
    AnalysisDeclContext *BlockCtx = AMgr.getAnalysisDeclContext(D);
    ParentOfCallee = BlockCtx->getBlockInvocationContext(CallerSFC,
                                                         cast<BlockDecl>(D),
                                                         BR);
  }

  // This may be NULL, but that's fine.
  const Expr *CallE = Call.getOriginExpr();

  // Construct a new stack frame for the callee.
  AnalysisDeclContext *CalleeADC = AMgr.getAnalysisDeclContext(D);
  const StackFrameContext *CalleeSFC =
      CalleeADC->getStackFrame(ParentOfCallee, CallE, currBldrCtx->getBlock(),
                               currStmtIdx);

  CallEnter Loc(CallE, CalleeSFC, CurLC);

  // Construct a new state which contains the mapping from actual to
  // formal arguments.
  State = State->enterStackFrame(Call, CalleeSFC);

  bool isNew;
  if (ExplodedNode *N = G.getNode(Loc, State, false, &isNew)) {
    N->addPredecessor(Pred, G);
    // Only enqueue nodes we have not visited before.
    if (isNew)
      Engine.getWorkList()->enqueue(N);
  }

  // If we decided to inline the call, the successor has been manually
  // added onto the work list so remove it from the node builder.
  Bldr.takeNodes(Pred);

  NumInlinedCalls++;
  Engine.FunctionSummaries->bumpNumTimesInlined(D);

  // Mark the decl as visited.
  if (VisitedCallees)
    VisitedCallees->insert(D);

  return true;
}
/// Before any call: warn on an undefined or null C++ 'this' pointer, and on
/// uninitialized argument values (including uninitialized fields of aggregate
/// arguments when the callee will not be inlined).
void CallAndMessageChecker::checkPreCall(const CallEvent &Call,
                                         CheckerContext &C) const {
  ProgramStateRef State = C.getState();

  // If this is a call to a C++ method, check if the callee is null or
  // undefined.
  if (const CXXInstanceCall *CC = dyn_cast<CXXInstanceCall>(&Call)) {
    SVal V = CC->getCXXThisVal();
    if (V.isUndef()) {
      if (!BT_cxx_call_undef)
        BT_cxx_call_undef.reset(new BuiltinBug("Called C++ object pointer is "
                                               "uninitialized"));
      emitBadCall(BT_cxx_call_undef.get(), C, CC->getCXXThisExpr());
      return;
    }

    // Split on the nullness of 'this'; report only if it is *definitely* null.
    ProgramStateRef StNonNull, StNull;
    llvm::tie(StNonNull, StNull) =
        State->assume(V.castAs<DefinedOrUnknownSVal>());

    if (StNull && !StNonNull) {
      if (!BT_cxx_call_null)
        BT_cxx_call_null.reset(new BuiltinBug("Called C++ object pointer "
                                              "is null"));
      emitBadCall(BT_cxx_call_null.get(), C, CC->getCXXThisExpr());
      return;
    }

    // From here on, assume 'this' is non-null.
    State = StNonNull;
  }

  // Don't check for uninitialized field values in arguments if the
  // caller has a body that is available and we have the chance to inline it.
  // This is a hack, but is a reasonable compromise betweens sometimes warning
  // and sometimes not depending on if we decide to inline a function.
  const Decl *D = Call.getDecl();
  const bool checkUninitFields =
      !(C.getAnalysisManager().shouldInlineCall() && (D && D->getBody()));

  // Pick the bug type matching the call flavor (message send vs. call).
  OwningPtr<BugType> *BT;
  if (isa<ObjCMethodCall>(Call))
    BT = &BT_msg_arg;
  else
    BT = &BT_call_arg;

  for (unsigned i = 0, e = Call.getNumArgs(); i != e; ++i)
    if (PreVisitProcessArg(C, Call.getArgSVal(i), Call.getArgSourceRange(i),
                           Call.getArgExpr(i), /*IsFirstArgument=*/i == 0,
                           checkUninitFields, Call, *BT))
      return;

  // If we make it here, record our assumptions about the callee.
  C.addTransition(State);
}
/// Qt event hook: execute a posted CallEvent's function and, for synchronous
/// callers, mark its sync record complete. Returns true when handled.
bool CallEventHandler::event(QEvent* e) {
  auto* callEvent = dynamic_cast<CallEvent*>(e);
  // Not our event type: let the default handling take over.
  if (!callEvent)
    return false;

  callEvent->function();
  if (callEvent->syncInfo)
    callEvent->syncInfo->completed = true;
  return true;
}
void StackAddrEscapeChecker::checkPreCall(const CallEvent &Call, CheckerContext &C) const { if (!ChecksEnabled[CK_StackAddrAsyncEscapeChecker]) return; if (!Call.isGlobalCFunction("dispatch_after") && !Call.isGlobalCFunction("dispatch_async")) return; for (unsigned Idx = 0, NumArgs = Call.getNumArgs(); Idx < NumArgs; ++Idx) { if (const BlockDataRegion *B = dyn_cast_or_null<BlockDataRegion>( Call.getArgSVal(Idx).getAsRegion())) checkAsyncExecutedBlockCaptures(*B, C); } }
bool SimpleStreamChecker::guaranteedNotToCloseFile(const CallEvent &Call) const{ // If it's not in a system header, assume it might close a file. if (!Call.isInSystemHeader()) return false; // Handle cases where we know a buffer's /address/ can escape. if (Call.argumentsMayEscape()) return false; // Note, even though fclose closes the file, we do not list it here // since the checker is modeling the call. return true; }
bool BlockInCriticalSectionChecker::isUnlockFunction(const CallEvent &Call) const { if (const auto *Dtor = dyn_cast<CXXDestructorCall>(&Call)) { const auto *DRecordDecl = dyn_cast<CXXRecordDecl>(Dtor->getDecl()->getParent()); auto IdentifierInfo = DRecordDecl->getIdentifier(); if (IdentifierInfo == IILockGuard || IdentifierInfo == IIUniqueLock) return true; } if (Call.isCalled(UnlockFn) || Call.isCalled(PthreadUnlockFn) || Call.isCalled(MtxUnlock)) { return true; } return false; }
/// Qt event hook: run a posted CallEvent's function and clear the pending
/// flag. The flag is cleared after the call in conservative mode and before
/// it otherwise. Returns true when handled.
bool LazyCallerImpl::event(QEvent* e) {
  auto* callEvent = dynamic_cast<CallEvent*>(e);
  if (!callEvent)
    return false;

  if (isConservative) {
    callEvent->function();
    self->isPending_ = false;
  } else {
    // Clearing first allows the callee to re-arm a pending call.
    self->isPending_ = false;
    callEvent->function();
  }
  return true;
}
/// Emit a bug report for a call to a blocking function made while a critical
/// section (lock) is held.
void BlockInCriticalSectionChecker::reportBlockInCritSection(
    SymbolRef BlockDescSym, const CallEvent &Call, CheckerContext &C) const {
  ExplodedNode *ErrNode = C.generateNonFatalErrorNode();
  // A node may already exist on this path; nothing to report then.
  if (!ErrNode)
    return;

  // Build the diagnostic text naming the blocking callee.
  std::string msg;
  llvm::raw_string_ostream os(msg);
  os << "Call to blocking function '" << Call.getCalleeIdentifier()->getName()
     << "' inside of critical section";

  auto Report = llvm::make_unique<BugReport>(*BlockInCritSectionBugType,
                                             os.str(), ErrNode);
  Report->addRange(Call.getSourceRange());
  Report->markInteresting(BlockDescSym);
  C.emitReport(std::move(Report));
}
/// Diagnose if any of the arguments to CE have already been /// dealloc'd. void ObjCSuperDeallocChecker::diagnoseCallArguments(const CallEvent &CE, CheckerContext &C) const { ProgramStateRef State = C.getState(); unsigned ArgCount = CE.getNumArgs(); for (unsigned I = 0; I < ArgCount; I++) { SymbolRef Sym = CE.getArgSVal(I).getAsSymbol(); if (!Sym) continue; if (State->contains<CalledSuperDealloc>(Sym)) { reportUseAfterDealloc(Sym, StringRef(), CE.getArgExpr(I), C); return; } } }
/// If we are in -dealloc or -dealloc is on the stack, handle the call if it is /// call to Block_release(). void ObjCDeallocChecker::checkPreCall(const CallEvent &Call, CheckerContext &C) const { const IdentifierInfo *II = Call.getCalleeIdentifier(); if (II != Block_releaseII) return; if (Call.getNumArgs() != 1) return; SymbolRef ReleasedValue = Call.getArgSVal(0).getAsSymbol(); if (!ReleasedValue) return; transitionToReleaseValue(C, ReleasedValue); }
/// Bind the result of \p Call to its origin expression in \p State: known
/// Objective-C method families bind the receiver, constructors bind the
/// constructed object (resolving temporary prvalues), and everything else
/// gets a conjured symbol — a heap symbol for replaceable operator new.
ProgramStateRef ExprEngine::bindReturnValue(const CallEvent &Call,
                                            const LocationContext *LCtx,
                                            ProgramStateRef State) {
  // Without an origin expression there is nothing to bind to.
  const Expr *E = Call.getOriginExpr();
  if (!E)
    return State;

  // Some method families have known return values.
  if (const ObjCMethodCall *Msg = dyn_cast<ObjCMethodCall>(&Call)) {
    switch (Msg->getMethodFamily()) {
    default:
      break;
    case OMF_autorelease:
    case OMF_retain:
    case OMF_self: {
      // These methods return their receivers.
      return State->BindExpr(E, LCtx, Msg->getReceiverSVal());
    }
    }
  } else if (const CXXConstructorCall *C = dyn_cast<CXXConstructorCall>(&Call)){
    SVal ThisV = C->getCXXThisVal();

    // If the constructed object is a temporary prvalue, get its bindings.
    if (isTemporaryPRValue(cast<CXXConstructExpr>(E), ThisV))
      ThisV = State->getSVal(ThisV.castAs<Loc>());

    return State->BindExpr(E, LCtx, ThisV);
  }

  // Conjure a symbol if the return value is unknown.
  QualType ResultTy = Call.getResultType();
  SValBuilder &SVB = getSValBuilder();
  unsigned Count = currBldrCtx->blockCount();

  // See if we need to conjure a heap pointer instead of
  // a regular unknown pointer.
  bool IsHeapPointer = false;
  if (const auto *CNE = dyn_cast<CXXNewExpr>(E))
    if (CNE->getOperatorNew()->isReplaceableGlobalAllocationFunction()) {
      // FIXME: Delegate this to evalCall in MallocChecker?
      IsHeapPointer = true;
    }

  SVal R = IsHeapPointer ? SVB.getConjuredHeapSymbolVal(E, LCtx, Count)
                         : SVB.conjureSymbolVal(nullptr, E, LCtx, ResultTy,
                                                Count);
  return State->BindExpr(E, LCtx, R);
}
static void describeUninitializedArgumentInCall(const CallEvent &Call, int ArgumentNumber, llvm::raw_svector_ostream &Os) { switch (Call.getKind()) { case CE_ObjCMessage: { const ObjCMethodCall &Msg = cast<ObjCMethodCall>(Call); switch (Msg.getMessageKind()) { case OCM_Message: Os << (ArgumentNumber + 1) << llvm::getOrdinalSuffix(ArgumentNumber + 1) << " argument in message expression is an uninitialized value"; return; case OCM_PropertyAccess: assert(Msg.isSetter() && "Getters have no args"); Os << "Argument for property setter is an uninitialized value"; return; case OCM_Subscript: if (Msg.isSetter() && (ArgumentNumber == 0)) Os << "Argument for subscript setter is an uninitialized value"; else Os << "Subscript index is an uninitialized value"; return; } llvm_unreachable("Unknown message kind."); } case CE_Block: Os << (ArgumentNumber + 1) << llvm::getOrdinalSuffix(ArgumentNumber + 1) << " block call argument is an uninitialized value"; return; default: Os << (ArgumentNumber + 1) << llvm::getOrdinalSuffix(ArgumentNumber + 1) << " function call argument is an uninitialized value"; return; } }
/// After a call: generate a sink node (terminating the path) when the callee
/// is known not to return — via the analyzer_noreturn/noreturn attributes,
/// the callee expression's type, or a hardwired list of well-known
/// assertion/abort-style functions.
void NoReturnFunctionChecker::checkPostCall(const CallEvent &CE,
                                            CheckerContext &C) const {
  ProgramStateRef state = C.getState();
  bool BuildSinks = false;

  if (const FunctionDecl *FD = dyn_cast_or_null<FunctionDecl>(CE.getDecl()))
    BuildSinks = FD->getAttr<AnalyzerNoReturnAttr>() || FD->isNoReturn();

  const Expr *Callee = CE.getOriginExpr();
  // Fall back to the noreturn bit on the callee expression's function type.
  if (!BuildSinks && Callee)
    BuildSinks = getFunctionExtInfo(Callee->getType()).getNoReturn();

  if (!BuildSinks && CE.isGlobalCFunction()) {
    if (const IdentifierInfo *II = CE.getCalleeIdentifier()) {
      // HACK: Some functions are not marked noreturn, and don't return.
      // Here are a few hardwired ones. If this takes too long, we can
      // potentially cache these results.
      BuildSinks
        = llvm::StringSwitch<bool>(StringRef(II->getName()))
            .Case("exit", true)
            .Case("panic", true)
            .Case("error", true)
            .Case("Assert", true)
            // FIXME: This is just a wrapper around throwing an exception.
            // Eventually inter-procedural analysis should handle this easily.
            .Case("ziperr", true)
            .Case("assfail", true)
            .Case("db_error", true)
            .Case("__assert", true)
            // For the purpose of static analysis, we do not care that
            // this MSVC function will return if the user decides to continue.
            .Case("_wassert", true)
            .Case("__assert_rtn", true)
            .Case("__assert_fail", true)
            .Case("dtrace_assfail", true)
            .Case("yy_fatal_error", true)
            .Case("_XCAssertionFailureHandler", true)
            .Case("_DTAssertionFailureHandler", true)
            .Case("_TSAssertionFailureHandler", true)
            .Default(false);
    }
  }

  // Terminate the path: nothing after a noreturn call is reachable.
  if (BuildSinks)
    C.generateSink();
}
// FIXME: This is the sort of code that should eventually live in a Core // checker rather than as a special case in ExprEngine. void ExprEngine::performTrivialCopy(NodeBuilder &Bldr, ExplodedNode *Pred, const CallEvent &Call) { SVal ThisVal; bool AlwaysReturnsLValue; if (const CXXConstructorCall *Ctor = dyn_cast<CXXConstructorCall>(&Call)) { assert(Ctor->getDecl()->isTrivial()); assert(Ctor->getDecl()->isCopyOrMoveConstructor()); ThisVal = Ctor->getCXXThisVal(); AlwaysReturnsLValue = false; } else { assert(cast<CXXMethodDecl>(Call.getDecl())->isTrivial()); assert(cast<CXXMethodDecl>(Call.getDecl())->getOverloadedOperator() == OO_Equal); ThisVal = cast<CXXInstanceCall>(Call).getCXXThisVal(); AlwaysReturnsLValue = true; } const LocationContext *LCtx = Pred->getLocationContext(); ExplodedNodeSet Dst; Bldr.takeNodes(Pred); SVal V = Call.getArgSVal(0); // If the value being copied is not unknown, load from its location to get // an aggregate rvalue. if (Optional<Loc> L = V.getAs<Loc>()) V = Pred->getState()->getSVal(*L); else assert(V.isUnknown()); const Expr *CallExpr = Call.getOriginExpr(); evalBind(Dst, CallExpr, Pred, ThisVal, V, true); PostStmt PS(CallExpr, LCtx); for (ExplodedNodeSet::iterator I = Dst.begin(), E = Dst.end(); I != E; ++I) { ProgramStateRef State = (*I)->getState(); if (AlwaysReturnsLValue) State = State->BindExpr(CallExpr, LCtx, ThisVal); else State = bindReturnValue(Call, LCtx, State); Bldr.generateNode(PS, State, *I); } }
/// Default evaluation of a call (legacy variant): attempt inlining when there
/// is an origin expression; on failure invalidate regions, conjure a symbol
/// for the result, and generate the post-call node. Requires an origin
/// expression for binding the result.
void ExprEngine::defaultEvalCall(ExplodedNodeSet &Dst, ExplodedNode *Pred,
                                 const CallEvent &Call) {
  // Try to inline the call.
  ProgramStateRef state = 0;
  const Expr *E = Call.getOriginExpr();
  if (E) {
    state = getInlineFailedState(Pred, E);
    if (state == 0 && inlineCall(Dst, Call, Pred))
      return;
  }

  // If we can't inline it, handle the return value and invalidate the regions.
  StmtNodeBuilder Bldr(Pred, Dst, *currentBuilderContext);

  // Invalidate any regions touched by the call.
  unsigned Count = currentBuilderContext->getCurrentBlockCount();
  if (state == 0)
    state = Pred->getState();
  state = Call.invalidateRegions(Count, state);

  // Conjure a symbol value to use as the result.
  assert(Call.getOriginExpr() && "Must have an expression to bind the result");
  QualType ResultTy = Call.getResultType();
  SValBuilder &SVB = getSValBuilder();
  const LocationContext *LCtx = Pred->getLocationContext();
  SVal RetVal = SVB.getConjuredSymbolVal(0, Call.getOriginExpr(), LCtx,
                                         ResultTy, Count);

  // And make the result node.
  state = state->BindExpr(Call.getOriginExpr(), LCtx, RetVal);
  Bldr.generateNode(Call.getOriginExpr(), Pred, state);
}
/// \return Bitvector marking non-null attributes. static llvm::SmallBitVector getNonNullAttrs(const CallEvent &Call) { const Decl *FD = Call.getDecl(); unsigned NumArgs = Call.getNumArgs(); llvm::SmallBitVector AttrNonNull(NumArgs); for (const auto *NonNull : FD->specific_attrs<NonNullAttr>()) { if (!NonNull->args_size()) { AttrNonNull.set(0, NumArgs); break; } for (const ParamIdx &Idx : NonNull->args()) { unsigned IdxAST = Idx.getASTIndex(); if (IdxAST >= NumArgs) continue; AttrNonNull.set(IdxAST); } } return AttrNonNull; }
void SimpleStreamChecker::checkPostCall(const CallEvent &Call, CheckerContext &C) const { if (!Call.isGlobalCFunction()) return; if (!Call.isCalled(OpenFn)) return; // Get the symbolic value corresponding to the file handle. SymbolRef FileDesc = Call.getReturnValue().getAsSymbol(); if (!FileDesc) return; // Generate the next transition (an edge in the exploded graph). ProgramStateRef State = C.getState(); State = State->set<StreamMap>(FileDesc, StreamState::getOpened()); C.addTransition(State); }