void ReturnUndefChecker::checkPreStmt(const ReturnStmt *RS, CheckerContext &C) const { const Expr *RetE = RS->getRetValue(); if (!RetE) return; SVal RetVal = C.getSVal(RetE); const StackFrameContext *SFC = C.getStackFrame(); QualType RT = CallEvent::getDeclaredResultType(SFC->getDecl()); if (RetVal.isUndef()) { // "return;" is modeled to evaluate to an UndefinedVal. Allow UndefinedVal // to be returned in functions returning void to support this pattern: // void foo() { // return; // } // void test() { // return foo(); // } if (RT.isNull() || !RT->isVoidType()) emitUndef(C, RetE); return; } if (RT.isNull()) return; if (RT->isReferenceType()) { checkReference(C, RetE, RetVal.castAs<DefinedOrUnknownSVal>()); return; } }
// Post-message callback: when an init-family message is seen, flag its return
// value so later checks know that a 'self' holding this value was properly
// initialized.
void ObjCSelfInitChecker::checkPostObjCMessage(const ObjCMethodCall &Msg,
                                               CheckerContext &C) const {
  // When encountering a message that does initialization (init rule),
  // tag the return value so that we know later on that if self has this value
  // then it is properly initialized.

  // FIXME: A callback should disable checkers at the start of functions.
  if (!shouldRunOnFunctionOrMethod(dyn_cast<NamedDecl>(
        C.getCurrentAnalysisDeclContext()->getDecl())))
    return;

  if (isInitMessage(Msg)) {
    // Tag the return value as the result of an initializer.
    ProgramStateRef state = C.getState();

    // FIXME this really should be context sensitive, where we record
    // the current stack frame (for IPA).  Also, we need to clean this
    // value out when we return from this method.
    state = state->set<CalledInit>(true);

    SVal V = C.getSVal(Msg.getOriginExpr());
    addSelfFlag(state, V, SelfFlag_InitRes, C);
    return;
  }

  // We don't check for an invalid 'self' in an obj-c message expression to cut
  // down false positives where logging functions get information from self
  // (like its class) or doing "invalidation" on self when the initialization
  // fails.
}
bool GenericTaintChecker::generateReportIfTainted(const Expr *E, const char Msg[], CheckerContext &C) const { assert(E); // Check for taint. ProgramStateRef State = C.getState(); Optional<SVal> PointedToSVal = getPointedToSVal(C, E); SVal TaintedSVal; if (PointedToSVal && State->isTainted(*PointedToSVal)) TaintedSVal = *PointedToSVal; else if (State->isTainted(E, C.getLocationContext())) TaintedSVal = C.getSVal(E); else return false; // Generate diagnostic. if (ExplodedNode *N = C.generateNonFatalErrorNode()) { initBugType(); auto report = llvm::make_unique<BugReport>(*BT, Msg, N); report->addRange(E->getSourceRange()); report->addVisitor(llvm::make_unique<TaintBugVisitor>(TaintedSVal)); C.emitReport(std::move(report)); return true; } return false; }
// Returns true when the expression evaluates to a symbolic region bound to a
// global/extern FILE* variable whose name contains "stdin".
bool GenericTaintChecker::isStdin(const Expr *E, CheckerContext &C) {
  ProgramStateRef State = C.getState();
  SVal Val = C.getSVal(E);

  // stdin is a pointer, so it would be a region.
  const MemRegion *MemReg = Val.getAsRegion();

  // The region should be symbolic, we do not know it's value.
  const SymbolicRegion *SymReg = dyn_cast_or_null<SymbolicRegion>(MemReg);
  if (!SymReg)
    return false;

  // Get it's symbol and find the declaration region it's pointing to.
  const SymbolRegionValue *Sm =
      dyn_cast<SymbolRegionValue>(SymReg->getSymbol());
  if (!Sm)
    return false;
  const DeclRegion *DeclReg = dyn_cast_or_null<DeclRegion>(Sm->getRegion());
  if (!DeclReg)
    return false;

  // This region corresponds to a declaration, find out if it's a global/extern
  // variable named stdin with the proper type.
  if (const VarDecl *D = dyn_cast_or_null<VarDecl>(DeclReg->getDecl())) {
    D = D->getCanonicalDecl();
    // NOTE(review): this is a substring match, not equality — presumably so
    // platform-specific spellings such as '__stdinp' are also recognized;
    // confirm that is intended, since names merely containing "stdin" would
    // match too.
    if ((D->getName().find("stdin") != StringRef::npos) && D->isExternC())
      if (const PointerType *PtrTy =
              dyn_cast<PointerType>(D->getType().getTypePtr()))
        if (PtrTy->getPointeeType().getCanonicalType() ==
            C.getASTContext().getFILEType().getCanonicalType())
          return true;
  }
  return false;
}
void FixedAddressChecker::checkPreStmt(const BinaryOperator *B, CheckerContext &C) const { // Using a fixed address is not portable because that address will probably // not be valid in all environments or platforms. if (B->getOpcode() != BO_Assign) return; QualType T = B->getType(); if (!T->isPointerType()) return; SVal RV = C.getSVal(B->getRHS()); if (!RV.isConstant() || RV.isZeroConstant()) return; if (ExplodedNode *N = C.generateNonFatalErrorNode()) { if (!BT) BT.reset( new BuiltinBug(this, "Use fixed address", "Using a fixed address is not portable because that " "address will probably not be valid in all " "environments or platforms.")); auto R = llvm::make_unique<BugReport>(*BT, BT->getDescription(), N); R->addRange(B->getRHS()->getSourceRange()); C.emitReport(std::move(R)); } }
// After a -count message on a known Foundation collection, record the
// returned count symbol for the container so loops over it can be reasoned
// about (e.g. assuming non-emptiness).
void ObjCLoopChecker::checkPostObjCMessage(const ObjCMethodCall &M,
                                           CheckerContext &C) const {
  if (!M.isInstanceMessage())
    return;

  const ObjCInterfaceDecl *ClassID = M.getReceiverInterface();
  if (!ClassID)
    return;

  // Only the collection classes we know how to model.
  FoundationClass Class = findKnownClass(ClassID);
  if (Class != FC_NSDictionary && Class != FC_NSArray && Class != FC_NSSet)
    return;

  SymbolRef ContainerS = M.getReceiverSVal().getAsSymbol();
  if (!ContainerS)
    return;

  // If we are processing a call to "count", get the symbolic value returned by
  // a call to "count" and add it to the map.
  if (!isCollectionCountMethod(M, C))
    return;

  const Expr *MsgExpr = M.getOriginExpr();
  SymbolRef CountS = C.getSVal(MsgExpr).getAsSymbol();
  if (CountS) {
    ProgramStateRef State = C.getState();
    // Keep the count symbol alive for as long as the container symbol is.
    C.getSymbolManager().addSymbolDependency(ContainerS, CountS);
    State = State->set<ContainerCountMap>(ContainerS, CountS);
    C.addTransition(State);
  }
  return;
}
// Models a destroy call on a lock. Under PthreadSemantics, success depends on
// the (symbolic) return value, so the lock is first marked "possibly
// destroyed"; under XNU semantics, destruction succeeds unconditionally.
// Destroying a lock that is held or already destroyed is reported.
void PthreadLockChecker::DestroyLock(CheckerContext &C, const CallExpr *CE,
                                     SVal Lock,
                                     enum LockingSemantics semantics) const {

  const MemRegion *LockR = Lock.getAsRegion();
  if (!LockR)
    return;

  ProgramStateRef State = C.getState();

  // Resolve any pending destroy return value before inspecting the state.
  const SymbolRef *sym = State->get<DestroyRetVal>(LockR);
  if (sym)
    State = resolvePossiblyDestroyedMutex(State, LockR, sym);

  const LockState *LState = State->get<LockMap>(LockR);
  // Checking the return value of the destroy method only in the case of
  // PthreadSemantics
  if (semantics == PthreadSemantics) {
    if (!LState || LState->isUnlocked()) {
      SymbolRef sym = C.getSVal(CE).getAsSymbol();
      if (!sym) {
        // Return value is not symbolic, so we cannot check it later;
        // conservatively forget what we know about this lock.
        State = State->remove<LockMap>(LockR);
        C.addTransition(State);
        return;
      }
      // Remember the return value; the lock stays "possibly destroyed" until
      // the value gets constrained (see resolvePossiblyDestroyedMutex).
      State = State->set<DestroyRetVal>(LockR, sym);
      if (LState && LState->isUnlocked())
        State = State->set<LockMap>(
            LockR, LockState::getUnlockedAndPossiblyDestroyed());
      else
        State = State->set<LockMap>(
            LockR, LockState::getUntouchedAndPossiblyDestroyed());
      C.addTransition(State);
      return;
    }
  } else {
    // XNU semantics: the destroy always takes effect.
    if (!LState || LState->isUnlocked()) {
      State = State->set<LockMap>(LockR, LockState::getDestroyed());
      C.addTransition(State);
      return;
    }
  }

  // Reaching here means the lock is locked or already destroyed: report it.
  StringRef Message;

  if (LState->isLocked()) {
    Message = "This lock is still locked";
  } else {
    Message = "This lock has already been destroyed";
  }

  if (!BT_destroylock)
    BT_destroylock.reset(new BugType(this, "Destroy invalid lock",
                                     "Lock checker"));
  ExplodedNode *N = C.generateErrorNode();
  if (!N)
    return;
  auto Report = llvm::make_unique<BugReport>(*BT_destroylock, Message, N);
  Report->addRange(CE->getArg(0)->getSourceRange());
  C.emitReport(std::move(Report));
}
/// Returns NULL state if the collection is known to contain elements /// (or is known not to contain elements if the Assumption parameter is false.) static ProgramStateRef assumeCollectionNonEmpty(CheckerContext &C, ProgramStateRef State, const ObjCForCollectionStmt *FCS, bool Assumption = false) { if (!State) return NULL; SymbolRef CollectionS = C.getSVal(FCS->getCollection()).getAsSymbol(); if (!CollectionS) return State; const SymbolRef *CountS = State->get<ContainerCountMap>(CollectionS); if (!CountS) return State; SValBuilder &SvalBuilder = C.getSValBuilder(); SVal CountGreaterThanZeroVal = SvalBuilder.evalBinOp(State, BO_GT, nonloc::SymbolVal(*CountS), SvalBuilder.makeIntVal(0, (*CountS)->getType()), SvalBuilder.getConditionType()); Optional<DefinedSVal> CountGreaterThanZero = CountGreaterThanZeroVal.getAs<DefinedSVal>(); if (!CountGreaterThanZero) { // The SValBuilder cannot construct a valid SVal for this condition. // This means we cannot properly reason about it. return State; } return State->assume(*CountGreaterThanZero, Assumption); }
void UndefinedArraySubscriptChecker::checkPreStmt(const ArraySubscriptExpr *A, CheckerContext &C) const { const Expr *Index = A->getIdx(); if (!C.getSVal(Index).isUndef()) return; // Sema generates anonymous array variables for copying array struct fields. // Don't warn if we're in an implicitly-generated constructor. const Decl *D = C.getLocationContext()->getDecl(); if (const CXXConstructorDecl *Ctor = dyn_cast<CXXConstructorDecl>(D)) if (Ctor->isDefaulted()) return; ExplodedNode *N = C.generateErrorNode(); if (!N) return; if (!BT) BT.reset(new BuiltinBug(this, "Array subscript is undefined")); // Generate a report for this bug. auto R = llvm::make_unique<BugReport>(*BT, BT->getName(), N); R->addRange(A->getIdx()->getSourceRange()); bugreporter::trackExpressionValue(N, A->getIdx(), *R); C.emitReport(std::move(R)); }
// Warns when a branch condition tests (against zero) a value that was
// previously used as a divisor without a prior zero-check. Handles
// comparisons with a literal 0, logical-not, and bare values used as
// conditions (possibly under an implicit cast).
void TestAfterDivZeroChecker::checkBranchCondition(const Stmt *Condition,
                                                   CheckerContext &C) const {
  if (const BinaryOperator *B = dyn_cast<BinaryOperator>(Condition)) {
    if (B->isComparisonOp()) {
      // Find the literal-0 operand; the other operand is the tested value.
      const IntegerLiteral *IntLiteral = dyn_cast<IntegerLiteral>(B->getRHS());
      bool LRHS = true; // true: literal is on the RHS, tested value on LHS.
      if (!IntLiteral) {
        IntLiteral = dyn_cast<IntegerLiteral>(B->getLHS());
        LRHS = false;
      }

      if (!IntLiteral || IntLiteral->getValue() != 0)
        return;

      SVal Val = C.getSVal(LRHS ? B->getLHS() : B->getRHS());
      if (hasDivZeroMap(Val, C))
        reportBug(Val, C);
    }
  } else if (const UnaryOperator *U = dyn_cast<UnaryOperator>(Condition)) {
    if (U->getOpcode() == UO_LNot) {
      // For '!x': first try the value beneath an implicit cast, then fall
      // back to the subexpression's own value.
      SVal Val;
      if (const ImplicitCastExpr *I =
              dyn_cast<ImplicitCastExpr>(U->getSubExpr()))
        Val = C.getSVal(I->getSubExpr());

      if (hasDivZeroMap(Val, C))
        reportBug(Val, C);
      else {
        Val = C.getSVal(U->getSubExpr());
        if (hasDivZeroMap(Val, C))
          reportBug(Val, C);
      }
    }
  } else if (const ImplicitCastExpr *IE =
                 dyn_cast<ImplicitCastExpr>(Condition)) {
    // A bare value used directly as a condition: check the uncasted value
    // first, then the casted one (deliberately shadowed local below).
    SVal Val = C.getSVal(IE->getSubExpr());

    if (hasDivZeroMap(Val, C))
      reportBug(Val, C);
    else {
      SVal Val = C.getSVal(Condition);
      if (hasDivZeroMap(Val, C))
        reportBug(Val, C);
    }
  }
}
// Constrains the value of \p NonNullExpr to be non-null in \p State when the
// value is defined or unknown; otherwise returns the state unchanged.
ProgramStateRef
ObjCNonNilReturnValueChecker::assumeExprIsNonNull(const Expr *NonNullExpr,
                                                  ProgramStateRef State,
                                                  CheckerContext &C) const {
  SVal V = C.getSVal(NonNullExpr);
  Optional<DefinedOrUnknownSVal> DV = V.getAs<DefinedOrUnknownSVal>();
  if (!DV)
    return State;
  return State->assume(*DV, true);
}
/// Returns true of the value of the expression is the object that 'self' /// points to and is an object that did not come from the result of calling /// an initializer. static bool isInvalidSelf(const Expr *E, CheckerContext &C) { SVal exprVal = C.getSVal(E); if (!hasSelfFlag(exprVal, SelfFlag_Self, C)) return false; // value did not come from 'self'. if (hasSelfFlag(exprVal, SelfFlag_InitRes, C)) return false; // 'self' is properly initialized. return true; }
// Convenience overload: extracts the collection's symbol from the for-in
// statement and delegates to the symbol-based overload.
static ProgramStateRef assumeCollectionNonEmpty(CheckerContext &C,
                                                ProgramStateRef State,
                                                const ObjCForCollectionStmt *FCS,
                                                bool Assumption) {
  if (!State)
    return nullptr;

  SymbolRef CollectionSym = C.getSVal(FCS->getCollection()).getAsSymbol();
  return assumeCollectionNonEmpty(C, State, CollectionSym, Assumption);
}
void TestAfterDivZeroChecker::checkPreStmt(const BinaryOperator *B, CheckerContext &C) const { BinaryOperator::Opcode Op = B->getOpcode(); if (Op == BO_Div || Op == BO_Rem || Op == BO_DivAssign || Op == BO_RemAssign) { SVal S = C.getSVal(B->getRHS()); if (!isZero(S, C)) setDivZeroMap(S, C); } }
void NilArgChecker::warnIfNilExpr(const Expr *E, const char *Msg, CheckerContext &C) const { ProgramStateRef State = C.getState(); if (State->isNull(C.getSVal(E)).isConstrainedTrue()) { if (ExplodedNode *N = C.generateErrorNode()) { generateBugReport(N, Msg, E->getSourceRange(), E, C); } } }
/// Explicit casts are trusted. If there is a disagreement in the nullability /// annotations in the destination and the source or '0' is casted to nonnull /// track the value as having contraditory nullability. This will allow users to /// suppress warnings. void NullabilityChecker::checkPostStmt(const ExplicitCastExpr *CE, CheckerContext &C) const { QualType OriginType = CE->getSubExpr()->getType(); QualType DestType = CE->getType(); if (!OriginType->isAnyPointerType()) return; if (!DestType->isAnyPointerType()) return; ProgramStateRef State = C.getState(); if (State->get<InvariantViolated>()) return; Nullability DestNullability = getNullabilityAnnotation(DestType); // No explicit nullability in the destination type, so this cast does not // change the nullability. if (DestNullability == Nullability::Unspecified) return; auto RegionSVal = C.getSVal(CE).getAs<DefinedOrUnknownSVal>(); const MemRegion *Region = getTrackRegion(*RegionSVal); if (!Region) return; // When 0 is converted to nonnull mark it as contradicted. if (DestNullability == Nullability::Nonnull) { NullConstraint Nullness = getNullConstraint(*RegionSVal, State); if (Nullness == NullConstraint::IsNull) { State = State->set<NullabilityMap>(Region, Nullability::Contradicted); C.addTransition(State); return; } } const NullabilityState *TrackedNullability = State->get<NullabilityMap>(Region); if (!TrackedNullability) { if (DestNullability != Nullability::Nullable) return; State = State->set<NullabilityMap>(Region, NullabilityState(DestNullability, CE)); C.addTransition(State); return; } if (TrackedNullability->getValue() != DestNullability && TrackedNullability->getValue() != Nullability::Contradicted) { State = State->set<NullabilityMap>(Region, Nullability::Contradicted); C.addTransition(State); } }
// Checks the mutex expression of an @synchronized statement: reports an
// uninitialized value used as the mutex, and warns (non-fatally) when the
// mutex may be nil; the non-nil assumption is propagated afterwards.
void ObjCAtSyncChecker::checkPreStmt(const ObjCAtSynchronizedStmt *S,
                                     CheckerContext &C) const {

  const Expr *Ex = S->getSynchExpr();
  ProgramStateRef state = C.getState();
  SVal V = C.getSVal(Ex);

  // Uninitialized value used for the mutex?
  if (V.getAs<UndefinedVal>()) {
    if (ExplodedNode *N = C.generateErrorNode()) {
      if (!BT_undef)
        BT_undef.reset(new BuiltinBug(this, "Uninitialized value used as mutex "
                                            "for @synchronized"));
      auto report =
          llvm::make_unique<BugReport>(*BT_undef, BT_undef->getDescription(), N);
      bugreporter::trackExpressionValue(N, Ex, *report);
      C.emitReport(std::move(report));
    }
    return;
  }

  if (V.isUnknown())
    return;

  // Check for null mutexes.
  ProgramStateRef notNullState, nullState;
  std::tie(notNullState, nullState) = state->assume(V.castAs<DefinedSVal>());

  if (nullState) {
    if (!notNullState) {
      // Generate an error node.  This isn't a sink since
      // a null mutex just means no synchronization occurs.
      if (ExplodedNode *N = C.generateNonFatalErrorNode(nullState)) {
        if (!BT_null)
          BT_null.reset(new BuiltinBug(
              this, "Nil value used as mutex for @synchronized() "
                    "(no synchronization will occur)"));
        auto report =
            llvm::make_unique<BugReport>(*BT_null, BT_null->getDescription(), N);
        bugreporter::trackExpressionValue(N, Ex, *report);

        C.emitReport(std::move(report));
        return;
      }
    }
    // Don't add a transition for 'nullState'.  If the value is
    // under-constrained to be null or non-null, assume it is non-null
    // afterwards.
  }

  if (notNullState)
    C.addTransition(notNullState);
}
void DynamicTypePropagation::checkPostStmt(const CXXNewExpr *NewE, CheckerContext &C) const { if (NewE->isArray()) return; // We only track dynamic type info for regions. const MemRegion *MR = C.getSVal(NewE).getAsRegion(); if (!MR) return; C.addTransition(setDynamicTypeInfo(C.getState(), MR, NewE->getType(), /*CanBeSubclass=*/false)); }
void ObjCLoopChecker::checkPostStmt(const ObjCForCollectionStmt *FCS, CheckerContext &C) const { // Check if this is the branch for the end of the loop. SVal CollectionSentinel = C.getSVal(FCS); if (CollectionSentinel.isZeroConstant()) return; ProgramStateRef State = C.getState(); State = checkCollectionNonNil(C, State, FCS); State = checkElementNonNil(C, State, FCS); if (!State) C.generateSink(); else if (State != C.getState()) C.addTransition(State); }
void DynamicTypePropagation::checkPostStmt(const ImplicitCastExpr *CastE, CheckerContext &C) const { // We only track dynamic type info for regions. const MemRegion *ToR = C.getSVal(CastE).getAsRegion(); if (!ToR) return; switch (CastE->getCastKind()) { default: break; case CK_BitCast: // Only handle ObjCObjects for now. if (const Type *NewTy = getBetterObjCType(CastE, C)) C.addTransition(C.getState()->setDynamicTypeInfo(ToR, QualType(NewTy,0))); break; } return; }
// Checks 'new T[n]' for overflow in the allocation-size computation
// (sizeof(T) * n). Skips values accepted by the globals/members heuristic;
// reports only definite overflows, or possible ones when the count is tainted.
void IntegerOverflowChecker::checkPostStmt(const CXXNewExpr *NewExpr,
                                           CheckerContext &C) const {
  if (!Filter.CheckIntegerOverflowDef)
    return;

  if (NewExpr->getOperatorNew()->getOverloadedOperator() != OO_Array_New)
    return;

  const Expr *ArrSize = NewExpr->getArraySize();
  SVal ElementCount = C.getSVal(ArrSize);
  ProgramStateRef State = C.getState();

  // Whitelisted by the heuristic: remember it and skip the check.
  if (makeGlobalsMembersHeuristics(ElementCount, ArrSize, C)) {
    C.addTransition(addToWhiteList(ElementCount, State));
    return;
  }

  QualType NewExprType = NewExpr->getAllocatedType();
  uint64_t NewExprTypeSize = C.getASTContext().getTypeSizeInChars(NewExprType)
                                 .getQuantity();

  SValBuilder &SvalBuilder = C.getSValBuilder();
  SVal NewExprTypeSizeVal = SvalBuilder.makeIntVal(NewExprTypeSize, true);

  bool isOverflow;
  // Build the "sizeof(T) * count overflows" condition.
  Optional<DefinedOrUnknownSVal> CondOverflow =
      checkMul(C, NewExprTypeSizeVal, ElementCount, ArrSize->getType(),
               isOverflow);
  if (!CondOverflow)
    return;

  ProgramStateRef StateOverflow, StateNotOverflow;
  std::tie(StateOverflow, StateNotOverflow) = State->assume(*CondOverflow);

  // Report when overflow is certain, or merely possible but the count is
  // tainted (attacker-controlled).
  if (!StateOverflow || (StateNotOverflow && !State->isTainted(ElementCount)))
    return;

  std::string Msg = composeMsg(StateNotOverflow, NewExprTypeSizeVal,
                               ElementCount, 0, ArrSize, false, isOverflow, 0,
                               C);

  reportBug(Msg, C, NewExpr->getExprLoc(), false);
}
// Pre-statement callback: tracks direct assignments to "interesting" ivars
// (Objective-C object pointers) under ARC, verifying the ivar's dynamic state
// against static facts unless the previous value is known to be nil.
void DanglingDelegateChecker::checkPreStmt(const Stmt *stmt,
                                           CheckerContext &context) const {
  // hack to deal with pseudo-init methods
  resetInitialStateIfNeeded(context);

  // Next we track assignments to "interesting" ivars.
  // These are not objc messages so we need to deal with them separately.

  // no need for checking ivar assignments in non-ARC mode (the verification is on the release)
  if (!context.getLangOpts().ObjCAutoRefCount) {
    return;
  }

  const BinaryOperator *binOp = dyn_cast<BinaryOperator>(stmt);
  if (!binOp || !binOp->isAssignmentOp()) {
    return;
  }

  // look for an ivarref on the left of the assignment
  const Expr *lhs = binOp->getLHS()->IgnoreParenCasts();
  if (!lhs || !lhs->getType()->getAsObjCInterfacePointerType()) {
    return;
  }
  const ObjCIvarRefExpr *ivarRef = dyn_cast<ObjCIvarRefExpr>(lhs);
  if (!ivarRef) {
    return;
  }
  const ObjCIvarDecl *ivarDecl = ivarRef->getDecl();
  if (!ivarDecl) {
    return;
  }

  // want a non-null previous value in the ivar
  SVal ivarLVal = context.getSVal(lhs);
  const MemRegion *region = ivarLVal.getAsRegion();
  if (region) {
    // Read the ivar's current (r-)value out of its region.
    SVal ivarRVal = context.getState()->getSVal(region);
    if (isKnownToBeNil(ivarRVal, context)) {
      // we are sure that the ivar is nil => abort
      return;
    }
  }

  verifyIvarDynamicStateAgainstStaticFacts(*binOp, ivarDecl, context);
}
void CallDumper::checkPostCall(const CallEvent &Call, CheckerContext &C) const { const Expr *CallE = Call.getOriginExpr(); if (!CallE) return; unsigned Indentation = 0; for (const LocationContext *LC = C.getLocationContext()->getParent(); LC != nullptr; LC = LC->getParent()) ++Indentation; // It is mildly evil to print directly to llvm::outs() rather than emitting // warnings, but this ensures things do not get filtered out by the rest of // the static analyzer machinery. llvm::outs().indent(Indentation); if (Call.getResultType()->isVoidType()) llvm::outs() << "Returning void\n"; else llvm::outs() << "Returning " << C.getSVal(CallE) << "\n"; }
void DivZeroChecker::checkPreStmt(const BinaryOperator *B, CheckerContext &C) const { BinaryOperator::Opcode Op = B->getOpcode(); if (Op != BO_Div && Op != BO_Rem && Op != BO_DivAssign && Op != BO_RemAssign) return; if (!B->getRHS()->getType()->isScalarType()) return; SVal Denom = C.getSVal(B->getRHS()); Optional<DefinedSVal> DV = Denom.getAs<DefinedSVal>(); // Divide-by-undefined handled in the generic checking for uses of // undefined values. if (!DV) return; // Check for divide by zero. ConstraintManager &CM = C.getConstraintManager(); ProgramStateRef stateNotZero, stateZero; std::tie(stateNotZero, stateZero) = CM.assumeDual(C.getState(), *DV); if (!stateNotZero) { assert(stateZero); reportBug("Division by zero", stateZero, C); return; } bool TaintedD = C.getState()->isTainted(*DV); if ((stateNotZero && stateZero && TaintedD)) { reportBug("Division by a tainted value, possibly zero", stateZero, C, llvm::make_unique<TaintBugVisitor>(*DV)); return; } // If we get here, then the denom should not be zero. We abandon the implicit // zero denom case for now. C.addTransition(stateNotZero); }
/// Assumes that the collection is non-nil. /// /// If the collection is known to be nil, returns NULL to indicate an infeasible /// path. static ProgramStateRef checkCollectionNonNil(CheckerContext &C, ProgramStateRef State, const ObjCForCollectionStmt *FCS) { if (!State) return nullptr; SVal CollectionVal = C.getSVal(FCS->getCollection()); Optional<DefinedSVal> KnownCollection = CollectionVal.getAs<DefinedSVal>(); if (!KnownCollection) return State; ProgramStateRef StNonNil, StNil; std::tie(StNonNil, StNil) = State->assume(*KnownCollection); if (StNil && !StNonNil) { // The collection is nil. This path is infeasible. return nullptr; } return StNonNil; }
// Reports return statements that leak the address of a stack-allocated object
// out of the current frame, with carve-outs for by-value record returns,
// ARC-managed blocks, and copied-and-autoreleased blocks.
void StackAddrEscapeChecker::checkPreStmt(const ReturnStmt *RS,
                                          CheckerContext &C) const {
  if (!ChecksEnabled[CK_StackAddrEscapeChecker])
    return;

  const Expr *RetE = RS->getRetValue();
  if (!RetE)
    return;
  RetE = RetE->IgnoreParens();

  SVal V = C.getSVal(RetE);
  const MemRegion *R = V.getAsRegion();
  if (!R)
    return;

  // A returned block may capture stack addresses of its own; check those too.
  if (const BlockDataRegion *B = dyn_cast<BlockDataRegion>(R))
    checkReturnedBlockCaptures(*B, C);

  if (!isa<StackSpaceRegion>(R->getMemorySpace()) ||
      isNotInCurrentFrame(R, C) || isArcManagedBlock(R, C))
    return;

  // Returning a record by value is fine. (In this case, the returned
  // expression will be a copy-constructor, possibly wrapped in an
  // ExprWithCleanups node.)
  if (const ExprWithCleanups *Cleanup = dyn_cast<ExprWithCleanups>(RetE))
    RetE = Cleanup->getSubExpr();
  if (isa<CXXConstructExpr>(RetE) && RetE->getType()->isRecordType())
    return;

  // The CK_CopyAndAutoreleaseBlockObject cast causes the block to be copied
  // so the stack address is not escaping here.
  if (auto *ICE = dyn_cast<ImplicitCastExpr>(RetE)) {
    if (isa<BlockDataRegion>(R) &&
        ICE->getCastKind() == CK_CopyAndAutoreleaseBlockObject) {
      return;
    }
  }

  EmitStackError(C, R, RetE);
}
// Warns when a return statement yields an undefined value, or a reference to
// one, except for the accepted void-return patterns documented inline.
void ReturnUndefChecker::checkPreStmt(const ReturnStmt *RS,
                                      CheckerContext &C) const {
  const Expr *RetE = RS->getRetValue();
  if (!RetE)
    return;
  SVal RetVal = C.getSVal(RetE);

  const StackFrameContext *SFC = C.getStackFrame();
  QualType RT = CallEvent::getDeclaredResultType(SFC->getDecl());

  if (RetVal.isUndef()) {
    // "return;" is modeled to evaluate to an UndefinedVal. Allow UndefinedVal
    // to be returned in functions returning void to support this pattern:
    //   void foo() {
    //     return;
    //   }
    //   void test() {
    //     return foo();
    //   }
    if (!RT.isNull() && RT->isVoidType())
      return;

    // Not all blocks have explicitly-specified return types; if the return type
    // is not available, but the return value expression has 'void' type, assume
    // Sema already checked it.
    if (RT.isNull() && isa<BlockDecl>(SFC->getDecl()) &&
        RetE->getType()->isVoidType())
      return;

    emitUndef(C, RetE);
    return;
  }

  if (RT.isNull())
    return;

  if (RT->isReferenceType()) {
    // Returning a reference: make sure the referent is not undefined.
    checkReference(C, RetE, RetVal.castAs<DefinedOrUnknownSVal>());
    return;
  }
}
void DeleteWithNonVirtualDtorChecker::checkPreStmt(const CXXDeleteExpr *DE, CheckerContext &C) const { const Expr *DeletedObj = DE->getArgument(); const MemRegion *MR = C.getSVal(DeletedObj).getAsRegion(); if (!MR) return; const auto *BaseClassRegion = MR->getAs<TypedValueRegion>(); const auto *DerivedClassRegion = MR->getBaseRegion()->getAs<SymbolicRegion>(); if (!BaseClassRegion || !DerivedClassRegion) return; const auto *BaseClass = BaseClassRegion->getValueType()->getAsCXXRecordDecl(); const auto *DerivedClass = DerivedClassRegion->getSymbol()->getType()->getPointeeCXXRecordDecl(); if (!BaseClass || !DerivedClass) return; if (!BaseClass->hasDefinition() || !DerivedClass->hasDefinition()) return; if (BaseClass->getDestructor()->isVirtual()) return; if (!DerivedClass->isDerivedFrom(BaseClass)) return; if (!BT) BT.reset(new BugType(this, "Destruction of a polymorphic object with no " "virtual destructor", "Logic error")); ExplodedNode *N = C.generateNonFatalErrorNode(); auto R = llvm::make_unique<BugReport>(*BT, BT->getName(), N); // Mark region of problematic base class for later use in the BugVisitor. R->markInteresting(BaseClassRegion); R->addVisitor(llvm::make_unique<DeleteBugVisitor>()); C.emitReport(std::move(R)); }
// Dispatches pthread/XNU lock API calls (matched by exact callee name) to the
// acquire/release/destroy/init modeling routines. The first argument of every
// matched call is the lock. The boolean passed to AcquireLock distinguishes
// try-lock variants.
void PthreadLockChecker::checkPostStmt(const CallExpr *CE,
                                       CheckerContext &C) const {
  StringRef FName = C.getCalleeName(CE);
  if (FName.empty())
    return;

  if (CE->getNumArgs() != 1 && CE->getNumArgs() != 2)
    return;

  if (FName == "pthread_mutex_lock" ||
      FName == "pthread_rwlock_rdlock" ||
      FName == "pthread_rwlock_wrlock")
    AcquireLock(C, CE, C.getSVal(CE->getArg(0)), false, PthreadSemantics);
  else if (FName == "lck_mtx_lock" ||
           FName == "lck_rw_lock_exclusive" ||
           FName == "lck_rw_lock_shared")
    AcquireLock(C, CE, C.getSVal(CE->getArg(0)), false, XNUSemantics);
  else if (FName == "pthread_mutex_trylock" ||
           FName == "pthread_rwlock_tryrdlock" ||
           FName == "pthread_rwlock_trywrlock")
    AcquireLock(C, CE, C.getSVal(CE->getArg(0)), true, PthreadSemantics);
  else if (FName == "lck_mtx_try_lock" ||
           FName == "lck_rw_try_lock_exclusive" ||
           FName == "lck_rw_try_lock_shared")
    AcquireLock(C, CE, C.getSVal(CE->getArg(0)), true, XNUSemantics);
  else if (FName == "pthread_mutex_unlock" ||
           FName == "pthread_rwlock_unlock" ||
           FName == "lck_mtx_unlock" ||
           FName == "lck_rw_done")
    ReleaseLock(C, CE, C.getSVal(CE->getArg(0)));
  else if (FName == "pthread_mutex_destroy")
    DestroyLock(C, CE, C.getSVal(CE->getArg(0)), PthreadSemantics);
  else if (FName == "lck_mtx_destroy")
    DestroyLock(C, CE, C.getSVal(CE->getArg(0)), XNUSemantics);
  else if (FName == "pthread_mutex_init")
    InitLock(C, CE, C.getSVal(CE->getArg(0)));
}
bool IntegerOverflowChecker::hasGlobalVariablesOrMembers(const Stmt *S, CheckerContext &C) const { if (S == NULL || S->getStmtClass() == Stmt::IntegerLiteralClass) return false; ProgramStateRef State = C.getState(); const LocationContext *LCtx = C.getLocationContext(); if ((S->getStmtClass() != Stmt::ImplicitCastExprClass) && isInWhiteList(S, State, LCtx)) return true; if (const MemberExpr *MExpr = dyn_cast<MemberExpr>(S)) { if (MExpr->getMemberDecl()->isFunctionOrFunctionTemplate()) return hasGlobalVariablesOrMembers(MExpr->getMemberDecl()->getBody(), C); // We found member usage! return true; } if (const ImplicitCastExpr *ICE = dyn_cast<ImplicitCastExpr>(S)) if (isa<DeclRefExpr>(ICE->getSubExpr()) && isInWhiteList(C.getSVal(ICE), State)) return true; if (const DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(S)) if (const VarDecl *VarD = dyn_cast<VarDecl>(DRE->getDecl())) { Loc VLoc = C.getStoreManager().getLValueVar(VarD, LCtx); SVal VVal = C.getStoreManager().getBinding(State->getStore(), VLoc); if (isInWhiteList(VVal, State)) return true; } // We will not surrender! for (auto I = S->child_begin(); I != S->child_end(); I++) if (hasGlobalVariablesOrMembers(*I, C)) return true; return false; }