SVal GRSimpleVals::EvalEquality(GRExprEngine& Eng, Loc L, Loc R, bool isEqual) {

  BasicValueFactory& BasicVals = Eng.getBasicVals();

  switch (L.getSubKind()) {

    default:
      assert(false && "EQ/NE not implemented for this Loc.");
      return UnknownVal();

    case loc::ConcreteIntKind:
      if (isa<loc::ConcreteInt>(R)) {
        bool b = cast<loc::ConcreteInt>(L).getValue() ==
                 cast<loc::ConcreteInt>(R).getValue();

        // Are we computing '!='?  Flip the result.
        if (!isEqual)
          b = !b;

        return NonLoc::MakeIntTruthVal(BasicVals, b);
      }
      else if (SymbolRef Sym = R.getAsSymbol()) {
        const SymIntExpr *SE =
          Eng.getSymbolManager().getSymIntExpr(
              Sym,
              isEqual ? BinaryOperator::EQ : BinaryOperator::NE,
              cast<loc::ConcreteInt>(L).getValue(),
              Eng.getContext().IntTy);
        return nonloc::SymExprVal(SE);
      }

      break;

    case loc::MemRegionKind: {
      if (SymbolRef LSym = L.getAsLocSymbol()) {
        if (isa<loc::ConcreteInt>(R)) {
          const SymIntExpr *SE =
            Eng.getSymbolManager().getSymIntExpr(
                LSym,
                isEqual ? BinaryOperator::EQ : BinaryOperator::NE,
                cast<loc::ConcreteInt>(R).getValue(),
                Eng.getContext().IntTy);
          return nonloc::SymExprVal(SE);
        }
      }
    }
    // Fall-through.

    case loc::GotoLabelKind:
      return NonLoc::MakeIntTruthVal(BasicVals, isEqual ? L == R : L != R);
  }

  return NonLoc::MakeIntTruthVal(BasicVals, isEqual ? false : true);
}
void clang::RegisterNSAutoreleasePoolChecks(GRExprEngine &Eng) {
  ASTContext &Ctx = Eng.getContext();
  if (Ctx.getLangOptions().getGCMode() != LangOptions::NonGC) {
    Eng.registerCheck(new NSAutoreleasePoolChecker(
                            GetNullarySelector("release", Ctx)));
  }
}
SVal GRSimpleVals::EvalCast(GRExprEngine& Eng, Loc X, QualType T) {

  // Casts from pointers -> pointers, just return the lval.
  //
  // Casts from pointers -> references, just return the lval.  These
  //  can be introduced by the frontend for corner cases, e.g.,
  //  casting from va_list* to __builtin_va_list&.
  //
  assert(!X.isUnknownOrUndef());

  if (Loc::IsLocType(T) || T->isReferenceType())
    return X;

  // FIXME: Handle transparent unions where a value can be "transparently"
  //  lifted into a union type.
  if (T->isUnionType())
    return UnknownVal();

  assert(T->isIntegerType());
  BasicValueFactory& BasicVals = Eng.getBasicVals();
  unsigned BitWidth = Eng.getContext().getTypeSize(T);

  if (!isa<loc::ConcreteInt>(X))
    return nonloc::LocAsInteger::Make(BasicVals, X, BitWidth);

  llvm::APSInt V = cast<loc::ConcreteInt>(X).getValue();
  V.setIsUnsigned(T->isUnsignedIntegerType() || Loc::IsLocType(T));
  V.extOrTrunc(BitWidth);
  return nonloc::ConcreteInt(BasicVals.getValue(V));
}
SVal GRSimpleVals::EvalBinOp(GRExprEngine& Eng, const GRState *state,
                             BinaryOperator::Opcode Op, Loc L, NonLoc R) {

  // Special case: 'R' is an integer that has the same width as a pointer and
  // we are using the integer location in a comparison.  Normally this cannot
  // be triggered, but transfer functions like those for
  // OSCompareAndSwapBarrier32 can generate comparisons that trigger this code.
  // FIXME: Are all locations guaranteed to have pointer width?
  if (BinaryOperator::isEqualityOp(Op)) {
    if (nonloc::ConcreteInt *RInt = dyn_cast<nonloc::ConcreteInt>(&R)) {
      const llvm::APSInt *X = &RInt->getValue();
      ASTContext &C = Eng.getContext();

      if (C.getTypeSize(C.VoidPtrTy) == X->getBitWidth()) {
        // Convert the signedness of the integer (if necessary).
        if (X->isSigned())
          X = &Eng.getBasicVals().getValue(*X, true);

        return EvalBinOp(Eng, Op, L, loc::ConcreteInt(*X));
      }
    }
  }

  // Delegate pointer arithmetic to the store manager.
  return Eng.getStoreManager().EvalBinOp(state, Op, L, R);
}
void clang::RegisterAppleChecks(GRExprEngine& Eng, const Decl &D) {
  ASTContext& Ctx = Eng.getContext();
  BugReporter &BR = Eng.getBugReporter();

  Eng.AddCheck(CreateBasicObjCFoundationChecks(Ctx, BR),
               Stmt::ObjCMessageExprClass);
  Eng.AddCheck(CreateAuditCFNumberCreate(Ctx, BR), Stmt::CallExprClass);
  Eng.AddCheck(CreateAuditCFRetainRelease(Ctx, BR), Stmt::CallExprClass);

  RegisterNSErrorChecks(BR, Eng, D);
  RegisterNSAutoreleasePoolChecks(Eng);
  Eng.registerCheck(new ClassReleaseChecker(Ctx));
}
SVal GRSimpleVals::EvalCast(GRExprEngine& Eng, NonLoc X, QualType T) {

  if (!isa<nonloc::ConcreteInt>(X))
    return UnknownVal();

  bool isLocType = Loc::IsLocType(T);

  // Only handle casts from integers to integers.
  if (!isLocType && !T->isIntegerType())
    return UnknownVal();

  BasicValueFactory& BasicVals = Eng.getBasicVals();

  llvm::APSInt V = cast<nonloc::ConcreteInt>(X).getValue();
  V.setIsUnsigned(T->isUnsignedIntegerType() || Loc::IsLocType(T));
  V.extOrTrunc(Eng.getContext().getTypeSize(T));

  if (isLocType)
    return loc::ConcreteInt(BasicVals.getValue(V));
  else
    return nonloc::ConcreteInt(BasicVals.getValue(V));
}
SVal GRSimpleVals::DetermEvalBinOpNN(GRExprEngine& Eng,
                                     BinaryOperator::Opcode Op,
                                     NonLoc L, NonLoc R,
                                     QualType T) {

  BasicValueFactory& BasicVals = Eng.getBasicVals();
  unsigned subkind = L.getSubKind();

  while (1) {

    switch (subkind) {
      default:
        return UnknownVal();

      case nonloc::LocAsIntegerKind: {
        Loc LL = cast<nonloc::LocAsInteger>(L).getLoc();

        switch (R.getSubKind()) {
          case nonloc::LocAsIntegerKind:
            return EvalBinOp(Eng, Op, LL,
                             cast<nonloc::LocAsInteger>(R).getLoc());

          case nonloc::ConcreteIntKind: {
            // Transform the integer into a location and compare.
            ASTContext& Ctx = Eng.getContext();
            llvm::APSInt V = cast<nonloc::ConcreteInt>(R).getValue();
            V.setIsUnsigned(true);
            V.extOrTrunc(Ctx.getTypeSize(Ctx.VoidPtrTy));
            return EvalBinOp(Eng, Op, LL,
                             loc::ConcreteInt(BasicVals.getValue(V)));
          }

          default:
            switch (Op) {
              case BinaryOperator::EQ:
                return NonLoc::MakeIntTruthVal(BasicVals, false);
              case BinaryOperator::NE:
                return NonLoc::MakeIntTruthVal(BasicVals, true);
              default:
                // This case also handles pointer arithmetic.
                return UnknownVal();
            }
        }
      }

      case nonloc::SymExprValKind: {
        // Logical not?
        if (!(Op == BinaryOperator::EQ && R.isZeroConstant()))
          return UnknownVal();

        const SymExpr &SE = *cast<nonloc::SymExprVal>(L).getSymbolicExpression();

        // Only handle ($sym op constant) for now.
        if (const SymIntExpr *E = dyn_cast<SymIntExpr>(&SE)) {
          BinaryOperator::Opcode Opc = E->getOpcode();

          if (Opc < BinaryOperator::LT || Opc > BinaryOperator::NE)
            return UnknownVal();

          // For comparison operators, translate the constraint by
          // changing the opcode.
          int idx = (unsigned) Opc - (unsigned) BinaryOperator::LT;

          assert(idx >= 0 &&
                 (unsigned) idx < sizeof(LNotOpMap)/sizeof(unsigned char));

          Opc = (BinaryOperator::Opcode) LNotOpMap[idx];
          assert(E->getType(Eng.getContext()) == T);
          E = Eng.getSymbolManager().getSymIntExpr(E->getLHS(), Opc,
                                                   E->getRHS(), T);
          return nonloc::SymExprVal(E);
        }

        return UnknownVal();
      }

      case nonloc::ConcreteIntKind:
        if (isa<nonloc::ConcreteInt>(R)) {
          const nonloc::ConcreteInt& L_CI = cast<nonloc::ConcreteInt>(L);
          const nonloc::ConcreteInt& R_CI = cast<nonloc::ConcreteInt>(R);
          return L_CI.EvalBinOp(BasicVals, Op, R_CI);
        }
        else {
          subkind = R.getSubKind();
          NonLoc tmp = R;
          R = L;
          L = tmp;

          // Swap the operands and reverse the comparison operator to match.
          switch (Op) {
            case BinaryOperator::LT: Op = BinaryOperator::GT; break;
            case BinaryOperator::GT: Op = BinaryOperator::LT; break;
            case BinaryOperator::LE: Op = BinaryOperator::GE; break;
            case BinaryOperator::GE: Op = BinaryOperator::LE; break;
            default: break;
          }

          continue;
        }

      case nonloc::SymbolValKind:
        if (isa<nonloc::ConcreteInt>(R)) {
          ValueManager &ValMgr = Eng.getValueManager();
          return ValMgr.makeNonLoc(cast<nonloc::SymbolVal>(L).getSymbol(), Op,
                                   cast<nonloc::ConcreteInt>(R).getValue(), T);
        }
        else
          return UnknownVal();
    }
  }
}
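// Note: LNotOpMap is referenced above but defined elsewhere in this file and
// is not part of this excerpt.  From the bounds check and the index
// arithmetic (idx = Opc - BinaryOperator::LT), it must map each comparison
// opcode in the contiguous range [LT, NE] to its logical negation.  A minimal
// sketch of what such a table could look like, assuming the opcode order
// LT, GT, LE, GE, EQ, NE (an assumption for illustration, not necessarily the
// exact definition used here):
//
//   static const unsigned char LNotOpMap[] = {
//     (unsigned char) BinaryOperator::GE,   // !(x <  y)  =>  x >= y
//     (unsigned char) BinaryOperator::LE,   // !(x >  y)  =>  x <= y
//     (unsigned char) BinaryOperator::GT,   // !(x <= y)  =>  x >  y
//     (unsigned char) BinaryOperator::LT,   // !(x >= y)  =>  x <  y
//     (unsigned char) BinaryOperator::NE,   // !(x == y)  =>  x != y
//     (unsigned char) BinaryOperator::EQ    // !(x != y)  =>  x == y
//   };
//
// With such a table, DetermEvalBinOpNN rewrites a constraint like
// ($sym < 5) == 0 into $sym >= 5 instead of returning UnknownVal().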
void clang::RegisterMacOSXAPIChecker(GRExprEngine &Eng) {
  if (Eng.getContext().Target.getTriple().getVendor() == llvm::Triple::Apple)
    Eng.registerCheck(new MacOSXAPIChecker());
}