bool AliasAnalysis::canApplyDecrementRefCount(FullApplySite FAS, SILValue Ptr) {
  // Treat applications of @noreturn functions as decrementing ref counts. This
  // causes the apply to become a sink barrier for ref count increments.
  if (FAS.getCallee().getType().getAs<SILFunctionType>()->isNoReturn())
    return true;

  // If the pointer cannot escape to the function we are done.
  if (!EA->canEscapeTo(Ptr, FAS))
    return false;

  SideEffectAnalysis::FunctionEffects ApplyEffects;
  SEA->getEffects(ApplyEffects, FAS);

  auto &GlobalEffects = ApplyEffects.getGlobalEffects();
  if (ApplyEffects.mayReadRC() || GlobalEffects.mayRelease())
    return true;

  // The function has no unidentified releases, so let's look at the arguments
  // in detail.
  for (unsigned Idx = 0, End = FAS.getNumArguments(); Idx < End; ++Idx) {
    auto &ArgEffect = ApplyEffects.getParameterEffects()[Idx];
    if (ArgEffect.mayRelease()) {
      // The function may release this argument, so check if the pointer can
      // escape to it.
      if (EA->canEscapeToValue(Ptr, FAS.getArgument(Idx)))
        return true;
    }
  }
  return false;
}
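// A minimal usage sketch (hypothetical helper with illustrative names, not
// taken from the compiler source): a client such as an ARC-related pass could
// consult canApplyDecrementRefCount to decide whether a call acts as a
// barrier when sinking a retain of Ptr.
static bool isRetainSinkBarrier(SILInstruction *I, SILValue Ptr,
                                AliasAnalysis *AA) {
  if (FullApplySite FAS = FullApplySite::isa(I))
    return AA->canApplyDecrementRefCount(FAS, Ptr);
  // Conservatively treat any other instruction that may release as a barrier.
  return I->mayRelease();
}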
/// Returns true if the \p MayWrites set contains any memory writes which may
/// alias with any memory which is read by \p AI.
static bool mayWriteTo(AliasAnalysis *AA, SideEffectAnalysis *SEA,
                       WriteSet &MayWrites, ApplyInst *AI) {
  SideEffectAnalysis::FunctionEffects E;
  SEA->getEffects(E, AI);
  assert(E.getMemBehavior(RetainObserveKind::IgnoreRetains) <=
             SILInstruction::MemoryBehavior::MayRead &&
         "apply should only read from memory");

  if (E.getGlobalEffects().mayRead() && !MayWrites.empty()) {
    // We don't know which memory is read in the callee. Therefore we bail if
    // there are any writes in the loop.
    return true;
  }

  for (unsigned Idx = 0, End = AI->getNumArguments(); Idx < End; ++Idx) {
    auto &ArgEffect = E.getParameterEffects()[Idx];
    if (!ArgEffect.mayRead())
      continue;

    SILValue Arg = AI->getArgument(Idx);

    // Check if the memory addressed by the argument may alias any writes.
    for (auto *W : MayWrites) {
      if (AA->mayWriteToMemory(W, Arg)) {
        DEBUG(llvm::dbgs() << "  mayWriteTo\n" << *W << " to " << *AI << "\n");
        return true;
      }
    }
  }
  return false;
}
bool swift::isPureCall(FullApplySite AI, SideEffectAnalysis *SEA) {
  // If a call has only constant arguments and the call is pure, i.e. has
  // no side effects, then we should always inline it.
  SideEffectAnalysis::FunctionEffects ApplyEffects;
  SEA->getEffects(ApplyEffects, AI);
  auto GE = ApplyEffects.getGlobalEffects();
  if (GE.mayRead() || GE.mayWrite() || GE.mayRetain() || GE.mayRelease())
    return false;

  // Check if all parameters are constant.
  auto Args = AI.getArgumentsWithoutIndirectResults();
  for (auto Arg : Args) {
    if (!isConstantValue(Arg)) {
      return false;
    }
  }
  return true;
}
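// A minimal usage sketch (hypothetical helper with illustrative names and
// limits, not taken from the compiler source): an inlining heuristic can
// accept a pure call with only constant arguments unconditionally, because
// the inlined body constant-folds away, and fall back to a size limit
// otherwise.
static bool shouldInlineCallee(FullApplySite AI, SideEffectAnalysis *SEA,
                               unsigned CalleeSize, unsigned SizeLimit) {
  if (isPureCall(AI, SEA))
    return true; // folds away entirely after inlining
  return CalleeSize < SizeLimit;
}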
void LoopTreeOptimization::analyzeCurrentLoop(
    std::unique_ptr<LoopNestSummary> &CurrSummary, ReadSet &SafeReads) {
  WriteSet &MayWrites = CurrSummary->MayWrites;
  SILLoop *Loop = CurrSummary->Loop;
  DEBUG(llvm::dbgs() << " Analyzing accesses.\n");

  // Contains function calls in the loop, which only read from memory.
  SmallVector<ApplyInst *, 8> ReadOnlyApplies;

  for (auto *BB : Loop->getBlocks()) {
    for (auto &Inst : *BB) {
      // Ignore fix_lifetime instructions.
      if (isa<FixLifetimeInst>(&Inst))
        continue;

      // Collect loads.
      auto LI = dyn_cast<LoadInst>(&Inst);
      if (LI) {
        if (!mayWriteTo(AA, MayWrites, LI))
          SafeReads.insert(LI);
        continue;
      }
      if (auto *AI = dyn_cast<ApplyInst>(&Inst)) {
        // In contrast to load instructions, we first collect all read-only
        // function calls and add them later to SafeReads.
        SideEffectAnalysis::FunctionEffects E;
        SEA->getEffects(E, AI);
        auto MB = E.getMemBehavior(RetainObserveKind::ObserveRetains);
        if (MB <= SILInstruction::MemoryBehavior::MayRead)
          ReadOnlyApplies.push_back(AI);
      }
      if (Inst.mayHaveSideEffects()) {
        MayWrites.push_back(&Inst);
        // Remove clobbered loads we have seen before.
        removeWrittenTo(AA, SafeReads, &Inst);
      }
    }
  }
  for (auto *AI : ReadOnlyApplies) {
    if (!mayWriteTo(AA, SEA, MayWrites, AI))
      SafeReads.insert(AI);
  }
}
bool CSE::canHandle(SILInstruction *Inst) {
  if (auto *AI = dyn_cast<ApplyInst>(Inst)) {
    if (!AI->mayReadOrWriteMemory())
      return true;

    if (RunsOnHighLevelSil) {
      ArraySemanticsCall SemCall(AI);
      switch (SemCall.getKind()) {
      case ArrayCallKind::kGetCount:
      case ArrayCallKind::kGetCapacity:
      case ArrayCallKind::kCheckIndex:
      case ArrayCallKind::kCheckSubscript:
        if (SemCall.hasGuaranteedSelf()) {
          return true;
        }
        return false;
      default:
        return false;
      }
    }

    // We can CSE function calls which do not read or write memory and don't
    // have any other side effects.
    SideEffectAnalysis::FunctionEffects Effects;
    SEA->getEffects(Effects, AI);

    // Note that the function also may not contain any retains. And there are
    // functions which are read-none and have a retain, e.g. functions which
    // _convert_ a global_addr to a reference and retain it.
    auto MB = Effects.getMemBehavior(RetainObserveKind::ObserveRetains);
    if (MB == SILInstruction::MemoryBehavior::None)
      return true;

    return false;
  }
  if (auto *BI = dyn_cast<BuiltinInst>(Inst)) {
    // Although the onFastPath builtin has no side-effects we don't want to
    // (re-)move it.
    if (BI->getBuiltinInfo().ID == BuiltinValueKind::OnFastPath)
      return false;
    return !BI->mayReadOrWriteMemory();
  }
  if (auto *CMI = dyn_cast<ClassMethodInst>(Inst)) {
    return !CMI->isVolatile();
  }
  if (auto *WMI = dyn_cast<WitnessMethodInst>(Inst)) {
    return !WMI->isVolatile();
  }
  if (auto *EMI = dyn_cast<ExistentialMetatypeInst>(Inst)) {
    return !EMI->getOperand()->getType().isAddress();
  }
  switch (Inst->getKind()) {
  case ValueKind::FunctionRefInst:
  case ValueKind::GlobalAddrInst:
  case ValueKind::IntegerLiteralInst:
  case ValueKind::FloatLiteralInst:
  case ValueKind::StringLiteralInst:
  case ValueKind::StructInst:
  case ValueKind::StructExtractInst:
  case ValueKind::StructElementAddrInst:
  case ValueKind::TupleInst:
  case ValueKind::TupleExtractInst:
  case ValueKind::TupleElementAddrInst:
  case ValueKind::MetatypeInst:
  case ValueKind::ValueMetatypeInst:
  case ValueKind::ObjCProtocolInst:
  case ValueKind::RefElementAddrInst:
  case ValueKind::RefTailAddrInst:
  case ValueKind::ProjectBoxInst:
  case ValueKind::IndexRawPointerInst:
  case ValueKind::IndexAddrInst:
  case ValueKind::PointerToAddressInst:
  case ValueKind::AddressToPointerInst:
  case ValueKind::CondFailInst:
  case ValueKind::EnumInst:
  case ValueKind::UncheckedEnumDataInst:
  case ValueKind::IsNonnullInst:
  case ValueKind::UncheckedTrivialBitCastInst:
  case ValueKind::UncheckedBitwiseCastInst:
  case ValueKind::RefToRawPointerInst:
  case ValueKind::RawPointerToRefInst:
  case ValueKind::RefToUnownedInst:
  case ValueKind::UnownedToRefInst:
  case ValueKind::RefToUnmanagedInst:
  case ValueKind::UnmanagedToRefInst:
  case ValueKind::UpcastInst:
  case ValueKind::ThickToObjCMetatypeInst:
  case ValueKind::ObjCToThickMetatypeInst:
  case ValueKind::UncheckedRefCastInst:
  case ValueKind::UncheckedAddrCastInst:
  case ValueKind::ObjCMetatypeToObjectInst:
  case ValueKind::ObjCExistentialMetatypeToObjectInst:
  case ValueKind::SelectEnumInst:
  case ValueKind::SelectValueInst:
  case ValueKind::RefToBridgeObjectInst:
  case ValueKind::BridgeObjectToRefInst:
  case ValueKind::BridgeObjectToWordInst:
  case ValueKind::ThinFunctionToPointerInst:
  case ValueKind::PointerToThinFunctionInst:
  case ValueKind::MarkDependenceInst:
  case ValueKind::OpenExistentialRefInst:
    return true;
  default:
    return false;
  }
}
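// A minimal sketch of a driver loop around canHandle (hypothetical and
// simplified, not the pass's real traversal): AvailableValues stands for a
// table that maps structurally equivalent, dominating instructions to one
// representative; the member name and its exact type are illustrative.
bool CSE::processBlock(SILBasicBlock *BB) {
  bool Changed = false;
  for (auto I = BB->begin(), E = BB->end(); I != E;) {
    SILInstruction *Inst = &*I;
    ++I; // advance before a possible erase of Inst below
    if (!canHandle(Inst))
      continue;
    if (SILInstruction *Available = AvailableValues->lookup(Inst)) {
      // An equivalent instruction already dominates this one: reuse it.
      Inst->replaceAllUsesWith(Available);
      Inst->eraseFromParent();
      Changed = true;
      continue;
    }
    // Otherwise record this instruction for the blocks it dominates.
    AvailableValues->insert(Inst, Inst);
  }
  return Changed;
}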