/// Model the effect of an instruction on the set of available values.
static void TransferInstruction(const Instruction &I, bool &Cleared,
                                DenseSet<const Value *> &Available) {
  if (isStatepoint(I)) {
    Cleared = true;
    Available.clear();
  } else if (containsGCPtrType(I.getType()))
    Available.insert(&I);
}
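TransferInstruction is a plain dataflow transfer function: a statepoint kills the whole set of available GC pointer values, and any instruction that itself produces a GC pointer becomes available. A minimal sketch of how it might be driven over a single basic block (the helper name computeAvailableOut is hypothetical, not part of the original pass):

#include "llvm/ADT/DenseSet.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/Value.h"
using namespace llvm;

// Hypothetical driver: fold every instruction of BB through the transfer
// function above, starting from the values available on entry, and return
// the set of GC pointer values still available at the end of the block.
static DenseSet<const Value *>
computeAvailableOut(const BasicBlock &BB,
                    DenseSet<const Value *> AvailableIn) {
  bool Cleared = false;  // becomes true if a statepoint wiped the set
  for (const Instruction &I : BB)
    TransferInstruction(I, Cleared, AvailableIn);
  return AvailableIn;
}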
/// collectDynamicAliases - Gather the dynamic alias pairs, either from the
/// in-process DynamicAliasAnalysis or, if InputDynamicAliases names a file,
/// from the value-ID pairs stored in that file.
void AliasAnalysisChecker::collectDynamicAliases(
    DenseSet<ValuePair> &DynamicAliases) {
  DynamicAliases.clear();
  if (InputDynamicAliases == "") {
    DynamicAliasAnalysis &DAA = getAnalysis<DynamicAliasAnalysis>();
    DynamicAliases.insert(DAA.getAllAliases().begin(),
                          DAA.getAllAliases().end());
  } else {
    IDAssigner &IDA = getAnalysis<IDAssigner>();
    ifstream InputFile(InputDynamicAliases.c_str());
    unsigned VID1, VID2;
    while (InputFile >> VID1 >> VID2) {
      Value *V1 = IDA.getValue(VID1), *V2 = IDA.getValue(VID2);
      DynamicAliases.insert(make_pair(V1, V2));
    }
  }
}
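When InputDynamicAliases names a file, the loop above expects whitespace-separated pairs of IDAssigner value IDs, one pair per observed alias. For example (the IDs here are made up for illustration), a file containing

  12 57
  12 103

records that the values with IDs 12 and 57, and 12 and 103, aliased at run time; each ID is mapped back to a Value with IDAssigner::getValue before the pair is inserted into DynamicAliases.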
/// Removes redundant check_unowned calls if they check the same reference and
/// no instruction in between could decrement the reference count.
static void performRedundantCheckUnownedRemoval(BasicBlock &BB) {
  DenseSet<Value *> checkedValues;
  for (BasicBlock::iterator BBI = BB.begin(), E = BB.end(); BBI != E; ) {
    // Preincrement the iterator so that erasing I below does not invalidate it.
    Instruction &I = *BBI++;
    switch (classifyInstruction(I)) {
    case RT_NoMemoryAccessed:
    case RT_AllocObject:
    case RT_FixLifetime:
    case RT_Retain:
    case RT_UnknownRetain:
    case RT_BridgeRetain:
    case RT_RetainUnowned:
    case RT_ObjCRetain:
      // None of these can decrement a reference count.
      continue;
    case RT_CheckUnowned: {
      Value *Arg = cast<CallInst>(&I)->getArgOperand(0);
      if (checkedValues.count(Arg) != 0) {
        // We already checked this reference -> delete the redundant check.
        I.eraseFromParent();
      } else {
        // Record the check.
        checkedValues.insert(Arg);
      }
      continue;
    }
    case RT_Unknown:
      // Loads, stores, and memory intrinsics cannot decrement reference
      // counts.
      if (isa<LoadInst>(I) || isa<StoreInst>(I) || isa<MemIntrinsic>(I))
        continue;
      break;
    default:
      break;
    }
    // We found a potentially reference-count-decrementing instruction, so
    // forget all previously recorded checks.
    checkedValues.clear();
  }
}
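As a concrete illustration of what this catches: two check_unowned calls on the same unowned reference separated only by loads, stores, retains, or other instructions classified as non-decrementing collapse to a single check. As soon as an unclassified instruction appears (RT_Unknown that is not a load, store, or memory intrinsic, or any other fall-through case), every recorded check is forgotten, so a later check of the same reference is conservatively kept.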
// run - Calculate the top-down data structure graphs for each function in the
// program.
//
bool TDDataStructures::runOnModule(Module &M) {
  init(useEQBU ? &getAnalysis<EquivBUDataStructures>()
               : &getAnalysis<BUDataStructures>(),
       true, true, true, false);

  // Figure out which functions must not mark their arguments complete because
  // they are accessible outside this compilation unit.  Currently, these are
  // the functions reachable from incomplete or external nodes in the globals
  // graph.
  const DSScalarMap &GGSM = GlobalsGraph->getScalarMap();
  DenseSet<DSNode*> Visited;
  for (DSScalarMap::global_iterator I = GGSM.global_begin(),
       E = GGSM.global_end(); I != E; ++I) {
    DSNode *N = GGSM.find(*I)->second.getNode();
    if (N->isIncompleteNode() || N->isExternalNode())
      markReachableFunctionsExternallyAccessible(N, Visited);
  }

  // Loop over unresolved call nodes.  Any functions passed into (but not
  // returned!) from unresolvable call nodes may be invoked outside of the
  // current module.
  for (DSGraph::afc_iterator I = GlobalsGraph->afc_begin(),
       E = GlobalsGraph->afc_end(); I != E; ++I)
    for (unsigned arg = 0, e = I->getNumPtrArgs(); arg != e; ++arg)
      markReachableFunctionsExternallyAccessible(I->getPtrArg(arg).getNode(),
                                                 Visited);
  Visited.clear();

  // Clear the Aux function calls of the globals graph; they will be refilled
  // later by post-TD unresolved functions.
  GlobalsGraph->getAuxFunctionCalls().clear();

  // Functions without internal linkage are definitely externally callable!
  for (Module::iterator I = M.begin(), E = M.end(); I != E; ++I)
    if (!I->isDeclaration() && !I->hasInternalLinkage() &&
        !I->hasPrivateLinkage())
      ExternallyCallable.insert(I);

  // Debug code to print the functions that are externally callable.
#if 0
  for (Module::iterator I = M.begin(), E = M.end(); I != E; ++I)
    if (ExternallyCallable.count(I)) {
      errs() << "ExternallyCallable: " << I->getNameStr() << "\n";
    }
#endif

  // We want to traverse the call graph in reverse post-order.  To do this, we
  // calculate a post-order traversal, then reverse it.
  DenseSet<DSGraph*> VisitedGraph;
  std::vector<DSGraph*> PostOrder;

  {TIME_REGION(XXX, "td:Compute postorder");

  // Calculate top-down from main...
  if (Function *F = M.getFunction("main"))
    ComputePostOrder(*F, VisitedGraph, PostOrder);

  // Next calculate the graphs for each unreachable function...
  for (Module::iterator I = M.begin(), E = M.end(); I != E; ++I)
    if (!I->isDeclaration())
      ComputePostOrder(*I, VisitedGraph, PostOrder);

  VisitedGraph.clear();   // Release memory!
  }

  {TIME_REGION(XXX, "td:Inline stuff");

  // Visit each of the graphs in reverse post-order now!
  while (!PostOrder.empty()) {
    InlineCallersIntoGraph(PostOrder.back());
    PostOrder.pop_back();
  }
  }

  // Free the IndCallMap.
  while (!IndCallMap.empty()) {
    delete IndCallMap.begin()->second;
    IndCallMap.erase(IndCallMap.begin());
  }

  formGlobalECs();
  ExternallyCallable.clear();

  GlobalsGraph->removeTriviallyDeadNodes();
  GlobalsGraph->computeExternalFlags(DSGraph::DontMarkFormalsExternal);
  GlobalsGraph->computeIntPtrFlags();

  // Make sure each graph has updated external information about globals
  // in the globals graph.
  VisitedGraph.clear();
  for (Module::iterator F = M.begin(); F != M.end(); ++F) {
    if (!F->isDeclaration()) {
      DSGraph *Graph = getOrCreateGraph(F);
      if (!VisitedGraph.insert(Graph).second)
        continue;

      cloneGlobalsInto(Graph, DSGraph::DontCloneCallNodes |
                              DSGraph::DontCloneAuxCallNodes);

      Graph->computeExternalFlags(DSGraph::DontMarkFormalsExternal);
      Graph->computeIntPtrFlags();
      // Clean up uninteresting nodes.
      Graph->removeDeadNodes(0);
    }
  }

  // CBU contains the correct call graph.  Restore it, so that subsequent
  // passes and clients can get it.
  restoreCorrectCallGraph();

  /// Added by Zhiyuan: print out the DSGraph.
  if (llvm::DebugFlag) {
    print(errs(), &M);
  }
  return false;
}
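Because runOnModule only refreshes the per-function DSGraphs and returns false, clients consume the results through the usual legacy-pass analysis interface. A minimal sketch of such a consumer follows; MyClientPass is hypothetical, the header path is assumed, and the visibility and exact signature of getOrCreateGraph are assumed from its use in the loop above.

#include "llvm/Pass.h"
#include "llvm/IR/Module.h"
// plus the DSA header declaring TDDataStructures and DSGraph
// (assumed: "dsa/DataStructure.h")
using namespace llvm;

// Hypothetical client pass: requires TD DSA to run first and only reads the
// per-function graphs it produced.
struct MyClientPass : public ModulePass {
  static char ID;
  MyClientPass() : ModulePass(ID) {}

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<TDDataStructures>();  // make the TD graphs available
    AU.setPreservesAll();                // we do not modify the IR
  }

  bool runOnModule(Module &M) override {
    TDDataStructures &TD = getAnalysis<TDDataStructures>();
    for (Module::iterator F = M.begin(), E = M.end(); F != E; ++F)
      if (!F->isDeclaration()) {
        DSGraph *G = TD.getOrCreateGraph(&*F);  // assumed accessor, as used above
        (void)G;  // ... inspect the per-function TD graph here ...
      }
    return false;
  }
};
char MyClientPass::ID = 0;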
/// canonicalizeInputFunction - Functions like swift_retain return an argument
/// as a low-level performance optimization.  This makes it difficult to
/// reason about pointer equality though, so undo it as an initial
/// canonicalization step.  After this step, unknown retains and releases of
/// values known to be native Swift references have been strength-reduced to
/// swift_retain and swift_release, and nothing uses the result of the
/// bridge/ObjC retain entry points.
///
/// This also does some trivial peephole optimizations as we go.
static bool canonicalizeInputFunction(Function &F, ARCEntryPointBuilder &B,
                                      SwiftRCIdentity *RC) {
  bool Changed = false;
  DenseSet<Value *> NativeRefs;
  DenseMap<Value *, TinyPtrVector<Instruction *>> UnknownRetains;
  DenseMap<Value *, TinyPtrVector<Instruction *>> UnknownReleases;
  for (auto &BB : F) {
    UnknownRetains.clear();
    UnknownReleases.clear();
    NativeRefs.clear();
    for (auto I = BB.begin(); I != BB.end(); ) {
      Instruction &Inst = *I++;
      switch (classifyInstruction(Inst)) {
      // These instructions should not reach here based on the pass ordering,
      // i.e. LLVMARCOpt -> LLVMContractOpt.
      case RT_RetainN:
      case RT_UnknownRetainN:
      case RT_BridgeRetainN:
      case RT_ReleaseN:
      case RT_UnknownReleaseN:
      case RT_BridgeReleaseN:
        llvm_unreachable("These are only created by LLVMARCContract!");
      case RT_Unknown:
      case RT_BridgeRelease:
      case RT_AllocObject:
      case RT_FixLifetime:
      case RT_NoMemoryAccessed:
      case RT_RetainUnowned:
      case RT_CheckUnowned:
        break;
      case RT_Retain: {
        CallInst &CI = cast<CallInst>(Inst);
        Value *ArgVal = RC->getSwiftRCIdentityRoot(CI.getArgOperand(0));
        // retain(null) is a no-op.
        if (isa<ConstantPointerNull>(ArgVal)) {
          CI.eraseFromParent();
          Changed = true;
          ++NumNoopDeleted;
          continue;
        }

        // Rewrite any pending unknown retains of this value into swift_retains.
        NativeRefs.insert(ArgVal);
        for (auto &X : UnknownRetains[ArgVal]) {
          B.setInsertPoint(X);
          B.createRetain(ArgVal, cast<CallInst>(X));
          X->eraseFromParent();
          ++NumUnknownRetainReleaseSRed;
          Changed = true;
        }
        UnknownRetains[ArgVal].clear();
        break;
      }
      case RT_UnknownRetain: {
        CallInst &CI = cast<CallInst>(Inst);
        Value *ArgVal = RC->getSwiftRCIdentityRoot(CI.getArgOperand(0));
        // unknownRetain(null) is a no-op.
        if (isa<ConstantPointerNull>(ArgVal)) {
          CI.eraseFromParent();
          Changed = true;
          ++NumNoopDeleted;
          continue;
        }

        // We have not encountered a strong retain/release of this value yet;
        // keep the call in the unknown retain list for now.  It might get
        // replaced later.
        if (NativeRefs.find(ArgVal) == NativeRefs.end()) {
          UnknownRetains[ArgVal].push_back(&CI);
        } else {
          B.setInsertPoint(&CI);
          B.createRetain(ArgVal, &CI);
          CI.eraseFromParent();
          ++NumUnknownRetainReleaseSRed;
          Changed = true;
        }
        break;
      }
      case RT_Release: {
        CallInst &CI = cast<CallInst>(Inst);
        Value *ArgVal = RC->getSwiftRCIdentityRoot(CI.getArgOperand(0));
        // release(null) is a no-op.
        if (isa<ConstantPointerNull>(ArgVal)) {
          CI.eraseFromParent();
          Changed = true;
          ++NumNoopDeleted;
          continue;
        }

        // Rewrite any pending unknown releases of this value into
        // swift_releases.
        NativeRefs.insert(ArgVal);
        for (auto &X : UnknownReleases[ArgVal]) {
          B.setInsertPoint(X);
          B.createRelease(ArgVal, cast<CallInst>(X));
          X->eraseFromParent();
          ++NumUnknownRetainReleaseSRed;
          Changed = true;
        }
        UnknownReleases[ArgVal].clear();
        break;
      }
      case RT_UnknownRelease: {
        CallInst &CI = cast<CallInst>(Inst);
        Value *ArgVal = RC->getSwiftRCIdentityRoot(CI.getArgOperand(0));
        // unknownRelease(null) is a no-op.
        if (isa<ConstantPointerNull>(ArgVal)) {
          CI.eraseFromParent();
          Changed = true;
          ++NumNoopDeleted;
          continue;
        }

        // We have not encountered a strong retain/release of this value yet;
        // keep the call in the unknown release list for now.  It might get
        // replaced later.
        if (NativeRefs.find(ArgVal) == NativeRefs.end()) {
          UnknownReleases[ArgVal].push_back(&CI);
        } else {
          B.setInsertPoint(&CI);
          B.createRelease(ArgVal, &CI);
          CI.eraseFromParent();
          ++NumUnknownRetainReleaseSRed;
          Changed = true;
        }
        break;
      }
      case RT_ObjCRelease: {
        CallInst &CI = cast<CallInst>(Inst);
        Value *ArgVal = RC->getSwiftRCIdentityRoot(CI.getArgOperand(0));
        // objc_release(null) is a no-op, zap it.
        if (isa<ConstantPointerNull>(ArgVal)) {
          CI.eraseFromParent();
          Changed = true;
          ++NumNoopDeleted;
          continue;
        }
        break;
      }
      // These retain instructions return their argument, so they must be
      // processed specially.
      case RT_BridgeRetain:
      case RT_ObjCRetain: {
        // Canonicalize the retain so that nothing uses its result.
        CallInst &CI = cast<CallInst>(Inst);
        // Do not use the RC-identity root here: its type may differ from
        // CI's, and replaceAllUsesWith would then crash.
        Value *ArgVal = CI.getArgOperand(0);
        if (!CI.use_empty()) {
          CI.replaceAllUsesWith(ArgVal);
          Changed = true;
        }

        // A bridge or ObjC retain of null is a no-op, delete it.
        if (isa<ConstantPointerNull>(ArgVal)) {
          CI.eraseFromParent();
          Changed = true;
          ++NumNoopDeleted;
          continue;
        }
        break;
      }
      }
    }
  }
  return Changed;
}
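Concretely, after canonicalization a block that contained an unknown retain or release of a value followed (or preceded) by a strong retain or release of the same RC-identity root ends up using only swift_retain/swift_release for that value: the buffered unknown call is rewritten once a strong call proves the value is a native reference. Retains and releases of a literal null are deleted outright, and bridge/ObjC retains no longer have any users of their returned value, which is what makes the later pointer-equality reasoning in the pass straightforward.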