author     Johannes Doerfert <jdoerfert@anl.gov>          2019-09-04 16:35:20 +0000
committer  Johannes Doerfert <jdoerfert@anl.gov>          2019-09-04 16:35:20 +0000
commit     2f6220633c71913f6b85c5f82cb793fc911efbeb (patch)
tree       c726a779b8e6a91d5b0ae0b074d7b014b87e1786 /llvm/lib/Transforms/IPO/Attributor.cpp
parent     7afffb54eac8c7aedc8c24f75ccdd70b176a9d9f (diff)
[Attributor] Look at internal functions only on-demand
Summary:
Instead of eagerly building attributes for internal functions, and then
leaving them un-updated for as long as we assume those functions are dead,
we now defer creating the attributes until the internal function is assumed
to be live. This reduces the number of required iterations, as well as the
number of required updates, on real code. On our tests, the results are mixed.
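
The shape of the change is easiest to see in isolation. Below is a minimal, self-contained C++ sketch of the on-demand idea, using hypothetical stand-in types (`Fn`, `Solver`, `seed`, `markLiveInternal`) rather than the real Attributor API: externally visible functions are seeded eagerly, while internal functions are seeded only once some live code is seen to call them.

```cpp
#include <cassert>
#include <iostream>
#include <set>
#include <string>
#include <vector>

// Hypothetical stand-ins for LLVM's Function and the Attributor; only the
// seeding behaviour is modelled.
struct Fn {
  std::string Name;
  bool IsInternal;
  std::vector<Fn *> Callees; // direct callees only
};

class Solver {
  std::set<Fn *> Seeded; // functions we have created attributes for

public:
  // Eager path: used for externally visible functions up front.
  void seed(Fn &F) {
    if (!Seeded.insert(&F).second)
      return; // already seeded; mirrors the VisitedFunctions guard
    std::cout << "seeding " << F.Name << "\n";
    // Once a function is seeded (assumed live), its internal callees are
    // looked at as well.
    for (Fn *Callee : F.Callees)
      if (Callee->IsInternal)
        markLiveInternal(*Callee);
  }

  // On-demand path: called only when live code is seen to call an internal
  // function; mirrors Attributor::markLiveInternalFunction.
  void markLiveInternal(Fn &F) {
    assert(F.IsInternal && "only internal functions are deferred");
    seed(F);
  }
};

int main() {
  Fn DeadHelper{"dead_helper", /*IsInternal=*/true, {}};
  Fn LiveHelper{"live_helper", /*IsInternal=*/true, {}};
  Fn Entry{"entry", /*IsInternal=*/false, {&LiveHelper}};

  Solver S;
  // Only the externally visible function is seeded eagerly; live_helper is
  // seeded on demand, dead_helper never is.
  S.seed(Entry);
  return 0;
}
```

In the sketch, `dead_helper` is never seeded because no live code calls it; in the patch, the corresponding role is played by `Attributor::markLiveInternalFunction`, which `AAIsDeadImpl::assumeLive` invokes for internal callees of blocks that become live.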
Reviewers: sstefan1, uenoku
Subscribers: hiraditya, bollu, llvm-commits
Tags: #llvm
Differential Revision: https://reviews.llvm.org/D66914
llvm-svn: 370924
Diffstat (limited to 'llvm/lib/Transforms/IPO/Attributor.cpp')
-rw-r--r--  llvm/lib/Transforms/IPO/Attributor.cpp | 147
1 file changed, 92 insertions, 55 deletions
```diff
diff --git a/llvm/lib/Transforms/IPO/Attributor.cpp b/llvm/lib/Transforms/IPO/Attributor.cpp
index 7dd07404614..a9591212254 100644
--- a/llvm/lib/Transforms/IPO/Attributor.cpp
+++ b/llvm/lib/Transforms/IPO/Attributor.cpp
@@ -1796,7 +1796,7 @@ struct AAIsDeadImpl : public AAIsDead {
 
   void exploreFromEntry(Attributor &A, const Function *F) {
     ToBeExploredPaths.insert(&(F->getEntryBlock().front()));
-    AssumedLiveBlocks.insert(&(F->getEntryBlock()));
+    assumeLive(A, F->getEntryBlock());
 
     for (size_t i = 0; i < ToBeExploredPaths.size(); ++i)
       if (const Instruction *NextNoReturnI =
@@ -1882,7 +1882,7 @@ struct AAIsDeadImpl : public AAIsDead {
       }
 
       if (SplitPos == &NormalDestBB->front())
-        AssumedLiveBlocks.insert(NormalDestBB);
+        assumeLive(A, *NormalDestBB);
     }
 
     BB = SplitPos->getParent();
@@ -1946,6 +1946,23 @@ struct AAIsDeadImpl : public AAIsDead {
     return F.hasPersonalityFn() && !canSimplifyInvokeNoUnwind(&F);
   }
 
+  /// Assume \p BB is (partially) live now and indicate to the Attributor \p A
+  /// that internal function called from \p BB should now be looked at.
+  void assumeLive(Attributor &A, const BasicBlock &BB) {
+    if (!AssumedLiveBlocks.insert(&BB).second)
+      return;
+
+    // We assume that all of BB is (probably) live now and if there are calls to
+    // internal functions we will assume that those are now live as well. This
+    // is a performance optimization for blocks with calls to a lot of internal
+    // functions. It can however cause dead functions to be treated as live.
+    for (const Instruction &I : BB)
+      if (ImmutableCallSite ICS = ImmutableCallSite(&I))
+        if (const Function *F = ICS.getCalledFunction())
+          if (F->hasInternalLinkage())
+            A.markLiveInternalFunction(*F);
+  }
+
   /// Collection of to be explored paths.
   SmallSetVector<const Instruction *, 8> ToBeExploredPaths;
 
@@ -1961,18 +1978,6 @@ struct AAIsDeadFunction final : public AAIsDeadImpl {
 
   /// See AbstractAttribute::trackStatistics()
   void trackStatistics() const override {
-    STATS_DECL(
-        DeadInternalFunction, Function,
-        "Number of internal functions classified as dead (no live callsite)");
-    BUILD_STAT_NAME(DeadInternalFunction, Function) +=
-        (getAssociatedFunction()->hasInternalLinkage() &&
-         AssumedLiveBlocks.empty())
-            ? 1
-            : 0;
-    STATS_DECL(DeadBlocks, Function,
-               "Number of basic blocks classified as dead");
-    BUILD_STAT_NAME(DeadBlocks, Function) +=
-        getAssociatedFunction()->size() - AssumedLiveBlocks.size();
     STATS_DECL(PartiallyDeadBlocks, Function,
                "Number of basic blocks classified as partially dead");
     BUILD_STAT_NAME(PartiallyDeadBlocks, Function) += NoReturnCalls.size();
@@ -2016,7 +2021,7 @@ const Instruction *AAIsDeadImpl::findNextNoReturn(Attributor &A,
       // Use nounwind to justify the unwind block is dead as well.
       const auto &AANoUnw = A.getAAFor<AANoUnwind>(*this, IPos);
       if (!Invoke2CallAllowed || !AANoUnw.isAssumedNoUnwind()) {
-        AssumedLiveBlocks.insert(Invoke->getUnwindDest());
+        assumeLive(A, *Invoke->getUnwindDest());
         ToBeExploredPaths.insert(&Invoke->getUnwindDest()->front());
       }
     }
@@ -2031,7 +2036,7 @@ const Instruction *AAIsDeadImpl::findNextNoReturn(Attributor &A,
   // get new paths (reachable blocks).
   for (const BasicBlock *SuccBB : successors(BB)) {
-    AssumedLiveBlocks.insert(SuccBB);
+    assumeLive(A, *SuccBB);
     ToBeExploredPaths.insert(&SuccBB->front());
   }
 
@@ -2040,22 +2045,8 @@ const Instruction *AAIsDeadImpl::findNextNoReturn(Attributor &A,
 }
 
 ChangeStatus AAIsDeadImpl::updateImpl(Attributor &A) {
-  const Function *F = getAssociatedFunction();
   ChangeStatus Status = ChangeStatus::UNCHANGED;
 
-  if (F->hasInternalLinkage() && AssumedLiveBlocks.empty()) {
-    auto CallSiteCheck = [&](CallSite) { return false; };
-
-    // All callsites of F are dead.
-    if (A.checkForAllCallSites(CallSiteCheck, *this, true))
-      return ChangeStatus::UNCHANGED;
-
-    // There exists at least one live call site, so we explore the function.
-    Status = ChangeStatus::CHANGED;
-
-    exploreFromEntry(A, F);
-  }
-
   // Temporary collection to iterate over existing noreturn instructions. This
   // will alow easier modification of NoReturnCalls collection
   SmallVector<const Instruction *, 8> NoReturnChanged;
@@ -3099,11 +3090,7 @@ bool Attributor::checkForAllReadWriteInstructions(
   return true;
 }
 
-ChangeStatus Attributor::run() {
-  // Initialize all abstract attributes, allow new ones to be created.
-  for (unsigned u = 0; u < AllAbstractAttributes.size(); u++)
-    AllAbstractAttributes[u]->initialize(*this);
-
+ChangeStatus Attributor::run(Module &M) {
   LLVM_DEBUG(dbgs() << "[Attributor] Identified and initialized "
                     << AllAbstractAttributes.size()
                     << " abstract attributes.\n");
@@ -3256,7 +3243,7 @@ ChangeStatus Attributor::run() {
   if (VerifyAttributor && FinishedAtFixpoint &&
       ManifestChange == ChangeStatus::CHANGED) {
     VerifyAttributor = false;
-    ChangeStatus VerifyStatus = run();
+    ChangeStatus VerifyStatus = run(M);
     if (VerifyStatus != ChangeStatus::UNCHANGED)
       llvm_unreachable(
           "Attributor verification failed, re-run did result in an IR change "
@@ -3274,26 +3261,64 @@ ChangeStatus Attributor::run() {
          "Expected the final number of abstract attributes to remain unchanged!");
 
   // Delete stuff at the end to avoid invalid references and a nice order.
-  LLVM_DEBUG(dbgs() << "\n[Attributor] Delete " << ToBeDeletedFunctions.size()
-                    << " functions and " << ToBeDeletedBlocks.size()
-                    << " blocks and " << ToBeDeletedInsts.size()
-                    << " instructions\n");
-  for (Instruction *I : ToBeDeletedInsts) {
-    if (!I->use_empty())
-      I->replaceAllUsesWith(UndefValue::get(I->getType()));
-    I->eraseFromParent();
-  }
+  {
+    LLVM_DEBUG(dbgs() << "\n[Attributor] Delete at least "
+                      << ToBeDeletedFunctions.size() << " functions and "
+                      << ToBeDeletedBlocks.size() << " blocks and "
+                      << ToBeDeletedInsts.size() << " instructions\n");
+    for (Instruction *I : ToBeDeletedInsts) {
+      if (!I->use_empty())
+        I->replaceAllUsesWith(UndefValue::get(I->getType()));
+      I->eraseFromParent();
+    }
 
-  if (unsigned NumDeadBlocks = ToBeDeletedBlocks.size()) {
-    SmallVector<BasicBlock *, 8> ToBeDeletedBBs;
-    ToBeDeletedBBs.reserve(NumDeadBlocks);
-    ToBeDeletedBBs.append(ToBeDeletedBlocks.begin(), ToBeDeletedBlocks.end());
-    DeleteDeadBlocks(ToBeDeletedBBs);
-  }
+    if (unsigned NumDeadBlocks = ToBeDeletedBlocks.size()) {
+      SmallVector<BasicBlock *, 8> ToBeDeletedBBs;
+      ToBeDeletedBBs.reserve(NumDeadBlocks);
+      ToBeDeletedBBs.append(ToBeDeletedBlocks.begin(), ToBeDeletedBlocks.end());
+      DeleteDeadBlocks(ToBeDeletedBBs);
+      STATS_DECLTRACK(AAIsDead, BasicBlock,
+                      "Number of dead basic blocks deleted.");
+    }
+
+    STATS_DECL(AAIsDead, Function, "Number of dead functions deleted.");
+    for (Function *Fn : ToBeDeletedFunctions) {
+      Fn->replaceAllUsesWith(UndefValue::get(Fn->getType()));
+      Fn->eraseFromParent();
+      STATS_TRACK(AAIsDead, Function);
+    }
+
+    // Identify dead internal functions and delete them. This happens outside
+    // the other fixpoint analysis as we might treat potentially dead functions
+    // as live to lower the number of iterations. If they happen to be dead, the
+    // below fixpoint loop will identify and eliminate them.
+    SmallVector<Function *, 8> InternalFns;
+    for (Function &F : M)
+      if (F.hasInternalLinkage())
+        InternalFns.push_back(&F);
+
+    bool FoundDeadFn = true;
+    while (FoundDeadFn) {
+      FoundDeadFn = false;
+      for (unsigned u = 0, e = InternalFns.size(); u < e; ++u) {
+        Function *F = InternalFns[u];
+        if (!F)
+          continue;
+
+        const auto *LivenessAA =
+            lookupAAFor<AAIsDead>(IRPosition::function(*F));
+        if (LivenessAA &&
+            !checkForAllCallSites([](CallSite CS) { return false; },
+                                  *LivenessAA, true))
+          continue;
 
-  for (Function *Fn : ToBeDeletedFunctions) {
-    Fn->replaceAllUsesWith(UndefValue::get(Fn->getType()));
-    Fn->eraseFromParent();
+        STATS_TRACK(AAIsDead, Function);
+        F->replaceAllUsesWith(UndefValue::get(F->getType()));
+        F->eraseFromParent();
+        InternalFns[u] = nullptr;
+        FoundDeadFn = true;
+      }
+    }
   }
 
   if (VerifyMaxFixpointIterations &&
@@ -3309,6 +3334,8 @@ ChangeStatus Attributor::run() {
 }
 
 void Attributor::identifyDefaultAbstractAttributes(Function &F) {
+  if (!VisitedFunctions.insert(&F).second)
+    return;
 
   IRPosition FPos = IRPosition::function(F);
 
@@ -3526,12 +3553,22 @@ static bool runAttributorOnModule(Module &M) {
         F.hasFnAttribute(Attribute::OptimizeNone))
       continue;
 
+    // We look at internal functions only on-demand but if any use is not a
+    // direct call, we have to do it eagerly.
+    if (F.hasInternalLinkage()) {
+      if (llvm::all_of(F.uses(), [](const Use &U) {
+            return ImmutableCallSite(U.getUser()) &&
+                   ImmutableCallSite(U.getUser()).isCallee(&U);
+          }))
+        continue;
+    }
+
     // Populate the Attributor with abstract attribute opportunities in the
     // function and the information cache with IR information.
     A.identifyDefaultAbstractAttributes(F);
   }
 
-  return A.run() == ChangeStatus::CHANGED;
+  return A.run(M) == ChangeStatus::CHANGED;
 }
 
 PreservedAnalyses AttributorPass::run(Module &M, ModuleAnalysisManager &AM) {
```
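
One part of the patch that is easy to miss is the cleanup loop added at the end of `Attributor::run(M)`: because internal functions may be treated as live purely to reduce the iteration count, genuinely dead ones are only deleted after the fixpoint, and deleting one can remove the last live call site of another, so the sweep repeats until a full pass deletes nothing. Here is a minimal sketch of that cascade, assuming hypothetical stand-ins (`Fn`, an `Alive` bit vector) instead of LLVM's IR classes:

```cpp
#include <cstddef>
#include <iostream>
#include <string>
#include <vector>

// One entry per internal function: its name and the indices of the internal
// functions that directly call it.
struct Fn {
  std::string Name;
  std::vector<std::size_t> Callers;
};

int main() {
  // b is only called from a; a has no callers at all.
  std::vector<Fn> Internal = {{"a", {}}, {"b", {0}}};
  std::vector<bool> Alive(Internal.size(), true);

  // Deleting one function can make another one dead, so keep sweeping until
  // a full pass finds nothing to delete -- the same cascade the while loop
  // in Attributor::run handles without another full fixpoint iteration.
  bool FoundDeadFn = true;
  while (FoundDeadFn) {
    FoundDeadFn = false;
    for (std::size_t u = 0, e = Internal.size(); u < e; ++u) {
      if (!Alive[u])
        continue;
      bool HasLiveCaller = false;
      for (std::size_t Caller : Internal[u].Callers)
        if (Alive[Caller])
          HasLiveCaller = true;
      if (HasLiveCaller)
        continue;
      std::cout << "deleting " << Internal[u].Name << "\n";
      Alive[u] = false; // stands in for eraseFromParent()
      FoundDeadFn = true;
    }
  }
  return 0;
}
```

With the liveness information already computed, each sweep in the real code only has to ask whether any call site of the function is still assumed live, which is what the `LivenessAA` query with the always-false `checkForAllCallSites` predicate in the patch's loop does.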