Diffstat (limited to 'llvm/lib/Transforms')
-rw-r--r--  llvm/lib/Transforms/Scalar/GVNHoist.cpp |  62
-rw-r--r--  llvm/lib/Transforms/Utils/MemorySSA.cpp | 107
2 files changed, 88 insertions(+), 81 deletions(-)
diff --git a/llvm/lib/Transforms/Scalar/GVNHoist.cpp b/llvm/lib/Transforms/Scalar/GVNHoist.cpp
index 8b2164c5071..92b7e35a17f 100644
--- a/llvm/lib/Transforms/Scalar/GVNHoist.cpp
+++ b/llvm/lib/Transforms/Scalar/GVNHoist.cpp
@@ -329,31 +329,36 @@ private:
return I1DFS < I2DFS;
}
- // Return true when there are users of Def in BB.
- bool hasMemoryUseOnPath(MemoryAccess *Def, const BasicBlock *BB,
- const Instruction *OldPt) {
- const BasicBlock *DefBB = Def->getBlock();
- const BasicBlock *OldBB = OldPt->getParent();
+ // Return true when there are memory uses of Def in BB.
+ bool hasMemoryUse(const Instruction *NewPt, MemoryDef *Def,
+ const BasicBlock *BB) {
+ const MemorySSA::AccessList *Acc = MSSA->getBlockAccesses(BB);
+ if (!Acc)
+ return false;
- for (User *U : Def->users())
- if (auto *MU = dyn_cast<MemoryUse>(U)) {
- // FIXME: MU->getBlock() does not get updated when we move the instruction.
- BasicBlock *UBB = MU->getMemoryInst()->getParent();
- // Only analyze uses in BB.
- if (BB != UBB)
- continue;
+ Instruction *OldPt = Def->getMemoryInst();
+ const BasicBlock *OldBB = OldPt->getParent();
+ const BasicBlock *NewBB = NewPt->getParent();
+ bool ReachedNewPt = false;
- // A use in the same block as the Def is on the path.
- if (UBB == DefBB) {
- assert(MSSA->locallyDominates(Def, MU) && "def not dominating use");
- return true;
- }
+ for (const MemoryAccess &MA : *Acc)
+ if (const MemoryUse *MU = dyn_cast<MemoryUse>(&MA)) {
+ Instruction *Insn = MU->getMemoryInst();
- if (UBB != OldBB)
- return true;
+ // Do not check whether MU aliases Def when MU occurs after OldPt.
+ if (BB == OldBB && firstInBB(OldPt, Insn))
+ break;
- // It is only harmful to hoist when the use is before OldPt.
- if (firstInBB(MU->getMemoryInst(), OldPt))
+ // Do not check whether MU aliases Def when MU occurs before NewPt.
+ if (BB == NewBB) {
+ if (!ReachedNewPt) {
+ if (firstInBB(Insn, NewPt))
+ continue;
+ ReachedNewPt = true;
+ }
+ }
+ if (instructionClobbersQuery(Def, MemoryLocation::get(Insn), Insn,
+ *AA))
return true;
}
@@ -361,17 +366,18 @@ private:
}
// Return true when there are exception handling blocks or loads of memory Def
- // between OldPt and NewPt.
+ // between Def and NewPt. This function is only called for stores: Def is
+ // the MemoryDef of the store to be hoisted.
// Decrement NBBsOnAllPaths by 1 for each block between HoistPt and BB, and
// return true when the counter NBBsOnAllPaths reaches 0, except when it is
// initialized to -1, which means no limit.
- bool hasEHOrLoadsOnPath(const Instruction *NewPt, const Instruction *OldPt,
- MemoryAccess *Def, int &NBBsOnAllPaths) {
+ bool hasEHOrLoadsOnPath(const Instruction *NewPt, MemoryDef *Def,
+ int &NBBsOnAllPaths) {
const BasicBlock *NewBB = NewPt->getParent();
- const BasicBlock *OldBB = OldPt->getParent();
+ const BasicBlock *OldBB = Def->getBlock();
assert(DT->dominates(NewBB, OldBB) && "invalid path");
- assert(DT->dominates(Def->getBlock(), NewBB) &&
+ assert(DT->dominates(Def->getDefiningAccess()->getBlock(), NewBB) &&
"def does not dominate new hoisting point");
// Walk all basic blocks reachable in depth-first iteration on the inverse
@@ -390,7 +396,7 @@ private:
return true;
// Check that we do not move a store past loads.
- if (hasMemoryUseOnPath(Def, *I, OldPt))
+ if (hasMemoryUse(NewPt, Def, *I))
return true;
// Stop walk once the limit is reached.
@@ -473,7 +479,7 @@ private:
// Check for unsafe hoistings due to side effects.
if (K == InsKind::Store) {
- if (hasEHOrLoadsOnPath(NewPt, OldPt, D, NBBsOnAllPaths))
+ if (hasEHOrLoadsOnPath(NewPt, dyn_cast<MemoryDef>(U), NBBsOnAllPaths))
return false;
} else if (hasEHOnPath(NewBB, OldBB, NBBsOnAllPaths))
return false;
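Note on the hunk above: the restructured check walks the block's MemorySSA access list in program order instead of iterating over Def's users, since a MemoryUse's cached block pointer can go stale once instructions are moved (the FIXME in the removed code). Below is a minimal standalone sketch of that scan, using hypothetical Access/Block stand-ins rather than the LLVM API:

    #include <string>
    #include <vector>
    #include <iostream>

    // Hypothetical stand-ins for MemorySSA accesses: each block keeps its
    // memory accesses sorted in program order, which is what makes the
    // early break/skip logic below valid.
    struct Access {
      bool IsUse;       // models MemoryUse (load) vs. MemoryDef (store/call)
      int Position;     // program-order index within the block
      bool AliasesDef;  // assumed result of the alias query against the store
    };

    struct Block {
      std::string Name;
      std::vector<Access> Accesses;  // sorted by Position
    };

    // Mirrors the shape of the new hasMemoryUse: return true if a load in BB
    // that aliases the hoisted store lies between the new and old positions.
    static bool hasAliasingUse(const Block &BB, const Block *NewBB, int NewPos,
                               const Block *OldBB, int OldPos) {
      bool ReachedNewPt = false;
      for (const Access &A : BB.Accesses) {
        if (!A.IsUse)
          continue;
        // Loads at or after the old position never see the store move.
        if (&BB == OldBB && A.Position >= OldPos)
          break;
        // In the destination block, loads before the insertion point stay
        // before the store; skip them until the new point is reached.
        if (&BB == NewBB && !ReachedNewPt) {
          if (A.Position < NewPos)
            continue;
          ReachedNewPt = true;
        }
        if (A.AliasesDef)
          return true;  // hoisting would move the store past this load
      }
      return false;
    }

    int main() {
      // One block: a non-aliasing load at 1, an aliasing load at 2, and the
      // store to hoist at 3; the candidate hoist point is position 0.
      Block B{"bb", {{true, 1, false}, {true, 2, true}, {false, 3, false}}};
      std::cout << hasAliasingUse(B, &B, /*NewPos=*/0, &B, /*OldPos=*/3) << '\n';
      // Prints 1: the aliasing load at position 2 blocks the hoist.
      return 0;
    }

Scanning in program order is what lets the OldBB case break early and the NewBB case skip uses that remain ahead of the hoisted store.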
diff --git a/llvm/lib/Transforms/Utils/MemorySSA.cpp b/llvm/lib/Transforms/Utils/MemorySSA.cpp
index e06b60484c5..d549f6a9476 100644
--- a/llvm/lib/Transforms/Utils/MemorySSA.cpp
+++ b/llvm/lib/Transforms/Utils/MemorySSA.cpp
@@ -169,44 +169,6 @@ template <> struct DenseMapInfo<MemoryLocOrCall> {
return LHS == RHS;
}
};
-}
-
-namespace {
-struct UpwardsMemoryQuery {
- // True if our original query started off as a call
- bool IsCall;
- // The pointer location we started the query with. This will be empty if
- // IsCall is true.
- MemoryLocation StartingLoc;
- // This is the instruction we were querying about.
- const Instruction *Inst;
- // The MemoryAccess we actually got called with, used to test local domination
- const MemoryAccess *OriginalAccess;
-
- UpwardsMemoryQuery()
- : IsCall(false), Inst(nullptr), OriginalAccess(nullptr) {}
-
- UpwardsMemoryQuery(const Instruction *Inst, const MemoryAccess *Access)
- : IsCall(ImmutableCallSite(Inst)), Inst(Inst), OriginalAccess(Access) {
- if (!IsCall)
- StartingLoc = MemoryLocation::get(Inst);
- }
-};
-
-static bool lifetimeEndsAt(MemoryDef *MD, const MemoryLocation &Loc,
- AliasAnalysis &AA) {
- Instruction *Inst = MD->getMemoryInst();
- if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(Inst)) {
- switch (II->getIntrinsicID()) {
- case Intrinsic::lifetime_start:
- case Intrinsic::lifetime_end:
- return AA.isMustAlias(MemoryLocation(II->getArgOperand(1)), Loc);
- default:
- return false;
- }
- }
- return false;
-}
enum class Reorderability { Always, IfNoAlias, Never };
@@ -248,21 +210,10 @@ static Reorderability getLoadReorderability(const LoadInst *Use,
return Result;
}
-static bool isUseTriviallyOptimizableToLiveOnEntry(AliasAnalysis &AA,
- const Instruction *I) {
- // If the memory can't be changed, then loads of the memory can't be
- // clobbered.
- //
- // FIXME: We should handle invariant groups, as well. It's a bit harder,
- // because we need to pay close attention to invariant group barriers.
- return isa<LoadInst>(I) && (I->getMetadata(LLVMContext::MD_invariant_load) ||
- AA.pointsToConstantMemory(I));
-}
-
-static bool instructionClobbersQuery(MemoryDef *MD,
- const MemoryLocation &UseLoc,
- const Instruction *UseInst,
- AliasAnalysis &AA) {
+bool instructionClobbersQuery(MemoryDef *MD,
+ const MemoryLocation &UseLoc,
+ const Instruction *UseInst,
+ AliasAnalysis &AA) {
Instruction *DefInst = MD->getMemoryInst();
assert(DefInst && "Defining instruction not actually an instruction");
@@ -303,6 +254,56 @@ static bool instructionClobbersQuery(MemoryDef *MD,
return AA.getModRefInfo(DefInst, UseLoc) & MRI_Mod;
}
+}
+
+namespace {
+struct UpwardsMemoryQuery {
+ // True if our original query started off as a call
+ bool IsCall;
+ // The pointer location we started the query with. This will be empty if
+ // IsCall is true.
+ MemoryLocation StartingLoc;
+ // This is the instruction we were querying about.
+ const Instruction *Inst;
+ // The MemoryAccess we actually got called with, used to test local domination
+ const MemoryAccess *OriginalAccess;
+
+ UpwardsMemoryQuery()
+ : IsCall(false), Inst(nullptr), OriginalAccess(nullptr) {}
+
+ UpwardsMemoryQuery(const Instruction *Inst, const MemoryAccess *Access)
+ : IsCall(ImmutableCallSite(Inst)), Inst(Inst), OriginalAccess(Access) {
+ if (!IsCall)
+ StartingLoc = MemoryLocation::get(Inst);
+ }
+};
+
+static bool lifetimeEndsAt(MemoryDef *MD, const MemoryLocation &Loc,
+ AliasAnalysis &AA) {
+ Instruction *Inst = MD->getMemoryInst();
+ if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(Inst)) {
+ switch (II->getIntrinsicID()) {
+ case Intrinsic::lifetime_start:
+ case Intrinsic::lifetime_end:
+ return AA.isMustAlias(MemoryLocation(II->getArgOperand(1)), Loc);
+ default:
+ return false;
+ }
+ }
+ return false;
+}
+
+static bool isUseTriviallyOptimizableToLiveOnEntry(AliasAnalysis &AA,
+ const Instruction *I) {
+ // If the memory can't be changed, then loads of the memory can't be
+ // clobbered.
+ //
+ // FIXME: We should handle invariant groups, as well. It's a bit harder,
+ // because we need to pay close attention to invariant group barriers.
+ return isa<LoadInst>(I) && (I->getMetadata(LLVMContext::MD_invariant_load) ||
+ AA.pointsToConstantMemory(I));
+}
+
static bool instructionClobbersQuery(MemoryDef *MD, MemoryUse *MU,
const MemoryLocOrCall &UseMLOC,
AliasAnalysis &AA) {
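The net effect of the MemorySSA reorder above is that instructionClobbersQuery loses its static qualifier and is defined ahead of the helpers, so a caller outside the file (GVNHoist, in the first half of this diff) can reach it. Its final step answers "does this def clobber the use location" through the mod/ref bits; a toy model of just that decision, with a hypothetical MRI-style enum rather than LLVM's own definitions:

    #include <cassert>

    // Hypothetical mirror of LLVM's mod/ref bits: Ref = may read the
    // location, Mod = may write it.
    enum ModRefInfo { MRI_NoModRef = 0, MRI_Ref = 1, MRI_Mod = 2, MRI_ModRef = 3 };

    // A MemoryDef clobbers a use location exactly when its instruction may
    // write (Mod) that location; pure reads never clobber.
    static bool clobbers(ModRefInfo DefOnUseLoc) {
      return (DefOnUseLoc & MRI_Mod) != 0;
    }

    int main() {
      assert(clobbers(MRI_Mod));        // plain store to the location
      assert(clobbers(MRI_ModRef));     // read-modify-write still clobbers
      assert(!clobbers(MRI_Ref));       // a load of the location is safe
      assert(!clobbers(MRI_NoModRef));  // disjoint memory is safe
      return 0;
    }

This corresponds to the `return AA.getModRefInfo(DefInst, UseLoc) & MRI_Mod;` line in the hunk above.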