| author | Sebastian Pop <sebpop@gmail.com> | 2016-10-13 03:23:33 +0000 |
|---|---|---|
| committer | Sebastian Pop <sebpop@gmail.com> | 2016-10-13 03:23:33 +0000 |
| commit | 5068d7a338fea589baad6e4f4330937268c7127d (patch) | |
| tree | a5aa95ba3ec4ee23f42082d3941ea2c4d3ab1b68 | |
| parent | 5ba9f24ed720009a9ab25a9999ed94d17623d4fa (diff) | |
Memory-SSA: strengthen defClobbersUseOrDef interface
As Danny pointed out, defClobbersUseOrDef should use MemoryLocOrCall to make
sure fences are properly handled.
llvm-svn: 284099
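
For context on the fence issue: MemoryLocation::get expects an instruction that actually reads or writes a concrete location, and a fence is neither a call nor such an instruction, so the old ImmutableCallSite check in defClobbersUseOrDef could fall through to an invalid location lookup. Routing the query through MemoryLocOrCall centralizes that distinction. Below is a minimal, self-contained sketch of the pattern using hypothetical stand-in types (InstKind and the plain Instruction/MemoryLocation structs), not LLVM's real classes:

```cpp
// Self-contained sketch of the MemoryLocOrCall pattern (hypothetical
// stand-in types, not LLVM's real classes).
#include <optional>

enum class InstKind { Load, Store, Call, Fence };

struct Instruction {
  InstKind Kind;
  const void *Ptr = nullptr; // pointer operand, if the instruction has one
};

// Stand-in for llvm::MemoryLocation: just the pointer here.
struct MemoryLocation {
  const void *Ptr = nullptr;
};

// Tagged wrapper: a memory use is either a call or touches a location.
// A fence is neither, so Loc stays empty -- the case a bare
// MemoryLocation::get-style lookup would mishandle.
struct MemoryLocOrCall {
  bool IsCall = false;
  std::optional<MemoryLocation> Loc;

  explicit MemoryLocOrCall(const Instruction &I) {
    if (I.Kind == InstKind::Call)
      IsCall = true;
    else if (I.Kind != InstKind::Fence)
      Loc = MemoryLocation{I.Ptr}; // only ordinary memory ops have a location
  }
};
```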
| -rw-r--r-- | llvm/lib/Transforms/Utils/MemorySSA.cpp | 34 |
|---|---|---|

1 file changed, 15 insertions(+), 19 deletions(-)
```diff
diff --git a/llvm/lib/Transforms/Utils/MemorySSA.cpp b/llvm/lib/Transforms/Utils/MemorySSA.cpp
index abcfc5a7b51..b02a3f55d03 100644
--- a/llvm/lib/Transforms/Utils/MemorySSA.cpp
+++ b/llvm/lib/Transforms/Utils/MemorySSA.cpp
@@ -105,6 +105,8 @@ public:
   MemoryLocOrCall() : IsCall(false) {}
   MemoryLocOrCall(MemoryUseOrDef *MUD)
       : MemoryLocOrCall(MUD->getMemoryInst()) {}
+  MemoryLocOrCall(const MemoryUseOrDef *MUD)
+      : MemoryLocOrCall(MUD->getMemoryInst()) {}
 
   MemoryLocOrCall(Instruction *Inst) {
     if (ImmutableCallSite(Inst)) {
@@ -254,16 +256,22 @@ static bool instructionClobbersQuery(MemoryDef *MD,
   return AA.getModRefInfo(DefInst, UseLoc) & MRI_Mod;
 }
 
+static bool instructionClobbersQuery(MemoryDef *MD, const MemoryUseOrDef *MU,
+                                     const MemoryLocOrCall &UseMLOC,
+                                     AliasAnalysis &AA) {
+  // FIXME: This is a temporary hack to allow a single instructionClobbersQuery
+  // to exist while MemoryLocOrCall is pushed through places.
+  if (UseMLOC.IsCall)
+    return instructionClobbersQuery(MD, MemoryLocation(), MU->getMemoryInst(),
+                                    AA);
+  return instructionClobbersQuery(MD, UseMLOC.getLoc(), MU->getMemoryInst(),
+                                  AA);
+}
+
 // Return true when MD may alias MU, return false otherwise.
 bool defClobbersUseOrDef(MemoryDef *MD, const MemoryUseOrDef *MU,
                          AliasAnalysis &AA) {
-  Instruction *UseInst = MU->getMemoryInst();
-  MemoryLocation UseLoc;
-  if (ImmutableCallSite(UseInst))
-    UseLoc = MemoryLocation();
-  else
-    UseLoc = MemoryLocation::get(UseInst);
-  return instructionClobbersQuery(MD, UseLoc, UseInst, AA);
+  return instructionClobbersQuery(MD, MU, MemoryLocOrCall(MU), AA);
 }
 }
 
@@ -315,18 +323,6 @@ static bool isUseTriviallyOptimizableToLiveOnEntry(AliasAnalysis &AA,
                                 AA.pointsToConstantMemory(I));
 }
 
-static bool instructionClobbersQuery(MemoryDef *MD, MemoryUse *MU,
-                                     const MemoryLocOrCall &UseMLOC,
-                                     AliasAnalysis &AA) {
-  // FIXME: This is a temporary hack to allow a single instructionClobbersQuery
-  // to exist while MemoryLocOrCall is pushed through places.
-  if (UseMLOC.IsCall)
-    return instructionClobbersQuery(MD, MemoryLocation(), MU->getMemoryInst(),
-                                    AA);
-  return instructionClobbersQuery(MD, UseMLOC.getLoc(), MU->getMemoryInst(),
-                                  AA);
-}
-
 /// Cache for our caching MemorySSA walker.
 class WalkerCache {
   DenseMap<ConstMemoryAccessPair, MemoryAccess *> Accesses;
```
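
Structurally, the patch deletes the MemoryUse*-only instructionClobbersQuery overload that lived further down the file and re-adds it earlier, widened to const MemoryUseOrDef *, so that defClobbersUseOrDef collapses to a one-line call into the same MemoryLocOrCall-based path the walker uses. The fence special case then lives in one place (presumably MemoryLocOrCall's Instruction constructor) instead of being re-derived at each call site.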

