author     Alina Sbirlea <asbirlea@google.com>  2018-09-07 23:51:41 +0000
committer  Alina Sbirlea <asbirlea@google.com>  2018-09-07 23:51:41 +0000
commit     65f385da639368f933b14a98657b03c0a4a034d9 (patch)
tree       bf0f410e7a326ac0742426cd510d37b4dbcb402a /llvm/lib/Analysis/MemorySSA.cpp
parent     fc89d18bb1ffb21d3bd827f32de60b3a7ea37d18 (diff)
[MemorySSA] Relax verification of clobbering accesses.
llvm-svn: 341733
Diffstat (limited to 'llvm/lib/Analysis/MemorySSA.cpp')
-rw-r--r--  llvm/lib/Analysis/MemorySSA.cpp | 14 ++++++++++++--
1 file changed, 12 insertions(+), 2 deletions(-)
diff --git a/llvm/lib/Analysis/MemorySSA.cpp b/llvm/lib/Analysis/MemorySSA.cpp
index 2d42624c805..2126d841a30 100644
--- a/llvm/lib/Analysis/MemorySSA.cpp
+++ b/llvm/lib/Analysis/MemorySSA.cpp
@@ -380,10 +380,12 @@ static bool isUseTriviallyOptimizableToLiveOnEntry(AliasAnalysis &AA,
/// \param MSSA The MemorySSA instance that Start and ClobberAt belong to.
/// \param Query The UpwardsMemoryQuery we used for our search.
/// \param AA The AliasAnalysis we used for our search.
+/// \param AllowImpreciseClobber Always false, unless we are doing a relaxed verify.
static void
checkClobberSanity(const MemoryAccess *Start, MemoryAccess *ClobberAt,
const MemoryLocation &StartLoc, const MemorySSA &MSSA,
- const UpwardsMemoryQuery &Query, AliasAnalysis &AA) {
+ const UpwardsMemoryQuery &Query, AliasAnalysis &AA,
+ bool AllowImpreciseClobber = false) {
assert(MSSA.dominates(ClobberAt, Start) && "Clobber doesn't dominate start?");
if (MSSA.isLiveOnEntryDef(Start)) {
@@ -454,6 +456,14 @@ checkClobberSanity(const MemoryAccess *Start, MemoryAccess *ClobberAt,
}
}
+ // If the verify is done following an optimization, it's possible that
+ // ClobberAt was a conservative clobber that we can now infer is not a
+ // true clobbering access. Don't fail the verify if that's the case.
+ // Some accesses claim they're optimized but could be optimized further;
+ // updating all of them can be expensive, so allow this for now (FIXME).
+ if (AllowImpreciseClobber)
+ return;
+
// If ClobberAt is a MemoryPhi, we can assume something above it acted as a
// clobber. Otherwise, `ClobberAt` should've acted as a clobber at some point.
assert((isa<MemoryPhi>(ClobberAt) || FoundClobber) &&
@@ -1694,7 +1704,7 @@ void MemorySSA::checkClobberSanityAccess(const MemoryAccess *MA) const {
return;
auto *Clobber = MUD->getOptimized();
UpwardsMemoryQuery Q(I, MUD);
- checkClobberSanity(MUD, Clobber, *Loc, *this, Q, *AA);
+ checkClobberSanity(MUD, Clobber, *Loc, *this, Q, *AA, true);
}
}
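
For context, below is a minimal sketch of the verification driver that reaches
the relaxed check. Only checkClobberSanityAccess appears in the diff above;
the surrounding walk over each block's access list is a paraphrase of
MemorySSA's verification code and should be treated as an assumption, not as
part of this patch.

void MemorySSA::verifyClobberSanity(const Function &F) const {
#ifndef NDEBUG
  for (const BasicBlock &BB : F) {
    // getBlockAccesses returns the MemoryAccesses in BB, or null if none.
    const AccessList *Accesses = getBlockAccesses(&BB);
    if (!Accesses)
      continue;
    for (const MemoryAccess &MA : *Accesses)
      // checkClobberSanityAccess (see the last hunk above) now forwards to
      // checkClobberSanity with AllowImpreciseClobber = true, so a clobber
      // cached before an optimization no longer fires the assert.
      checkClobberSanityAccess(&MA);
  }
#endif
}

The design choice here is to relax the verifier rather than eagerly re-optimize
every access: re-walking all cached clobbers after each transform would be
expensive, so a dominating-but-imprecise clobber is tolerated (the FIXME in the
comment above tracks tightening this later).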