path: root/llvm/lib/Analysis/MemoryDependenceAnalysis.cpp
author    David Majnemer <david.majnemer@gmail.com>    2015-03-21 06:19:17 +0000
committer David Majnemer <david.majnemer@gmail.com>    2015-03-21 06:19:17 +0000
commit    e165502ed7675f5c9ce9c0d064df6beed8961224 (patch)
tree      f40b75c731fd6b8aadfbc7ca8f894fcd959a92d6 /llvm/lib/Analysis/MemoryDependenceAnalysis.cpp
parent    ea00c2a06f5135b88c8331f35108d174d412725b (diff)
MemoryDependenceAnalysis: Don't miscompile atomics
r216771 introduced a change to MemoryDependenceAnalysis that allowed it to reason about acquire/release operations. However, this change does not ensure that the acquire/release operations pair. Unfortunately, this leads to miscompiles as we won't see an acquire load as properly memory effecting. This largely reverts r216771.

This fixes PR22708.

llvm-svn: 232889
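To illustrate what is at stake, consider a standard release/acquire ownership handoff. The following is a minimal, hypothetical C++ sketch of the kind of pattern involved, not the actual reproducer from PR22708; the names produce, consume, payload, and who are invented for illustration.

#include <atomic>
#include <thread>

int payload = 0;           // plain data; ownership alternates between the threads
std::atomic<int> who{0};   // 0 = consumer owns payload, 1 = producer owns it

void produce() {
  while (who.load(std::memory_order_acquire) != 1) { }  // wait for ownership
  payload = 42;                                          // happens-after the consumer's store of 0
  who.store(0, std::memory_order_release);               // hand payload back
}

int consume() {
  payload = 0;                                           // consumer owns payload here
  who.store(1, std::memory_order_release);               // hand payload to the producer
  while (who.load(std::memory_order_acquire) != 0) { }   // wait for it back
  // The acquire load above must be treated as affecting memory: if the analysis
  // ignores it, the store of 0 above can be forwarded to the load below and the
  // function returns 0 instead of 42, even though the program is race free.
  return payload;
}

int main() {
  std::thread t(produce);
  int result = consume();
  t.join();
  return result == 42 ? 0 : 1;
}

The final load of payload is race free and must observe 42; that is only guaranteed if the acquire load of who is treated as memory effecting, so the earlier store of 0 cannot be forwarded past it.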
Diffstat (limited to 'llvm/lib/Analysis/MemoryDependenceAnalysis.cpp')
-rw-r--r--  llvm/lib/Analysis/MemoryDependenceAnalysis.cpp | 15
1 file changed, 4 insertions(+), 11 deletions(-)
diff --git a/llvm/lib/Analysis/MemoryDependenceAnalysis.cpp b/llvm/lib/Analysis/MemoryDependenceAnalysis.cpp
index 8fdca0be353..ab293932da5 100644
--- a/llvm/lib/Analysis/MemoryDependenceAnalysis.cpp
+++ b/llvm/lib/Analysis/MemoryDependenceAnalysis.cpp
@@ -407,7 +407,6 @@ getPointerDependencyFrom(const AliasAnalysis::Location &MemLoc, bool isLoad,
   // by every program that can detect any optimisation of that kind: either
   // it is racy (undefined) or there is a release followed by an acquire
   // between the pair of accesses under consideration.
-  bool HasSeenAcquire = false;
 
   if (isLoad && QueryInst) {
     LoadInst *LI = dyn_cast<LoadInst>(QueryInst);
@@ -468,12 +467,12 @@ getPointerDependencyFrom(const AliasAnalysis::Location &MemLoc, bool isLoad,
     if (LoadInst *LI = dyn_cast<LoadInst>(Inst)) {
       // Atomic loads have complications involved.
       // A Monotonic (or higher) load is OK if the query inst is itself not atomic.
-      // An Acquire (or higher) load sets the HasSeenAcquire flag, so that any
-      //   release store will know to return getClobber.
       // FIXME: This is overly conservative.
       if (LI->isAtomic() && LI->getOrdering() > Unordered) {
         if (!QueryInst)
           return MemDepResult::getClobber(LI);
+        if (LI->getOrdering() != Monotonic)
+          return MemDepResult::getClobber(LI);
         if (auto *QueryLI = dyn_cast<LoadInst>(QueryInst)) {
           if (!QueryLI->isSimple())
             return MemDepResult::getClobber(LI);
@@ -483,9 +482,6 @@ getPointerDependencyFrom(const AliasAnalysis::Location &MemLoc, bool isLoad,
         } else if (QueryInst->mayReadOrWriteMemory()) {
           return MemDepResult::getClobber(LI);
         }
-
-        if (isAtLeastAcquire(LI->getOrdering()))
-          HasSeenAcquire = true;
       }
 
       AliasAnalysis::Location LoadLoc = AA->getLocation(LI);
@@ -545,12 +541,12 @@ getPointerDependencyFrom(const AliasAnalysis::Location &MemLoc, bool isLoad,
     if (StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
       // Atomic stores have complications involved.
       // A Monotonic store is OK if the query inst is itself not atomic.
-      // A Release (or higher) store further requires that no acquire load
-      //   has been seen.
       // FIXME: This is overly conservative.
       if (!SI->isUnordered()) {
         if (!QueryInst)
           return MemDepResult::getClobber(SI);
+        if (SI->getOrdering() != Monotonic)
+          return MemDepResult::getClobber(SI);
         if (auto *QueryLI = dyn_cast<LoadInst>(QueryInst)) {
           if (!QueryLI->isSimple())
             return MemDepResult::getClobber(SI);
@@ -560,9 +556,6 @@ getPointerDependencyFrom(const AliasAnalysis::Location &MemLoc, bool isLoad,
         } else if (QueryInst->mayReadOrWriteMemory()) {
          return MemDepResult::getClobber(SI);
         }
-
-        if (HasSeenAcquire && isAtLeastRelease(SI->getOrdering()))
-          return MemDepResult::getClobber(SI);
       }
 
       // FIXME: this is overly conservative.
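For reference, the conservative rule the patch leaves in place can be restated in isolation: an atomic access with any ordering stronger than monotonic now always clobbers, and a monotonic access clobbers unless the querying instruction is a simple (non-atomic, non-volatile) load or store. The sketch below is a hypothetical, self-contained restatement using stand-in types, not the real LLVM classes (LoadInst, StoreInst, MemDepResult).

#include <cstdio>

// Stand-in for llvm::AtomicOrdering, from weakest to strongest.
enum class Ordering { NotAtomic, Unordered, Monotonic, Acquire, Release,
                      AcquireRelease, SequentiallyConsistent };

struct Access {
  Ordering Ord;                 // ordering of the instruction
  bool IsSimpleLoadOrStore;     // non-atomic, non-volatile load or store
  bool MayReadOrWriteMemory;    // for query instructions that are not loads/stores
};

// Returns true when the scanned atomic access must be reported as a clobber
// for the query, mirroring the conservative behaviour after this patch.
bool clobbersQuery(const Access &Scanned, const Access *Query) {
  if (Scanned.Ord <= Ordering::Unordered)
    return false;                        // unordered/non-atomic accesses are handled elsewhere
  if (!Query)
    return true;                         // no query instruction: stay conservative
  if (Scanned.Ord != Ordering::Monotonic)
    return true;                         // acquire/release or stronger always clobbers
  if (Query->IsSimpleLoadOrStore)
    return false;                        // monotonic vs. a simple query is fine
  return Query->MayReadOrWriteMemory;    // otherwise clobber anything touching memory
}

int main() {
  Access acquireLoad{Ordering::Acquire, false, true};
  Access simpleQuery{Ordering::NotAtomic, true, true};
  std::printf("%d\n", clobbersQuery(acquireLoad, &simpleQuery)); // prints 1: clobber
}

The rule is deliberately coarse, as the FIXME comments note; precise reasoning about acquire/release pairing is what r216771 attempted and what this change backs out.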