Diffstat (limited to 'llvm/lib/Transforms/Scalar/GVN.cpp')
-rw-r--r--  llvm/lib/Transforms/Scalar/GVN.cpp | 25
1 file changed, 21 insertions(+), 4 deletions(-)
diff --git a/llvm/lib/Transforms/Scalar/GVN.cpp b/llvm/lib/Transforms/Scalar/GVN.cpp
index 9565dc8c3dc..116dc69b372 100644
--- a/llvm/lib/Transforms/Scalar/GVN.cpp
+++ b/llvm/lib/Transforms/Scalar/GVN.cpp
@@ -1219,6 +1219,7 @@ bool GVN::AnalyzeLoadAvailability(LoadInst *LI, MemDepResult DepInfo,
assert((DepInfo.isDef() || DepInfo.isClobber()) &&
"expected a local dependence");
+ assert(LI->isUnordered() && "rules below are incorrect for ordered access");
const DataLayout &DL = LI->getModule()->getDataLayout();
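
For context, a minimal sketch of the property the new assertion leans on, mirroring LoadInst::isUnordered(): the load must be NotAtomic or Unordered, and never volatile. The helper name below is hypothetical, not part of the patch.

    #include "llvm/IR/Instructions.h"

    // Sketch of what LoadInst::isUnordered() accepts: NotAtomic and
    // Unordered loads pass; Monotonic or stronger orderings, and any
    // volatile access, do not. Hypothetical helper, not patch code.
    static bool isUnorderedLoad(const llvm::LoadInst *LI) {
      llvm::AtomicOrdering O = LI->getOrdering();
      return (O == llvm::AtomicOrdering::NotAtomic ||
              O == llvm::AtomicOrdering::Unordered) &&
             !LI->isVolatile();
    }
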
@@ -1227,7 +1228,8 @@ bool GVN::AnalyzeLoadAvailability(LoadInst *LI, MemDepResult DepInfo,
// read by the load, we can extract the bits we need for the load from the
// stored value.
if (StoreInst *DepSI = dyn_cast<StoreInst>(DepInfo.getInst())) {
- if (Address) {
+ // Can't forward from non-atomic to atomic without violating memory model.
+ if (Address && LI->isAtomic() <= DepSI->isAtomic()) {
int Offset =
AnalyzeLoadFromClobberingStore(LI->getType(), Address, DepSI);
if (Offset != -1) {
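
The new guard reads as a tiny lattice check on bool: forwarding is legal only when the load is no more atomic than the store it would read from. A minimal, self-contained sketch of that comparison, with hypothetical names:

    #include <cassert>

    // bool orders false (0) before true (1), so "load <= store" rejects
    // exactly one combination: a non-atomic store feeding an atomic load.
    static bool mayForward(bool LoadIsAtomic, bool StoreIsAtomic) {
      return LoadIsAtomic <= StoreIsAtomic;
    }

    int main() {
      assert(mayForward(false, false)); // plain store -> plain load
      assert(mayForward(false, true));  // atomic store -> plain load
      assert(mayForward(true, true));   // atomic store -> atomic load
      assert(!mayForward(true, false)); // plain store -> atomic load: no
    }
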
@@ -1244,7 +1246,8 @@ bool GVN::AnalyzeLoadAvailability(LoadInst *LI, MemDepResult DepInfo,
if (LoadInst *DepLI = dyn_cast<LoadInst>(DepInfo.getInst())) {
// If this is a clobber and L is the first instruction in its block, then
// we have the first instruction in the entry block.
- if (DepLI != LI && Address) {
+ // Can't forward from non-atomic to atomic without violating memory model.
+ if (DepLI != LI && Address && LI->isAtomic() <= DepLI->isAtomic()) {
int Offset =
AnalyzeLoadFromClobberingLoad(LI->getType(), Address, DepLI, DL);
@@ -1258,7 +1261,7 @@ bool GVN::AnalyzeLoadAvailability(LoadInst *LI, MemDepResult DepInfo,
// If the clobbering value is a memset/memcpy/memmove, see if we can
// forward a value on from it.
if (MemIntrinsic *DepMI = dyn_cast<MemIntrinsic>(DepInfo.getInst())) {
- if (Address) {
+ if (Address && !LI->isAtomic()) {
int Offset = AnalyzeLoadFromClobberingMemInst(LI->getType(), Address,
DepMI, DL);
if (Offset != -1) {
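
Unlike the store and load cases, memory intrinsics carry no atomic ordering at all, so the patch disables forwarding from them into any atomic load outright. A worked illustration of the kind of rewrite that stays legal for a non-atomic load, under assumed memset-forwarding semantics:

    #include <cstdint>
    #include <cstring>

    // After memset fills every byte with 0xAB, a non-atomic 4-byte load
    // of the region is known to produce 0xABABABAB, so GVN may forward
    // that constant. An atomic load must keep the real memory access.
    int main() {
      uint32_t buf;
      std::memset(&buf, 0xAB, sizeof buf);
      uint32_t v;
      std::memcpy(&v, &buf, sizeof v); // stands in for the dependent load
      return v == 0xABABABABu ? 0 : 1;
    }
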
@@ -1304,6 +1307,10 @@ bool GVN::AnalyzeLoadAvailability(LoadInst *LI, MemDepResult DepInfo,
LI->getType(), DL))
return false;
+ // Can't forward from non-atomic to atomic without violating memory model.
+ if (S->isAtomic() < LI->isAtomic())
+ return false;
+
Res = AvailableValue::get(S->getValueOperand());
return true;
}
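
Note the strict "<" here, where the clobber cases used "<=": the def path bails out when the store is strictly less atomic than the load, while the clobber path kept going when the load was at most as atomic as the store. On bool these are the same predicate, checked exhaustively below:

    #include <cassert>
    #include <initializer_list>

    // De Morgan on bool: (Load <= Store) is exactly !(Store < Load).
    int main() {
      for (bool Load : {false, true})
        for (bool Store : {false, true})
          assert((Load <= Store) == !(Store < Load));
    }
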
@@ -1316,6 +1323,10 @@ bool GVN::AnalyzeLoadAvailability(LoadInst *LI, MemDepResult DepInfo,
!CanCoerceMustAliasedValueToLoad(LD, LI->getType(), DL))
return false;
+ // Can't forward from non-atomic to atomic without violating memory model.
+ if (LD->isAtomic() < LI->isAtomic())
+ return false;
+
Res = AvailableValue::getLoad(LD);
return true;
}
@@ -1587,6 +1598,11 @@ bool GVN::processNonLocalLoad(LoadInst *LI) {
if (LI->getParent()->getParent()->hasFnAttribute(Attribute::SanitizeAddress))
return false;
+ // This code hasn't been audited for atomic, ordered, or volatile memory
+ // access.
+ if (!LI->isSimple())
+ return false;
+
// Step 1: Find the non-local dependencies of the load.
LoadDepVect Deps;
MD->getNonLocalPointerDependency(LI, Deps);
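
The bail-out above uses isSimple(), a stricter predicate than the isUnordered() asserted earlier: it excludes every atomic ordering, even Unordered. A sketch mirroring LoadInst::isSimple(), again with a hypothetical helper name:

    #include "llvm/IR/Instructions.h"

    // "Simple" means neither atomic (at any ordering) nor volatile, so
    // the non-local path refuses all atomic loads after this patch.
    static bool isSimpleLoad(const llvm::LoadInst *LI) {
      return !LI->isAtomic() && !LI->isVolatile();
    }
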
@@ -1755,7 +1771,8 @@ bool GVN::processLoad(LoadInst *L) {
if (!MD)
return false;
- if (!L->isSimple())
+ // This code hasn't been audited for ordered or volatile memory access
+ if (!L->isUnordered())
return false;
if (L->use_empty()) {
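
Taken together, the last two hunks split responsibility: processNonLocalLoad now refuses anything non-simple, while processLoad is relaxed from isSimple() to isUnordered(), so unordered atomic loads are still value-numbered locally. A self-contained sketch of the resulting coverage, with predicate semantics assumed from the sketches above:

    #include <cassert>

    struct LoadKind { bool Atomic, OrderingIsUnordered, Volatile; };

    static bool isSimple(LoadKind L) { return !L.Atomic && !L.Volatile; }
    static bool isUnordered(LoadKind L) {
      return (!L.Atomic || L.OrderingIsUnordered) && !L.Volatile;
    }

    int main() {
      LoadKind Plain{false, false, false};     // handled by both paths
      LoadKind UnordAtomic{true, true, false}; // local path only
      LoadKind Monotonic{true, false, false};  // rejected by both
      assert(isSimple(Plain) && isUnordered(Plain));
      assert(!isSimple(UnordAtomic) && isUnordered(UnordAtomic));
      assert(!isSimple(Monotonic) && !isUnordered(Monotonic));
    }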