 llvm/lib/Analysis/Loads.cpp                 |  8 ++++++++
 llvm/test/Transforms/SROA/phi-and-select.ll | 12 ++++++++++++
 2 files changed, 20 insertions(+), 0 deletions(-)
diff --git a/llvm/lib/Analysis/Loads.cpp b/llvm/lib/Analysis/Loads.cpp
index ba4f759a17b..7da9bd718a5 100644
--- a/llvm/lib/Analysis/Loads.cpp
+++ b/llvm/lib/Analysis/Loads.cpp
@@ -280,9 +280,17 @@ bool llvm::isSafeToLoadUnconditionally(Value *V, unsigned Align,
     Value *AccessedPtr;
     unsigned AccessedAlign;
     if (LoadInst *LI = dyn_cast<LoadInst>(BBI)) {
+      // Ignore volatile loads. The execution of a volatile load cannot
+      // be used to prove an address is backed by regular memory; it can,
+      // for example, point to an MMIO register.
+      if (LI->isVolatile())
+        continue;
       AccessedPtr = LI->getPointerOperand();
       AccessedAlign = LI->getAlignment();
     } else if (StoreInst *SI = dyn_cast<StoreInst>(BBI)) {
+      // Ignore volatile stores (see comment for loads).
+      if (SI->isVolatile())
+        continue;
       AccessedPtr = SI->getPointerOperand();
       AccessedAlign = SI->getAlignment();
     } else
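
The comment added above states the soundness argument: isSafeToLoadUnconditionally scans nearby instructions for a prior load or store of the same pointer as evidence that the pointer is dereferenceable, and a volatile access must not count as such evidence. A minimal hypothetical IR sketch of the hazard (the names @mmio_example, %dev, and %buf are invented for illustration):

  define i8 @mmio_example(i8* %dev, i8* %buf, i1 %c) {
    ; The volatile load may target an MMIO register; its execution proves
    ; nothing about %dev being ordinary, dereferenceable memory.
    %status = load volatile i8, i8* %dev
    %ptr = select i1 %c, i8* %dev, i8* %buf
    ; Rewriting this into an unconditional load of %dev (plus a select on
    ; the loaded values) on the strength of the volatile access above
    ; would be unsound.
    %v = load i8, i8* %ptr
    ret i8 %v
  }
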
diff --git a/llvm/test/Transforms/SROA/phi-and-select.ll b/llvm/test/Transforms/SROA/phi-and-select.ll
index e7ba2e89d79..d0904cecd9f 100644
--- a/llvm/test/Transforms/SROA/phi-and-select.ll
+++ b/llvm/test/Transforms/SROA/phi-and-select.ll
@@ -632,3 +632,15 @@ exit:
   %result = load i32, i32* %phi, align 4
   ret i32 %result
 }
+
+; Don't speculate a load based on an earlier volatile operation.
+define i8 @volatile_select(i8* %p, i1 %b) {
+; CHECK-LABEL: @volatile_select(
+; CHECK: select i1 %b, i8* %p, i8* %p2
+  %p2 = alloca i8
+  store i8 0, i8* %p2
+  store volatile i8 0, i8* %p
+  %px = select i1 %b, i8* %p, i8* %p2
+  %v2 = load i8, i8* %px
+  ret i8 %v2
+}
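
Without the Loads.cpp change, SROA would take the volatile store to %p as proof that %p is dereferenceable and speculate the load above the select, producing roughly (a sketch, not verbatim pass output):

  %v1 = load i8, i8* %p   ; dereferences %p even when %b is false
  %v2 = load i8, i8* %p2
  %v = select i1 %b, i8 %v1, i8 %v2

The CHECK line verifies that the select is kept on the pointers instead, so %p is only dereferenced when it is actually selected.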