author     Benjamin Kramer <benny.kra@googlemail.com>  2013-09-23 14:16:38 +0000
committer  Benjamin Kramer <benny.kra@googlemail.com>  2013-09-23 14:16:38 +0000
commit     942dfe625bcadadb7cd514bfcaf3a804ffc95820 (patch)
tree       933f49d76a3d950fb5704ad268a20d1f8653f73f
parent     a4c8f3a7b0f02f16b186462d2071235fc3fd070f (diff)
InstSimplify: Fold equality comparisons between non-inbounds GEPs.

Overflow doesn't affect the correctness of equalities. Computing this is
cheap: we just reuse the computation for the inbounds case and try to peel
off more non-inbounds GEPs. This pattern is unlikely to ever appear in code
generated by Clang, but SCEV occasionally produces it.

llvm-svn: 191200
-rw-r--r--  llvm/lib/Analysis/InstructionSimplify.cpp   17
-rw-r--r--  llvm/test/Transforms/InstSimplify/compare.ll  9
2 files changed, 24 insertions, 2 deletions
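The title case, for illustration only (this snippet is not part of the patch, and the function name is made up): both operands are reached through plain, non-inbounds GEPs off the same base. Assuming a module with a data layout, as compare.ll provides, and 8-byte i64, the new code path should reduce the compare to a compare of the constant offsets 8 and 16:

; Hypothetical example in the same typed-pointer IR syntax as the test below.
; Neither GEP is inbounds, but equality only depends on the accumulated
; constant offsets, so the icmp is expected to fold to false.
define i1 @both_non_inbounds_gep_compare(i64* %a) {
  %x = getelementptr i64* %a, i64 1   ; %a plus 8 bytes, may wrap
  %y = getelementptr i64* %a, i64 2   ; %a plus 16 bytes, may wrap
  %cmp = icmp eq i64* %x, %y
  ret i1 %cmp                         ; expected to simplify to: ret i1 false
}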
diff --git a/llvm/lib/Analysis/InstructionSimplify.cpp b/llvm/lib/Analysis/InstructionSimplify.cpp
index 4b29824082f..af65cb74083 100644
--- a/llvm/lib/Analysis/InstructionSimplify.cpp
+++ b/llvm/lib/Analysis/InstructionSimplify.cpp
@@ -668,7 +668,8 @@ Value *llvm::SimplifyAddInst(Value *Op0, Value *Op1, bool isNSW, bool isNUW,
/// follow non-inbounds geps. This allows it to remain usable for icmp ult/etc.
/// folding.
static Constant *stripAndComputeConstantOffsets(const DataLayout *TD,
-                                                Value *&V) {
+                                                Value *&V,
+                                                bool AllowNonInbounds = false) {
assert(V->getType()->getScalarType()->isPointerTy());
// Without DataLayout, just be conservative for now. Theoretically, more could
@@ -685,7 +686,8 @@ static Constant *stripAndComputeConstantOffsets(const DataLayout *TD,
Visited.insert(V);
do {
if (GEPOperator *GEP = dyn_cast<GEPOperator>(V)) {
-      if (!GEP->isInBounds() || !GEP->accumulateConstantOffset(*TD, Offset))
+      if ((!AllowNonInbounds && !GEP->isInBounds()) ||
+          !GEP->accumulateConstantOffset(*TD, Offset))
break;
V = GEP->getPointerOperand();
} else if (Operator::getOpcode(V) == Instruction::BitCast) {
@@ -1837,6 +1839,17 @@ static Constant *computePointerICmp(const DataLayout *TD,
return ConstantInt::get(GetCompareTy(LHS),
!CmpInst::isTrueWhenEqual(Pred));
}
+
+    // Even if a non-inbounds GEP occurs along the path we can still optimize
+    // equality comparisons concerning the result. We avoid walking the whole
+    // chain again by starting where the last calls to
+    // stripAndComputeConstantOffsets left off and accumulate the offsets.
+    Constant *LHSNoBound = stripAndComputeConstantOffsets(TD, LHS, true);
+    Constant *RHSNoBound = stripAndComputeConstantOffsets(TD, RHS, true);
+    if (LHS == RHS)
+      return ConstantExpr::getICmp(Pred,
+                                   ConstantExpr::getAdd(LHSOffset, LHSNoBound),
+                                   ConstantExpr::getAdd(RHSOffset, RHSNoBound));
}
// Otherwise, fail.
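To illustrate the comment above about reusing the earlier stripping ("starting where the last calls to stripAndComputeConstantOffsets left off"), a hypothetical chained case, not part of the patch and with made-up names: the first, inbounds-only pass peels %p2 back to %p1 and records 16 bytes; the second pass, with AllowNonInbounds set, continues through the plain GEP to %a and adds the remaining 8 bytes, so the bases match and the equality folds on the offsets 24 and 0:

; Hypothetical chained case in the same typed-pointer syntax as compare.ll,
; assuming a data layout with 8-byte i64.
define i1 @chained_gep_compare(i64* %a) {
  %p1 = getelementptr i64* %a, i64 1            ; plain GEP, plus 8 bytes
  %p2 = getelementptr inbounds i64* %p1, i64 2  ; inbounds GEP, plus 16 bytes
  %cmp = icmp eq i64* %p2, %a
  ret i1 %cmp                                   ; expected: ret i1 false
}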
diff --git a/llvm/test/Transforms/InstSimplify/compare.ll b/llvm/test/Transforms/InstSimplify/compare.ll
index 095794923b9..73188aa697b 100644
--- a/llvm/test/Transforms/InstSimplify/compare.ll
+++ b/llvm/test/Transforms/InstSimplify/compare.ll
@@ -717,3 +717,12 @@ define i1 @alloca_gep(i64 %a, i64 %b) {
ret i1 %cmp
; CHECK-NEXT: ret i1 false
}
+
+define i1 @non_inbounds_gep_compare(i64* %a) {
+; CHECK-LABEL: @non_inbounds_gep_compare(
+; Equality compares with non-inbounds GEPs can be folded.
+ %x = getelementptr i64* %a, i64 42
+ %cmp = icmp eq i64* %a, %x
+ ret i1 %cmp
+; CHECK-NEXT: ret i1 false
+}
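A contrast case showing why AllowNonInbounds defaults to false (hypothetical, not part of the patch, names made up): the comment on stripAndComputeConstantOffsets notes that the default keeps the helper usable for icmp ult/ugt folding, which relies on inbounds GEPs not wrapping. The new code only consults the non-inbounds offsets under an eq/ne predicate, so a relational compare through a plain GEP like the one below is left alone by this path:

; Hypothetical relational case in the same typed-pointer syntax. Without
; 'inbounds' the address computation may wrap, so %x is not known to lie
; above %a; this compare is not folded by the new eq/ne-only path.
define i1 @non_inbounds_gep_relational(i64* %a) {
  %x = getelementptr i64* %a, i64 42
  %cmp = icmp ugt i64* %x, %a
  ret i1 %cmp
}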