author     Sanjoy Das <sanjoy@playingwithpointers.com>  2016-05-17 17:51:14 +0000
committer  Sanjoy Das <sanjoy@playingwithpointers.com>  2016-05-17 17:51:14 +0000
commit     f5d40d5350e455a3efacfe4c9bff604a517dd879 (patch)
tree       683bbcc476dfc1d4d3c40eeb121919f9cee6c247 /llvm/lib/Analysis/ScalarEvolution.cpp
parent     bc6a7df0726fd6a5857da83a980aa88ac28fa13b (diff)
[SCEV] Be more aggressive in proving NUW
... for AddRecs in loops for which SCEV is unable to compute a max trip count. This is the NUW variant of r269211 and fixes PR27691. (Note: PR27691 is not a correctness or stability bug; it was created to track a pending task.)

llvm-svn: 269790
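To make the change concrete, the sketch below is a minimal, hypothetical C++ model of the gating condition this patch adds to getZeroExtendExpr. LoopFacts and shouldTryGuardBasedNoWrapProof are invented names for this illustration, not LLVM API.

// Hypothetical model of the patch's gating rule (not LLVM code).
#include <cassert>

struct LoopFacts {
  bool MaxBECountComputable; // models !isa<SCEVCouldNotCompute>(MaxBECount)
  bool HasGuards;            // function contains llvm.experimental.guard calls
  bool HasAssumptions;       // models !AC.assumptions().empty()
};

bool shouldTryGuardBasedNoWrapProof(const LoopFacts &LF) {
  // Normally a provable backedge guard implies a computable max
  // backedge-taken count; guards and assumptions are the exceptions,
  // since SCEV can use them to prove no-overflow even when it cannot
  // compute a trip count.
  return LF.MaxBECountComputable || LF.HasGuards || LF.HasAssumptions;
}

int main() {
  // New behavior: an unknown trip count no longer skips the proof when
  // a guard or an assumption is present.
  assert(shouldTryGuardBasedNoWrapProof({false, true, false}));
  assert(shouldTryGuardBasedNoWrapProof({false, false, true}));
  // Still skipped when nothing could make the guard-based proof succeed.
  assert(!shouldTryGuardBasedNoWrapProof({false, false, false}));
  return 0;
}

The point of the gate is cost avoidance: the guard-based proof is only attempted when at least one source of facts (a computed trip count, guard intrinsics, or assumptions) could plausibly let it succeed.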
Diffstat (limited to 'llvm/lib/Analysis/ScalarEvolution.cpp')
-rw-r--r--  llvm/lib/Analysis/ScalarEvolution.cpp | 27 ++++++++++++++++++++-------
1 file changed, 20 insertions(+), 7 deletions(-)
diff --git a/llvm/lib/Analysis/ScalarEvolution.cpp b/llvm/lib/Analysis/ScalarEvolution.cpp
index e0376d8f43a..bfd333d911c 100644
--- a/llvm/lib/Analysis/ScalarEvolution.cpp
+++ b/llvm/lib/Analysis/ScalarEvolution.cpp
@@ -1521,11 +1521,22 @@ const SCEV *ScalarEvolution::getZeroExtendExpr(const SCEV *Op,
               getSignExtendExpr(Step, Ty), L, AR->getNoWrapFlags());
         }
       }
+    }
 
-      // If the backedge is guarded by a comparison with the pre-inc value
-      // the addrec is safe.  Also, if the entry is guarded by a comparison
-      // with the start value and the backedge is guarded by a comparison
-      // with the post-inc value, the addrec is safe.
+    // Normally, in the cases we can prove no-overflow via a
+    // backedge guarding condition, we can also compute a backedge
+    // taken count for the loop.  The exceptions are assumptions and
+    // guards present in the loop -- SCEV is not great at exploiting
+    // these to compute max backedge taken counts, but can still use
+    // these to prove lack of overflow.  Use this fact to avoid
+    // doing extra work that may not pay off.
+    if (!isa<SCEVCouldNotCompute>(MaxBECount) || HasGuards ||
+        !AC.assumptions().empty()) {
+      // If the backedge is guarded by a comparison with the pre-inc
+      // value the addrec is safe.  Also, if the entry is guarded by
+      // a comparison with the start value and the backedge is
+      // guarded by a comparison with the post-inc value, the addrec
+      // is safe.
       if (isKnownPositive(Step)) {
         const SCEV *N = getConstant(APInt::getMinValue(BitWidth) -
                                     getUnsignedRange(Step).getUnsignedMax());
@@ -1533,7 +1544,8 @@ const SCEV *ScalarEvolution::getZeroExtendExpr(const SCEV *Op,
             (isLoopEntryGuardedByCond(L, ICmpInst::ICMP_ULT, Start, N) &&
              isLoopBackedgeGuardedByCond(L, ICmpInst::ICMP_ULT,
                                          AR->getPostIncExpr(*this), N))) {
-          // Cache knowledge of AR NUW, which is propagated to this AddRec.
+          // Cache knowledge of AR NUW, which is propagated to this
+          // AddRec.
           const_cast<SCEVAddRecExpr *>(AR)->setNoWrapFlags(SCEV::FlagNUW);
           // Return the expression with the addrec on the outside.
           return getAddRecExpr(
@@ -1547,8 +1559,9 @@ const SCEV *ScalarEvolution::getZeroExtendExpr(const SCEV *Op,
             (isLoopEntryGuardedByCond(L, ICmpInst::ICMP_UGT, Start, N) &&
              isLoopBackedgeGuardedByCond(L, ICmpInst::ICMP_UGT,
                                          AR->getPostIncExpr(*this), N))) {
-          // Cache knowledge of AR NW, which is propagated to this AddRec.
-          // Negative step causes unsigned wrap, but it still can't self-wrap.
+          // Cache knowledge of AR NW, which is propagated to this
+          // AddRec.  Negative step causes unsigned wrap, but it
+          // still can't self-wrap.
           const_cast<SCEVAddRecExpr *>(AR)->setNoWrapFlags(SCEV::FlagNW);
           // Return the expression with the addrec on the outside.
           return getAddRecExpr(
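For intuition about the positive-step rule in the hunks above, here is a small self-contained 8-bit model of the bound N = APInt::getMinValue(BitWidth) - umax(Step). The helper nuwProvenByGuard is written only for this illustration and assumes the ICMP_ULT guard holds on every backedge execution.

// 8-bit model of the NUW bound used above (illustration, not LLVM code).
#include <cassert>
#include <cstdint>

bool nuwProvenByGuard(uint8_t iv, uint8_t stepUMax) {
  // N = 0 - umax(Step) in wrapping 8-bit arithmetic, i.e. 256 - umax(Step).
  uint8_t N = static_cast<uint8_t>(0 - stepUMax);
  // If every backedge execution satisfies iv u< N, then
  // iv + step <= (N - 1) + umax(Step) == 255, so the increment cannot
  // wrap unsigned.
  return iv < N;
}

int main() {
  // Suppose Step is known to lie in [1, 4], so umax(Step) == 4 and N == 252.
  assert(nuwProvenByGuard(251, 4));  // 251 + 4 == 255: no unsigned wrap
  assert(!nuwProvenByGuard(252, 4)); // 252 + 4 would wrap to 0
  return 0;
}

The negative-step branch is weaker on purpose: as its comment says, a negative step really does cause unsigned wrap, so only FlagNW (no self-wrap) can be cached there, not FlagNUW.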