author     Chris Lattner <sabre@nondot.org>    2007-05-19 01:22:21 +0000
committer  Chris Lattner <sabre@nondot.org>    2007-05-19 01:22:21 +0000
commit     e8bd53c36a7ae1b18af4d05e689b652496a53b23 (patch)
tree       ae81afadbf42c80d3a7158c1ac9e077409d82897 /llvm/lib/Transforms/Scalar/LoopStrengthReduce.cpp
parent     afa90dc355ad20601c724ff90b50205d6da54d80 (diff)
Handle negative strides much more optimally.  This compiles
X86/lsr-negative-stride.ll into:

_t:
        movl 8(%esp), %ecx
        movl 4(%esp), %eax
        cmpl %ecx, %eax
        je LBB1_3       #bb17
LBB1_1: #bb
        cmpl %ecx, %eax
        jg LBB1_4       #cond_true
LBB1_2: #cond_false
        subl %eax, %ecx
        cmpl %ecx, %eax
        jne LBB1_1      #bb
LBB1_3: #bb17
        ret
LBB1_4: #cond_true
        subl %ecx, %eax
        cmpl %ecx, %eax
        jne LBB1_1      #bb
        jmp LBB1_3      #bb17

instead of:

_t:
        subl $4, %esp
        movl %esi, (%esp)
        movl 12(%esp), %ecx
        movl 8(%esp), %eax
        cmpl %ecx, %eax
        je LBB1_4       #bb17
LBB1_1: #bb.outer
        movl %ecx, %edx
        negl %edx
LBB1_2: #bb
        cmpl %ecx, %eax
        jle LBB1_5      #cond_false
LBB1_3: #cond_true
        addl %edx, %eax
        cmpl %ecx, %eax
        jne LBB1_2      #bb
LBB1_4: #bb17
        movl (%esp), %esi
        addl $4, %esp
        ret
LBB1_5: #cond_false
        movl %ecx, %edx
        subl %eax, %edx
        movl %eax, %esi
        addl %esi, %esi
        cmpl %ecx, %esi
        je LBB1_4       #bb17
LBB1_6: #cond_false.bb.outer_crit_edge
        movl %edx, %ecx
        jmp LBB1_1      #bb.outer

llvm-svn: 37252
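The assembly above has the shape of a subtraction-based GCD, so the test plausibly
compiles from a loop like the following sketch (an assumption; the actual contents of
X86/lsr-negative-stride.ll are not reproduced on this page). Each branch steps one
variable down by the other, i.e. an induction variable whose stride is a negative,
non-constant value -- exactly the case this patch targets:

        // Hypothetical source for the kind of loop being optimized (assumed,
        // not the literal test).  The update a -= b is an IV whose stride is
        // (-1 * b): negative and non-constant.
        int t(int a, int b) {
          while (a != b) {
            if (a > b)
              a -= b;   // previously expanded as a = a + (0 - b)
            else
              b -= a;   // after this patch, a single sub in the latch
          }
          return a;
        }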
Diffstat (limited to 'llvm/lib/Transforms/Scalar/LoopStrengthReduce.cpp')
 llvm/lib/Transforms/Scalar/LoopStrengthReduce.cpp | 29 ++++++++++++++++++++++++++++++---
1 file changed, 26 insertions(+), 3 deletions(-)
diff --git a/llvm/lib/Transforms/Scalar/LoopStrengthReduce.cpp b/llvm/lib/Transforms/Scalar/LoopStrengthReduce.cpp
index 1a9b9881d70..449cee3bda2 100644
--- a/llvm/lib/Transforms/Scalar/LoopStrengthReduce.cpp
+++ b/llvm/lib/Transforms/Scalar/LoopStrengthReduce.cpp
@@ -987,6 +987,20 @@ static bool PartitionByIsUseOfPostIncrementedValue(const BasedUser &Val) {
   return Val.isUseOfPostIncrementedValue;
 }
 
+/// isNonConstantNegative - Return true if the specified SCEV is negated, but
+/// not a constant.
+static bool isNonConstantNegative(const SCEVHandle &Expr) {
+  SCEVMulExpr *Mul = dyn_cast<SCEVMulExpr>(Expr);
+  if (!Mul) return false;
+
+  // If there is a constant factor, it will be first.
+  SCEVConstant *SC = dyn_cast<SCEVConstant>(Mul->getOperand(0));
+  if (!SC) return false;
+
+  // Return true if the value is negative; this matches things like (-42 * V).
+  return SC->getValue()->getValue().isNegative();
+}
+
 /// StrengthReduceStridedIVUsers - Strength reduce all of the users of a single
 /// stride of IV. All of the users may have different starting values, and this
 /// may not be the only stride (we know it is if isOnlyStride is true).
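To make the new predicate concrete: SCEV canonicalizes a negated value as a multiply
with the constant factor first, so -V is represented as (-1 * V), and the helper fires
on strides like (-1 * %n) or (-42 * %v) but deliberately not on plain negative
constants such as -4 (those already expand to cheap immediates). A minimal standalone
sketch of the same pattern match over toy expression types (assumptions throughout;
these are not LLVM's real SCEV classes):

        #include <cstdio>
        #include <utility>
        #include <vector>

        struct Expr { virtual ~Expr() = default; };   // toy stand-in for a SCEV node
        struct ConstExpr : Expr { long V; explicit ConstExpr(long v) : V(v) {} };
        struct VarExpr : Expr {};                     // a loop-variant value like %n
        struct MulExpr : Expr {                       // constant factor, if any, first
          std::vector<const Expr *> Ops;
          explicit MulExpr(std::vector<const Expr *> O) : Ops(std::move(O)) {}
        };

        // Mirrors isNonConstantNegative: a multiply whose leading constant
        // factor is negative.  Bare constants and bare variables return false.
        static bool isNonConstantNegative(const Expr *E) {
          auto *Mul = dynamic_cast<const MulExpr *>(E);
          if (!Mul) return false;
          auto *SC = dynamic_cast<const ConstExpr *>(Mul->Ops[0]);
          if (!SC) return false;
          return SC->V < 0;
        }

        int main() {
          VarExpr N;
          ConstExpr MinusOne(-1), MinusFour(-4);
          MulExpr NegN({&MinusOne, &N});
          std::printf("%d\n", isNonConstantNegative(&NegN));      // 1: (-1 * %n)
          std::printf("%d\n", isNonConstantNegative(&MinusFour)); // 0: plain constant
          std::printf("%d\n", isNonConstantNegative(&N));         // 0: bare variable
        }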
@@ -1104,15 +1118,24 @@ void LoopStrengthReduce::StrengthReduceStridedIVUsers(const SCEVHandle &Stride,
   // Add common base to the new Phi node.
   NewPHI->addIncoming(CommonBaseV, Preheader);
 
+  // If the stride is negative, insert a sub instead of an add for the
+  // increment.
+  bool isNegative = isNonConstantNegative(Stride);
+  SCEVHandle IncAmount = Stride;
+  if (isNegative)
+    IncAmount = SCEV::getNegativeSCEV(Stride);
+
   // Insert the stride into the preheader.
-  Value *StrideV = PreheaderRewriter.expandCodeFor(Stride, PreInsertPt,
+  Value *StrideV = PreheaderRewriter.expandCodeFor(IncAmount, PreInsertPt,
                                                    ReplacedTy);
   if (!isa<ConstantInt>(StrideV)) ++NumVariable;
 
   // Emit the increment of the base value before the terminator of the loop
   // latch block, and add it to the Phi node.
-  SCEVHandle IncExp = SCEVAddExpr::get(SCEVUnknown::get(NewPHI),
-                                       SCEVUnknown::get(StrideV));
+  SCEVHandle IncExp = SCEVUnknown::get(StrideV);
+  if (isNegative)
+    IncExp = SCEV::getNegativeSCEV(IncExp);
+  IncExp = SCEVAddExpr::get(SCEVUnknown::get(NewPHI), IncExp);
   IncV = Rewriter.expandCodeFor(IncExp, LatchBlock->getTerminator(),
                                 ReplacedTy);
 
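The net effect of the second hunk on the emitted code, sketched as plain C++ over an
induction variable iv with non-constant stride n (illustrative only; the pass actually
rewrites SCEV expressions, not source):

        // Before: the stride (-1 * n) was expanded literally, so the preheader
        // materialized the negation and the latch added it (the negl + addl
        // pair in the "instead of" assembly above).
        int step_before(int iv, int n) {
          int neg = 0 - n;
          return iv + neg;
        }

        // After: the stride is expanded positively and the negation is folded
        // into the increment, so the latch emits a single sub (the subl in the
        // new assembly above).
        int step_after(int iv, int n) {
          return iv - n;
        }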