summaryrefslogtreecommitdiffstats
path: root/gcc/fold-const.c
diff options
context:
space:
mode:
authorbonzini <bonzini@138bc75d-0d04-0410-961f-82ee72b054a4>2005-05-17 09:55:44 +0000
committerbonzini <bonzini@138bc75d-0d04-0410-961f-82ee72b054a4>2005-05-17 09:55:44 +0000
commit429f2f90251e5a4bfdd214017e9824995f5d4ae1 (patch)
tree8e47af6adacdca86c18a58438d78923a88ce610b /gcc/fold-const.c
parentadcfa3a3f7ceb81c9e701b82dfda6d5a5f5b93f3 (diff)
downloadppe42-gcc-429f2f90251e5a4bfdd214017e9824995f5d4ae1.tar.gz
ppe42-gcc-429f2f90251e5a4bfdd214017e9824995f5d4ae1.zip
gcc:
2005-05-17  Paolo Bonzini  <bonzini@gnu.org>

	* Makefile.in: Add tree-ssa-math-opts.c.
	* expr.c (expand_expr_real_1) <case RDIV_EXPR>: Never emit as a*(1/b).
	* fold-const.c (distribute_real_division): New.
	(fold_binary) <case PLUS_EXPR, case MINUS_EXPR>: Use it.
	* tree-pass.h (pass_cse_reciprocals): New.
	* tree-optimize.c (init_tree_optimization_passes): Run it.
	* tree-ssa-math-opts.c: New file.
	* doc/passes.texi: Document the new pass.

gcc/testsuite:

2005-05-17  Paolo Bonzini  <bonzini@gnu.org>

	* gcc.dg/fold-div-1.c, gcc.dg/recip-1.c, gcc.dg/recip-2.c: New.

git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@99826 138bc75d-0d04-0410-961f-82ee72b054a4
Diffstat (limited to 'gcc/fold-const.c')
-rw-r--r--gcc/fold-const.c52
1 files changed, 52 insertions, 0 deletions
diff --git a/gcc/fold-const.c b/gcc/fold-const.c
index deb8780b8d5..050d45c6069 100644
--- a/gcc/fold-const.c
+++ b/gcc/fold-const.c
@@ -3103,6 +3103,46 @@ distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
return fold_build2 (TREE_CODE (arg0), type, common,
fold_build2 (code, type, left, right));
}
+
+/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs or MULT_EXPRs (the
+   visible callers guarantee this and additionally guard with
+   flag_unsafe_math_optimizations), try to simplify the binary operation
+   CODE (PLUS_EXPR or MINUS_EXPR per the caller hunks) applied to them,
+   producing a tree of type TYPE.  Returns the folded tree, or NULL_TREE
+   when neither pattern below matches.  This optimization is unsafe:
+   it is only valid under -funsafe-math-optimizations.  */
+static tree
+distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
+{
+  /* mul0/mul1 record whether each operand multiplies by its second
+     operand (A * C) rather than divides by it (A / C).  */
+  bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
+  bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
+
+  /* (A / C) +- (B / C) -> (A +- B) / C, and likewise
+     (A * C) +- (B * C) -> (A +- B) * C, when both operands use the
+     same operation (mul0 == mul1) and the same C.  */
+  if (mul0 == mul1
+      && operand_equal_p (TREE_OPERAND (arg0, 1),
+                       TREE_OPERAND (arg1, 1), 0))
+    return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
+                     fold_build2 (code, type,
+                                  TREE_OPERAND (arg0, 0),
+                                  TREE_OPERAND (arg1, 0)),
+                     TREE_OPERAND (arg0, 1));
+
+  /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2), with the mixed
+     multiply/divide forms handled via mul0/mul1.  Requires C1 and C2
+     to be REAL_CST so the combined coefficient folds to a constant.  */
+  if (operand_equal_p (TREE_OPERAND (arg0, 0),
+                    TREE_OPERAND (arg1, 0), 0)
+      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
+      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
+    {
+      REAL_VALUE_TYPE r0, r1;
+      r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
+      r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
+      /* For a division, replace the constant by its reciprocal so both
+         operands become A * constant.  */
+      if (!mul0)
+        real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
+      if (!mul1)
+        real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
+      /* Combine the two coefficients with CODE at compile time.  */
+      real_arithmetic (&r0, code, &r0, &r1);
+      return fold_build2 (MULT_EXPR, type,
+                       TREE_OPERAND (arg0, 0),
+                       build_real (type, r0));
+    }
+
+  /* Neither distribution pattern applied.  */
+  return NULL_TREE;
+}
/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
@@ -7528,6 +7568,12 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1)
fold_convert (type, tem));
}
+ if (flag_unsafe_math_optimizations
+ && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
+ && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
+ && (tem = distribute_real_division (code, type, arg0, arg1)))
+ return tem;
+
/* Convert x+x into x*2.0. */
if (operand_equal_p (arg0, arg1, 0)
&& SCALAR_FLOAT_TYPE_P (type))
@@ -7925,6 +7971,12 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1)
return fold_convert (type, fold (tem));
}
+ if (flag_unsafe_math_optimizations
+ && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
+ && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
+ && (tem = distribute_real_division (code, type, arg0, arg1)))
+ return tem;
+
if (TREE_CODE (arg0) == MULT_EXPR
&& TREE_CODE (arg1) == MULT_EXPR
&& (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
OpenPOWER on IntegriCloud