author     edlinger <edlinger@138bc75d-0d04-0410-961f-82ee72b054a4>  2013-11-26 16:58:40 +0000
committer  edlinger <edlinger@138bc75d-0d04-0410-961f-82ee72b054a4>  2013-11-26 16:58:40 +0000
commit     583b9c26c16fa9caf9e0e1c6f135edd1468351fc (patch)
tree       83e80b72a625b55464fa9b30bf680242b4e80a5b
parent     9f2b70c26f37f498269e781d83bd20a710589075 (diff)
2013-11-26 Bernd Edlinger <bernd.edlinger@hotmail.de>

	Remove parameter keep_aligning from get_inner_reference.
	* tree.h (get_inner_reference): Adjust header.
	* expr.c (get_inner_reference): Remove parameter keep_aligning.
	(get_bit_range, expand_assignment, expand_expr_addr_expr_1,
	expand_expr_real_1): Adjust.
	* asan.c (instrument_derefs): Adjust.
	* builtins.c (get_object_alignment_2): Adjust.  Remove handling of
	VIEW_CONVERT_EXPR.
	* cfgexpand.c (expand_debug_expr): Adjust.
	* dbxout.c (dbxout_expand_expr): Adjust.
	* dwarf2out.c (loc_list_for_address_of_addr_expr_of_indirect_ref,
	loc_list_from_tree, fortran_common): Adjust.
	* fold-const.c (optimize_bit_field_compare, decode_field_reference,
	fold_unary_loc, fold_comparison,
	split_address_to_core_and_offset): Adjust.
	* gimple-ssa-strength-reduction.c (slsr_process_ref): Adjust.
	* simplify-rtx.c (delegitimize_mem_from_attrs): Adjust.
	* tree-affine.c (tree_to_aff_combination,
	get_inner_reference_aff): Adjust.
	* tree-data-ref.c (split_constant_offset_1,
	dr_analyze_innermost): Adjust.
	* tree-vect-data-refs.c (vect_check_gather,
	vect_analyze_data_refs): Adjust.
	* tree-scalar-evolution.c (interpret_rhs_expr): Adjust.
	* tree-ssa-loop-ivopts.c (may_be_unaligned_p,
	split_address_cost): Adjust.
	* tsan.c (instrument_expr): Adjust.
	* ada/gcc-interface/decl.c (elaborate_expression_1): Adjust.
	* ada/gcc-interface/trans.c (Attribute_to_gnu): Adjust.
	* ada/gcc-interface/utils2.c (build_unary_op): Adjust.
	* config/mips/mips.c (r10k_safe_mem_expr_p): Adjust.

git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@205398 138bc75d-0d04-0410-961f-82ee72b054a4
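
For orientation, the patch is a pure signature change: every caller simply drops the
trailing keep_aligning argument, and the VIEW_CONVERT_EXPR alignment special case it
guarded disappears.  The following is a minimal sketch, not part of the commit, of what
that looks like at a typical call site; the wrapper name sample_base_of is hypothetical,
and the types (tree, HOST_WIDE_INT, enum machine_mode) are GCC's internal ones from
tree.h and machmode.h.

/* Sketch only: a hypothetical caller updated for the new signature.  */
static tree
sample_base_of (tree exp)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int unsignedp, volatilep = 0;

  /* Before this commit the call carried a trailing bool keep_aligning:
       get_inner_reference (exp, &bitsize, &bitpos, &offset,
                            &mode, &unsignedp, &volatilep, false);
     After the commit the parameter is gone:  */
  return get_inner_reference (exp, &bitsize, &bitpos, &offset,
                              &mode, &unsignedp, &volatilep);
}

As documented in tree.h, the return value is the ultimate containing object, with
bitsize/bitpos/offset describing the position and size of the access within it.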
-rw-r--r--  gcc/ChangeLog                          34
-rw-r--r--  gcc/ada/gcc-interface/decl.c            2
-rw-r--r--  gcc/ada/gcc-interface/trans.c           2
-rw-r--r--  gcc/ada/gcc-interface/utils2.c          3
-rw-r--r--  gcc/asan.c                              2
-rw-r--r--  gcc/builtins.c                          6
-rw-r--r--  gcc/cfgexpand.c                         2
-rw-r--r--  gcc/config/mips/mips.c                  2
-rw-r--r--  gcc/dbxout.c                            2
-rw-r--r--  gcc/dwarf2out.c                         6
-rw-r--r--  gcc/expr.c                             37
-rw-r--r--  gcc/fold-const.c                       15
-rw-r--r--  gcc/gimple-ssa-strength-reduction.c     2
-rw-r--r--  gcc/simplify-rtx.c                      2
-rw-r--r--  gcc/tree-affine.c                       5
-rw-r--r--  gcc/tree-data-ref.c                     4
-rw-r--r--  gcc/tree-scalar-evolution.c             2
-rw-r--r--  gcc/tree-ssa-loop-ivopts.c              4
-rw-r--r--  gcc/tree-vect-data-refs.c               4
-rw-r--r--  gcc/tree.h                              3
-rw-r--r--  gcc/tsan.c                              2
21 files changed, 72 insertions, 69 deletions
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 62dc6e0bca5..11994746d9f 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,37 @@
+2013-11-26 Bernd Edlinger <bernd.edlinger@hotmail.de>
+
+ Remove parameter keep_aligning from get_inner_reference.
+ * tree.h (get_inner_reference): Adjust header.
+ * expr.c (get_inner_reference): Remove parameter keep_aligning.
+ (get_bit_range, expand_assignment,
+ expand_expr_addr_expr_1, expand_expr_real_1): Adjust.
+ * asan.c (instrument_derefs): Adjust.
+ * builtins.c (get_object_alignment_2): Adjust. Remove handling of
+ VIEW_CONVERT_EXPR.
+ * cfgexpand.c (expand_debug_expr): Adjust.
+ * dbxout.c (dbxout_expand_expr): Adjust.
+ * dwarf2out.c (loc_list_for_address_of_addr_expr_of_indirect_ref,
+ loc_list_from_tree, fortran_common): Adjust.
+ * fold-const.c (optimize_bit_field_compare,
+ decode_field_reference, fold_unary_loc, fold_comparison,
+ split_address_to_core_and_offset): Adjust.
+ * gimple-ssa-strength-reduction.c (slsr_process_ref): Adjust.
+ * simplify-rtx.c (delegitimize_mem_from_attrs): Adjust.
+ * tree-affine.c (tree_to_aff_combination,
+ get_inner_reference_aff): Adjust.
+ * tree-data-ref.c (split_constant_offset_1,
+ dr_analyze_innermost): Adjust.
+ * tree-vect-data-refs.c (vect_check_gather,
+ vect_analyze_data_refs): Adjust.
+ * tree-scalar-evolution.c (interpret_rhs_expr): Adjust.
+ * tree-ssa-loop-ivopts.c (may_be_unaligned_p,
+ split_address_cost): Adjust.
+ * tsan.c (instrument_expr): Adjust.
+ * ada/gcc-interface/decl.c (elaborate_expression_1): Adjust.
+ * ada/gcc-interface/trans.c (Attribute_to_gnu): Adjust.
+ * ada/gcc-interface/utils2.c (build_unary_op): Adjust.
+ * config/mips/mips.c (r10k_safe_mem_expr_p): Adjust.
+
2013-11-26 Yufeng Zhang <yufeng.zhang@arm.com>
* config/arm/arm.c (arm_legitimize_address): Check xop1 is not
diff --git a/gcc/ada/gcc-interface/decl.c b/gcc/ada/gcc-interface/decl.c
index ee76a9d160f..51adf18e0a3 100644
--- a/gcc/ada/gcc-interface/decl.c
+++ b/gcc/ada/gcc-interface/decl.c
@@ -6269,7 +6269,7 @@ elaborate_expression_1 (tree gnu_expr, Entity_Id gnat_entity, tree gnu_name,
int unsignedp, volatilep;
inner = get_inner_reference (inner, &bitsize, &bitpos, &offset,
- &mode, &unsignedp, &volatilep, false);
+ &mode, &unsignedp, &volatilep);
/* If the offset is variable, err on the side of caution. */
if (offset)
inner = NULL_TREE;
diff --git a/gcc/ada/gcc-interface/trans.c b/gcc/ada/gcc-interface/trans.c
index e533de6dcbf..6c7a418662f 100644
--- a/gcc/ada/gcc-interface/trans.c
+++ b/gcc/ada/gcc-interface/trans.c
@@ -2060,7 +2060,7 @@ Attribute_to_gnu (Node_Id gnat_node, tree *gnu_result_type_p, int attribute)
&& TREE_CODE (gnu_prefix) == FIELD_DECL));
get_inner_reference (gnu_prefix, &bitsize, &bitpos, &gnu_offset,
- &mode, &unsignedp, &volatilep, false);
+ &mode, &unsignedp, &volatilep);
if (TREE_CODE (gnu_prefix) == COMPONENT_REF)
{
diff --git a/gcc/ada/gcc-interface/utils2.c b/gcc/ada/gcc-interface/utils2.c
index 224a87d8777..b6299cbeefa 100644
--- a/gcc/ada/gcc-interface/utils2.c
+++ b/gcc/ada/gcc-interface/utils2.c
@@ -1312,8 +1312,7 @@ build_unary_op (enum tree_code op_code, tree result_type, tree operand)
int unsignedp, volatilep;
inner = get_inner_reference (operand, &bitsize, &bitpos, &offset,
- &mode, &unsignedp, &volatilep,
- false);
+ &mode, &unsignedp, &volatilep);
/* If INNER is a padding type whose field has a self-referential
size, convert to that inner type. We know the offset is zero
diff --git a/gcc/asan.c b/gcc/asan.c
index 677435e05ae..c901e1da380 100644
--- a/gcc/asan.c
+++ b/gcc/asan.c
@@ -1488,7 +1488,7 @@ instrument_derefs (gimple_stmt_iterator *iter, tree t,
enum machine_mode mode;
int volatilep = 0, unsignedp = 0;
tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset,
- &mode, &unsignedp, &volatilep, false);
+ &mode, &unsignedp, &volatilep);
if (bitpos % (size_in_bytes * BITS_PER_UNIT)
|| bitsize != size_in_bytes * BITS_PER_UNIT)
{
diff --git a/gcc/builtins.c b/gcc/builtins.c
index 5f657aed1d4..912c14c1174 100644
--- a/gcc/builtins.c
+++ b/gcc/builtins.c
@@ -329,7 +329,7 @@ get_object_alignment_2 (tree exp, unsigned int *alignp,
/* Get the innermost object and the constant (bitpos) and possibly
variable (offset) offset of the access. */
exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
- &mode, &unsignedp, &volatilep, true);
+ &mode, &unsignedp, &volatilep);
/* Extract alignment information from the innermost object and
possibly adjust bitpos and offset. */
@@ -360,10 +360,6 @@ get_object_alignment_2 (tree exp, unsigned int *alignp,
align = DECL_ALIGN (exp);
known_alignment = true;
}
- else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
- {
- align = TYPE_ALIGN (TREE_TYPE (exp));
- }
else if (TREE_CODE (exp) == INDIRECT_REF
|| TREE_CODE (exp) == MEM_REF
|| TREE_CODE (exp) == TARGET_MEM_REF)
diff --git a/gcc/cfgexpand.c b/gcc/cfgexpand.c
index 207f8767573..98983f40b3b 100644
--- a/gcc/cfgexpand.c
+++ b/gcc/cfgexpand.c
@@ -3941,7 +3941,7 @@ expand_debug_expr (tree exp)
tree offset;
int volatilep = 0;
tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
- &mode1, &unsignedp, &volatilep, false);
+ &mode1, &unsignedp, &volatilep);
rtx orig_op0;
if (bitsize == 0)
diff --git a/gcc/config/mips/mips.c b/gcc/config/mips/mips.c
index 36ba6df7a4c..69e67be4973 100644
--- a/gcc/config/mips/mips.c
+++ b/gcc/config/mips/mips.c
@@ -14948,7 +14948,7 @@ r10k_safe_mem_expr_p (tree expr, unsigned HOST_WIDE_INT offset)
int unsigned_p, volatile_p;
inner = get_inner_reference (expr, &bitsize, &bitoffset, &var_offset, &mode,
- &unsigned_p, &volatile_p, false);
+ &unsigned_p, &volatile_p);
if (!DECL_P (inner) || !DECL_SIZE_UNIT (inner) || var_offset)
return false;
diff --git a/gcc/dbxout.c b/gcc/dbxout.c
index bc6a3af0f96..5988c7e1bd4 100644
--- a/gcc/dbxout.c
+++ b/gcc/dbxout.c
@@ -2515,7 +2515,7 @@ dbxout_expand_expr (tree expr)
rtx x;
tem = get_inner_reference (expr, &bitsize, &bitpos, &offset,
- &mode, &unsignedp, &volatilep, true);
+ &mode, &unsignedp, &volatilep);
x = dbxout_expand_expr (tem);
if (x == NULL || !MEM_P (x))
diff --git a/gcc/dwarf2out.c b/gcc/dwarf2out.c
index 3448ec4a98c..6376306802a 100644
--- a/gcc/dwarf2out.c
+++ b/gcc/dwarf2out.c
@@ -13934,7 +13934,7 @@ loc_list_for_address_of_addr_expr_of_indirect_ref (tree loc, bool toplev)
obj = get_inner_reference (TREE_OPERAND (loc, 0),
&bitsize, &bitpos, &offset, &mode,
- &unsignedp, &volatilep, false);
+ &unsignedp, &volatilep);
STRIP_NOPS (obj);
if (bitpos % BITS_PER_UNIT)
{
@@ -14211,7 +14211,7 @@ loc_list_from_tree (tree loc, int want_address)
int unsignedp, volatilep = 0;
obj = get_inner_reference (loc, &bitsize, &bitpos, &offset, &mode,
- &unsignedp, &volatilep, false);
+ &unsignedp, &volatilep);
gcc_assert (obj != loc);
@@ -15521,7 +15521,7 @@ fortran_common (tree decl, HOST_WIDE_INT *value)
return NULL_TREE;
cvar = get_inner_reference (val_expr, &bitsize, &bitpos, &offset,
- &mode, &unsignedp, &volatilep, true);
+ &mode, &unsignedp, &volatilep);
if (cvar == NULL_TREE
|| TREE_CODE (cvar) != VAR_DECL
diff --git a/gcc/expr.c b/gcc/expr.c
index df1c1e88f6f..dc379dc1094 100644
--- a/gcc/expr.c
+++ b/gcc/expr.c
@@ -4648,7 +4648,7 @@ get_bit_range (unsigned HOST_WIDE_INT *bitstart,
int unsignedp;
int volatilep = 0;
get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
- &roffset, &rmode, &unsignedp, &volatilep, false);
+ &roffset, &rmode, &unsignedp, &volatilep);
if ((rbitpos % BITS_PER_UNIT) != 0)
{
*bitstart = *bitend = 0;
@@ -4802,7 +4802,7 @@ expand_assignment (tree to, tree from, bool nontemporal)
push_temp_slots ();
tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
- &unsignedp, &volatilep, true);
+ &unsignedp, &volatilep);
/* Make sure bitpos is not negative, it can wreak havoc later. */
if (bitpos < 0)
@@ -6644,27 +6644,13 @@ store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
If the field describes a variable-sized object, *PMODE is set to
BLKmode and *PBITSIZE is set to -1. An access cannot be made in
- this case, but the address of the object can be found.
-
- If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
- look through nodes that serve as markers of a greater alignment than
- the one that can be deduced from the expression. These nodes make it
- possible for front-ends to prevent temporaries from being created by
- the middle-end on alignment considerations. For that purpose, the
- normal operating mode at high-level is to always pass FALSE so that
- the ultimate containing object is really returned; moreover, the
- associated predicate handled_component_p will always return TRUE
- on these nodes, thus indicating that they are essentially handled
- by get_inner_reference. TRUE should only be passed when the caller
- is scanning the expression in order to build another representation
- and specifically knows how to handle these nodes; as such, this is
- the normal operating mode in the RTL expanders. */
+ this case, but the address of the object can be found. */
tree
get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
HOST_WIDE_INT *pbitpos, tree *poffset,
enum machine_mode *pmode, int *punsignedp,
- int *pvolatilep, bool keep_aligning)
+ int *pvolatilep)
{
tree size_tree = 0;
enum machine_mode mode = VOIDmode;
@@ -6784,14 +6770,6 @@ get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
break;
case VIEW_CONVERT_EXPR:
- if (keep_aligning && STRICT_ALIGNMENT
- && (TYPE_ALIGN (TREE_TYPE (exp))
- > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
- && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
- < BIGGEST_ALIGNMENT)
- && (TYPE_ALIGN_OK (TREE_TYPE (exp))
- || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
- goto done;
break;
case MEM_REF:
@@ -7656,7 +7634,7 @@ expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
they won't change the final object whose address will be returned
(they actually exist only for that purpose). */
inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
- &mode1, &unsignedp, &volatilep, false);
+ &mode1, &unsignedp, &volatilep);
break;
}
@@ -9933,7 +9911,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
tree offset;
int volatilep = 0, must_force_mem;
tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
- &mode1, &unsignedp, &volatilep, true);
+ &mode1, &unsignedp, &volatilep);
rtx orig_op0, memloc;
bool mem_attrs_from_type = false;
@@ -10294,8 +10272,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
int volatilep = 0;
tree tem
= get_inner_reference (treeop0, &bitsize, &bitpos,
- &offset, &mode1, &unsignedp, &volatilep,
- true);
+ &offset, &mode1, &unsignedp, &volatilep);
rtx orig_op0;
/* ??? We should work harder and deal with non-zero offsets. */
diff --git a/gcc/fold-const.c b/gcc/fold-const.c
index d99cd93c5ea..2289ba41c4f 100644
--- a/gcc/fold-const.c
+++ b/gcc/fold-const.c
@@ -3488,7 +3488,7 @@ optimize_bit_field_compare (location_t loc, enum tree_code code,
do anything if the inner expression is a PLACEHOLDER_EXPR since we
then will no longer be able to replace it. */
linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
- &lunsignedp, &lvolatilep, false);
+ &lunsignedp, &lvolatilep);
if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
|| offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
return 0;
@@ -3498,7 +3498,7 @@ optimize_bit_field_compare (location_t loc, enum tree_code code,
/* If this is not a constant, we can only do something if bit positions,
sizes, and signedness are the same. */
rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
- &runsignedp, &rvolatilep, false);
+ &runsignedp, &rvolatilep);
if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
|| lunsignedp != runsignedp || offset != 0
@@ -3672,7 +3672,7 @@ decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
}
inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
- punsignedp, pvolatilep, false);
+ punsignedp, pvolatilep);
if ((inner == exp && and_mask == 0)
|| *pbitsize < 0 || offset != 0
|| TREE_CODE (inner) == PLACEHOLDER_EXPR)
@@ -8056,7 +8056,7 @@ fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
int unsignedp, volatilep;
tree base = TREE_OPERAND (op0, 0);
base = get_inner_reference (base, &bitsize, &bitpos, &offset,
- &mode, &unsignedp, &volatilep, false);
+ &mode, &unsignedp, &volatilep);
/* If the reference was to a (constant) zero offset, we can use
the address of the base if it has the same base type
as the result type and the pointer type is unqualified. */
@@ -9081,7 +9081,7 @@ fold_comparison (location_t loc, enum tree_code code, tree type,
{
base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
&bitsize, &bitpos0, &offset0, &mode,
- &unsignedp, &volatilep, false);
+ &unsignedp, &volatilep);
if (TREE_CODE (base0) == INDIRECT_REF)
base0 = TREE_OPERAND (base0, 0);
else
@@ -9115,7 +9115,7 @@ fold_comparison (location_t loc, enum tree_code code, tree type,
{
base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
&bitsize, &bitpos1, &offset1, &mode,
- &unsignedp, &volatilep, false);
+ &unsignedp, &volatilep);
if (TREE_CODE (base1) == INDIRECT_REF)
base1 = TREE_OPERAND (base1, 0);
else
@@ -16982,8 +16982,7 @@ split_address_to_core_and_offset (tree exp,
if (TREE_CODE (exp) == ADDR_EXPR)
{
core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
- poffset, &mode, &unsignedp, &volatilep,
- false);
+ poffset, &mode, &unsignedp, &volatilep);
core = build_fold_addr_expr_loc (loc, core);
}
else
diff --git a/gcc/gimple-ssa-strength-reduction.c b/gcc/gimple-ssa-strength-reduction.c
index bc2484b5b13..1f881350021 100644
--- a/gcc/gimple-ssa-strength-reduction.c
+++ b/gcc/gimple-ssa-strength-reduction.c
@@ -940,7 +940,7 @@ slsr_process_ref (gimple gs)
return;
base = get_inner_reference (ref_expr, &bitsize, &bitpos, &offset, &mode,
- &unsignedp, &volatilep, false);
+ &unsignedp, &volatilep);
index = double_int::from_uhwi (bitpos);
if (!restructure_reference (&base, &offset, &index, &type))
diff --git a/gcc/simplify-rtx.c b/gcc/simplify-rtx.c
index ec138584c38..f680e1773a7 100644
--- a/gcc/simplify-rtx.c
+++ b/gcc/simplify-rtx.c
@@ -296,7 +296,7 @@ delegitimize_mem_from_attrs (rtx x)
int unsignedp, volatilep = 0;
decl = get_inner_reference (decl, &bitsize, &bitpos, &toffset,
- &mode, &unsignedp, &volatilep, false);
+ &mode, &unsignedp, &volatilep);
if (bitsize != GET_MODE_BITSIZE (mode)
|| (bitpos % BITS_PER_UNIT)
|| (toffset && !tree_fits_shwi_p (toffset)))
diff --git a/gcc/tree-affine.c b/gcc/tree-affine.c
index f93f186a761..ea80e7593e6 100644
--- a/gcc/tree-affine.c
+++ b/gcc/tree-affine.c
@@ -325,8 +325,7 @@ tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
return;
}
core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
- &toffset, &mode, &unsignedp, &volatilep,
- false);
+ &toffset, &mode, &unsignedp, &volatilep);
if (bitpos % BITS_PER_UNIT != 0)
break;
aff_combination_const (comb, type,
@@ -895,7 +894,7 @@ get_inner_reference_aff (tree ref, aff_tree *addr, double_int *size)
int uns, vol;
aff_tree tmp;
tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode,
- &uns, &vol, false);
+ &uns, &vol);
tree base_addr = build_fold_addr_expr (base);
/* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT. */
diff --git a/gcc/tree-data-ref.c b/gcc/tree-data-ref.c
index fef6a716b7a..559a546d29f 100644
--- a/gcc/tree-data-ref.c
+++ b/gcc/tree-data-ref.c
@@ -619,7 +619,7 @@ split_constant_offset_1 (tree type, tree op0, enum tree_code code, tree op1,
op0 = TREE_OPERAND (op0, 0);
base = get_inner_reference (op0, &pbitsize, &pbitpos, &poffset,
- &pmode, &punsignedp, &pvolatilep, false);
+ &pmode, &punsignedp, &pvolatilep);
if (pbitpos % BITS_PER_UNIT != 0)
return false;
@@ -769,7 +769,7 @@ dr_analyze_innermost (struct data_reference *dr, struct loop *nest)
fprintf (dump_file, "analyze_innermost: ");
base = get_inner_reference (ref, &pbitsize, &pbitpos, &poffset,
- &pmode, &punsignedp, &pvolatilep, false);
+ &pmode, &punsignedp, &pvolatilep);
gcc_assert (base != NULL_TREE);
if (pbitpos % BITS_PER_UNIT != 0)
diff --git a/gcc/tree-scalar-evolution.c b/gcc/tree-scalar-evolution.c
index ada942df389..ddea81b3b8e 100644
--- a/gcc/tree-scalar-evolution.c
+++ b/gcc/tree-scalar-evolution.c
@@ -1658,7 +1658,7 @@ interpret_rhs_expr (struct loop *loop, gimple at_stmt,
base = get_inner_reference (TREE_OPERAND (rhs1, 0),
&bitsize, &bitpos, &offset,
- &mode, &unsignedp, &volatilep, false);
+ &mode, &unsignedp, &volatilep);
if (TREE_CODE (base) == MEM_REF)
{
diff --git a/gcc/tree-ssa-loop-ivopts.c b/gcc/tree-ssa-loop-ivopts.c
index 1f5590a7ac2..f790bb180cd 100644
--- a/gcc/tree-ssa-loop-ivopts.c
+++ b/gcc/tree-ssa-loop-ivopts.c
@@ -1684,7 +1684,7 @@ may_be_unaligned_p (tree ref, tree step)
does to check whether the object must be loaded by parts when
STRICT_ALIGNMENT is true. */
base = get_inner_reference (ref, &bitsize, &bitpos, &toffset, &mode,
- &unsignedp, &volatilep, true);
+ &unsignedp, &volatilep);
base_type = TREE_TYPE (base);
base_align = get_object_alignment (base);
base_align = MAX (base_align, TYPE_ALIGN (base_type));
@@ -3781,7 +3781,7 @@ split_address_cost (struct ivopts_data *data,
int unsignedp, volatilep;
core = get_inner_reference (addr, &bitsize, &bitpos, &toffset, &mode,
- &unsignedp, &volatilep, false);
+ &unsignedp, &volatilep);
if (toffset != 0
|| bitpos % BITS_PER_UNIT != 0
diff --git a/gcc/tree-vect-data-refs.c b/gcc/tree-vect-data-refs.c
index 5e3b5209e68..82616450e1e 100644
--- a/gcc/tree-vect-data-refs.c
+++ b/gcc/tree-vect-data-refs.c
@@ -2970,7 +2970,7 @@ vect_check_gather (gimple stmt, loop_vec_info loop_vinfo, tree *basep,
SSA_NAME OFF and put the loop invariants into a tree BASE
that can be gimplified before the loop. */
base = get_inner_reference (DR_REF (dr), &pbitsize, &pbitpos, &off,
- &pmode, &punsignedp, &pvolatilep, false);
+ &pmode, &punsignedp, &pvolatilep);
gcc_assert (base != NULL_TREE && (pbitpos % BITS_PER_UNIT) == 0);
if (TREE_CODE (base) == MEM_REF)
@@ -3466,7 +3466,7 @@ again:
}
outer_base = get_inner_reference (inner_base, &pbitsize, &pbitpos,
- &poffset, &pmode, &punsignedp, &pvolatilep, false);
+ &poffset, &pmode, &punsignedp, &pvolatilep);
gcc_assert (outer_base != NULL_TREE);
if (pbitpos % BITS_PER_UNIT != 0)
diff --git a/gcc/tree.h b/gcc/tree.h
index 68f98261b56..be57f651fd6 100644
--- a/gcc/tree.h
+++ b/gcc/tree.h
@@ -4511,8 +4511,7 @@ extern tree build_personality_function (const char *);
look for the ultimate containing object, which is returned and specify
the access position and size. */
extern tree get_inner_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
- tree *, enum machine_mode *, int *, int *,
- bool);
+ tree *, enum machine_mode *, int *, int *);
/* Return a tree representing the lower bound of the array mentioned in
EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
diff --git a/gcc/tsan.c b/gcc/tsan.c
index 4efcfe565aa..10b74fd96ee 100644
--- a/gcc/tsan.c
+++ b/gcc/tsan.c
@@ -121,7 +121,7 @@ instrument_expr (gimple_stmt_iterator gsi, tree expr, bool is_write)
enum machine_mode mode;
int volatilep = 0, unsignedp = 0;
base = get_inner_reference (expr, &bitsize, &bitpos, &offset,
- &mode, &unsignedp, &volatilep, false);
+ &mode, &unsignedp, &volatilep);
/* No need to instrument accesses to decls that don't escape,
they can't escape to other threads then. */