path: root/gcc/expr.c
author  kenner <kenner@138bc75d-0d04-0410-961f-82ee72b054a4>  1999-11-01 01:11:22 +0000
committer  kenner <kenner@138bc75d-0d04-0410-961f-82ee72b054a4>  1999-11-01 01:11:22 +0000
commit  155b05dc9a5885072b6a6e73c378b1fddbf1d831 (patch)
tree  58ab23a2ebe35102f1a66447caaf4626bb2b4641 /gcc/expr.c
parent  39a564b774b5a408d4269a7bb0f8a1c2aeeefe94 (diff)
download  ppe42-gcc-155b05dc9a5885072b6a6e73c378b1fddbf1d831.tar.gz
download  ppe42-gcc-155b05dc9a5885072b6a6e73c378b1fddbf1d831.zip
Fri Oct 29 15:25:07 1999 Arnaud Charlet <charlet@ACT-Europe.FR>
* gcov.c (DIR_SEPARATOR): Provide default. (output_data): Add test for MS-DOS format absolute filename. (fancy_abort): Correct program name. (open_files): Open all files in binary mode.
* libgcc2.c (__bb_exit_func): Likewise.
* profile.c (init_branch_prob): Specify binary when opening files.
* flags.h (flag_unwind_tables): New decl.
* toplev.c (flag_unwind_tables): New definition. (f_options): Add -funwind-tables. (decode_g_option): Clarify warning when unknown -g option is given. (rest_of_compilation): If inside an inlined external function, pretend we are just being declared.
* dwarf2out.c (dwarf2out_do_frame): Check -funwind-tables. (dwarf2out_frame_finish): Likewise.

Fri Oct 29 06:32:44 1999  Geoffrey Keating  <geoffk@cygnus.com>

* flow.c (propagate_block): When the last reference to a label before an ADDR_VEC is deleted because the reference is a dead store, delete the ADDR_VEC.

Fri Oct 29 07:44:26 1999  Vasco Pedro  <vp@di.fct.unl.pt>

* fold-const.c (merge_ranges): In not in0, but in1, handle upper bounds equal like subset case.

Thu Oct 28 19:22:24 1999  Douglas Rupp  <rupp@gnat.com>

* dbxout.c (dbxout_parms): Generate a second stabs line for parameters passed in a register but moved to the stack.

Thu Oct 28 19:12:57 1999  Sam Tardieu  <tardieu@act-europe.fr>

* gcc.c (pass_exit_codes, greatest_status): New variables. (struct option_map): Add entry for "--pass-exit-codes". (execute): Update greatest_status if error. (display_help): Add documentation for -pass-exit-codes. (process_command): Handle -pass-exit-codes. (main): Look at pass_exit_codes and greatest_status on call to exit.

Thu Oct 28 18:06:50 1999  Richard Kenner  <kenner@vlsi1.ultra.nyu.edu>

* reload.c (find_reloads): Refine test for no input reload case to not include reloads emitted after insn.
* function.c (find_temp_slots_from_address): Handle sum involving a register that points to a temp slot. (update_temp_slot_address): Make recursive call if both old and new are PLUS with a common operand.
* calls.c (expand_call): Mark temp slot for result as having address taken.
* rtlanal.c (reg_referenced_p, case IF_THEN_ELSE): New case.
* gcc.c (process_command): Add standard_exec_prefix with "GCC" component as well as "BINUTILS".
* integrate.h (copy_rtx_and_substitute): New arg, FOR_LHS.
* integrate.c (copy_rtx_and_substitute): Likewise. (expand_inline_function, integrate_parm_decls, integrate_decl_tree): All callers changed.
* unroll.c (initial_reg_note_copy, copy_loop_body): Likewise.
* dbxout.c (dbxout_type, case INTEGER_TYPE_NODE): If can use gdb extensions, write size of type; also be more consistent in using references when this is a subtype.
* pa.md (extv, extzv, insv): Use define_expand to reject constant that is out of range.
* loop.c (unknown_constant_address_altered): New variable. (prescan_loop): Initialize it. (note_addr_stored): Set it for RTX_UNCHANGING_P MEM. (invariant_p, case MEM): Remove handling for volatile and readonly; check new variable if readonly. (check_dbra_loop): Check unknown_constant_address_altered.
* cse.c (canon_hash, case MEM): Do not record if BLKmode. (addr_affects_sp_p): Removed from note_mem_written and only define #ifdef AUTO_INC_DEC.
* alpha.c (input_operand, case ADDRESSOF): Treat as REG.
* regclass.c (record_reg_classes): Properly handle register move directions.
* varasm.c (initializer_constant_valid_p, case MINUS_EXPR): Don't think valid if both operands are invalid. (struct constant_descriptor): New field RTL. (mark_const_hash_entry): Mark it. (record_constant{,_rtx}): Initialize it. (output_constant_def): Allocate RTL in permanent obstack and save in table. ({record,compare}_constant_1): Modes must match for CONSTRUCTOR of ARRAY_TYPE.
* c-common.h (initializer_constant_valid_p): Delete decl from here.
* output.h (initializer_constant_valid_p): Move decl to here.
* c-common.c (initializer_constant_valid_p): Delete function from here.
* varasm.c (initializer_constant_valid_p): Move function to here.
* tree.h (STRIP_SIGN_NOPS): New macro.
* fold-const.c (optimize_minmax_comparison): New function. (invert_truthvalue, case WITH_RECORD_EXPR): New case. (fold): Use STRIP_SIGN_NOPS instead of STRIP_TYPE_NOPS. (fold, case EQ_EXPR): Call optimize_minmax_comparison and add cases with ABS_EXPR, NEGATE_EXPR, PLUS_EXPR, MINUS_EXPR, and widening conversions. (fold, case LE_EXPR): Rework changing unsigned to signed comparisons to look at size of mode, not precision of type; also add missing cases. (optimize_bit_field_compare, decode_field_reference): Don't try to optimize COMPONENT_REF of a PLACEHOLDER_EXPR.
* dwarf2out.c (ctype.h): Include. (dwarf2out_set_demangle_name_func): New function. (size_of_line_info): Deleted. (output_line_info): Compute size of line info table from difference of labels. (base_type_die, add_name_attribute): Call demangle function, if any. (field_byte_offset): Use bits per word for variable length fields. (gen_array_type_die): Add array name. (gen_subprogram_die): Ignore DECL_INLINE if -fno-inline. (dwarf2out_add_library_unit_info): New function.
* explow.c (set_stack_check_libfunc): New function. (stack_check_libfunc): New static variable. (probe_stack_range): Allow front-end to set up a libfunc to call.
* combine.c (simplify_comparison): When making comparison in wider mode, check for having commuted an AND and a SUBREG. (contains_muldiv): New function. (try_combine): Call it when dividing a PARALLEL. (simplify_rtx, case TRUNCATE): Don't remove for umulsi3_highpart. (simplify_comparison, case ASHIFTRT): Recognize sign-extension of a PLUS. (record_value_for_reg): If TEM is a binary operation with two CLOBBERs, use one of the CLOBBERs instead. (if_then_else_cond): If comparing against zero, just return thing being compared.
* optabs.c (expand_abs): If machine has MAX, ABS (x) is MAX (x, -x). Don't generate shifts and subtract if have conditional arithmetic.
* rtl.h (delete_barrier): New declaration.
* jump.c (jump_optimize): Set up to handle conditional call. In conditional arithmetic case, handle CALL_INSN followed by a BARRIER. (delete_barrier): New function.
* rtl.c (read_rtx): Call fatal if bad RTL code; check for bad mode.
* recog.c (nonmemory_operand): Accept ADDRESSOF.
* tree.c (build_type_attribute_variant): Push to obstack of ttype around type_hash_canon call.
* expr.c (placeholder_list): Move decl to file scope. (expand_expr): Don't force access to volatile just because its address is taken. If ignoring reference operations, just expand the operands. (expand_expr, case COMPONENT_REF): Propagate EXPAND_CONST_ADDRESS to recursive call when expanding inner. Refine test for using bitfield operations vs pointer punning. (expand_expr, case CONVERT_EXPR): If converting to BLKmode UNION_TYPE from BLKmode, just return inner object. Use proper mode in store_field call. Properly set sizes of object to store and total size in store_field call for convert to union. (expand_expr, case ARRAY_REF): If OP0 is in a register, put it in memory (like for ADDR_EXPR). Also, don't put constant in register if we'll want it in memory. (readonly_fields_p): New function. (expand_expr, case INDIRECT_REF): Call it if LHS. (expand_assignment): Handle a RESULT_DECL where DECL_RTL is a PARALLEL. (do_jump, case WITH_RECORD_EXPR): New case. (get_inner_reference): Always go inside a CONVERT_EXPR and NOP_EXPR if both modes are the same. (store_field): Use bitfield operations if size of bitsize is not same as size of RHS's type. Check for bitpos not a multiple of alignment in BLKmode case. Do block move in largest possible alignment. (store_constructor): Set BITSIZE to -1 for variable size and properly in case of array of BLKmode. (expand_expr_unaligned): New function. (do_compare_and_jump): Call it.
* mips/iris5.h (SWITCHES_NEED_SPACES): New macro.
* collect2.c (main): Only allow -ofoo if SWITCHES_NEED_SPACES does not include 'o'.
* function.c (instantiate_virtual_regs_1, case SET): Handle case where both SET_DEST and SET_SRC reference a virtual register. (gen_mem_addressof): Copy RTX_UNCHANGING_P from new REG to old REG.
* integrate.c (expand_inline_function): Handle case of setting virtual stack vars register (from built in setjmp); when parameter lives in memory, expand virtual_{stack_vars,incoming_args}_rtx early. (subst_constant): Add new parm, MEMONLY. (expand_inline_function, integrate_parm_decls): Pass new parm. (integrate_decl_tree): Likewise. (copy_rtx_and_substitute, case MEM): Do copy RTX_UNCHANGING_P. (try_constants): Call subst_constants twice, with MEMONLY 0 and 1. (copy_rtx_and_substitute, case SET): Add explicit calls to copy_rtx_and_substitute for both sides.
* stmt.c (expand_asm_operands): Don't use TREE_STRING_LENGTH for constraints. (pushcase{,_range}): Convert to NOMINAL_TYPE after checking for within INDEX_TYPE, instead of before. (fixup_gotos): Use f->target_rtl, not the next insn, since latter may be from a later fixup. (expand_value_return): Correctly convert VAL when promoting function return; support RETURN_REG being a PARALLEL. (expand_return): When checking for result in regs and having cleanup, consider PARALLEL in DECL_RTL as being in regs.

git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@30299 138bc75d-0d04-0410-961f-82ee72b054a4
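The expr.c entries above introduce readonly_fields_p and use it to mark stores into records that contain readonly fields, so such writes conflict with readonly references to those fields. As a rough illustration of the kind of type that check walks (this fragment is not part of the commit; the names are invented):

/* Hypothetical example: a record with a readonly field.  readonly_fields_p
   returns 1 for struct widget, and also for struct outer because a nested
   field has a type that contains readonly fields, so the expander can flag
   a write to such an object as conflicting with readonly uses of "id".  */
struct widget
{
  const int id;      /* readonly field */
  int count;         /* ordinary, writable field */
};

struct outer
{
  struct widget w;   /* nested record with readonly fields also counts */
};

void
bump (struct outer *o)
{
  o->w.count++;      /* write to an object whose type has readonly fields */
}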
Diffstat (limited to 'gcc/expr.c')
-rw-r--r--  gcc/expr.c  453
1 files changed, 414 insertions, 39 deletions
diff --git a/gcc/expr.c b/gcc/expr.c
index 36559608720..59ae687c5d8 100644
--- a/gcc/expr.c
+++ b/gcc/expr.c
@@ -90,6 +90,9 @@ int do_preexpand_calls = 1;
infinite recursion. */
static int in_check_memory_usage;
+/* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
+static tree placeholder_list = 0;
+
/* This structure is used by move_by_pieces to describe the move to
be performed. */
struct move_by_pieces
@@ -153,6 +156,8 @@ static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p PROTO((rtx, tree, int));
static int fixed_type_p PROTO((tree));
static rtx var_rtx PROTO((tree));
+static int readonly_fields_p PROTO((tree));
+static rtx expand_expr_unaligned PROTO((tree, int *));
static rtx expand_increment PROTO((tree, int, int));
static void preexpand_calls PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
@@ -3492,13 +3497,20 @@ expand_assignment (to, from, want_value, suggest_reg)
}
/* Don't move directly into a return register. */
- if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
+ if (TREE_CODE (to) == RESULT_DECL
+ && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
{
rtx temp;
push_temp_slots ();
temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
- emit_move_insn (to_rtx, temp);
+
+ if (GET_CODE (to_rtx) == PARALLEL)
+ emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
+ TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
+ else
+ emit_move_insn (to_rtx, temp);
+
preserve_temp_slots (to_rtx);
free_temp_slots ();
pop_temp_slots ();
@@ -4142,7 +4154,11 @@ store_constructor (exp, target, align, cleared)
if (cleared && is_zeros_p (TREE_VALUE (elt)))
continue;
- bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
+ if (TREE_CODE (DECL_SIZE (field)) == INTEGER_CST)
+ bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
+ else
+ bitsize = -1;
+
unsignedp = TREE_UNSIGNED (field);
mode = DECL_MODE (field);
if (DECL_BIT_FIELD (field))
@@ -4317,9 +4333,18 @@ store_constructor (exp, target, align, cleared)
if (cleared && is_zeros_p (value))
continue;
- mode = TYPE_MODE (elttype);
- bitsize = GET_MODE_BITSIZE (mode);
unsignedp = TREE_UNSIGNED (elttype);
+ mode = TYPE_MODE (elttype);
+ if (mode == BLKmode)
+ {
+ if (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
+ && TREE_INT_CST_HIGH (TYPE_SIZE (elttype)) == 0)
+ bitsize = TREE_INT_CST_LOW (TYPE_SIZE (elttype));
+ else
+ bitsize = -1;
+ }
+ else
+ bitsize = GET_MODE_BITSIZE (mode);
if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
{
@@ -4709,9 +4734,19 @@ store_field (target, bitsize, bitpos, mode, exp, value_mode,
|| GET_CODE (target) == SUBREG
/* If the field isn't aligned enough to store as an ordinary memref,
store it as a bit field. */
- || (SLOW_UNALIGNED_ACCESS
- && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
- || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
+ || (mode != BLKmode && SLOW_UNALIGNED_ACCESS
+ && (align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode)
+ || bitpos % GET_MODE_ALIGNMENT (mode)))
+ || (mode == BLKmode && SLOW_UNALIGNED_ACCESS
+ && (TYPE_ALIGN (TREE_TYPE (exp)) > align * BITS_PER_UNIT
+ || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
+ /* If the RHS and field are a constant size and the size of the
+ RHS isn't the same size as the bitfield, we must use bitfield
+ operations. */
+ || ((bitsize >= 0
+ && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST)
+ && (TREE_INT_CST_HIGH (TYPE_SIZE (TREE_TYPE (exp))) != 0
+ || TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp))) != bitsize)))
{
rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
@@ -4746,10 +4781,14 @@ store_field (target, bitsize, bitpos, mode, exp, value_mode,
plus_constant (XEXP (target, 0),
bitpos / BITS_PER_UNIT));
+ /* Find an alignment that is consistent with the bit position. */
+ while ((bitpos % (align * BITS_PER_UNIT)) != 0)
+ align >>= 1;
+
emit_block_move (target, temp,
GEN_INT ((bitsize + BITS_PER_UNIT - 1)
/ BITS_PER_UNIT),
- 1);
+ align);
return value_mode == VOIDmode ? const0_rtx : target;
}
@@ -4985,9 +5024,6 @@ get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
else if (TREE_CODE (exp) != NON_LVALUE_EXPR
&& ! ((TREE_CODE (exp) == NOP_EXPR
|| TREE_CODE (exp) == CONVERT_EXPR)
- && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
- && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
- != UNION_TYPE))
&& (TYPE_MODE (TREE_TYPE (exp))
== TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
break;
@@ -5526,6 +5562,25 @@ check_max_integer_computation_mode (exp)
#endif
+/* Utility function used by expand_expr to see if TYPE, a RECORD_TYPE,
+ has any readonly fields. If any of the fields have types that
+ contain readonly fields, return true as well. */
+
+static int
+readonly_fields_p (type)
+ tree type;
+{
+ tree field;
+
+ for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
+ if (TREE_READONLY (field)
+ || (TREE_CODE (TREE_TYPE (field)) == RECORD_TYPE
+ && readonly_fields_p (TREE_TYPE (field))))
+ return 1;
+
+ return 0;
+}
+
/* expand_expr: generate code for computing expression EXP.
An rtx for the computed value is returned. The value is never null.
In the case of a void EXP, const0_rtx is returned.
@@ -5568,9 +5623,6 @@ expand_expr (exp, target, tmode, modifier)
enum machine_mode tmode;
enum expand_modifier modifier;
{
- /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
- This is static so it will be accessible to our recursive callees. */
- static tree placeholder_list = 0;
register rtx op0, op1, temp;
tree type = TREE_TYPE (exp);
int unsignedp = TREE_UNSIGNED (type);
@@ -5629,10 +5681,12 @@ expand_expr (exp, target, tmode, modifier)
if (! TREE_SIDE_EFFECTS (exp))
return const0_rtx;
- /* Ensure we reference a volatile object even if value is ignored. */
+ /* Ensure we reference a volatile object even if value is ignored, but
+ don't do this if all we are doing is taking its address. */
if (TREE_THIS_VOLATILE (exp)
&& TREE_CODE (exp) != FUNCTION_DECL
- && mode != VOIDmode && mode != BLKmode)
+ && mode != VOIDmode && mode != BLKmode
+ && modifier != EXPAND_CONST_ADDRESS)
{
temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
if (GET_CODE (temp) == MEM)
@@ -5640,11 +5694,12 @@ expand_expr (exp, target, tmode, modifier)
return const0_rtx;
}
- if (TREE_CODE_CLASS (code) == '1')
+ if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
+ || code == INDIRECT_REF || code == BUFFER_REF)
return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
VOIDmode, ro_modifier);
- else if (TREE_CODE_CLASS (code) == '2'
- || TREE_CODE_CLASS (code) == '<')
+ else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
+ || code == ARRAY_REF)
{
expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
@@ -5656,7 +5711,14 @@ expand_expr (exp, target, tmode, modifier)
the first. */
return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
VOIDmode, ro_modifier);
-
+ else if (code == BIT_FIELD_REF)
+ {
+ expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
+ expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
+ expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, ro_modifier);
+ return const0_rtx;
+ }
+;
target = 0;
}
@@ -6334,6 +6396,14 @@ expand_expr (exp, target, tmode, modifier)
never change. Languages where it can never change should
also set TREE_STATIC. */
RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
+
+ /* If we are writing to this object and its type is a record with
+ readonly fields, we must mark it as readonly so it will
+ conflict with readonly references to those fields. */
+ if (modifier == EXPAND_MEMORY_USE_WO
+ && TREE_CODE (type) == RECORD_TYPE && readonly_fields_p (type))
+ RTX_UNCHANGING_P (temp) = 1;
+
return temp;
}
@@ -6516,15 +6586,17 @@ expand_expr (exp, target, tmode, modifier)
!= INTEGER_CST)
? target : NULL_RTX),
VOIDmode,
- modifier == EXPAND_INITIALIZER
+ (modifier == EXPAND_INITIALIZER
+ || modifier == EXPAND_CONST_ADDRESS)
? modifier : EXPAND_NORMAL);
/* If this is a constant, put it into a register if it is a
- legitimate constant and memory if it isn't. */
+ legitimate constant and OFFSET is 0 and memory if it isn't. */
if (CONSTANT_P (op0))
{
enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
- if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
+ if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
+ && offset == 0)
op0 = force_reg (mode, op0);
else
op0 = validize_mem (force_const_mem (mode, op0));
@@ -6534,6 +6606,20 @@ expand_expr (exp, target, tmode, modifier)
{
rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
+ /* If this object is in a register, put it into memory.
+ This case can't occur in C, but can in Ada if we have
+ unchecked conversion of an expression from a scalar type to
+ an array or record type. */
+ if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
+ || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
+ {
+ rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
+
+ mark_temp_addr_taken (memloc);
+ emit_move_insn (memloc, op0);
+ op0 = memloc;
+ }
+
if (GET_CODE (op0) != MEM)
abort ();
@@ -6546,12 +6632,12 @@ expand_expr (exp, target, tmode, modifier)
#endif
}
- /* A constant address in TO_RTX can have VOIDmode, we must not try
+ /* A constant address in OP0 can have VOIDmode, we must not try
to call force_reg for that case. Avoid that case. */
if (GET_CODE (op0) == MEM
&& GET_MODE (op0) == BLKmode
&& GET_MODE (XEXP (op0, 0)) != VOIDmode
- && bitsize
+ && bitsize != 0
&& (bitpos % bitsize) == 0
&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
&& (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
@@ -6625,13 +6711,23 @@ expand_expr (exp, target, tmode, modifier)
&& GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
/* If the field isn't aligned enough to fetch as a memref,
fetch it as a bit field. */
- || (SLOW_UNALIGNED_ACCESS
- && ((TYPE_ALIGN (TREE_TYPE (tem)) < (unsigned int) GET_MODE_ALIGNMENT (mode))
- || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
+ || (mode1 != BLKmode && SLOW_UNALIGNED_ACCESS
+ && ((TYPE_ALIGN (TREE_TYPE (tem))
+ < (unsigned int) GET_MODE_ALIGNMENT (mode))
+ || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))))
+ || (modifier != EXPAND_CONST_ADDRESS
+ && modifier != EXPAND_INITIALIZER
+ && mode == BLKmode
+ && SLOW_UNALIGNED_ACCESS
+ && (TYPE_ALIGN (type) > alignment * BITS_PER_UNIT
+ || bitpos % TYPE_ALIGN (type) != 0)))
{
enum machine_mode ext_mode = mode;
- if (ext_mode == BLKmode)
+ if (ext_mode == BLKmode
+ && ! (target != 0 && GET_CODE (op0) == MEM
+ && GET_CODE (target) == MEM
+ && bitpos % BITS_PER_UNIT == 0))
ext_mode = mode_for_size (bitsize, MODE_INT, 1);
if (ext_mode == BLKmode)
@@ -6709,7 +6805,7 @@ expand_expr (exp, target, tmode, modifier)
if (GET_CODE (op0) == MEM)
MEM_ALIAS_SET (op0) = get_alias_set (exp);
-
+
if (GET_CODE (XEXP (op0, 0)) == REG)
mark_reg_pointer (XEXP (op0, 0), alignment);
@@ -6890,6 +6986,16 @@ expand_expr (exp, target, tmode, modifier)
if (TREE_CODE (type) == UNION_TYPE)
{
tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
+
+ /* If both input and output are BLKmode, this conversion
+ isn't actually doing anything unless we need to make the
+ alignment stricter. */
+ if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
+ && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
+ || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
+ return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
+ modifier);
+
if (target == 0)
{
if (mode != BLKmode)
@@ -6905,11 +7011,13 @@ expand_expr (exp, target, tmode, modifier)
else if (GET_CODE (target) == REG)
/* Store this field into a union of the proper type. */
- store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
- TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
- VOIDmode, 0, 1,
- int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))),
- 0);
+ store_field (target,
+ MIN ((int_size_in_bytes (TREE_TYPE
+ (TREE_OPERAND (exp, 0)))
+ * BITS_PER_UNIT),
+ GET_MODE_BITSIZE (mode)),
+ 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
+ VOIDmode, 0, 1, int_size_in_bytes (type), 0);
else
abort ();
@@ -8306,6 +8414,263 @@ expand_expr (exp, target, tmode, modifier)
return temp;
}
+/* Similar to expand_expr, except that we don't specify a target, target
+ mode, or modifier and we return the alignment of the inner type. This is
+ used in cases where it is not necessary to align the result to the
+ alignment of its type as long as we know the alignment of the result, for
+ example for comparisons of BLKmode values. */
+
+static rtx
+expand_expr_unaligned (exp, palign)
+ register tree exp;
+ int *palign;
+{
+ register rtx op0;
+ tree type = TREE_TYPE (exp);
+ register enum machine_mode mode = TYPE_MODE (type);
+
+ /* Default the alignment we return to that of the type. */
+ *palign = TYPE_ALIGN (type);
+
+ /* The only cases in which we do anything special is if the resulting mode
+ is BLKmode. */
+ if (mode != BLKmode)
+ return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
+
+ switch (TREE_CODE (exp))
+ {
+ case CONVERT_EXPR:
+ case NOP_EXPR:
+ case NON_LVALUE_EXPR:
+ /* Conversions between BLKmode values don't change the underlying
+ alignment or value. */
+ if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
+ return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
+ break;
+
+ case ARRAY_REF:
+ /* Much of the code for this case is copied directly from expand_expr.
+ We need to duplicate it here because we will do something different
+ in the fall-through case, so we need to handle the same exceptions
+ it does. */
+ {
+ tree array = TREE_OPERAND (exp, 0);
+ tree domain = TYPE_DOMAIN (TREE_TYPE (array));
+ tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
+ tree index = TREE_OPERAND (exp, 1);
+ tree index_type = TREE_TYPE (index);
+ HOST_WIDE_INT i;
+
+ if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
+ abort ();
+
+ /* Optimize the special-case of a zero lower bound.
+
+ We convert the low_bound to sizetype to avoid some problems
+ with constant folding. (E.g. suppose the lower bound is 1,
+ and its mode is QI. Without the conversion, (ARRAY
+ +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
+ +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
+
+ But sizetype isn't quite right either (especially if
+ the lowbound is negative). FIXME */
+
+ if (! integer_zerop (low_bound))
+ index = fold (build (MINUS_EXPR, index_type, index,
+ convert (sizetype, low_bound)));
+
+ /* If this is a constant index into a constant array,
+ just get the value from the array. Handle both the cases when
+ we have an explicit constructor and when our operand is a variable
+ that was declared const. */
+
+ if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
+ {
+ if (TREE_CODE (index) == INTEGER_CST
+ && TREE_INT_CST_HIGH (index) == 0)
+ {
+ tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
+
+ i = TREE_INT_CST_LOW (index);
+ while (elem && i--)
+ elem = TREE_CHAIN (elem);
+ if (elem)
+ return expand_expr_unaligned (fold (TREE_VALUE (elem)),
+ palign);
+ }
+ }
+
+ else if (optimize >= 1
+ && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
+ && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
+ && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
+ {
+ if (TREE_CODE (index) == INTEGER_CST)
+ {
+ tree init = DECL_INITIAL (array);
+
+ i = TREE_INT_CST_LOW (index);
+ if (TREE_CODE (init) == CONSTRUCTOR)
+ {
+ tree elem = CONSTRUCTOR_ELTS (init);
+
+ while (elem
+ && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
+ elem = TREE_CHAIN (elem);
+ if (elem)
+ return expand_expr_unaligned (fold (TREE_VALUE (elem)),
+ palign);
+ }
+ }
+ }
+ }
+
+ /* ... fall through ... */
+
+ case COMPONENT_REF:
+ case BIT_FIELD_REF:
+ /* If the operand is a CONSTRUCTOR, we can just extract the
+ appropriate field if it is present. Don't do this if we have
+ already written the data since we want to refer to that copy
+ and varasm.c assumes that's what we'll do. */
+ if (TREE_CODE (exp) != ARRAY_REF
+ && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
+ && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
+ {
+ tree elt;
+
+ for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
+ elt = TREE_CHAIN (elt))
+ if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
+ /* Note that unlike the case in expand_expr, we know this is
+ BLKmode and hence not an integer. */
+ return expand_expr_unaligned (TREE_VALUE (elt), palign);
+ }
+
+ {
+ enum machine_mode mode1;
+ int bitsize;
+ int bitpos;
+ tree offset;
+ int volatilep = 0;
+ int alignment;
+ int unsignedp;
+ tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
+ &mode1, &unsignedp, &volatilep,
+ &alignment);
+
+ /* If we got back the original object, something is wrong. Perhaps
+ we are evaluating an expression too early. In any event, don't
+ infinitely recurse. */
+ if (tem == exp)
+ abort ();
+
+ op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
+
+ /* If this is a constant, put it into a register if it is a
+ legitimate constant and OFFSET is 0 and memory if it isn't. */
+ if (CONSTANT_P (op0))
+ {
+ enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
+
+ if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
+ && offset == 0)
+ op0 = force_reg (inner_mode, op0);
+ else
+ op0 = validize_mem (force_const_mem (inner_mode, op0));
+ }
+
+ if (offset != 0)
+ {
+ rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
+
+ /* If this object is in a register, put it into memory.
+ This case can't occur in C, but can in Ada if we have
+ unchecked conversion of an expression from a scalar type to
+ an array or record type. */
+ if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
+ || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
+ {
+ rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
+
+ mark_temp_addr_taken (memloc);
+ emit_move_insn (memloc, op0);
+ op0 = memloc;
+ }
+
+ if (GET_CODE (op0) != MEM)
+ abort ();
+
+ if (GET_MODE (offset_rtx) != ptr_mode)
+ {
+#ifdef POINTERS_EXTEND_UNSIGNED
+ offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
+#else
+ offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
+#endif
+ }
+
+ op0 = change_address (op0, VOIDmode,
+ gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
+ force_reg (ptr_mode,
+ offset_rtx)));
+ }
+
+ /* Don't forget about volatility even if this is a bitfield. */
+ if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
+ {
+ op0 = copy_rtx (op0);
+ MEM_VOLATILE_P (op0) = 1;
+ }
+
+ /* Check the access. */
+ if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
+ {
+ rtx to;
+ int size;
+
+ to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
+ size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
+
+ /* Check the access right of the pointer. */
+ if (size > BITS_PER_UNIT)
+ emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
+ to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
+ TYPE_MODE (sizetype),
+ GEN_INT (MEMORY_USE_RO),
+ TYPE_MODE (integer_type_node));
+ }
+
+ /* Get a reference to just this component. */
+ op0 = change_address (op0, mode1,
+ plus_constant (XEXP (op0, 0),
+ (bitpos / BITS_PER_UNIT)));
+
+ MEM_ALIAS_SET (op0) = get_alias_set (exp);
+
+ /* Adjust the alignment in case the bit position is not
+ a multiple of the alignment of the inner object. */
+ while (bitpos % alignment != 0)
+ alignment >>= 1;
+
+ if (GET_CODE (XEXP (op0, 0)) == REG)
+ mark_reg_pointer (XEXP (op0, 0), alignment);
+
+ MEM_IN_STRUCT_P (op0) = 1;
+ MEM_VOLATILE_P (op0) |= volatilep;
+
+ *palign = alignment;
+ return op0;
+ }
+
+ default:
+ break;
+
+ }
+
+ return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
+}
+
/* Return the tree node and offset if a given argument corresponds to
a string constant. */
@@ -8771,6 +9136,15 @@ do_jump (exp, if_false_label, if_true_label)
do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
break;
+ case WITH_RECORD_EXPR:
+ /* Put the object on the placeholder list, recurse through our first
+ operand, and pop the list. */
+ placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
+ placeholder_list);
+ do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
+ placeholder_list = TREE_CHAIN (placeholder_list);
+ break;
+
#if 0
/* This is never less insns than evaluating the PLUS_EXPR followed by
a test and can be longer if the test is eliminated. */
@@ -9424,6 +9798,7 @@ do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
enum rtx_code signed_code, unsigned_code;
rtx if_false_label, if_true_label;
{
+ int align0, align1;
register rtx op0, op1;
register tree type;
register enum machine_mode mode;
@@ -9431,11 +9806,11 @@ do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
enum rtx_code code;
/* Don't crash if the comparison was erroneous. */
- op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
+ op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
return;
- op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
+ op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
type = TREE_TYPE (TREE_OPERAND (exp, 0));
mode = TYPE_MODE (type);
unsignedp = TREE_UNSIGNED (type);
@@ -9473,7 +9848,7 @@ do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
((mode == BLKmode)
? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
- TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT,
+ MIN (align0, align1) / BITS_PER_UNIT,
if_false_label, if_true_label);
}
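Both store_field and expand_expr_unaligned above halve an alignment until it is consistent with the bit position of the field being accessed (the "Find an alignment that is consistent with the bit position" loop). A standalone sketch of that computation, using invented example values rather than anything taken from the commit:

#include <stdio.h>

/* Halve a byte alignment until it is consistent with a bit position,
   mirroring the loop added in store_field's block-move path.  */
static int
consistent_align (int bitpos, int align, int bits_per_unit)
{
  while (bitpos % (align * bits_per_unit) != 0)
    align >>= 1;
  return align;
}

int
main (void)
{
  /* A field that starts 24 bits (3 bytes) into a 4-byte-aligned object
     can only be block-moved with 1-byte alignment:
     24 % 32 != 0, 24 % 16 != 0, 24 % 8 == 0.  */
  printf ("%d\n", consistent_align (24, 4, 8));   /* prints 1 */
  return 0;
}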