summaryrefslogtreecommitdiffstats
path: root/gcc/tree.h
diff options
context:
space:
mode:
author: amylaar <amylaar@138bc75d-0d04-0410-961f-82ee72b054a4> 2005-05-11 12:24:43 +0000
committer: amylaar <amylaar@138bc75d-0d04-0410-961f-82ee72b054a4> 2005-05-11 12:24:43 +0000
commit: 23325b336bf95d02c81b57fabe142ae13a3cae2b (patch)
tree: 0c50bf11c578486bacdaffc10fe47cf62d14a852 /gcc/tree.h
parent: 4a012ac69efa2902026c2ee51370e07cceb5b445 (diff)
downloadppe42-gcc-23325b336bf95d02c81b57fabe142ae13a3cae2b.tar.gz
ppe42-gcc-23325b336bf95d02c81b57fabe142ae13a3cae2b.zip
PR middle-end/20371:
* tree.h (record_layout_info_s): New member prev_packed. * stor-layout.c (update_alignment_for_field): Fix comment about KNOWN_ALIGN. For MS bitfields, if we start a new run, make sure we start it properly aligned. (place_field): At the beginning of a record, pass 0 as KNOWN_ALIGN to update_alignment_for_field, and recompute it afterwards using the alignment of the record. When a packed bitfield precedes an MS bitfield, don't add padding at the end of the packed bitfield on behalf of the base type of the packed bit field. Don't adjust rli->bitpos at the end of an MS bitfield run if we already adjusted bitpos/offset for an alignment as large or larger than the bitfield type size. Take possible record alignment > BIGGEST_ALIGNMENT into account when calculating actual_align. Only put packed bit fields into rli->prev_field if they end up suitably aligned. Also set rli->remaining_in_alignment when we re-set rli->prev_field. Update rli->remaining_in_alignment when we have already started a run of bit fields and we process a packed bit field. git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@99574 138bc75d-0d04-0410-961f-82ee72b054a4
Diffstat (limited to 'gcc/tree.h')
-rw-r--r-- gcc/tree.h | 3
1 file changed, 3 insertions, 0 deletions
diff --git a/gcc/tree.h b/gcc/tree.h
index 4600d51fdfc..44a07afb676 100644
--- a/gcc/tree.h
+++ b/gcc/tree.h
@@ -3142,6 +3142,9 @@ typedef struct record_layout_info_s
tree pending_statics;
/* Bits remaining in the current alignment group */
int remaining_in_alignment;
+ /* True if prev_field was packed and we haven't found any non-packed
+ fields that we have put in the same alignment group. */
+ int prev_packed;
/* True if we've seen a packed field that didn't have normal
alignment anyway. */
int packed_maybe_necessary;
OpenPOWER on IntegriCloud