{
/* Strip any simple arithmetic from EXPR to see if it has an underlying
SAVE_EXPR. */
- while (TREE_CODE_CLASS (TREE_CODE (expr)) == '1'
- || (TREE_CODE_CLASS (TREE_CODE (expr)) == '2'
- && TREE_CONSTANT (TREE_OPERAND (expr, 1))))
- expr = TREE_OPERAND (expr, 0);
+ expr = skip_simple_arithmetic (expr);
if (TREE_CODE (expr) == SAVE_EXPR)
pending_sizes = tree_cons (NULL_TREE, expr, pending_sizes);
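/* Editor's illustration, not part of the patch: skip_simple_arithmetic
   peels the same wrappers the deleted loop matched, i.e. unary
   operators and binary operators whose second operand is constant.
   For a hypothetical EXPR of the form (SAVE_EXPR + 4) * 2,

     tree inner = skip_simple_arithmetic (expr);

   strips the MULT_EXPR and PLUS_EXPR layers, so the SAVE_EXPR test
   above still fires on the underlying node.  */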
int limit;
{
if (TREE_CODE (size) != INTEGER_CST
+ || TREE_OVERFLOW (size)
/* What we really want to say here is that the size can fit in a
host integer, but we know there's no way we'd find a mode for
this many bits, so there's no point in doing the precise test. */
|| compare_tree_int (size, 1000) > 0)
return BLKmode;
else
- return mode_for_size (TREE_INT_CST_LOW (size), class, limit);
+ return mode_for_size (tree_low_cst (size, 1), class, limit);
}
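/* Editor's sketch of the conversion idiom this patch applies
   throughout: check host_integerp before extracting, because
   tree_low_cst aborts on constants that do not fit a HOST_WIDE_INT.
   The helper name is hypothetical:

     static HOST_WIDE_INT
     bit_size_or_minus_one (tree size)
     {
       if (size == 0 || !host_integerp (size, 1))
         return -1;
       return tree_low_cst (size, 1);
     }

   Here the compare_tree_int (size, 1000) guard already makes the
   extraction safe without the precise fit test.  */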
/* Similar, but never return BLKmode; return the narrowest mode that
DECL_SIZE (decl) = TYPE_SIZE (type);
DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
}
- else
+ else if (DECL_SIZE_UNIT (decl) == 0)
DECL_SIZE_UNIT (decl)
= convert (sizetype, size_binop (CEIL_DIV_EXPR, DECL_SIZE (decl),
bitsize_unit_node));
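/* Editor's note on the CEIL_DIV_EXPR above: DECL_SIZE is in bits and
   DECL_SIZE_UNIT in bytes, so a 20-bit DECL_SIZE yields
   CEIL (20, 8) == 3 bytes on a target with 8-bit units.  */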
if (size != 0 && TREE_CODE (size) == INTEGER_CST
&& compare_tree_int (size, larger_than_size) > 0)
{
- unsigned int size_as_int = TREE_INT_CST_LOW (size);
+ int size_as_int = TREE_INT_CST_LOW (size);
if (compare_tree_int (size, size_as_int) == 0)
warning_with_decl (decl, "size of `%s' is %d bytes", size_as_int);
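/* Editor's sketch, in plain C with a hypothetical helper, of the
   round-trip test above; %d can only print values that survive the
   narrowing to int, which is also why size_as_int becomes plain int:

     static int
     prints_exactly (unsigned long long wide)
     {
       int narrow = (int) wide;
       return narrow >= 0 && (unsigned long long) narrow == wide;
     }

   With 32-bit int, 0x100000000 typically narrows to 0, the test
   fails, and the exact-size warning is skipped.  */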
if (DECL_BIT_FIELD_TYPE (field)
&& !integer_zerop (DECL_SIZE (field))
&& !integer_zerop (DECL_SIZE (rli->prev_field))
+ && host_integerp (DECL_SIZE (rli->prev_field), 0)
+ && host_integerp (TYPE_SIZE (type), 0)
&& simple_cst_equal (TYPE_SIZE (type),
- TYPE_SIZE (TREE_TYPE (rli->prev_field))) )
+ TYPE_SIZE (TREE_TYPE (rli->prev_field))))
{
/* We're in the middle of a run of equal type size fields; make
sure we realign if we run out of bits. (Not decl size,
type size!) */
- int bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
- tree type_size = TYPE_SIZE(TREE_TYPE(rli->prev_field));
+ HOST_WIDE_INT bitsize = tree_low_cst (DECL_SIZE (field), 0);
if (rli->remaining_in_alignment < bitsize)
{
/* out of bits; bump up to next 'word'. */
rli->offset = DECL_FIELD_OFFSET (rli->prev_field);
- rli->bitpos = size_binop (PLUS_EXPR,
- type_size,
- DECL_FIELD_BIT_OFFSET(rli->prev_field));
+ rli->bitpos
+ = size_binop (PLUS_EXPR, TYPE_SIZE (type),
+ DECL_FIELD_BIT_OFFSET (rli->prev_field));
rli->prev_field = field;
- rli->remaining_in_alignment = TREE_INT_CST_LOW (type_size);
+ rli->remaining_in_alignment
+ = tree_low_cst (TYPE_SIZE (type), 0);
}
+
rli->remaining_in_alignment -= bitsize;
}
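/* Editor's worked example for the run logic above, assuming 32-bit
   int and MS bitfield layout:

     struct s { int a : 20; int b : 20; };

   After a, remaining_in_alignment is 32 - 20 = 12.  Placing b needs
   20 bits and 20 > 12, so bitpos advances past a's full 32-bit type,
   remaining_in_alignment resets to 32, and the subtraction leaves 12
   again once b is placed.  */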
else
if (!integer_zerop (DECL_SIZE (rli->prev_field)))
{
- tree type_size = TYPE_SIZE(TREE_TYPE(rli->prev_field));
- rli->bitpos = size_binop (PLUS_EXPR,
- type_size,
- DECL_FIELD_BIT_OFFSET(rli->prev_field));
+ tree type_size = TYPE_SIZE (TREE_TYPE (rli->prev_field));
+
+ rli->bitpos
+ = size_binop (PLUS_EXPR, type_size,
+ DECL_FIELD_BIT_OFFSET (rli->prev_field));
}
else
- {
- /* We "use up" size zero fields; the code below should behave
- as if the prior field was not a bitfield. */
- prev_saved = NULL;
- }
+ /* We "use up" size zero fields; the code below should behave
+ as if the prior field was not a bitfield. */
+ prev_saved = NULL;
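/* Editor's example of the "use up" case, assuming MS layout:

     struct s { int a : 5; int : 0; short b : 3; };

   When b is placed, the zero-width previous field is consumed and
   prev_saved is cleared, so the code below treats b as if no
   bitfield preceded it.  */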
/* Cause a new bitfield to be captured, either this time (if
currently a bitfield) or next time we see one. */
if (!DECL_BIT_FIELD_TYPE(field)
|| integer_zerop (DECL_SIZE (field)))
- {
- rli->prev_field = NULL;
- }
+ rli->prev_field = NULL;
}
+
normalize_rli (rli);
}
if (!DECL_BIT_FIELD_TYPE (field)
|| ( prev_saved != NULL
? !simple_cst_equal (TYPE_SIZE (type),
- TYPE_SIZE (TREE_TYPE (prev_saved)))
- : !integer_zerop (DECL_SIZE (field)) ))
+ TYPE_SIZE (TREE_TYPE (prev_saved)))
+ : !integer_zerop (DECL_SIZE (field)) ))
{
- unsigned int type_align = 8; /* Never below 8 for compatibility */
+ /* Never smaller than a byte for compatibility. */
+ unsigned int type_align = BITS_PER_UNIT;
/* (When not a bitfield), we could be seeing a flex array (with
no DECL_SIZE). Since we won't be using remaining_in_alignment
until we see a bitfield (and come by here again) we just skip
calculating it. */
-
- if (DECL_SIZE (field) != NULL)
- rli->remaining_in_alignment
- = TREE_INT_CST_LOW (TYPE_SIZE(TREE_TYPE(field)))
- - TREE_INT_CST_LOW (DECL_SIZE (field));
+ if (DECL_SIZE (field) != NULL
+ && host_integerp (TYPE_SIZE (TREE_TYPE (field)), 0)
+ && host_integerp (DECL_SIZE (field), 0))
+ rli->remaining_in_alignment
+ = tree_low_cst (TYPE_SIZE (TREE_TYPE (field)), 0)
+ - tree_low_cst (DECL_SIZE (field), 0);
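/* Editor's note: the new guards matter for a flexible array member,
   e.g.

     struct t { int n; char data[]; };

   where DECL_SIZE of data is null, and for variable-sized fields,
   where host_integerp fails because the size is not an INTEGER_CST;
   in both cases tree_low_cst would abort, so the computation is
   simply skipped, as the comment above describes.  */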
/* Now align (conventionally) for the new type. */
if (!DECL_PACKED(field))
- type_align = MAX(TYPE_ALIGN (type), type_align);
+ type_align = MAX (TYPE_ALIGN (type), type_align);
if (prev_saved
&& DECL_BIT_FIELD_TYPE (prev_saved)
type_align = MIN (type_align, maximum_field_alignment);
rli->bitpos = round_up (rli->bitpos, type_align);
+
/* If we really aligned, don't allow subsequent bitfields
to undo that. */
rli->prev_field = NULL;
rli->offset
= size_binop (PLUS_EXPR, rli->offset, DECL_SIZE_UNIT (field));
rli->bitpos = bitsize_zero_node;
- rli->offset_align = MIN (rli->offset_align, DECL_ALIGN (field));
+ rli->offset_align = MIN (rli->offset_align, desired_align);
}
else
{
= size_binop (PLUS_EXPR, unpadded_size_unit, size_one_node);
/* Round the size up to be a multiple of the required alignment */
-#ifdef ROUND_TYPE_SIZE
- TYPE_SIZE (rli->t) = ROUND_TYPE_SIZE (rli->t, unpadded_size,
- TYPE_ALIGN (rli->t));
- TYPE_SIZE_UNIT (rli->t)
- = ROUND_TYPE_SIZE_UNIT (rli->t, unpadded_size_unit,
- TYPE_ALIGN (rli->t) / BITS_PER_UNIT);
-#else
TYPE_SIZE (rli->t) = round_up (unpadded_size, TYPE_ALIGN (rli->t));
TYPE_SIZE_UNIT (rli->t) = round_up (unpadded_size_unit,
TYPE_ALIGN (rli->t) / BITS_PER_UNIT);
-#endif
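/* Editor's arithmetic note: the surviving round_up calls are exactly
   the old #else defaults.  With assumed values, an unpadded size of
   37 bits at TYPE_ALIGN 16 rounds to CEIL (37, 16) * 16 == 48 bits,
   and the unit form, 5 bytes at 2-byte alignment, rounds to 6.  */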
if (warn_padded && TREE_CONSTANT (unpadded_size)
&& simple_cst_equal (unpadded_size, TYPE_SIZE (rli->t)) == 0)
rli->unpacked_align = MAX (TYPE_ALIGN (rli->t), rli->unpacked_align);
#endif
-#ifdef ROUND_TYPE_SIZE
- unpacked_size = ROUND_TYPE_SIZE (rli->t, TYPE_SIZE (rli->t),
- rli->unpacked_align);
-#else
unpacked_size = round_up (TYPE_SIZE (rli->t), rli->unpacked_align);
-#endif
-
if (simple_cst_equal (unpacked_size, TYPE_SIZE (rli->t)))
{
TYPE_PACKED (rli->t) = 0;
if (bitpos / BITS_PER_WORD
!= ((tree_low_cst (DECL_SIZE (field), 1) + bitpos - 1)
/ BITS_PER_WORD)
- /* But there is no problem if the field is entire words. */
- && tree_low_cst (DECL_SIZE (field), 1) % BITS_PER_WORD != 0)
+ /* But there is no problem if the field is entire words
+ or bigger than a word. */
+ && ! (tree_low_cst (DECL_SIZE (field), 1) % BITS_PER_WORD == 0
+ || compare_tree_int (DECL_SIZE (field), BITS_PER_WORD) > 0))
return;
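/* Editor's worked example, assuming BITS_PER_WORD == 32: a bitfield
   at bitpos 24 with DECL_SIZE 16 gives 24 / 32 == 0 but
   (16 + 24 - 1) / 32 == 1, so it straddles a word boundary.  The
   rewritten exemption also clears fields wider than a word, e.g.
   DECL_SIZE 40, which the old modulo-only test rejected.  */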
/* If this field is the whole struct, remember its mode so
if (TYPE_SIZE (type) != 0)
{
-#ifdef ROUND_TYPE_SIZE
- TYPE_SIZE (type)
- = ROUND_TYPE_SIZE (type, TYPE_SIZE (type), TYPE_ALIGN (type));
- TYPE_SIZE_UNIT (type)
- = ROUND_TYPE_SIZE_UNIT (type, TYPE_SIZE_UNIT (type),
- TYPE_ALIGN (type) / BITS_PER_UNIT);
-#else
TYPE_SIZE (type) = round_up (TYPE_SIZE (type), TYPE_ALIGN (type));
TYPE_SIZE_UNIT (type)
= round_up (TYPE_SIZE_UNIT (type), TYPE_ALIGN (type) / BITS_PER_UNIT);
-#endif
}
/* Evaluate nonconstant sizes only once, either now or as soon as safe. */
TYPE_ALIGN (type) = MAX (TYPE_ALIGN (element), BITS_PER_UNIT);
#endif
TYPE_USER_ALIGN (type) = TYPE_USER_ALIGN (element);
-
-#ifdef ROUND_TYPE_SIZE
- if (TYPE_SIZE (type) != 0)
- {
- tree tmp
- = ROUND_TYPE_SIZE (type, TYPE_SIZE (type), TYPE_ALIGN (type));
-
- /* If the rounding changed the size of the type, remove any
- pre-calculated TYPE_SIZE_UNIT. */
- if (simple_cst_equal (TYPE_SIZE (type), tmp) != 1)
- TYPE_SIZE_UNIT (type) = NULL;
-
- TYPE_SIZE (type) = tmp;
- }
-#endif
-
TYPE_MODE (type) = BLKmode;
if (TYPE_SIZE (type) != 0
#ifdef MEMBER_TYPE_FORCES_BLK
#endif
unsigned int alignment
= set_alignment ? set_alignment : SET_WORD_SIZE;
- int size_in_bits
- = (TREE_INT_CST_LOW (TYPE_MAX_VALUE (TYPE_DOMAIN (type)))
- - TREE_INT_CST_LOW (TYPE_MIN_VALUE (TYPE_DOMAIN (type))) + 1);
- int rounded_size
+ HOST_WIDE_INT size_in_bits
+ = (tree_low_cst (TYPE_MAX_VALUE (TYPE_DOMAIN (type)), 0)
+ - tree_low_cst (TYPE_MIN_VALUE (TYPE_DOMAIN (type)), 0) + 1);
+ HOST_WIDE_INT rounded_size
= ((size_in_bits + alignment - 1) / alignment) * alignment;
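/* Editor's worked example: a set over the domain [0, 19] with 32-bit
   alignment gives size_in_bits == 20 and rounded_size ==
   ((20 + 31) / 32) * 32 == 32, one word.  The HOST_WIDE_INT types
   keep the subtraction exact for domains wider than int.  */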
if (rounded_size > (int) alignment)