/* ... and its original value in bytes, specified via -fpack-struct=<value>. */
unsigned int initial_max_fld_align = TARGET_DEFAULT_PACK_STRUCT;
-/* Nonzero if all REFERENCE_TYPEs are internal and hence should be
- allocated in Pmode, not ptr_mode. Set only by internal_reference_types
- called only by a front end. */
+/* Nonzero if all REFERENCE_TYPEs are internal and hence should be allocated
+ in the address spaces' address_mode, not pointer_mode. Set only by
+ internal_reference_types called only by a front end. */
static int reference_types_internal = 0;
static tree self_referential_size (tree);
static GTY(()) tree pending_sizes;
-/* Show that REFERENCE_TYPES are internal and should be Pmode. Called only
- by front end. */
+/* Show that REFERENCE_TYPES are internal and should use address_mode.
+   Called only by a front end. */
void
internal_reference_types (void)
if (STRICT_ALIGNMENT)
warning (OPT_Wattributes, "packed attribute causes "
"inefficient alignment for %q+D", field);
- else
+ /* Don't warn if DECL_PACKED was set by the type. */
+ else if (!TYPE_PACKED (rli->t))
warning (OPT_Wattributes, "packed attribute is "
"unnecessary for %q+D", field);
}
/* No, we need to skip space before this field.
Bump the cumulative size to multiple of field alignment. */
- warning (OPT_Wpadded, "padding struct to align %q+D", field);
+ if (DECL_SOURCE_LOCATION (field) != BUILTINS_LOCATION)
+ warning (OPT_Wpadded, "padding struct to align %q+D", field);
/* If the alignment is still within offset_align, just align
the bit position. */
until we see a bitfield (and come by here again) we just skip
calculating it. */
if (DECL_SIZE (field) != NULL
- && host_integerp (TYPE_SIZE (TREE_TYPE (field)), 0)
- && host_integerp (DECL_SIZE (field), 0))
+ && host_integerp (TYPE_SIZE (TREE_TYPE (field)), 1)
+ && host_integerp (DECL_SIZE (field), 1))
{
- HOST_WIDE_INT bitsize = tree_low_cst (DECL_SIZE (field), 1);
- HOST_WIDE_INT typesize
+ unsigned HOST_WIDE_INT bitsize
+ = tree_low_cst (DECL_SIZE (field), 1);
+ unsigned HOST_WIDE_INT typesize
= tree_low_cst (TYPE_SIZE (TREE_TYPE (field)), 1);
if (typesize < bitsize)
= round_up_loc (input_location, unpadded_size_unit, TYPE_ALIGN_UNIT (rli->t));
if (TREE_CONSTANT (unpadded_size)
- && simple_cst_equal (unpadded_size, TYPE_SIZE (rli->t)) == 0)
+ && simple_cst_equal (unpadded_size, TYPE_SIZE (rli->t)) == 0
+ && input_location != BUILTINS_LOCATION)
warning (OPT_Wpadded, "padding struct size to alignment boundary");
if (warn_packed && TREE_CODE (rli->t) == RECORD_TYPE
/* A pointer might be MODE_PARTIAL_INT,
but ptrdiff_t must be integral. */
SET_TYPE_MODE (type, mode_for_size (POINTER_SIZE, MODE_INT, 0));
+ TYPE_PRECISION (type) = POINTER_SIZE;
break;
case FUNCTION_TYPE:
case POINTER_TYPE:
case REFERENCE_TYPE:
{
- enum machine_mode mode = ((TREE_CODE (type) == REFERENCE_TYPE
- && reference_types_internal)
- ? Pmode : TYPE_MODE (type));
-
- int nbits = GET_MODE_BITSIZE (mode);
+ enum machine_mode mode = TYPE_MODE (type);
+ if (TREE_CODE (type) == REFERENCE_TYPE && reference_types_internal)
+ {
+ addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (type));
+ mode = targetm.addr_space.address_mode (as);
+ }
- TYPE_SIZE (type) = bitsize_int (nbits);
+ TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (mode));
TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (mode));
TYPE_UNSIGNED (type) = 1;
- TYPE_PRECISION (type) = nbits;
+ TYPE_PRECISION (type) = GET_MODE_BITSIZE (mode);
}
break;
tree element_size = TYPE_SIZE (element);
tree length;
+      /* Make sure that an array of zero-sized elements is zero-sized
+ regardless of its extent. */
+ if (integer_zerop (element_size))
+ length = size_zero_node;
+
/* The initial subtraction should happen in the original type so
that (possible) negative values are handled appropriately. */
- length = size_binop (PLUS_EXPR, size_one_node,
- fold_convert (sizetype,
- fold_build2_loc (input_location,
- MINUS_EXPR,
- TREE_TYPE (lb),
- ub, lb)));
+ else
+ length
+ = size_binop (PLUS_EXPR, size_one_node,
+ fold_convert (sizetype,
+ fold_build2_loc (input_location,
+ MINUS_EXPR,
+ TREE_TYPE (lb),
+ ub, lb)));
TYPE_SIZE (type) = size_binop (MULT_EXPR, element_size,
fold_convert (bitsizetype,
change the result of vector_mode_supported_p and have_regs_of_mode
on a per-function basis. Thus the TYPE_MODE of a VECTOR_TYPE can
change on a per-function basis. */
-/* ??? Possibly a better solution is to run through all the types
+/* ??? Possibly a better solution is to run through all the types
referenced by a function and re-compute the TYPE_MODE once, rather
than make the TYPE_MODE macro call a function. */