1 /* C-compiler utilities for types and variables storage layout
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1996, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
25 #include "coretypes.h"
37 #include "langhooks.h"
41 /* Data type for the expressions representing sizes of data types.
42 It is the first integer type laid out. */
43 tree sizetype_tab[(int) TYPE_KIND_LAST];
45 /* If nonzero, this is an upper limit on alignment of structure fields.
46 The value is measured in bits. */
47 unsigned int maximum_field_alignment = TARGET_DEFAULT_PACK_STRUCT * BITS_PER_UNIT;
48 /* ... and its original value in bytes, specified via -fpack-struct=<value>. */
49 unsigned int initial_max_fld_align = TARGET_DEFAULT_PACK_STRUCT;
51 /* Nonzero if all REFERENCE_TYPEs are internal and hence should be
52 allocated in Pmode, not ptr_mode. Set only by internal_reference_types
53 called only by a front end. */
54 static int reference_types_internal = 0;
56 static void finalize_record_size (record_layout_info);
57 static void finalize_type_size (tree);
58 static void place_union_field (record_layout_info, tree);
59 #if defined (PCC_BITFIELD_TYPE_MATTERS) || defined (BITFIELD_NBYTES_LIMITED)
60 static int excess_unit_span (HOST_WIDE_INT, HOST_WIDE_INT, HOST_WIDE_INT,
63 extern void debug_rli (record_layout_info);
65 /* SAVE_EXPRs for sizes of types and decls, waiting to be expanded. */
67 static GTY(()) tree pending_sizes;
69 /* Show that REFERENCE_TYPES are internal and should be Pmode. Called only
73 internal_reference_types (void)
75 reference_types_internal = 1;
78 /* Get a list of all the objects put on the pending sizes list. */
81 get_pending_sizes (void)
83 tree chain = pending_sizes;
89 /* Add EXPR to the pending sizes list. */
92 put_pending_size (tree expr)
94 /* Strip any simple arithmetic from EXPR to see if it has an underlying
96 expr = skip_simple_arithmetic (expr);
98 if (TREE_CODE (expr) == SAVE_EXPR)
99 pending_sizes = tree_cons (NULL_TREE, expr, pending_sizes);
102 /* Put a chain of objects into the pending sizes list, which must be
106 put_pending_sizes (tree chain)
108 gcc_assert (!pending_sizes);
109 pending_sizes = chain;
112 /* Given a size SIZE that may not be a constant, return a SAVE_EXPR
113 to serve as the actual size-expression for a type or decl. */
116 variable_size (tree size)
120 /* If the language-processor is to take responsibility for variable-sized
121 items (e.g., languages which have elaboration procedures like Ada),
122 just return SIZE unchanged. Likewise for self-referential sizes and
124 if (TREE_CONSTANT (size)
125 || lang_hooks.decls.global_bindings_p () < 0
126 || CONTAINS_PLACEHOLDER_P (size))
129 size = save_expr (size);
131 /* If an array with a variable number of elements is declared, and
132 the elements require destruction, we will emit a cleanup for the
133 array. That cleanup is run both on normal exit from the block
134 and in the exception-handler for the block. Normally, when code
135 is used in both ordinary code and in an exception handler it is
136 `unsaved', i.e., all SAVE_EXPRs are recalculated. However, we do
137 not wish to do that here; the array-size is the same in both
139 save = skip_simple_arithmetic (size);
141 if (cfun && cfun->x_dont_save_pending_sizes_p)
142 /* The front-end doesn't want us to keep a list of the expressions
143 that determine sizes for variable size objects. Trust it. */
146 if (lang_hooks.decls.global_bindings_p ())
148 if (TREE_CONSTANT (size))
149 error ("type size can%'t be explicitly evaluated");
151 error ("variable-size type declared outside of any function");
153 return size_one_node;
156 put_pending_size (save);
161 #ifndef MAX_FIXED_MODE_SIZE
162 #define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
165 /* Return the machine mode to use for a nonscalar of SIZE bits. The
166 mode must be in class CLASS, and have exactly that many value bits;
167 it may have padding as well. If LIMIT is nonzero, modes of wider
168 than MAX_FIXED_MODE_SIZE will not be used. */
171 mode_for_size (unsigned int size, enum mode_class class, int limit)
173 enum machine_mode mode;
175 if (limit && size > MAX_FIXED_MODE_SIZE)
178 /* Get the first mode which has this size, in the specified class. */
179 for (mode = GET_CLASS_NARROWEST_MODE (class); mode != VOIDmode;
180 mode = GET_MODE_WIDER_MODE (mode))
181 if (GET_MODE_PRECISION (mode) == size)
187 /* Similar, except passed a tree node. */
190 mode_for_size_tree (const_tree size, enum mode_class class, int limit)
192 unsigned HOST_WIDE_INT uhwi;
195 if (!host_integerp (size, 1))
197 uhwi = tree_low_cst (size, 1);
201 return mode_for_size (ui, class, limit);
204 /* Similar, but never return BLKmode; return the narrowest mode that
205 contains at least the requested number of value bits. */
208 smallest_mode_for_size (unsigned int size, enum mode_class class)
210 enum machine_mode mode;
212 /* Get the first mode which has at least this size, in the
214 for (mode = GET_CLASS_NARROWEST_MODE (class); mode != VOIDmode;
215 mode = GET_MODE_WIDER_MODE (mode))
216 if (GET_MODE_PRECISION (mode) >= size)
222 /* Find an integer mode of the exact same size, or BLKmode on failure. */
225 int_mode_for_mode (enum machine_mode mode)
227 switch (GET_MODE_CLASS (mode))
230 case MODE_PARTIAL_INT:
233 case MODE_COMPLEX_INT:
234 case MODE_COMPLEX_FLOAT:
236 case MODE_DECIMAL_FLOAT:
237 case MODE_VECTOR_INT:
238 case MODE_VECTOR_FLOAT:
239 mode = mode_for_size (GET_MODE_BITSIZE (mode), MODE_INT, 0);
246 /* ... fall through ... */
256 /* Return the alignment of MODE. This will be bounded by 1 and
257 BIGGEST_ALIGNMENT. */
260 get_mode_alignment (enum machine_mode mode)
262 return MIN (BIGGEST_ALIGNMENT, MAX (1, mode_base_align[mode]*BITS_PER_UNIT));
266 /* Subroutine of layout_decl: Force alignment required for the data type.
267 But if the decl itself wants greater alignment, don't override that. */
270 do_type_align (tree type, tree decl)
272 if (TYPE_ALIGN (type) > DECL_ALIGN (decl))
274 DECL_ALIGN (decl) = TYPE_ALIGN (type);
275 if (TREE_CODE (decl) == FIELD_DECL)
276 DECL_USER_ALIGN (decl) = TYPE_USER_ALIGN (type);
280 /* Set the size, mode and alignment of a ..._DECL node.
281 TYPE_DECL does need this for C++.
282 Note that LABEL_DECL and CONST_DECL nodes do not need this,
283 and FUNCTION_DECL nodes have them set up in a special (and simple) way.
284 Don't call layout_decl for them.
286 KNOWN_ALIGN is the amount of alignment we can assume this
287 decl has with no special effort. It is relevant only for FIELD_DECLs
288 and depends on the previous fields.
289 All that matters about KNOWN_ALIGN is which powers of 2 divide it.
290 If KNOWN_ALIGN is 0, it means, "as much alignment as you like":
291 the record will be aligned to suit. */
294 layout_decl (tree decl, unsigned int known_align)
296 tree type = TREE_TYPE (decl);
297 enum tree_code code = TREE_CODE (decl);
300 if (code == CONST_DECL)
303 gcc_assert (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL
304 || code == TYPE_DECL ||code == FIELD_DECL);
306 rtl = DECL_RTL_IF_SET (decl);
308 if (type == error_mark_node)
309 type = void_type_node;
311 /* Usually the size and mode come from the data type without change,
312 however, the front-end may set the explicit width of the field, so its
313 size may not be the same as the size of its type. This happens with
314 bitfields, of course (an `int' bitfield may be only 2 bits, say), but it
315 also happens with other fields. For example, the C++ front-end creates
316 zero-sized fields corresponding to empty base classes, and depends on
317 layout_type setting DECL_FIELD_BITPOS correctly for the field. Set the
318 size in bytes from the size in bits. If we have already set the mode,
319 don't set it again since we can be called twice for FIELD_DECLs. */
321 DECL_UNSIGNED (decl) = TYPE_UNSIGNED (type);
322 if (DECL_MODE (decl) == VOIDmode)
323 DECL_MODE (decl) = TYPE_MODE (type);
325 if (DECL_SIZE (decl) == 0)
327 DECL_SIZE (decl) = TYPE_SIZE (type);
328 DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
330 else if (DECL_SIZE_UNIT (decl) == 0)
331 DECL_SIZE_UNIT (decl)
332 = fold_convert (sizetype, size_binop (CEIL_DIV_EXPR, DECL_SIZE (decl),
335 if (code != FIELD_DECL)
336 /* For non-fields, update the alignment from the type. */
337 do_type_align (type, decl);
339 /* For fields, it's a bit more complicated... */
341 bool old_user_align = DECL_USER_ALIGN (decl);
342 bool zero_bitfield = false;
343 bool packed_p = DECL_PACKED (decl);
346 if (DECL_BIT_FIELD (decl))
348 DECL_BIT_FIELD_TYPE (decl) = type;
350 /* A zero-length bit-field affects the alignment of the next
351 field. In essence such bit-fields are not influenced by
352 any packing due to #pragma pack or attribute packed. */
353 if (integer_zerop (DECL_SIZE (decl))
354 && ! targetm.ms_bitfield_layout_p (DECL_FIELD_CONTEXT (decl)))
356 zero_bitfield = true;
358 #ifdef PCC_BITFIELD_TYPE_MATTERS
359 if (PCC_BITFIELD_TYPE_MATTERS)
360 do_type_align (type, decl);
364 #ifdef EMPTY_FIELD_BOUNDARY
365 if (EMPTY_FIELD_BOUNDARY > DECL_ALIGN (decl))
367 DECL_ALIGN (decl) = EMPTY_FIELD_BOUNDARY;
368 DECL_USER_ALIGN (decl) = 0;
374 /* See if we can use an ordinary integer mode for a bit-field.
375 Conditions are: a fixed size that is correct for another mode
376 and occupying a complete byte or bytes on proper boundary. */
377 if (TYPE_SIZE (type) != 0
378 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
379 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT)
381 enum machine_mode xmode
382 = mode_for_size_tree (DECL_SIZE (decl), MODE_INT, 1);
386 || known_align >= GET_MODE_ALIGNMENT (xmode)))
388 DECL_ALIGN (decl) = MAX (GET_MODE_ALIGNMENT (xmode),
390 DECL_MODE (decl) = xmode;
391 DECL_BIT_FIELD (decl) = 0;
395 /* Turn off DECL_BIT_FIELD if we won't need it set. */
396 if (TYPE_MODE (type) == BLKmode && DECL_MODE (decl) == BLKmode
397 && known_align >= TYPE_ALIGN (type)
398 && DECL_ALIGN (decl) >= TYPE_ALIGN (type))
399 DECL_BIT_FIELD (decl) = 0;
401 else if (packed_p && DECL_USER_ALIGN (decl))
402 /* Don't touch DECL_ALIGN. For other packed fields, go ahead and
403 round up; we'll reduce it again below. We want packing to
404 supersede USER_ALIGN inherited from the type, but defer to
405 alignment explicitly specified on the field decl. */;
407 do_type_align (type, decl);
409 /* If the field is of variable size, we can't misalign it since we
410 have no way to make a temporary to align the result. But this
411 isn't an issue if the decl is not addressable. Likewise if it
414 Note that do_type_align may set DECL_USER_ALIGN, so we need to
415 check old_user_align instead. */
418 && (DECL_NONADDRESSABLE_P (decl)
419 || DECL_SIZE_UNIT (decl) == 0
420 || TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST))
421 DECL_ALIGN (decl) = MIN (DECL_ALIGN (decl), BITS_PER_UNIT);
423 if (! packed_p && ! DECL_USER_ALIGN (decl))
425 /* Some targets (i.e. i386, VMS) limit struct field alignment
426 to a lower boundary than alignment of variables unless
427 it was overridden by attribute aligned. */
428 #ifdef BIGGEST_FIELD_ALIGNMENT
430 = MIN (DECL_ALIGN (decl), (unsigned) BIGGEST_FIELD_ALIGNMENT);
432 #ifdef ADJUST_FIELD_ALIGN
433 DECL_ALIGN (decl) = ADJUST_FIELD_ALIGN (decl, DECL_ALIGN (decl));
438 mfa = initial_max_fld_align * BITS_PER_UNIT;
440 mfa = maximum_field_alignment;
441 /* Should this be controlled by DECL_USER_ALIGN, too? */
443 DECL_ALIGN (decl) = MIN (DECL_ALIGN (decl), mfa);
446 /* Evaluate nonconstant size only once, either now or as soon as safe. */
447 if (DECL_SIZE (decl) != 0 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
448 DECL_SIZE (decl) = variable_size (DECL_SIZE (decl));
449 if (DECL_SIZE_UNIT (decl) != 0
450 && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST)
451 DECL_SIZE_UNIT (decl) = variable_size (DECL_SIZE_UNIT (decl));
453 /* If requested, warn about definitions of large data objects. */
455 && (code == VAR_DECL || code == PARM_DECL)
456 && ! DECL_EXTERNAL (decl))
458 tree size = DECL_SIZE_UNIT (decl);
460 if (size != 0 && TREE_CODE (size) == INTEGER_CST
461 && compare_tree_int (size, larger_than_size) > 0)
463 int size_as_int = TREE_INT_CST_LOW (size);
465 if (compare_tree_int (size, size_as_int) == 0)
466 warning (0, "size of %q+D is %d bytes", decl, size_as_int);
468 warning (0, "size of %q+D is larger than %wd bytes",
469 decl, larger_than_size);
473 /* If the RTL was already set, update its mode and mem attributes. */
476 PUT_MODE (rtl, DECL_MODE (decl));
477 SET_DECL_RTL (decl, 0);
478 set_mem_attributes (rtl, decl, 1);
479 SET_DECL_RTL (decl, rtl);
483 /* Given a VAR_DECL, PARM_DECL or RESULT_DECL, clears the results of
484 a previous call to layout_decl and calls it again. */
487 relayout_decl (tree decl)
489 DECL_SIZE (decl) = DECL_SIZE_UNIT (decl) = 0;
490 DECL_MODE (decl) = VOIDmode;
491 if (!DECL_USER_ALIGN (decl))
492 DECL_ALIGN (decl) = 0;
493 SET_DECL_RTL (decl, 0);
495 layout_decl (decl, 0);
498 /* Hook for a front-end function that can modify the record layout as needed
499 immediately before it is finalized. */
501 static void (*lang_adjust_rli) (record_layout_info) = 0;
504 set_lang_adjust_rli (void (*f) (record_layout_info))
509 /* Begin laying out type T, which may be a RECORD_TYPE, UNION_TYPE, or
510 QUAL_UNION_TYPE. Return a pointer to a struct record_layout_info which
511 is to be passed to all other layout functions for this record. It is the
512 responsibility of the caller to call `free' for the storage returned.
513 Note that garbage collection is not permitted until we finish laying
517 start_record_layout (tree t)
519 record_layout_info rli = xmalloc (sizeof (struct record_layout_info_s));
523 /* If the type has a minimum specified alignment (via an attribute
524 declaration, for example) use it -- otherwise, start with a
525 one-byte alignment. */
526 rli->record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (t));
527 rli->unpacked_align = rli->record_align;
528 rli->offset_align = MAX (rli->record_align, BIGGEST_ALIGNMENT);
530 #ifdef STRUCTURE_SIZE_BOUNDARY
531 /* Packed structures don't need to have minimum size. */
532 if (! TYPE_PACKED (t))
536 /* #pragma pack overrides STRUCTURE_SIZE_BOUNDARY. */
537 tmp = (unsigned) STRUCTURE_SIZE_BOUNDARY;
538 if (maximum_field_alignment != 0)
539 tmp = MIN (tmp, maximum_field_alignment);
540 rli->record_align = MAX (rli->record_align, tmp);
544 rli->offset = size_zero_node;
545 rli->bitpos = bitsize_zero_node;
547 rli->pending_statics = 0;
548 rli->packed_maybe_necessary = 0;
549 rli->remaining_in_alignment = 0;
554 /* These four routines perform computations that convert between
555 the offset/bitpos forms and byte and bit offsets. */
558 bit_from_pos (tree offset, tree bitpos)
560 return size_binop (PLUS_EXPR, bitpos,
561 size_binop (MULT_EXPR,
562 fold_convert (bitsizetype, offset),
567 byte_from_pos (tree offset, tree bitpos)
569 return size_binop (PLUS_EXPR, offset,
570 fold_convert (sizetype,
571 size_binop (TRUNC_DIV_EXPR, bitpos,
572 bitsize_unit_node)));
576 pos_from_bit (tree *poffset, tree *pbitpos, unsigned int off_align,
579 *poffset = size_binop (MULT_EXPR,
580 fold_convert (sizetype,
581 size_binop (FLOOR_DIV_EXPR, pos,
582 bitsize_int (off_align))),
583 size_int (off_align / BITS_PER_UNIT));
584 *pbitpos = size_binop (FLOOR_MOD_EXPR, pos, bitsize_int (off_align));
587 /* Given a pointer to bit and byte offsets and an offset alignment,
588 normalize the offsets so they are within the alignment. */
591 normalize_offset (tree *poffset, tree *pbitpos, unsigned int off_align)
593 /* If the bit position is now larger than it should be, adjust it
595 if (compare_tree_int (*pbitpos, off_align) >= 0)
597 tree extra_aligns = size_binop (FLOOR_DIV_EXPR, *pbitpos,
598 bitsize_int (off_align));
601 = size_binop (PLUS_EXPR, *poffset,
602 size_binop (MULT_EXPR,
603 fold_convert (sizetype, extra_aligns),
604 size_int (off_align / BITS_PER_UNIT)));
607 = size_binop (FLOOR_MOD_EXPR, *pbitpos, bitsize_int (off_align));
611 /* Print debugging information about the information in RLI. */
614 debug_rli (record_layout_info rli)
616 print_node_brief (stderr, "type", rli->t, 0);
617 print_node_brief (stderr, "\noffset", rli->offset, 0);
618 print_node_brief (stderr, " bitpos", rli->bitpos, 0);
620 fprintf (stderr, "\naligns: rec = %u, unpack = %u, off = %u\n",
621 rli->record_align, rli->unpacked_align,
624 /* The ms_struct code is the only that uses this. */
625 if (targetm.ms_bitfield_layout_p (rli->t))
626 fprintf (stderr, "remaining in alignment = %u\n", rli->remaining_in_alignment);
628 if (rli->packed_maybe_necessary)
629 fprintf (stderr, "packed may be necessary\n");
631 if (rli->pending_statics)
633 fprintf (stderr, "pending statics:\n");
634 debug_tree (rli->pending_statics);
638 /* Given an RLI with a possibly-incremented BITPOS, adjust OFFSET and
639 BITPOS if necessary to keep BITPOS below OFFSET_ALIGN. */
642 normalize_rli (record_layout_info rli)
644 normalize_offset (&rli->offset, &rli->bitpos, rli->offset_align);
647 /* Returns the size in bytes allocated so far. */
650 rli_size_unit_so_far (record_layout_info rli)
652 return byte_from_pos (rli->offset, rli->bitpos);
655 /* Returns the size in bits allocated so far. */
658 rli_size_so_far (record_layout_info rli)
660 return bit_from_pos (rli->offset, rli->bitpos);
663 /* FIELD is about to be added to RLI->T. The alignment (in bits) of
664 the next available location within the record is given by KNOWN_ALIGN.
665 Update the variable alignment fields in RLI, and return the alignment
666 to give the FIELD. */
669 update_alignment_for_field (record_layout_info rli, tree field,
670 unsigned int known_align)
672 /* The alignment required for FIELD. */
673 unsigned int desired_align;
674 /* The type of this field. */
675 tree type = TREE_TYPE (field);
676 /* True if the field was explicitly aligned by the user. */
680 /* Do not attempt to align an ERROR_MARK node */
681 if (TREE_CODE (type) == ERROR_MARK)
684 /* Lay out the field so we know what alignment it needs. */
685 layout_decl (field, known_align);
686 desired_align = DECL_ALIGN (field);
687 user_align = DECL_USER_ALIGN (field);
689 is_bitfield = (type != error_mark_node
690 && DECL_BIT_FIELD_TYPE (field)
691 && ! integer_zerop (TYPE_SIZE (type)));
693 /* Record must have at least as much alignment as any field.
694 Otherwise, the alignment of the field within the record is
696 if (targetm.ms_bitfield_layout_p (rli->t))
698 /* Here, the alignment of the underlying type of a bitfield can
699 affect the alignment of a record; even a zero-sized field
700 can do this. The alignment should be to the alignment of
701 the type, except that for zero-size bitfields this only
702 applies if there was an immediately prior, nonzero-size
703 bitfield. (That's the way it is, experimentally.) */
704 if ((!is_bitfield && !DECL_PACKED (field))
705 || (!integer_zerop (DECL_SIZE (field))
706 ? !DECL_PACKED (field)
708 && DECL_BIT_FIELD_TYPE (rli->prev_field)
709 && ! integer_zerop (DECL_SIZE (rli->prev_field)))))
711 unsigned int type_align = TYPE_ALIGN (type);
712 type_align = MAX (type_align, desired_align);
713 if (maximum_field_alignment != 0)
714 type_align = MIN (type_align, maximum_field_alignment);
715 rli->record_align = MAX (rli->record_align, type_align);
716 rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
719 #ifdef PCC_BITFIELD_TYPE_MATTERS
720 else if (is_bitfield && PCC_BITFIELD_TYPE_MATTERS)
722 /* Named bit-fields cause the entire structure to have the
723 alignment implied by their type. Some targets also apply the same
724 rules to unnamed bitfields. */
725 if (DECL_NAME (field) != 0
726 || targetm.align_anon_bitfield ())
728 unsigned int type_align = TYPE_ALIGN (type);
730 #ifdef ADJUST_FIELD_ALIGN
731 if (! TYPE_USER_ALIGN (type))
732 type_align = ADJUST_FIELD_ALIGN (field, type_align);
735 /* Targets might chose to handle unnamed and hence possibly
736 zero-width bitfield. Those are not influenced by #pragmas
737 or packed attributes. */
738 if (integer_zerop (DECL_SIZE (field)))
740 if (initial_max_fld_align)
741 type_align = MIN (type_align,
742 initial_max_fld_align * BITS_PER_UNIT);
744 else if (maximum_field_alignment != 0)
745 type_align = MIN (type_align, maximum_field_alignment);
746 else if (DECL_PACKED (field))
747 type_align = MIN (type_align, BITS_PER_UNIT);
749 /* The alignment of the record is increased to the maximum
750 of the current alignment, the alignment indicated on the
751 field (i.e., the alignment specified by an __aligned__
752 attribute), and the alignment indicated by the type of
754 rli->record_align = MAX (rli->record_align, desired_align);
755 rli->record_align = MAX (rli->record_align, type_align);
758 rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
759 user_align |= TYPE_USER_ALIGN (type);
765 rli->record_align = MAX (rli->record_align, desired_align);
766 rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
769 TYPE_USER_ALIGN (rli->t) |= user_align;
771 return desired_align;
774 /* Called from place_field to handle unions. */
777 place_union_field (record_layout_info rli, tree field)
779 update_alignment_for_field (rli, field, /*known_align=*/0);
781 DECL_FIELD_OFFSET (field) = size_zero_node;
782 DECL_FIELD_BIT_OFFSET (field) = bitsize_zero_node;
783 SET_DECL_OFFSET_ALIGN (field, BIGGEST_ALIGNMENT);
785 /* If this is an ERROR_MARK return *after* having set the
786 field at the start of the union. This helps when parsing
788 if (TREE_CODE (TREE_TYPE (field)) == ERROR_MARK)
791 /* We assume the union's size will be a multiple of a byte so we don't
792 bother with BITPOS. */
793 if (TREE_CODE (rli->t) == UNION_TYPE)
794 rli->offset = size_binop (MAX_EXPR, rli->offset, DECL_SIZE_UNIT (field));
795 else if (TREE_CODE (rli->t) == QUAL_UNION_TYPE)
796 rli->offset = fold_build3 (COND_EXPR, sizetype,
797 DECL_QUALIFIER (field),
798 DECL_SIZE_UNIT (field), rli->offset);
801 #if defined (PCC_BITFIELD_TYPE_MATTERS) || defined (BITFIELD_NBYTES_LIMITED)
802 /* A bitfield of SIZE with a required access alignment of ALIGN is allocated
803 at BYTE_OFFSET / BIT_OFFSET. Return nonzero if the field would span more
804 units of alignment than the underlying TYPE. */
806 excess_unit_span (HOST_WIDE_INT byte_offset, HOST_WIDE_INT bit_offset,
807 HOST_WIDE_INT size, HOST_WIDE_INT align, tree type)
809 /* Note that the calculation of OFFSET might overflow; we calculate it so
810 that we still get the right result as long as ALIGN is a power of two. */
811 unsigned HOST_WIDE_INT offset = byte_offset * BITS_PER_UNIT + bit_offset;
813 offset = offset % align;
814 return ((offset + size + align - 1) / align
815 > ((unsigned HOST_WIDE_INT) tree_low_cst (TYPE_SIZE (type), 1)
820 /* RLI contains information about the layout of a RECORD_TYPE. FIELD
821 is a FIELD_DECL to be added after those fields already present in
822 T. (FIELD is not actually added to the TYPE_FIELDS list here;
823 callers that desire that behavior must manually perform that step.) */
826 place_field (record_layout_info rli, tree field)
828 /* The alignment required for FIELD. */
829 unsigned int desired_align;
830 /* The alignment FIELD would have if we just dropped it into the
831 record as it presently stands. */
832 unsigned int known_align;
833 unsigned int actual_align;
834 /* The type of this field. */
835 tree type = TREE_TYPE (field);
837 gcc_assert (TREE_CODE (field) != ERROR_MARK);
839 /* If FIELD is static, then treat it like a separate variable, not
840 really like a structure field. If it is a FUNCTION_DECL, it's a
841 method. In both cases, all we do is lay out the decl, and we do
842 it *after* the record is laid out. */
843 if (TREE_CODE (field) == VAR_DECL)
845 rli->pending_statics = tree_cons (NULL_TREE, field,
846 rli->pending_statics);
850 /* Enumerators and enum types which are local to this class need not
851 be laid out. Likewise for initialized constant fields. */
852 else if (TREE_CODE (field) != FIELD_DECL)
855 /* Unions are laid out very differently than records, so split
856 that code off to another function. */
857 else if (TREE_CODE (rli->t) != RECORD_TYPE)
859 place_union_field (rli, field);
863 else if (TREE_CODE (type) == ERROR_MARK)
865 /* Place this field at the current allocation position, so we
866 maintain monotonicity. */
867 DECL_FIELD_OFFSET (field) = rli->offset;
868 DECL_FIELD_BIT_OFFSET (field) = rli->bitpos;
869 SET_DECL_OFFSET_ALIGN (field, rli->offset_align);
873 /* Work out the known alignment so far. Note that A & (-A) is the
874 value of the least-significant bit in A that is one. */
875 if (! integer_zerop (rli->bitpos))
876 known_align = (tree_low_cst (rli->bitpos, 1)
877 & - tree_low_cst (rli->bitpos, 1));
878 else if (integer_zerop (rli->offset))
880 else if (host_integerp (rli->offset, 1))
881 known_align = (BITS_PER_UNIT
882 * (tree_low_cst (rli->offset, 1)
883 & - tree_low_cst (rli->offset, 1)));
885 known_align = rli->offset_align;
887 desired_align = update_alignment_for_field (rli, field, known_align);
888 if (known_align == 0)
889 known_align = MAX (BIGGEST_ALIGNMENT, rli->record_align);
891 if (warn_packed && DECL_PACKED (field))
893 if (known_align >= TYPE_ALIGN (type))
895 if (TYPE_ALIGN (type) > desired_align)
897 if (STRICT_ALIGNMENT)
898 warning (OPT_Wattributes, "packed attribute causes "
899 "inefficient alignment for %q+D", field);
901 warning (OPT_Wattributes, "packed attribute is "
902 "unnecessary for %q+D", field);
906 rli->packed_maybe_necessary = 1;
909 /* Does this field automatically have alignment it needs by virtue
910 of the fields that precede it and the record's own alignment?
911 We already align ms_struct fields, so don't re-align them. */
912 if (known_align < desired_align
913 && !targetm.ms_bitfield_layout_p (rli->t))
915 /* No, we need to skip space before this field.
916 Bump the cumulative size to multiple of field alignment. */
918 warning (OPT_Wpadded, "padding struct to align %q+D", field);
920 /* If the alignment is still within offset_align, just align
922 if (desired_align < rli->offset_align)
923 rli->bitpos = round_up (rli->bitpos, desired_align);
926 /* First adjust OFFSET by the partial bits, then align. */
928 = size_binop (PLUS_EXPR, rli->offset,
929 fold_convert (sizetype,
930 size_binop (CEIL_DIV_EXPR, rli->bitpos,
931 bitsize_unit_node)));
932 rli->bitpos = bitsize_zero_node;
934 rli->offset = round_up (rli->offset, desired_align / BITS_PER_UNIT);
937 if (! TREE_CONSTANT (rli->offset))
938 rli->offset_align = desired_align;
942 /* Handle compatibility with PCC. Note that if the record has any
943 variable-sized fields, we need not worry about compatibility. */
944 #ifdef PCC_BITFIELD_TYPE_MATTERS
945 if (PCC_BITFIELD_TYPE_MATTERS
946 && ! targetm.ms_bitfield_layout_p (rli->t)
947 && TREE_CODE (field) == FIELD_DECL
948 && type != error_mark_node
949 && DECL_BIT_FIELD (field)
950 && ! DECL_PACKED (field)
951 && maximum_field_alignment == 0
952 && ! integer_zerop (DECL_SIZE (field))
953 && host_integerp (DECL_SIZE (field), 1)
954 && host_integerp (rli->offset, 1)
955 && host_integerp (TYPE_SIZE (type), 1))
957 unsigned int type_align = TYPE_ALIGN (type);
958 tree dsize = DECL_SIZE (field);
959 HOST_WIDE_INT field_size = tree_low_cst (dsize, 1);
960 HOST_WIDE_INT offset = tree_low_cst (rli->offset, 0);
961 HOST_WIDE_INT bit_offset = tree_low_cst (rli->bitpos, 0);
963 #ifdef ADJUST_FIELD_ALIGN
964 if (! TYPE_USER_ALIGN (type))
965 type_align = ADJUST_FIELD_ALIGN (field, type_align);
968 /* A bit field may not span more units of alignment of its type
969 than its type itself. Advance to next boundary if necessary. */
970 if (excess_unit_span (offset, bit_offset, field_size, type_align, type))
971 rli->bitpos = round_up (rli->bitpos, type_align);
973 TYPE_USER_ALIGN (rli->t) |= TYPE_USER_ALIGN (type);
977 #ifdef BITFIELD_NBYTES_LIMITED
978 if (BITFIELD_NBYTES_LIMITED
979 && ! targetm.ms_bitfield_layout_p (rli->t)
980 && TREE_CODE (field) == FIELD_DECL
981 && type != error_mark_node
982 && DECL_BIT_FIELD_TYPE (field)
983 && ! DECL_PACKED (field)
984 && ! integer_zerop (DECL_SIZE (field))
985 && host_integerp (DECL_SIZE (field), 1)
986 && host_integerp (rli->offset, 1)
987 && host_integerp (TYPE_SIZE (type), 1))
989 unsigned int type_align = TYPE_ALIGN (type);
990 tree dsize = DECL_SIZE (field);
991 HOST_WIDE_INT field_size = tree_low_cst (dsize, 1);
992 HOST_WIDE_INT offset = tree_low_cst (rli->offset, 0);
993 HOST_WIDE_INT bit_offset = tree_low_cst (rli->bitpos, 0);
995 #ifdef ADJUST_FIELD_ALIGN
996 if (! TYPE_USER_ALIGN (type))
997 type_align = ADJUST_FIELD_ALIGN (field, type_align);
1000 if (maximum_field_alignment != 0)
1001 type_align = MIN (type_align, maximum_field_alignment);
1002 /* ??? This test is opposite the test in the containing if
1003 statement, so this code is unreachable currently. */
1004 else if (DECL_PACKED (field))
1005 type_align = MIN (type_align, BITS_PER_UNIT);
1007 /* A bit field may not span the unit of alignment of its type.
1008 Advance to next boundary if necessary. */
1009 if (excess_unit_span (offset, bit_offset, field_size, type_align, type))
1010 rli->bitpos = round_up (rli->bitpos, type_align);
1012 TYPE_USER_ALIGN (rli->t) |= TYPE_USER_ALIGN (type);
1016 /* See the docs for TARGET_MS_BITFIELD_LAYOUT_P for details.
1018 When a bit field is inserted into a packed record, the whole
1019 size of the underlying type is used by one or more same-size
1020 adjacent bitfields. (That is, if its long:3, 32 bits is
1021 used in the record, and any additional adjacent long bitfields are
1022 packed into the same chunk of 32 bits. However, if the size
1023 changes, a new field of that size is allocated.) In an unpacked
1024 record, this is the same as using alignment, but not equivalent
1027 Note: for compatibility, we use the type size, not the type alignment
1028 to determine alignment, since that matches the documentation */
1030 if (targetm.ms_bitfield_layout_p (rli->t))
1032 tree prev_saved = rli->prev_field;
1033 tree prev_type = prev_saved ? DECL_BIT_FIELD_TYPE (prev_saved) : NULL;
1035 /* This is a bitfield if it exists. */
1036 if (rli->prev_field)
1038 /* If both are bitfields, nonzero, and the same size, this is
1039 the middle of a run. Zero declared size fields are special
1040 and handled as "end of run". (Note: it's nonzero declared
1041 size, but equal type sizes!) (Since we know that both
1042 the current and previous fields are bitfields by the
1043 time we check it, DECL_SIZE must be present for both.) */
1044 if (DECL_BIT_FIELD_TYPE (field)
1045 && !integer_zerop (DECL_SIZE (field))
1046 && !integer_zerop (DECL_SIZE (rli->prev_field))
1047 && host_integerp (DECL_SIZE (rli->prev_field), 0)
1048 && host_integerp (TYPE_SIZE (type), 0)
1049 && simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (prev_type)))
1051 /* We're in the middle of a run of equal type size fields; make
1052 sure we realign if we run out of bits. (Not decl size,
1054 HOST_WIDE_INT bitsize = tree_low_cst (DECL_SIZE (field), 1);
1056 if (rli->remaining_in_alignment < bitsize)
1058 HOST_WIDE_INT typesize = tree_low_cst (TYPE_SIZE (type), 1);
1060 /* out of bits; bump up to next 'word'. */
1062 = size_binop (PLUS_EXPR, rli->bitpos,
1063 bitsize_int (rli->remaining_in_alignment));
1064 rli->prev_field = field;
1065 if (typesize < bitsize)
1066 rli->remaining_in_alignment = 0;
1068 rli->remaining_in_alignment = typesize - bitsize;
1071 rli->remaining_in_alignment -= bitsize;
1075 /* End of a run: if leaving a run of bitfields of the same type
1076 size, we have to "use up" the rest of the bits of the type
1079 Compute the new position as the sum of the size for the prior
1080 type and where we first started working on that type.
1081 Note: since the beginning of the field was aligned then
1082 of course the end will be too. No round needed. */
1084 if (!integer_zerop (DECL_SIZE (rli->prev_field)))
1087 = size_binop (PLUS_EXPR, rli->bitpos,
1088 bitsize_int (rli->remaining_in_alignment));
1091 /* We "use up" size zero fields; the code below should behave
1092 as if the prior field was not a bitfield. */
1095 /* Cause a new bitfield to be captured, either this time (if
1096 currently a bitfield) or next time we see one. */
1097 if (!DECL_BIT_FIELD_TYPE(field)
1098 || integer_zerop (DECL_SIZE (field)))
1099 rli->prev_field = NULL;
1102 normalize_rli (rli);
1105 /* If we're starting a new run of same size type bitfields
1106 (or a run of non-bitfields), set up the "first of the run"
1109 That is, if the current field is not a bitfield, or if there
1110 was a prior bitfield the type sizes differ, or if there wasn't
1111 a prior bitfield the size of the current field is nonzero.
1113 Note: we must be sure to test ONLY the type size if there was
1114 a prior bitfield and ONLY for the current field being zero if
1117 if (!DECL_BIT_FIELD_TYPE (field)
1118 || (prev_saved != NULL
1119 ? !simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (prev_type))
1120 : !integer_zerop (DECL_SIZE (field)) ))
1122 /* Never smaller than a byte for compatibility. */
1123 unsigned int type_align = BITS_PER_UNIT;
1125 /* (When not a bitfield), we could be seeing a flex array (with
1126 no DECL_SIZE). Since we won't be using remaining_in_alignment
1127 until we see a bitfield (and come by here again) we just skip
1129 if (DECL_SIZE (field) != NULL
1130 && host_integerp (TYPE_SIZE (TREE_TYPE (field)), 0)
1131 && host_integerp (DECL_SIZE (field), 0))
1133 HOST_WIDE_INT bitsize = tree_low_cst (DECL_SIZE (field), 1);
1134 HOST_WIDE_INT typesize
1135 = tree_low_cst (TYPE_SIZE (TREE_TYPE (field)), 1);
1137 if (typesize < bitsize)
1138 rli->remaining_in_alignment = 0;
1140 rli->remaining_in_alignment = typesize - bitsize;
1143 /* Now align (conventionally) for the new type. */
1144 type_align = TYPE_ALIGN (TREE_TYPE (field));
1146 if (maximum_field_alignment != 0)
1147 type_align = MIN (type_align, maximum_field_alignment);
1149 rli->bitpos = round_up (rli->bitpos, type_align);
1151 /* If we really aligned, don't allow subsequent bitfields
1153 rli->prev_field = NULL;
1157 /* Offset so far becomes the position of this field after normalizing. */
1158 normalize_rli (rli);
1159 DECL_FIELD_OFFSET (field) = rli->offset;
1160 DECL_FIELD_BIT_OFFSET (field) = rli->bitpos;
1161 SET_DECL_OFFSET_ALIGN (field, rli->offset_align);
1163 /* If this field ended up more aligned than we thought it would be (we
1164 approximate this by seeing if its position changed), lay out the field
1165 again; perhaps we can use an integral mode for it now. */
1166 if (! integer_zerop (DECL_FIELD_BIT_OFFSET (field)))
1167 actual_align = (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1168 & - tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1));
1169 else if (integer_zerop (DECL_FIELD_OFFSET (field)))
1170 actual_align = MAX (BIGGEST_ALIGNMENT, rli->record_align);
1171 else if (host_integerp (DECL_FIELD_OFFSET (field), 1))
1172 actual_align = (BITS_PER_UNIT
1173 * (tree_low_cst (DECL_FIELD_OFFSET (field), 1)
1174 & - tree_low_cst (DECL_FIELD_OFFSET (field), 1)));
1176 actual_align = DECL_OFFSET_ALIGN (field);
1177 /* ACTUAL_ALIGN is still the actual alignment *within the record* .
1178 store / extract bit field operations will check the alignment of the
1179 record against the mode of bit fields. */
1181 if (known_align != actual_align)
1182 layout_decl (field, actual_align);
1184 if (rli->prev_field == NULL && DECL_BIT_FIELD_TYPE (field))
1185 rli->prev_field = field;
1187 /* Now add size of this field to the size of the record. If the size is
1188 not constant, treat the field as being a multiple of bytes and just
1189 adjust the offset, resetting the bit position. Otherwise, apportion the
1190 size amongst the bit position and offset. First handle the case of an
1191 unspecified size, which can happen when we have an invalid nested struct
1192 definition, such as struct j { struct j { int i; } }. The error message
1193 is printed in finish_struct. */
1194 if (DECL_SIZE (field) == 0)
1196 else if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
1197 || TREE_OVERFLOW (DECL_SIZE (field)))
1200 = size_binop (PLUS_EXPR, rli->offset,
1201 fold_convert (sizetype,
1202 size_binop (CEIL_DIV_EXPR, rli->bitpos,
1203 bitsize_unit_node)));
1205 = size_binop (PLUS_EXPR, rli->offset, DECL_SIZE_UNIT (field));
1206 rli->bitpos = bitsize_zero_node;
1207 rli->offset_align = MIN (rli->offset_align, desired_align);
1209 else if (targetm.ms_bitfield_layout_p (rli->t))
1211 rli->bitpos = size_binop (PLUS_EXPR, rli->bitpos, DECL_SIZE (field));
1213 /* If we ended a bitfield before the full length of the type then
1214 pad the struct out to the full length of the last type. */
1215 if ((TREE_CHAIN (field) == NULL
1216 || TREE_CODE (TREE_CHAIN (field)) != FIELD_DECL)
1217 && DECL_BIT_FIELD_TYPE (field)
1218 && !integer_zerop (DECL_SIZE (field)))
1219 rli->bitpos = size_binop (PLUS_EXPR, rli->bitpos,
1220 bitsize_int (rli->remaining_in_alignment));
1222 normalize_rli (rli);
1226 rli->bitpos = size_binop (PLUS_EXPR, rli->bitpos, DECL_SIZE (field));
1227 normalize_rli (rli);
1231 /* Assuming that all the fields have been laid out, this function uses
1232 RLI to compute the final TYPE_SIZE, TYPE_ALIGN, etc. for the type
1233 indicated by RLI. */
1236 finalize_record_size (record_layout_info rli)
1238 tree unpadded_size, unpadded_size_unit;
1240 /* Now we want just byte and bit offsets, so set the offset alignment
1241 to be a byte and then normalize. */
1242 rli->offset_align = BITS_PER_UNIT;
1243 normalize_rli (rli);
1245 /* Determine the desired alignment. */
1246 #ifdef ROUND_TYPE_ALIGN
1247 TYPE_ALIGN (rli->t) = ROUND_TYPE_ALIGN (rli->t, TYPE_ALIGN (rli->t),
1250 TYPE_ALIGN (rli->t) = MAX (TYPE_ALIGN (rli->t), rli->record_align);
1253 /* Compute the size so far. Be sure to allow for extra bits in the
1254 size in bytes. We have guaranteed above that it will be no more
1255 than a single byte. */
1256 unpadded_size = rli_size_so_far (rli);
1257 unpadded_size_unit = rli_size_unit_so_far (rli);
1258 if (! integer_zerop (rli->bitpos))
1260 = size_binop (PLUS_EXPR, unpadded_size_unit, size_one_node);
1262 /* Round the size up to be a multiple of the required alignment. */
1263 TYPE_SIZE (rli->t) = round_up (unpadded_size, TYPE_ALIGN (rli->t));
1264 TYPE_SIZE_UNIT (rli->t)
1265 = round_up (unpadded_size_unit, TYPE_ALIGN_UNIT (rli->t));
1267 if (TREE_CONSTANT (unpadded_size)
1268 && simple_cst_equal (unpadded_size, TYPE_SIZE (rli->t)) == 0)
1269 warning (OPT_Wpadded, "padding struct size to alignment boundary");
1271 if (warn_packed && TREE_CODE (rli->t) == RECORD_TYPE
1272 && TYPE_PACKED (rli->t) && ! rli->packed_maybe_necessary
1273 && TREE_CONSTANT (unpadded_size))
1277 #ifdef ROUND_TYPE_ALIGN
1279 = ROUND_TYPE_ALIGN (rli->t, TYPE_ALIGN (rli->t), rli->unpacked_align);
1281 rli->unpacked_align = MAX (TYPE_ALIGN (rli->t), rli->unpacked_align);
1284 unpacked_size = round_up (TYPE_SIZE (rli->t), rli->unpacked_align);
1285 if (simple_cst_equal (unpacked_size, TYPE_SIZE (rli->t)))
1287 TYPE_PACKED (rli->t) = 0;
1289 if (TYPE_NAME (rli->t))
1293 if (TREE_CODE (TYPE_NAME (rli->t)) == IDENTIFIER_NODE)
1294 name = IDENTIFIER_POINTER (TYPE_NAME (rli->t));
1296 name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (rli->t)));
1298 if (STRICT_ALIGNMENT)
1299 warning (OPT_Wpacked, "packed attribute causes inefficient "
1300 "alignment for %qs", name);
1302 warning (OPT_Wpacked,
1303 "packed attribute is unnecessary for %qs", name);
1307 if (STRICT_ALIGNMENT)
1308 warning (OPT_Wpacked,
1309 "packed attribute causes inefficient alignment");
1311 warning (OPT_Wpacked, "packed attribute is unnecessary");
1317 /* Compute the TYPE_MODE for the TYPE (which is a RECORD_TYPE). */
1320 compute_record_mode (tree type)
1323 enum machine_mode mode = VOIDmode;
1325 /* Most RECORD_TYPEs have BLKmode, so we start off assuming that.
1326 However, if possible, we use a mode that fits in a register
1327 instead, in order to allow for better optimization down the
1329 TYPE_MODE (type) = BLKmode;
1331 if (! host_integerp (TYPE_SIZE (type), 1))
1334 /* A record which has any BLKmode members must itself be
1335 BLKmode; it can't go in a register. Unless the member is
1336 BLKmode only because it isn't aligned. */
1337 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1339 if (TREE_CODE (field) != FIELD_DECL)
1342 if (TREE_CODE (TREE_TYPE (field)) == ERROR_MARK
1343 || (TYPE_MODE (TREE_TYPE (field)) == BLKmode
1344 && ! TYPE_NO_FORCE_BLK (TREE_TYPE (field))
1345 && !(TYPE_SIZE (TREE_TYPE (field)) != 0
1346 && integer_zerop (TYPE_SIZE (TREE_TYPE (field)))))
1347 || ! host_integerp (bit_position (field), 1)
1348 || DECL_SIZE (field) == 0
1349 || ! host_integerp (DECL_SIZE (field), 1))
1352 /* If this field is the whole struct, remember its mode so
1353 that, say, we can put a double in a class into a DF
1354 register instead of forcing it to live in the stack. */
1355 if (simple_cst_equal (TYPE_SIZE (type), DECL_SIZE (field)))
1356 mode = DECL_MODE (field);
1358 #ifdef MEMBER_TYPE_FORCES_BLK
1359 /* With some targets, eg. c4x, it is sub-optimal
1360 to access an aligned BLKmode structure as a scalar. */
1362 if (MEMBER_TYPE_FORCES_BLK (field, mode))
1364 #endif /* MEMBER_TYPE_FORCES_BLK */
1367 /* If we only have one real field; use its mode if that mode's size
1368 matches the type's size. This only applies to RECORD_TYPE. This
1369 does not apply to unions. */
1370 if (TREE_CODE (type) == RECORD_TYPE && mode != VOIDmode
1371 && host_integerp (TYPE_SIZE (type), 1)
1372 && GET_MODE_BITSIZE (mode) == TREE_INT_CST_LOW (TYPE_SIZE (type)))
1373 TYPE_MODE (type) = mode;
1375 TYPE_MODE (type) = mode_for_size_tree (TYPE_SIZE (type), MODE_INT, 1);
1377 /* If structure's known alignment is less than what the scalar
1378 mode would need, and it matters, then stick with BLKmode. */
1379 if (TYPE_MODE (type) != BLKmode
1381 && ! (TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT
1382 || TYPE_ALIGN (type) >= GET_MODE_ALIGNMENT (TYPE_MODE (type))))
1384 /* If this is the only reason this type is BLKmode, then
1385 don't force containing types to be BLKmode. */
1386 TYPE_NO_FORCE_BLK (type) = 1;
1387 TYPE_MODE (type) = BLKmode;
1391 /* Compute TYPE_SIZE and TYPE_ALIGN for TYPE, once it has been laid
1395 finalize_type_size (tree type)
1397 /* Normally, use the alignment corresponding to the mode chosen.
1398 However, where strict alignment is not required, avoid
1399 over-aligning structures, since most compilers do not do this
1402 if (TYPE_MODE (type) != BLKmode && TYPE_MODE (type) != VOIDmode
1403 && (STRICT_ALIGNMENT
1404 || (TREE_CODE (type) != RECORD_TYPE && TREE_CODE (type) != UNION_TYPE
1405 && TREE_CODE (type) != QUAL_UNION_TYPE
1406 && TREE_CODE (type) != ARRAY_TYPE)))
1408 unsigned mode_align = GET_MODE_ALIGNMENT (TYPE_MODE (type));
1410 /* Don't override a larger alignment requirement coming from a user
1411 alignment of one of the fields. */
1412 if (mode_align >= TYPE_ALIGN (type))
1414 TYPE_ALIGN (type) = mode_align;
1415 TYPE_USER_ALIGN (type) = 0;
1419 /* Do machine-dependent extra alignment. */
1420 #ifdef ROUND_TYPE_ALIGN
1422 = ROUND_TYPE_ALIGN (type, TYPE_ALIGN (type), BITS_PER_UNIT);
1425 /* If we failed to find a simple way to calculate the unit size
1426 of the type, find it by division. */
1427 if (TYPE_SIZE_UNIT (type) == 0 && TYPE_SIZE (type) != 0)
1428 /* TYPE_SIZE (type) is computed in bitsizetype. After the division, the
1429 result will fit in sizetype. We will get more efficient code using
1430 sizetype, so we force a conversion. */
1431 TYPE_SIZE_UNIT (type)
1432 = fold_convert (sizetype,
1433 size_binop (FLOOR_DIV_EXPR, TYPE_SIZE (type),
1434 bitsize_unit_node));
1436 if (TYPE_SIZE (type) != 0)
1438 TYPE_SIZE (type) = round_up (TYPE_SIZE (type), TYPE_ALIGN (type));
1439 TYPE_SIZE_UNIT (type) = round_up (TYPE_SIZE_UNIT (type),
1440 TYPE_ALIGN_UNIT (type));
1443 /* Evaluate nonconstant sizes only once, either now or as soon as safe. */
1444 if (TYPE_SIZE (type) != 0 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
1445 TYPE_SIZE (type) = variable_size (TYPE_SIZE (type));
1446 if (TYPE_SIZE_UNIT (type) != 0
1447 && TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)
1448 TYPE_SIZE_UNIT (type) = variable_size (TYPE_SIZE_UNIT (type));
1450 /* Also layout any other variants of the type. */
1451 if (TYPE_NEXT_VARIANT (type)
1452 || type != TYPE_MAIN_VARIANT (type))
1455 /* Record layout info of this variant. */
1456 tree size = TYPE_SIZE (type);
1457 tree size_unit = TYPE_SIZE_UNIT (type);
1458 unsigned int align = TYPE_ALIGN (type);
1459 unsigned int user_align = TYPE_USER_ALIGN (type);
1460 enum machine_mode mode = TYPE_MODE (type);
1462 /* Copy it into all variants. */
1463 for (variant = TYPE_MAIN_VARIANT (type);
1465 variant = TYPE_NEXT_VARIANT (variant))
1467 TYPE_SIZE (variant) = size;
1468 TYPE_SIZE_UNIT (variant) = size_unit;
1469 TYPE_ALIGN (variant) = align;
1470 TYPE_USER_ALIGN (variant) = user_align;
1471 TYPE_MODE (variant) = mode;
1476 /* Do all of the work required to layout the type indicated by RLI,
1477 once the fields have been laid out. This function will call `free'
1478 for RLI, unless FREE_P is false. Passing a value other than false
1479 for FREE_P is bad practice; this option only exists to support the
1483 finish_record_layout (record_layout_info rli, int free_p)
1487 /* Compute the final size. */
1488 finalize_record_size (rli);
1490 /* Compute the TYPE_MODE for the record. */
1491 compute_record_mode (rli->t);
1493 /* Perform any last tweaks to the TYPE_SIZE, etc. */
1494 finalize_type_size (rli->t);
1496 /* Propagate TYPE_PACKED to variants. With C++ templates,
1497 handle_packed_attribute is too early to do this. */
1498 for (variant = TYPE_NEXT_VARIANT (rli->t); variant;
1499 variant = TYPE_NEXT_VARIANT (variant))
1500 TYPE_PACKED (variant) = TYPE_PACKED (rli->t);
1502 /* Lay out any static members. This is done now because their type
1503 may use the record's type. */
1504 while (rli->pending_statics)
1506 layout_decl (TREE_VALUE (rli->pending_statics), 0);
1507 rli->pending_statics = TREE_CHAIN (rli->pending_statics);
1516 /* Finish processing a builtin RECORD_TYPE type TYPE. It's name is
1517 NAME, its fields are chained in reverse on FIELDS.
1519 If ALIGN_TYPE is non-null, it is given the same alignment as
1523 finish_builtin_struct (tree type, const char *name, tree fields,
1528 for (tail = NULL_TREE; fields; tail = fields, fields = next)
1530 DECL_FIELD_CONTEXT (fields) = type;
1531 next = TREE_CHAIN (fields);
1532 TREE_CHAIN (fields) = tail;
1534 TYPE_FIELDS (type) = tail;
1538 TYPE_ALIGN (type) = TYPE_ALIGN (align_type);
1539 TYPE_USER_ALIGN (type) = TYPE_USER_ALIGN (align_type);
1543 #if 0 /* not yet, should get fixed properly later */
1544 TYPE_NAME (type) = make_type_decl (get_identifier (name), type);
1546 TYPE_NAME (type) = build_decl (TYPE_DECL, get_identifier (name), type);
1548 TYPE_STUB_DECL (type) = TYPE_NAME (type);
1549 layout_decl (TYPE_NAME (type), 0);
1552 /* Calculate the mode, size, and alignment for TYPE.
1553 For an array type, calculate the element separation as well.
1554 Record TYPE on the chain of permanent or temporary types
1555 so that dbxout will find out about it.
1557 TYPE_SIZE of a type is nonzero if the type has been laid out already.
1558 layout_type does nothing on such a type.
1560 If the type is incomplete, its TYPE_SIZE remains zero. */
1563 layout_type (tree type)
1567 if (type == error_mark_node)
1570 /* Do nothing if type has been laid out before. */
1571 if (TYPE_SIZE (type))
1574 switch (TREE_CODE (type))
1577 /* This kind of type is the responsibility
1578 of the language-specific code. */
1581 case BOOLEAN_TYPE: /* Used for Java, Pascal, and Chill. */
1582 if (TYPE_PRECISION (type) == 0)
1583 TYPE_PRECISION (type) = 1; /* default to one byte/boolean. */
1585 /* ... fall through ... */
1589 if (TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST
1590 && tree_int_cst_sgn (TYPE_MIN_VALUE (type)) >= 0)
1591 TYPE_UNSIGNED (type) = 1;
1593 TYPE_MODE (type) = smallest_mode_for_size (TYPE_PRECISION (type),
1595 TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
1596 TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
1600 TYPE_MODE (type) = mode_for_size (TYPE_PRECISION (type), MODE_FLOAT, 0);
1601 TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
1602 TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
1606 TYPE_UNSIGNED (type) = TYPE_UNSIGNED (TREE_TYPE (type));
1608 = mode_for_size (2 * TYPE_PRECISION (TREE_TYPE (type)),
1609 (TREE_CODE (TREE_TYPE (type)) == REAL_TYPE
1610 ? MODE_COMPLEX_FLOAT : MODE_COMPLEX_INT),
1612 TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
1613 TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
1618 int nunits = TYPE_VECTOR_SUBPARTS (type);
1619 tree innertype = TREE_TYPE (type);
1621 gcc_assert (!(nunits & (nunits - 1)));
1623 /* Find an appropriate mode for the vector type. */
1624 if (TYPE_MODE (type) == VOIDmode)
1626 enum machine_mode innermode = TYPE_MODE (innertype);
1627 enum machine_mode mode;
1629 /* First, look for a supported vector type. */
1630 if (SCALAR_FLOAT_MODE_P (innermode))
1631 mode = MIN_MODE_VECTOR_FLOAT;
1633 mode = MIN_MODE_VECTOR_INT;
1635 for (; mode != VOIDmode ; mode = GET_MODE_WIDER_MODE (mode))
1636 if (GET_MODE_NUNITS (mode) == nunits
1637 && GET_MODE_INNER (mode) == innermode
1638 && targetm.vector_mode_supported_p (mode))
1641 /* For integers, try mapping it to a same-sized scalar mode. */
1642 if (mode == VOIDmode
1643 && GET_MODE_CLASS (innermode) == MODE_INT)
1644 mode = mode_for_size (nunits * GET_MODE_BITSIZE (innermode),
1647 if (mode == VOIDmode || !have_regs_of_mode[mode])
1648 TYPE_MODE (type) = BLKmode;
1650 TYPE_MODE (type) = mode;
1653 TYPE_UNSIGNED (type) = TYPE_UNSIGNED (TREE_TYPE (type));
1654 TYPE_SIZE_UNIT (type) = int_const_binop (MULT_EXPR,
1655 TYPE_SIZE_UNIT (innertype),
1656 size_int (nunits), 0);
1657 TYPE_SIZE (type) = int_const_binop (MULT_EXPR, TYPE_SIZE (innertype),
1658 bitsize_int (nunits), 0);
1660 /* Always naturally align vectors. This prevents ABI changes
1661 depending on whether or not native vector modes are supported. */
1662 TYPE_ALIGN (type) = tree_low_cst (TYPE_SIZE (type), 0);
1667 /* This is an incomplete type and so doesn't have a size. */
1668 TYPE_ALIGN (type) = 1;
1669 TYPE_USER_ALIGN (type) = 0;
1670 TYPE_MODE (type) = VOIDmode;
1674 TYPE_SIZE (type) = bitsize_int (POINTER_SIZE);
1675 TYPE_SIZE_UNIT (type) = size_int (POINTER_SIZE / BITS_PER_UNIT);
1676 /* A pointer might be MODE_PARTIAL_INT,
1677 but ptrdiff_t must be integral. */
1678 TYPE_MODE (type) = mode_for_size (POINTER_SIZE, MODE_INT, 0);
1683 /* It's hard to see what the mode and size of a function ought to
1684 be, but we do know the alignment is FUNCTION_BOUNDARY, so
1685 make it consistent with that. */
1686 TYPE_MODE (type) = mode_for_size (FUNCTION_BOUNDARY, MODE_INT, 0);
1687 TYPE_SIZE (type) = bitsize_int (FUNCTION_BOUNDARY);
1688 TYPE_SIZE_UNIT (type) = size_int (FUNCTION_BOUNDARY / BITS_PER_UNIT);
1692 case REFERENCE_TYPE:
1695 enum machine_mode mode = ((TREE_CODE (type) == REFERENCE_TYPE
1696 && reference_types_internal)
1697 ? Pmode : TYPE_MODE (type));
1699 int nbits = GET_MODE_BITSIZE (mode);
1701 TYPE_SIZE (type) = bitsize_int (nbits);
1702 TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (mode));
1703 TYPE_UNSIGNED (type) = 1;
1704 TYPE_PRECISION (type) = nbits;
1710 tree index = TYPE_DOMAIN (type);
1711 tree element = TREE_TYPE (type);
1713 build_pointer_type (element);
1715 /* We need to know both bounds in order to compute the size. */
1716 if (index && TYPE_MAX_VALUE (index) && TYPE_MIN_VALUE (index)
1717 && TYPE_SIZE (element))
1719 tree ub = TYPE_MAX_VALUE (index);
1720 tree lb = TYPE_MIN_VALUE (index);
1724 /* The initial subtraction should happen in the original type so
1725 that (possible) negative values are handled appropriately. */
1726 length = size_binop (PLUS_EXPR, size_one_node,
1727 fold_convert (sizetype,
1728 fold_build2 (MINUS_EXPR,
1732 /* Special handling for arrays of bits (for Chill). */
1733 element_size = TYPE_SIZE (element);
1734 if (TYPE_PACKED (type) && INTEGRAL_TYPE_P (element)
1735 && (integer_zerop (TYPE_MAX_VALUE (element))
1736 || integer_onep (TYPE_MAX_VALUE (element)))
1737 && host_integerp (TYPE_MIN_VALUE (element), 1))
1739 HOST_WIDE_INT maxvalue
1740 = tree_low_cst (TYPE_MAX_VALUE (element), 1);
1741 HOST_WIDE_INT minvalue
1742 = tree_low_cst (TYPE_MIN_VALUE (element), 1);
1744 if (maxvalue - minvalue == 1
1745 && (maxvalue == 1 || maxvalue == 0))
1746 element_size = integer_one_node;
1749 /* If neither bound is a constant and sizetype is signed, make
1750 sure the size is never negative. We should really do this
1751 if *either* bound is non-constant, but this is the best
1752 compromise between C and Ada. */
1753 if (!TYPE_UNSIGNED (sizetype)
1754 && TREE_CODE (TYPE_MIN_VALUE (index)) != INTEGER_CST
1755 && TREE_CODE (TYPE_MAX_VALUE (index)) != INTEGER_CST)
1756 length = size_binop (MAX_EXPR, length, size_zero_node);
1758 TYPE_SIZE (type) = size_binop (MULT_EXPR, element_size,
1759 fold_convert (bitsizetype,
1762 /* If we know the size of the element, calculate the total
1763 size directly, rather than do some division thing below.
1764 This optimization helps Fortran assumed-size arrays
1765 (where the size of the array is determined at runtime)
1767 Note that we can't do this in the case where the size of
1768 the elements is one bit since TYPE_SIZE_UNIT cannot be
1769 set correctly in that case. */
1770 if (TYPE_SIZE_UNIT (element) != 0 && ! integer_onep (element_size))
1771 TYPE_SIZE_UNIT (type)
1772 = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (element), length);
1775 /* Now round the alignment and size,
1776 using machine-dependent criteria if any. */
1778 #ifdef ROUND_TYPE_ALIGN
1780 = ROUND_TYPE_ALIGN (type, TYPE_ALIGN (element), BITS_PER_UNIT);
1782 TYPE_ALIGN (type) = MAX (TYPE_ALIGN (element), BITS_PER_UNIT);
1784 if (!TYPE_SIZE (element))
1785 /* We don't know the size of the underlying element type, so
1786 our alignment calculations will be wrong, forcing us to
1787 fall back on structural equality. */
1788 SET_TYPE_STRUCTURAL_EQUALITY (type);
1789 TYPE_USER_ALIGN (type) = TYPE_USER_ALIGN (element);
1790 TYPE_MODE (type) = BLKmode;
1791 if (TYPE_SIZE (type) != 0
1792 #ifdef MEMBER_TYPE_FORCES_BLK
1793 && ! MEMBER_TYPE_FORCES_BLK (type, VOIDmode)
1795 /* BLKmode elements force BLKmode aggregate;
1796 else extract/store fields may lose. */
1797 && (TYPE_MODE (TREE_TYPE (type)) != BLKmode
1798 || TYPE_NO_FORCE_BLK (TREE_TYPE (type))))
1800 /* One-element arrays get the component type's mode. */
1801 if (simple_cst_equal (TYPE_SIZE (type),
1802 TYPE_SIZE (TREE_TYPE (type))))
1803 TYPE_MODE (type) = TYPE_MODE (TREE_TYPE (type));
1806 = mode_for_size_tree (TYPE_SIZE (type), MODE_INT, 1);
1808 if (TYPE_MODE (type) != BLKmode
1809 && STRICT_ALIGNMENT && TYPE_ALIGN (type) < BIGGEST_ALIGNMENT
1810 && TYPE_ALIGN (type) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
1812 TYPE_NO_FORCE_BLK (type) = 1;
1813 TYPE_MODE (type) = BLKmode;
1816 /* When the element size is constant, check that it is at least as
1817 large as the element alignment. */
1818 if (TYPE_SIZE_UNIT (element)
1819 && TREE_CODE (TYPE_SIZE_UNIT (element)) == INTEGER_CST
1820 /* If TYPE_SIZE_UNIT overflowed, then it is certainly larger than
1822 && !TREE_OVERFLOW (TYPE_SIZE_UNIT (element))
1823 && !integer_zerop (TYPE_SIZE_UNIT (element))
1824 && compare_tree_int (TYPE_SIZE_UNIT (element),
1825 TYPE_ALIGN_UNIT (element)) < 0)
1826 error ("alignment of array elements is greater than element size");
1832 case QUAL_UNION_TYPE:
1835 record_layout_info rli;
1837 /* Initialize the layout information. */
1838 rli = start_record_layout (type);
1840 /* If this is a QUAL_UNION_TYPE, we want to process the fields
1841 in the reverse order in building the COND_EXPR that denotes
1842 its size. We reverse them again later. */
1843 if (TREE_CODE (type) == QUAL_UNION_TYPE)
1844 TYPE_FIELDS (type) = nreverse (TYPE_FIELDS (type));
1846 /* Place all the fields. */
1847 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1848 place_field (rli, field);
1850 if (TREE_CODE (type) == QUAL_UNION_TYPE)
1851 TYPE_FIELDS (type) = nreverse (TYPE_FIELDS (type));
1853 if (lang_adjust_rli)
1854 (*lang_adjust_rli) (rli);
1856 /* Finish laying out the record. */
1857 finish_record_layout (rli, /*free_p=*/true);
1865 /* Compute the final TYPE_SIZE, TYPE_ALIGN, etc. for TYPE. For
1866 records and unions, finish_record_layout already called this
1868 if (TREE_CODE (type) != RECORD_TYPE
1869 && TREE_CODE (type) != UNION_TYPE
1870 && TREE_CODE (type) != QUAL_UNION_TYPE)
1871 finalize_type_size (type);
1873 /* If an alias set has been set for this aggregate when it was incomplete,
1874 force it into alias set 0.
1875 This is too conservative, but we cannot call record_component_aliases
1876 here because some frontends still change the aggregates after
1878 if (AGGREGATE_TYPE_P (type) && TYPE_ALIAS_SET_KNOWN_P (type))
1879 TYPE_ALIAS_SET (type) = 0;
1882 /* Create and return a type for signed integers of PRECISION bits. */
1885 make_signed_type (int precision)
1887 tree type = make_node (INTEGER_TYPE);
1889 TYPE_PRECISION (type) = precision;
1891 fixup_signed_type (type);
1895 /* Create and return a type for unsigned integers of PRECISION bits. */
1898 make_unsigned_type (int precision)
1900 tree type = make_node (INTEGER_TYPE);
1902 TYPE_PRECISION (type) = precision;
1904 fixup_unsigned_type (type);
1908 /* Initialize sizetype and bitsizetype to a reasonable and temporary
1909 value to enable integer types to be created. */
1912 initialize_sizetypes (bool signed_p)
1914 tree t = make_node (INTEGER_TYPE);
1915 int precision = GET_MODE_BITSIZE (SImode);
1917 TYPE_MODE (t) = SImode;
1918 TYPE_ALIGN (t) = GET_MODE_ALIGNMENT (SImode);
1919 TYPE_USER_ALIGN (t) = 0;
1920 TYPE_IS_SIZETYPE (t) = 1;
1921 TYPE_UNSIGNED (t) = !signed_p;
1922 TYPE_SIZE (t) = build_int_cst (t, precision);
1923 TYPE_SIZE_UNIT (t) = build_int_cst (t, GET_MODE_SIZE (SImode));
1924 TYPE_PRECISION (t) = precision;
1926 /* Set TYPE_MIN_VALUE and TYPE_MAX_VALUE. */
1927 set_min_and_max_values_for_integral_type (t, precision, !signed_p);
1930 bitsizetype = build_distinct_type_copy (t);
1933 /* Make sizetype a version of TYPE, and initialize *sizetype
1934 accordingly. We do this by overwriting the stub sizetype and
1935 bitsizetype nodes created by initialize_sizetypes. This makes sure
1936 that (a) anything stubby about them no longer exists, (b) any
1937 INTEGER_CSTs created with such a type, remain valid. */
1940 set_sizetype (tree type)
1942 int oprecision = TYPE_PRECISION (type);
1943 /* The *bitsizetype types use a precision that avoids overflows when
1944 calculating signed sizes / offsets in bits. However, when
1945 cross-compiling from a 32 bit to a 64 bit host, we are limited to 64 bit
1947 int precision = MIN (MIN (oprecision + BITS_PER_UNIT_LOG + 1,
1948 MAX_FIXED_MODE_SIZE),
1949 2 * HOST_BITS_PER_WIDE_INT);
1952 gcc_assert (TYPE_UNSIGNED (type) == TYPE_UNSIGNED (sizetype));
1954 t = build_distinct_type_copy (type);
1955 /* We do want to use sizetype's cache, as we will be replacing that
1957 TYPE_CACHED_VALUES (t) = TYPE_CACHED_VALUES (sizetype);
1958 TYPE_CACHED_VALUES_P (t) = TYPE_CACHED_VALUES_P (sizetype);
1959 TREE_TYPE (TYPE_CACHED_VALUES (t)) = type;
1960 TYPE_UID (t) = TYPE_UID (sizetype);
1961 TYPE_IS_SIZETYPE (t) = 1;
1963 /* Replace our original stub sizetype. */
1964 memcpy (sizetype, t, tree_size (sizetype));
1965 TYPE_MAIN_VARIANT (sizetype) = sizetype;
1967 t = make_node (INTEGER_TYPE);
1968 TYPE_NAME (t) = get_identifier ("bit_size_type");
1969 /* We do want to use bitsizetype's cache, as we will be replacing that
1971 TYPE_CACHED_VALUES (t) = TYPE_CACHED_VALUES (bitsizetype);
1972 TYPE_CACHED_VALUES_P (t) = TYPE_CACHED_VALUES_P (bitsizetype);
1973 TYPE_PRECISION (t) = precision;
1974 TYPE_UID (t) = TYPE_UID (bitsizetype);
1975 TYPE_IS_SIZETYPE (t) = 1;
1977 /* Replace our original stub bitsizetype. */
1978 memcpy (bitsizetype, t, tree_size (bitsizetype));
1979 TYPE_MAIN_VARIANT (bitsizetype) = bitsizetype;
1981 if (TYPE_UNSIGNED (type))
1983 fixup_unsigned_type (bitsizetype);
1984 ssizetype = build_distinct_type_copy (make_signed_type (oprecision));
1985 TYPE_IS_SIZETYPE (ssizetype) = 1;
1986 sbitsizetype = build_distinct_type_copy (make_signed_type (precision));
1987 TYPE_IS_SIZETYPE (sbitsizetype) = 1;
1991 fixup_signed_type (bitsizetype);
1992 ssizetype = sizetype;
1993 sbitsizetype = bitsizetype;
1996 /* If SIZETYPE is unsigned, we need to fix TYPE_MAX_VALUE so that
1997 it is sign extended in a way consistent with force_fit_type. */
1998 if (TYPE_UNSIGNED (type))
2000 tree orig_max, new_max;
2002 orig_max = TYPE_MAX_VALUE (sizetype);
2004 /* Build a new node with the same values, but a different type.
2005 Sign extend it to ensure consistency. */
2006 new_max = build_int_cst_wide_type (sizetype,
2007 TREE_INT_CST_LOW (orig_max),
2008 TREE_INT_CST_HIGH (orig_max));
2009 TYPE_MAX_VALUE (sizetype) = new_max;
2013 /* TYPE is an integral type, i.e., an INTEGRAL_TYPE, ENUMERAL_TYPE
2014 or BOOLEAN_TYPE. Set TYPE_MIN_VALUE and TYPE_MAX_VALUE
2015 for TYPE, based on the PRECISION and whether or not the TYPE
2016 IS_UNSIGNED. PRECISION need not correspond to a width supported
2017 natively by the hardware; for example, on a machine with 8-bit,
2018 16-bit, and 32-bit register modes, PRECISION might be 7, 23, or
2022 set_min_and_max_values_for_integral_type (tree type,
2031 min_value = build_int_cst (type, 0);
2033 = build_int_cst_wide (type, precision - HOST_BITS_PER_WIDE_INT >= 0
2035 : ((HOST_WIDE_INT) 1 << precision) - 1,
2036 precision - HOST_BITS_PER_WIDE_INT > 0
2037 ? ((unsigned HOST_WIDE_INT) ~0
2038 >> (HOST_BITS_PER_WIDE_INT
2039 - (precision - HOST_BITS_PER_WIDE_INT)))
2045 = build_int_cst_wide (type,
2046 (precision - HOST_BITS_PER_WIDE_INT > 0
2048 : (HOST_WIDE_INT) (-1) << (precision - 1)),
2049 (((HOST_WIDE_INT) (-1)
2050 << (precision - HOST_BITS_PER_WIDE_INT - 1 > 0
2051 ? precision - HOST_BITS_PER_WIDE_INT - 1
2054 = build_int_cst_wide (type,
2055 (precision - HOST_BITS_PER_WIDE_INT > 0
2057 : ((HOST_WIDE_INT) 1 << (precision - 1)) - 1),
2058 (precision - HOST_BITS_PER_WIDE_INT - 1 > 0
2059 ? (((HOST_WIDE_INT) 1
2060 << (precision - HOST_BITS_PER_WIDE_INT - 1))) - 1
2064 TYPE_MIN_VALUE (type) = min_value;
2065 TYPE_MAX_VALUE (type) = max_value;
2068 /* Set the extreme values of TYPE based on its precision in bits,
2069 then lay it out. Used when make_signed_type won't do
2070 because the tree code is not INTEGER_TYPE.
2071 E.g. for Pascal, when the -fsigned-char option is given. */
2074 fixup_signed_type (tree type)
2076 int precision = TYPE_PRECISION (type);
2078 /* We can not represent properly constants greater then
2079 2 * HOST_BITS_PER_WIDE_INT, still we need the types
2080 as they are used by i386 vector extensions and friends. */
2081 if (precision > HOST_BITS_PER_WIDE_INT * 2)
2082 precision = HOST_BITS_PER_WIDE_INT * 2;
2084 set_min_and_max_values_for_integral_type (type, precision,
2085 /*is_unsigned=*/false);
2087 /* Lay out the type: set its alignment, size, etc. */
2091 /* Set the extreme values of TYPE based on its precision in bits,
2092 then lay it out. This is used both in `make_unsigned_type'
2093 and for enumeral types. */
2096 fixup_unsigned_type (tree type)
2098 int precision = TYPE_PRECISION (type);
2100 /* We can not represent properly constants greater then
2101 2 * HOST_BITS_PER_WIDE_INT, still we need the types
2102 as they are used by i386 vector extensions and friends. */
2103 if (precision > HOST_BITS_PER_WIDE_INT * 2)
2104 precision = HOST_BITS_PER_WIDE_INT * 2;
2106 TYPE_UNSIGNED (type) = 1;
2108 set_min_and_max_values_for_integral_type (type, precision,
2109 /*is_unsigned=*/true);
2111 /* Lay out the type: set its alignment, size, etc. */
2115 /* Find the best machine mode to use when referencing a bit field of length
2116 BITSIZE bits starting at BITPOS.
2118 The underlying object is known to be aligned to a boundary of ALIGN bits.
2119 If LARGEST_MODE is not VOIDmode, it means that we should not use a mode
2120 larger than LARGEST_MODE (usually SImode).
2122 If no mode meets all these conditions, we return VOIDmode.
2124 If VOLATILEP is false and SLOW_BYTE_ACCESS is false, we return the
2125 smallest mode meeting these conditions.
2127 If VOLATILEP is false and SLOW_BYTE_ACCESS is true, we return the
2128 largest mode (but a mode no wider than UNITS_PER_WORD) that meets
2131 If VOLATILEP is true the narrow_volatile_bitfields target hook is used to
2132 decide which of the above modes should be used. */
2135 get_best_mode (int bitsize, int bitpos, unsigned int align,
2136 enum machine_mode largest_mode, int volatilep)
2138 enum machine_mode mode;
2139 unsigned int unit = 0;
2141 /* Find the narrowest integer mode that contains the bit field. */
2142 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2143 mode = GET_MODE_WIDER_MODE (mode))
2145 unit = GET_MODE_BITSIZE (mode);
2146 if ((bitpos % unit) + bitsize <= unit)
2150 if (mode == VOIDmode
2151 /* It is tempting to omit the following line
2152 if STRICT_ALIGNMENT is true.
2153 But that is incorrect, since if the bitfield uses part of 3 bytes
2154 and we use a 4-byte mode, we could get a spurious segv
2155 if the extra 4th byte is past the end of memory.
2156 (Though at least one Unix compiler ignores this problem:
2157 that on the Sequent 386 machine. */
2158 || MIN (unit, BIGGEST_ALIGNMENT) > align
2159 || (largest_mode != VOIDmode && unit > GET_MODE_BITSIZE (largest_mode)))
2162 if ((SLOW_BYTE_ACCESS && ! volatilep)
2163 || (volatilep && !targetm.narrow_volatile_bitfield ()))
2165 enum machine_mode wide_mode = VOIDmode, tmode;
2167 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT); tmode != VOIDmode;
2168 tmode = GET_MODE_WIDER_MODE (tmode))
2170 unit = GET_MODE_BITSIZE (tmode);
2171 if (bitpos / unit == (bitpos + bitsize - 1) / unit
2172 && unit <= BITS_PER_WORD
2173 && unit <= MIN (align, BIGGEST_ALIGNMENT)
2174 && (largest_mode == VOIDmode
2175 || unit <= GET_MODE_BITSIZE (largest_mode)))
2179 if (wide_mode != VOIDmode)
2186 /* Gets minimal and maximal values for MODE (signed or unsigned depending on
2187 SIGN). The returned constants are made to be usable in TARGET_MODE. */
2190 get_mode_bounds (enum machine_mode mode, int sign,
2191 enum machine_mode target_mode,
2192 rtx *mmin, rtx *mmax)
2194 unsigned size = GET_MODE_BITSIZE (mode);
2195 unsigned HOST_WIDE_INT min_val, max_val;
2197 gcc_assert (size <= HOST_BITS_PER_WIDE_INT);
2201 min_val = -((unsigned HOST_WIDE_INT) 1 << (size - 1));
2202 max_val = ((unsigned HOST_WIDE_INT) 1 << (size - 1)) - 1;
2207 max_val = ((unsigned HOST_WIDE_INT) 1 << (size - 1) << 1) - 1;
2210 *mmin = gen_int_mode (min_val, target_mode);
2211 *mmax = gen_int_mode (max_val, target_mode);
2214 #include "gt-stor-layout.h"