1 /* C-compiler utilities for types and variables storage layout
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
25 #include "coretypes.h"
37 #include "langhooks.h"
41 /* Data type for the expressions representing sizes of data types.
42 It is the first integer type laid out. */
43 tree sizetype_tab[(int) TYPE_KIND_LAST];
45 /* If nonzero, this is an upper limit on alignment of structure fields.
46 The value is measured in bits. */
47 unsigned int maximum_field_alignment = TARGET_DEFAULT_PACK_STRUCT * BITS_PER_UNIT;
48 /* ... and its original value in bytes, specified via -fpack-struct=<value>. */
49 unsigned int initial_max_fld_align = TARGET_DEFAULT_PACK_STRUCT;
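/* Worked example (illustrative, assuming BITS_PER_UNIT == 8): compiling
   with -fpack-struct=2 leaves initial_max_fld_align at 2 (bytes) and
   maximum_field_alignment at 16 (bits), so structure field alignment is
   capped at 16 bits during layout.  */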
51 /* Nonzero if all REFERENCE_TYPEs are internal and hence should be
52 allocated in Pmode, not ptr_mode. Set only by internal_reference_types
53 called only by a front end. */
54 static int reference_types_internal = 0;
56 static void finalize_record_size (record_layout_info);
57 static void finalize_type_size (tree);
58 static void place_union_field (record_layout_info, tree);
59 #if defined (PCC_BITFIELD_TYPE_MATTERS) || defined (BITFIELD_NBYTES_LIMITED)
60 static int excess_unit_span (HOST_WIDE_INT, HOST_WIDE_INT, HOST_WIDE_INT,
63 extern void debug_rli (record_layout_info);
65 /* SAVE_EXPRs for sizes of types and decls, waiting to be expanded. */
67 static GTY(()) tree pending_sizes;
69 /* Show that REFERENCE_TYPES are internal and should be Pmode. Called only
73 internal_reference_types (void)
75 reference_types_internal = 1;
78 /* Get a list of all the objects put on the pending sizes list. */
81 get_pending_sizes (void)
83 tree chain = pending_sizes;
89 /* Add EXPR to the pending sizes list. */
92 put_pending_size (tree expr)
94 /* Strip any simple arithmetic from EXPR to see if it has an underlying
96 expr = skip_simple_arithmetic (expr);
98 if (TREE_CODE (expr) == SAVE_EXPR)
99 pending_sizes = tree_cons (NULL_TREE, expr, pending_sizes);
102 /* Put a chain of objects into the pending sizes list, which must be
106 put_pending_sizes (tree chain)
108 gcc_assert (!pending_sizes);
109 pending_sizes = chain;
112 /* Given a size SIZE that may not be a constant, return a SAVE_EXPR
113 to serve as the actual size-expression for a type or decl. */
116 variable_size (tree size)
120 /* If the language-processor is to take responsibility for variable-sized
121 items (e.g., languages which have elaboration procedures like Ada),
122 just return SIZE unchanged. Likewise for self-referential sizes and
124 if (TREE_CONSTANT (size)
125 || lang_hooks.decls.global_bindings_p () < 0
126 || CONTAINS_PLACEHOLDER_P (size))
129 size = save_expr (size);
131 /* If an array with a variable number of elements is declared, and
132 the elements require destruction, we will emit a cleanup for the
133 array. That cleanup is run both on normal exit from the block
134 and in the exception-handler for the block. Normally, when code
135 is used in both ordinary code and in an exception handler it is
136 `unsaved', i.e., all SAVE_EXPRs are recalculated. However, we do
137 not wish to do that here; the array-size is the same in both
139 save = skip_simple_arithmetic (size);
141 if (cfun && cfun->x_dont_save_pending_sizes_p)
142 /* The front-end doesn't want us to keep a list of the expressions
143 that determine sizes for variable size objects. Trust it. */
146 if (lang_hooks.decls.global_bindings_p ())
148 if (TREE_CONSTANT (size))
149 error ("type size can%'t be explicitly evaluated");
151 error ("variable-size type declared outside of any function");
153 return size_one_node;
156 put_pending_size (save);
161 #ifndef MAX_FIXED_MODE_SIZE
162 #define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
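/* Illustrative note: DImode is 8 units wide, so with 8-bit units the
   default cap is 64 bits; a target header may define MAX_FIXED_MODE_SIZE
   itself to allow or forbid wider fixed modes for nonscalars.  */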
165 /* Return the machine mode to use for a nonscalar of SIZE bits. The
166 mode must be in class CLASS, and have exactly that many value bits;
167 it may have padding as well. If LIMIT is nonzero, modes of wider
168 than MAX_FIXED_MODE_SIZE will not be used. */
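/* Usage sketch (illustrative; the exact mode is target-dependent):
   mode_for_size (32, MODE_INT, 0) is expected to yield SImode on a
   target whose SImode has 32 value bits, while a request wider than
   MAX_FIXED_MODE_SIZE with LIMIT nonzero yields BLKmode.  */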
171 mode_for_size (unsigned int size, enum mode_class class, int limit)
173 enum machine_mode mode;
175 if (limit && size > MAX_FIXED_MODE_SIZE)
178 /* Get the first mode which has this size, in the specified class. */
179 for (mode = GET_CLASS_NARROWEST_MODE (class); mode != VOIDmode;
180 mode = GET_MODE_WIDER_MODE (mode))
181 if (GET_MODE_PRECISION (mode) == size)
187 /* Similar, except passed a tree node. */
190 mode_for_size_tree (const_tree size, enum mode_class class, int limit)
192 unsigned HOST_WIDE_INT uhwi;
195 if (!host_integerp (size, 1))
197 uhwi = tree_low_cst (size, 1);
201 return mode_for_size (ui, class, limit);
204 /* Similar, but never return BLKmode; return the narrowest mode that
205 contains at least the requested number of value bits. */
208 smallest_mode_for_size (unsigned int size, enum mode_class class)
210 enum machine_mode mode;
212 /* Get the first mode which has at least this size, in the
214 for (mode = GET_CLASS_NARROWEST_MODE (class); mode != VOIDmode;
215 mode = GET_MODE_WIDER_MODE (mode))
216 if (GET_MODE_PRECISION (mode) >= size)
222 /* Find an integer mode of the exact same size, or BLKmode on failure. */
225 int_mode_for_mode (enum machine_mode mode)
227 switch (GET_MODE_CLASS (mode))
230 case MODE_PARTIAL_INT:
233 case MODE_COMPLEX_INT:
234 case MODE_COMPLEX_FLOAT:
236 case MODE_DECIMAL_FLOAT:
237 case MODE_VECTOR_INT:
238 case MODE_VECTOR_FLOAT:
243 case MODE_VECTOR_FRACT:
244 case MODE_VECTOR_ACCUM:
245 case MODE_VECTOR_UFRACT:
246 case MODE_VECTOR_UACCUM:
247 mode = mode_for_size (GET_MODE_BITSIZE (mode), MODE_INT, 0);
254 /* ... fall through ... */
264 /* Return the alignment of MODE. This will be bounded by 1 and
265 BIGGEST_ALIGNMENT. */
268 get_mode_alignment (enum machine_mode mode)
270 return MIN (BIGGEST_ALIGNMENT, MAX (1, mode_base_align[mode]*BITS_PER_UNIT));
274 /* Subroutine of layout_decl: Force alignment required for the data type.
275 But if the decl itself wants greater alignment, don't override that. */
278 do_type_align (tree type, tree decl)
280 if (TYPE_ALIGN (type) > DECL_ALIGN (decl))
282 DECL_ALIGN (decl) = TYPE_ALIGN (type);
283 if (TREE_CODE (decl) == FIELD_DECL)
284 DECL_USER_ALIGN (decl) = TYPE_USER_ALIGN (type);
288 /* Set the size, mode and alignment of a ..._DECL node.
289 TYPE_DECL does need this for C++.
290 Note that LABEL_DECL and CONST_DECL nodes do not need this,
291 and FUNCTION_DECL nodes have them set up in a special (and simple) way.
292 Don't call layout_decl for them.
294 KNOWN_ALIGN is the amount of alignment we can assume this
295 decl has with no special effort. It is relevant only for FIELD_DECLs
296 and depends on the previous fields.
297 All that matters about KNOWN_ALIGN is which powers of 2 divide it.
298 If KNOWN_ALIGN is 0, it means, "as much alignment as you like":
299 the record will be aligned to suit. */
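/* Worked example (illustrative, assuming an 8-bit byte): a FIELD_DECL
   known to start 6 bytes (48 bits) into its record has KNOWN_ALIGN 16,
   because 16 is the largest power of 2 dividing 48.  */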
302 layout_decl (tree decl, unsigned int known_align)
304 tree type = TREE_TYPE (decl);
305 enum tree_code code = TREE_CODE (decl);
308 if (code == CONST_DECL)
311 gcc_assert (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL
312 || code == TYPE_DECL || code == FIELD_DECL);
314 rtl = DECL_RTL_IF_SET (decl);
316 if (type == error_mark_node)
317 type = void_type_node;
319 /* Usually the size and mode come from the data type without change,
320 however, the front-end may set the explicit width of the field, so its
321 size may not be the same as the size of its type. This happens with
322 bitfields, of course (an `int' bitfield may be only 2 bits, say), but it
323 also happens with other fields. For example, the C++ front-end creates
324 zero-sized fields corresponding to empty base classes, and depends on
325 layout_type setting DECL_FIELD_BITPOS correctly for the field. Set the
326 size in bytes from the size in bits. If we have already set the mode,
327 don't set it again since we can be called twice for FIELD_DECLs. */
329 DECL_UNSIGNED (decl) = TYPE_UNSIGNED (type);
330 if (DECL_MODE (decl) == VOIDmode)
331 DECL_MODE (decl) = TYPE_MODE (type);
333 if (DECL_SIZE (decl) == 0)
335 DECL_SIZE (decl) = TYPE_SIZE (type);
336 DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
338 else if (DECL_SIZE_UNIT (decl) == 0)
339 DECL_SIZE_UNIT (decl)
340 = fold_convert (sizetype, size_binop (CEIL_DIV_EXPR, DECL_SIZE (decl),
343 if (code != FIELD_DECL)
344 /* For non-fields, update the alignment from the type. */
345 do_type_align (type, decl);
347 /* For fields, it's a bit more complicated... */
349 bool old_user_align = DECL_USER_ALIGN (decl);
350 bool zero_bitfield = false;
351 bool packed_p = DECL_PACKED (decl);
354 if (DECL_BIT_FIELD (decl))
356 DECL_BIT_FIELD_TYPE (decl) = type;
358 /* A zero-length bit-field affects the alignment of the next
359 field. In essence such bit-fields are not influenced by
360 any packing due to #pragma pack or attribute packed. */
361 if (integer_zerop (DECL_SIZE (decl))
362 && ! targetm.ms_bitfield_layout_p (DECL_FIELD_CONTEXT (decl)))
364 zero_bitfield = true;
366 #ifdef PCC_BITFIELD_TYPE_MATTERS
367 if (PCC_BITFIELD_TYPE_MATTERS)
368 do_type_align (type, decl);
372 #ifdef EMPTY_FIELD_BOUNDARY
373 if (EMPTY_FIELD_BOUNDARY > DECL_ALIGN (decl))
375 DECL_ALIGN (decl) = EMPTY_FIELD_BOUNDARY;
376 DECL_USER_ALIGN (decl) = 0;
382 /* See if we can use an ordinary integer mode for a bit-field.
383 Conditions are: a fixed size that is correct for another mode
384 and occupying a complete byte or bytes on proper boundary. */
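/* Illustrative example (target-dependent): an 'int' bit-field declared
   with a width of 16 bits that happens to land on a 16-bit boundary can
   be given HImode here and cease to be treated as a bit-field.  */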
385 if (TYPE_SIZE (type) != 0
386 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
387 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT)
389 enum machine_mode xmode
390 = mode_for_size_tree (DECL_SIZE (decl), MODE_INT, 1);
391 unsigned int xalign = GET_MODE_ALIGNMENT (xmode);
394 && !(xalign > BITS_PER_UNIT && DECL_PACKED (decl))
395 && (known_align == 0 || known_align >= xalign))
397 DECL_ALIGN (decl) = MAX (xalign, DECL_ALIGN (decl));
398 DECL_MODE (decl) = xmode;
399 DECL_BIT_FIELD (decl) = 0;
403 /* Turn off DECL_BIT_FIELD if we won't need it set. */
404 if (TYPE_MODE (type) == BLKmode && DECL_MODE (decl) == BLKmode
405 && known_align >= TYPE_ALIGN (type)
406 && DECL_ALIGN (decl) >= TYPE_ALIGN (type))
407 DECL_BIT_FIELD (decl) = 0;
409 else if (packed_p && DECL_USER_ALIGN (decl))
410 /* Don't touch DECL_ALIGN. For other packed fields, go ahead and
411 round up; we'll reduce it again below. We want packing to
412 supersede USER_ALIGN inherited from the type, but defer to
413 alignment explicitly specified on the field decl. */;
415 do_type_align (type, decl);
417 /* If the field is packed and not explicitly aligned, give it the
418 minimum alignment. Note that do_type_align may set
419 DECL_USER_ALIGN, so we need to check old_user_align instead. */
422 DECL_ALIGN (decl) = MIN (DECL_ALIGN (decl), BITS_PER_UNIT);
424 if (! packed_p && ! DECL_USER_ALIGN (decl))
426 /* Some targets (e.g. i386, VMS) limit struct field alignment
427 to a lower boundary than alignment of variables unless
428 it was overridden by attribute aligned. */
429 #ifdef BIGGEST_FIELD_ALIGNMENT
431 = MIN (DECL_ALIGN (decl), (unsigned) BIGGEST_FIELD_ALIGNMENT);
433 #ifdef ADJUST_FIELD_ALIGN
434 DECL_ALIGN (decl) = ADJUST_FIELD_ALIGN (decl, DECL_ALIGN (decl));
439 mfa = initial_max_fld_align * BITS_PER_UNIT;
441 mfa = maximum_field_alignment;
442 /* Should this be controlled by DECL_USER_ALIGN, too? */
444 DECL_ALIGN (decl) = MIN (DECL_ALIGN (decl), mfa);
447 /* Evaluate nonconstant size only once, either now or as soon as safe. */
448 if (DECL_SIZE (decl) != 0 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
449 DECL_SIZE (decl) = variable_size (DECL_SIZE (decl));
450 if (DECL_SIZE_UNIT (decl) != 0
451 && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST)
452 DECL_SIZE_UNIT (decl) = variable_size (DECL_SIZE_UNIT (decl));
454 /* If requested, warn about definitions of large data objects. */
456 && (code == VAR_DECL || code == PARM_DECL)
457 && ! DECL_EXTERNAL (decl))
459 tree size = DECL_SIZE_UNIT (decl);
461 if (size != 0 && TREE_CODE (size) == INTEGER_CST
462 && compare_tree_int (size, larger_than_size) > 0)
464 int size_as_int = TREE_INT_CST_LOW (size);
466 if (compare_tree_int (size, size_as_int) == 0)
467 warning (OPT_Wlarger_than_eq, "size of %q+D is %d bytes", decl, size_as_int);
469 warning (OPT_Wlarger_than_eq, "size of %q+D is larger than %wd bytes",
470 decl, larger_than_size);
474 /* If the RTL was already set, update its mode and mem attributes. */
477 PUT_MODE (rtl, DECL_MODE (decl));
478 SET_DECL_RTL (decl, 0);
479 set_mem_attributes (rtl, decl, 1);
480 SET_DECL_RTL (decl, rtl);
484 /* Given a VAR_DECL, PARM_DECL or RESULT_DECL, clears the results of
485 a previous call to layout_decl and calls it again. */
488 relayout_decl (tree decl)
490 DECL_SIZE (decl) = DECL_SIZE_UNIT (decl) = 0;
491 DECL_MODE (decl) = VOIDmode;
492 if (!DECL_USER_ALIGN (decl))
493 DECL_ALIGN (decl) = 0;
494 SET_DECL_RTL (decl, 0);
496 layout_decl (decl, 0);
499 /* Begin laying out type T, which may be a RECORD_TYPE, UNION_TYPE, or
500 QUAL_UNION_TYPE. Return a pointer to a struct record_layout_info which
501 is to be passed to all other layout functions for this record. It is the
502 responsibility of the caller to call `free' for the storage returned.
503 Note that garbage collection is not permitted until we finish laying
507 start_record_layout (tree t)
509 record_layout_info rli = xmalloc (sizeof (struct record_layout_info_s));
513 /* If the type has a minimum specified alignment (via an attribute
514 declaration, for example) use it -- otherwise, start with a
515 one-byte alignment. */
516 rli->record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (t));
517 rli->unpacked_align = rli->record_align;
518 rli->offset_align = MAX (rli->record_align, BIGGEST_ALIGNMENT);
520 #ifdef STRUCTURE_SIZE_BOUNDARY
521 /* Packed structures don't need to have minimum size. */
522 if (! TYPE_PACKED (t))
526 /* #pragma pack overrides STRUCTURE_SIZE_BOUNDARY. */
527 tmp = (unsigned) STRUCTURE_SIZE_BOUNDARY;
528 if (maximum_field_alignment != 0)
529 tmp = MIN (tmp, maximum_field_alignment);
530 rli->record_align = MAX (rli->record_align, tmp);
534 rli->offset = size_zero_node;
535 rli->bitpos = bitsize_zero_node;
537 rli->pending_statics = 0;
538 rli->packed_maybe_necessary = 0;
539 rli->remaining_in_alignment = 0;
544 /* These four routines perform computations that convert between
545 the offset/bitpos forms and byte and bit offsets. */
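/* Worked example (illustrative, assuming BITS_PER_UNIT == 8): for an
   OFFSET of 3 bytes and a BITPOS of 10 bits, bit_from_pos returns
   3 * 8 + 10 = 34 while byte_from_pos returns 3 + 10 / 8 = 4.  */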
548 bit_from_pos (tree offset, tree bitpos)
550 return size_binop (PLUS_EXPR, bitpos,
551 size_binop (MULT_EXPR,
552 fold_convert (bitsizetype, offset),
557 byte_from_pos (tree offset, tree bitpos)
559 return size_binop (PLUS_EXPR, offset,
560 fold_convert (sizetype,
561 size_binop (TRUNC_DIV_EXPR, bitpos,
562 bitsize_unit_node)));
566 pos_from_bit (tree *poffset, tree *pbitpos, unsigned int off_align,
569 *poffset = size_binop (MULT_EXPR,
570 fold_convert (sizetype,
571 size_binop (FLOOR_DIV_EXPR, pos,
572 bitsize_int (off_align))),
573 size_int (off_align / BITS_PER_UNIT));
574 *pbitpos = size_binop (FLOOR_MOD_EXPR, pos, bitsize_int (off_align));
577 /* Given a pointer to bit and byte offsets and an offset alignment,
578 normalize the offsets so they are within the alignment. */
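/* Worked example (illustrative, assuming an 8-bit byte): with
   OFF_ALIGN == 32 and *PBITPOS == 70, two whole 32-bit units are folded
   into *POFFSET (8 bytes) and *PBITPOS becomes 70 % 32 == 6.  */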
581 normalize_offset (tree *poffset, tree *pbitpos, unsigned int off_align)
583 /* If the bit position is now larger than it should be, adjust it
585 if (compare_tree_int (*pbitpos, off_align) >= 0)
587 tree extra_aligns = size_binop (FLOOR_DIV_EXPR, *pbitpos,
588 bitsize_int (off_align));
591 = size_binop (PLUS_EXPR, *poffset,
592 size_binop (MULT_EXPR,
593 fold_convert (sizetype, extra_aligns),
594 size_int (off_align / BITS_PER_UNIT)));
597 = size_binop (FLOOR_MOD_EXPR, *pbitpos, bitsize_int (off_align));
601 /* Print debugging information about the information in RLI. */
604 debug_rli (record_layout_info rli)
606 print_node_brief (stderr, "type", rli->t, 0);
607 print_node_brief (stderr, "\noffset", rli->offset, 0);
608 print_node_brief (stderr, " bitpos", rli->bitpos, 0);
610 fprintf (stderr, "\naligns: rec = %u, unpack = %u, off = %u\n",
611 rli->record_align, rli->unpacked_align,
614 /* The ms_struct code is the only one that uses this. */
615 if (targetm.ms_bitfield_layout_p (rli->t))
616 fprintf (stderr, "remaining in alignment = %u\n", rli->remaining_in_alignment);
618 if (rli->packed_maybe_necessary)
619 fprintf (stderr, "packed may be necessary\n");
621 if (rli->pending_statics)
623 fprintf (stderr, "pending statics:\n");
624 debug_tree (rli->pending_statics);
628 /* Given an RLI with a possibly-incremented BITPOS, adjust OFFSET and
629 BITPOS if necessary to keep BITPOS below OFFSET_ALIGN. */
632 normalize_rli (record_layout_info rli)
634 normalize_offset (&rli->offset, &rli->bitpos, rli->offset_align);
637 /* Returns the size in bytes allocated so far. */
640 rli_size_unit_so_far (record_layout_info rli)
642 return byte_from_pos (rli->offset, rli->bitpos);
645 /* Returns the size in bits allocated so far. */
648 rli_size_so_far (record_layout_info rli)
650 return bit_from_pos (rli->offset, rli->bitpos);
653 /* FIELD is about to be added to RLI->T. The alignment (in bits) of
654 the next available location within the record is given by KNOWN_ALIGN.
655 Update the variable alignment fields in RLI, and return the alignment
656 to give the FIELD. */
659 update_alignment_for_field (record_layout_info rli, tree field,
660 unsigned int known_align)
662 /* The alignment required for FIELD. */
663 unsigned int desired_align;
664 /* The type of this field. */
665 tree type = TREE_TYPE (field);
666 /* True if the field was explicitly aligned by the user. */
670 /* Do not attempt to align an ERROR_MARK node. */
671 if (TREE_CODE (type) == ERROR_MARK)
674 /* Lay out the field so we know what alignment it needs. */
675 layout_decl (field, known_align);
676 desired_align = DECL_ALIGN (field);
677 user_align = DECL_USER_ALIGN (field);
679 is_bitfield = (type != error_mark_node
680 && DECL_BIT_FIELD_TYPE (field)
681 && ! integer_zerop (TYPE_SIZE (type)));
683 /* Record must have at least as much alignment as any field.
684 Otherwise, the alignment of the field within the record is
686 if (targetm.ms_bitfield_layout_p (rli->t))
688 /* Here, the alignment of the underlying type of a bitfield can
689 affect the alignment of a record; even a zero-sized field
690 can do this. The alignment should be to the alignment of
691 the type, except that for zero-size bitfields this only
692 applies if there was an immediately prior, nonzero-size
693 bitfield. (That's the way it is, experimentally.) */
694 if ((!is_bitfield && !DECL_PACKED (field))
695 || (!integer_zerop (DECL_SIZE (field))
696 ? !DECL_PACKED (field)
698 && DECL_BIT_FIELD_TYPE (rli->prev_field)
699 && ! integer_zerop (DECL_SIZE (rli->prev_field)))))
701 unsigned int type_align = TYPE_ALIGN (type);
702 unsigned int type_size
703 = tree_low_cst (TYPE_SIZE (type), 1);
704 type_align = MAX (type_align, type_size);
705 type_align = MAX (type_align, desired_align);
706 if (maximum_field_alignment != 0)
707 type_align = MIN (type_align, maximum_field_alignment);
708 rli->record_align = MAX (rli->record_align, type_align);
709 rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
712 #ifdef PCC_BITFIELD_TYPE_MATTERS
713 else if (is_bitfield && PCC_BITFIELD_TYPE_MATTERS)
715 /* Named bit-fields cause the entire structure to have the
716 alignment implied by their type. Some targets also apply the same
717 rules to unnamed bitfields. */
718 if (DECL_NAME (field) != 0
719 || targetm.align_anon_bitfield ())
721 unsigned int type_align = TYPE_ALIGN (type);
723 #ifdef ADJUST_FIELD_ALIGN
724 if (! TYPE_USER_ALIGN (type))
725 type_align = ADJUST_FIELD_ALIGN (field, type_align);
728 /* Targets might choose to handle unnamed and hence possibly
729 zero-width bitfields. Those are not influenced by #pragmas
730 or packed attributes. */
731 if (integer_zerop (DECL_SIZE (field)))
733 if (initial_max_fld_align)
734 type_align = MIN (type_align,
735 initial_max_fld_align * BITS_PER_UNIT);
737 else if (maximum_field_alignment != 0)
738 type_align = MIN (type_align, maximum_field_alignment);
739 else if (DECL_PACKED (field))
740 type_align = MIN (type_align, BITS_PER_UNIT);
742 /* The alignment of the record is increased to the maximum
743 of the current alignment, the alignment indicated on the
744 field (i.e., the alignment specified by an __aligned__
745 attribute), and the alignment indicated by the type of
747 rli->record_align = MAX (rli->record_align, desired_align);
748 rli->record_align = MAX (rli->record_align, type_align);
751 rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
752 user_align |= TYPE_USER_ALIGN (type);
758 rli->record_align = MAX (rli->record_align, desired_align);
759 rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
762 TYPE_USER_ALIGN (rli->t) |= user_align;
764 return desired_align;
767 /* Called from place_field to handle unions. */
770 place_union_field (record_layout_info rli, tree field)
772 update_alignment_for_field (rli, field, /*known_align=*/0);
774 DECL_FIELD_OFFSET (field) = size_zero_node;
775 DECL_FIELD_BIT_OFFSET (field) = bitsize_zero_node;
776 SET_DECL_OFFSET_ALIGN (field, BIGGEST_ALIGNMENT);
778 /* If this is an ERROR_MARK return *after* having set the
779 field at the start of the union. This helps when parsing
781 if (TREE_CODE (TREE_TYPE (field)) == ERROR_MARK)
784 /* We assume the union's size will be a multiple of a byte so we don't
785 bother with BITPOS. */
786 if (TREE_CODE (rli->t) == UNION_TYPE)
787 rli->offset = size_binop (MAX_EXPR, rli->offset, DECL_SIZE_UNIT (field));
788 else if (TREE_CODE (rli->t) == QUAL_UNION_TYPE)
789 rli->offset = fold_build3 (COND_EXPR, sizetype,
790 DECL_QUALIFIER (field),
791 DECL_SIZE_UNIT (field), rli->offset);
794 #if defined (PCC_BITFIELD_TYPE_MATTERS) || defined (BITFIELD_NBYTES_LIMITED)
795 /* A bitfield of SIZE with a required access alignment of ALIGN is allocated
796 at BYTE_OFFSET / BIT_OFFSET. Return nonzero if the field would span more
797 units of alignment than the underlying TYPE. */
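/* Worked example (illustrative, assuming an 8-bit byte and a 32-bit
   type): BYTE_OFFSET 3, BIT_OFFSET 6, SIZE 28 and ALIGN 32 place the
   field at bit 30 of its unit; (30 + 28 + 31) / 32 == 2 units, which
   exceeds the type's single unit, so the function returns nonzero.  */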
799 excess_unit_span (HOST_WIDE_INT byte_offset, HOST_WIDE_INT bit_offset,
800 HOST_WIDE_INT size, HOST_WIDE_INT align, tree type)
802 /* Note that the calculation of OFFSET might overflow; we calculate it so
803 that we still get the right result as long as ALIGN is a power of two. */
804 unsigned HOST_WIDE_INT offset = byte_offset * BITS_PER_UNIT + bit_offset;
806 offset = offset % align;
807 return ((offset + size + align - 1) / align
808 > ((unsigned HOST_WIDE_INT) tree_low_cst (TYPE_SIZE (type), 1)
813 /* RLI contains information about the layout of a RECORD_TYPE. FIELD
814 is a FIELD_DECL to be added after those fields already present in
815 T. (FIELD is not actually added to the TYPE_FIELDS list here;
816 callers that desire that behavior must manually perform that step.) */
819 place_field (record_layout_info rli, tree field)
821 /* The alignment required for FIELD. */
822 unsigned int desired_align;
823 /* The alignment FIELD would have if we just dropped it into the
824 record as it presently stands. */
825 unsigned int known_align;
826 unsigned int actual_align;
827 /* The type of this field. */
828 tree type = TREE_TYPE (field);
830 gcc_assert (TREE_CODE (field) != ERROR_MARK);
832 /* If FIELD is static, then treat it like a separate variable, not
833 really like a structure field. If it is a FUNCTION_DECL, it's a
834 method. In both cases, all we do is lay out the decl, and we do
835 it *after* the record is laid out. */
836 if (TREE_CODE (field) == VAR_DECL)
838 rli->pending_statics = tree_cons (NULL_TREE, field,
839 rli->pending_statics);
843 /* Enumerators and enum types which are local to this class need not
844 be laid out. Likewise for initialized constant fields. */
845 else if (TREE_CODE (field) != FIELD_DECL)
848 /* Unions are laid out very differently than records, so split
849 that code off to another function. */
850 else if (TREE_CODE (rli->t) != RECORD_TYPE)
852 place_union_field (rli, field);
856 else if (TREE_CODE (type) == ERROR_MARK)
858 /* Place this field at the current allocation position, so we
859 maintain monotonicity. */
860 DECL_FIELD_OFFSET (field) = rli->offset;
861 DECL_FIELD_BIT_OFFSET (field) = rli->bitpos;
862 SET_DECL_OFFSET_ALIGN (field, rli->offset_align);
866 /* Work out the known alignment so far. Note that A & (-A) is the
867 value of the least-significant bit in A that is one. */
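/* For instance (illustrative), a bit position of 24 gives
   24 & -24 == 8, i.e. the field is known to sit on an 8-bit boundary.  */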
868 if (! integer_zerop (rli->bitpos))
869 known_align = (tree_low_cst (rli->bitpos, 1)
870 & - tree_low_cst (rli->bitpos, 1));
871 else if (integer_zerop (rli->offset))
873 else if (host_integerp (rli->offset, 1))
874 known_align = (BITS_PER_UNIT
875 * (tree_low_cst (rli->offset, 1)
876 & - tree_low_cst (rli->offset, 1)));
878 known_align = rli->offset_align;
880 desired_align = update_alignment_for_field (rli, field, known_align);
881 if (known_align == 0)
882 known_align = MAX (BIGGEST_ALIGNMENT, rli->record_align);
884 if (warn_packed && DECL_PACKED (field))
886 if (known_align >= TYPE_ALIGN (type))
888 if (TYPE_ALIGN (type) > desired_align)
890 if (STRICT_ALIGNMENT)
891 warning (OPT_Wattributes, "packed attribute causes "
892 "inefficient alignment for %q+D", field);
894 warning (OPT_Wattributes, "packed attribute is "
895 "unnecessary for %q+D", field);
899 rli->packed_maybe_necessary = 1;
902 /* Does this field automatically have alignment it needs by virtue
903 of the fields that precede it and the record's own alignment?
904 We already align ms_struct fields, so don't re-align them. */
905 if (known_align < desired_align
906 && !targetm.ms_bitfield_layout_p (rli->t))
908 /* No, we need to skip space before this field.
909 Bump the cumulative size to multiple of field alignment. */
911 warning (OPT_Wpadded, "padding struct to align %q+D", field);
913 /* If the alignment is still within offset_align, just align
915 if (desired_align < rli->offset_align)
916 rli->bitpos = round_up (rli->bitpos, desired_align);
919 /* First adjust OFFSET by the partial bits, then align. */
921 = size_binop (PLUS_EXPR, rli->offset,
922 fold_convert (sizetype,
923 size_binop (CEIL_DIV_EXPR, rli->bitpos,
924 bitsize_unit_node)));
925 rli->bitpos = bitsize_zero_node;
927 rli->offset = round_up (rli->offset, desired_align / BITS_PER_UNIT);
930 if (! TREE_CONSTANT (rli->offset))
931 rli->offset_align = desired_align;
935 /* Handle compatibility with PCC. Note that if the record has any
936 variable-sized fields, we need not worry about compatibility. */
937 #ifdef PCC_BITFIELD_TYPE_MATTERS
938 if (PCC_BITFIELD_TYPE_MATTERS
939 && ! targetm.ms_bitfield_layout_p (rli->t)
940 && TREE_CODE (field) == FIELD_DECL
941 && type != error_mark_node
942 && DECL_BIT_FIELD (field)
943 && ! DECL_PACKED (field)
944 && maximum_field_alignment == 0
945 && ! integer_zerop (DECL_SIZE (field))
946 && host_integerp (DECL_SIZE (field), 1)
947 && host_integerp (rli->offset, 1)
948 && host_integerp (TYPE_SIZE (type), 1))
950 unsigned int type_align = TYPE_ALIGN (type);
951 tree dsize = DECL_SIZE (field);
952 HOST_WIDE_INT field_size = tree_low_cst (dsize, 1);
953 HOST_WIDE_INT offset = tree_low_cst (rli->offset, 0);
954 HOST_WIDE_INT bit_offset = tree_low_cst (rli->bitpos, 0);
956 #ifdef ADJUST_FIELD_ALIGN
957 if (! TYPE_USER_ALIGN (type))
958 type_align = ADJUST_FIELD_ALIGN (field, type_align);
961 /* A bit field may not span more units of alignment of its type
962 than its type itself. Advance to next boundary if necessary. */
963 if (excess_unit_span (offset, bit_offset, field_size, type_align, type))
964 rli->bitpos = round_up (rli->bitpos, type_align);
966 TYPE_USER_ALIGN (rli->t) |= TYPE_USER_ALIGN (type);
970 #ifdef BITFIELD_NBYTES_LIMITED
971 if (BITFIELD_NBYTES_LIMITED
972 && ! targetm.ms_bitfield_layout_p (rli->t)
973 && TREE_CODE (field) == FIELD_DECL
974 && type != error_mark_node
975 && DECL_BIT_FIELD_TYPE (field)
976 && ! DECL_PACKED (field)
977 && ! integer_zerop (DECL_SIZE (field))
978 && host_integerp (DECL_SIZE (field), 1)
979 && host_integerp (rli->offset, 1)
980 && host_integerp (TYPE_SIZE (type), 1))
982 unsigned int type_align = TYPE_ALIGN (type);
983 tree dsize = DECL_SIZE (field);
984 HOST_WIDE_INT field_size = tree_low_cst (dsize, 1);
985 HOST_WIDE_INT offset = tree_low_cst (rli->offset, 0);
986 HOST_WIDE_INT bit_offset = tree_low_cst (rli->bitpos, 0);
988 #ifdef ADJUST_FIELD_ALIGN
989 if (! TYPE_USER_ALIGN (type))
990 type_align = ADJUST_FIELD_ALIGN (field, type_align);
993 if (maximum_field_alignment != 0)
994 type_align = MIN (type_align, maximum_field_alignment);
995 /* ??? This test is opposite the test in the containing if
996 statement, so this code is unreachable currently. */
997 else if (DECL_PACKED (field))
998 type_align = MIN (type_align, BITS_PER_UNIT);
1000 /* A bit field may not span the unit of alignment of its type.
1001 Advance to next boundary if necessary. */
1002 if (excess_unit_span (offset, bit_offset, field_size, type_align, type))
1003 rli->bitpos = round_up (rli->bitpos, type_align);
1005 TYPE_USER_ALIGN (rli->t) |= TYPE_USER_ALIGN (type);
1009 /* See the docs for TARGET_MS_BITFIELD_LAYOUT_P for details.
1011 When a bit field is inserted into a packed record, the whole
1012 size of the underlying type is used by one or more same-size
1013 adjacent bitfields. (That is, if it's long:3, 32 bits is
1014 used in the record, and any additional adjacent long bitfields are
1015 packed into the same chunk of 32 bits. However, if the size
1016 changes, a new field of that size is allocated.) In an unpacked
1017 record, this is the same as using alignment, but not equivalent
1020 Note: for compatibility, we use the type size, not the type alignment
1021 to determine alignment, since that matches the documentation. */
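/* Illustrative example (assuming a 32-bit 'int' and an 8-bit 'char'):
   in an ms_struct record such as
     struct s { int a : 3; int b : 6; char c : 2; };
   A and B share one 32-bit unit because their type sizes match, while C
   has a different type size and therefore starts a new 8-bit unit.  */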
1023 if (targetm.ms_bitfield_layout_p (rli->t))
1025 tree prev_saved = rli->prev_field;
1026 tree prev_type = prev_saved ? DECL_BIT_FIELD_TYPE (prev_saved) : NULL;
1028 /* rli->prev_field, if it exists, is a bitfield. */
1029 if (rli->prev_field)
1031 /* If both are bitfields, nonzero, and the same size, this is
1032 the middle of a run. Zero declared size fields are special
1033 and handled as "end of run". (Note: it's nonzero declared
1034 size, but equal type sizes!) (Since we know that both
1035 the current and previous fields are bitfields by the
1036 time we check it, DECL_SIZE must be present for both.) */
1037 if (DECL_BIT_FIELD_TYPE (field)
1038 && !integer_zerop (DECL_SIZE (field))
1039 && !integer_zerop (DECL_SIZE (rli->prev_field))
1040 && host_integerp (DECL_SIZE (rli->prev_field), 0)
1041 && host_integerp (TYPE_SIZE (type), 0)
1042 && simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (prev_type)))
1044 /* We're in the middle of a run of equal type size fields; make
1045 sure we realign if we run out of bits. (Not decl size,
1047 HOST_WIDE_INT bitsize = tree_low_cst (DECL_SIZE (field), 1);
1049 if (rli->remaining_in_alignment < bitsize)
1051 HOST_WIDE_INT typesize = tree_low_cst (TYPE_SIZE (type), 1);
1053 /* out of bits; bump up to next 'word'. */
1055 = size_binop (PLUS_EXPR, rli->bitpos,
1056 bitsize_int (rli->remaining_in_alignment));
1057 rli->prev_field = field;
1058 if (typesize < bitsize)
1059 rli->remaining_in_alignment = 0;
1061 rli->remaining_in_alignment = typesize - bitsize;
1064 rli->remaining_in_alignment -= bitsize;
1068 /* End of a run: if leaving a run of bitfields of the same type
1069 size, we have to "use up" the rest of the bits of the type
1072 Compute the new position as the sum of the size for the prior
1073 type and where we first started working on that type.
1074 Note: since the beginning of the field was aligned then
1075 of course the end will be too. No round needed. */
1077 if (!integer_zerop (DECL_SIZE (rli->prev_field)))
1080 = size_binop (PLUS_EXPR, rli->bitpos,
1081 bitsize_int (rli->remaining_in_alignment));
1084 /* We "use up" size zero fields; the code below should behave
1085 as if the prior field was not a bitfield. */
1088 /* Cause a new bitfield to be captured, either this time (if
1089 currently a bitfield) or next time we see one. */
1090 if (!DECL_BIT_FIELD_TYPE(field)
1091 || integer_zerop (DECL_SIZE (field)))
1092 rli->prev_field = NULL;
1095 normalize_rli (rli);
1098 /* If we're starting a new run of same size type bitfields
1099 (or a run of non-bitfields), set up the "first of the run"
1102 That is, if the current field is not a bitfield, or if there
1103 was a prior bitfield and the type sizes differ, or if there wasn't
1104 a prior bitfield and the size of the current field is nonzero.
1106 Note: we must be sure to test ONLY the type size if there was
1107 a prior bitfield and ONLY for the current field being zero if
1110 if (!DECL_BIT_FIELD_TYPE (field)
1111 || (prev_saved != NULL
1112 ? !simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (prev_type))
1113 : !integer_zerop (DECL_SIZE (field)) ))
1115 /* Never smaller than a byte for compatibility. */
1116 unsigned int type_align = BITS_PER_UNIT;
1118 /* (When not a bitfield), we could be seeing a flex array (with
1119 no DECL_SIZE). Since we won't be using remaining_in_alignment
1120 until we see a bitfield (and come by here again) we just skip
1122 if (DECL_SIZE (field) != NULL
1123 && host_integerp (TYPE_SIZE (TREE_TYPE (field)), 0)
1124 && host_integerp (DECL_SIZE (field), 0))
1126 HOST_WIDE_INT bitsize = tree_low_cst (DECL_SIZE (field), 1);
1127 HOST_WIDE_INT typesize
1128 = tree_low_cst (TYPE_SIZE (TREE_TYPE (field)), 1);
1130 if (typesize < bitsize)
1131 rli->remaining_in_alignment = 0;
1133 rli->remaining_in_alignment = typesize - bitsize;
1136 /* Now align (conventionally) for the new type. */
1137 type_align = TYPE_ALIGN (TREE_TYPE (field));
1139 if (maximum_field_alignment != 0)
1140 type_align = MIN (type_align, maximum_field_alignment);
1142 rli->bitpos = round_up (rli->bitpos, type_align);
1144 /* If we really aligned, don't allow subsequent bitfields
1146 rli->prev_field = NULL;
1150 /* Offset so far becomes the position of this field after normalizing. */
1151 normalize_rli (rli);
1152 DECL_FIELD_OFFSET (field) = rli->offset;
1153 DECL_FIELD_BIT_OFFSET (field) = rli->bitpos;
1154 SET_DECL_OFFSET_ALIGN (field, rli->offset_align);
1156 /* If this field ended up more aligned than we thought it would be (we
1157 approximate this by seeing if its position changed), lay out the field
1158 again; perhaps we can use an integral mode for it now. */
1159 if (! integer_zerop (DECL_FIELD_BIT_OFFSET (field)))
1160 actual_align = (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1161 & - tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1));
1162 else if (integer_zerop (DECL_FIELD_OFFSET (field)))
1163 actual_align = MAX (BIGGEST_ALIGNMENT, rli->record_align);
1164 else if (host_integerp (DECL_FIELD_OFFSET (field), 1))
1165 actual_align = (BITS_PER_UNIT
1166 * (tree_low_cst (DECL_FIELD_OFFSET (field), 1)
1167 & - tree_low_cst (DECL_FIELD_OFFSET (field), 1)));
1169 actual_align = DECL_OFFSET_ALIGN (field);
1170 /* ACTUAL_ALIGN is still the actual alignment *within the record*.
1171 Store / extract bit field operations will check the alignment of the
1172 record against the mode of bit fields. */
1174 if (known_align != actual_align)
1175 layout_decl (field, actual_align);
1177 if (rli->prev_field == NULL && DECL_BIT_FIELD_TYPE (field))
1178 rli->prev_field = field;
1180 /* Now add size of this field to the size of the record. If the size is
1181 not constant, treat the field as being a multiple of bytes and just
1182 adjust the offset, resetting the bit position. Otherwise, apportion the
1183 size amongst the bit position and offset. First handle the case of an
1184 unspecified size, which can happen when we have an invalid nested struct
1185 definition, such as struct j { struct j { int i; } }. The error message
1186 is printed in finish_struct. */
1187 if (DECL_SIZE (field) == 0)
1189 else if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
1190 || TREE_OVERFLOW (DECL_SIZE (field)))
1193 = size_binop (PLUS_EXPR, rli->offset,
1194 fold_convert (sizetype,
1195 size_binop (CEIL_DIV_EXPR, rli->bitpos,
1196 bitsize_unit_node)));
1198 = size_binop (PLUS_EXPR, rli->offset, DECL_SIZE_UNIT (field));
1199 rli->bitpos = bitsize_zero_node;
1200 rli->offset_align = MIN (rli->offset_align, desired_align);
1202 else if (targetm.ms_bitfield_layout_p (rli->t))
1204 rli->bitpos = size_binop (PLUS_EXPR, rli->bitpos, DECL_SIZE (field));
1206 /* If we ended a bitfield before the full length of the type then
1207 pad the struct out to the full length of the last type. */
1208 if ((TREE_CHAIN (field) == NULL
1209 || TREE_CODE (TREE_CHAIN (field)) != FIELD_DECL)
1210 && DECL_BIT_FIELD_TYPE (field)
1211 && !integer_zerop (DECL_SIZE (field)))
1212 rli->bitpos = size_binop (PLUS_EXPR, rli->bitpos,
1213 bitsize_int (rli->remaining_in_alignment));
1215 normalize_rli (rli);
1219 rli->bitpos = size_binop (PLUS_EXPR, rli->bitpos, DECL_SIZE (field));
1220 normalize_rli (rli);
1224 /* Assuming that all the fields have been laid out, this function uses
1225 RLI to compute the final TYPE_SIZE, TYPE_ALIGN, etc. for the type
1226 indicated by RLI. */
1229 finalize_record_size (record_layout_info rli)
1231 tree unpadded_size, unpadded_size_unit;
1233 /* Now we want just byte and bit offsets, so set the offset alignment
1234 to be a byte and then normalize. */
1235 rli->offset_align = BITS_PER_UNIT;
1236 normalize_rli (rli);
1238 /* Determine the desired alignment. */
1239 #ifdef ROUND_TYPE_ALIGN
1240 TYPE_ALIGN (rli->t) = ROUND_TYPE_ALIGN (rli->t, TYPE_ALIGN (rli->t),
1243 TYPE_ALIGN (rli->t) = MAX (TYPE_ALIGN (rli->t), rli->record_align);
1246 /* Compute the size so far. Be sure to allow for extra bits in the
1247 size in bytes. We have guaranteed above that it will be no more
1248 than a single byte. */
1249 unpadded_size = rli_size_so_far (rli);
1250 unpadded_size_unit = rli_size_unit_so_far (rli);
1251 if (! integer_zerop (rli->bitpos))
1253 = size_binop (PLUS_EXPR, unpadded_size_unit, size_one_node);
1255 /* Round the size up to be a multiple of the required alignment. */
1256 TYPE_SIZE (rli->t) = round_up (unpadded_size, TYPE_ALIGN (rli->t));
1257 TYPE_SIZE_UNIT (rli->t)
1258 = round_up (unpadded_size_unit, TYPE_ALIGN_UNIT (rli->t));
1260 if (TREE_CONSTANT (unpadded_size)
1261 && simple_cst_equal (unpadded_size, TYPE_SIZE (rli->t)) == 0)
1262 warning (OPT_Wpadded, "padding struct size to alignment boundary");
1264 if (warn_packed && TREE_CODE (rli->t) == RECORD_TYPE
1265 && TYPE_PACKED (rli->t) && ! rli->packed_maybe_necessary
1266 && TREE_CONSTANT (unpadded_size))
1270 #ifdef ROUND_TYPE_ALIGN
1272 = ROUND_TYPE_ALIGN (rli->t, TYPE_ALIGN (rli->t), rli->unpacked_align);
1274 rli->unpacked_align = MAX (TYPE_ALIGN (rli->t), rli->unpacked_align);
1277 unpacked_size = round_up (TYPE_SIZE (rli->t), rli->unpacked_align);
1278 if (simple_cst_equal (unpacked_size, TYPE_SIZE (rli->t)))
1280 TYPE_PACKED (rli->t) = 0;
1282 if (TYPE_NAME (rli->t))
1286 if (TREE_CODE (TYPE_NAME (rli->t)) == IDENTIFIER_NODE)
1287 name = IDENTIFIER_POINTER (TYPE_NAME (rli->t));
1289 name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (rli->t)));
1291 if (STRICT_ALIGNMENT)
1292 warning (OPT_Wpacked, "packed attribute causes inefficient "
1293 "alignment for %qs", name);
1295 warning (OPT_Wpacked,
1296 "packed attribute is unnecessary for %qs", name);
1300 if (STRICT_ALIGNMENT)
1301 warning (OPT_Wpacked,
1302 "packed attribute causes inefficient alignment");
1304 warning (OPT_Wpacked, "packed attribute is unnecessary");
1310 /* Compute the TYPE_MODE for the TYPE (which is a RECORD_TYPE). */
1313 compute_record_mode (tree type)
1316 enum machine_mode mode = VOIDmode;
1318 /* Most RECORD_TYPEs have BLKmode, so we start off assuming that.
1319 However, if possible, we use a mode that fits in a register
1320 instead, in order to allow for better optimization down the
1322 TYPE_MODE (type) = BLKmode;
1324 if (! host_integerp (TYPE_SIZE (type), 1))
1327 /* A record which has any BLKmode members must itself be
1328 BLKmode; it can't go in a register. Unless the member is
1329 BLKmode only because it isn't aligned. */
1330 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1332 if (TREE_CODE (field) != FIELD_DECL)
1335 if (TREE_CODE (TREE_TYPE (field)) == ERROR_MARK
1336 || (TYPE_MODE (TREE_TYPE (field)) == BLKmode
1337 && ! TYPE_NO_FORCE_BLK (TREE_TYPE (field))
1338 && !(TYPE_SIZE (TREE_TYPE (field)) != 0
1339 && integer_zerop (TYPE_SIZE (TREE_TYPE (field)))))
1340 || ! host_integerp (bit_position (field), 1)
1341 || DECL_SIZE (field) == 0
1342 || ! host_integerp (DECL_SIZE (field), 1))
1345 /* If this field is the whole struct, remember its mode so
1346 that, say, we can put a double in a class into a DF
1347 register instead of forcing it to live in the stack. */
1348 if (simple_cst_equal (TYPE_SIZE (type), DECL_SIZE (field)))
1349 mode = DECL_MODE (field);
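/* For example (illustrative, target-dependent): a struct whose only
   field is a 'double' of the same size as the struct records DFmode
   here, allowing the RECORD_TYPE itself to be given DFmode below.  */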
1351 #ifdef MEMBER_TYPE_FORCES_BLK
1352 /* With some targets, e.g. c4x, it is sub-optimal
1353 to access an aligned BLKmode structure as a scalar. */
1355 if (MEMBER_TYPE_FORCES_BLK (field, mode))
1357 #endif /* MEMBER_TYPE_FORCES_BLK */
1360 /* If we only have one real field, use its mode if that mode's size
1361 matches the type's size. This only applies to RECORD_TYPE. This
1362 does not apply to unions. */
1363 if (TREE_CODE (type) == RECORD_TYPE && mode != VOIDmode
1364 && host_integerp (TYPE_SIZE (type), 1)
1365 && GET_MODE_BITSIZE (mode) == TREE_INT_CST_LOW (TYPE_SIZE (type)))
1366 TYPE_MODE (type) = mode;
1368 TYPE_MODE (type) = mode_for_size_tree (TYPE_SIZE (type), MODE_INT, 1);
1370 /* If structure's known alignment is less than what the scalar
1371 mode would need, and it matters, then stick with BLKmode. */
1372 if (TYPE_MODE (type) != BLKmode
1374 && ! (TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT
1375 || TYPE_ALIGN (type) >= GET_MODE_ALIGNMENT (TYPE_MODE (type))))
1377 /* If this is the only reason this type is BLKmode, then
1378 don't force containing types to be BLKmode. */
1379 TYPE_NO_FORCE_BLK (type) = 1;
1380 TYPE_MODE (type) = BLKmode;
1384 /* Compute TYPE_SIZE and TYPE_ALIGN for TYPE, once it has been laid
1388 finalize_type_size (tree type)
1390 /* Normally, use the alignment corresponding to the mode chosen.
1391 However, where strict alignment is not required, avoid
1392 over-aligning structures, since most compilers do not do this
1395 if (TYPE_MODE (type) != BLKmode && TYPE_MODE (type) != VOIDmode
1396 && (STRICT_ALIGNMENT
1397 || (TREE_CODE (type) != RECORD_TYPE && TREE_CODE (type) != UNION_TYPE
1398 && TREE_CODE (type) != QUAL_UNION_TYPE
1399 && TREE_CODE (type) != ARRAY_TYPE)))
1401 unsigned mode_align = GET_MODE_ALIGNMENT (TYPE_MODE (type));
1403 /* Don't override a larger alignment requirement coming from a user
1404 alignment of one of the fields. */
1405 if (mode_align >= TYPE_ALIGN (type))
1407 TYPE_ALIGN (type) = mode_align;
1408 TYPE_USER_ALIGN (type) = 0;
1412 /* Do machine-dependent extra alignment. */
1413 #ifdef ROUND_TYPE_ALIGN
1415 = ROUND_TYPE_ALIGN (type, TYPE_ALIGN (type), BITS_PER_UNIT);
1418 /* If we failed to find a simple way to calculate the unit size
1419 of the type, find it by division. */
1420 if (TYPE_SIZE_UNIT (type) == 0 && TYPE_SIZE (type) != 0)
1421 /* TYPE_SIZE (type) is computed in bitsizetype. After the division, the
1422 result will fit in sizetype. We will get more efficient code using
1423 sizetype, so we force a conversion. */
1424 TYPE_SIZE_UNIT (type)
1425 = fold_convert (sizetype,
1426 size_binop (FLOOR_DIV_EXPR, TYPE_SIZE (type),
1427 bitsize_unit_node));
1429 if (TYPE_SIZE (type) != 0)
1431 TYPE_SIZE (type) = round_up (TYPE_SIZE (type), TYPE_ALIGN (type));
1432 TYPE_SIZE_UNIT (type) = round_up (TYPE_SIZE_UNIT (type),
1433 TYPE_ALIGN_UNIT (type));
1436 /* Evaluate nonconstant sizes only once, either now or as soon as safe. */
1437 if (TYPE_SIZE (type) != 0 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
1438 TYPE_SIZE (type) = variable_size (TYPE_SIZE (type));
1439 if (TYPE_SIZE_UNIT (type) != 0
1440 && TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)
1441 TYPE_SIZE_UNIT (type) = variable_size (TYPE_SIZE_UNIT (type));
1443 /* Also layout any other variants of the type. */
1444 if (TYPE_NEXT_VARIANT (type)
1445 || type != TYPE_MAIN_VARIANT (type))
1448 /* Record layout info of this variant. */
1449 tree size = TYPE_SIZE (type);
1450 tree size_unit = TYPE_SIZE_UNIT (type);
1451 unsigned int align = TYPE_ALIGN (type);
1452 unsigned int user_align = TYPE_USER_ALIGN (type);
1453 enum machine_mode mode = TYPE_MODE (type);
1455 /* Copy it into all variants. */
1456 for (variant = TYPE_MAIN_VARIANT (type);
1458 variant = TYPE_NEXT_VARIANT (variant))
1460 TYPE_SIZE (variant) = size;
1461 TYPE_SIZE_UNIT (variant) = size_unit;
1462 TYPE_ALIGN (variant) = align;
1463 TYPE_USER_ALIGN (variant) = user_align;
1464 TYPE_MODE (variant) = mode;
1469 /* Do all of the work required to layout the type indicated by RLI,
1470 once the fields have been laid out. This function will call `free'
1471 for RLI, unless FREE_P is false. Passing a value other than false
1472 for FREE_P is bad practice; this option only exists to support the
1476 finish_record_layout (record_layout_info rli, int free_p)
1480 /* Compute the final size. */
1481 finalize_record_size (rli);
1483 /* Compute the TYPE_MODE for the record. */
1484 compute_record_mode (rli->t);
1486 /* Perform any last tweaks to the TYPE_SIZE, etc. */
1487 finalize_type_size (rli->t);
1489 /* Propagate TYPE_PACKED to variants. With C++ templates,
1490 handle_packed_attribute is too early to do this. */
1491 for (variant = TYPE_NEXT_VARIANT (rli->t); variant;
1492 variant = TYPE_NEXT_VARIANT (variant))
1493 TYPE_PACKED (variant) = TYPE_PACKED (rli->t);
1495 /* Lay out any static members. This is done now because their type
1496 may use the record's type. */
1497 while (rli->pending_statics)
1499 layout_decl (TREE_VALUE (rli->pending_statics), 0);
1500 rli->pending_statics = TREE_CHAIN (rli->pending_statics);
1509 /* Finish processing a builtin RECORD_TYPE type TYPE. Its name is
1510 NAME, its fields are chained in reverse on FIELDS.
1512 If ALIGN_TYPE is non-null, it is given the same alignment as
1516 finish_builtin_struct (tree type, const char *name, tree fields,
1521 for (tail = NULL_TREE; fields; tail = fields, fields = next)
1523 DECL_FIELD_CONTEXT (fields) = type;
1524 next = TREE_CHAIN (fields);
1525 TREE_CHAIN (fields) = tail;
1527 TYPE_FIELDS (type) = tail;
1531 TYPE_ALIGN (type) = TYPE_ALIGN (align_type);
1532 TYPE_USER_ALIGN (type) = TYPE_USER_ALIGN (align_type);
1536 #if 0 /* not yet, should get fixed properly later */
1537 TYPE_NAME (type) = make_type_decl (get_identifier (name), type);
1539 TYPE_NAME (type) = build_decl (TYPE_DECL, get_identifier (name), type);
1541 TYPE_STUB_DECL (type) = TYPE_NAME (type);
1542 layout_decl (TYPE_NAME (type), 0);
1545 /* Calculate the mode, size, and alignment for TYPE.
1546 For an array type, calculate the element separation as well.
1547 Record TYPE on the chain of permanent or temporary types
1548 so that dbxout will find out about it.
1550 TYPE_SIZE of a type is nonzero if the type has been laid out already.
1551 layout_type does nothing on such a type.
1553 If the type is incomplete, its TYPE_SIZE remains zero. */
1556 layout_type (tree type)
1560 if (type == error_mark_node)
1563 /* Do nothing if type has been laid out before. */
1564 if (TYPE_SIZE (type))
1567 switch (TREE_CODE (type))
1570 /* This kind of type is the responsibility
1571 of the language-specific code. */
1574 case BOOLEAN_TYPE: /* Used for Java, Pascal, and Chill. */
1575 if (TYPE_PRECISION (type) == 0)
1576 TYPE_PRECISION (type) = 1; /* default to one byte/boolean. */
1578 /* ... fall through ... */
1582 if (TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST
1583 && tree_int_cst_sgn (TYPE_MIN_VALUE (type)) >= 0)
1584 TYPE_UNSIGNED (type) = 1;
1586 TYPE_MODE (type) = smallest_mode_for_size (TYPE_PRECISION (type),
1588 TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
1589 TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
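/* Illustrative example: an ENUMERAL_TYPE whose values all fit in
   0 .. 255, and hence has a precision of 8, gets QImode above (assuming
   an 8-bit QImode), so TYPE_SIZE becomes 8 bits and TYPE_SIZE_UNIT one
   byte.  */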
1593 TYPE_MODE (type) = mode_for_size (TYPE_PRECISION (type), MODE_FLOAT, 0);
1594 TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
1595 TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
1598 case FIXED_POINT_TYPE:
1599 /* TYPE_MODE (type) has been set already. */
1600 TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
1601 TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
1605 TYPE_UNSIGNED (type) = TYPE_UNSIGNED (TREE_TYPE (type));
1607 = mode_for_size (2 * TYPE_PRECISION (TREE_TYPE (type)),
1608 (TREE_CODE (TREE_TYPE (type)) == REAL_TYPE
1609 ? MODE_COMPLEX_FLOAT : MODE_COMPLEX_INT),
1611 TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
1612 TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
1617 int nunits = TYPE_VECTOR_SUBPARTS (type);
1618 tree innertype = TREE_TYPE (type);
1620 gcc_assert (!(nunits & (nunits - 1)));
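/* The assertion relies on NUNITS & (NUNITS - 1) being zero exactly when
   NUNITS is a power of two, e.g. 4 & 3 == 0 but 6 & 5 == 4.  */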
1622 /* Find an appropriate mode for the vector type. */
1623 if (TYPE_MODE (type) == VOIDmode)
1625 enum machine_mode innermode = TYPE_MODE (innertype);
1626 enum machine_mode mode;
1628 /* First, look for a supported vector type. */
1629 if (SCALAR_FLOAT_MODE_P (innermode))
1630 mode = MIN_MODE_VECTOR_FLOAT;
1631 else if (SCALAR_FRACT_MODE_P (innermode))
1632 mode = MIN_MODE_VECTOR_FRACT;
1633 else if (SCALAR_UFRACT_MODE_P (innermode))
1634 mode = MIN_MODE_VECTOR_UFRACT;
1635 else if (SCALAR_ACCUM_MODE_P (innermode))
1636 mode = MIN_MODE_VECTOR_ACCUM;
1637 else if (SCALAR_UACCUM_MODE_P (innermode))
1638 mode = MIN_MODE_VECTOR_UACCUM;
1640 mode = MIN_MODE_VECTOR_INT;
1642 for (; mode != VOIDmode ; mode = GET_MODE_WIDER_MODE (mode))
1643 if (GET_MODE_NUNITS (mode) == nunits
1644 && GET_MODE_INNER (mode) == innermode
1645 && targetm.vector_mode_supported_p (mode))
1648 /* For integers, try mapping it to a same-sized scalar mode. */
1649 if (mode == VOIDmode
1650 && GET_MODE_CLASS (innermode) == MODE_INT)
1651 mode = mode_for_size (nunits * GET_MODE_BITSIZE (innermode),
1654 if (mode == VOIDmode || !have_regs_of_mode[mode])
1655 TYPE_MODE (type) = BLKmode;
1657 TYPE_MODE (type) = mode;
1660 TYPE_SATURATING (type) = TYPE_SATURATING (TREE_TYPE (type));
1661 TYPE_UNSIGNED (type) = TYPE_UNSIGNED (TREE_TYPE (type));
1662 TYPE_SIZE_UNIT (type) = int_const_binop (MULT_EXPR,
1663 TYPE_SIZE_UNIT (innertype),
1664 size_int (nunits), 0);
1665 TYPE_SIZE (type) = int_const_binop (MULT_EXPR, TYPE_SIZE (innertype),
1666 bitsize_int (nunits), 0);
1668 /* Always naturally align vectors. This prevents ABI changes
1669 depending on whether or not native vector modes are supported. */
1670 TYPE_ALIGN (type) = tree_low_cst (TYPE_SIZE (type), 0);
1675 /* This is an incomplete type and so doesn't have a size. */
1676 TYPE_ALIGN (type) = 1;
1677 TYPE_USER_ALIGN (type) = 0;
1678 TYPE_MODE (type) = VOIDmode;
1682 TYPE_SIZE (type) = bitsize_int (POINTER_SIZE);
1683 TYPE_SIZE_UNIT (type) = size_int (POINTER_SIZE / BITS_PER_UNIT);
1684 /* A pointer might be MODE_PARTIAL_INT,
1685 but ptrdiff_t must be integral. */
1686 TYPE_MODE (type) = mode_for_size (POINTER_SIZE, MODE_INT, 0);
1691 /* It's hard to see what the mode and size of a function ought to
1692 be, but we do know the alignment is FUNCTION_BOUNDARY, so
1693 make it consistent with that. */
1694 TYPE_MODE (type) = mode_for_size (FUNCTION_BOUNDARY, MODE_INT, 0);
1695 TYPE_SIZE (type) = bitsize_int (FUNCTION_BOUNDARY);
1696 TYPE_SIZE_UNIT (type) = size_int (FUNCTION_BOUNDARY / BITS_PER_UNIT);
1700 case REFERENCE_TYPE:
1703 enum machine_mode mode = ((TREE_CODE (type) == REFERENCE_TYPE
1704 && reference_types_internal)
1705 ? Pmode : TYPE_MODE (type));
1707 int nbits = GET_MODE_BITSIZE (mode);
1709 TYPE_SIZE (type) = bitsize_int (nbits);
1710 TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (mode));
1711 TYPE_UNSIGNED (type) = 1;
1712 TYPE_PRECISION (type) = nbits;
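/* Note (illustrative): ordinary POINTER_TYPEs keep the mode already in
   TYPE_MODE, while REFERENCE_TYPEs switch to Pmode once a front end has
   called internal_reference_types, per the comment above the
   reference_types_internal flag.  */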
1718 tree index = TYPE_DOMAIN (type);
1719 tree element = TREE_TYPE (type);
1721 build_pointer_type (element);
1723 /* We need to know both bounds in order to compute the size. */
1724 if (index && TYPE_MAX_VALUE (index) && TYPE_MIN_VALUE (index)
1725 && TYPE_SIZE (element))
1727 tree ub = TYPE_MAX_VALUE (index);
1728 tree lb = TYPE_MIN_VALUE (index);
1732 /* The initial subtraction should happen in the original type so
1733 that (possible) negative values are handled appropriately. */
1734 length = size_binop (PLUS_EXPR, size_one_node,
1735 fold_convert (sizetype,
1736 fold_build2 (MINUS_EXPR,
1740 /* Special handling for arrays of bits (for Chill). */
1741 element_size = TYPE_SIZE (element);
1742 if (TYPE_PACKED (type) && INTEGRAL_TYPE_P (element)
1743 && (integer_zerop (TYPE_MAX_VALUE (element))
1744 || integer_onep (TYPE_MAX_VALUE (element)))
1745 && host_integerp (TYPE_MIN_VALUE (element), 1))
1747 HOST_WIDE_INT maxvalue
1748 = tree_low_cst (TYPE_MAX_VALUE (element), 1);
1749 HOST_WIDE_INT minvalue
1750 = tree_low_cst (TYPE_MIN_VALUE (element), 1);
1752 if (maxvalue - minvalue == 1
1753 && (maxvalue == 1 || maxvalue == 0))
1754 element_size = integer_one_node;
1757 /* If neither bound is a constant and sizetype is signed, make
1758 sure the size is never negative. We should really do this
1759 if *either* bound is non-constant, but this is the best
1760 compromise between C and Ada. */
1761 if (!TYPE_UNSIGNED (sizetype)
1762 && TREE_CODE (TYPE_MIN_VALUE (index)) != INTEGER_CST
1763 && TREE_CODE (TYPE_MAX_VALUE (index)) != INTEGER_CST)
1764 length = size_binop (MAX_EXPR, length, size_zero_node);
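/* Worked example (illustrative): a domain of 0 .. 9 gives
   LENGTH = 1 + (9 - 0) = 10, so an array of 32-bit elements ends up
   with a TYPE_SIZE of 10 * 32 = 320 bits below.  */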
1766 TYPE_SIZE (type) = size_binop (MULT_EXPR, element_size,
1767 fold_convert (bitsizetype,
1770 /* If we know the size of the element, calculate the total
1771 size directly, rather than do some division thing below.
1772 This optimization helps Fortran assumed-size arrays
1773 (where the size of the array is determined at runtime)
1775 Note that we can't do this in the case where the size of
1776 the elements is one bit since TYPE_SIZE_UNIT cannot be
1777 set correctly in that case. */
1778 if (TYPE_SIZE_UNIT (element) != 0 && ! integer_onep (element_size))
1779 TYPE_SIZE_UNIT (type)
1780 = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (element), length);
1783 /* Now round the alignment and size,
1784 using machine-dependent criteria if any. */
1786 #ifdef ROUND_TYPE_ALIGN
1788 = ROUND_TYPE_ALIGN (type, TYPE_ALIGN (element), BITS_PER_UNIT);
1790 TYPE_ALIGN (type) = MAX (TYPE_ALIGN (element), BITS_PER_UNIT);
1792 if (!TYPE_SIZE (element))
1793 /* We don't know the size of the underlying element type, so
1794 our alignment calculations will be wrong, forcing us to
1795 fall back on structural equality. */
1796 SET_TYPE_STRUCTURAL_EQUALITY (type);
1797 TYPE_USER_ALIGN (type) = TYPE_USER_ALIGN (element);
1798 TYPE_MODE (type) = BLKmode;
1799 if (TYPE_SIZE (type) != 0
1800 #ifdef MEMBER_TYPE_FORCES_BLK
1801 && ! MEMBER_TYPE_FORCES_BLK (type, VOIDmode)
1803 /* BLKmode elements force BLKmode aggregate;
1804 else extract/store fields may lose. */
1805 && (TYPE_MODE (TREE_TYPE (type)) != BLKmode
1806 || TYPE_NO_FORCE_BLK (TREE_TYPE (type))))
1808 /* One-element arrays get the component type's mode. */
1809 if (simple_cst_equal (TYPE_SIZE (type),
1810 TYPE_SIZE (TREE_TYPE (type))))
1811 TYPE_MODE (type) = TYPE_MODE (TREE_TYPE (type));
1814 = mode_for_size_tree (TYPE_SIZE (type), MODE_INT, 1);
1816 if (TYPE_MODE (type) != BLKmode
1817 && STRICT_ALIGNMENT && TYPE_ALIGN (type) < BIGGEST_ALIGNMENT
1818 && TYPE_ALIGN (type) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
1820 TYPE_NO_FORCE_BLK (type) = 1;
1821 TYPE_MODE (type) = BLKmode;
1824 /* When the element size is constant, check that it is at least as
1825 large as the element alignment. */
1826 if (TYPE_SIZE_UNIT (element)
1827 && TREE_CODE (TYPE_SIZE_UNIT (element)) == INTEGER_CST
1828 /* If TYPE_SIZE_UNIT overflowed, then it is certainly larger than
1830 && !TREE_OVERFLOW (TYPE_SIZE_UNIT (element))
1831 && !integer_zerop (TYPE_SIZE_UNIT (element))
1832 && compare_tree_int (TYPE_SIZE_UNIT (element),
1833 TYPE_ALIGN_UNIT (element)) < 0)
1834 error ("alignment of array elements is greater than element size");
1840 case QUAL_UNION_TYPE:
1843 record_layout_info rli;
1845 /* Initialize the layout information. */
1846 rli = start_record_layout (type);
1848 /* If this is a QUAL_UNION_TYPE, we want to process the fields
1849 in the reverse order in building the COND_EXPR that denotes
1850 its size. We reverse them again later. */
1851 if (TREE_CODE (type) == QUAL_UNION_TYPE)
1852 TYPE_FIELDS (type) = nreverse (TYPE_FIELDS (type));
1854 /* Place all the fields. */
1855 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1856 place_field (rli, field);
1858 if (TREE_CODE (type) == QUAL_UNION_TYPE)
1859 TYPE_FIELDS (type) = nreverse (TYPE_FIELDS (type));
1861 /* Finish laying out the record. */
	finish_record_layout (rli, /*free_p=*/true);
      }
      break;

    default:
      gcc_unreachable ();
    }
1870 /* Compute the final TYPE_SIZE, TYPE_ALIGN, etc. for TYPE. For
     records and unions, finish_record_layout already called this
     function.  */
1873 if (TREE_CODE (type) != RECORD_TYPE
1874 && TREE_CODE (type) != UNION_TYPE
1875 && TREE_CODE (type) != QUAL_UNION_TYPE)
1876 finalize_type_size (type);
  /* We should never see alias sets on incomplete aggregates.  And we
     should not call layout_type on aggregates that have already been
     laid out.  */
1880 if (AGGREGATE_TYPE_P (type))
    gcc_assert (!TYPE_ALIAS_SET_KNOWN_P (type));
}
/* Create and return a type for signed integers of PRECISION bits.  */

tree
make_signed_type (int precision)
{
  tree type = make_node (INTEGER_TYPE);

  TYPE_PRECISION (type) = precision;
  fixup_signed_type (type);
  return type;
}
/* Create and return a type for unsigned integers of PRECISION bits.  */

tree
make_unsigned_type (int precision)
{
  tree type = make_node (INTEGER_TYPE);

  TYPE_PRECISION (type) = precision;
  fixup_unsigned_type (type);
  return type;
}
/* Create and return a type for fract of PRECISION bits, UNSIGNEDP,
   and SATP.  */

tree
make_fract_type (int precision, int unsignedp, int satp)
{
  tree type = make_node (FIXED_POINT_TYPE);

  TYPE_PRECISION (type) = precision;
  if (satp)
    TYPE_SATURATING (type) = 1;

  /* Lay out the type: set its alignment, size, etc.  */
  if (unsignedp)
    {
      TYPE_UNSIGNED (type) = 1;
      TYPE_MODE (type) = mode_for_size (precision, MODE_UFRACT, 0);
    }
  else
    TYPE_MODE (type) = mode_for_size (precision, MODE_FRACT, 0);
  layout_type (type);
  return type;
}
/* Create and return a type for accum of PRECISION bits, UNSIGNEDP,
   and SATP.  */

tree
make_accum_type (int precision, int unsignedp, int satp)
{
  tree type = make_node (FIXED_POINT_TYPE);

  TYPE_PRECISION (type) = precision;
  if (satp)
    TYPE_SATURATING (type) = 1;

  /* Lay out the type: set its alignment, size, etc.  */
  if (unsignedp)
    {
      TYPE_UNSIGNED (type) = 1;
      TYPE_MODE (type) = mode_for_size (precision, MODE_UACCUM, 0);
    }
  else
    TYPE_MODE (type) = mode_for_size (precision, MODE_ACCUM, 0);
  layout_type (type);
  return type;
}
1962 /* Initialize sizetype and bitsizetype to a reasonable and temporary
1963 value to enable integer types to be created. */
void
initialize_sizetypes (bool signed_p)
{
  tree t = make_node (INTEGER_TYPE);
1969 int precision = GET_MODE_BITSIZE (SImode);
1971 TYPE_MODE (t) = SImode;
1972 TYPE_ALIGN (t) = GET_MODE_ALIGNMENT (SImode);
1973 TYPE_USER_ALIGN (t) = 0;
1974 TYPE_IS_SIZETYPE (t) = 1;
1975 TYPE_UNSIGNED (t) = !signed_p;
1976 TYPE_SIZE (t) = build_int_cst (t, precision);
1977 TYPE_SIZE_UNIT (t) = build_int_cst (t, GET_MODE_SIZE (SImode));
1978 TYPE_PRECISION (t) = precision;
1980 /* Set TYPE_MIN_VALUE and TYPE_MAX_VALUE. */
1981 set_min_and_max_values_for_integral_type (t, precision, !signed_p);
  sizetype = t;
  bitsizetype = build_distinct_type_copy (t);
}
1987 /* Make sizetype a version of TYPE, and initialize *sizetype
1988 accordingly. We do this by overwriting the stub sizetype and
1989 bitsizetype nodes created by initialize_sizetypes. This makes sure
1990 that (a) anything stubby about them no longer exists, (b) any
   INTEGER_CSTs created with such a type remain valid.  */
void
set_sizetype (tree type)
{
1996 int oprecision = TYPE_PRECISION (type);
1997 /* The *bitsizetype types use a precision that avoids overflows when
1998 calculating signed sizes / offsets in bits. However, when
     cross-compiling from a 32 bit to a 64 bit host, we are limited to 64 bit
     precision.  */
2001 int precision = MIN (MIN (oprecision + BITS_PER_UNIT_LOG + 1,
2002 MAX_FIXED_MODE_SIZE),
2003 2 * HOST_BITS_PER_WIDE_INT);
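  /* Worked example (illustrative): with a 64-bit sizetype and 8-bit units
     (BITS_PER_UNIT_LOG == 3), oprecision + BITS_PER_UNIT_LOG + 1 is 68, so
     the bit-size types get 68 bits of precision unless MAX_FIXED_MODE_SIZE
     or twice the host's HOST_WIDE_INT width is smaller.  */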
  tree t;

  gcc_assert (TYPE_UNSIGNED (type) == TYPE_UNSIGNED (sizetype));
2008 t = build_distinct_type_copy (type);
  /* We do want to use sizetype's cache, as we will be replacing that
     type.  */
2011 TYPE_CACHED_VALUES (t) = TYPE_CACHED_VALUES (sizetype);
2012 TYPE_CACHED_VALUES_P (t) = TYPE_CACHED_VALUES_P (sizetype);
2013 TREE_TYPE (TYPE_CACHED_VALUES (t)) = type;
2014 TYPE_UID (t) = TYPE_UID (sizetype);
2015 TYPE_IS_SIZETYPE (t) = 1;
2017 /* Replace our original stub sizetype. */
2018 memcpy (sizetype, t, tree_size (sizetype));
2019 TYPE_MAIN_VARIANT (sizetype) = sizetype;
2021 t = make_node (INTEGER_TYPE);
2022 TYPE_NAME (t) = get_identifier ("bit_size_type");
  /* We do want to use bitsizetype's cache, as we will be replacing that
     type.  */
2025 TYPE_CACHED_VALUES (t) = TYPE_CACHED_VALUES (bitsizetype);
2026 TYPE_CACHED_VALUES_P (t) = TYPE_CACHED_VALUES_P (bitsizetype);
2027 TYPE_PRECISION (t) = precision;
2028 TYPE_UID (t) = TYPE_UID (bitsizetype);
2029 TYPE_IS_SIZETYPE (t) = 1;
2031 /* Replace our original stub bitsizetype. */
2032 memcpy (bitsizetype, t, tree_size (bitsizetype));
2033 TYPE_MAIN_VARIANT (bitsizetype) = bitsizetype;
  if (TYPE_UNSIGNED (type))
    {
      fixup_unsigned_type (bitsizetype);
      ssizetype = build_distinct_type_copy (make_signed_type (oprecision));
      TYPE_IS_SIZETYPE (ssizetype) = 1;
      sbitsizetype = build_distinct_type_copy (make_signed_type (precision));
      TYPE_IS_SIZETYPE (sbitsizetype) = 1;
    }
  else
    {
      fixup_signed_type (bitsizetype);
      ssizetype = sizetype;
      sbitsizetype = bitsizetype;
    }
2050 /* If SIZETYPE is unsigned, we need to fix TYPE_MAX_VALUE so that
2051 it is sign extended in a way consistent with force_fit_type. */
  if (TYPE_UNSIGNED (type))
    {
      tree orig_max, new_max;

      orig_max = TYPE_MAX_VALUE (sizetype);

      /* Build a new node with the same values, but a different type.
	 Sign extend it to ensure consistency.  */
      new_max = build_int_cst_wide_type (sizetype,
					 TREE_INT_CST_LOW (orig_max),
					 TREE_INT_CST_HIGH (orig_max));
      TYPE_MAX_VALUE (sizetype) = new_max;
    }
}
/* TYPE is an integral type, i.e., an INTEGER_TYPE, ENUMERAL_TYPE
2068 or BOOLEAN_TYPE. Set TYPE_MIN_VALUE and TYPE_MAX_VALUE
2069 for TYPE, based on the PRECISION and whether or not the TYPE
2070 IS_UNSIGNED. PRECISION need not correspond to a width supported
2071 natively by the hardware; for example, on a machine with 8-bit,
   16-bit, and 32-bit register modes, PRECISION might be 7, 23, or
   61.  */

void
set_min_and_max_values_for_integral_type (tree type,
					  int precision,
					  bool is_unsigned)
{
  tree min_value, max_value;

  if (is_unsigned)
    {
      min_value = build_int_cst (type, 0);
      max_value
	= build_int_cst_wide (type, precision - HOST_BITS_PER_WIDE_INT >= 0
			      ? -1
			      : ((HOST_WIDE_INT) 1 << precision) - 1,
			      precision - HOST_BITS_PER_WIDE_INT > 0
			      ? ((unsigned HOST_WIDE_INT) ~0
				 >> (HOST_BITS_PER_WIDE_INT
				     - (precision - HOST_BITS_PER_WIDE_INT)))
			      : 0);
    }
  else
    {
      min_value
	= build_int_cst_wide (type,
			      (precision - HOST_BITS_PER_WIDE_INT > 0
			       ? 0
			       : (HOST_WIDE_INT) (-1) << (precision - 1)),
			      (((HOST_WIDE_INT) (-1)
				<< (precision - HOST_BITS_PER_WIDE_INT - 1 > 0
				    ? precision - HOST_BITS_PER_WIDE_INT - 1
				    : 0))));
      max_value
	= build_int_cst_wide (type,
			      (precision - HOST_BITS_PER_WIDE_INT > 0
			       ? -1
			       : ((HOST_WIDE_INT) 1 << (precision - 1)) - 1),
			      (precision - HOST_BITS_PER_WIDE_INT - 1 > 0
			       ? (((HOST_WIDE_INT) 1
				   << (precision - HOST_BITS_PER_WIDE_INT - 1))) - 1
			       : 0));
    }
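  /* Illustrative values (assuming HOST_BITS_PER_WIDE_INT is at least 8):
     a 7-bit signed type gets the bounds [-64, 63], while a 7-bit unsigned
     type gets [0, 127], matching the 7/23/61-bit examples above.  */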
2118 TYPE_MIN_VALUE (type) = min_value;
  TYPE_MAX_VALUE (type) = max_value;
}
2122 /* Set the extreme values of TYPE based on its precision in bits,
2123 then lay it out. Used when make_signed_type won't do
2124 because the tree code is not INTEGER_TYPE.
2125 E.g. for Pascal, when the -fsigned-char option is given. */
void
fixup_signed_type (tree type)
{
  int precision = TYPE_PRECISION (type);
  /* We cannot properly represent constants wider than
     2 * HOST_BITS_PER_WIDE_INT bits, but we still need the types,
     as they are used by i386 vector extensions and friends.  */
2135 if (precision > HOST_BITS_PER_WIDE_INT * 2)
2136 precision = HOST_BITS_PER_WIDE_INT * 2;
2138 set_min_and_max_values_for_integral_type (type, precision,
2139 /*is_unsigned=*/false);
  /* Lay out the type: set its alignment, size, etc.  */
  layout_type (type);
}
2145 /* Set the extreme values of TYPE based on its precision in bits,
2146 then lay it out. This is used both in `make_unsigned_type'
2147 and for enumeral types. */
void
fixup_unsigned_type (tree type)
{
  int precision = TYPE_PRECISION (type);
  /* We cannot properly represent constants wider than
     2 * HOST_BITS_PER_WIDE_INT bits, but we still need the types,
     as they are used by i386 vector extensions and friends.  */
2157 if (precision > HOST_BITS_PER_WIDE_INT * 2)
2158 precision = HOST_BITS_PER_WIDE_INT * 2;
2160 TYPE_UNSIGNED (type) = 1;
2162 set_min_and_max_values_for_integral_type (type, precision,
2163 /*is_unsigned=*/true);
  /* Lay out the type: set its alignment, size, etc.  */
  layout_type (type);
}
2169 /* Find the best machine mode to use when referencing a bit field of length
2170 BITSIZE bits starting at BITPOS.
2172 The underlying object is known to be aligned to a boundary of ALIGN bits.
2173 If LARGEST_MODE is not VOIDmode, it means that we should not use a mode
2174 larger than LARGEST_MODE (usually SImode).
2176 If no mode meets all these conditions, we return VOIDmode.
2178 If VOLATILEP is false and SLOW_BYTE_ACCESS is false, we return the
2179 smallest mode meeting these conditions.
2181 If VOLATILEP is false and SLOW_BYTE_ACCESS is true, we return the
   largest mode (but a mode no wider than UNITS_PER_WORD) that meets
   all the conditions.
2185 If VOLATILEP is true the narrow_volatile_bitfields target hook is used to
   decide which of the above modes should be used.  */

enum machine_mode
get_best_mode (int bitsize, int bitpos, unsigned int align,
	       enum machine_mode largest_mode, int volatilep)
{
2192 enum machine_mode mode;
2193 unsigned int unit = 0;
2195 /* Find the narrowest integer mode that contains the bit field. */
2196 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      unit = GET_MODE_BITSIZE (mode);
      if ((bitpos % unit) + bitsize <= unit)
	break;
    }
2204 if (mode == VOIDmode
2205 /* It is tempting to omit the following line
2206 if STRICT_ALIGNMENT is true.
2207 But that is incorrect, since if the bitfield uses part of 3 bytes
2208 and we use a 4-byte mode, we could get a spurious segv
2209 if the extra 4th byte is past the end of memory.
2210 (Though at least one Unix compiler ignores this problem:
	 that on the Sequent 386 machine.)  */
2212 || MIN (unit, BIGGEST_ALIGNMENT) > align
      || (largest_mode != VOIDmode && unit > GET_MODE_BITSIZE (largest_mode)))
    return VOIDmode;
2216 if ((SLOW_BYTE_ACCESS && ! volatilep)
      || (volatilep && !targetm.narrow_volatile_bitfield ()))
    {
      enum machine_mode wide_mode = VOIDmode, tmode;
2221 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT); tmode != VOIDmode;
	   tmode = GET_MODE_WIDER_MODE (tmode))
	{
	  unit = GET_MODE_BITSIZE (tmode);
	  if (bitpos / unit == (bitpos + bitsize - 1) / unit
	      && unit <= BITS_PER_WORD
	      && unit <= MIN (align, BIGGEST_ALIGNMENT)
	      && (largest_mode == VOIDmode
		  || unit <= GET_MODE_BITSIZE (largest_mode)))
	    wide_mode = tmode;
	}
      if (wide_mode != VOIDmode)
	return wide_mode;
    }

  return mode;
}
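/* Example of the above (illustrative, not in the original sources): for a
   3-bit field at bit offset 2 in a word-aligned object, QImode is the
   narrowest mode covering bits 2 through 4, so it is returned when
   SLOW_BYTE_ACCESS is false and the field is not volatile; otherwise the
   widest suitably aligned mode up to BITS_PER_WORD is preferred.  */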
2240 /* Gets minimal and maximal values for MODE (signed or unsigned depending on
2241 SIGN). The returned constants are made to be usable in TARGET_MODE. */
void
get_mode_bounds (enum machine_mode mode, int sign,
2245 enum machine_mode target_mode,
		 rtx *mmin, rtx *mmax)
{
  unsigned size = GET_MODE_BITSIZE (mode);
2249 unsigned HOST_WIDE_INT min_val, max_val;
  gcc_assert (size <= HOST_BITS_PER_WIDE_INT);

  if (sign)
    {
      min_val = -((unsigned HOST_WIDE_INT) 1 << (size - 1));
      max_val = ((unsigned HOST_WIDE_INT) 1 << (size - 1)) - 1;
    }
  else
    {
      min_val = 0;
      max_val = ((unsigned HOST_WIDE_INT) 1 << (size - 1) << 1) - 1;
    }
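  /* Illustrative values: for an 8-bit mode this yields -128/127 when SIGN is
     nonzero and 0/255 otherwise, before conversion to TARGET_MODE.  */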
2264 *mmin = gen_int_mode (min_val, target_mode);
  *mmax = gen_int_mode (max_val, target_mode);
}
2268 #include "gt-stor-layout.h"