1 /* C-compiler utilities for types and variables storage layout
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
25 #include "coretypes.h"
36 #include "langhooks.h"
40 /* Data type for the expressions representing sizes of data types.
41 It is the first integer type laid out. */
42 tree sizetype_tab[(int) TYPE_KIND_LAST];
44 /* If nonzero, this is an upper limit on alignment of structure fields.
45 The value is measured in bits. */
46 unsigned int maximum_field_alignment = TARGET_DEFAULT_PACK_STRUCT * BITS_PER_UNIT;
47 /* ... and its original value in bytes, specified via -fpack-struct=<value>. */
48 unsigned int initial_max_fld_align = TARGET_DEFAULT_PACK_STRUCT;
50 /* If nonzero, the alignment of a bitstring or (power-)set value, in bits.
51 May be overridden by front-ends. */
52 unsigned int set_alignment = 0;
54 /* Nonzero if all REFERENCE_TYPEs are internal and hence should be
55 allocated in Pmode, not ptr_mode. Set only by internal_reference_types
56 called only by a front end. */
57 static int reference_types_internal = 0;
59 static void finalize_record_size (record_layout_info);
60 static void finalize_type_size (tree);
61 static void place_union_field (record_layout_info, tree);
62 #if defined (PCC_BITFIELD_TYPE_MATTERS) || defined (BITFIELD_NBYTES_LIMITED)
63 static int excess_unit_span (HOST_WIDE_INT, HOST_WIDE_INT, HOST_WIDE_INT,
66 extern void debug_rli (record_layout_info);
68 /* SAVE_EXPRs for sizes of types and decls, waiting to be expanded. */
70 static GTY(()) tree pending_sizes;
72 /* Show that REFERENCE_TYPES are internal and should be Pmode. Called only
76 internal_reference_types (void)
78 reference_types_internal = 1;
81 /* Get a list of all the objects put on the pending sizes list. */
84 get_pending_sizes (void)
86 tree chain = pending_sizes;
92 /* Add EXPR to the pending sizes list. */
95 put_pending_size (tree expr)
97 /* Strip any simple arithmetic from EXPR to see if it has an underlying
99 expr = skip_simple_arithmetic (expr);
101 if (TREE_CODE (expr) == SAVE_EXPR)
102 pending_sizes = tree_cons (NULL_TREE, expr, pending_sizes);
105 /* Put a chain of objects into the pending sizes list, which must be
109 put_pending_sizes (tree chain)
111 gcc_assert (!pending_sizes);
112 pending_sizes = chain;
115 /* Given a size SIZE that may not be a constant, return a SAVE_EXPR
116 to serve as the actual size-expression for a type or decl. */
119 variable_size (tree size)
123 /* If the language-processor is to take responsibility for variable-sized
124 items (e.g., languages which have elaboration procedures like Ada),
125 just return SIZE unchanged. Likewise for self-referential sizes and
127 if (TREE_CONSTANT (size)
128 || TREE_CODE (size) == SAVE_EXPR
129 || lang_hooks.decls.global_bindings_p () < 0
130 || CONTAINS_PLACEHOLDER_P (size))
133 /* Force creation of a SAVE_EXPR. This solves (1) code duplication
134 problems between parent and nested functions that occasionally can't
135 be cleaned up because of portions of the expression escaping the
136 parent function via the FRAME object, and (2) tree sharing problems
137 between the type system and the gimple code, which can leak SSA_NAME
138 objects into e.g. TYPE_SIZE, which cause heartburn when emitting
139 debug information. */
140 size = build1 (SAVE_EXPR, TREE_TYPE (size), size);
142 /* If an array with a variable number of elements is declared, and
143 the elements require destruction, we will emit a cleanup for the
144 array. That cleanup is run both on normal exit from the block
145 and in the exception-handler for the block. Normally, when code
146 is used in both ordinary code and in an exception handler it is
147 `unsaved', i.e., all SAVE_EXPRs are recalculated. However, we do
148 not wish to do that here; the array-size is the same in both
150 save = skip_simple_arithmetic (size);
152 if (cfun && cfun->x_dont_save_pending_sizes_p)
153 /* The front-end doesn't want us to keep a list of the expressions
154 that determine sizes for variable size objects. Trust it. */
157 if (lang_hooks.decls.global_bindings_p ())
159 if (TREE_CONSTANT (size))
160 error ("type size can%'t be explicitly evaluated");
162 error ("variable-size type declared outside of any function");
164 return size_one_node;
167 put_pending_size (save);
172 #ifndef MAX_FIXED_MODE_SIZE
173 #define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
176 /* Return the machine mode to use for a nonscalar of SIZE bits. The
177 mode must be in class CLASS, and have exactly that many value bits;
178 it may have padding as well. If LIMIT is nonzero, modes wider
179 than MAX_FIXED_MODE_SIZE will not be used. */
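/* For example, on a typical target where BITS_PER_UNIT is 8 and SImode
   is a 32-bit integer mode (an illustration only; the set of available
   modes is target-dependent):

     mode_for_size (32, MODE_INT, 0)  => SImode
     mode_for_size (24, MODE_INT, 0)  => BLKmode (no 24-bit integer mode).  */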
182 mode_for_size (unsigned int size, enum mode_class class, int limit)
184 enum machine_mode mode;
186 if (limit && size > MAX_FIXED_MODE_SIZE)
189 /* Get the first mode which has this size, in the specified class. */
190 for (mode = GET_CLASS_NARROWEST_MODE (class); mode != VOIDmode;
191 mode = GET_MODE_WIDER_MODE (mode))
192 if (GET_MODE_PRECISION (mode) == size)
198 /* Similar, except passed a tree node. */
201 mode_for_size_tree (tree size, enum mode_class class, int limit)
203 if (TREE_CODE (size) != INTEGER_CST
204 || TREE_OVERFLOW (size)
205 /* What we really want to say here is that the size can fit in a
206 host integer, but we know there's no way we'd find a mode for
207 this many bits, so there's no point in doing the precise test. */
208 || compare_tree_int (size, 1000) > 0)
211 return mode_for_size (tree_low_cst (size, 1), class, limit);
214 /* Similar, but never return BLKmode; return the narrowest mode that
215 contains at least the requested number of value bits. */
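/* For example, on the same hypothetical target as above,
   smallest_mode_for_size (17, MODE_INT) would return SImode: HImode
   (16 bits) is too narrow, so the next wider mode is chosen.  */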
218 smallest_mode_for_size (unsigned int size, enum mode_class class)
220 enum machine_mode mode;
222 /* Get the first mode which has at least this size, in the
224 for (mode = GET_CLASS_NARROWEST_MODE (class); mode != VOIDmode;
225 mode = GET_MODE_WIDER_MODE (mode))
226 if (GET_MODE_PRECISION (mode) >= size)
232 /* Find an integer mode of the exact same size, or BLKmode on failure. */
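/* For example, on a target whose SFmode is 32 bits wide,
   int_mode_for_mode (SFmode) would return SImode (an illustration;
   the result depends on the target's available integer modes).  */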
235 int_mode_for_mode (enum machine_mode mode)
237 switch (GET_MODE_CLASS (mode))
240 case MODE_PARTIAL_INT:
243 case MODE_COMPLEX_INT:
244 case MODE_COMPLEX_FLOAT:
246 case MODE_VECTOR_INT:
247 case MODE_VECTOR_FLOAT:
248 mode = mode_for_size (GET_MODE_BITSIZE (mode), MODE_INT, 0);
255 /* ... fall through ... */
265 /* Return the alignment of MODE. This will be bounded by 1 and
266 BIGGEST_ALIGNMENT. */
269 get_mode_alignment (enum machine_mode mode)
271 return MIN (BIGGEST_ALIGNMENT, MAX (1, mode_base_align[mode]*BITS_PER_UNIT));
275 /* Subroutine of layout_decl: Force alignment required for the data type.
276 But if the decl itself wants greater alignment, don't override that. */
279 do_type_align (tree type, tree decl)
281 if (TYPE_ALIGN (type) > DECL_ALIGN (decl))
283 DECL_ALIGN (decl) = TYPE_ALIGN (type);
284 if (TREE_CODE (decl) == FIELD_DECL)
285 DECL_USER_ALIGN (decl) = TYPE_USER_ALIGN (type);
289 /* Set the size, mode and alignment of a ..._DECL node.
290 TYPE_DECL does need this for C++.
291 Note that LABEL_DECL and CONST_DECL nodes do not need this,
292 and FUNCTION_DECL nodes have them set up in a special (and simple) way.
293 Don't call layout_decl for them.
295 KNOWN_ALIGN is the amount of alignment we can assume this
296 decl has with no special effort. It is relevant only for FIELD_DECLs
297 and depends on the previous fields.
298 All that matters about KNOWN_ALIGN is which powers of 2 divide it.
299 If KNOWN_ALIGN is 0, it means, "as much alignment as you like":
300 the record will be aligned to suit. */
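/* For example, a KNOWN_ALIGN of 24 promises only 8-bit alignment,
   since 8 is the largest power of 2 dividing 24; a KNOWN_ALIGN of 32
   promises 32-bit alignment.  (Illustrative values only.)  */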
303 layout_decl (tree decl, unsigned int known_align)
305 tree type = TREE_TYPE (decl);
306 enum tree_code code = TREE_CODE (decl);
309 if (code == CONST_DECL)
312 gcc_assert (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL
313 || code == TYPE_DECL || code == FIELD_DECL);
315 rtl = DECL_RTL_IF_SET (decl);
317 if (type == error_mark_node)
318 type = void_type_node;
320 /* Usually the size and mode come from the data type without change,
321 however, the front-end may set the explicit width of the field, so its
322 size may not be the same as the size of its type. This happens with
323 bitfields, of course (an `int' bitfield may be only 2 bits, say), but it
324 also happens with other fields. For example, the C++ front-end creates
325 zero-sized fields corresponding to empty base classes, and depends on
326 layout_type setting DECL_FIELD_BITPOS correctly for the field. Set the
327 size in bytes from the size in bits. If we have already set the mode,
328 don't set it again since we can be called twice for FIELD_DECLs. */
330 DECL_UNSIGNED (decl) = TYPE_UNSIGNED (type);
331 if (DECL_MODE (decl) == VOIDmode)
332 DECL_MODE (decl) = TYPE_MODE (type);
334 if (DECL_SIZE (decl) == 0)
336 DECL_SIZE (decl) = unshare_expr (TYPE_SIZE (type));
337 DECL_SIZE_UNIT (decl) = unshare_expr (TYPE_SIZE_UNIT (type));
339 else if (DECL_SIZE_UNIT (decl) == 0)
340 DECL_SIZE_UNIT (decl)
341 = fold_convert (sizetype, size_binop (CEIL_DIV_EXPR, DECL_SIZE (decl),
344 if (code != FIELD_DECL)
345 /* For non-fields, update the alignment from the type. */
346 do_type_align (type, decl);
348 /* For fields, it's a bit more complicated... */
350 bool old_user_align = DECL_USER_ALIGN (decl);
352 if (DECL_BIT_FIELD (decl))
354 DECL_BIT_FIELD_TYPE (decl) = type;
356 /* A zero-length bit-field affects the alignment of the next
358 if (integer_zerop (DECL_SIZE (decl))
359 && ! DECL_PACKED (decl)
360 && ! targetm.ms_bitfield_layout_p (DECL_FIELD_CONTEXT (decl)))
362 #ifdef PCC_BITFIELD_TYPE_MATTERS
363 if (PCC_BITFIELD_TYPE_MATTERS)
364 do_type_align (type, decl);
368 #ifdef EMPTY_FIELD_BOUNDARY
369 if (EMPTY_FIELD_BOUNDARY > DECL_ALIGN (decl))
371 DECL_ALIGN (decl) = EMPTY_FIELD_BOUNDARY;
372 DECL_USER_ALIGN (decl) = 0;
378 /* See if we can use an ordinary integer mode for a bit-field.
379 Conditions are: a fixed size that is correct for another mode
380 and occupying a complete byte or bytes on proper boundary. */
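/* For instance, a bit-field declared as `int f : 8' that happens to
   start on a byte boundary with sufficient known alignment can be
   given QImode and treated as an ordinary field.  (Illustration only;
   whether this applies depends on the layout computed so far.)  */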
381 if (TYPE_SIZE (type) != 0
382 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
383 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT)
385 enum machine_mode xmode
386 = mode_for_size_tree (DECL_SIZE (decl), MODE_INT, 1);
390 || known_align >= GET_MODE_ALIGNMENT (xmode)))
392 DECL_ALIGN (decl) = MAX (GET_MODE_ALIGNMENT (xmode),
394 DECL_MODE (decl) = xmode;
395 DECL_BIT_FIELD (decl) = 0;
399 /* Turn off DECL_BIT_FIELD if we won't need it set. */
400 if (TYPE_MODE (type) == BLKmode && DECL_MODE (decl) == BLKmode
401 && known_align >= TYPE_ALIGN (type)
402 && DECL_ALIGN (decl) >= TYPE_ALIGN (type))
403 DECL_BIT_FIELD (decl) = 0;
405 else if (DECL_PACKED (decl) && DECL_USER_ALIGN (decl))
406 /* Don't touch DECL_ALIGN. For other packed fields, go ahead and
407 round up; we'll reduce it again below. We want packing to
408 supersede USER_ALIGN inherited from the type, but defer to
409 alignment explicitly specified on the field decl. */;
411 do_type_align (type, decl);
413 /* If the field is of variable size, we can't misalign it since we
414 have no way to make a temporary to align the result. But this
415 isn't an issue if the decl is not addressable. Likewise if it
418 Note that do_type_align may set DECL_USER_ALIGN, so we need to
419 check old_user_align instead. */
420 if (DECL_PACKED (decl)
422 && (DECL_NONADDRESSABLE_P (decl)
423 || DECL_SIZE_UNIT (decl) == 0
424 || TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST))
425 DECL_ALIGN (decl) = MIN (DECL_ALIGN (decl), BITS_PER_UNIT);
427 if (! DECL_USER_ALIGN (decl) && ! DECL_PACKED (decl))
429 /* Some targets (e.g., i386, VMS) limit struct field alignment
430 to a lower boundary than alignment of variables unless
431 it was overridden by attribute aligned. */
432 #ifdef BIGGEST_FIELD_ALIGNMENT
434 = MIN (DECL_ALIGN (decl), (unsigned) BIGGEST_FIELD_ALIGNMENT);
436 #ifdef ADJUST_FIELD_ALIGN
437 DECL_ALIGN (decl) = ADJUST_FIELD_ALIGN (decl, DECL_ALIGN (decl));
441 /* Should this be controlled by DECL_USER_ALIGN, too? */
442 if (maximum_field_alignment != 0)
443 DECL_ALIGN (decl) = MIN (DECL_ALIGN (decl), maximum_field_alignment);
446 /* Evaluate nonconstant size only once, either now or as soon as safe. */
447 if (DECL_SIZE (decl) != 0 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
448 DECL_SIZE (decl) = variable_size (DECL_SIZE (decl));
449 if (DECL_SIZE_UNIT (decl) != 0
450 && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST)
451 DECL_SIZE_UNIT (decl) = variable_size (DECL_SIZE_UNIT (decl));
453 /* If requested, warn about definitions of large data objects. */
455 && (code == VAR_DECL || code == PARM_DECL)
456 && ! DECL_EXTERNAL (decl))
458 tree size = DECL_SIZE_UNIT (decl);
460 if (size != 0 && TREE_CODE (size) == INTEGER_CST
461 && compare_tree_int (size, larger_than_size) > 0)
463 int size_as_int = TREE_INT_CST_LOW (size);
465 if (compare_tree_int (size, size_as_int) == 0)
466 warning ("%Jsize of %qD is %d bytes", decl, decl, size_as_int);
468 warning ("%Jsize of %qD is larger than %d bytes",
469 decl, decl, larger_than_size);
473 /* If the RTL was already set, update its mode and mem attributes. */
476 PUT_MODE (rtl, DECL_MODE (decl));
477 SET_DECL_RTL (decl, 0);
478 set_mem_attributes (rtl, decl, 1);
479 SET_DECL_RTL (decl, rtl);
483 /* Given a VAR_DECL, PARM_DECL or RESULT_DECL, clears the results of
484 a previous call to layout_decl and calls it again. */
487 relayout_decl (tree decl)
489 DECL_SIZE (decl) = DECL_SIZE_UNIT (decl) = 0;
490 DECL_MODE (decl) = VOIDmode;
491 DECL_ALIGN (decl) = 0;
492 SET_DECL_RTL (decl, 0);
494 layout_decl (decl, 0);
497 /* Hook for a front-end function that can modify the record layout as needed
498 immediately before it is finalized. */
500 void (*lang_adjust_rli) (record_layout_info) = 0;
503 set_lang_adjust_rli (void (*f) (record_layout_info))
508 /* Begin laying out type T, which may be a RECORD_TYPE, UNION_TYPE, or
509 QUAL_UNION_TYPE. Return a pointer to a struct record_layout_info which
510 is to be passed to all other layout functions for this record. It is the
511 responsibility of the caller to call `free' for the storage returned.
512 Note that garbage collection is not permitted until we finish laying
516 start_record_layout (tree t)
518 record_layout_info rli = xmalloc (sizeof (struct record_layout_info_s));
522 /* If the type has a minimum specified alignment (via an attribute
523 declaration, for example) use it -- otherwise, start with a
524 one-byte alignment. */
525 rli->record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (t));
526 rli->unpacked_align = rli->record_align;
527 rli->offset_align = MAX (rli->record_align, BIGGEST_ALIGNMENT);
529 #ifdef STRUCTURE_SIZE_BOUNDARY
530 /* Packed structures don't need to have a minimum size. */
531 if (! TYPE_PACKED (t))
532 rli->record_align = MAX (rli->record_align, (unsigned) STRUCTURE_SIZE_BOUNDARY);
535 rli->offset = size_zero_node;
536 rli->bitpos = bitsize_zero_node;
538 rli->pending_statics = 0;
539 rli->packed_maybe_necessary = 0;
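/* A caller typically drives these routines roughly as follows (a
   sketch only; see layout_type below for the real sequence):

     record_layout_info rli = start_record_layout (t);
     for (field = TYPE_FIELDS (t); field; field = TREE_CHAIN (field))
       place_field (rli, field);
     finish_record_layout (rli, true);  */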
544 /* These four routines perform computations that convert between
545 the offset/bitpos forms and byte and bit offsets. */
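/* For example, with BITS_PER_UNIT == 8, an OFFSET of 3 (bytes) and a
   BITPOS of 10 (bits) describe bit position 3*8 + 10 == 34 and byte
   position 3 + 10/8 == 4.  (Illustrative numbers only.)  */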
548 bit_from_pos (tree offset, tree bitpos)
550 return size_binop (PLUS_EXPR, bitpos,
551 size_binop (MULT_EXPR,
552 fold_convert (bitsizetype, offset),
557 byte_from_pos (tree offset, tree bitpos)
559 return size_binop (PLUS_EXPR, offset,
560 fold_convert (sizetype,
561 size_binop (TRUNC_DIV_EXPR, bitpos,
562 bitsize_unit_node)));
566 pos_from_bit (tree *poffset, tree *pbitpos, unsigned int off_align,
569 *poffset = size_binop (MULT_EXPR,
570 fold_convert (sizetype,
571 size_binop (FLOOR_DIV_EXPR, pos,
572 bitsize_int (off_align))),
573 size_int (off_align / BITS_PER_UNIT));
574 *pbitpos = size_binop (FLOOR_MOD_EXPR, pos, bitsize_int (off_align));
577 /* Given a pointer to bit and byte offsets and an offset alignment,
578 normalize the offsets so they are within the alignment. */
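/* Worked example (hypothetical values): with OFF_ALIGN == 32 and
   BITS_PER_UNIT == 8, a BITPOS of 70 normalizes to
     OFFSET += (70 / 32) * (32 / 8) == 8,   BITPOS = 70 % 32 == 6,
   so the bit position stays below the offset alignment.  */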
581 normalize_offset (tree *poffset, tree *pbitpos, unsigned int off_align)
583 /* If the bit position is now larger than it should be, adjust it
585 if (compare_tree_int (*pbitpos, off_align) >= 0)
587 tree extra_aligns = size_binop (FLOOR_DIV_EXPR, *pbitpos,
588 bitsize_int (off_align));
591 = size_binop (PLUS_EXPR, *poffset,
592 size_binop (MULT_EXPR,
593 fold_convert (sizetype, extra_aligns),
594 size_int (off_align / BITS_PER_UNIT)));
597 = size_binop (FLOOR_MOD_EXPR, *pbitpos, bitsize_int (off_align));
601 /* Print debugging information about the layout information in RLI. */
604 debug_rli (record_layout_info rli)
606 print_node_brief (stderr, "type", rli->t, 0);
607 print_node_brief (stderr, "\noffset", rli->offset, 0);
608 print_node_brief (stderr, " bitpos", rli->bitpos, 0);
610 fprintf (stderr, "\naligns: rec = %u, unpack = %u, off = %u\n",
611 rli->record_align, rli->unpacked_align,
613 if (rli->packed_maybe_necessary)
614 fprintf (stderr, "packed may be necessary\n");
616 if (rli->pending_statics)
618 fprintf (stderr, "pending statics:\n");
619 debug_tree (rli->pending_statics);
623 /* Given an RLI with a possibly-incremented BITPOS, adjust OFFSET and
624 BITPOS if necessary to keep BITPOS below OFFSET_ALIGN. */
627 normalize_rli (record_layout_info rli)
629 normalize_offset (&rli->offset, &rli->bitpos, rli->offset_align);
632 /* Returns the size in bytes allocated so far. */
635 rli_size_unit_so_far (record_layout_info rli)
637 return byte_from_pos (rli->offset, rli->bitpos);
640 /* Returns the size in bits allocated so far. */
643 rli_size_so_far (record_layout_info rli)
645 return bit_from_pos (rli->offset, rli->bitpos);
648 /* FIELD is about to be added to RLI->T. The alignment (in bits) of
649 the next available location is given by KNOWN_ALIGN. Update the
650 variable alignment fields in RLI, and return the alignment to give
654 update_alignment_for_field (record_layout_info rli, tree field,
655 unsigned int known_align)
657 /* The alignment required for FIELD. */
658 unsigned int desired_align;
659 /* The type of this field. */
660 tree type = TREE_TYPE (field);
661 /* True if the field was explicitly aligned by the user. */
665 /* Lay out the field so we know what alignment it needs. */
666 layout_decl (field, known_align);
667 desired_align = DECL_ALIGN (field);
668 user_align = DECL_USER_ALIGN (field);
670 is_bitfield = (type != error_mark_node
671 && DECL_BIT_FIELD_TYPE (field)
672 && ! integer_zerop (TYPE_SIZE (type)));
674 /* Record must have at least as much alignment as any field.
675 Otherwise, the alignment of the field within the record is
677 if (is_bitfield && targetm.ms_bitfield_layout_p (rli->t))
679 /* Here, the alignment of the underlying type of a bitfield can
680 affect the alignment of a record; even a zero-sized field
681 can do this. The alignment should be to the alignment of
682 the type, except that for zero-size bitfields this only
683 applies if there was an immediately prior, nonzero-size
684 bitfield. (That's the way it is, experimentally.) */
685 if (! integer_zerop (DECL_SIZE (field))
686 ? ! DECL_PACKED (field)
688 && DECL_BIT_FIELD_TYPE (rli->prev_field)
689 && ! integer_zerop (DECL_SIZE (rli->prev_field))))
691 unsigned int type_align = TYPE_ALIGN (type);
692 type_align = MAX (type_align, desired_align);
693 if (maximum_field_alignment != 0)
694 type_align = MIN (type_align, maximum_field_alignment);
695 rli->record_align = MAX (rli->record_align, type_align);
696 rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
699 #ifdef PCC_BITFIELD_TYPE_MATTERS
700 else if (is_bitfield && PCC_BITFIELD_TYPE_MATTERS)
702 /* Named bit-fields cause the entire structure to have the
703 alignment implied by their type. Some targets also apply the same
704 rules to unnamed bitfields. */
705 if (DECL_NAME (field) != 0
706 || targetm.align_anon_bitfield ())
708 unsigned int type_align = TYPE_ALIGN (type);
710 #ifdef ADJUST_FIELD_ALIGN
711 if (! TYPE_USER_ALIGN (type))
712 type_align = ADJUST_FIELD_ALIGN (field, type_align);
715 if (maximum_field_alignment != 0)
716 type_align = MIN (type_align, maximum_field_alignment);
717 else if (DECL_PACKED (field))
718 type_align = MIN (type_align, BITS_PER_UNIT);
720 /* The alignment of the record is increased to the maximum
721 of the current alignment, the alignment indicated on the
722 field (i.e., the alignment specified by an __aligned__
723 attribute), and the alignment indicated by the type of
725 rli->record_align = MAX (rli->record_align, desired_align);
726 rli->record_align = MAX (rli->record_align, type_align);
729 rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
730 user_align |= TYPE_USER_ALIGN (type);
736 rli->record_align = MAX (rli->record_align, desired_align);
737 rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
740 TYPE_USER_ALIGN (rli->t) |= user_align;
742 return desired_align;
745 /* Called from place_field to handle unions. */
748 place_union_field (record_layout_info rli, tree field)
750 update_alignment_for_field (rli, field, /*known_align=*/0);
752 DECL_FIELD_OFFSET (field) = size_zero_node;
753 DECL_FIELD_BIT_OFFSET (field) = bitsize_zero_node;
754 SET_DECL_OFFSET_ALIGN (field, BIGGEST_ALIGNMENT);
756 /* We assume the union's size will be a multiple of a byte so we don't
757 bother with BITPOS. */
758 if (TREE_CODE (rli->t) == UNION_TYPE)
759 rli->offset = size_binop (MAX_EXPR, rli->offset, DECL_SIZE_UNIT (field));
760 else if (TREE_CODE (rli->t) == QUAL_UNION_TYPE)
761 rli->offset = fold (build3 (COND_EXPR, sizetype,
762 DECL_QUALIFIER (field),
763 DECL_SIZE_UNIT (field), rli->offset));
766 #if defined (PCC_BITFIELD_TYPE_MATTERS) || defined (BITFIELD_NBYTES_LIMITED)
767 /* A bitfield of SIZE with a required access alignment of ALIGN is allocated
768 at BYTE_OFFSET / BIT_OFFSET. Return nonzero if the field would span more
769 units of alignment than the underlying TYPE. */
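/* Worked example (hypothetical values): a 6-bit field placed at
   byte 3, bit 5, with a 16-bit `short' type and ALIGN == 16 occupies
   bits 29..34 and therefore crosses a 16-bit boundary, so
   excess_unit_span returns nonzero.  */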
771 excess_unit_span (HOST_WIDE_INT byte_offset, HOST_WIDE_INT bit_offset,
772 HOST_WIDE_INT size, HOST_WIDE_INT align, tree type)
774 /* Note that the calculation of OFFSET might overflow; we calculate it so
775 that we still get the right result as long as ALIGN is a power of two. */
776 unsigned HOST_WIDE_INT offset = byte_offset * BITS_PER_UNIT + bit_offset;
778 offset = offset % align;
779 return ((offset + size + align - 1) / align
780 > ((unsigned HOST_WIDE_INT) tree_low_cst (TYPE_SIZE (type), 1)
785 /* RLI contains information about the layout of a RECORD_TYPE. FIELD
786 is a FIELD_DECL to be added after those fields already present in
787 T. (FIELD is not actually added to the TYPE_FIELDS list here;
788 callers that desire that behavior must manually perform that step.) */
791 place_field (record_layout_info rli, tree field)
793 /* The alignment required for FIELD. */
794 unsigned int desired_align;
795 /* The alignment FIELD would have if we just dropped it into the
796 record as it presently stands. */
797 unsigned int known_align;
798 unsigned int actual_align;
799 /* The type of this field. */
800 tree type = TREE_TYPE (field);
802 if (TREE_CODE (field) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
805 /* If FIELD is static, then treat it like a separate variable, not
806 really like a structure field. If it is a FUNCTION_DECL, it's a
807 method. In both cases, all we do is lay out the decl, and we do
808 it *after* the record is laid out. */
809 if (TREE_CODE (field) == VAR_DECL)
811 rli->pending_statics = tree_cons (NULL_TREE, field,
812 rli->pending_statics);
816 /* Enumerators and enum types which are local to this class need not
817 be laid out. Likewise for initialized constant fields. */
818 else if (TREE_CODE (field) != FIELD_DECL)
821 /* Unions are laid out very differently from records, so split
822 that code off to another function. */
823 else if (TREE_CODE (rli->t) != RECORD_TYPE)
825 place_union_field (rli, field);
829 /* Work out the known alignment so far. Note that A & (-A) is the
830 value of the least-significant bit in A that is one. */
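/* For instance, if rli->bitpos is 24 (binary 11000), then
   24 & -24 == 8, so only 8-bit alignment is known at this point.
   (Illustrative value.)  */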
831 if (! integer_zerop (rli->bitpos))
832 known_align = (tree_low_cst (rli->bitpos, 1)
833 & - tree_low_cst (rli->bitpos, 1));
834 else if (integer_zerop (rli->offset))
835 known_align = BIGGEST_ALIGNMENT;
836 else if (host_integerp (rli->offset, 1))
837 known_align = (BITS_PER_UNIT
838 * (tree_low_cst (rli->offset, 1)
839 & - tree_low_cst (rli->offset, 1)));
841 known_align = rli->offset_align;
843 desired_align = update_alignment_for_field (rli, field, known_align);
845 if (warn_packed && DECL_PACKED (field))
847 if (known_align >= TYPE_ALIGN (type))
849 if (TYPE_ALIGN (type) > desired_align)
851 if (STRICT_ALIGNMENT)
852 warning ("%Jpacked attribute causes inefficient alignment "
853 "for %qD", field, field);
855 warning ("%Jpacked attribute is unnecessary for %qD",
860 rli->packed_maybe_necessary = 1;
863 /* Does this field automatically have the alignment it needs by virtue
864 of the fields that precede it and the record's own alignment? */
865 if (known_align < desired_align)
867 /* No, we need to skip space before this field.
868 Bump the cumulative size to a multiple of the field alignment.
871 warning ("%Jpadding struct to align %qD", field, field);
873 /* If the alignment is still within offset_align, just align
875 if (desired_align < rli->offset_align)
876 rli->bitpos = round_up (rli->bitpos, desired_align);
879 /* First adjust OFFSET by the partial bits, then align. */
881 = size_binop (PLUS_EXPR, rli->offset,
882 fold_convert (sizetype,
883 size_binop (CEIL_DIV_EXPR, rli->bitpos,
884 bitsize_unit_node)));
885 rli->bitpos = bitsize_zero_node;
887 rli->offset = round_up (rli->offset, desired_align / BITS_PER_UNIT);
890 if (! TREE_CONSTANT (rli->offset))
891 rli->offset_align = desired_align;
895 /* Handle compatibility with PCC. Note that if the record has any
896 variable-sized fields, we need not worry about compatibility. */
897 #ifdef PCC_BITFIELD_TYPE_MATTERS
898 if (PCC_BITFIELD_TYPE_MATTERS
899 && ! targetm.ms_bitfield_layout_p (rli->t)
900 && TREE_CODE (field) == FIELD_DECL
901 && type != error_mark_node
902 && DECL_BIT_FIELD (field)
903 && ! DECL_PACKED (field)
904 && maximum_field_alignment == 0
905 && ! integer_zerop (DECL_SIZE (field))
906 && host_integerp (DECL_SIZE (field), 1)
907 && host_integerp (rli->offset, 1)
908 && host_integerp (TYPE_SIZE (type), 1))
910 unsigned int type_align = TYPE_ALIGN (type);
911 tree dsize = DECL_SIZE (field);
912 HOST_WIDE_INT field_size = tree_low_cst (dsize, 1);
913 HOST_WIDE_INT offset = tree_low_cst (rli->offset, 0);
914 HOST_WIDE_INT bit_offset = tree_low_cst (rli->bitpos, 0);
916 #ifdef ADJUST_FIELD_ALIGN
917 if (! TYPE_USER_ALIGN (type))
918 type_align = ADJUST_FIELD_ALIGN (field, type_align);
921 /* A bit field may not span more units of alignment of its type
922 than its type itself. Advance to next boundary if necessary. */
923 if (excess_unit_span (offset, bit_offset, field_size, type_align, type))
924 rli->bitpos = round_up (rli->bitpos, type_align);
926 TYPE_USER_ALIGN (rli->t) |= TYPE_USER_ALIGN (type);
930 #ifdef BITFIELD_NBYTES_LIMITED
931 if (BITFIELD_NBYTES_LIMITED
932 && ! targetm.ms_bitfield_layout_p (rli->t)
933 && TREE_CODE (field) == FIELD_DECL
934 && type != error_mark_node
935 && DECL_BIT_FIELD_TYPE (field)
936 && ! DECL_PACKED (field)
937 && ! integer_zerop (DECL_SIZE (field))
938 && host_integerp (DECL_SIZE (field), 1)
939 && host_integerp (rli->offset, 1)
940 && host_integerp (TYPE_SIZE (type), 1))
942 unsigned int type_align = TYPE_ALIGN (type);
943 tree dsize = DECL_SIZE (field);
944 HOST_WIDE_INT field_size = tree_low_cst (dsize, 1);
945 HOST_WIDE_INT offset = tree_low_cst (rli->offset, 0);
946 HOST_WIDE_INT bit_offset = tree_low_cst (rli->bitpos, 0);
948 #ifdef ADJUST_FIELD_ALIGN
949 if (! TYPE_USER_ALIGN (type))
950 type_align = ADJUST_FIELD_ALIGN (field, type_align);
953 if (maximum_field_alignment != 0)
954 type_align = MIN (type_align, maximum_field_alignment);
955 /* ??? This test is the opposite of the test in the containing if
956 statement, so this code is unreachable currently. */
957 else if (DECL_PACKED (field))
958 type_align = MIN (type_align, BITS_PER_UNIT);
960 /* A bit field may not span the unit of alignment of its type.
961 Advance to next boundary if necessary. */
962 if (excess_unit_span (offset, bit_offset, field_size, type_align, type))
963 rli->bitpos = round_up (rli->bitpos, type_align);
965 TYPE_USER_ALIGN (rli->t) |= TYPE_USER_ALIGN (type);
969 /* See the docs for TARGET_MS_BITFIELD_LAYOUT_P for details.
971 When a bit field is inserted into a packed record, the whole
972 size of the underlying type is used by one or more same-size
973 adjacent bitfields. (That is, if it is `long:3', 32 bits is
974 used in the record, and any additional adjacent long bitfields are
975 packed into the same chunk of 32 bits. However, if the size
976 changes, a new field of that size is allocated.) In an unpacked
977 record, this is the same as using alignment, but not equivalent
980 Note: for compatibility, we use the type size, not the type alignment
981 to determine alignment, since that matches the documentation. */
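/* For example (a hypothetical layout under MS rules): in
     struct s { long a : 3; long b : 3; char c : 3; };
   `a' and `b' share one 32-bit `long' allocation unit, while `c'
   starts a new unit because its type size differs.  Exact sizes are
   target-dependent; this is only an illustration of the rule above.  */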
983 if (targetm.ms_bitfield_layout_p (rli->t)
984 && ((DECL_BIT_FIELD_TYPE (field) && ! DECL_PACKED (field))
985 || (rli->prev_field && ! DECL_PACKED (rli->prev_field))))
987 /* At this point, either the prior or the current field is a bitfield
988 (possibly both), and we're dealing with MS packing. */
989 tree prev_saved = rli->prev_field;
991 /* Is the prior field a bitfield? If so, handle "runs" of same
993 if (rli->prev_field /* necessarily a bitfield if it exists. */)
995 /* If both are bitfields, nonzero, and the same size, this is
996 the middle of a run. Zero declared size fields are special
997 and handled as "end of run". (Note: it's nonzero declared
998 size, but equal type sizes!) (Since we know that both
999 the current and previous fields are bitfields by the
1000 time we check it, DECL_SIZE must be present for both.) */
1001 if (DECL_BIT_FIELD_TYPE (field)
1002 && !integer_zerop (DECL_SIZE (field))
1003 && !integer_zerop (DECL_SIZE (rli->prev_field))
1004 && host_integerp (DECL_SIZE (rli->prev_field), 0)
1005 && host_integerp (TYPE_SIZE (type), 0)
1006 && simple_cst_equal (TYPE_SIZE (type),
1007 TYPE_SIZE (TREE_TYPE (rli->prev_field))))
1009 /* We're in the middle of a run of equal type size fields; make
1010 sure we realign if we run out of bits. (Not decl size,
1012 HOST_WIDE_INT bitsize = tree_low_cst (DECL_SIZE (field), 0);
1014 if (rli->remaining_in_alignment < bitsize)
1016 /* out of bits; bump up to next 'word'. */
1017 rli->offset = DECL_FIELD_OFFSET (rli->prev_field);
1019 = size_binop (PLUS_EXPR, TYPE_SIZE (type),
1020 DECL_FIELD_BIT_OFFSET (rli->prev_field));
1021 rli->prev_field = field;
1022 rli->remaining_in_alignment
1023 = tree_low_cst (TYPE_SIZE (type), 0);
1026 rli->remaining_in_alignment -= bitsize;
1030 /* End of a run: if leaving a run of bitfields of the same type
1031 size, we have to "use up" the rest of the bits of the type
1034 Compute the new position as the sum of the size for the prior
1035 type and where we first started working on that type.
1036 Note: since the beginning of the field was aligned then
1037 of course the end will be too. No rounding is needed. */
1039 if (!integer_zerop (DECL_SIZE (rli->prev_field)))
1041 tree type_size = TYPE_SIZE (TREE_TYPE (rli->prev_field));
1044 = size_binop (PLUS_EXPR, type_size,
1045 DECL_FIELD_BIT_OFFSET (rli->prev_field));
1048 /* We "use up" size zero fields; the code below should behave
1049 as if the prior field was not a bitfield. */
1052 /* Cause a new bitfield to be captured, either this time (if
1053 currently a bitfield) or next time we see one. */
1054 if (!DECL_BIT_FIELD_TYPE (field)
1055 || integer_zerop (DECL_SIZE (field)))
1056 rli->prev_field = NULL;
1059 normalize_rli (rli);
1062 /* If we're starting a new run of same size type bitfields
1063 (or a run of non-bitfields), set up the "first of the run"
1066 That is, if the current field is not a bitfield, or if there
1067 was a prior bitfield and the type sizes differ, or if there wasn't
1068 a prior bitfield and the size of the current field is nonzero.
1070 Note: we must be sure to test ONLY the type size if there was
1071 a prior bitfield and ONLY for the current field being zero if
1074 if (!DECL_BIT_FIELD_TYPE (field)
1075 || ( prev_saved != NULL
1076 ? !simple_cst_equal (TYPE_SIZE (type),
1077 TYPE_SIZE (TREE_TYPE (prev_saved)))
1078 : !integer_zerop (DECL_SIZE (field)) ))
1080 /* Never smaller than a byte for compatibility. */
1081 unsigned int type_align = BITS_PER_UNIT;
1083 /* (When not a bitfield), we could be seeing a flex array (with
1084 no DECL_SIZE). Since we won't be using remaining_in_alignment
1085 until we see a bitfield (and come by here again) we just skip
1087 if (DECL_SIZE (field) != NULL
1088 && host_integerp (TYPE_SIZE (TREE_TYPE (field)), 0)
1089 && host_integerp (DECL_SIZE (field), 0))
1090 rli->remaining_in_alignment
1091 = tree_low_cst (TYPE_SIZE (TREE_TYPE (field)), 0)
1092 - tree_low_cst (DECL_SIZE (field), 0);
1094 /* Now align (conventionally) for the new type. */
1095 if (!DECL_PACKED (field))
1096 type_align = MAX (TYPE_ALIGN (type), type_align);
1099 && DECL_BIT_FIELD_TYPE (prev_saved)
1100 /* If the previous bit-field is zero-sized, we've already
1101 accounted for its alignment needs (or ignored it, if
1102 appropriate) while placing it. */
1103 && ! integer_zerop (DECL_SIZE (prev_saved)))
1104 type_align = MAX (type_align,
1105 TYPE_ALIGN (TREE_TYPE (prev_saved)));
1107 if (maximum_field_alignment != 0)
1108 type_align = MIN (type_align, maximum_field_alignment);
1110 rli->bitpos = round_up (rli->bitpos, type_align);
1112 /* If we really aligned, don't allow subsequent bitfields
1114 rli->prev_field = NULL;
1118 /* Offset so far becomes the position of this field after normalizing. */
1119 normalize_rli (rli);
1120 DECL_FIELD_OFFSET (field) = rli->offset;
1121 DECL_FIELD_BIT_OFFSET (field) = rli->bitpos;
1122 SET_DECL_OFFSET_ALIGN (field, rli->offset_align);
1124 /* If this field ended up more aligned than we thought it would be (we
1125 approximate this by seeing if its position changed), lay out the field
1126 again; perhaps we can use an integral mode for it now. */
1127 if (! integer_zerop (DECL_FIELD_BIT_OFFSET (field)))
1128 actual_align = (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1129 & - tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1));
1130 else if (integer_zerop (DECL_FIELD_OFFSET (field)))
1131 actual_align = BIGGEST_ALIGNMENT;
1132 else if (host_integerp (DECL_FIELD_OFFSET (field), 1))
1133 actual_align = (BITS_PER_UNIT
1134 * (tree_low_cst (DECL_FIELD_OFFSET (field), 1)
1135 & - tree_low_cst (DECL_FIELD_OFFSET (field), 1)));
1137 actual_align = DECL_OFFSET_ALIGN (field);
1139 if (known_align != actual_align)
1140 layout_decl (field, actual_align);
1142 /* Only the MS bitfields use this. */
1143 if (rli->prev_field == NULL && DECL_BIT_FIELD_TYPE (field))
1144 rli->prev_field = field;
1146 /* Now add size of this field to the size of the record. If the size is
1147 not constant, treat the field as being a multiple of bytes and just
1148 adjust the offset, resetting the bit position. Otherwise, apportion the
1149 size amongst the bit position and offset. First handle the case of an
1150 unspecified size, which can happen when we have an invalid nested struct
1151 definition, such as struct j { struct j { int i; } }. The error message
1152 is printed in finish_struct. */
1153 if (DECL_SIZE (field) == 0)
1155 else if (TREE_CODE (DECL_SIZE_UNIT (field)) != INTEGER_CST
1156 || TREE_CONSTANT_OVERFLOW (DECL_SIZE_UNIT (field)))
1159 = size_binop (PLUS_EXPR, rli->offset,
1160 fold_convert (sizetype,
1161 size_binop (CEIL_DIV_EXPR, rli->bitpos,
1162 bitsize_unit_node)));
1164 = size_binop (PLUS_EXPR, rli->offset, DECL_SIZE_UNIT (field));
1165 rli->bitpos = bitsize_zero_node;
1166 rli->offset_align = MIN (rli->offset_align, desired_align);
1170 rli->bitpos = size_binop (PLUS_EXPR, rli->bitpos, DECL_SIZE (field));
1171 normalize_rli (rli);
1175 /* Assuming that all the fields have been laid out, this function uses
1176 RLI to compute the final TYPE_SIZE, TYPE_ALIGN, etc. for the type
1177 indicated by RLI. */
1180 finalize_record_size (record_layout_info rli)
1182 tree unpadded_size, unpadded_size_unit;
1184 /* Now we want just byte and bit offsets, so set the offset alignment
1185 to be a byte and then normalize. */
1186 rli->offset_align = BITS_PER_UNIT;
1187 normalize_rli (rli);
1189 /* Determine the desired alignment. */
1190 #ifdef ROUND_TYPE_ALIGN
1191 TYPE_ALIGN (rli->t) = ROUND_TYPE_ALIGN (rli->t, TYPE_ALIGN (rli->t),
1194 TYPE_ALIGN (rli->t) = MAX (TYPE_ALIGN (rli->t), rli->record_align);
1197 /* Compute the size so far. Be sure to allow for extra bits in the
1198 size in bytes. We have guaranteed above that it will be no more
1199 than a single byte. */
1200 unpadded_size = rli_size_so_far (rli);
1201 unpadded_size_unit = rli_size_unit_so_far (rli);
1202 if (! integer_zerop (rli->bitpos))
1204 = size_binop (PLUS_EXPR, unpadded_size_unit, size_one_node);
1206 /* Round the size up to be a multiple of the required alignment. */
1207 TYPE_SIZE (rli->t) = round_up (unpadded_size, TYPE_ALIGN (rli->t));
1208 TYPE_SIZE_UNIT (rli->t)
1209 = round_up (unpadded_size_unit, TYPE_ALIGN_UNIT (rli->t));
1211 if (warn_padded && TREE_CONSTANT (unpadded_size)
1212 && simple_cst_equal (unpadded_size, TYPE_SIZE (rli->t)) == 0)
1213 warning ("padding struct size to alignment boundary");
1215 if (warn_packed && TREE_CODE (rli->t) == RECORD_TYPE
1216 && TYPE_PACKED (rli->t) && ! rli->packed_maybe_necessary
1217 && TREE_CONSTANT (unpadded_size))
1221 #ifdef ROUND_TYPE_ALIGN
1223 = ROUND_TYPE_ALIGN (rli->t, TYPE_ALIGN (rli->t), rli->unpacked_align);
1225 rli->unpacked_align = MAX (TYPE_ALIGN (rli->t), rli->unpacked_align);
1228 unpacked_size = round_up (TYPE_SIZE (rli->t), rli->unpacked_align);
1229 if (simple_cst_equal (unpacked_size, TYPE_SIZE (rli->t)))
1231 TYPE_PACKED (rli->t) = 0;
1233 if (TYPE_NAME (rli->t))
1237 if (TREE_CODE (TYPE_NAME (rli->t)) == IDENTIFIER_NODE)
1238 name = IDENTIFIER_POINTER (TYPE_NAME (rli->t));
1240 name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (rli->t)));
1242 if (STRICT_ALIGNMENT)
1243 warning ("packed attribute causes inefficient "
1244 "alignment for %qs", name);
1246 warning ("packed attribute is unnecessary for %qs", name);
1250 if (STRICT_ALIGNMENT)
1251 warning ("packed attribute causes inefficient alignment");
1253 warning ("packed attribute is unnecessary");
1259 /* Compute the TYPE_MODE for the TYPE (which is a RECORD_TYPE). */
1262 compute_record_mode (tree type)
1265 enum machine_mode mode = VOIDmode;
1267 /* Most RECORD_TYPEs have BLKmode, so we start off assuming that.
1268 However, if possible, we use a mode that fits in a register
1269 instead, in order to allow for better optimization down the
1271 TYPE_MODE (type) = BLKmode;
1273 if (! host_integerp (TYPE_SIZE (type), 1))
1276 /* A record which has any BLKmode members must itself be
1277 BLKmode; it can't go in a register. Unless the member is
1278 BLKmode only because it isn't aligned. */
1279 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1281 if (TREE_CODE (field) != FIELD_DECL)
1284 if (TREE_CODE (TREE_TYPE (field)) == ERROR_MARK
1285 || (TYPE_MODE (TREE_TYPE (field)) == BLKmode
1286 && ! TYPE_NO_FORCE_BLK (TREE_TYPE (field))
1287 && !(TYPE_SIZE (TREE_TYPE (field)) != 0
1288 && integer_zerop (TYPE_SIZE (TREE_TYPE (field)))))
1289 || ! host_integerp (bit_position (field), 1)
1290 || DECL_SIZE (field) == 0
1291 || ! host_integerp (DECL_SIZE (field), 1))
1294 /* If this field is the whole struct, remember its mode so
1295 that, say, we can put a double in a class into a DF
1296 register instead of forcing it to live on the stack. */
1297 if (simple_cst_equal (TYPE_SIZE (type), DECL_SIZE (field)))
1298 mode = DECL_MODE (field);
1300 #ifdef MEMBER_TYPE_FORCES_BLK
1301 /* With some targets, e.g. c4x, it is sub-optimal
1302 to access an aligned BLKmode structure as a scalar. */
1304 if (MEMBER_TYPE_FORCES_BLK (field, mode))
1306 #endif /* MEMBER_TYPE_FORCES_BLK */
1309 TYPE_MODE (type) = mode_for_size_tree (TYPE_SIZE (type), MODE_INT, 1);
1311 /* If we only have one real field, use its mode if that mode's size
1312 matches the type's size. This only applies to RECORD_TYPE. This
1313 does not apply to unions. */
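/* For example, `struct { double d; }' contains one field whose size
   equals the structure's size, so the struct can be given DFmode and
   placed in a floating-point register (an illustration; whether this
   happens depends on the target).  */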
1314 if (TREE_CODE (type) == RECORD_TYPE && mode != VOIDmode
1315 && GET_MODE_SIZE (mode) == GET_MODE_SIZE (TYPE_MODE (type)))
1316 TYPE_MODE (type) = mode;
1318 /* If the structure's known alignment is less than what the scalar
1319 mode would need, and it matters, then stick with BLKmode. */
1320 if (TYPE_MODE (type) != BLKmode
1322 && ! (TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT
1323 || TYPE_ALIGN (type) >= GET_MODE_ALIGNMENT (TYPE_MODE (type))))
1325 /* If this is the only reason this type is BLKmode, then
1326 don't force containing types to be BLKmode. */
1327 TYPE_NO_FORCE_BLK (type) = 1;
1328 TYPE_MODE (type) = BLKmode;
1332 /* Compute TYPE_SIZE and TYPE_ALIGN for TYPE, once it has been laid
1336 finalize_type_size (tree type)
1338 /* Normally, use the alignment corresponding to the mode chosen.
1339 However, where strict alignment is not required, avoid
1340 over-aligning structures, since most compilers do not do this
1343 if (TYPE_MODE (type) != BLKmode && TYPE_MODE (type) != VOIDmode
1344 && (STRICT_ALIGNMENT
1345 || (TREE_CODE (type) != RECORD_TYPE && TREE_CODE (type) != UNION_TYPE
1346 && TREE_CODE (type) != QUAL_UNION_TYPE
1347 && TREE_CODE (type) != ARRAY_TYPE)))
1349 TYPE_ALIGN (type) = GET_MODE_ALIGNMENT (TYPE_MODE (type));
1350 TYPE_USER_ALIGN (type) = 0;
1353 /* Do machine-dependent extra alignment. */
1354 #ifdef ROUND_TYPE_ALIGN
1356 = ROUND_TYPE_ALIGN (type, TYPE_ALIGN (type), BITS_PER_UNIT);
1359 /* If we failed to find a simple way to calculate the unit size
1360 of the type, find it by division. */
1361 if (TYPE_SIZE_UNIT (type) == 0 && TYPE_SIZE (type) != 0)
1362 /* TYPE_SIZE (type) is computed in bitsizetype. After the division, the
1363 result will fit in sizetype. We will get more efficient code using
1364 sizetype, so we force a conversion. */
1365 TYPE_SIZE_UNIT (type)
1366 = fold_convert (sizetype,
1367 size_binop (FLOOR_DIV_EXPR, TYPE_SIZE (type),
1368 bitsize_unit_node));
1370 if (TYPE_SIZE (type) != 0)
1372 TYPE_SIZE (type) = round_up (TYPE_SIZE (type), TYPE_ALIGN (type));
1373 TYPE_SIZE_UNIT (type) = round_up (TYPE_SIZE_UNIT (type),
1374 TYPE_ALIGN_UNIT (type));
1377 /* Evaluate nonconstant sizes only once, either now or as soon as safe. */
1378 if (TYPE_SIZE (type) != 0 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
1379 TYPE_SIZE (type) = variable_size (TYPE_SIZE (type));
1380 if (TYPE_SIZE_UNIT (type) != 0
1381 && TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)
1382 TYPE_SIZE_UNIT (type) = variable_size (TYPE_SIZE_UNIT (type));
1384 /* Also lay out any other variants of the type. */
1385 if (TYPE_NEXT_VARIANT (type)
1386 || type != TYPE_MAIN_VARIANT (type))
1389 /* Record layout info of this variant. */
1390 tree size = TYPE_SIZE (type);
1391 tree size_unit = TYPE_SIZE_UNIT (type);
1392 unsigned int align = TYPE_ALIGN (type);
1393 unsigned int user_align = TYPE_USER_ALIGN (type);
1394 enum machine_mode mode = TYPE_MODE (type);
1396 /* Copy it into all variants. */
1397 for (variant = TYPE_MAIN_VARIANT (type);
1399 variant = TYPE_NEXT_VARIANT (variant))
1401 TYPE_SIZE (variant) = size;
1402 TYPE_SIZE_UNIT (variant) = size_unit;
1403 TYPE_ALIGN (variant) = align;
1404 TYPE_USER_ALIGN (variant) = user_align;
1405 TYPE_MODE (variant) = mode;
1410 /* Do all of the work required to layout the type indicated by RLI,
1411 once the fields have been laid out. This function will call `free'
1412 for RLI, unless FREE_P is false. Passing a value other than false
1413 for FREE_P is bad practice; this option only exists to support the
1417 finish_record_layout (record_layout_info rli, int free_p)
1419 /* Compute the final size. */
1420 finalize_record_size (rli);
1422 /* Compute the TYPE_MODE for the record. */
1423 compute_record_mode (rli->t);
1425 /* Perform any last tweaks to the TYPE_SIZE, etc. */
1426 finalize_type_size (rli->t);
1428 /* Lay out any static members. This is done now because their type
1429 may use the record's type. */
1430 while (rli->pending_statics)
1432 layout_decl (TREE_VALUE (rli->pending_statics), 0);
1433 rli->pending_statics = TREE_CHAIN (rli->pending_statics);
1442 /* Finish processing a builtin RECORD_TYPE type TYPE. Its name is
1443 NAME, its fields are chained in reverse on FIELDS.
1445 If ALIGN_TYPE is non-null, it is given the same alignment as
1449 finish_builtin_struct (tree type, const char *name, tree fields,
1454 for (tail = NULL_TREE; fields; tail = fields, fields = next)
1456 DECL_FIELD_CONTEXT (fields) = type;
1457 next = TREE_CHAIN (fields);
1458 TREE_CHAIN (fields) = tail;
1460 TYPE_FIELDS (type) = tail;
1464 TYPE_ALIGN (type) = TYPE_ALIGN (align_type);
1465 TYPE_USER_ALIGN (type) = TYPE_USER_ALIGN (align_type);
1469 #if 0 /* not yet, should get fixed properly later */
1470 TYPE_NAME (type) = make_type_decl (get_identifier (name), type);
1472 TYPE_NAME (type) = build_decl (TYPE_DECL, get_identifier (name), type);
1474 TYPE_STUB_DECL (type) = TYPE_NAME (type);
1475 layout_decl (TYPE_NAME (type), 0);
1478 /* Calculate the mode, size, and alignment for TYPE.
1479 For an array type, calculate the element separation as well.
1480 Record TYPE on the chain of permanent or temporary types
1481 so that dbxout will find out about it.
1483 TYPE_SIZE of a type is nonzero if the type has been laid out already.
1484 layout_type does nothing on such a type.
1486 If the type is incomplete, its TYPE_SIZE remains zero. */
1489 layout_type (tree type)
1493 if (type == error_mark_node)
1496 /* Do nothing if type has been laid out before. */
1497 if (TYPE_SIZE (type))
1500 switch (TREE_CODE (type))
1503 /* This kind of type is the responsibility
1504 of the language-specific code. */
1507 case BOOLEAN_TYPE: /* Used for Java, Pascal, and Chill. */
1508 if (TYPE_PRECISION (type) == 0)
1509 TYPE_PRECISION (type) = 1; /* default to one byte/boolean. */
1511 /* ... fall through ... */
1516 if (TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST
1517 && tree_int_cst_sgn (TYPE_MIN_VALUE (type)) >= 0)
1518 TYPE_UNSIGNED (type) = 1;
1520 TYPE_MODE (type) = smallest_mode_for_size (TYPE_PRECISION (type),
1522 TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
1523 TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
1527 TYPE_MODE (type) = mode_for_size (TYPE_PRECISION (type), MODE_FLOAT, 0);
1528 TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
1529 TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
1533 TYPE_UNSIGNED (type) = TYPE_UNSIGNED (TREE_TYPE (type));
1535 = mode_for_size (2 * TYPE_PRECISION (TREE_TYPE (type)),
1536 (TREE_CODE (TREE_TYPE (type)) == REAL_TYPE
1537 ? MODE_COMPLEX_FLOAT : MODE_COMPLEX_INT),
1539 TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
1540 TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
1545 int nunits = TYPE_VECTOR_SUBPARTS (type);
1546 tree nunits_tree = build_int_cst (NULL_TREE, nunits);
1547 tree innertype = TREE_TYPE (type);
1549 gcc_assert (!(nunits & (nunits - 1)));
1551 /* Find an appropriate mode for the vector type. */
1552 if (TYPE_MODE (type) == VOIDmode)
1554 enum machine_mode innermode = TYPE_MODE (innertype);
1555 enum machine_mode mode;
1557 /* First, look for a supported vector type. */
1558 if (GET_MODE_CLASS (innermode) == MODE_FLOAT)
1559 mode = MIN_MODE_VECTOR_FLOAT;
1561 mode = MIN_MODE_VECTOR_INT;
1563 for (; mode != VOIDmode ; mode = GET_MODE_WIDER_MODE (mode))
1564 if (GET_MODE_NUNITS (mode) == nunits
1565 && GET_MODE_INNER (mode) == innermode
1566 && targetm.vector_mode_supported_p (mode))
1569 /* For integers, try mapping it to a same-sized scalar mode. */
1570 if (mode == VOIDmode
1571 && GET_MODE_CLASS (innermode) == MODE_INT)
1572 mode = mode_for_size (nunits * GET_MODE_BITSIZE (innermode),
1575 if (mode == VOIDmode || !have_regs_of_mode[mode])
1576 TYPE_MODE (type) = BLKmode;
1578 TYPE_MODE (type) = mode;
1581 TYPE_UNSIGNED (type) = TYPE_UNSIGNED (TREE_TYPE (type));
1582 TYPE_SIZE_UNIT (type) = int_const_binop (MULT_EXPR,
1583 TYPE_SIZE_UNIT (innertype),
1585 TYPE_SIZE (type) = int_const_binop (MULT_EXPR, TYPE_SIZE (innertype),
1588 /* Always naturally align vectors. This prevents ABI changes
1589 depending on whether or not native vector modes are supported. */
1590 TYPE_ALIGN (type) = tree_low_cst (TYPE_SIZE (type), 0);
1595 /* This is an incomplete type and so doesn't have a size. */
1596 TYPE_ALIGN (type) = 1;
1597 TYPE_USER_ALIGN (type) = 0;
1598 TYPE_MODE (type) = VOIDmode;
1602 TYPE_SIZE (type) = bitsize_int (POINTER_SIZE);
1603 TYPE_SIZE_UNIT (type) = size_int (POINTER_SIZE / BITS_PER_UNIT);
1604 /* A pointer might be MODE_PARTIAL_INT,
1605 but ptrdiff_t must be integral. */
1606 TYPE_MODE (type) = mode_for_size (POINTER_SIZE, MODE_INT, 0);
1611 /* It's hard to see what the mode and size of a function ought to
1612 be, but we do know the alignment is FUNCTION_BOUNDARY, so
1613 make it consistent with that. */
1614 TYPE_MODE (type) = mode_for_size (FUNCTION_BOUNDARY, MODE_INT, 0);
1615 TYPE_SIZE (type) = bitsize_int (FUNCTION_BOUNDARY);
1616 TYPE_SIZE_UNIT (type) = size_int (FUNCTION_BOUNDARY / BITS_PER_UNIT);
1620 case REFERENCE_TYPE:
1623 enum machine_mode mode = ((TREE_CODE (type) == REFERENCE_TYPE
1624 && reference_types_internal)
1625 ? Pmode : TYPE_MODE (type));
1627 int nbits = GET_MODE_BITSIZE (mode);
1629 TYPE_SIZE (type) = bitsize_int (nbits);
1630 TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (mode));
1631 TYPE_UNSIGNED (type) = 1;
1632 TYPE_PRECISION (type) = nbits;
1638 tree index = TYPE_DOMAIN (type);
1639 tree element = TREE_TYPE (type);
1641 build_pointer_type (element);
1643 /* We need to know both bounds in order to compute the size. */
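/* Worked example (hypothetical): for `int a[10]' with a 32-bit `int',
   the domain is [0, 9], so LENGTH is 9 - 0 + 1 == 10 and TYPE_SIZE
   becomes 10 * 32 == 320 bits (TYPE_SIZE_UNIT is 40 bytes).  */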
1644 if (index && TYPE_MAX_VALUE (index) && TYPE_MIN_VALUE (index)
1645 && TYPE_SIZE (element))
1647 tree ub = unshare_expr (TYPE_MAX_VALUE (index));
1648 tree lb = unshare_expr (TYPE_MIN_VALUE (index));
1652 /* The initial subtraction should happen in the original type so
1653 that (possible) negative values are handled appropriately. */
1654 length = size_binop (PLUS_EXPR, size_one_node,
1655 fold_convert (sizetype,
1656 fold (build2 (MINUS_EXPR,
1660 /* Special handling for arrays of bits (for Chill). */
1661 element_size = TYPE_SIZE (element);
1662 if (TYPE_PACKED (type) && INTEGRAL_TYPE_P (element)
1663 && (integer_zerop (TYPE_MAX_VALUE (element))
1664 || integer_onep (TYPE_MAX_VALUE (element)))
1665 && host_integerp (TYPE_MIN_VALUE (element), 1))
1667 HOST_WIDE_INT maxvalue
1668 = tree_low_cst (TYPE_MAX_VALUE (element), 1);
1669 HOST_WIDE_INT minvalue
1670 = tree_low_cst (TYPE_MIN_VALUE (element), 1);
1672 if (maxvalue - minvalue == 1
1673 && (maxvalue == 1 || maxvalue == 0))
1674 element_size = integer_one_node;
1677 /* If neither bound is a constant and sizetype is signed, make
1678 sure the size is never negative. We should really do this
1679 if *either* bound is non-constant, but this is the best
1680 compromise between C and Ada. */
1681 if (!TYPE_UNSIGNED (sizetype)
1682 && TREE_CODE (TYPE_MIN_VALUE (index)) != INTEGER_CST
1683 && TREE_CODE (TYPE_MAX_VALUE (index)) != INTEGER_CST)
1684 length = size_binop (MAX_EXPR, length, size_zero_node);
1686 TYPE_SIZE (type) = size_binop (MULT_EXPR, element_size,
1687 fold_convert (bitsizetype,
1690 /* If we know the size of the element, calculate the total
1691 size directly, rather than deriving it by division below.
1692 This optimization helps Fortran assumed-size arrays
1693 (where the size of the array is determined at runtime)
1695 Note that we can't do this in the case where the size of
1696 the elements is one bit since TYPE_SIZE_UNIT cannot be
1697 set correctly in that case. */
1698 if (TYPE_SIZE_UNIT (element) != 0 && ! integer_onep (element_size))
1699 TYPE_SIZE_UNIT (type)
1700 = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (element), length);
1703 /* Now round the alignment and size,
1704 using machine-dependent criteria if any. */
1706 #ifdef ROUND_TYPE_ALIGN
1708 = ROUND_TYPE_ALIGN (type, TYPE_ALIGN (element), BITS_PER_UNIT);
1710 TYPE_ALIGN (type) = MAX (TYPE_ALIGN (element), BITS_PER_UNIT);
1712 TYPE_USER_ALIGN (type) = TYPE_USER_ALIGN (element);
1713 TYPE_MODE (type) = BLKmode;
1714 if (TYPE_SIZE (type) != 0
1715 #ifdef MEMBER_TYPE_FORCES_BLK
1716 && ! MEMBER_TYPE_FORCES_BLK (type, VOIDmode)
1718 /* BLKmode elements force BLKmode aggregate;
1719 else extract/store fields may lose. */
1720 && (TYPE_MODE (TREE_TYPE (type)) != BLKmode
1721 || TYPE_NO_FORCE_BLK (TREE_TYPE (type))))
1723 /* One-element arrays get the component type's mode. */
1724 if (simple_cst_equal (TYPE_SIZE (type),
1725 TYPE_SIZE (TREE_TYPE (type))))
1726 TYPE_MODE (type) = TYPE_MODE (TREE_TYPE (type));
1729 = mode_for_size_tree (TYPE_SIZE (type), MODE_INT, 1);
1731 if (TYPE_MODE (type) != BLKmode
1732 && STRICT_ALIGNMENT && TYPE_ALIGN (type) < BIGGEST_ALIGNMENT
1733 && TYPE_ALIGN (type) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
1736 TYPE_NO_FORCE_BLK (type) = 1;
1737 TYPE_MODE (type) = BLKmode;
1745 case QUAL_UNION_TYPE:
1748 record_layout_info rli;
1750 /* Initialize the layout information. */
1751 rli = start_record_layout (type);
1753 /* If this is a QUAL_UNION_TYPE, we want to process the fields
1754 in the reverse order in building the COND_EXPR that denotes
1755 its size. We reverse them again later. */
1756 if (TREE_CODE (type) == QUAL_UNION_TYPE)
1757 TYPE_FIELDS (type) = nreverse (TYPE_FIELDS (type));
1759 /* Place all the fields. */
1760 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1761 place_field (rli, field);
1763 if (TREE_CODE (type) == QUAL_UNION_TYPE)
1764 TYPE_FIELDS (type) = nreverse (TYPE_FIELDS (type));
1766 if (lang_adjust_rli)
1767 (*lang_adjust_rli) (rli);
1769 /* Finish laying out the record. */
1770 finish_record_layout (rli, /*free_p=*/true);
1775 /* The size may vary in different languages, so the language front end
1776 should fill in the size. */
1777 TYPE_ALIGN (type) = BIGGEST_ALIGNMENT;
1778 TYPE_USER_ALIGN (type) = 0;
1779 TYPE_MODE (type) = BLKmode;
1786 /* Compute the final TYPE_SIZE, TYPE_ALIGN, etc. for TYPE. For
1787 records and unions, finish_record_layout already called this
1789 if (TREE_CODE (type) != RECORD_TYPE
1790 && TREE_CODE (type) != UNION_TYPE
1791 && TREE_CODE (type) != QUAL_UNION_TYPE)
1792 finalize_type_size (type);
1794 /* If an alias set has been set for this aggregate when it was incomplete,
1795 force it into alias set 0.
1796 This is too conservative, but we cannot call record_component_aliases
1797 here because some frontends still change the aggregates after
1799 if (AGGREGATE_TYPE_P (type) && TYPE_ALIAS_SET_KNOWN_P (type))
1800 TYPE_ALIAS_SET (type) = 0;
1803 /* Create and return a type for signed integers of PRECISION bits. */
1806 make_signed_type (int precision)
1808 tree type = make_node (INTEGER_TYPE);
1810 TYPE_PRECISION (type) = precision;
1812 fixup_signed_type (type);
1816 /* Create and return a type for unsigned integers of PRECISION bits. */
1819 make_unsigned_type (int precision)
1821 tree type = make_node (INTEGER_TYPE);
1823 TYPE_PRECISION (type) = precision;
1825 fixup_unsigned_type (type);
1829 /* Initialize sizetype and bitsizetype to a reasonable and temporary
1830 value to enable integer types to be created. */
1833 initialize_sizetypes (bool signed_p)
1835 tree t = make_node (INTEGER_TYPE);
1837 TYPE_MODE (t) = SImode;
1838 TYPE_ALIGN (t) = GET_MODE_ALIGNMENT (SImode);
1839 TYPE_USER_ALIGN (t) = 0;
1840 TYPE_IS_SIZETYPE (t) = 1;
1841 TYPE_UNSIGNED (t) = !signed_p;
1842 TYPE_SIZE (t) = build_int_cst (t, GET_MODE_BITSIZE (SImode));
1843 TYPE_SIZE_UNIT (t) = build_int_cst (t, GET_MODE_SIZE (SImode));
1844 TYPE_PRECISION (t) = GET_MODE_BITSIZE (SImode);
1845 TYPE_MIN_VALUE (t) = build_int_cst (t, 0);
1847 /* 1000 avoids problems with possible overflow and is certainly
1848 larger than any size value we'd want to be storing. */
1849 TYPE_MAX_VALUE (t) = build_int_cst (t, 1000);
1851 sizetype = t;
1852 bitsizetype = build_distinct_type_copy (t);
1853 }
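/* Editorial sketch (not part of GCC proper): the bootstrap order that the
   stub created above assumes.  A front end first installs the stubs so
   integer types can be built at all, then constructs its real size type,
   and finally overwrites the stubs via set_sizetype below.  The use of
   make_unsigned_type (POINTER_SIZE) here is only a plausible stand-in for
   the front end's actual choice of size type.  Guarded out; for
   illustration only.  */
#if 0
static void
example_size_type_bootstrap (void)
{
  tree size_type;

  /* Stub sizetype and bitsizetype so integer types can be created.  */
  initialize_sizetypes (/*signed_p=*/false);

  /* Ordinary integer types can now be built ...  */
  size_type = make_unsigned_type (POINTER_SIZE);

  /* ... and the stubs are then overwritten in place, which keeps any
     INTEGER_CSTs already referencing them valid.  */
  set_sizetype (size_type);
}
#endif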
1855 /* Make sizetype a version of TYPE, and initialize *sizetype
1856 accordingly. We do this by overwriting the stub sizetype and
1857 bitsizetype nodes created by initialize_sizetypes. This makes sure
1858 that (a) anything stubby about them no longer exists, (b) any
1859 INTEGER_CSTs created with such a type remain valid. */
1862 set_sizetype (tree type)
1864 int oprecision = TYPE_PRECISION (type);
1865 /* The *bitsizetype types use a precision that avoids overflows when
1866 calculating signed sizes / offsets in bits. However, when
1867 cross-compiling from a 32 bit to a 64 bit host, we are limited to 64 bit
1868 precision. */
1869 int precision = MIN (oprecision + BITS_PER_UNIT_LOG + 1,
1870 2 * HOST_BITS_PER_WIDE_INT);
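/* Editorial note: as a worked example, with oprecision == 32,
   BITS_PER_UNIT_LOG == 3 and HOST_BITS_PER_WIDE_INT == 64 this gives
   precision = MIN (32 + 3 + 1, 128) = 36: enough bits to express any
   32-bit byte count as a signed number of bits without overflow.  */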
1873 gcc_assert (TYPE_UNSIGNED (type) == TYPE_UNSIGNED (sizetype));
1875 t = build_distinct_type_copy (type);
1876 /* We do want to use sizetype's cache, as we will be replacing that
1877 type. */
1878 TYPE_CACHED_VALUES (t) = TYPE_CACHED_VALUES (sizetype);
1879 TYPE_CACHED_VALUES_P (t) = TYPE_CACHED_VALUES_P (sizetype);
1880 TREE_TYPE (TYPE_CACHED_VALUES (t)) = type;
1881 TYPE_UID (t) = TYPE_UID (sizetype);
1882 TYPE_IS_SIZETYPE (t) = 1;
1884 /* Replace our original stub sizetype. */
1885 memcpy (sizetype, t, tree_size (sizetype));
1886 TYPE_MAIN_VARIANT (sizetype) = sizetype;
1888 t = make_node (INTEGER_TYPE);
1889 TYPE_NAME (t) = get_identifier ("bit_size_type");
1890 /* We do want to use bitsizetype's cache, as we will be replacing that
1891 type. */
1892 TYPE_CACHED_VALUES (t) = TYPE_CACHED_VALUES (bitsizetype);
1893 TYPE_CACHED_VALUES_P (t) = TYPE_CACHED_VALUES_P (bitsizetype);
1894 TYPE_PRECISION (t) = precision;
1895 TYPE_UID (t) = TYPE_UID (bitsizetype);
1896 TYPE_IS_SIZETYPE (t) = 1;
1897 /* Replace our original stub bitsizetype. */
1898 memcpy (bitsizetype, t, tree_size (bitsizetype));
1900 if (TYPE_UNSIGNED (type))
1901 {
1902 fixup_unsigned_type (bitsizetype);
1903 ssizetype = build_distinct_type_copy (make_signed_type (oprecision));
1904 TYPE_IS_SIZETYPE (ssizetype) = 1;
1905 sbitsizetype = build_distinct_type_copy (make_signed_type (precision));
1906 TYPE_IS_SIZETYPE (sbitsizetype) = 1;
1907 }
1908 else
1909 {
1910 fixup_signed_type (bitsizetype);
1911 ssizetype = sizetype;
1912 sbitsizetype = bitsizetype;
1913 }
1914 }
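/* Editorial note: ssizetype and sbitsizetype are the signed counterparts
   of sizetype and bitsizetype; ssize_int, for instance, builds its
   constants in ssizetype.  They are used wherever a size or bit offset
   may legitimately be negative.  */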
1916 /* TYPE is an integral type, i.e., an INTEGER_TYPE, ENUMERAL_TYPE,
1917 BOOLEAN_TYPE, or CHAR_TYPE. Set TYPE_MIN_VALUE and TYPE_MAX_VALUE
1918 for TYPE, based on the PRECISION and whether or not the TYPE
1919 IS_UNSIGNED. PRECISION need not correspond to a width supported
1920 natively by the hardware; for example, on a machine with 8-bit,
1921 16-bit, and 32-bit register modes, PRECISION might be 7, 23, or
1922 61. */
1925 set_min_and_max_values_for_integral_type (tree type,
1926 int precision,
1927 bool is_unsigned)
1928 {
1929 tree min_value;
1930 tree max_value;
1932 if (is_unsigned)
1933 {
1934 min_value = build_int_cst (type, 0);
1935 max_value
1936 = build_int_cst_wide (type, precision - HOST_BITS_PER_WIDE_INT >= 0
1937 ? -1
1938 : ((HOST_WIDE_INT) 1 << precision) - 1,
1939 precision - HOST_BITS_PER_WIDE_INT > 0
1940 ? ((unsigned HOST_WIDE_INT) ~0
1941 >> (HOST_BITS_PER_WIDE_INT
1942 - (precision - HOST_BITS_PER_WIDE_INT)))
1943 : 0);
1944 }
1945 else
1946 {
1947 min_value
1948 = build_int_cst_wide (type,
1949 (precision - HOST_BITS_PER_WIDE_INT > 0
1950 ? 0
1951 : (HOST_WIDE_INT) (-1) << (precision - 1)),
1952 (((HOST_WIDE_INT) (-1)
1953 << (precision - HOST_BITS_PER_WIDE_INT - 1 > 0
1954 ? precision - HOST_BITS_PER_WIDE_INT - 1
1955 : 0))));
1956 max_value
1957 = build_int_cst_wide (type,
1958 (precision - HOST_BITS_PER_WIDE_INT > 0
1959 ? -1
1960 : ((HOST_WIDE_INT) 1 << (precision - 1)) - 1),
1961 (precision - HOST_BITS_PER_WIDE_INT - 1 > 0
1962 ? (((HOST_WIDE_INT) 1
1963 << (precision - HOST_BITS_PER_WIDE_INT - 1))) - 1
1964 : 0));
1965 }
1967 TYPE_MIN_VALUE (type) = min_value;
1968 TYPE_MAX_VALUE (type) = max_value;
1969 }
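/* Editorial sketch (not part of GCC): a standalone program mirroring the
   arithmetic above for the common case PRECISION <= HOST_BITS_PER_WIDE_INT,
   with a 64-bit stand-in for HOST_WIDE_INT.  For a 7-bit type it prints
   [-64, 63] signed and [0, 127] unsigned; the function above additionally
   splits the result across two words when PRECISION exceeds
   HOST_BITS_PER_WIDE_INT.  Guarded out; for illustration only.  */
#if 0
#include <stdio.h>

int
main (void)
{
  /* Stand-ins for HOST_WIDE_INT / unsigned HOST_WIDE_INT.  */
  typedef long long shwi;
  typedef unsigned long long uhwi;
  int precision = 7;

  /* Signed bounds: -2**(precision-1) and 2**(precision-1) - 1.  */
  shwi smin = -((shwi) 1 << (precision - 1));
  shwi smax = ((shwi) 1 << (precision - 1)) - 1;

  /* Unsigned bounds: 0 and 2**precision - 1.  */
  uhwi umin = 0;
  uhwi umax = ((uhwi) 1 << precision) - 1;

  printf ("signed:   [%lld, %lld]\n", smin, smax);
  printf ("unsigned: [%llu, %llu]\n", umin, umax);
  return 0;
}
#endif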
1971 /* Set the extreme values of TYPE based on its precision in bits,
1972 then lay it out. Used when make_signed_type won't do
1973 because the tree code is not INTEGER_TYPE.
1974 E.g. for Pascal, when the -fsigned-char option is given. */
1977 fixup_signed_type (tree type)
1979 int precision = TYPE_PRECISION (type);
1981 /* We cannot properly represent constants wider than
1982 2 * HOST_BITS_PER_WIDE_INT bits; still, we need such types,
1983 as they are used by the i386 vector extensions and friends. */
1984 if (precision > HOST_BITS_PER_WIDE_INT * 2)
1985 precision = HOST_BITS_PER_WIDE_INT * 2;
1987 set_min_and_max_values_for_integral_type (type, precision,
1988 /*is_unsigned=*/false);
1990 /* Lay out the type: set its alignment, size, etc. */
1991 layout_type (type);
1992 }
1994 /* Set the extreme values of TYPE based on its precision in bits,
1995 then lay it out. This is used both in `make_unsigned_type'
1996 and for enumeral types. */
1999 fixup_unsigned_type (tree type)
2001 int precision = TYPE_PRECISION (type);
2003 /* We cannot properly represent constants wider than
2004 2 * HOST_BITS_PER_WIDE_INT bits; still, we need such types,
2005 as they are used by the i386 vector extensions and friends. */
2006 if (precision > HOST_BITS_PER_WIDE_INT * 2)
2007 precision = HOST_BITS_PER_WIDE_INT * 2;
2009 TYPE_UNSIGNED (type) = 1;
2011 set_min_and_max_values_for_integral_type (type, precision,
2012 /*is_unsigned=*/true);
2014 /* Lay out the type: set its alignment, size, etc. */
2015 layout_type (type);
2016 }
2018 /* Find the best machine mode to use when referencing a bit field of length
2019 BITSIZE bits starting at BITPOS.
2021 The underlying object is known to be aligned to a boundary of ALIGN bits.
2022 If LARGEST_MODE is not VOIDmode, it means that we should not use a mode
2023 larger than LARGEST_MODE (usually SImode).
2025 If no mode meets all these conditions, we return VOIDmode. Otherwise, if
2026 VOLATILEP is true or SLOW_BYTE_ACCESS is false, we return the smallest
2027 mode meeting these conditions.
2029 Otherwise (VOLATILEP is false and SLOW_BYTE_ACCESS is true), we return
2030 the largest mode (but a mode no wider than UNITS_PER_WORD) that meets
2031 all the conditions. */
2034 get_best_mode (int bitsize, int bitpos, unsigned int align,
2035 enum machine_mode largest_mode, int volatilep)
2037 enum machine_mode mode;
2038 unsigned int unit = 0;
2040 /* Find the narrowest integer mode that contains the bit field. */
2041 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2042 mode = GET_MODE_WIDER_MODE (mode))
2044 unit = GET_MODE_BITSIZE (mode);
2045 if ((bitpos % unit) + bitsize <= unit)
2049 if (mode == VOIDmode
2050 /* It is tempting to omit the following line
2051 if STRICT_ALIGNMENT is true.
2052 But that is incorrect, since if the bitfield uses part of 3 bytes
2053 and we use a 4-byte mode, we could get a spurious segv
2054 if the extra 4th byte is past the end of memory.
2055 (Though at least one Unix compiler ignores this problem:
2056 that on the Sequent 386 machine.) */
2057 || MIN (unit, BIGGEST_ALIGNMENT) > align
2058 || (largest_mode != VOIDmode && unit > GET_MODE_BITSIZE (largest_mode)))
2059 return VOIDmode;
2061 if (SLOW_BYTE_ACCESS && ! volatilep)
2063 enum machine_mode wide_mode = VOIDmode, tmode;
2065 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT); tmode != VOIDmode;
2066 tmode = GET_MODE_WIDER_MODE (tmode))
2068 unit = GET_MODE_BITSIZE (tmode);
2069 if (bitpos / unit == (bitpos + bitsize - 1) / unit
2070 && unit <= BITS_PER_WORD
2071 && unit <= MIN (align, BIGGEST_ALIGNMENT)
2072 && (largest_mode == VOIDmode
2073 || unit <= GET_MODE_BITSIZE (largest_mode)))
2077 if (wide_mode != VOIDmode)
2078 return wide_mode;
2079 }
2081 return mode;
2082 }
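/* Editorial sketch (not part of GCC): the core of the narrowest-mode
   search above as a standalone helper.  For a bit field of BITSIZE bits at
   BITPOS within an object known to be aligned to ALIGN bits, it returns
   the smallest unit from {8, 16, 32, 64} whose single aligned word covers
   the whole field without assuming more alignment than we have, or 0 where
   get_best_mode would return VOIDmode.  It deliberately omits the
   BIGGEST_ALIGNMENT cap, the LARGEST_MODE limit and the SLOW_BYTE_ACCESS
   widening pass, and returns a width in bits rather than a machine mode.
   Guarded out; for illustration only.  */
#if 0
static unsigned int
example_best_unit (unsigned int bitsize, unsigned int bitpos,
                   unsigned int align)
{
  static const unsigned int units[] = { 8, 16, 32, 64 };
  unsigned int i;

  for (i = 0; i < sizeof units / sizeof units[0]; i++)
    {
      unsigned int unit = units[i];

      /* The field must fit within one UNIT-sized, UNIT-aligned word,
         and the access may not rely on alignment the object lacks,
         or it could touch bytes past the end of the object.  */
      if ((bitpos % unit) + bitsize <= unit && unit <= align)
        return unit;
    }
  return 0;
}
#endif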
2084 /* Gets minimal and maximal values for MODE (signed or unsigned depending on
2085 SIGN). The returned constants are made to be usable in TARGET_MODE. */
2088 get_mode_bounds (enum machine_mode mode, int sign,
2089 enum machine_mode target_mode,
2090 rtx *mmin, rtx *mmax)
2092 unsigned size = GET_MODE_BITSIZE (mode);
2093 unsigned HOST_WIDE_INT min_val, max_val;
2095 gcc_assert (size <= HOST_BITS_PER_WIDE_INT);
2097 if (sign)
2098 {
2099 min_val = -((unsigned HOST_WIDE_INT) 1 << (size - 1));
2100 max_val = ((unsigned HOST_WIDE_INT) 1 << (size - 1)) - 1;
2101 }
2102 else
2103 {
2104 min_val = 0;
2105 max_val = ((unsigned HOST_WIDE_INT) 1 << (size - 1) << 1) - 1;
2106 }
2108 *mmin = GEN_INT (trunc_int_for_mode (min_val, target_mode));
2109 *mmax = GEN_INT (trunc_int_for_mode (max_val, target_mode));
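/* Editorial note: for an 8-bit mode this yields [-128, 127] when SIGN is
   nonzero and [0, 255] otherwise.  The unsigned maximum is computed as
   ((... 1 << (size - 1)) << 1) - 1 rather than (1 << size) - 1 so that no
   single shift count ever equals the width of HOST_WIDE_INT, which would
   be undefined behavior when SIZE == HOST_BITS_PER_WIDE_INT.  */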
2112 #include "gt-stor-layout.h"