1 /* C-compiler utilities for types and variables storage layout
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. */
25 #include "coretypes.h"
36 #include "langhooks.h"
40 /* Data type for the expressions representing sizes of data types.
41 It is the first integer type laid out. */
42 tree sizetype_tab[(int) TYPE_KIND_LAST];
44 /* If nonzero, this is an upper limit on alignment of structure fields.
45 The value is measured in bits. */
46 unsigned int maximum_field_alignment = TARGET_DEFAULT_PACK_STRUCT * BITS_PER_UNIT;
47 /* ... and its original value in bytes, specified via -fpack-struct=<value>. */
48 unsigned int initial_max_fld_align = TARGET_DEFAULT_PACK_STRUCT;
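/* Editorial illustration (not in the original sources): with the default
   BITS_PER_UNIT of 8, compiling with -fpack-struct=2 leaves
   initial_max_fld_align == 2 (bytes) and maximum_field_alignment == 16
   (bits), so no structure field is given more than 16-bit alignment no
   matter what its type would normally require.  */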
50 /* Nonzero if all REFERENCE_TYPEs are internal and hence should be
51 allocated in Pmode, not ptr_mode. Set only by internal_reference_types
52 called only by a front end. */
53 static int reference_types_internal = 0;
55 static void finalize_record_size (record_layout_info);
56 static void finalize_type_size (tree);
57 static void place_union_field (record_layout_info, tree);
58 #if defined (PCC_BITFIELD_TYPE_MATTERS) || defined (BITFIELD_NBYTES_LIMITED)
59 static int excess_unit_span (HOST_WIDE_INT, HOST_WIDE_INT, HOST_WIDE_INT,
			     HOST_WIDE_INT, tree);
#endif
62 extern void debug_rli (record_layout_info);
64 /* SAVE_EXPRs for sizes of types and decls, waiting to be expanded. */
66 static GTY(()) tree pending_sizes;
68 /* Show that REFERENCE_TYPES are internal and should be Pmode. Called only by a front end. */
72 internal_reference_types (void)
74 reference_types_internal = 1;
77 /* Get a list of all the objects put on the pending sizes list. */
80 get_pending_sizes (void)
82 tree chain = pending_sizes;
88 /* Add EXPR to the pending sizes list. */
91 put_pending_size (tree expr)
93 /* Strip any simple arithmetic from EXPR to see if it has an underlying SAVE_EXPR. */
95 expr = skip_simple_arithmetic (expr);
97 if (TREE_CODE (expr) == SAVE_EXPR)
98 pending_sizes = tree_cons (NULL_TREE, expr, pending_sizes);
101 /* Put a chain of objects into the pending sizes list, which must be empty. */
105 put_pending_sizes (tree chain)
107 gcc_assert (!pending_sizes);
108 pending_sizes = chain;
111 /* Given a size SIZE that may not be a constant, return a SAVE_EXPR
112 to serve as the actual size-expression for a type or decl. */
115 variable_size (tree size)
119 /* If the language-processor is to take responsibility for variable-sized
120 items (e.g., languages which have elaboration procedures like Ada),
121 just return SIZE unchanged. Likewise for self-referential sizes and constant sizes.
123 if (TREE_CONSTANT (size)
124 || lang_hooks.decls.global_bindings_p () < 0
125 || CONTAINS_PLACEHOLDER_P (size))
128 size = save_expr (size);
130 /* If an array with a variable number of elements is declared, and
131 the elements require destruction, we will emit a cleanup for the
132 array. That cleanup is run both on normal exit from the block
133 and in the exception-handler for the block. Normally, when code
134 is used in both ordinary code and in an exception handler it is
135 `unsaved', i.e., all SAVE_EXPRs are recalculated. However, we do
136 not wish to do that here; the array-size is the same in both places. */
138 save = skip_simple_arithmetic (size);
140 if (cfun && cfun->x_dont_save_pending_sizes_p)
141 /* The front-end doesn't want us to keep a list of the expressions
142 that determine sizes for variable size objects. Trust it. */
145 if (lang_hooks.decls.global_bindings_p ())
147 if (TREE_CONSTANT (size))
148 error ("type size can%'t be explicitly evaluated");
150 error ("variable-size type declared outside of any function");
152 return size_one_node;
155 put_pending_size (save);

  return size;
}
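/* Editorial usage sketch (not in the original sources): for a C99 VLA such
   as "int a[n];" (assuming a 32-bit int), the front end passes the bit-size
   expression n * 32 to variable_size.  A constant size is returned
   unchanged because TREE_CONSTANT holds for it; otherwise the expression is
   wrapped in a SAVE_EXPR so it is evaluated only once, and put_pending_size
   remembers that SAVE_EXPR for expansion at a safe point.  */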
160 #ifndef MAX_FIXED_MODE_SIZE
161 #define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
164 /* Return the machine mode to use for a nonscalar of SIZE bits. The
165 mode must be in class CLASS, and have exactly that many value bits;
166 it may have padding as well. If LIMIT is nonzero, modes wider
167 than MAX_FIXED_MODE_SIZE will not be used. */
170 mode_for_size (unsigned int size, enum mode_class class, int limit)
172 enum machine_mode mode;
174 if (limit && size > MAX_FIXED_MODE_SIZE)
177 /* Get the first mode which has this size, in the specified class. */
178 for (mode = GET_CLASS_NARROWEST_MODE (class); mode != VOIDmode;
179 mode = GET_MODE_WIDER_MODE (mode))
180 if (GET_MODE_PRECISION (mode) == size)
186 /* Similar, except passed a tree node. */
189 mode_for_size_tree (tree size, enum mode_class class, int limit)
191 if (TREE_CODE (size) != INTEGER_CST
192 || TREE_OVERFLOW (size)
193 /* What we really want to say here is that the size can fit in a
194 host integer, but we know there's no way we'd find a mode for
195 this many bits, so there's no point in doing the precise test. */
196 || compare_tree_int (size, 1000) > 0)
199 return mode_for_size (tree_low_cst (size, 1), class, limit);
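/* Editorial usage sketch (not in the original sources): on a target whose
   SImode is 32 bits wide and which has no 24-bit integer mode,
   mode_for_size (32, MODE_INT, 0) yields SImode while
   mode_for_size (24, MODE_INT, 0) yields BLKmode; with LIMIT nonzero, any
   request wider than MAX_FIXED_MODE_SIZE likewise yields BLKmode.  */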
202 /* Similar, but never return BLKmode; return the narrowest mode that
203 contains at least the requested number of value bits. */
206 smallest_mode_for_size (unsigned int size, enum mode_class class)
208 enum machine_mode mode;
210 /* Get the first mode which has at least this size, in the specified class. */
212 for (mode = GET_CLASS_NARROWEST_MODE (class); mode != VOIDmode;
213 mode = GET_MODE_WIDER_MODE (mode))
214 if (GET_MODE_PRECISION (mode) >= size)
220 /* Find an integer mode of the exact same size, or BLKmode on failure. */
223 int_mode_for_mode (enum machine_mode mode)
225 switch (GET_MODE_CLASS (mode))
228 case MODE_PARTIAL_INT:
231 case MODE_COMPLEX_INT:
232 case MODE_COMPLEX_FLOAT:
234 case MODE_VECTOR_INT:
235 case MODE_VECTOR_FLOAT:
236 mode = mode_for_size (GET_MODE_BITSIZE (mode), MODE_INT, 0);
243 /* ... fall through ... */
253 /* Return the alignment of MODE. This will be bounded by 1 and
254 BIGGEST_ALIGNMENT. */
257 get_mode_alignment (enum machine_mode mode)
259 return MIN (BIGGEST_ALIGNMENT, MAX (1, mode_base_align[mode]*BITS_PER_UNIT));
263 /* Subroutine of layout_decl: Force alignment required for the data type.
264 But if the decl itself wants greater alignment, don't override that. */
267 do_type_align (tree type, tree decl)
269 if (TYPE_ALIGN (type) > DECL_ALIGN (decl))
271 DECL_ALIGN (decl) = TYPE_ALIGN (type);
272 if (TREE_CODE (decl) == FIELD_DECL)
273 DECL_USER_ALIGN (decl) = TYPE_USER_ALIGN (type);
277 /* Set the size, mode and alignment of a ..._DECL node.
278 TYPE_DECL does need this for C++.
279 Note that LABEL_DECL and CONST_DECL nodes do not need this,
280 and FUNCTION_DECL nodes have them set up in a special (and simple) way.
281 Don't call layout_decl for them.
283 KNOWN_ALIGN is the amount of alignment we can assume this
284 decl has with no special effort. It is relevant only for FIELD_DECLs
285 and depends on the previous fields.
286 All that matters about KNOWN_ALIGN is which powers of 2 divide it.
287 If KNOWN_ALIGN is 0, it means, "as much alignment as you like":
288 the record will be aligned to suit. */
291 layout_decl (tree decl, unsigned int known_align)
293 tree type = TREE_TYPE (decl);
294 enum tree_code code = TREE_CODE (decl);
297 if (code == CONST_DECL)
300 gcc_assert (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL
301 || code == TYPE_DECL || code == FIELD_DECL);
303 rtl = DECL_RTL_IF_SET (decl);
305 if (type == error_mark_node)
306 type = void_type_node;
308 /* Usually the size and mode come from the data type without change,
309 however, the front-end may set the explicit width of the field, so its
310 size may not be the same as the size of its type. This happens with
311 bitfields, of course (an `int' bitfield may be only 2 bits, say), but it
312 also happens with other fields. For example, the C++ front-end creates
313 zero-sized fields corresponding to empty base classes, and depends on
314 layout_type setting DECL_FIELD_BITPOS correctly for the field. Set the
315 size in bytes from the size in bits. If we have already set the mode,
316 don't set it again since we can be called twice for FIELD_DECLs. */
318 DECL_UNSIGNED (decl) = TYPE_UNSIGNED (type);
319 if (DECL_MODE (decl) == VOIDmode)
320 DECL_MODE (decl) = TYPE_MODE (type);
322 if (DECL_SIZE (decl) == 0)
324 DECL_SIZE (decl) = TYPE_SIZE (type);
325 DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
327 else if (DECL_SIZE_UNIT (decl) == 0)
328 DECL_SIZE_UNIT (decl)
329 = fold_convert (sizetype, size_binop (CEIL_DIV_EXPR, DECL_SIZE (decl),
332 if (code != FIELD_DECL)
333 /* For non-fields, update the alignment from the type. */
334 do_type_align (type, decl);
336 /* For fields, it's a bit more complicated... */
338 bool old_user_align = DECL_USER_ALIGN (decl);
340 if (DECL_BIT_FIELD (decl))
342 DECL_BIT_FIELD_TYPE (decl) = type;
344 /* A zero-length bit-field affects the alignment of the next field.
346 if (integer_zerop (DECL_SIZE (decl))
347 && ! DECL_PACKED (decl)
348 && ! targetm.ms_bitfield_layout_p (DECL_FIELD_CONTEXT (decl)))
350 #ifdef PCC_BITFIELD_TYPE_MATTERS
351 if (PCC_BITFIELD_TYPE_MATTERS)
352 do_type_align (type, decl);
356 #ifdef EMPTY_FIELD_BOUNDARY
357 if (EMPTY_FIELD_BOUNDARY > DECL_ALIGN (decl))
359 DECL_ALIGN (decl) = EMPTY_FIELD_BOUNDARY;
360 DECL_USER_ALIGN (decl) = 0;
366 /* See if we can use an ordinary integer mode for a bit-field.
367 Conditions are: a fixed size that is correct for another mode
368 and occupying a complete byte or bytes on proper boundary. */
369 if (TYPE_SIZE (type) != 0
370 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
371 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT)
373 enum machine_mode xmode
374 = mode_for_size_tree (DECL_SIZE (decl), MODE_INT, 1);
378 if (xmode != BLKmode
	      && (known_align == 0 || known_align >= GET_MODE_ALIGNMENT (xmode)))
380 DECL_ALIGN (decl) = MAX (GET_MODE_ALIGNMENT (xmode),
382 DECL_MODE (decl) = xmode;
383 DECL_BIT_FIELD (decl) = 0;
387 /* Turn off DECL_BIT_FIELD if we won't need it set. */
388 if (TYPE_MODE (type) == BLKmode && DECL_MODE (decl) == BLKmode
389 && known_align >= TYPE_ALIGN (type)
390 && DECL_ALIGN (decl) >= TYPE_ALIGN (type))
391 DECL_BIT_FIELD (decl) = 0;
393 else if (DECL_PACKED (decl) && DECL_USER_ALIGN (decl))
394 /* Don't touch DECL_ALIGN. For other packed fields, go ahead and
395 round up; we'll reduce it again below. We want packing to
396 supersede USER_ALIGN inherited from the type, but defer to
397 alignment explicitly specified on the field decl. */;
399 do_type_align (type, decl);
401 /* If the field is of variable size, we can't misalign it since we
402 have no way to make a temporary to align the result. But this
403 isn't an issue if the decl is not addressable. Likewise if it is of unknown size.
406 Note that do_type_align may set DECL_USER_ALIGN, so we need to
407 check old_user_align instead. */
408 if (DECL_PACKED (decl) && !old_user_align
410 && (DECL_NONADDRESSABLE_P (decl)
411 || DECL_SIZE_UNIT (decl) == 0
412 || TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST))
413 DECL_ALIGN (decl) = MIN (DECL_ALIGN (decl), BITS_PER_UNIT);
415 if (! DECL_USER_ALIGN (decl) && ! DECL_PACKED (decl))
417 /* Some targets (e.g. i386, VMS) limit struct field alignment
418 to a lower boundary than alignment of variables unless
419 it was overridden by attribute aligned. */
420 #ifdef BIGGEST_FIELD_ALIGNMENT
422 = MIN (DECL_ALIGN (decl), (unsigned) BIGGEST_FIELD_ALIGNMENT);
424 #ifdef ADJUST_FIELD_ALIGN
425 DECL_ALIGN (decl) = ADJUST_FIELD_ALIGN (decl, DECL_ALIGN (decl));
429 /* Should this be controlled by DECL_USER_ALIGN, too? */
430 if (maximum_field_alignment != 0)
431 DECL_ALIGN (decl) = MIN (DECL_ALIGN (decl), maximum_field_alignment);
434 /* Evaluate nonconstant size only once, either now or as soon as safe. */
435 if (DECL_SIZE (decl) != 0 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
436 DECL_SIZE (decl) = variable_size (DECL_SIZE (decl));
437 if (DECL_SIZE_UNIT (decl) != 0
438 && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST)
439 DECL_SIZE_UNIT (decl) = variable_size (DECL_SIZE_UNIT (decl));
441 /* If requested, warn about definitions of large data objects. */
443 if (warn_larger_than && (code == VAR_DECL || code == PARM_DECL)
444 && ! DECL_EXTERNAL (decl))
446 tree size = DECL_SIZE_UNIT (decl);
448 if (size != 0 && TREE_CODE (size) == INTEGER_CST
449 && compare_tree_int (size, larger_than_size) > 0)
451 int size_as_int = TREE_INT_CST_LOW (size);
453 if (compare_tree_int (size, size_as_int) == 0)
454 warning (0, "%Jsize of %qD is %d bytes", decl, decl, size_as_int);
456 warning (0, "%Jsize of %qD is larger than %d bytes",
457 decl, decl, larger_than_size);
461 /* If the RTL was already set, update its mode and mem attributes. */
464 PUT_MODE (rtl, DECL_MODE (decl));
465 SET_DECL_RTL (decl, 0);
466 set_mem_attributes (rtl, decl, 1);
467 SET_DECL_RTL (decl, rtl);
471 /* Given a VAR_DECL, PARM_DECL or RESULT_DECL, clears the results of
472 a previous call to layout_decl and calls it again. */
475 relayout_decl (tree decl)
477 DECL_SIZE (decl) = DECL_SIZE_UNIT (decl) = 0;
478 DECL_MODE (decl) = VOIDmode;
479 DECL_ALIGN (decl) = 0;
480 SET_DECL_RTL (decl, 0);
482 layout_decl (decl, 0);
485 /* Hook for a front-end function that can modify the record layout as needed
486 immediately before it is finalized. */
488 static void (*lang_adjust_rli) (record_layout_info) = 0;
491 set_lang_adjust_rli (void (*f) (record_layout_info))
496 /* Begin laying out type T, which may be a RECORD_TYPE, UNION_TYPE, or
497 QUAL_UNION_TYPE. Return a pointer to a struct record_layout_info which
498 is to be passed to all other layout functions for this record. It is the
499 responsibility of the caller to call `free' for the storage returned.
500 Note that garbage collection is not permitted until we finish laying out the record. */
504 start_record_layout (tree t)
506 record_layout_info rli = xmalloc (sizeof (struct record_layout_info_s));
510 /* If the type has a minimum specified alignment (via an attribute
511 declaration, for example) use it -- otherwise, start with a
512 one-byte alignment. */
513 rli->record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (t));
514 rli->unpacked_align = rli->record_align;
515 rli->offset_align = MAX (rli->record_align, BIGGEST_ALIGNMENT);
517 #ifdef STRUCTURE_SIZE_BOUNDARY
518 /* Packed structures don't need to have minimum size. */
519 if (! TYPE_PACKED (t))
520 rli->record_align = MAX (rli->record_align, (unsigned) STRUCTURE_SIZE_BOUNDARY);
523 rli->offset = size_zero_node;
524 rli->bitpos = bitsize_zero_node;
526 rli->pending_statics = 0;
527 rli->packed_maybe_necessary = 0;
532 /* These four routines perform computations that convert between
533 the offset/bitpos forms and byte and bit offsets. */
536 bit_from_pos (tree offset, tree bitpos)
538 return size_binop (PLUS_EXPR, bitpos,
539 size_binop (MULT_EXPR,
540 fold_convert (bitsizetype, offset),
545 byte_from_pos (tree offset, tree bitpos)
547 return size_binop (PLUS_EXPR, offset,
548 fold_convert (sizetype,
549 size_binop (TRUNC_DIV_EXPR, bitpos,
550 bitsize_unit_node)));
554 pos_from_bit (tree *poffset, tree *pbitpos, unsigned int off_align,
557 *poffset = size_binop (MULT_EXPR,
558 fold_convert (sizetype,
559 size_binop (FLOOR_DIV_EXPR, pos,
560 bitsize_int (off_align))),
561 size_int (off_align / BITS_PER_UNIT));
562 *pbitpos = size_binop (FLOOR_MOD_EXPR, pos, bitsize_int (off_align));
565 /* Given a pointer to bit and byte offsets and an offset alignment,
566 normalize the offsets so they are within the alignment. */
569 normalize_offset (tree *poffset, tree *pbitpos, unsigned int off_align)
571 /* If the bit position is now larger than it should be, adjust it downwards. */
573 if (compare_tree_int (*pbitpos, off_align) >= 0)
575 tree extra_aligns = size_binop (FLOOR_DIV_EXPR, *pbitpos,
576 bitsize_int (off_align));
579 = size_binop (PLUS_EXPR, *poffset,
580 size_binop (MULT_EXPR,
581 fold_convert (sizetype, extra_aligns),
582 size_int (off_align / BITS_PER_UNIT)));
585 = size_binop (FLOOR_MOD_EXPR, *pbitpos, bitsize_int (off_align));
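/* Editorial worked example: with OFF_ALIGN == 32, *POFFSET == 2 and
   *PBITPOS == 37 (and BITS_PER_UNIT == 8), one extra 32-bit unit is folded
   into the byte offset: *POFFSET becomes 2 + 1 * 4 == 6 and *PBITPOS
   becomes 37 mod 32 == 5.  */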
589 /* Print debugging information about the information in RLI. */
592 debug_rli (record_layout_info rli)
594 print_node_brief (stderr, "type", rli->t, 0);
595 print_node_brief (stderr, "\noffset", rli->offset, 0);
596 print_node_brief (stderr, " bitpos", rli->bitpos, 0);
598 fprintf (stderr, "\naligns: rec = %u, unpack = %u, off = %u\n",
599 rli->record_align, rli->unpacked_align,
601 if (rli->packed_maybe_necessary)
602 fprintf (stderr, "packed may be necessary\n");
604 if (rli->pending_statics)
606 fprintf (stderr, "pending statics:\n");
607 debug_tree (rli->pending_statics);
611 /* Given an RLI with a possibly-incremented BITPOS, adjust OFFSET and
612 BITPOS if necessary to keep BITPOS below OFFSET_ALIGN. */
615 normalize_rli (record_layout_info rli)
617 normalize_offset (&rli->offset, &rli->bitpos, rli->offset_align);
620 /* Returns the size in bytes allocated so far. */
623 rli_size_unit_so_far (record_layout_info rli)
625 return byte_from_pos (rli->offset, rli->bitpos);
628 /* Returns the size in bits allocated so far. */
631 rli_size_so_far (record_layout_info rli)
633 return bit_from_pos (rli->offset, rli->bitpos);
636 /* FIELD is about to be added to RLI->T. The alignment (in bits) of
637 the next available location within the record is given by KNOWN_ALIGN.
638 Update the variable alignment fields in RLI, and return the alignment
639 to give the FIELD. */
642 update_alignment_for_field (record_layout_info rli, tree field,
643 unsigned int known_align)
645 /* The alignment required for FIELD. */
646 unsigned int desired_align;
647 /* The type of this field. */
648 tree type = TREE_TYPE (field);
649 /* True if the field was explicitly aligned by the user. */
653 /* Lay out the field so we know what alignment it needs. */
654 layout_decl (field, known_align);
655 desired_align = DECL_ALIGN (field);
656 user_align = DECL_USER_ALIGN (field);
658 is_bitfield = (type != error_mark_node
659 && DECL_BIT_FIELD_TYPE (field)
660 && ! integer_zerop (TYPE_SIZE (type)));
662 /* Record must have at least as much alignment as any field.
663 Otherwise, the alignment of the field within the record is meaningless. */
665 if (is_bitfield && targetm.ms_bitfield_layout_p (rli->t))
667 /* Here, the alignment of the underlying type of a bitfield can
668 affect the alignment of a record; even a zero-sized field
669 can do this. The alignment should be to the alignment of
670 the type, except that for zero-size bitfields this only
671 applies if there was an immediately prior, nonzero-size
672 bitfield. (That's the way it is, experimentally.) */
673 if (! integer_zerop (DECL_SIZE (field))
674 ? ! DECL_PACKED (field)
676 && DECL_BIT_FIELD_TYPE (rli->prev_field)
677 && ! integer_zerop (DECL_SIZE (rli->prev_field))))
679 unsigned int type_align = TYPE_ALIGN (type);
680 type_align = MAX (type_align, desired_align);
681 if (maximum_field_alignment != 0)
682 type_align = MIN (type_align, maximum_field_alignment);
683 rli->record_align = MAX (rli->record_align, type_align);
684 rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
685 /* If we start a new run, make sure we start it properly aligned. */
686 if ((!rli->prev_field
687 || integer_zerop (DECL_SIZE (field))
688 || integer_zerop (DECL_SIZE (rli->prev_field))
689 || !host_integerp (DECL_SIZE (rli->prev_field), 0)
690 || !host_integerp (TYPE_SIZE (type), 0)
691 || !simple_cst_equal (TYPE_SIZE (type),
692 TYPE_SIZE (TREE_TYPE (rli->prev_field)))
693 || (rli->remaining_in_alignment
694 < tree_low_cst (DECL_SIZE (field), 0)))
695 && desired_align < type_align)
696 desired_align = type_align;
699 #ifdef PCC_BITFIELD_TYPE_MATTERS
700 else if (is_bitfield && PCC_BITFIELD_TYPE_MATTERS)
702 /* Named bit-fields cause the entire structure to have the
703 alignment implied by their type. Some targets also apply the same
704 rules to unnamed bitfields. */
705 if (DECL_NAME (field) != 0
706 || targetm.align_anon_bitfield ())
708 unsigned int type_align = TYPE_ALIGN (type);
710 #ifdef ADJUST_FIELD_ALIGN
711 if (! TYPE_USER_ALIGN (type))
712 type_align = ADJUST_FIELD_ALIGN (field, type_align);
715 if (maximum_field_alignment != 0)
716 type_align = MIN (type_align, maximum_field_alignment);
717 else if (DECL_PACKED (field))
718 type_align = MIN (type_align, BITS_PER_UNIT);
720 /* The alignment of the record is increased to the maximum
721 of the current alignment, the alignment indicated on the
722 field (i.e., the alignment specified by an __aligned__
723 attribute), and the alignment indicated by the type of the field. */
725 rli->record_align = MAX (rli->record_align, desired_align);
726 rli->record_align = MAX (rli->record_align, type_align);
729 rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
730 user_align |= TYPE_USER_ALIGN (type);
736 rli->record_align = MAX (rli->record_align, desired_align);
737 rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
740 TYPE_USER_ALIGN (rli->t) |= user_align;
742 return desired_align;
745 /* Called from place_field to handle unions. */
748 place_union_field (record_layout_info rli, tree field)
750 update_alignment_for_field (rli, field, /*known_align=*/0);
752 DECL_FIELD_OFFSET (field) = size_zero_node;
753 DECL_FIELD_BIT_OFFSET (field) = bitsize_zero_node;
754 SET_DECL_OFFSET_ALIGN (field, BIGGEST_ALIGNMENT);
756 /* We assume the union's size will be a multiple of a byte so we don't
757 bother with BITPOS. */
758 if (TREE_CODE (rli->t) == UNION_TYPE)
759 rli->offset = size_binop (MAX_EXPR, rli->offset, DECL_SIZE_UNIT (field));
760 else if (TREE_CODE (rli->t) == QUAL_UNION_TYPE)
761 rli->offset = fold_build3 (COND_EXPR, sizetype,
762 DECL_QUALIFIER (field),
763 DECL_SIZE_UNIT (field), rli->offset);
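/* Editorial example (not in the original sources): for
   "union u { char c; double d; };" each member gets offset 0 and
   rli->offset ends up as the maximum of the member sizes, i.e. 8 bytes on a
   typical target with an 8-byte double; for a QUAL_UNION_TYPE the size is a
   COND_EXPR selected by each field's DECL_QUALIFIER instead.  */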
766 #if defined (PCC_BITFIELD_TYPE_MATTERS) || defined (BITFIELD_NBYTES_LIMITED)
767 /* A bitfield of SIZE with a required access alignment of ALIGN is allocated
768 at BYTE_OFFSET / BIT_OFFSET. Return nonzero if the field would span more
769 units of alignment than the underlying TYPE. */
771 excess_unit_span (HOST_WIDE_INT byte_offset, HOST_WIDE_INT bit_offset,
772 HOST_WIDE_INT size, HOST_WIDE_INT align, tree type)
774 /* Note that the calculation of OFFSET might overflow; we calculate it so
775 that we still get the right result as long as ALIGN is a power of two. */
776 unsigned HOST_WIDE_INT offset = byte_offset * BITS_PER_UNIT + bit_offset;
778 offset = offset % align;
779 return ((offset + size + align - 1) / align
780 > ((unsigned HOST_WIDE_INT) tree_low_cst (TYPE_SIZE (type), 1) / align));
}
#endif
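/* Editorial worked example: a 6-bit field whose type has TYPE_SIZE and
   alignment 32, placed at byte 3, bit 6, starts at bit 30 of its 32-bit
   unit; (30 + 6 + 31) / 32 == 2 units, which exceeds 32 / 32 == 1, so the
   function returns nonzero and place_field advances to the next
   boundary.  */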
785 /* RLI contains information about the layout of a RECORD_TYPE. FIELD
786 is a FIELD_DECL to be added after those fields already present in
787 T. (FIELD is not actually added to the TYPE_FIELDS list here;
788 callers that desire that behavior must manually perform that step.) */
791 place_field (record_layout_info rli, tree field)
793 /* The alignment required for FIELD. */
794 unsigned int desired_align;
795 /* The alignment FIELD would have if we just dropped it into the
796 record as it presently stands. */
797 unsigned int known_align;
798 unsigned int actual_align;
799 /* The type of this field. */
800 tree type = TREE_TYPE (field);
802 if (TREE_CODE (field) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
805 /* If FIELD is static, then treat it like a separate variable, not
806 really like a structure field. If it is a FUNCTION_DECL, it's a
807 method. In both cases, all we do is lay out the decl, and we do
808 it *after* the record is laid out. */
809 if (TREE_CODE (field) == VAR_DECL)
811 rli->pending_statics = tree_cons (NULL_TREE, field,
812 rli->pending_statics);
816 /* Enumerators and enum types which are local to this class need not
817 be laid out. Likewise for initialized constant fields. */
818 else if (TREE_CODE (field) != FIELD_DECL)
821 /* Unions are laid out very differently than records, so split
822 that code off to another function. */
823 else if (TREE_CODE (rli->t) != RECORD_TYPE)
825 place_union_field (rli, field);
829 /* Work out the known alignment so far. Note that A & (-A) is the
830 value of the least-significant bit in A that is one. */
831 if (! integer_zerop (rli->bitpos))
832 known_align = (tree_low_cst (rli->bitpos, 1)
833 & - tree_low_cst (rli->bitpos, 1));
834 else if (integer_zerop (rli->offset))
    known_align = 0;
836 else if (host_integerp (rli->offset, 1))
837 known_align = (BITS_PER_UNIT
838 * (tree_low_cst (rli->offset, 1)
839 & - tree_low_cst (rli->offset, 1)));
841 known_align = rli->offset_align;
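/* Editorial example: if rli->bitpos is 24, its lowest set bit is 8, so
   KNOWN_ALIGN is 8 bits; only the powers of 2 that divide the current
   position matter when deciding whether FIELD needs padding.  */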
843 desired_align = update_alignment_for_field (rli, field, known_align);
844 if (known_align == 0)
845 known_align = MAX (BIGGEST_ALIGNMENT, rli->record_align);
847 if (warn_packed && DECL_PACKED (field))
849 if (known_align >= TYPE_ALIGN (type))
851 if (TYPE_ALIGN (type) > desired_align)
853 if (STRICT_ALIGNMENT)
854 warning (0, "%Jpacked attribute causes inefficient alignment "
855 "for %qD", field, field);
857 warning (0, "%Jpacked attribute is unnecessary for %qD",
862 rli->packed_maybe_necessary = 1;
865 /* Does this field automatically have alignment it needs by virtue
866 of the fields that precede it and the record's own alignment? */
867 if (known_align < desired_align)
869 /* No, we need to skip space before this field.
870 Bump the cumulative size to multiple of field alignment. */
873 warning (0, "%Jpadding struct to align %qD", field, field);
875 /* If the alignment is still within offset_align, just align the bit position. */
877 if (desired_align < rli->offset_align)
878 rli->bitpos = round_up (rli->bitpos, desired_align);
881 /* First adjust OFFSET by the partial bits, then align. */
883 = size_binop (PLUS_EXPR, rli->offset,
884 fold_convert (sizetype,
885 size_binop (CEIL_DIV_EXPR, rli->bitpos,
886 bitsize_unit_node)));
887 rli->bitpos = bitsize_zero_node;
889 rli->offset = round_up (rli->offset, desired_align / BITS_PER_UNIT);
892 if (! TREE_CONSTANT (rli->offset))
893 rli->offset_align = desired_align;
897 /* Handle compatibility with PCC. Note that if the record has any
898 variable-sized fields, we need not worry about compatibility. */
899 #ifdef PCC_BITFIELD_TYPE_MATTERS
900 if (PCC_BITFIELD_TYPE_MATTERS
901 && ! targetm.ms_bitfield_layout_p (rli->t)
902 && TREE_CODE (field) == FIELD_DECL
903 && type != error_mark_node
904 && DECL_BIT_FIELD (field)
905 && ! DECL_PACKED (field)
906 && maximum_field_alignment == 0
907 && ! integer_zerop (DECL_SIZE (field))
908 && host_integerp (DECL_SIZE (field), 1)
909 && host_integerp (rli->offset, 1)
910 && host_integerp (TYPE_SIZE (type), 1))
912 unsigned int type_align = TYPE_ALIGN (type);
913 tree dsize = DECL_SIZE (field);
914 HOST_WIDE_INT field_size = tree_low_cst (dsize, 1);
915 HOST_WIDE_INT offset = tree_low_cst (rli->offset, 0);
916 HOST_WIDE_INT bit_offset = tree_low_cst (rli->bitpos, 0);
918 #ifdef ADJUST_FIELD_ALIGN
919 if (! TYPE_USER_ALIGN (type))
920 type_align = ADJUST_FIELD_ALIGN (field, type_align);
923 /* A bit field may not span more units of alignment of its type
924 than its type itself. Advance to next boundary if necessary. */
925 if (excess_unit_span (offset, bit_offset, field_size, type_align, type))
926 rli->bitpos = round_up (rli->bitpos, type_align);
928 TYPE_USER_ALIGN (rli->t) |= TYPE_USER_ALIGN (type);
932 #ifdef BITFIELD_NBYTES_LIMITED
933 if (BITFIELD_NBYTES_LIMITED
934 && ! targetm.ms_bitfield_layout_p (rli->t)
935 && TREE_CODE (field) == FIELD_DECL
936 && type != error_mark_node
937 && DECL_BIT_FIELD_TYPE (field)
938 && ! DECL_PACKED (field)
939 && ! integer_zerop (DECL_SIZE (field))
940 && host_integerp (DECL_SIZE (field), 1)
941 && host_integerp (rli->offset, 1)
942 && host_integerp (TYPE_SIZE (type), 1))
944 unsigned int type_align = TYPE_ALIGN (type);
945 tree dsize = DECL_SIZE (field);
946 HOST_WIDE_INT field_size = tree_low_cst (dsize, 1);
947 HOST_WIDE_INT offset = tree_low_cst (rli->offset, 0);
948 HOST_WIDE_INT bit_offset = tree_low_cst (rli->bitpos, 0);
950 #ifdef ADJUST_FIELD_ALIGN
951 if (! TYPE_USER_ALIGN (type))
952 type_align = ADJUST_FIELD_ALIGN (field, type_align);
955 if (maximum_field_alignment != 0)
956 type_align = MIN (type_align, maximum_field_alignment);
957 /* ??? This test is opposite the test in the containing if
958 statement, so this code is unreachable currently. */
959 else if (DECL_PACKED (field))
960 type_align = MIN (type_align, BITS_PER_UNIT);
962 /* A bit field may not span the unit of alignment of its type.
963 Advance to next boundary if necessary. */
964 if (excess_unit_span (offset, bit_offset, field_size, type_align, type))
965 rli->bitpos = round_up (rli->bitpos, type_align);
967 TYPE_USER_ALIGN (rli->t) |= TYPE_USER_ALIGN (type);
971 /* See the docs for TARGET_MS_BITFIELD_LAYOUT_P for details.
973 When a bit field is inserted into a packed record, the whole
974 size of the underlying type is used by one or more same-size
975 adjacent bitfields. (That is, if it's long:3, 32 bits is
976 used in the record, and any additional adjacent long bitfields are
977 packed into the same chunk of 32 bits. However, if the size
978 changes, a new field of that size is allocated.) In an unpacked
979 record, this is the same as using alignment, but not equivalent when packing.
982 Note: for compatibility, we use the type size, not the type alignment
983 to determine alignment, since that matches the documentation */
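/* Editorial example (assuming a 32-bit long and 16-bit short): in
   "struct s { long a : 3; long b : 3; short c : 3; };" the MS layout puts
   `a' and `b' in the same 32-bit allocation unit because their type sizes
   match, but `c' has a different type size, so a fresh 16-bit unit is
   started for it rather than packing it into the bits left over from the
   long.  */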
985 if (targetm.ms_bitfield_layout_p (rli->t)
986 && ((DECL_BIT_FIELD_TYPE (field) && ! DECL_PACKED (field))
987 || (rli->prev_field && ! DECL_PACKED (rli->prev_field))))
989 /* At this point, either the prior or current are bitfields,
990 (possibly both), and we're dealing with MS packing. */
991 tree prev_saved = rli->prev_field;
993 /* Is the prior field a bitfield? If so, handle "runs" of same
995 if (rli->prev_field /* necessarily a bitfield if it exists. */)
997 /* If both are bitfields, nonzero, and the same size, this is
998 the middle of a run. Zero declared size fields are special
999 and handled as "end of run". (Note: it's nonzero declared
1000 size, but equal type sizes!) (Since we know that both
1001 the current and previous fields are bitfields by the
1002 time we check it, DECL_SIZE must be present for both.) */
1003 if (DECL_BIT_FIELD_TYPE (field)
1004 && !integer_zerop (DECL_SIZE (field))
1005 && !integer_zerop (DECL_SIZE (rli->prev_field))
1006 && host_integerp (DECL_SIZE (rli->prev_field), 0)
1007 && host_integerp (TYPE_SIZE (type), 0)
1008 && simple_cst_equal (TYPE_SIZE (type),
1009 TYPE_SIZE (TREE_TYPE (rli->prev_field))))
1011 /* We're in the middle of a run of equal type size fields; make
1012 sure we realign if we run out of bits. (Not decl size, type size!) */
1014 HOST_WIDE_INT bitsize = tree_low_cst (DECL_SIZE (field), 0);
1016 if (rli->remaining_in_alignment < bitsize)
1018 /* If PREV_FIELD is packed, and we haven't lumped
1019 non-packed bitfields with it, treat this as if PREV_FIELD
1020 was not a bitfield. This avoids anomalies where a packed
1021 bitfield with long long base type can take up more
1022 space than a same-size bitfield with base type short. */
1023 if (rli->prev_packed)
1024 rli->prev_field = prev_saved = NULL;
1027 /* out of bits; bump up to next 'word'. */
1028 rli->offset = DECL_FIELD_OFFSET (rli->prev_field);
1030 = size_binop (PLUS_EXPR, TYPE_SIZE (type),
1031 DECL_FIELD_BIT_OFFSET (rli->prev_field));
1032 rli->prev_field = field;
1033 rli->remaining_in_alignment
1034 = tree_low_cst (TYPE_SIZE (type), 0) - bitsize;
1038 rli->remaining_in_alignment -= bitsize;
1040 else if (rli->prev_packed)
1041 rli->prev_field = prev_saved = NULL;
1044 /* End of a run: if leaving a run of bitfields of the same type
1045 size, we have to "use up" the rest of the bits of the type size.
1048 Compute the new position as the sum of the size for the prior
1049 type and where we first started working on that type.
1050 Note: since the beginning of the field was aligned then
1051 of course the end will be too. No round needed. */
1053 if (!integer_zerop (DECL_SIZE (rli->prev_field)))
1055 tree type_size = TYPE_SIZE (TREE_TYPE (rli->prev_field));
1057 /* If the desired alignment is greater or equal to TYPE_SIZE,
1058 we have already adjusted rli->bitpos / rli->offset above.
1060 if ((unsigned HOST_WIDE_INT) tree_low_cst (type_size, 0)
1063 = size_binop (PLUS_EXPR, type_size,
1064 DECL_FIELD_BIT_OFFSET (rli->prev_field));
1067 /* We "use up" size zero fields; the code below should behave
1068 as if the prior field was not a bitfield. */
1071 /* Cause a new bitfield to be captured, either this time (if
1072 currently a bitfield) or next time we see one. */
1073 if (!DECL_BIT_FIELD_TYPE(field)
1074 || integer_zerop (DECL_SIZE (field)))
1075 rli->prev_field = NULL;
1078 rli->prev_packed = 0;
1079 normalize_rli (rli);
1082 /* If we're starting a new run of same size type bitfields
1083 (or a run of non-bitfields), set up the "first of the run"
1086 That is, if the current field is not a bitfield, or if there
1087 was a prior bitfield the type sizes differ, or if there wasn't
1088 a prior bitfield the size of the current field is nonzero.
1090 Note: we must be sure to test ONLY the type size if there was
1091 a prior bitfield and ONLY for the current field being zero if there wasn't. */
1094 if (!DECL_BIT_FIELD_TYPE (field)
1095 || ( prev_saved != NULL
1096 ? !simple_cst_equal (TYPE_SIZE (type),
1097 TYPE_SIZE (TREE_TYPE (prev_saved)))
1098 : !integer_zerop (DECL_SIZE (field)) ))
1100 /* Never smaller than a byte for compatibility. */
1101 unsigned int type_align = BITS_PER_UNIT;
1103 /* (When not a bitfield), we could be seeing a flex array (with
1104 no DECL_SIZE). Since we won't be using remaining_in_alignment
1105 until we see a bitfield (and come by here again) we just skip the calculation. */
1107 if (DECL_SIZE (field) != NULL
1108 && host_integerp (TYPE_SIZE (TREE_TYPE (field)), 0)
1109 && host_integerp (DECL_SIZE (field), 0))
1110 rli->remaining_in_alignment
1111 = tree_low_cst (TYPE_SIZE (TREE_TYPE(field)), 0)
1112 - tree_low_cst (DECL_SIZE (field), 0);
1114 /* Now align (conventionally) for the new type. */
1115 if (!DECL_PACKED(field))
1116 type_align = MAX(TYPE_ALIGN (type), type_align);
1119 && DECL_BIT_FIELD_TYPE (prev_saved)
1120 /* If the previous bit-field is zero-sized, we've already
1121 accounted for its alignment needs (or ignored it, if
1122 appropriate) while placing it. */
1123 && ! integer_zerop (DECL_SIZE (prev_saved)))
1124 type_align = MAX (type_align,
1125 TYPE_ALIGN (TREE_TYPE (prev_saved)));
1127 if (maximum_field_alignment != 0)
1128 type_align = MIN (type_align, maximum_field_alignment);
1130 rli->bitpos = round_up (rli->bitpos, type_align);
1132 /* If we really aligned, don't allow subsequent bitfields to undo that. */
1134 rli->prev_field = NULL;
1138 /* Offset so far becomes the position of this field after normalizing. */
1139 normalize_rli (rli);
1140 DECL_FIELD_OFFSET (field) = rli->offset;
1141 DECL_FIELD_BIT_OFFSET (field) = rli->bitpos;
1142 SET_DECL_OFFSET_ALIGN (field, rli->offset_align);
1144 /* If this field ended up more aligned than we thought it would be (we
1145 approximate this by seeing if its position changed), lay out the field
1146 again; perhaps we can use an integral mode for it now. */
1147 if (! integer_zerop (DECL_FIELD_BIT_OFFSET (field)))
1148 actual_align = (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1149 & - tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1));
1150 else if (integer_zerop (DECL_FIELD_OFFSET (field)))
1151 actual_align = MAX (BIGGEST_ALIGNMENT, rli->record_align);
1152 else if (host_integerp (DECL_FIELD_OFFSET (field), 1))
1153 actual_align = (BITS_PER_UNIT
1154 * (tree_low_cst (DECL_FIELD_OFFSET (field), 1)
1155 & - tree_low_cst (DECL_FIELD_OFFSET (field), 1)));
1157 actual_align = DECL_OFFSET_ALIGN (field);
1158 /* ACTUAL_ALIGN is still the actual alignment *within the record* .
1159 store / extract bit field operations will check the alignment of the
1160 record against the mode of bit fields. */
1162 if (known_align != actual_align)
1163 layout_decl (field, actual_align);
1165 if (DECL_BIT_FIELD_TYPE (field))
1167 unsigned int type_align = TYPE_ALIGN (type);
1169 /* Only the MS bitfields use this. We used to also put any kind of
1170 packed bit fields into prev_field, but that makes no sense, because
1171 an 8 bit packed bit field shouldn't impose more restriction on
1172 following fields than a char field, and the alignment requirements
1173 are also not fulfilled.
1174 There is no sane value to set rli->remaining_in_alignment to when
1175 a packed bitfield in prev_field is unaligned. */
1176 if (maximum_field_alignment != 0)
1177 type_align = MIN (type_align, maximum_field_alignment);
1178 gcc_assert (rli->prev_field
1179 || actual_align >= type_align || DECL_PACKED (field)
1180 || integer_zerop (DECL_SIZE (field))
1181 || !targetm.ms_bitfield_layout_p (rli->t));
1182 if (rli->prev_field == NULL && actual_align >= type_align
1183 && !integer_zerop (DECL_SIZE (field)))
1185 rli->prev_field = field;
1186 /* rli->remaining_in_alignment has not been set if the bitfield
1187 has size zero, or if it is a packed bitfield. */
1188 rli->remaining_in_alignment
1189 = (tree_low_cst (TYPE_SIZE (TREE_TYPE (field)), 0)
1190 - tree_low_cst (DECL_SIZE (field), 0));
1191 rli->prev_packed = DECL_PACKED (field);
1194 else if (rli->prev_field && DECL_PACKED (field))
1196 HOST_WIDE_INT bitsize = tree_low_cst (DECL_SIZE (field), 0);
1198 if (rli->remaining_in_alignment < bitsize)
1199 rli->prev_field = NULL;
1201 rli->remaining_in_alignment -= bitsize;
1205 /* Now add size of this field to the size of the record. If the size is
1206 not constant, treat the field as being a multiple of bytes and just
1207 adjust the offset, resetting the bit position. Otherwise, apportion the
1208 size amongst the bit position and offset. First handle the case of an
1209 unspecified size, which can happen when we have an invalid nested struct
1210 definition, such as struct j { struct j { int i; } }. The error message
1211 is printed in finish_struct. */
1212 if (DECL_SIZE (field) == 0)
1214 else if (TREE_CODE (DECL_SIZE_UNIT (field)) != INTEGER_CST
1215 || TREE_CONSTANT_OVERFLOW (DECL_SIZE_UNIT (field)))
1218 = size_binop (PLUS_EXPR, rli->offset,
1219 fold_convert (sizetype,
1220 size_binop (CEIL_DIV_EXPR, rli->bitpos,
1221 bitsize_unit_node)));
1223 = size_binop (PLUS_EXPR, rli->offset, DECL_SIZE_UNIT (field));
1224 rli->bitpos = bitsize_zero_node;
1225 rli->offset_align = MIN (rli->offset_align, desired_align);
1229 rli->bitpos = size_binop (PLUS_EXPR, rli->bitpos, DECL_SIZE (field));
1230 normalize_rli (rli);
1234 /* Assuming that all the fields have been laid out, this function uses
1235 RLI to compute the final TYPE_SIZE, TYPE_ALIGN, etc. for the type
1236 indicated by RLI. */
1239 finalize_record_size (record_layout_info rli)
1241 tree unpadded_size, unpadded_size_unit;
1243 /* Now we want just byte and bit offsets, so set the offset alignment
1244 to be a byte and then normalize. */
1245 rli->offset_align = BITS_PER_UNIT;
1246 normalize_rli (rli);
1248 /* Determine the desired alignment. */
1249 #ifdef ROUND_TYPE_ALIGN
1250 TYPE_ALIGN (rli->t) = ROUND_TYPE_ALIGN (rli->t, TYPE_ALIGN (rli->t),
1253 TYPE_ALIGN (rli->t) = MAX (TYPE_ALIGN (rli->t), rli->record_align);
1256 /* Compute the size so far. Be sure to allow for extra bits in the
1257 size in bytes. We have guaranteed above that it will be no more
1258 than a single byte. */
1259 unpadded_size = rli_size_so_far (rli);
1260 unpadded_size_unit = rli_size_unit_so_far (rli);
1261 if (! integer_zerop (rli->bitpos))
1263 unpadded_size_unit = size_binop (PLUS_EXPR, unpadded_size_unit, size_one_node);
1265 /* Round the size up to be a multiple of the required alignment. */
1266 TYPE_SIZE (rli->t) = round_up (unpadded_size, TYPE_ALIGN (rli->t));
1267 TYPE_SIZE_UNIT (rli->t)
1268 = round_up (unpadded_size_unit, TYPE_ALIGN_UNIT (rli->t));
1270 if (warn_padded && TREE_CONSTANT (unpadded_size)
1271 && simple_cst_equal (unpadded_size, TYPE_SIZE (rli->t)) == 0)
1272 warning (0, "padding struct size to alignment boundary");
1274 if (warn_packed && TREE_CODE (rli->t) == RECORD_TYPE
1275 && TYPE_PACKED (rli->t) && ! rli->packed_maybe_necessary
1276 && TREE_CONSTANT (unpadded_size))
1280 #ifdef ROUND_TYPE_ALIGN
1282 = ROUND_TYPE_ALIGN (rli->t, TYPE_ALIGN (rli->t), rli->unpacked_align);
1284 rli->unpacked_align = MAX (TYPE_ALIGN (rli->t), rli->unpacked_align);
1287 unpacked_size = round_up (TYPE_SIZE (rli->t), rli->unpacked_align);
1288 if (simple_cst_equal (unpacked_size, TYPE_SIZE (rli->t)))
1290 TYPE_PACKED (rli->t) = 0;
1292 if (TYPE_NAME (rli->t))
1296 if (TREE_CODE (TYPE_NAME (rli->t)) == IDENTIFIER_NODE)
1297 name = IDENTIFIER_POINTER (TYPE_NAME (rli->t));
1299 name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (rli->t)));
1301 if (STRICT_ALIGNMENT)
1302 warning (0, "packed attribute causes inefficient "
1303 "alignment for %qs", name);
1305 warning (0, "packed attribute is unnecessary for %qs", name);
1309 if (STRICT_ALIGNMENT)
1310 warning (0, "packed attribute causes inefficient alignment");
1312 warning (0, "packed attribute is unnecessary");
1318 /* Compute the TYPE_MODE for the TYPE (which is a RECORD_TYPE). */
1321 compute_record_mode (tree type)
1324 enum machine_mode mode = VOIDmode;
1326 /* Most RECORD_TYPEs have BLKmode, so we start off assuming that.
1327 However, if possible, we use a mode that fits in a register
1328 instead, in order to allow for better optimization down the line. */
1330 TYPE_MODE (type) = BLKmode;
1332 if (! host_integerp (TYPE_SIZE (type), 1))
1335 /* A record which has any BLKmode members must itself be
1336 BLKmode; it can't go in a register. Unless the member is
1337 BLKmode only because it isn't aligned. */
1338 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1340 if (TREE_CODE (field) != FIELD_DECL)
1343 if (TREE_CODE (TREE_TYPE (field)) == ERROR_MARK
1344 || (TYPE_MODE (TREE_TYPE (field)) == BLKmode
1345 && ! TYPE_NO_FORCE_BLK (TREE_TYPE (field))
1346 && !(TYPE_SIZE (TREE_TYPE (field)) != 0
1347 && integer_zerop (TYPE_SIZE (TREE_TYPE (field)))))
1348 || ! host_integerp (bit_position (field), 1)
1349 || DECL_SIZE (field) == 0
1350 || ! host_integerp (DECL_SIZE (field), 1))
1353 /* If this field is the whole struct, remember its mode so
1354 that, say, we can put a double in a class into a DF
1355 register instead of forcing it to live in the stack. */
1356 if (simple_cst_equal (TYPE_SIZE (type), DECL_SIZE (field)))
1357 mode = DECL_MODE (field);
1359 #ifdef MEMBER_TYPE_FORCES_BLK
1360 /* With some targets, e.g. c4x, it is sub-optimal
1361 to access an aligned BLKmode structure as a scalar. */
1363 if (MEMBER_TYPE_FORCES_BLK (field, mode))
1365 #endif /* MEMBER_TYPE_FORCES_BLK */
1368 TYPE_MODE (type) = mode_for_size_tree (TYPE_SIZE (type), MODE_INT, 1);
1370 /* If we only have one real field, use its mode if that mode's size
1371 matches the type's size. This only applies to RECORD_TYPE. This
1372 does not apply to unions. */
1373 if (TREE_CODE (type) == RECORD_TYPE && mode != VOIDmode
1374 && GET_MODE_SIZE (mode) == GET_MODE_SIZE (TYPE_MODE (type)))
1375 TYPE_MODE (type) = mode;
1377 /* If structure's known alignment is less than what the scalar
1378 mode would need, and it matters, then stick with BLKmode. */
1379 if (TYPE_MODE (type) != BLKmode
1381 && ! (TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT
1382 || TYPE_ALIGN (type) >= GET_MODE_ALIGNMENT (TYPE_MODE (type))))
1384 /* If this is the only reason this type is BLKmode, then
1385 don't force containing types to be BLKmode. */
1386 TYPE_NO_FORCE_BLK (type) = 1;
1387 TYPE_MODE (type) = BLKmode;
1391 /* Compute TYPE_SIZE and TYPE_ALIGN for TYPE, once it has been laid
1395 finalize_type_size (tree type)
1397 /* Normally, use the alignment corresponding to the mode chosen.
1398 However, where strict alignment is not required, avoid
1399 over-aligning structures, since most compilers do not do this alignment. */
1402 if (TYPE_MODE (type) != BLKmode && TYPE_MODE (type) != VOIDmode
1403 && (STRICT_ALIGNMENT
1404 || (TREE_CODE (type) != RECORD_TYPE && TREE_CODE (type) != UNION_TYPE
1405 && TREE_CODE (type) != QUAL_UNION_TYPE
1406 && TREE_CODE (type) != ARRAY_TYPE)))
1408 TYPE_ALIGN (type) = GET_MODE_ALIGNMENT (TYPE_MODE (type));
1409 TYPE_USER_ALIGN (type) = 0;
1412 /* Do machine-dependent extra alignment. */
1413 #ifdef ROUND_TYPE_ALIGN
1415 = ROUND_TYPE_ALIGN (type, TYPE_ALIGN (type), BITS_PER_UNIT);
1418 /* If we failed to find a simple way to calculate the unit size
1419 of the type, find it by division. */
1420 if (TYPE_SIZE_UNIT (type) == 0 && TYPE_SIZE (type) != 0)
1421 /* TYPE_SIZE (type) is computed in bitsizetype. After the division, the
1422 result will fit in sizetype. We will get more efficient code using
1423 sizetype, so we force a conversion. */
1424 TYPE_SIZE_UNIT (type)
1425 = fold_convert (sizetype,
1426 size_binop (FLOOR_DIV_EXPR, TYPE_SIZE (type),
1427 bitsize_unit_node));
1429 if (TYPE_SIZE (type) != 0)
1431 TYPE_SIZE (type) = round_up (TYPE_SIZE (type), TYPE_ALIGN (type));
1432 TYPE_SIZE_UNIT (type) = round_up (TYPE_SIZE_UNIT (type),
1433 TYPE_ALIGN_UNIT (type));
1436 /* Evaluate nonconstant sizes only once, either now or as soon as safe. */
1437 if (TYPE_SIZE (type) != 0 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
1438 TYPE_SIZE (type) = variable_size (TYPE_SIZE (type));
1439 if (TYPE_SIZE_UNIT (type) != 0
1440 && TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)
1441 TYPE_SIZE_UNIT (type) = variable_size (TYPE_SIZE_UNIT (type));
1443 /* Also layout any other variants of the type. */
1444 if (TYPE_NEXT_VARIANT (type)
1445 || type != TYPE_MAIN_VARIANT (type))
1448 /* Record layout info of this variant. */
1449 tree size = TYPE_SIZE (type);
1450 tree size_unit = TYPE_SIZE_UNIT (type);
1451 unsigned int align = TYPE_ALIGN (type);
1452 unsigned int user_align = TYPE_USER_ALIGN (type);
1453 enum machine_mode mode = TYPE_MODE (type);
1455 /* Copy it into all variants. */
1456 for (variant = TYPE_MAIN_VARIANT (type);
1458 variant = TYPE_NEXT_VARIANT (variant))
1460 TYPE_SIZE (variant) = size;
1461 TYPE_SIZE_UNIT (variant) = size_unit;
1462 TYPE_ALIGN (variant) = align;
1463 TYPE_USER_ALIGN (variant) = user_align;
1464 TYPE_MODE (variant) = mode;
1469 /* Do all of the work required to layout the type indicated by RLI,
1470 once the fields have been laid out. This function will call `free'
1471 for RLI, unless FREE_P is false. Passing a value other than false
1472 for FREE_P is bad practice; this option only exists to support the G++ 3.2 ABI. */
1476 finish_record_layout (record_layout_info rli, int free_p)
1478 /* Compute the final size. */
1479 finalize_record_size (rli);
1481 /* Compute the TYPE_MODE for the record. */
1482 compute_record_mode (rli->t);
1484 /* Perform any last tweaks to the TYPE_SIZE, etc. */
1485 finalize_type_size (rli->t);
1487 /* Lay out any static members. This is done now because their type
1488 may use the record's type. */
1489 while (rli->pending_statics)
1491 layout_decl (TREE_VALUE (rli->pending_statics), 0);
1492 rli->pending_statics = TREE_CHAIN (rli->pending_statics);
1501 /* Finish processing a builtin RECORD_TYPE type TYPE. Its name is
1502 NAME, its fields are chained in reverse on FIELDS.
1504 If ALIGN_TYPE is non-null, it is given the same alignment as ALIGN_TYPE. */
1508 finish_builtin_struct (tree type, const char *name, tree fields,
1513 for (tail = NULL_TREE; fields; tail = fields, fields = next)
1515 DECL_FIELD_CONTEXT (fields) = type;
1516 next = TREE_CHAIN (fields);
1517 TREE_CHAIN (fields) = tail;
1519 TYPE_FIELDS (type) = tail;
1523 TYPE_ALIGN (type) = TYPE_ALIGN (align_type);
1524 TYPE_USER_ALIGN (type) = TYPE_USER_ALIGN (align_type);
1528 #if 0 /* not yet, should get fixed properly later */
1529 TYPE_NAME (type) = make_type_decl (get_identifier (name), type);
#else
1531 TYPE_NAME (type) = build_decl (TYPE_DECL, get_identifier (name), type);
#endif
1533 TYPE_STUB_DECL (type) = TYPE_NAME (type);
1534 layout_decl (TYPE_NAME (type), 0);
1537 /* Calculate the mode, size, and alignment for TYPE.
1538 For an array type, calculate the element separation as well.
1539 Record TYPE on the chain of permanent or temporary types
1540 so that dbxout will find out about it.
1542 TYPE_SIZE of a type is nonzero if the type has been laid out already.
1543 layout_type does nothing on such a type.
1545 If the type is incomplete, its TYPE_SIZE remains zero. */
1548 layout_type (tree type)
1552 if (type == error_mark_node)
1555 /* Do nothing if type has been laid out before. */
1556 if (TYPE_SIZE (type))
1559 switch (TREE_CODE (type))
1562 /* This kind of type is the responsibility
1563 of the language-specific code. */
1566 case BOOLEAN_TYPE: /* Used for Java, Pascal, and Chill. */
1567 if (TYPE_PRECISION (type) == 0)
1568 TYPE_PRECISION (type) = 1; /* default to one byte/boolean. */
1570 /* ... fall through ... */
1575 if (TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST
1576 && tree_int_cst_sgn (TYPE_MIN_VALUE (type)) >= 0)
1577 TYPE_UNSIGNED (type) = 1;
1579 TYPE_MODE (type) = smallest_mode_for_size (TYPE_PRECISION (type),
1581 TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
1582 TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
1586 TYPE_MODE (type) = mode_for_size (TYPE_PRECISION (type), MODE_FLOAT, 0);
1587 TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
1588 TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
1592 TYPE_UNSIGNED (type) = TYPE_UNSIGNED (TREE_TYPE (type));
1594 = mode_for_size (2 * TYPE_PRECISION (TREE_TYPE (type)),
1595 (TREE_CODE (TREE_TYPE (type)) == REAL_TYPE
1596 ? MODE_COMPLEX_FLOAT : MODE_COMPLEX_INT),
1598 TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
1599 TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
1604 int nunits = TYPE_VECTOR_SUBPARTS (type);
1605 tree nunits_tree = build_int_cst (NULL_TREE, nunits);
1606 tree innertype = TREE_TYPE (type);
1608 gcc_assert (!(nunits & (nunits - 1)));
1610 /* Find an appropriate mode for the vector type. */
1611 if (TYPE_MODE (type) == VOIDmode)
1613 enum machine_mode innermode = TYPE_MODE (innertype);
1614 enum machine_mode mode;
1616 /* First, look for a supported vector type. */
1617 if (GET_MODE_CLASS (innermode) == MODE_FLOAT)
1618 mode = MIN_MODE_VECTOR_FLOAT;
1620 mode = MIN_MODE_VECTOR_INT;
1622 for (; mode != VOIDmode ; mode = GET_MODE_WIDER_MODE (mode))
1623 if (GET_MODE_NUNITS (mode) == nunits
1624 && GET_MODE_INNER (mode) == innermode
1625 && targetm.vector_mode_supported_p (mode))
1628 /* For integers, try mapping it to a same-sized scalar mode. */
1629 if (mode == VOIDmode
1630 && GET_MODE_CLASS (innermode) == MODE_INT)
1631 mode = mode_for_size (nunits * GET_MODE_BITSIZE (innermode),
1634 if (mode == VOIDmode || !have_regs_of_mode[mode])
1635 TYPE_MODE (type) = BLKmode;
1637 TYPE_MODE (type) = mode;
1640 TYPE_UNSIGNED (type) = TYPE_UNSIGNED (TREE_TYPE (type));
1641 TYPE_SIZE_UNIT (type) = int_const_binop (MULT_EXPR,
1642 TYPE_SIZE_UNIT (innertype),
1644 TYPE_SIZE (type) = int_const_binop (MULT_EXPR, TYPE_SIZE (innertype),
1647 /* Always naturally align vectors. This prevents ABI changes
1648 depending on whether or not native vector modes are supported. */
1649 TYPE_ALIGN (type) = tree_low_cst (TYPE_SIZE (type), 0);
1654 /* This is an incomplete type and so doesn't have a size. */
1655 TYPE_ALIGN (type) = 1;
1656 TYPE_USER_ALIGN (type) = 0;
1657 TYPE_MODE (type) = VOIDmode;
1661 TYPE_SIZE (type) = bitsize_int (POINTER_SIZE);
1662 TYPE_SIZE_UNIT (type) = size_int (POINTER_SIZE / BITS_PER_UNIT);
1663 /* A pointer might be MODE_PARTIAL_INT,
1664 but ptrdiff_t must be integral. */
1665 TYPE_MODE (type) = mode_for_size (POINTER_SIZE, MODE_INT, 0);
1670 /* It's hard to see what the mode and size of a function ought to
1671 be, but we do know the alignment is FUNCTION_BOUNDARY, so
1672 make it consistent with that. */
1673 TYPE_MODE (type) = mode_for_size (FUNCTION_BOUNDARY, MODE_INT, 0);
1674 TYPE_SIZE (type) = bitsize_int (FUNCTION_BOUNDARY);
1675 TYPE_SIZE_UNIT (type) = size_int (FUNCTION_BOUNDARY / BITS_PER_UNIT);
1679 case REFERENCE_TYPE:
1682 enum machine_mode mode = ((TREE_CODE (type) == REFERENCE_TYPE
1683 && reference_types_internal)
1684 ? Pmode : TYPE_MODE (type));
1686 int nbits = GET_MODE_BITSIZE (mode);
1688 TYPE_SIZE (type) = bitsize_int (nbits);
1689 TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (mode));
1690 TYPE_UNSIGNED (type) = 1;
1691 TYPE_PRECISION (type) = nbits;
1697 tree index = TYPE_DOMAIN (type);
1698 tree element = TREE_TYPE (type);
1700 build_pointer_type (element);
1702 /* We need to know both bounds in order to compute the size. */
1703 if (index && TYPE_MAX_VALUE (index) && TYPE_MIN_VALUE (index)
1704 && TYPE_SIZE (element))
1706 tree ub = TYPE_MAX_VALUE (index);
1707 tree lb = TYPE_MIN_VALUE (index);
1711 /* The initial subtraction should happen in the original type so
1712 that (possible) negative values are handled appropriately. */
1713 length = size_binop (PLUS_EXPR, size_one_node,
1714 fold_convert (sizetype,
1715 fold_build2 (MINUS_EXPR,
1719 /* Special handling for arrays of bits (for Chill). */
1720 element_size = TYPE_SIZE (element);
1721 if (TYPE_PACKED (type) && INTEGRAL_TYPE_P (element)
1722 && (integer_zerop (TYPE_MAX_VALUE (element))
1723 || integer_onep (TYPE_MAX_VALUE (element)))
1724 && host_integerp (TYPE_MIN_VALUE (element), 1))
1726 HOST_WIDE_INT maxvalue
1727 = tree_low_cst (TYPE_MAX_VALUE (element), 1);
1728 HOST_WIDE_INT minvalue
1729 = tree_low_cst (TYPE_MIN_VALUE (element), 1);
1731 if (maxvalue - minvalue == 1
1732 && (maxvalue == 1 || maxvalue == 0))
1733 element_size = integer_one_node;
1736 /* If neither bound is a constant and sizetype is signed, make
1737 sure the size is never negative. We should really do this
1738 if *either* bound is non-constant, but this is the best
1739 compromise between C and Ada. */
1740 if (!TYPE_UNSIGNED (sizetype)
1741 && TREE_CODE (TYPE_MIN_VALUE (index)) != INTEGER_CST
1742 && TREE_CODE (TYPE_MAX_VALUE (index)) != INTEGER_CST)
1743 length = size_binop (MAX_EXPR, length, size_zero_node);
1745 TYPE_SIZE (type) = size_binop (MULT_EXPR, element_size,
1746 fold_convert (bitsizetype,
1749 /* If we know the size of the element, calculate the total
1750 size directly, rather than do some division thing below.
1751 This optimization helps Fortran assumed-size arrays
1752 (where the size of the array is determined at runtime)
1754 Note that we can't do this in the case where the size of
1755 the elements is one bit since TYPE_SIZE_UNIT cannot be
1756 set correctly in that case. */
1757 if (TYPE_SIZE_UNIT (element) != 0 && ! integer_onep (element_size))
1758 TYPE_SIZE_UNIT (type)
1759 = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (element), length);
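/* Editorial worked example: for "int a[5]" with domain [0, 4] and a 32-bit
   int, length == 4 - 0 + 1 == 5, so TYPE_SIZE == 5 * 32 == 160 bits and,
   since the element size is known, TYPE_SIZE_UNIT == 5 * 4 == 20 bytes is
   computed directly instead of by the division in finalize_type_size.  */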
1762 /* Now round the alignment and size,
1763 using machine-dependent criteria if any. */
1765 #ifdef ROUND_TYPE_ALIGN
1767 = ROUND_TYPE_ALIGN (type, TYPE_ALIGN (element), BITS_PER_UNIT);
1769 TYPE_ALIGN (type) = MAX (TYPE_ALIGN (element), BITS_PER_UNIT);
1771 TYPE_USER_ALIGN (type) = TYPE_USER_ALIGN (element);
1772 TYPE_MODE (type) = BLKmode;
1773 if (TYPE_SIZE (type) != 0
1774 #ifdef MEMBER_TYPE_FORCES_BLK
1775 && ! MEMBER_TYPE_FORCES_BLK (type, VOIDmode)
1777 /* BLKmode elements force BLKmode aggregate;
1778 else extract/store fields may lose. */
1779 && (TYPE_MODE (TREE_TYPE (type)) != BLKmode
1780 || TYPE_NO_FORCE_BLK (TREE_TYPE (type))))
1782 /* One-element arrays get the component type's mode. */
1783 if (simple_cst_equal (TYPE_SIZE (type),
1784 TYPE_SIZE (TREE_TYPE (type))))
1785 TYPE_MODE (type) = TYPE_MODE (TREE_TYPE (type));
1788 = mode_for_size_tree (TYPE_SIZE (type), MODE_INT, 1);
1790 if (TYPE_MODE (type) != BLKmode
1791 && STRICT_ALIGNMENT && TYPE_ALIGN (type) < BIGGEST_ALIGNMENT
1792 && TYPE_ALIGN (type) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
1795 TYPE_NO_FORCE_BLK (type) = 1;
1796 TYPE_MODE (type) = BLKmode;
1804 case QUAL_UNION_TYPE:
1807 record_layout_info rli;

	/* Initialize the layout information.  */
	rli = start_record_layout (type);

	/* If this is a QUAL_UNION_TYPE, we want to process the fields
	   in the reverse order in building the COND_EXPR that denotes
	   its size.  We reverse them again later.  */
	if (TREE_CODE (type) == QUAL_UNION_TYPE)
	  TYPE_FIELDS (type) = nreverse (TYPE_FIELDS (type));

	/* Place all the fields.  */
	for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
	  place_field (rli, field);

	if (TREE_CODE (type) == QUAL_UNION_TYPE)
	  TYPE_FIELDS (type) = nreverse (TYPE_FIELDS (type));

	if (lang_adjust_rli)
	  (*lang_adjust_rli) (rli);

	/* Finish laying out the record.  */
	finish_record_layout (rli, /*free_p=*/true);
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute the final TYPE_SIZE, TYPE_ALIGN, etc. for TYPE.  For
     records and unions, finish_record_layout already called this
     function.  */
  if (TREE_CODE (type) != RECORD_TYPE
      && TREE_CODE (type) != UNION_TYPE
      && TREE_CODE (type) != QUAL_UNION_TYPE)
    finalize_type_size (type);

  /* If an alias set has been set for this aggregate when it was incomplete,
     force it into alias set 0.
     This is too conservative, but we cannot call record_component_aliases
     here because some frontends still change the aggregates after
     layout_type.  */
  if (AGGREGATE_TYPE_P (type) && TYPE_ALIAS_SET_KNOWN_P (type))
    TYPE_ALIAS_SET (type) = 0;
}
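
/* Editor's sketch (not part of the original source): a minimal
   illustration of the record path in layout_type above, assuming a
   front end building the equivalent of "struct { char c; int i; }":

     tree rec = make_node (RECORD_TYPE);
     tree c = build_decl (FIELD_DECL, get_identifier ("c"), char_type_node);
     tree i = build_decl (FIELD_DECL, get_identifier ("i"), integer_type_node);
     DECL_CONTEXT (c) = rec;
     DECL_CONTEXT (i) = rec;
     TREE_CHAIN (c) = i;
     TYPE_FIELDS (rec) = c;
     layout_type (rec);

   place_field gives "i" a 4-byte-aligned offset, so on a typical 32-bit
   target TYPE_SIZE (rec) ends up as 64 bits, with 24 bits of padding
   after "c".  Real front ends set more flags on the FIELD_DECLs; the
   fragment only shows which entry points are involved.  */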

/* Create and return a type for signed integers of PRECISION bits.  */

tree
make_signed_type (int precision)
{
  tree type = make_node (INTEGER_TYPE);

  TYPE_PRECISION (type) = precision;
  fixup_signed_type (type);
  return type;
}

/* Create and return a type for unsigned integers of PRECISION bits.  */

tree
make_unsigned_type (int precision)
{
  tree type = make_node (INTEGER_TYPE);

  TYPE_PRECISION (type) = precision;
  fixup_unsigned_type (type);
  return type;
}
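
/* Editor's note (not part of the original source): typical use is a
   front end creating its fundamental types during initialization, e.g.

     tree int16 = make_signed_type (16);     -- range -32768 .. 32767
     tree uint8 = make_unsigned_type (8);    -- range      0 .. 255

   The fixup routines below fill in the extreme values and lay the types
   out, so the returned nodes are ready to use.  */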

/* Initialize sizetype and bitsizetype to a reasonable and temporary
   value to enable integer types to be created.  */

void
initialize_sizetypes (bool signed_p)
{
  tree t = make_node (INTEGER_TYPE);

  TYPE_MODE (t) = SImode;
  TYPE_ALIGN (t) = GET_MODE_ALIGNMENT (SImode);
  TYPE_USER_ALIGN (t) = 0;
  TYPE_IS_SIZETYPE (t) = 1;
  TYPE_UNSIGNED (t) = !signed_p;
  TYPE_SIZE (t) = build_int_cst (t, GET_MODE_BITSIZE (SImode));
  TYPE_SIZE_UNIT (t) = build_int_cst (t, GET_MODE_SIZE (SImode));
  TYPE_PRECISION (t) = GET_MODE_BITSIZE (SImode);
  TYPE_MIN_VALUE (t) = build_int_cst (t, 0);

  /* 1000 avoids problems with possible overflow and is certainly
     larger than any size value we'd want to be storing.  */
  TYPE_MAX_VALUE (t) = build_int_cst (t, 1000);

  sizetype = t;
  bitsizetype = build_distinct_type_copy (t);
}

/* Make sizetype a version of TYPE, and initialize *sizetype
   accordingly.  We do this by overwriting the stub sizetype and
   bitsizetype nodes created by initialize_sizetypes.  This makes sure
   that (a) anything stubby about them no longer exists and (b) any
   INTEGER_CSTs created with such a type remain valid.  */

void
set_sizetype (tree type)
{
  int oprecision = TYPE_PRECISION (type);
  /* The *bitsizetype types use a precision that avoids overflows when
     calculating signed sizes / offsets in bits.  However, when
     cross-compiling from a 32-bit to a 64-bit host, we are limited to
     64-bit precision.  */
  int precision = MIN (oprecision + BITS_PER_UNIT_LOG + 1,
		       2 * HOST_BITS_PER_WIDE_INT);
  tree t;

  gcc_assert (TYPE_UNSIGNED (type) == TYPE_UNSIGNED (sizetype));

  t = build_distinct_type_copy (type);
  /* We do want to use sizetype's cache, as we will be replacing that
     type.  */
  TYPE_CACHED_VALUES (t) = TYPE_CACHED_VALUES (sizetype);
  TYPE_CACHED_VALUES_P (t) = TYPE_CACHED_VALUES_P (sizetype);
  TREE_TYPE (TYPE_CACHED_VALUES (t)) = type;
  TYPE_UID (t) = TYPE_UID (sizetype);
  TYPE_IS_SIZETYPE (t) = 1;

  /* Replace our original stub sizetype.  */
  memcpy (sizetype, t, tree_size (sizetype));
  TYPE_MAIN_VARIANT (sizetype) = sizetype;

  t = make_node (INTEGER_TYPE);
  TYPE_NAME (t) = get_identifier ("bit_size_type");
  /* We do want to use bitsizetype's cache, as we will be replacing that
     type.  */
  TYPE_CACHED_VALUES (t) = TYPE_CACHED_VALUES (bitsizetype);
  TYPE_CACHED_VALUES_P (t) = TYPE_CACHED_VALUES_P (bitsizetype);
  TYPE_PRECISION (t) = precision;
  TYPE_UID (t) = TYPE_UID (bitsizetype);
  TYPE_IS_SIZETYPE (t) = 1;
  /* Replace our original stub bitsizetype.  */
  memcpy (bitsizetype, t, tree_size (bitsizetype));

  if (TYPE_UNSIGNED (type))
    {
      fixup_unsigned_type (bitsizetype);
      ssizetype = build_distinct_type_copy (make_signed_type (oprecision));
      TYPE_IS_SIZETYPE (ssizetype) = 1;
      sbitsizetype = build_distinct_type_copy (make_signed_type (precision));
      TYPE_IS_SIZETYPE (sbitsizetype) = 1;
    }
  else
    {
      fixup_signed_type (bitsizetype);
      ssizetype = sizetype;
      sbitsizetype = bitsizetype;
    }
}
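
/* Editor's illustration (not part of the original source): with a
   32-bit sizetype and 8-bit storage units (BITS_PER_UNIT_LOG == 3),

     precision = MIN (32 + 3 + 1, 2 * HOST_BITS_PER_WIDE_INT) = 36

   (assuming HOST_BITS_PER_WIDE_INT is at least 18), i.e. bitsizetype is
   wide enough to hold any byte count converted to bits, plus a sign
   bit, without overflow.  */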

/* TYPE is an integral type, i.e., an INTEGRAL_TYPE, ENUMERAL_TYPE,
   BOOLEAN_TYPE, or CHAR_TYPE.  Set TYPE_MIN_VALUE and TYPE_MAX_VALUE
   for TYPE, based on the PRECISION and whether or not the TYPE
   IS_UNSIGNED.  PRECISION need not correspond to a width supported
   natively by the hardware; for example, on a machine with 8-bit,
   16-bit, and 32-bit register modes, PRECISION might be 7, 23, or
   61.  */

void
set_min_and_max_values_for_integral_type (tree type,
					  int precision,
					  bool is_unsigned)
{
  tree min_value;
  tree max_value;

  if (is_unsigned)
    {
      min_value = build_int_cst (type, 0);
      max_value
	= build_int_cst_wide (type, precision - HOST_BITS_PER_WIDE_INT >= 0
			      ? -1
			      : ((HOST_WIDE_INT) 1 << precision) - 1,
			      precision - HOST_BITS_PER_WIDE_INT > 0
			      ? ((unsigned HOST_WIDE_INT) ~0
				 >> (HOST_BITS_PER_WIDE_INT
				     - (precision - HOST_BITS_PER_WIDE_INT)))
			      : 0);
    }
  else
    {
      min_value
	= build_int_cst_wide (type,
			      (precision - HOST_BITS_PER_WIDE_INT > 0
			       ? 0
			       : (HOST_WIDE_INT) (-1) << (precision - 1)),
			      ((HOST_WIDE_INT) (-1)
			       << (precision - HOST_BITS_PER_WIDE_INT - 1 > 0
				   ? precision - HOST_BITS_PER_WIDE_INT - 1
				   : 0)));
      max_value
	= build_int_cst_wide (type,
			      (precision - HOST_BITS_PER_WIDE_INT > 0
			       ? -1
			       : ((HOST_WIDE_INT) 1 << (precision - 1)) - 1),
			      (precision - HOST_BITS_PER_WIDE_INT - 1 > 0
			       ? (((HOST_WIDE_INT) 1
				   << (precision - HOST_BITS_PER_WIDE_INT - 1))) - 1
			       : 0));
    }

  TYPE_MIN_VALUE (type) = min_value;
  TYPE_MAX_VALUE (type) = max_value;
}
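
/* Editor's illustration (not part of the original source): for
   PRECISION == 7,

     is_unsigned == true:   min = 0,          max = 2^7 - 1 = 127
     is_unsigned == false:  min = -2^6 = -64, max = 2^6 - 1 =  63

   For precisions above HOST_BITS_PER_WIDE_INT, the high half of the
   constant is supplied through the second value argument of
   build_int_cst_wide.  */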

/* Set the extreme values of TYPE based on its precision in bits,
   then lay it out.  Used when make_signed_type won't do
   because the tree code is not INTEGER_TYPE.
   E.g. for Pascal, when the -fsigned-char option is given.  */

void
fixup_signed_type (tree type)
{
  int precision = TYPE_PRECISION (type);

  /* We cannot properly represent constants wider than
     2 * HOST_BITS_PER_WIDE_INT; still, we need such types, as they are
     used by the i386 vector extensions and friends.  */
  if (precision > HOST_BITS_PER_WIDE_INT * 2)
    precision = HOST_BITS_PER_WIDE_INT * 2;

  set_min_and_max_values_for_integral_type (type, precision,
					    /*is_unsigned=*/false);

  /* Lay out the type: set its alignment, size, etc.  */
  layout_type (type);
}

/* Set the extreme values of TYPE based on its precision in bits,
   then lay it out.  This is used both in `make_unsigned_type'
   and for enumeral types.  */

void
fixup_unsigned_type (tree type)
{
  int precision = TYPE_PRECISION (type);

  /* We cannot properly represent constants wider than
     2 * HOST_BITS_PER_WIDE_INT; still, we need such types, as they are
     used by the i386 vector extensions and friends.  */
  if (precision > HOST_BITS_PER_WIDE_INT * 2)
    precision = HOST_BITS_PER_WIDE_INT * 2;

  TYPE_UNSIGNED (type) = 1;

  set_min_and_max_values_for_integral_type (type, precision,
					    /*is_unsigned=*/true);

  /* Lay out the type: set its alignment, size, etc.  */
  layout_type (type);
}
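
/* Editor's note (not part of the original source): a hypothetical use
   when a front end lays out an enumeration whose values all fit in
   8 bits:

     tree e = make_node (ENUMERAL_TYPE);
     TYPE_PRECISION (e) = 8;
     fixup_unsigned_type (e);   -- sets the range 0 .. 255 and lays it out

   make_unsigned_type cannot be used here because the node must stay an
   ENUMERAL_TYPE, which is exactly the situation the comment above
   describes.  */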

/* Find the best machine mode to use when referencing a bit field of length
   BITSIZE bits starting at BITPOS.

   The underlying object is known to be aligned to a boundary of ALIGN bits.
   If LARGEST_MODE is not VOIDmode, it means that we should not use a mode
   larger than LARGEST_MODE (usually SImode).

   If no mode meets all these conditions, we return VOIDmode.  Otherwise, if
   VOLATILEP is true or SLOW_BYTE_ACCESS is false, we return the smallest
   mode meeting these conditions.

   Otherwise (VOLATILEP is false and SLOW_BYTE_ACCESS is true), we return
   the largest mode (but a mode no wider than UNITS_PER_WORD) that meets
   all the conditions.  */

enum machine_mode
get_best_mode (int bitsize, int bitpos, unsigned int align,
	       enum machine_mode largest_mode, int volatilep)
{
  enum machine_mode mode;
  unsigned int unit = 0;

  /* Find the narrowest integer mode that contains the bit field.  */
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      unit = GET_MODE_BITSIZE (mode);
      if ((bitpos % unit) + bitsize <= unit)
	break;
    }

  if (mode == VOIDmode
      /* It is tempting to omit the following line
	 if STRICT_ALIGNMENT is true.
	 But that is incorrect, since if the bitfield uses part of 3 bytes
	 and we use a 4-byte mode, we could get a spurious segv
	 if the extra 4th byte is past the end of memory.
	 (Though at least one Unix compiler ignores this problem:
	 that on the Sequent 386 machine.)  */
      || MIN (unit, BIGGEST_ALIGNMENT) > align
      || (largest_mode != VOIDmode && unit > GET_MODE_BITSIZE (largest_mode)))
    return VOIDmode;

  if (SLOW_BYTE_ACCESS && ! volatilep)
    {
      enum machine_mode wide_mode = VOIDmode, tmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT); tmode != VOIDmode;
	   tmode = GET_MODE_WIDER_MODE (tmode))
	{
	  unit = GET_MODE_BITSIZE (tmode);
	  if (bitpos / unit == (bitpos + bitsize - 1) / unit
	      && unit <= BITS_PER_WORD
	      && unit <= MIN (align, BIGGEST_ALIGNMENT)
	      && (largest_mode == VOIDmode
		  || unit <= GET_MODE_BITSIZE (largest_mode)))
	    wide_mode = tmode;
	}

      if (wide_mode != VOIDmode)
	return wide_mode;
    }

  return mode;
}
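
/* Editor's illustration (not part of the original source): for a 3-bit
   field at bit position 6 in a 32-bit-aligned object,

     get_best_mode (3, 6, 32, VOIDmode, 0)

   skips QImode (6 % 8 + 3 = 9 > 8: the field straddles a byte) and
   settles on HImode (6 % 16 + 3 = 9 <= 16).  On a target that defines
   SLOW_BYTE_ACCESS nonzero, the second loop would instead prefer the
   widest qualifying mode up to the word size, typically SImode here.  */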

/* Gets minimal and maximal values for MODE (signed or unsigned depending on
   SIGN).  The returned constants are made to be usable in TARGET_MODE.  */

void
get_mode_bounds (enum machine_mode mode, int sign,
		 enum machine_mode target_mode,
		 rtx *mmin, rtx *mmax)
{
  unsigned size = GET_MODE_BITSIZE (mode);
  unsigned HOST_WIDE_INT min_val, max_val;

  gcc_assert (size <= HOST_BITS_PER_WIDE_INT);

  if (sign)
    {
      min_val = -((unsigned HOST_WIDE_INT) 1 << (size - 1));
      max_val = ((unsigned HOST_WIDE_INT) 1 << (size - 1)) - 1;
    }
  else
    {
      min_val = 0;
      max_val = ((unsigned HOST_WIDE_INT) 1 << (size - 1) << 1) - 1;
    }

  *mmin = gen_int_mode (min_val, target_mode);
  *mmax = gen_int_mode (max_val, target_mode);
}
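
/* Editor's illustration (not part of the original source): asking for
   the bounds of an 8-bit mode,

     rtx lo, hi;
     get_mode_bounds (QImode, 1, SImode, &lo, &hi);  -- lo = -128, hi = 127
     get_mode_bounds (QImode, 0, SImode, &lo, &hi);  -- lo =    0, hi = 255

   The constants come back as CONST_INTs already valid for SImode, via
   gen_int_mode.  */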

#include "gt-stor-layout.h"