1 /****************************************************************************
3 * GNAT COMPILER COMPONENTS *
7 * C Implementation File *
9 * Copyright (C) 1992-2010, Free Software Foundation, Inc. *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
24 ****************************************************************************/
28 #include "coretypes.h"
40 #include "langhooks.h"
41 #include "pointer-set.h"
43 #include "tree-dump.h"
44 #include "tree-inline.h"
45 #include "tree-iterator.h"
62 #ifndef MAX_BITS_PER_WORD
63 #define MAX_BITS_PER_WORD BITS_PER_WORD
66 /* If nonzero, pretend we are allocating at global level. */
69 /* The default alignment of "double" floating-point types, i.e. floating
70 point types whose size is equal to 64 bits, or 0 if this alignment is
71 not specifically capped. */
72 int double_float_alignment;
74 /* The default alignment of "double" or larger scalar types, i.e. scalar
75 types whose size is greater or equal to 64 bits, or 0 if this alignment
76 is not specifically capped. */
77 int double_scalar_alignment;
79 /* Tree nodes for the various types and decls we create. */
80 tree gnat_std_decls[(int) ADT_LAST];
82 /* Functions to call for each of the possible raise reasons. */
83 tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
85 /* Forward declarations for handlers of attributes. */
86 static tree handle_const_attribute (tree *, tree, tree, int, bool *);
87 static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
88 static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
89 static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
90 static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
91 static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
92 static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
93 static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
94 static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
95 static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
96 static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);
98 /* Fake handler for attributes we don't properly support, typically because
99 they'd require dragging a lot of the common-c front-end circuitry. */
100 static tree fake_attribute_handler (tree *, tree, tree, int, bool *);
102 /* Table of machine-independent internal attributes for Ada. We support
103 this minimal set of attributes to accommodate the needs of builtins. */
104 const struct attribute_spec gnat_internal_attribute_table[] =
106 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
107 { "const", 0, 0, true, false, false, handle_const_attribute },
108 { "nothrow", 0, 0, true, false, false, handle_nothrow_attribute },
109 { "pure", 0, 0, true, false, false, handle_pure_attribute },
110 { "no vops", 0, 0, true, false, false, handle_novops_attribute },
111 { "nonnull", 0, -1, false, true, true, handle_nonnull_attribute },
112 { "sentinel", 0, 1, false, true, true, handle_sentinel_attribute },
113 { "noreturn", 0, 0, true, false, false, handle_noreturn_attribute },
114 { "malloc", 0, 0, true, false, false, handle_malloc_attribute },
115 { "type generic", 0, 0, false, true, true, handle_type_generic_attribute },
117 { "vector_size", 1, 1, false, true, false, handle_vector_size_attribute },
118 { "vector_type", 0, 0, false, true, false, handle_vector_type_attribute },
119 { "may_alias", 0, 0, false, true, false, NULL },
121 /* ??? format and format_arg are heavy and not supported, which actually
122 prevents support for stdio builtins, which we however declare as part
123 of the common builtins.def contents. */
124 { "format", 3, 3, false, true, true, fake_attribute_handler },
125 { "format_arg", 1, 1, false, true, true, fake_attribute_handler },
127 { NULL, 0, 0, false, false, false, NULL }
130 /* Associates a GNAT tree node to a GCC tree node. It is used in
131 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
132 of `save_gnu_tree' for more info. */
133 static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
135 #define GET_GNU_TREE(GNAT_ENTITY) \
136 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
138 #define SET_GNU_TREE(GNAT_ENTITY,VAL) \
139 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
141 #define PRESENT_GNU_TREE(GNAT_ENTITY) \
142 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
144 /* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
145 static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;
147 #define GET_DUMMY_NODE(GNAT_ENTITY) \
148 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
150 #define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
151 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
153 #define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
154 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
156 /* This variable keeps a table for types for each precision so that we only
157 allocate each of them once. Signed and unsigned types are kept separate.
159 Note that these types are only used when fold-const requests something
160 special. Perhaps we should NOT share these types; we'll see how it
goes. */
162 static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
164 /* Likewise for float types, but record these by mode. */
165 static GTY(()) tree float_types[NUM_MACHINE_MODES];
167 /* For each binding contour we allocate a binding_level structure to indicate
168 the binding depth. */
170 struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
171 /* The binding level containing this one (the enclosing binding level). */
172 struct gnat_binding_level *chain;
173 /* The BLOCK node for this level. */
175 /* If nonzero, the setjmp buffer that needs to be updated for any
176 variable-sized definition within this context. */
180 /* The binding level currently in effect. */
181 static GTY(()) struct gnat_binding_level *current_binding_level;
183 /* A chain of gnat_binding_level structures awaiting reuse. */
184 static GTY((deletable)) struct gnat_binding_level *free_binding_level;
186 /* An array of global declarations. */
187 static GTY(()) VEC(tree,gc) *global_decls;
189 /* An array of builtin function declarations. */
190 static GTY(()) VEC(tree,gc) *builtin_decls;
192 /* An array of global renaming pointers. */
193 static GTY(()) VEC(tree,gc) *global_renaming_pointers;
195 /* A chain of unused BLOCK nodes. */
196 static GTY((deletable)) tree free_block_chain;
198 static tree merge_sizes (tree, tree, tree, bool, bool);
199 static tree compute_related_constant (tree, tree);
200 static tree split_plus (tree, tree *);
201 static tree float_type_for_precision (int, enum machine_mode);
202 static tree convert_to_fat_pointer (tree, tree);
203 static tree convert_to_thin_pointer (tree, tree);
204 static tree make_descriptor_field (const char *,tree, tree, tree);
205 static bool potential_alignment_gap (tree, tree, tree);
207 /* Initialize the association of GNAT nodes to GCC trees. */
210 init_gnat_to_gnu (void)
212 associate_gnat_to_gnu
213 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
216 /* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
217 which is to be associated with GNAT_ENTITY. Such GCC tree node is always
218 a ..._DECL node. If NO_CHECK is true, the latter check is suppressed.
220 If GNU_DECL is zero, a previous association is to be reset. */
223 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
225 /* Check that GNAT_ENTITY is not already defined and that it is being set
226 to something which is a decl. Raise gigi 401 if not. Usually, this
227 means GNAT_ENTITY is defined twice, but occasionally is due to some
229 gcc_assert (!(gnu_decl
230 && (PRESENT_GNU_TREE (gnat_entity)
231 || (!no_check && !DECL_P (gnu_decl)))));
233 SET_GNU_TREE (gnat_entity, gnu_decl);
236 /* GNAT_ENTITY is a GNAT tree node for a defining identifier.
237 Return the ..._DECL node that was associated with it. If there is no tree
238 node associated with GNAT_ENTITY, abort.
240 In some cases, such as delayed elaboration or expressions that need to
241 be elaborated only once, GNAT_ENTITY is really not an entity. */
244 get_gnu_tree (Entity_Id gnat_entity)
246 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
247 return GET_GNU_TREE (gnat_entity);
250 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
253 present_gnu_tree (Entity_Id gnat_entity)
255 return PRESENT_GNU_TREE (gnat_entity);
258 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
261 init_dummy_type (void)
264 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
267 /* Make a dummy type corresponding to GNAT_TYPE. */
270 make_dummy_type (Entity_Id gnat_type)
272 Entity_Id gnat_underlying = Gigi_Equivalent_Type (gnat_type);
275 /* If there is an equivalent type, get its underlying type. */
276 if (Present (gnat_underlying))
277 gnat_underlying = Underlying_Type (gnat_underlying);
279 /* If there was no equivalent type (can only happen when just annotating
280 types) or underlying type, go back to the original type. */
281 if (No (gnat_underlying))
282 gnat_underlying = gnat_type;
284 /* If there is already a dummy type, use that one. Else make one. */
285 if (PRESENT_DUMMY_NODE (gnat_underlying))
286 return GET_DUMMY_NODE (gnat_underlying);
288 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
290 gnu_type = make_node (Is_Record_Type (gnat_underlying)
291 ? tree_code_for_record_type (gnat_underlying)
/* Name the dummy after the *original* type, so diagnostics/debug info
   refer to the source entity even though the node is cached under the
   underlying type below. */
293 TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
294 TYPE_DUMMY_P (gnu_type) = 1;
295 TYPE_STUB_DECL (gnu_type)
296 = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
/* NOTE(review): presumably marks aggregates that Ada requires to be
   passed by reference (cf. Is_By_Reference_Type) — confirm against the
   full source, some lines are elided in this excerpt. */
297 if (AGGREGATE_TYPE_P (gnu_type))
298 TYPE_BY_REFERENCE_P (gnu_type) = Is_By_Reference_Type (gnat_type);
/* Cache under the underlying type so all equivalent types share it. */
300 SET_DUMMY_NODE (gnat_underlying, gnu_type);
305 /* Return nonzero if we are currently in the global binding level. */
308 global_bindings_p (void)
/* The nonzero value is -1, not 1; callers are expected to test only for
   zero vs nonzero. FORCE_GLOBAL lets gigi pretend it is at global level
   even inside a function (see its comment earlier in this file). */
310 return ((force_global || !current_function_decl) ? -1 : 0);
313 /* Enter a new binding level. */
316 gnat_pushlevel (void)
318 struct gnat_binding_level *newlevel = NULL;
320 /* Reuse a struct for this binding level, if there is one. */
321 if (free_binding_level)
323 newlevel = free_binding_level;
324 free_binding_level = free_binding_level->chain;
/* Otherwise allocate a fresh level in GC-managed memory. */
328 = (struct gnat_binding_level *)
329 ggc_alloc (sizeof (struct gnat_binding_level));
331 /* Use a free BLOCK, if any; otherwise, allocate one. */
332 if (free_block_chain)
334 newlevel->block = free_block_chain;
335 free_block_chain = BLOCK_CHAIN (free_block_chain);
/* Detach the recycled BLOCK from the free chain. */
336 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
339 newlevel->block = make_node (BLOCK);
341 /* Point the BLOCK we just made to its parent. */
342 if (current_binding_level)
343 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
/* Start with empty variable and sub-block lists; mark the BLOCK used so
   it is not discarded. */
345 BLOCK_VARS (newlevel->block) = BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
346 TREE_USED (newlevel->block) = 1;
348 /* Add this level to the front of the chain (stack) of levels that are
350 newlevel->chain = current_binding_level;
351 newlevel->jmpbuf_decl = NULL_TREE;
352 current_binding_level = newlevel;
355 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
356 and point FNDECL to this BLOCK. */
359 set_current_block_context (tree fndecl)
361 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
362 DECL_INITIAL (fndecl) = current_binding_level->block;
365 /* Set the jmpbuf_decl for the current binding level to DECL. */
368 set_block_jmpbuf_decl (tree decl)
370 current_binding_level->jmpbuf_decl = decl;
373 /* Get the jmpbuf_decl, if any, for the current binding level. */
376 get_block_jmpbuf_decl (void)
378 return current_binding_level->jmpbuf_decl;
381 /* Exit a binding level. Set any BLOCK into the current code group. */
386 struct gnat_binding_level *level = current_binding_level;
387 tree block = level->block;
/* Declarations and sub-blocks were pushed in reverse order; restore
   source order before deciding what to do with the BLOCK. */
389 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
390 BLOCK_SUBBLOCKS (block) = nreverse (BLOCK_SUBBLOCKS (block));
392 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
393 are no variables free the block and merge its subblocks into those of its
394 parent block. Otherwise, add it to the list of its parent. */
395 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
397 else if (BLOCK_VARS (block) == NULL_TREE)
399 BLOCK_SUBBLOCKS (level->chain->block)
400 = chainon (BLOCK_SUBBLOCKS (block),
401 BLOCK_SUBBLOCKS (level->chain->block))
/* Recycle the now-empty BLOCK; gnat_pushlevel will reuse it from
   free_block_chain. */
402 BLOCK_CHAIN (block) = free_block_chain;
403 free_block_chain = block;
407 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
408 BLOCK_SUBBLOCKS (level->chain->block) = block;
409 TREE_USED (block) = 1;
410 set_block_for_group (block);
413 /* Free this binding structure. */
/* Pop the level and recycle the structure; gnat_pushlevel reuses it from
   free_binding_level. */
414 current_binding_level = level->chain;
415 level->chain = free_binding_level;
416 free_binding_level = level;
420 /* Records a ..._DECL node DECL as belonging to the current lexical scope
421 and uses GNAT_NODE for location information and propagating flags. */
424 gnat_pushdecl (tree decl, Node_Id gnat_node)
426 /* If this decl is public external or at toplevel, there is no context.
427 But PARM_DECLs always go in the level of its function. */
428 if (TREE_CODE (decl) != PARM_DECL
429 && ((DECL_EXTERNAL (decl) && TREE_PUBLIC (decl))
430 || global_bindings_p ()))
431 DECL_CONTEXT (decl) = 0;
434 DECL_CONTEXT (decl) = current_function_decl;
436 /* Functions imported in another function are not really nested.
437 For really nested functions mark them initially as needing
438 a static chain for uses of that flag before unnesting;
439 lower_nested_functions will then recompute it. */
440 if (TREE_CODE (decl) == FUNCTION_DECL && !TREE_PUBLIC (decl))
441 DECL_STATIC_CHAIN (decl) = 1;
444 TREE_NO_WARNING (decl) = (gnat_node == Empty || Warnings_Off (gnat_node));
446 /* Set the location of DECL and emit a declaration for it. */
447 if (Present (gnat_node))
448 Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
449 add_decl_expr (decl, gnat_node);
451 /* Put the declaration on the list. The list of declarations is in reverse
452 order. The list will be reversed later. Put global variables in the
453 globals list and builtin functions in a dedicated list to speed up
454 further lookups. Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into
455 the list, as they will cause trouble with the debugger and aren't needed
457 if (TREE_CODE (decl) != TYPE_DECL
458 || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
460 if (global_bindings_p ())
462 VEC_safe_push (tree, gc, global_decls, decl);
464 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
465 VEC_safe_push (tree, gc, builtin_decls, decl);
469 TREE_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
470 BLOCK_VARS (current_binding_level->block) = decl;
474 /* For the declaration of a type, set its name if it either is not already
475 set or if the previous type name was not derived from a source name.
476 We'd rather have the type named with a real name and all the pointer
477 types to the same object have the same POINTER_TYPE node. Code in the
478 equivalent function of c-decl.c makes a copy of the type node here, but
479 that may cause us trouble with incomplete types. We make an exception
480 for fat pointer types because the compiler automatically builds them
481 for unconstrained array types and the debugger uses them to represent
482 both these and pointers to these. */
483 if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
485 tree t = TREE_TYPE (decl);
487 if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
489 else if (TYPE_IS_FAT_POINTER_P (t))
491 tree tt = build_variant_type_copy (t);
492 TYPE_NAME (tt) = decl;
493 TREE_USED (tt) = TREE_USED (t);
494 TREE_TYPE (decl) = tt;
495 if (DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
496 DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
498 DECL_ORIGINAL_TYPE (decl) = t;
500 DECL_ARTIFICIAL (decl) = 0;
502 else if (DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
507 /* Propagate the name to all the variants. This is needed for
508 the type qualifiers machinery to work properly. */
510 for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
511 TYPE_NAME (t) = decl;
515 /* Do little here. Set up the standard declarations later after the
516 front end has been run. */
519 gnat_init_decl_processing (void)
521 /* Make the binding_level structure for global names. */
522 current_function_decl = 0;
523 current_binding_level = 0;
524 free_binding_level = 0;
527 build_common_tree_nodes (true, true);
529 /* In Ada, we use a signed type for SIZETYPE. Use the signed type
530 corresponding to the width of Pmode. In most cases when ptr_mode
531 and Pmode differ, C will use the width of ptr_mode for SIZETYPE.
532 But we get far better code using the width of Pmode. */
533 size_type_node = gnat_type_for_mode (Pmode, 0);
534 set_sizetype (size_type_node);
536 /* In Ada, we use an unsigned 8-bit type for the default boolean type. */
537 boolean_type_node = make_unsigned_type (8);
538 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
539 SET_TYPE_RM_MAX_VALUE (boolean_type_node,
540 build_int_cst (boolean_type_node, 1));
541 SET_TYPE_RM_SIZE (boolean_type_node, bitsize_int (1));
543 build_common_tree_nodes_2 (0);
544 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
546 ptr_void_type_node = build_pointer_type (void_type_node);
549 /* Record TYPE as a builtin type for Ada. NAME is the name of the type. */
552 record_builtin_type (const char *name, tree type)
554 tree type_decl = build_decl (input_location,
555 TYPE_DECL, get_identifier (name), type);
557 gnat_pushdecl (type_decl, Empty);
559 if (debug_hooks->type_decl)
560 debug_hooks->type_decl (type_decl, false);
563 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
564 finish constructing the record or union type. If REP_LEVEL is zero, this
565 record has no representation clause and so will be entirely laid out here.
566 If REP_LEVEL is one, this record has a representation clause and has been
567 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
568 this record is derived from a parent record and thus inherits its layout;
569 only make a pass on the fields to finalize them. DEBUG_INFO_P is true if
570 we need to write debug information about this type. */
573 finish_record_type (tree record_type, tree field_list, int rep_level,
576 enum tree_code code = TREE_CODE (record_type);
577 tree name = TYPE_NAME (record_type);
578 tree ada_size = bitsize_zero_node;
579 tree size = bitsize_zero_node;
580 bool had_size = TYPE_SIZE (record_type) != 0;
581 bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
582 bool had_align = TYPE_ALIGN (record_type) != 0;
585 TYPE_FIELDS (record_type) = field_list;
587 /* Always attach the TYPE_STUB_DECL for a record type. It is required to
588 generate debug info and have a parallel type. */
589 if (name && TREE_CODE (name) == TYPE_DECL)
590 name = DECL_NAME (name);
591 TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);
593 /* Globally initialize the record first. If this is a rep'ed record,
594 that just means some initializations; otherwise, layout the record. */
597 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
600 TYPE_SIZE_UNIT (record_type) = size_zero_node;
603 TYPE_SIZE (record_type) = bitsize_zero_node;
605 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
606 out just like a UNION_TYPE, since the size will be fixed. */
607 else if (code == QUAL_UNION_TYPE)
612 /* Ensure there isn't a size already set. There can be in an error
613 case where there is a rep clause but all fields have errors and
614 no longer have a position. */
615 TYPE_SIZE (record_type) = 0;
616 layout_type (record_type);
619 /* At this point, the position and size of each field is known. It was
620 either set before entry by a rep clause, or by laying out the type above.
622 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
623 to compute the Ada size; the GCC size and alignment (for rep'ed records
624 that are not padding types); and the mode (for rep'ed records). We also
625 clear the DECL_BIT_FIELD indication for the cases we know have not been
626 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
628 if (code == QUAL_UNION_TYPE)
629 field_list = nreverse (field_list);
631 for (field = field_list; field; field = TREE_CHAIN (field))
633 tree type = TREE_TYPE (field);
634 tree pos = bit_position (field);
635 tree this_size = DECL_SIZE (field);
638 if ((TREE_CODE (type) == RECORD_TYPE
639 || TREE_CODE (type) == UNION_TYPE
640 || TREE_CODE (type) == QUAL_UNION_TYPE)
641 && !TYPE_FAT_POINTER_P (type)
642 && !TYPE_CONTAINS_TEMPLATE_P (type)
643 && TYPE_ADA_SIZE (type))
644 this_ada_size = TYPE_ADA_SIZE (type);
646 this_ada_size = this_size;
648 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
649 if (DECL_BIT_FIELD (field)
650 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
652 unsigned int align = TYPE_ALIGN (type);
654 /* In the general case, type alignment is required. */
655 if (value_factor_p (pos, align))
657 /* The enclosing record type must be sufficiently aligned.
658 Otherwise, if no alignment was specified for it and it
659 has been laid out already, bump its alignment to the
660 desired one if this is compatible with its size. */
661 if (TYPE_ALIGN (record_type) >= align)
663 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
664 DECL_BIT_FIELD (field) = 0;
668 && value_factor_p (TYPE_SIZE (record_type), align))
670 TYPE_ALIGN (record_type) = align;
671 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
672 DECL_BIT_FIELD (field) = 0;
676 /* In the non-strict alignment case, only byte alignment is. */
677 if (!STRICT_ALIGNMENT
678 && DECL_BIT_FIELD (field)
679 && value_factor_p (pos, BITS_PER_UNIT))
680 DECL_BIT_FIELD (field) = 0;
683 /* If we still have DECL_BIT_FIELD set at this point, we know that the
684 field is technically not addressable. Except that it can actually
685 be addressed if it is BLKmode and happens to be properly aligned. */
686 if (DECL_BIT_FIELD (field)
687 && !(DECL_MODE (field) == BLKmode
688 && value_factor_p (pos, BITS_PER_UNIT)))
689 DECL_NONADDRESSABLE_P (field) = 1;
691 /* A type must be as aligned as its most aligned field that is not
692 a bit-field. But this is already enforced by layout_type. */
693 if (rep_level > 0 && !DECL_BIT_FIELD (field))
694 TYPE_ALIGN (record_type)
695 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
700 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
701 size = size_binop (MAX_EXPR, size, this_size);
704 case QUAL_UNION_TYPE:
706 = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
707 this_ada_size, ada_size);
708 size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
713 /* Since we know here that all fields are sorted in order of
714 increasing bit position, the size of the record is one
715 higher than the ending bit of the last field processed
716 unless we have a rep clause, since in that case we might
717 have a field outside a QUAL_UNION_TYPE that has a higher ending
718 position. So use a MAX in that case. Also, if this field is a
719 QUAL_UNION_TYPE, we need to take into account the previous size in
720 the case of empty variants. */
722 = merge_sizes (ada_size, pos, this_ada_size,
723 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
725 = merge_sizes (size, pos, this_size,
726 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
734 if (code == QUAL_UNION_TYPE)
735 nreverse (field_list);
739 /* If this is a padding record, we never want to make the size smaller
740 than what was specified in it, if any. */
741 if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
742 size = TYPE_SIZE (record_type);
744 /* Now set any of the values we've just computed that apply. */
745 if (!TYPE_FAT_POINTER_P (record_type)
746 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
747 SET_TYPE_ADA_SIZE (record_type, ada_size);
751 tree size_unit = had_size_unit
752 ? TYPE_SIZE_UNIT (record_type)
754 size_binop (CEIL_DIV_EXPR, size,
756 unsigned int align = TYPE_ALIGN (record_type);
758 TYPE_SIZE (record_type) = variable_size (round_up (size, align));
759 TYPE_SIZE_UNIT (record_type)
760 = variable_size (round_up (size_unit, align / BITS_PER_UNIT));
762 compute_record_mode (record_type);
767 rest_of_record_type_compilation (record_type);
770 /* Wrap up compilation of RECORD_TYPE, i.e. output all the debug information
771 associated with it. It need not be invoked directly in most cases since
772 finish_record_type takes care of doing so, but this can be necessary if
773 a parallel type is to be attached to the record type. */
776 rest_of_record_type_compilation (tree record_type)
778 tree field_list = TYPE_FIELDS (record_type);
780 enum tree_code code = TREE_CODE (record_type);
781 bool var_size = false;
783 for (field = field_list; field; field = TREE_CHAIN (field))
785 /* We need to make an XVE/XVU record if any field has variable size,
786 whether or not the record does. For example, if we have a union,
787 it may be that all fields, rounded up to the alignment, have the
788 same size, in which case we'll use that size. But the debug
789 output routines (except Dwarf2) won't be able to output the fields,
790 so we need to make the special record. */
791 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
792 /* If a field has a non-constant qualifier, the record will have
793 variable size too. */
794 || (code == QUAL_UNION_TYPE
795 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
802 /* If this record is of variable size, rename it so that the
803 debugger knows it is and make a new, parallel, record
804 that tells the debugger how the record is laid out. See
805 exp_dbug.ads. But don't do this for records that are padding
806 since they confuse GDB. */
807 if (var_size && !TYPE_IS_PADDING_P (record_type))
810 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
811 ? UNION_TYPE : TREE_CODE (record_type));
812 tree orig_name = TYPE_NAME (record_type), new_name;
813 tree last_pos = bitsize_zero_node;
814 tree old_field, prev_old_field = NULL_TREE;
816 if (TREE_CODE (orig_name) == TYPE_DECL)
817 orig_name = DECL_NAME (orig_name);
820 = concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
822 TYPE_NAME (new_record_type) = new_name;
823 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
824 TYPE_STUB_DECL (new_record_type)
825 = create_type_stub_decl (new_name, new_record_type);
826 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
827 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
828 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
829 TYPE_SIZE_UNIT (new_record_type)
830 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
832 add_parallel_type (TYPE_STUB_DECL (record_type), new_record_type);
834 /* Now scan all the fields, replacing each field with a new
835 field corresponding to the new encoding. */
836 for (old_field = TYPE_FIELDS (record_type); old_field;
837 old_field = TREE_CHAIN (old_field))
839 tree field_type = TREE_TYPE (old_field);
840 tree field_name = DECL_NAME (old_field);
842 tree curpos = bit_position (old_field);
844 unsigned int align = 0;
847 /* See how the position was modified from the last position.
849 There are two basic cases we support: a value was added
850 to the last position or the last position was rounded to
851 a boundary and they something was added. Check for the
852 first case first. If not, see if there is any evidence
853 of rounding. If so, round the last position and try
856 If this is a union, the position can be taken as zero. */
858 /* Some computations depend on the shape of the position expression,
859 so strip conversions to make sure it's exposed. */
860 curpos = remove_conversions (curpos, true);
862 if (TREE_CODE (new_record_type) == UNION_TYPE)
863 pos = bitsize_zero_node, align = 0;
865 pos = compute_related_constant (curpos, last_pos);
867 if (!pos && TREE_CODE (curpos) == MULT_EXPR
868 && host_integerp (TREE_OPERAND (curpos, 1), 1))
870 tree offset = TREE_OPERAND (curpos, 0);
871 align = tree_low_cst (TREE_OPERAND (curpos, 1), 1);
873 /* An offset which is a bitwise AND with a negative power of 2
874 means an alignment corresponding to this power of 2. */
875 offset = remove_conversions (offset, true);
876 if (TREE_CODE (offset) == BIT_AND_EXPR
877 && host_integerp (TREE_OPERAND (offset, 1), 0)
878 && tree_int_cst_sgn (TREE_OPERAND (offset, 1)) < 0)
881 = - tree_low_cst (TREE_OPERAND (offset, 1), 0);
882 if (exact_log2 (pow) > 0)
886 pos = compute_related_constant (curpos,
887 round_up (last_pos, align));
889 else if (!pos && TREE_CODE (curpos) == PLUS_EXPR
890 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
891 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
892 && host_integerp (TREE_OPERAND
893 (TREE_OPERAND (curpos, 0), 1),
898 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
899 pos = compute_related_constant (curpos,
900 round_up (last_pos, align));
902 else if (potential_alignment_gap (prev_old_field, old_field,
905 align = TYPE_ALIGN (field_type);
906 pos = compute_related_constant (curpos,
907 round_up (last_pos, align));
910 /* If we can't compute a position, set it to zero.
912 ??? We really should abort here, but it's too much work
913 to get this correct for all cases. */
916 pos = bitsize_zero_node;
918 /* See if this type is variable-sized and make a pointer type
919 and indicate the indirection if so. Beware that the debug
920 back-end may adjust the position computed above according
921 to the alignment of the field type, i.e. the pointer type
922 in this case, if we don't preventively counter that. */
923 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
925 field_type = build_pointer_type (field_type);
926 if (align != 0 && TYPE_ALIGN (field_type) > align)
928 field_type = copy_node (field_type);
929 TYPE_ALIGN (field_type) = align;
934 /* Make a new field name, if necessary. */
935 if (var || align != 0)
940 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
941 align / BITS_PER_UNIT);
943 strcpy (suffix, "XVL");
945 field_name = concat_name (field_name, suffix);
948 new_field = create_field_decl (field_name, field_type,
950 DECL_SIZE (old_field), pos, 0);
951 TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
952 TYPE_FIELDS (new_record_type) = new_field;
954 /* If old_field is a QUAL_UNION_TYPE, take its size as being
955 zero. The only time it's not the last field of the record
956 is when there are other components at fixed positions after
957 it (meaning there was a rep clause for every field) and we
958 want to be able to encode them. */
959 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
960 (TREE_CODE (TREE_TYPE (old_field))
963 : DECL_SIZE (old_field));
964 prev_old_field = old_field;
967 TYPE_FIELDS (new_record_type)
968 = nreverse (TYPE_FIELDS (new_record_type));
970 rest_of_type_decl_compilation (TYPE_STUB_DECL (new_record_type));
973 rest_of_type_decl_compilation (TYPE_STUB_DECL (record_type));
976 /* Append PARALLEL_TYPE on the chain of parallel types for decl. */
979 add_parallel_type (tree decl, tree parallel_type)
983 while (DECL_PARALLEL_TYPE (d))
984 d = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (d));
986 SET_DECL_PARALLEL_TYPE (d, parallel_type);
989 /* Return the parallel type associated to a type, if any. */
992 get_parallel_type (tree type)
994 if (TYPE_STUB_DECL (type))
995 return DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type));
1000 /* Utility function of above to merge LAST_SIZE, the previous size of a record
1001 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
1002 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
1003 replace a value of zero with the old size. If HAS_REP is true, we take the
1004 MAX of the end position of this field with LAST_SIZE. In all other cases,
1005 we use FIRST_BIT plus SIZE. Return an expression for the size. */
1008 merge_sizes (tree last_size, tree first_bit, tree size, bool special,
1011 tree type = TREE_TYPE (last_size);
1014 if (!special || TREE_CODE (size) != COND_EXPR)
1016 new_size = size_binop (PLUS_EXPR, first_bit, size);
1018 new_size = size_binop (MAX_EXPR, last_size, new_size);
1022 new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
1023 integer_zerop (TREE_OPERAND (size, 1))
1024 ? last_size : merge_sizes (last_size, first_bit,
1025 TREE_OPERAND (size, 1),
1027 integer_zerop (TREE_OPERAND (size, 2))
1028 ? last_size : merge_sizes (last_size, first_bit,
1029 TREE_OPERAND (size, 2),
1032 /* We don't need any NON_VALUE_EXPRs and they can confuse us (especially
1033 when fed through substitute_in_expr) into thinking that a constant
1034 size is not constant. */
1035 while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
1036 new_size = TREE_OPERAND (new_size, 0);
1041 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1042 related by the addition of a constant. Return that constant if so. */
1045 compute_related_constant (tree op0, tree op1)
1047 tree op0_var, op1_var;
1048 tree op0_con = split_plus (op0, &op0_var);
1049 tree op1_con = split_plus (op1, &op1_var);
1050 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1052 if (operand_equal_p (op0_var, op1_var, 0))
1054 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1060 /* Utility function of above to split a tree OP which may be a sum, into a
1061 constant part, which is returned, and a variable part, which is stored
1062 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1066 split_plus (tree in, tree *pvar)
1068 /* Strip NOPS in order to ease the tree traversal and maximize the
1069 potential for constant or plus/minus discovery. We need to be careful
1070 to always return and set *pvar to bitsizetype trees, but it's worth
1074 *pvar = convert (bitsizetype, in);
1076 if (TREE_CODE (in) == INTEGER_CST)
1078 *pvar = bitsize_zero_node;
1079 return convert (bitsizetype, in);
1081 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1083 tree lhs_var, rhs_var;
1084 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1085 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
1087 if (lhs_var == TREE_OPERAND (in, 0)
1088 && rhs_var == TREE_OPERAND (in, 1))
1089 return bitsize_zero_node;
1091 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1092 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
1095 return bitsize_zero_node;
1098 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1099 subprogram. If it is VOID_TYPE, then we are dealing with a procedure,
1100 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1101 PARM_DECL nodes that are the subprogram parameters. CICO_LIST is the
1102 copy-in/copy-out list to be stored into the TYPE_CICO_LIST field.
1103 RETURN_UNCONSTRAINED_P is true if the function returns an unconstrained
1104 object. RETURN_BY_DIRECT_REF_P is true if the function returns by direct
1105 reference. RETURN_BY_INVISI_REF_P is true if the function returns by
1106 invisible reference. */
1109 create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
1110 bool return_unconstrained_p, bool return_by_direct_ref_p,
1111 bool return_by_invisi_ref_p)
1113 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1114 the subprogram formal parameters. This list is generated by traversing
1115 the input list of PARM_DECL nodes. */
1116 tree param_type_list = NULL_TREE;
1119 for (t = param_decl_list; t; t = TREE_CHAIN (t))
1120 param_type_list = tree_cons (NULL_TREE, TREE_TYPE (t), param_type_list);
1122 /* The list of the function parameter types has to be terminated by the void
1123 type to signal to the back-end that we are not dealing with a variable
1124 parameter subprogram, but that it has a fixed number of parameters. */
1125 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1127 /* The list of argument types has been created in reverse so reverse it. */
1128 param_type_list = nreverse (param_type_list);
1130 type = build_function_type (return_type, param_type_list);
1132 /* TYPE may have been shared since GCC hashes types. If it has a different
1133 CICO_LIST, make a copy. Likewise for the various flags. */
1134 if (TYPE_CI_CO_LIST (type) != cico_list
1135 || TYPE_RETURN_UNCONSTRAINED_P (type) != return_unconstrained_p
1136 || TYPE_RETURN_BY_DIRECT_REF_P (type) != return_by_direct_ref_p
1137 || TREE_ADDRESSABLE (type) != return_by_invisi_ref_p)
1139 type = copy_type (type);
1140 TYPE_CI_CO_LIST (type) = cico_list;
1141 TYPE_RETURN_UNCONSTRAINED_P (type) = return_unconstrained_p;
1142 TYPE_RETURN_BY_DIRECT_REF_P (type) = return_by_direct_ref_p;
1143 TREE_ADDRESSABLE (type) = return_by_invisi_ref_p;
1149 /* Return a copy of TYPE but safe to modify in any way. */
1152 copy_type (tree type)
1154 tree new_type = copy_node (type);
1156 /* Unshare the language-specific data. */
1157 if (TYPE_LANG_SPECIFIC (type))
1159 TYPE_LANG_SPECIFIC (new_type) = NULL;
1160 SET_TYPE_LANG_SPECIFIC (new_type, GET_TYPE_LANG_SPECIFIC (type));
1163 /* And the contents of the language-specific slot if needed. */
1164 if ((INTEGRAL_TYPE_P (type) || TREE_CODE (type) == REAL_TYPE)
1165 && TYPE_RM_VALUES (type))
1167 TYPE_RM_VALUES (new_type) = NULL_TREE;
1168 SET_TYPE_RM_SIZE (new_type, TYPE_RM_SIZE (type));
1169 SET_TYPE_RM_MIN_VALUE (new_type, TYPE_RM_MIN_VALUE (type));
1170 SET_TYPE_RM_MAX_VALUE (new_type, TYPE_RM_MAX_VALUE (type));
1173 /* copy_node clears this field instead of copying it, because it is
1174 aliased with TREE_CHAIN. */
1175 TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);
1177 TYPE_POINTER_TO (new_type) = 0;
1178 TYPE_REFERENCE_TO (new_type) = 0;
1179 TYPE_MAIN_VARIANT (new_type) = new_type;
1180 TYPE_NEXT_VARIANT (new_type) = 0;
1185 /* Return a subtype of sizetype with range MIN to MAX and whose
1186 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
1187 of the associated TYPE_DECL. */
1190 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
1192 /* First build a type for the desired range. */
1193 tree type = build_index_2_type (min, max);
1195 /* If this type has the TYPE_INDEX_TYPE we want, return it. */
1196 if (TYPE_INDEX_TYPE (type) == index)
1199 /* Otherwise, if TYPE_INDEX_TYPE is set, make a copy. Note that we have
1200 no way of sharing these types, but that's only a small hole. */
1201 if (TYPE_INDEX_TYPE (type))
1202 type = copy_type (type);
1204 SET_TYPE_INDEX_TYPE (type, index);
1205 create_type_decl (NULL_TREE, type, NULL, true, false, gnat_node);
1210 /* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
1211 sizetype is used. */
1214 create_range_type (tree type, tree min, tree max)
1218 if (type == NULL_TREE)
1221 /* First build a type with the base range. */
1223 = build_range_type (type, TYPE_MIN_VALUE (type), TYPE_MAX_VALUE (type));
1225 min = convert (type, min);
1226 max = convert (type, max);
1228 /* If this type has the TYPE_RM_{MIN,MAX}_VALUE we want, return it. */
1229 if (TYPE_RM_MIN_VALUE (range_type)
1230 && TYPE_RM_MAX_VALUE (range_type)
1231 && operand_equal_p (TYPE_RM_MIN_VALUE (range_type), min, 0)
1232 && operand_equal_p (TYPE_RM_MAX_VALUE (range_type), max, 0))
1235 /* Otherwise, if TYPE_RM_{MIN,MAX}_VALUE is set, make a copy. */
1236 if (TYPE_RM_MIN_VALUE (range_type) || TYPE_RM_MAX_VALUE (range_type))
1237 range_type = copy_type (range_type);
1239 /* Then set the actual range. */
1240 SET_TYPE_RM_MIN_VALUE (range_type, min);
1241 SET_TYPE_RM_MAX_VALUE (range_type, max);
1246 /* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of a type.
1247 TYPE_NAME gives the name of the type and TYPE is a ..._TYPE node giving
1251 create_type_stub_decl (tree type_name, tree type)
1253 /* Using a named TYPE_DECL ensures that a type name marker is emitted in
1254 STABS while setting DECL_ARTIFICIAL ensures that no DW_TAG_typedef is
1255 emitted in DWARF. */
1256 tree type_decl = build_decl (input_location,
1257 TYPE_DECL, type_name, type);
1258 DECL_ARTIFICIAL (type_decl) = 1;
1262 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type and TYPE
1263 is a ..._TYPE node giving its data type. ARTIFICIAL_P is true if this
1264 is a declaration that was generated by the compiler. DEBUG_INFO_P is
1265 true if we need to write debug information about this type. GNAT_NODE
1266 is used for the position of the decl. */
1269 create_type_decl (tree type_name, tree type, struct attrib *attr_list,
1270 bool artificial_p, bool debug_info_p, Node_Id gnat_node)
1272 enum tree_code code = TREE_CODE (type);
1273 bool named = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
1276 /* Only the builtin TYPE_STUB_DECL should be used for dummy types. */
1277 gcc_assert (!TYPE_IS_DUMMY_P (type));
1279 /* If the type hasn't been named yet, we're naming it; preserve an existing
1280 TYPE_STUB_DECL that has been attached to it for some purpose. */
1281 if (!named && TYPE_STUB_DECL (type))
1283 type_decl = TYPE_STUB_DECL (type);
1284 DECL_NAME (type_decl) = type_name;
1287 type_decl = build_decl (input_location,
1288 TYPE_DECL, type_name, type);
1290 DECL_ARTIFICIAL (type_decl) = artificial_p;
1291 gnat_pushdecl (type_decl, gnat_node);
1292 process_attributes (type_decl, attr_list);
1294 /* If we're naming the type, equate the TYPE_STUB_DECL to the name.
1295 This causes the name to be also viewed as a "tag" by the debug
1296 back-end, with the advantage that no DW_TAG_typedef is emitted
1297 for artificial "tagged" types in DWARF. */
1299 TYPE_STUB_DECL (type) = type_decl;
1301 /* Pass the type declaration to the debug back-end unless this is an
1302 UNCONSTRAINED_ARRAY_TYPE that the back-end does not support, or a
1303 type for which debugging information was not requested, or else an
1304 ENUMERAL_TYPE or RECORD_TYPE (except for fat pointers) which are
1305 handled separately. And do not pass dummy types either. */
1306 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
1307 DECL_IGNORED_P (type_decl) = 1;
1308 else if (code != ENUMERAL_TYPE
1309 && (code != RECORD_TYPE || TYPE_FAT_POINTER_P (type))
1310 && !((code == POINTER_TYPE || code == REFERENCE_TYPE)
1311 && TYPE_IS_DUMMY_P (TREE_TYPE (type)))
1312 && !(code == RECORD_TYPE
1314 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))))))
1315 rest_of_type_decl_compilation (type_decl);
1320 /* Return a VAR_DECL or CONST_DECL node.
1322 VAR_NAME gives the name of the variable. ASM_NAME is its assembler name
1323 (if provided). TYPE is its data type (a GCC ..._TYPE node). VAR_INIT is
1324 the GCC tree for an optional initial expression; NULL_TREE if none.
1326 CONST_FLAG is true if this variable is constant, in which case we might
1327 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
1329 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
1330 definition to be made visible outside of the current compilation unit, for
1331 instance variable definitions in a package specification.
1333 EXTERN_FLAG is true when processing an external variable declaration (as
1334 opposed to a definition: no storage is to be allocated for the variable).
1336 STATIC_FLAG is only relevant when not at top level. In that case
1337 it indicates whether to always allocate storage to the variable.
1339 GNAT_NODE is used for the position of the decl. */
1342 create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
1343 bool const_flag, bool public_flag, bool extern_flag,
1344 bool static_flag, bool const_decl_allowed_p,
1345 struct attrib *attr_list, Node_Id gnat_node)
1349 && gnat_types_compatible_p (type, TREE_TYPE (var_init))
1350 && (global_bindings_p () || static_flag
1351 ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init)) != 0
1352 : TREE_CONSTANT (var_init)));
1354 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
1355 case the initializer may be used in-lieu of the DECL node (as done in
1356 Identifier_to_gnu). This is useful to prevent the need of elaboration
1357 code when an identifier for which such a decl is made is in turn used as
1358 an initializer. We used to rely on CONST vs VAR_DECL for this purpose,
1359 but extra constraints apply to this choice (see below) and are not
1360 relevant to the distinction we wish to make. */
1361 bool constant_p = const_flag && init_const;
1363 /* The actual DECL node. CONST_DECL was initially intended for enumerals
1364 and may be used for scalars in general but not for aggregates. */
1366 = build_decl (input_location,
1367 (constant_p && const_decl_allowed_p
1368 && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
1371 /* If this is external, throw away any initializations (they will be done
1372 elsewhere) unless this is a constant for which we would like to remain
1373 able to get the initializer. If we are defining a global here, leave a
1374 constant initialization and save any variable elaborations for the
1375 elaboration routine. If we are just annotating types, throw away the
1376 initialization if it isn't a constant. */
1377 if ((extern_flag && !constant_p)
1378 || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
1379 var_init = NULL_TREE;
1381 /* At the global level, an initializer requiring code to be generated
1382 produces elaboration statements. Check that such statements are allowed,
1383 that is, not violating a No_Elaboration_Code restriction. */
1384 if (global_bindings_p () && var_init != 0 && !init_const)
1385 Check_Elaboration_Code_Allowed (gnat_node);
1387 DECL_INITIAL (var_decl) = var_init;
1388 TREE_READONLY (var_decl) = const_flag;
1389 DECL_EXTERNAL (var_decl) = extern_flag;
1390 TREE_PUBLIC (var_decl) = public_flag || extern_flag;
1391 TREE_CONSTANT (var_decl) = constant_p;
1392 TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
1393 = TYPE_VOLATILE (type);
1395 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
1396 try to fiddle with DECL_COMMON. However, on platforms that don't
1397 support global BSS sections, uninitialized global variables would
1398 go in DATA instead, thus increasing the size of the executable. */
1400 && TREE_CODE (var_decl) == VAR_DECL
1401 && TREE_PUBLIC (var_decl)
1402 && !have_global_bss_p ())
1403 DECL_COMMON (var_decl) = 1;
1405 /* If it's public and not external, always allocate storage for it.
1406 At the global binding level we need to allocate static storage for the
1407 variable if and only if it's not external. If we are not at the top level
1408 we allocate automatic storage unless requested not to. */
1409 TREE_STATIC (var_decl)
1410 = !extern_flag && (public_flag || static_flag || global_bindings_p ());
1412 /* For an external constant whose initializer is not absolute, do not emit
1413 debug info. In DWARF this would mean a global relocation in a read-only
1414 section which runs afoul of the PE-COFF runtime relocation mechanism. */
1417 && initializer_constant_valid_p (var_init, TREE_TYPE (var_init))
1418 != null_pointer_node)
1419 DECL_IGNORED_P (var_decl) = 1;
1421 if (TREE_CODE (var_decl) == VAR_DECL)
1424 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
1425 process_attributes (var_decl, attr_list);
1428 /* Add this decl to the current binding level. */
1429 gnat_pushdecl (var_decl, gnat_node);
1431 if (TREE_SIDE_EFFECTS (var_decl))
1432 TREE_ADDRESSABLE (var_decl) = 1;
1434 if (TREE_CODE (var_decl) != CONST_DECL)
1436 if (global_bindings_p ())
1437 rest_of_decl_compilation (var_decl, true, 0);
1440 expand_decl (var_decl);
1445 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
1448 aggregate_type_contains_array_p (tree type)
1450 switch (TREE_CODE (type))
1454 case QUAL_UNION_TYPE:
1457 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1458 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
1459 && aggregate_type_contains_array_p (TREE_TYPE (field)))
1472 /* Return a FIELD_DECL node. FIELD_NAME is the field's name, FIELD_TYPE is
1473 its type and RECORD_TYPE is the type of the enclosing record. PACKED is
1474 1 if the enclosing record is packed, -1 if it has Component_Alignment of
1475 Storage_Unit. If SIZE is nonzero, it is the specified size of the field.
1476 If POS is nonzero, it is the bit position. If ADDRESSABLE is nonzero, it
1477 means we are allowed to take the address of the field; if it is negative,
1478 we should not make a bitfield, which is used by make_aligning_type. */
1481 create_field_decl (tree field_name, tree field_type, tree record_type,
1482 int packed, tree size, tree pos, int addressable)
1484 tree field_decl = build_decl (input_location,
1485 FIELD_DECL, field_name, field_type);
1487 DECL_CONTEXT (field_decl) = record_type;
1488 TREE_READONLY (field_decl) = TYPE_READONLY (field_type);
1490 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
1491 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
1492 Likewise for an aggregate without specified position that contains an
1493 array, because in this case slices of variable length of this array
1494 must be handled by GCC and variable-sized objects need to be aligned
1495 to at least a byte boundary. */
1496 if (packed && (TYPE_MODE (field_type) == BLKmode
1498 && AGGREGATE_TYPE_P (field_type)
1499 && aggregate_type_contains_array_p (field_type))))
1500 DECL_ALIGN (field_decl) = BITS_PER_UNIT;
1502 /* If a size is specified, use it. Otherwise, if the record type is packed
1503 compute a size to use, which may differ from the object's natural size.
1504 We always set a size in this case to trigger the checks for bitfield
1505 creation below, which is typically required when no position has been
1508 size = convert (bitsizetype, size);
1509 else if (packed == 1)
1511 size = rm_size (field_type);
1512 if (TYPE_MODE (field_type) == BLKmode)
1513 size = round_up (size, BITS_PER_UNIT);
1516 /* If we may, according to ADDRESSABLE, make a bitfield if a size is
1517 specified for two reasons: first if the size differs from the natural
1518 size. Second, if the alignment is insufficient. There are a number of
1519 ways the latter can be true.
1521 We never make a bitfield if the type of the field has a nonconstant size,
1522 because no such entity requiring bitfield operations should reach here.
1524 We do *preventively* make a bitfield when there might be the need for it
1525 but we don't have all the necessary information to decide, as is the case
1526 of a field with no specified position in a packed record.
1528 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
1529 in layout_decl or finish_record_type to clear the bit_field indication if
1530 it is in fact not needed. */
1531 if (addressable >= 0
1533 && TREE_CODE (size) == INTEGER_CST
1534 && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
1535 && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
1536 || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
1538 || (TYPE_ALIGN (record_type) != 0
1539 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
1541 DECL_BIT_FIELD (field_decl) = 1;
1542 DECL_SIZE (field_decl) = size;
1543 if (!packed && !pos)
1545 if (TYPE_ALIGN (record_type) != 0
1546 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))
1547 DECL_ALIGN (field_decl) = TYPE_ALIGN (record_type);
1549 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
1553 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
1555 /* Bump the alignment if need be, either for bitfield/packing purposes or
1556 to satisfy the type requirements if no such consideration applies. When
1557 we get the alignment from the type, indicate if this is from an explicit
1558 user request, which prevents stor-layout from lowering it later on. */
1560 unsigned int bit_align
1561 = (DECL_BIT_FIELD (field_decl) ? 1
1562 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT : 0);
1564 if (bit_align > DECL_ALIGN (field_decl))
1565 DECL_ALIGN (field_decl) = bit_align;
1566 else if (!bit_align && TYPE_ALIGN (field_type) > DECL_ALIGN (field_decl))
1568 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
1569 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (field_type);
1575 /* We need to pass in the alignment the DECL is known to have.
1576 This is the lowest-order bit set in POS, but no more than
1577 the alignment of the record, if one is specified. Note
1578 that an alignment of 0 is taken as infinite. */
1579 unsigned int known_align;
1581 if (host_integerp (pos, 1))
1582 known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
1584 known_align = BITS_PER_UNIT;
1586 if (TYPE_ALIGN (record_type)
1587 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
1588 known_align = TYPE_ALIGN (record_type);
1590 layout_decl (field_decl, known_align);
1591 SET_DECL_OFFSET_ALIGN (field_decl,
1592 host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
1594 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
1595 &DECL_FIELD_BIT_OFFSET (field_decl),
1596 DECL_OFFSET_ALIGN (field_decl), pos);
1599 /* In addition to what our caller says, claim the field is addressable if we
1600 know that its type is not suitable.
1602 The field may also be "technically" nonaddressable, meaning that even if
1603 we attempt to take the field's address we will actually get the address
1604 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
1605 value we have at this point is not accurate enough, so we don't account
1606 for this here and let finish_record_type decide. */
1607 if (!addressable && !type_for_nonaliased_component_p (field_type))
1610 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
1615 /* Return a PARM_DECL node. PARAM_NAME is the name of the parameter and
1616 PARAM_TYPE is its type. READONLY is true if the parameter is readonly
1617 (either an In parameter or an address of a pass-by-ref parameter). */
1620 create_param_decl (tree param_name, tree param_type, bool readonly)
1622 tree param_decl = build_decl (input_location,
1623 PARM_DECL, param_name, param_type);
1625 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
1626 can lead to various ABI violations. */
1627 if (targetm.calls.promote_prototypes (NULL_TREE)
1628 && INTEGRAL_TYPE_P (param_type)
1629 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
1631 /* We have to be careful about biased types here. Make a subtype
1632 of integer_type_node with the proper biasing. */
1633 if (TREE_CODE (param_type) == INTEGER_TYPE
1634 && TYPE_BIASED_REPRESENTATION_P (param_type))
1637 = make_unsigned_type (TYPE_PRECISION (integer_type_node));
1638 TREE_TYPE (subtype) = integer_type_node;
1639 TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
1640 SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (param_type));
1641 SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (param_type));
1642 param_type = subtype;
1645 param_type = integer_type_node;
1648 DECL_ARG_TYPE (param_decl) = param_type;
1649 TREE_READONLY (param_decl) = readonly;
1653 /* Given a DECL and ATTR_LIST, process the listed attributes. */
1656 process_attributes (tree decl, struct attrib *attr_list)
1658 for (; attr_list; attr_list = attr_list->next)
1659 switch (attr_list->type)
1661 case ATTR_MACHINE_ATTRIBUTE:
1662 decl_attributes (&decl, tree_cons (attr_list->name, attr_list->args,
1664 ATTR_FLAG_TYPE_IN_PLACE);
1667 case ATTR_LINK_ALIAS:
1668 if (! DECL_EXTERNAL (decl))
1670 TREE_STATIC (decl) = 1;
1671 assemble_alias (decl, attr_list->name);
1675 case ATTR_WEAK_EXTERNAL:
1677 declare_weak (decl);
1679 post_error ("?weak declarations not supported on this target",
1680 attr_list->error_point);
1683 case ATTR_LINK_SECTION:
1684 if (targetm.have_named_sections)
1686 DECL_SECTION_NAME (decl)
1687 = build_string (IDENTIFIER_LENGTH (attr_list->name),
1688 IDENTIFIER_POINTER (attr_list->name));
1689 DECL_COMMON (decl) = 0;
1692 post_error ("?section attributes are not supported for this target",
1693 attr_list->error_point);
1696 case ATTR_LINK_CONSTRUCTOR:
1697 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1698 TREE_USED (decl) = 1;
1701 case ATTR_LINK_DESTRUCTOR:
1702 DECL_STATIC_DESTRUCTOR (decl) = 1;
1703 TREE_USED (decl) = 1;
1706 case ATTR_THREAD_LOCAL_STORAGE:
1707 DECL_TLS_MODEL (decl) = decl_default_tls_model (decl);
1708 DECL_COMMON (decl) = 0;
1713 /* Record DECL as a global renaming pointer. */
1716 record_global_renaming_pointer (tree decl)
1718 gcc_assert (DECL_RENAMED_OBJECT (decl));
1719 VEC_safe_push (tree, gc, global_renaming_pointers, decl);
1722 /* Invalidate the global renaming pointers. */
1725 invalidate_global_renaming_pointers (void)
1730 for (i = 0; VEC_iterate(tree, global_renaming_pointers, i, iter); i++)
1731 SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);
1733 VEC_free (tree, gc, global_renaming_pointers);
1736 /* Return true if VALUE is a known to be a multiple of FACTOR, which must be
1740 value_factor_p (tree value, HOST_WIDE_INT factor)
1742 if (host_integerp (value, 1))
1743 return tree_low_cst (value, 1) % factor == 0;
1745 if (TREE_CODE (value) == MULT_EXPR)
1746 return (value_factor_p (TREE_OPERAND (value, 0), factor)
1747 || value_factor_p (TREE_OPERAND (value, 1), factor));
1752 /* Given 2 consecutive field decls PREV_FIELD and CURR_FIELD, return true
1753 unless we can prove these 2 fields are laid out in such a way that no gap
1754 exist between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
1755 is the distance in bits between the end of PREV_FIELD and the starting
1756 position of CURR_FIELD. It is ignored if null. */
1759 potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
1761 /* If this is the first field of the record, there cannot be any gap */
1765 /* If the previous field is a union type, then return False: The only
1766 time when such a field is not the last field of the record is when
1767 there are other components at fixed positions after it (meaning there
1768 was a rep clause for every field), in which case we don't want the
1769 alignment constraint to override them. */
1770 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
1773 /* If the distance between the end of prev_field and the beginning of
1774 curr_field is constant, then there is a gap if the value of this
1775 constant is not null. */
1776 if (offset && host_integerp (offset, 1))
1777 return !integer_zerop (offset);
1779 /* If the size and position of the previous field are constant,
1780 then check the sum of this size and position. There will be a gap
1781 iff it is not multiple of the current field alignment. */
1782 if (host_integerp (DECL_SIZE (prev_field), 1)
1783 && host_integerp (bit_position (prev_field), 1))
1784 return ((tree_low_cst (bit_position (prev_field), 1)
1785 + tree_low_cst (DECL_SIZE (prev_field), 1))
1786 % DECL_ALIGN (curr_field) != 0);
1788 /* If both the position and size of the previous field are multiples
1789 of the current field alignment, there cannot be any gap. */
1790 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
1791 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
1794 /* Fallback, return that there may be a potential gap */
1798 /* Returns a LABEL_DECL node for LABEL_NAME. */
1801 create_label_decl (tree label_name)
1803 tree label_decl = build_decl (input_location,
1804 LABEL_DECL, label_name, void_type_node);
1806 DECL_CONTEXT (label_decl) = current_function_decl;
1807 DECL_MODE (label_decl) = VOIDmode;
1808 DECL_SOURCE_LOCATION (label_decl) = input_location;
1813 /* Returns a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
1814 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1815 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1816 PARM_DECL nodes chained through the TREE_CHAIN field).
1818 INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
1819 appropriate fields in the FUNCTION_DECL. GNAT_NODE gives the location. */
1822 create_subprog_decl (tree subprog_name, tree asm_name,
1823 tree subprog_type, tree param_decl_list, bool inline_flag,
1824 bool public_flag, bool extern_flag,
1825 struct attrib *attr_list, Node_Id gnat_node)
1827 tree subprog_decl = build_decl (input_location, FUNCTION_DECL, subprog_name,
1829 tree result_decl = build_decl (input_location, RESULT_DECL, NULL_TREE,
1830 TREE_TYPE (subprog_type));
1832 /* If this is a non-inline function nested inside an inlined external
1833 function, we cannot honor both requests without cloning the nested
1834 function in the current unit since it is private to the other unit.
1835 We could inline the nested function as well but it's probably better
1836 to err on the side of too little inlining. */
1838 && current_function_decl
1839 && DECL_DECLARED_INLINE_P (current_function_decl)
1840 && DECL_EXTERNAL (current_function_decl))
1841 DECL_DECLARED_INLINE_P (current_function_decl) = 0;
1843 DECL_EXTERNAL (subprog_decl) = extern_flag;
1844 TREE_PUBLIC (subprog_decl) = public_flag;
1845 TREE_STATIC (subprog_decl) = 1;
1846 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
1847 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
1848 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
1849 DECL_DECLARED_INLINE_P (subprog_decl) = inline_flag;
1850 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
1852 DECL_ARTIFICIAL (result_decl) = 1;
1853 DECL_IGNORED_P (result_decl) = 1;
1854 DECL_BY_REFERENCE (result_decl) = TREE_ADDRESSABLE (subprog_type);
1855 DECL_RESULT (subprog_decl) = result_decl;
1859 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
1861 /* The expand_main_function circuitry expects "main_identifier_node" to
1862 designate the DECL_NAME of the 'main' entry point, in turn expected
1863 to be declared as the "main" function literally by default. Ada
1864 program entry points are typically declared with a different name
1865 within the binder generated file, exported as 'main' to satisfy the
1866 system expectations. Force main_identifier_node in this case. */
1867 if (asm_name == main_identifier_node)
1868 DECL_NAME (subprog_decl) = main_identifier_node;
1871 process_attributes (subprog_decl, attr_list);
1873 /* Add this decl to the current binding level. */
1874 gnat_pushdecl (subprog_decl, gnat_node);
1876 /* Output the assembler code and/or RTL for the declaration. */
1877 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
1879 return subprog_decl;
1882 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
1883 body. This routine needs to be invoked before processing the declarations
1884 appearing in the subprogram. */
1887 begin_subprog_body (tree subprog_decl)
/* NOTE(review): fragmentary listing -- the return-type line, opening brace
   and the declaration of PARAM_DECL (original lines 1885-1890) are not
   visible here; confirm against the full source before editing. */
/* Make SUBPROG_DECL the function currently being compiled and announce it. */
1891 current_function_decl = subprog_decl;
1892 announce_function (subprog_decl);
1894 /* Enter a new binding level and show that all the parameters belong to
/* NOTE(review): the comment above is truncated mid-sentence in this listing
   (presumably "...belong to this function"); the binding-level push itself
   (original ~line 1895) is also elided. */
/* Re-parent every PARM_DECL so its DECL_CONTEXT is the subprogram. */
1897 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
1898 param_decl = TREE_CHAIN (param_decl))
1899 DECL_CONTEXT (param_decl) = subprog_decl;
1901 make_decl_rtl (subprog_decl);
1903 /* We handle pending sizes via the elaboration of types, so we don't need to
1904 save them. This causes them to be marked as part of the outer function
1905 and then discarded. */
1906 get_pending_sizes ();
1909 /* Finish the definition of the current subprogram BODY and finalize it. */
1912 end_subprog_body (tree body)
/* NOTE(review): fragmentary listing -- return type, opening brace and some
   intermediate lines are elided; verify against the full source. */
1914 tree fndecl = current_function_decl;
1916 /* Mark the BLOCK for this level as being for this function and pop the
1917 level. Since the vars in it are the parameters, clear them. */
1918 BLOCK_VARS (current_binding_level->block) = 0;
1919 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
1920 DECL_INITIAL (fndecl) = current_binding_level->block;
1923 /* We handle pending sizes via the elaboration of types, so we don't
1924 need to save them. */
1925 get_pending_sizes ();
1927 /* Mark the RESULT_DECL as being in this subprogram. */
1928 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
/* Attach the GENERIC body and pop back to the enclosing function (if any;
   DECL_CONTEXT is the outer function for nested subprograms). */
1930 DECL_SAVED_TREE (fndecl) = body;
1932 current_function_decl = DECL_CONTEXT (fndecl);
1935 /* We cannot track the location of errors past this point. */
1936 error_gnat_node = Empty;
1938 /* If we're only annotating types, don't actually compile this function. */
1939 if (type_annotate_only)
/* NOTE(review): the body of this IF (presumably an early return, original
   ~line 1940) is elided from this listing. */
1942 /* Dump functions before gimplification. */
1943 dump_function (TDI_original, fndecl);
1945 /* ??? This special handling of nested functions is probably obsolete. */
/* Top-level function: hand it off to cgraph for compilation.  A nested
   function is only registered so it lands on its parent's nested list. */
1946 if (!DECL_CONTEXT (fndecl))
1947 cgraph_finalize_function (fndecl, false);
/* NOTE(review): the ELSE keyword between the two branches (original
   ~line 1948) is elided from this listing. */
1949 /* Register this function with cgraph just far enough to get it
1950 added to our parent's nested function list. */
1951 (void) cgraph_node (fndecl);
/* Language hook: record builtin function DECL in the current binding level
   so it is visible to the Ada front end.  NOTE(review): the return type,
   braces and the `return decl;` line are elided from this listing. */
1955 gnat_builtin_function (tree decl)
1957 gnat_pushdecl (decl, Empty);
1961 /* Return an integer type with the number of bits of precision given by
1962 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
1963 it is a signed type. */
1966 gnat_type_for_size (unsigned precision, int unsignedp)
/* NOTE(review): fragmentary listing -- return type, braces, the declarations
   of T and TYPE_NAME, the `if (unsignedp)`/`else` around the two make_*
   calls and the final `return t;` are elided. */
/* Fast path: types up to 2 * MAX_BITS_PER_WORD bits are memoized in the
   signed_and_unsigned_types cache, indexed by precision and signedness. */
1971 if (precision <= 2 * MAX_BITS_PER_WORD
1972 && signed_and_unsigned_types[precision][unsignedp])
1973 return signed_and_unsigned_types[precision][unsignedp];
1976 t = make_unsigned_type (precision);
1978 t = make_signed_type (precision);
/* Memoize the freshly built type when it fits in the cache. */
1980 if (precision <= 2 * MAX_BITS_PER_WORD)
1981 signed_and_unsigned_types[precision][unsignedp] = t;
/* Give the type a debugger-friendly name, e.g. "UNSIGNED_32" or "SIGNED_8". */
1985 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
1986 TYPE_NAME (t) = get_identifier (type_name);
1992 /* Likewise for floating-point types. */
1995 float_type_for_precision (int precision, enum machine_mode mode)
/* NOTE(review): fragmentary listing -- return type, braces, declarations of
   T and TYPE_NAME, the layout_type call and `return t;` are elided. */
/* REAL_TYPEs are memoized per machine mode in the float_types cache. */
2000 if (float_types[(int) mode])
2001 return float_types[(int) mode];
2003 float_types[(int) mode] = t = make_node (REAL_TYPE);
2004 TYPE_PRECISION (t) = precision;
/* Sanity check: the requested precision must map back to MODE. */
2007 gcc_assert (TYPE_MODE (t) == mode);
/* Name the type for debug output, e.g. "FLOAT_64". */
2010 sprintf (type_name, "FLOAT_%d", precision);
2011 TYPE_NAME (t) = get_identifier (type_name);
2017 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
2018 an unsigned type; otherwise a signed type is returned. */
2021 gnat_type_for_mode (enum machine_mode mode, int unsignedp)
/* NOTE(review): fragmentary listing -- return type, braces, some `return`
   lines (e.g. for BLKmode and COMPLEX modes) and the fallthrough NULL_TREE
   return are elided. */
2023 if (mode == BLKmode)
2026 if (mode == VOIDmode)
2027 return void_type_node;
2029 if (COMPLEX_MODE_P (mode))
/* Scalar float modes map through the memoized REAL_TYPE builder. */
2032 if (SCALAR_FLOAT_MODE_P (mode))
2033 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
/* Scalar integer modes map through the memoized integer-type builder. */
2035 if (SCALAR_INT_MODE_P (mode))
2036 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
/* Vector modes: recurse on the element mode, then build the vector type. */
2038 if (VECTOR_MODE_P (mode))
2040 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2041 tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
2043 return build_vector_type_for_mode (inner_type, mode);
2049 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
2052 gnat_unsigned_type (tree type_node)
/* NOTE(review): fragmentary listing -- return type, braces and the final
   `return type;` are elided. */
2054 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
/* For modular (Ada `mod`) types, keep a link back to the original type via
   TREE_TYPE on a private copy, so modular semantics can be recovered. */
2056 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2058 type = copy_node (type);
2059 TREE_TYPE (type) = type_node;
/* Likewise when TYPE_NODE is a subtype whose base type is modular. */
2061 else if (TREE_TYPE (type_node)
2062 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2063 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2065 type = copy_node (type);
2066 TREE_TYPE (type) = TREE_TYPE (type_node);
2072 /* Return the signed version of a TYPE_NODE, a scalar type. */
2075 gnat_signed_type (tree type_node)
/* NOTE(review): fragmentary listing -- return type, braces and the final
   `return type;` are elided.  Mirrors gnat_unsigned_type with unsignedp=0. */
2077 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
/* Preserve the modular-type back-link on a private copy (see
   gnat_unsigned_type above for the rationale). */
2079 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2081 type = copy_node (type);
2082 TREE_TYPE (type) = type_node;
2084 else if (TREE_TYPE (type_node)
2085 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2086 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2088 type = copy_node (type);
2089 TREE_TYPE (type) = TREE_TYPE (type_node);
2095 /* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
2096 transparently converted to each other. */
2099 gnat_types_compatible_p (tree t1, tree t2)
/* NOTE(review): fragmentary listing -- return type, braces and the
   `return 1;` / `return 0;` lines after each test are elided. */
2101 enum tree_code code;
2103 /* This is the default criterion. */
2104 if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
2107 /* We only check structural equivalence here. */
2108 if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
2111 /* Vector types are also compatible if they have the same number of subparts
2112 and the same form of (scalar) element type. */
2113 if (code == VECTOR_TYPE
2114 && TYPE_VECTOR_SUBPARTS (t1) == TYPE_VECTOR_SUBPARTS (t2)
2115 && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
2116 && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
2119 /* Array types are also compatible if they are constrained and have
2120 the same component type and the same domain. */
2121 if (code == ARRAY_TYPE
2122 && TREE_TYPE (t1) == TREE_TYPE (t2)
2123 && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
2124 || (TYPE_DOMAIN (t1)
/* NOTE(review): a conjunct (original line 2125, presumably testing
   TYPE_DOMAIN (t2)) is elided between these lines. */
2126 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
2127 TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
2128 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
2129 TYPE_MAX_VALUE (TYPE_DOMAIN (t2))))))
2132 /* Padding record types are also compatible if they pad the same
2133 type and have the same constant size. */
2134 if (code == RECORD_TYPE
2135 && TYPE_PADDING_P (t1) && TYPE_PADDING_P (t2)
2136 && TREE_TYPE (TYPE_FIELDS (t1)) == TREE_TYPE (TYPE_FIELDS (t2))
2137 && tree_int_cst_equal (TYPE_SIZE (t1), TYPE_SIZE (t2)))
2143 /* EXP is an expression for the size of an object. If this size contains
2144 discriminant references, replace them with the maximum (if MAX_P) or
2145 minimum (if !MAX_P) possible value of the discriminant. */
2148 max_size (tree exp, bool max_p)
/* NOTE(review): fragmentary listing -- return type, braces, several case
   labels, `break`s and local declarations (T, N, I, ARGARRAY) are elided;
   the switch structure below must be read with that in mind. */
2150 enum tree_code code = TREE_CODE (exp);
2151 tree type = TREE_TYPE (exp);
2153 switch (TREE_CODE_CLASS (code))
2155 case tcc_declaration:
/* Calls: first try to inline the call; if that succeeds, recurse on the
   result, otherwise rebuild the call with max_size applied to each
   argument. */
2160 if (code == CALL_EXPR)
2165 t = maybe_inline_call_in_expr (exp);
2167 return max_size (t, max_p);
2169 n = call_expr_nargs (exp);
2171 argarray = (tree *) alloca (n * sizeof (tree));
2172 for (i = 0; i < n; i++)
2173 argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
2174 return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
2179 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
2180 modify. Otherwise, we treat it like a variable. */
2181 if (!CONTAINS_PLACEHOLDER_P (exp))
/* A discriminant reference: substitute the bound of its type.  The second
   operand of the (component) reference supplies the discriminant type. */
2184 type = TREE_TYPE (TREE_OPERAND (exp, 1));
2186 max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), true);
/* Comparisons collapse to a 0/1 size; take the max or min of those. */
2188 case tcc_comparison:
2189 return max_p ? size_one_node : size_zero_node;
2193 case tcc_expression:
/* Dispatch on arity: the elided `case 1:` / `case 2:` / `case 3:` labels
   separate the unary, binary and ternary handling below. */
2194 switch (TREE_CODE_LENGTH (code))
2197 if (code == NON_LVALUE_EXPR)
2198 return max_size (TREE_OPERAND (exp, 0), max_p);
/* Unary op: NEGATE flips the max/min sense for its operand. */
2201 fold_build1 (code, type,
2202 max_size (TREE_OPERAND (exp, 0),
2203 code == NEGATE_EXPR ? !max_p : max_p));
2206 if (code == COMPOUND_EXPR)
2207 return max_size (TREE_OPERAND (exp, 1), max_p);
2209 /* Calculate "(A ? B : C) - D" as "A ? B - D : C - D" which
2210 may provide a tighter bound on max_size. */
2211 if (code == MINUS_EXPR
2212 && TREE_CODE (TREE_OPERAND (exp, 0)) == COND_EXPR)
2214 tree lhs = fold_build2 (MINUS_EXPR, type,
2215 TREE_OPERAND (TREE_OPERAND (exp, 0), 1),
2216 TREE_OPERAND (exp, 1));
2217 tree rhs = fold_build2 (MINUS_EXPR, type,
2218 TREE_OPERAND (TREE_OPERAND (exp, 0), 2),
2219 TREE_OPERAND (exp, 1));
2220 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2221 max_size (lhs, max_p),
2222 max_size (rhs, max_p));
/* Generic binary op: MINUS flips the max/min sense for the subtrahend. */
2226 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
2227 tree rhs = max_size (TREE_OPERAND (exp, 1),
2228 code == MINUS_EXPR ? !max_p : max_p);
2230 /* Special-case wanting the maximum value of a MIN_EXPR.
2231 In that case, if one side overflows, return the other.
2232 sizetype is signed, but we know sizes are non-negative.
2233 Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
2234 overflowing or the maximum possible value and the RHS
/* NOTE(review): the tail of the comment above and the IF conditions that
   start the overflow special-cases (original lines 2235-2237, 2240-2242,
   2245, 2251-2252) are elided; the dangling `&&` lines below belong to
   those elided conditions. */
2238 && TREE_CODE (rhs) == INTEGER_CST
2239 && TREE_OVERFLOW (rhs))
2243 && TREE_CODE (lhs) == INTEGER_CST
2244 && TREE_OVERFLOW (lhs))
2246 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
2247 && ((TREE_CODE (lhs) == INTEGER_CST
2248 && TREE_OVERFLOW (lhs))
2249 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
2250 && !TREE_CONSTANT (rhs))
2253 return fold_build2 (code, type, lhs, rhs);
/* Ternary ops: look through SAVE_EXPR; for COND_EXPR take the max/min of
   the two arms. */
2257 if (code == SAVE_EXPR)
2259 else if (code == COND_EXPR)
2260 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2261 max_size (TREE_OPERAND (exp, 1), max_p),
2262 max_size (TREE_OPERAND (exp, 2), max_p));
2265 /* Other tree classes cannot happen. */
2273 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2274 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2275 Return a constructor for the template. */
2278 build_template (tree template_type, tree array_type, tree expr)
/* NOTE(review): fragmentary listing -- return type, braces and the
   declaration of FIELD are elided. */
2280 tree template_elts = NULL_TREE;
2281 tree bound_list = NULL_TREE;
/* Strip padding / justified-modular wrappers to reach the real array. */
2284 while (TREE_CODE (array_type) == RECORD_TYPE
2285 && (TYPE_PADDING_P (array_type)
2286 || TYPE_JUSTIFIED_MODULAR_P (array_type)))
2287 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
2289 if (TREE_CODE (array_type) == ARRAY_TYPE
2290 || (TREE_CODE (array_type) == INTEGER_TYPE
2291 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
2292 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
2294 /* First make the list for a CONSTRUCTOR for the template. Go down the
2295 field list of the template instead of the type chain because this
2296 array might be an Ada array of arrays and we can't tell where the
2297 nested arrays stop being the underlying object. */
/* Fields come in (min, max) pairs, hence the double TREE_CHAIN step.
   NOTE(review): the loop-increment condition line (original ~2300, choosing
   between advancing BOUND_LIST and descending ARRAY_TYPE) is elided. */
2299 for (field = TYPE_FIELDS (template_type); field;
2301 ? (bound_list = TREE_CHAIN (bound_list))
2302 : (array_type = TREE_TYPE (array_type))),
2303 field = TREE_CHAIN (TREE_CHAIN (field)))
2305 tree bounds, min, max;
2307 /* If we have a bound list, get the bounds from there. Likewise
2308 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
2309 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
2310 This will give us a maximum range. */
/* NOTE(review): the `if (bound_list)` test (original ~2311) and the
   final `else gcc_unreachable ();` arm are elided from this listing. */
2312 bounds = TREE_VALUE (bound_list);
2313 else if (TREE_CODE (array_type) == ARRAY_TYPE)
2314 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
2315 else if (expr && TREE_CODE (expr) == PARM_DECL
2316 && DECL_BY_COMPONENT_PTR_P (expr))
2317 bounds = TREE_TYPE (field);
/* Convert each bound to the type of its template field. */
2321 min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
2322 max = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MAX_VALUE (bounds));
2324 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
2325 substitute it from OBJECT. */
2326 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr)
2327 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
/* Accumulate in reverse; nreverse below restores declaration order. */
2329 template_elts = tree_cons (TREE_CHAIN (field), max,
2330 tree_cons (field, min, template_elts));
2333 return gnat_build_constructor (template_type, nreverse (template_elts));
2336 /* Build a 32bit VMS descriptor from a Mechanism_Type, which must specify
2337 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2338 in the type contains in its DECL_INITIAL the expression to use when
2339 a constructor is made for the type. GNAT_ENTITY is an entity used
2340 to print out an error message if the mechanism cannot be applied to
2341 an object of that type and also for the name. */
2344 build_vms_descriptor32 (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
/* NOTE(review): fragmentary listing -- return type, braces, several local
   declarations (NDIM, I, INNER_TYPE, IDX_ARR, DTYPE, KLASS, TEM, FNAME,
   IDX_LENGTH), most case labels / break statements of the switches, and
   the final `return record_type;` are elided. */
2346 tree record_type = make_node (RECORD_TYPE);
2347 tree pointer32_type;
2348 tree field_list = 0;
2357 /* If TYPE is an unconstrained array, use the underlying array type. */
2358 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2359 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2361 /* If this is an array, compute the number of dimensions in the array,
2362 get the index types, and point to the inner type. */
2363 if (TREE_CODE (type) != ARRAY_TYPE)
/* NOTE(review): the non-array arm (original ~2364, presumably ndim = 0)
   and the `else` are elided. */
2366 for (ndim = 1, inner_type = type;
2367 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2368 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2369 ndim++, inner_type = TREE_TYPE (inner_type))
2372 idx_arr = (tree *) alloca (ndim * sizeof (tree));
/* Fortran-convention arrays (except NCA) store dimensions in reverse. */
2374 if (mech != By_Descriptor_NCA && mech != By_Short_Descriptor_NCA
2375 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2376 for (i = ndim - 1, inner_type = type;
2378 i--, inner_type = TREE_TYPE (inner_type))
2379 idx_arr[i] = TYPE_DOMAIN (inner_type);
2381 for (i = 0, inner_type = type;
2383 i++, inner_type = TREE_TYPE (inner_type))
2384 idx_arr[i] = TYPE_DOMAIN (inner_type);
2386 /* Now get the DTYPE value. */
/* The numeric codes assigned below are VMS descriptor DTYPE codes; the
   case labels for each TREE_CODE / bitsize are elided from this listing. */
2387 switch (TREE_CODE (type))
2392 if (TYPE_VAX_FLOATING_POINT_P (type))
2393 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2406 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2409 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2412 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2415 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2418 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2421 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2427 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2431 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2432 && TYPE_VAX_FLOATING_POINT_P (type))
2433 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2445 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2456 /* Get the CLASS value. */
/* NOTE(review): the `switch (mech)` line and the `klass = ...; break;`
   bodies for each group of case labels are elided. */
2459 case By_Descriptor_A:
2460 case By_Short_Descriptor_A:
2463 case By_Descriptor_NCA:
2464 case By_Short_Descriptor_NCA:
2467 case By_Descriptor_SB:
2468 case By_Short_Descriptor_SB:
2472 case By_Short_Descriptor:
2473 case By_Descriptor_S:
2474 case By_Short_Descriptor_S:
2480 /* Make the type for a descriptor for VMS. The first four fields
2481 are the same for all types. */
2484 = chainon (field_list,
2485 make_descriptor_field
2486 ("LENGTH", gnat_type_for_size (16, 1), record_type,
2487 size_in_bytes ((mech == By_Descriptor_A ||
2488 mech == By_Short_Descriptor_A)
2489 ? inner_type : type)));
2491 field_list = chainon (field_list,
2492 make_descriptor_field ("DTYPE",
2493 gnat_type_for_size (8, 1),
2494 record_type, size_int (dtype)));
2495 field_list = chainon (field_list,
2496 make_descriptor_field ("CLASS",
2497 gnat_type_for_size (8, 1),
2498 record_type, size_int (klass)));
2500 /* Of course this will crash at run-time if the address space is not
2501 within the low 32 bits, but there is nothing else we can do. */
2502 pointer32_type = build_pointer_type_for_mode (type, SImode, false);
2505 = chainon (field_list,
2506 make_descriptor_field
2507 ("POINTER", pointer32_type, record_type,
2508 build_unary_op (ADDR_EXPR,
2510 build0 (PLACEHOLDER_EXPR, type))));
/* Mechanism-specific trailing fields; the `switch (mech)` line and break
   statements are elided from this listing. */
2515 case By_Short_Descriptor:
2516 case By_Descriptor_S:
2517 case By_Short_Descriptor_S:
2520 case By_Descriptor_SB:
2521 case By_Short_Descriptor_SB:
/* SB descriptors carry the bounds of the single dimension directly. */
2523 = chainon (field_list,
2524 make_descriptor_field
2525 ("SB_L1", gnat_type_for_size (32, 1), record_type,
2526 TREE_CODE (type) == ARRAY_TYPE
2527 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2529 = chainon (field_list,
2530 make_descriptor_field
2531 ("SB_U1", gnat_type_for_size (32, 1), record_type,
2532 TREE_CODE (type) == ARRAY_TYPE
2533 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2536 case By_Descriptor_A:
2537 case By_Short_Descriptor_A:
2538 case By_Descriptor_NCA:
2539 case By_Short_Descriptor_NCA:
2540 field_list = chainon (field_list,
2541 make_descriptor_field ("SCALE",
2542 gnat_type_for_size (8, 1),
2546 field_list = chainon (field_list,
2547 make_descriptor_field ("DIGITS",
2548 gnat_type_for_size (8, 1),
2553 = chainon (field_list,
2554 make_descriptor_field
2555 ("AFLAGS", gnat_type_for_size (8, 1), record_type,
2556 size_int ((mech == By_Descriptor_NCA ||
2557 mech == By_Short_Descriptor_NCA)
2559 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2560 : (TREE_CODE (type) == ARRAY_TYPE
2561 && TYPE_CONVENTION_FORTRAN_P (type)
2564 field_list = chainon (field_list,
2565 make_descriptor_field ("DIMCT",
2566 gnat_type_for_size (8, 1),
2570 field_list = chainon (field_list,
2571 make_descriptor_field ("ARSIZE",
2572 gnat_type_for_size (32, 1),
2574 size_in_bytes (type)));
2576 /* Now build a pointer to the 0,0,0... element. */
2577 tem = build0 (PLACEHOLDER_EXPR, type);
2578 for (i = 0, inner_type = type; i < ndim;
2579 i++, inner_type = TREE_TYPE (inner_type))
2580 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
2581 convert (TYPE_DOMAIN (inner_type), size_zero_node),
2582 NULL_TREE, NULL_TREE);
2585 = chainon (field_list,
2586 make_descriptor_field
2588 build_pointer_type_for_mode (inner_type, SImode, false),
2591 build_pointer_type_for_mode (inner_type, SImode,
2595 /* Next come the addressing coefficients. */
2596 tem = size_one_node;
2597 for (i = 0; i < ndim; i++)
/* Stride for dimension i = product of extents of the previous dims. */
2601 = size_binop (MULT_EXPR, tem,
2602 size_binop (PLUS_EXPR,
2603 size_binop (MINUS_EXPR,
2604 TYPE_MAX_VALUE (idx_arr[i]),
2605 TYPE_MIN_VALUE (idx_arr[i])),
/* Field name: "S<i>" for NCA descriptors, "M<i>" otherwise. */
2608 fname[0] = ((mech == By_Descriptor_NCA ||
2609 mech == By_Short_Descriptor_NCA) ? 'S' : 'M');
2610 fname[1] = '0' + i, fname[2] = 0;
2612 = chainon (field_list,
2613 make_descriptor_field (fname,
2614 gnat_type_for_size (32, 1),
2615 record_type, idx_length));
2617 if (mech == By_Descriptor_NCA || mech == By_Short_Descriptor_NCA)
2621 /* Finally here are the bounds. */
2622 for (i = 0; i < ndim; i++)
/* L<i> = lower bound; the U<i> naming assignment (original ~2633) for the
   upper bound is elided between the two chainon calls. */
2626 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2628 = chainon (field_list,
2629 make_descriptor_field
2630 (fname, gnat_type_for_size (32, 1), record_type,
2631 TYPE_MIN_VALUE (idx_arr[i])));
2635 = chainon (field_list,
2636 make_descriptor_field
2637 (fname, gnat_type_for_size (32, 1), record_type,
2638 TYPE_MAX_VALUE (idx_arr[i])));
/* Unsupported mechanism: report it against GNAT_ENTITY. */
2643 post_error ("unsupported descriptor type for &", gnat_entity);
2646 TYPE_NAME (record_type) = create_concat_name (gnat_entity, "DESC");
2647 finish_record_type (record_type, field_list, 0, false);
2651 /* Build a 64bit VMS descriptor from a Mechanism_Type, which must specify
2652 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2653 in the type contains in its DECL_INITIAL the expression to use when
2654 a constructor is made for the type. GNAT_ENTITY is an entity used
2655 to print out an error message if the mechanism cannot be applied to
2656 an object of that type and also for the name. */
2659 build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
/* 64-bit sibling of build_vms_descriptor32 above: same overall shape
   (strip unconstrained wrapper, collect dimensions, compute DTYPE/CLASS,
   emit fields) but with DImode pointers and 64-bit field widths, plus the
   MBO/MBMO marker fields.  NOTE(review): fragmentary listing -- return
   type, braces, several local declarations, most case labels and break
   statements are elided. */
2661 tree record64_type = make_node (RECORD_TYPE);
2662 tree pointer64_type;
2663 tree field_list64 = 0;
2672 /* If TYPE is an unconstrained array, use the underlying array type. */
2673 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2674 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2676 /* If this is an array, compute the number of dimensions in the array,
2677 get the index types, and point to the inner type. */
2678 if (TREE_CODE (type) != ARRAY_TYPE)
2681 for (ndim = 1, inner_type = type;
2682 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2683 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2684 ndim++, inner_type = TREE_TYPE (inner_type))
2687 idx_arr = (tree *) alloca (ndim * sizeof (tree));
/* Fortran-convention arrays (except NCA) store dimensions in reverse. */
2689 if (mech != By_Descriptor_NCA
2690 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2691 for (i = ndim - 1, inner_type = type;
2693 i--, inner_type = TREE_TYPE (inner_type))
2694 idx_arr[i] = TYPE_DOMAIN (inner_type);
2696 for (i = 0, inner_type = type;
2698 i++, inner_type = TREE_TYPE (inner_type))
2699 idx_arr[i] = TYPE_DOMAIN (inner_type);
2701 /* Now get the DTYPE value. */
/* Same VMS DTYPE codes as in build_vms_descriptor32; case labels elided. */
2702 switch (TREE_CODE (type))
2707 if (TYPE_VAX_FLOATING_POINT_P (type))
2708 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2721 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2724 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2727 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2730 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2733 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2736 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2742 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2746 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2747 && TYPE_VAX_FLOATING_POINT_P (type))
2748 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2760 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2771 /* Get the CLASS value. */
/* NOTE(review): the `switch (mech)` line and `klass = ...; break;` bodies
   are elided. */
2774 case By_Descriptor_A:
2777 case By_Descriptor_NCA:
2780 case By_Descriptor_SB:
2784 case By_Descriptor_S:
2790 /* Make the type for a 64bit descriptor for VMS. The first six fields
2791 are the same for all types. */
/* MBO ("must be one") / MBMO ("must be minus one") are fixed marker fields
   of the 64-bit descriptor layout. */
2793 field_list64 = chainon (field_list64,
2794 make_descriptor_field ("MBO",
2795 gnat_type_for_size (16, 1),
2796 record64_type, size_int (1)));
2798 field_list64 = chainon (field_list64,
2799 make_descriptor_field ("DTYPE",
2800 gnat_type_for_size (8, 1),
2801 record64_type, size_int (dtype)));
2802 field_list64 = chainon (field_list64,
2803 make_descriptor_field ("CLASS",
2804 gnat_type_for_size (8, 1),
2805 record64_type, size_int (klass)));
2807 field_list64 = chainon (field_list64,
2808 make_descriptor_field ("MBMO",
2809 gnat_type_for_size (32, 1),
2810 record64_type, ssize_int (-1)));
2813 = chainon (field_list64,
2814 make_descriptor_field
2815 ("LENGTH", gnat_type_for_size (64, 1), record64_type,
2816 size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));
2818 pointer64_type = build_pointer_type_for_mode (type, DImode, false);
2821 = chainon (field_list64,
2822 make_descriptor_field
2823 ("POINTER", pointer64_type, record64_type,
2824 build_unary_op (ADDR_EXPR,
2826 build0 (PLACEHOLDER_EXPR, type))));
/* Mechanism-specific trailing fields; switch line and breaks elided. */
2831 case By_Descriptor_S:
2834 case By_Descriptor_SB:
2836 = chainon (field_list64,
2837 make_descriptor_field
2838 ("SB_L1", gnat_type_for_size (64, 1), record64_type,
2839 TREE_CODE (type) == ARRAY_TYPE
2840 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2842 = chainon (field_list64,
2843 make_descriptor_field
2844 ("SB_U1", gnat_type_for_size (64, 1), record64_type,
2845 TREE_CODE (type) == ARRAY_TYPE
2846 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2849 case By_Descriptor_A:
2850 case By_Descriptor_NCA:
2851 field_list64 = chainon (field_list64,
2852 make_descriptor_field ("SCALE",
2853 gnat_type_for_size (8, 1),
2857 field_list64 = chainon (field_list64,
2858 make_descriptor_field ("DIGITS",
2859 gnat_type_for_size (8, 1),
2864 = chainon (field_list64,
2865 make_descriptor_field
2866 ("AFLAGS", gnat_type_for_size (8, 1), record64_type,
2867 size_int (mech == By_Descriptor_NCA
2869 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2870 : (TREE_CODE (type) == ARRAY_TYPE
2871 && TYPE_CONVENTION_FORTRAN_P (type)
2874 field_list64 = chainon (field_list64,
2875 make_descriptor_field ("DIMCT",
2876 gnat_type_for_size (8, 1),
2880 field_list64 = chainon (field_list64,
2881 make_descriptor_field ("MBZ",
2882 gnat_type_for_size (32, 1),
2885 field_list64 = chainon (field_list64,
2886 make_descriptor_field ("ARSIZE",
2887 gnat_type_for_size (64, 1),
2889 size_in_bytes (type)));
2891 /* Now build a pointer to the 0,0,0... element. */
2892 tem = build0 (PLACEHOLDER_EXPR, type);
2893 for (i = 0, inner_type = type; i < ndim;
2894 i++, inner_type = TREE_TYPE (inner_type))
2895 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
2896 convert (TYPE_DOMAIN (inner_type), size_zero_node),
2897 NULL_TREE, NULL_TREE);
2900 = chainon (field_list64,
2901 make_descriptor_field
2903 build_pointer_type_for_mode (inner_type, DImode, false),
2906 build_pointer_type_for_mode (inner_type, DImode,
2910 /* Next come the addressing coefficients. */
2911 tem = size_one_node;
2912 for (i = 0; i < ndim; i++)
/* Stride for dimension i = product of extents of the previous dims. */
2916 = size_binop (MULT_EXPR, tem,
2917 size_binop (PLUS_EXPR,
2918 size_binop (MINUS_EXPR,
2919 TYPE_MAX_VALUE (idx_arr[i]),
2920 TYPE_MIN_VALUE (idx_arr[i])),
/* Field name: "S<i>" for NCA descriptors, "M<i>" otherwise. */
2923 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
2924 fname[1] = '0' + i, fname[2] = 0;
2926 = chainon (field_list64,
2927 make_descriptor_field (fname,
2928 gnat_type_for_size (64, 1),
2929 record64_type, idx_length));
2931 if (mech == By_Descriptor_NCA)
2935 /* Finally here are the bounds. */
2936 for (i = 0; i < ndim; i++)
/* L<i> = lower bound; the U<i> naming assignment (original ~2947) for the
   upper bound is elided between the two chainon calls. */
2940 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2942 = chainon (field_list64,
2943 make_descriptor_field
2944 (fname, gnat_type_for_size (64, 1), record64_type,
2945 TYPE_MIN_VALUE (idx_arr[i])));
2949 = chainon (field_list64,
2950 make_descriptor_field
2951 (fname, gnat_type_for_size (64, 1), record64_type,
2952 TYPE_MAX_VALUE (idx_arr[i])));
/* Unsupported mechanism: report it against GNAT_ENTITY. */
2957 post_error ("unsupported descriptor type for &", gnat_entity);
2960 TYPE_NAME (record64_type) = create_concat_name (gnat_entity, "DESC64");
2961 finish_record_type (record64_type, field_list64, 0, false);
2962 return record64_type;
2965 /* Utility routine for above code to make a field. */
/* Create a FIELD_DECL named NAME of type TYPE in REC_TYPE and stash
   INITIAL in its DECL_INITIAL -- the descriptor builders above retrieve
   that expression when constructing a descriptor value.
   NOTE(review): return type, braces, the `tree field` keyword and the
   `return field;` line are elided from this listing. */
2968 make_descriptor_field (const char *name, tree type,
2969 tree rec_type, tree initial)
2972 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
2974 DECL_INITIAL (field) = initial;
2978 /* Convert GNU_EXPR, a pointer to a 64bit VMS descriptor, to GNU_TYPE, a
2979 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
2980 which the VMS descriptor is passed. */
2983 convert_vms_descriptor64 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
/* NOTE(review): fragmentary listing -- return type, braces, several case
   labels / break statements and the final fallback return are elided. */
2985 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
2986 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
2987 /* The CLASS field is the 3rd field in the descriptor. */
2988 tree klass = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type)));
2989 /* The POINTER field is the 6th field in the descriptor. */
2990 tree pointer64 = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (klass)));
2992 /* Retrieve the value of the POINTER field. */
2994 = build3 (COMPONENT_REF, TREE_TYPE (pointer64), desc, pointer64, NULL_TREE);
/* Thin pointer target: the data address alone suffices. */
2996 if (POINTER_TYPE_P (gnu_type))
2997 return convert (gnu_type, gnu_expr64);
/* Fat pointer target: must also synthesize a bounds template. */
2999 else if (TYPE_IS_FAT_POINTER_P (gnu_type))
3001 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
3002 tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
3003 tree template_type = TREE_TYPE (p_bounds_type);
3004 tree min_field = TYPE_FIELDS (template_type);
3005 tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
3006 tree template_tree, template_addr, aflags, dimct, t, u;
3007 /* See the head comment of build_vms_descriptor. */
3008 int iklass = TREE_INT_CST_LOW (DECL_INITIAL (klass));
3009 tree lfield, ufield;
3011 /* Convert POINTER to the type of the P_ARRAY field. */
3012 gnu_expr64 = convert (p_array_type, gnu_expr64);
/* NOTE(review): the `switch (iklass)` line is elided here. */
3016 case 1: /* Class S */
3017 case 15: /* Class SB */
3018 /* Build {1, LENGTH} template; LENGTH64 is the 5th field. */
3019 t = TREE_CHAIN (TREE_CHAIN (klass));
3020 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3021 t = tree_cons (min_field,
3022 convert (TREE_TYPE (min_field), integer_one_node),
3023 tree_cons (max_field,
3024 convert (TREE_TYPE (max_field), t),
3026 template_tree = gnat_build_constructor (template_type, t);
3027 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template_tree);
3029 /* For class S, we are done. */
/* NOTE(review): the break / fallthrough structure between class S and
   class SB handling (original lines 3030-3032) is elided. */
3033 /* Test that we really have a SB descriptor, like DEC Ada. */
3034 t = build3 (COMPONENT_REF, TREE_TYPE (klass), desc, klass, NULL);
3035 u = convert (TREE_TYPE (klass), DECL_INITIAL (klass));
3036 u = build_binary_op (EQ_EXPR, integer_type_node, t, u);
3037 /* If so, there is already a template in the descriptor and
3038 it is located right after the POINTER field. The fields are
3039 64bits so they must be repacked. */
3040 t = TREE_CHAIN (pointer64);
3041 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3042 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
/* NOTE(review): the advance of T to the upper-bound field (original
   ~3044, `t = TREE_CHAIN (t);`) is elided. */
3045 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3047 (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (template_type))), ufield);
3049 /* Build the template in the form of a constructor. */
3050 t = tree_cons (TYPE_FIELDS (template_type), lfield,
3051 tree_cons (TREE_CHAIN (TYPE_FIELDS (template_type)),
3052 ufield, NULL_TREE));
3053 template_tree = gnat_build_constructor (template_type, t);
3055 /* Otherwise use the {1, LENGTH} template we build above. */
3056 template_addr = build3 (COND_EXPR, p_bounds_type, u,
3057 build_unary_op (ADDR_EXPR, p_bounds_type,
3062 case 4: /* Class A */
3063 /* The AFLAGS field is the 3rd field after the pointer in the
3065 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer64)));
3066 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3067 /* The DIMCT field is the next field in the descriptor after
3070 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3071 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
3072 or FL_COEFF or FL_BOUNDS not set. */
/* 192 = FL_COEFF | FL_BOUNDS bit mask tested against AFLAGS below. */
3073 u = build_int_cst (TREE_TYPE (aflags), 192);
3074 u = build_binary_op (TRUTH_OR_EXPR, integer_type_node,
3075 build_binary_op (NE_EXPR, integer_type_node,
3077 convert (TREE_TYPE (dimct),
3079 build_binary_op (NE_EXPR, integer_type_node,
3080 build2 (BIT_AND_EXPR,
3084 /* There is already a template in the descriptor and it is located
3085 in block 3. The fields are 64bits so they must be repacked. */
3086 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN
3088 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3089 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3092 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3094 (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (template_type))), ufield);
3096 /* Build the template in the form of a constructor. */
3097 t = tree_cons (TYPE_FIELDS (template_type), lfield,
3098 tree_cons (TREE_CHAIN (TYPE_FIELDS (template_type)),
3099 ufield, NULL_TREE));
3100 template_tree = gnat_build_constructor (template_type, t);
/* Guard the template with the validity check: raise Constraint_Error at
   run time when the descriptor is not usable as a 1-dim array. */
3101 template_tree = build3 (COND_EXPR, template_type, u,
3102 build_call_raise (CE_Length_Check_Failed, Empty,
3103 N_Raise_Constraint_Error),
3106 = build_unary_op (ADDR_EXPR, p_bounds_type, template_tree);
3109 case 10: /* Class NCA */
/* NCA descriptors are not handled: report and fall back to a null
   template address. */
3111 post_error ("unsupported descriptor type for &", gnat_subprog);
3112 template_addr = integer_zero_node;
3116 /* Build the fat pointer in the form of a constructor. */
3117 t = tree_cons (TYPE_FIELDS (gnu_type), gnu_expr64,
3118 tree_cons (TREE_CHAIN (TYPE_FIELDS (gnu_type)),
3119 template_addr, NULL_TREE));
3120 return gnat_build_constructor (gnu_type, t);
3127 /* Convert GNU_EXPR, a pointer to a 32bit VMS descriptor, to GNU_TYPE, a
3128 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3129 which the VMS descriptor is passed. */
3132 convert_vms_descriptor32 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
3134 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3135 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3136 /* The CLASS field is the 3rd field in the descriptor. */
3137 tree klass = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type)));
3138 /* The POINTER field is the 4th field in the descriptor. */
3139 tree pointer = TREE_CHAIN (klass);
3141 /* Retrieve the value of the POINTER field. */
3143 = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);
/* If the target is a regular pointer type, the descriptor's data pointer
   is all we need.  */
3145 if (POINTER_TYPE_P (gnu_type))
3146 return convert (gnu_type, gnu_expr32);
/* Otherwise build a fat pointer: a (data pointer, template pointer) pair,
   synthesizing the bounds template from the descriptor contents.  */
3148 else if (TYPE_IS_FAT_POINTER_P (gnu_type))
3150 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
3151 tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
3152 tree template_type = TREE_TYPE (p_bounds_type);
3153 tree min_field = TYPE_FIELDS (template_type);
3154 tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
3155 tree template_tree, template_addr, aflags, dimct, t, u;
3156 /* See the head comment of build_vms_descriptor. */
3157 int iklass = TREE_INT_CST_LOW (DECL_INITIAL (klass));
3159 /* Convert POINTER to the type of the P_ARRAY field. */
3160 gnu_expr32 = convert (p_array_type, gnu_expr32);
/* Dispatch on the descriptor class code.  */
3164 case 1: /* Class S */
3165 case 15: /* Class SB */
3166 /* Build {1, LENGTH} template; LENGTH is the 1st field. */
3167 t = TYPE_FIELDS (desc_type);
3168 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3169 t = tree_cons (min_field,
3170 convert (TREE_TYPE (min_field), integer_one_node),
3171 tree_cons (max_field,
3172 convert (TREE_TYPE (max_field), t),
3174 template_tree = gnat_build_constructor (template_type, t);
3175 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template_tree);
3177 /* For class S, we are done. */
3181 /* Test that we really have a SB descriptor, like DEC Ada. */
3182 t = build3 (COMPONENT_REF, TREE_TYPE (klass), desc, klass, NULL);
3183 u = convert (TREE_TYPE (klass), DECL_INITIAL (klass));
3184 u = build_binary_op (EQ_EXPR, integer_type_node, t, u);
3185 /* If so, there is already a template in the descriptor and
3186 it is located right after the POINTER field. */
3187 t = TREE_CHAIN (pointer);
3189 = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3190 /* Otherwise use the {1, LENGTH} template we build above. */
3191 template_addr = build3 (COND_EXPR, p_bounds_type, u,
3192 build_unary_op (ADDR_EXPR, p_bounds_type,
3197 case 4: /* Class A */
3198 /* The AFLAGS field is the 7th field in the descriptor. */
3199 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer)));
3200 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3201 /* The DIMCT field is the 8th field in the descriptor. */
3203 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3204 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
3205 or FL_COEFF or FL_BOUNDS not set. */
/* NOTE(review): 192 is presumably the FL_COEFF | FL_BOUNDS mask —
   confirm against the VMS descriptor flag definitions.  */
3206 u = build_int_cst (TREE_TYPE (aflags), 192);
3207 u = build_binary_op (TRUTH_OR_EXPR, integer_type_node,
3208 build_binary_op (NE_EXPR, integer_type_node,
3210 convert (TREE_TYPE (dimct),
3212 build_binary_op (NE_EXPR, integer_type_node,
3213 build2 (BIT_AND_EXPR,
3217 /* There is already a template in the descriptor and it is
3218 located at the start of block 3 (12th field). */
3219 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (t))));
3221 = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
/* Guard the template use with the run-time check built above: raise
   Constraint_Error if the descriptor shape is unsupported.  */
3222 template_tree = build3 (COND_EXPR, TREE_TYPE (t), u,
3223 build_call_raise (CE_Length_Check_Failed, Empty,
3224 N_Raise_Constraint_Error),
3227 = build_unary_op (ADDR_EXPR, p_bounds_type, template_tree);
3230 case 10: /* Class NCA */
/* Unsupported descriptor classes: report an error on the subprogram and
   return a null template address so compilation can proceed.  */
3232 post_error ("unsupported descriptor type for &", gnat_subprog);
3233 template_addr = integer_zero_node;
3237 /* Build the fat pointer in the form of a constructor. */
3238 t = tree_cons (TYPE_FIELDS (gnu_type), gnu_expr32,
3239 tree_cons (TREE_CHAIN (TYPE_FIELDS (gnu_type)),
3240 template_addr, NULL_TREE));
3242 return gnat_build_constructor (gnu_type, t);
3249 /* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
3250 pointer or fat pointer type. GNU_EXPR_ALT_TYPE is the alternate (32-bit)
3251 pointer type of GNU_EXPR. GNAT_SUBPROG is the subprogram to which the
3252 VMS descriptor is passed. */
3255 convert_vms_descriptor (tree gnu_type, tree gnu_expr, tree gnu_expr_alt_type,
3256 Entity_Id gnat_subprog)
3258 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3259 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
/* MBO is the 1st field of a 64-bit descriptor; its name distinguishes
   the 64-bit layout from the 32-bit one at compile time.  */
3260 tree mbo = TYPE_FIELDS (desc_type);
3261 const char *mbostr = IDENTIFIER_POINTER (DECL_NAME (mbo));
/* MBMO is the 4th field of the descriptor.  */
3262 tree mbmo = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (mbo)));
3263 tree is64bit, gnu_expr32, gnu_expr64;
3265 /* If the field name is not MBO, it must be 32-bit and no alternate.
3266 Otherwise primary must be 64-bit and alternate 32-bit. */
3267 if (strcmp (mbostr, "MBO") != 0)
3268 return convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);
3270 /* Build the test for 64-bit descriptor. */
/* A genuine 64-bit descriptor is recognized at run time by the values of
   its MBO and MBMO fields (MBMO is checked against -1 below).  */
3271 mbo = build3 (COMPONENT_REF, TREE_TYPE (mbo), desc, mbo, NULL_TREE);
3272 mbmo = build3 (COMPONENT_REF, TREE_TYPE (mbmo), desc, mbmo, NULL_TREE);
3274 = build_binary_op (TRUTH_ANDIF_EXPR, integer_type_node,
3275 build_binary_op (EQ_EXPR, integer_type_node,
3276 convert (integer_type_node, mbo),
3278 build_binary_op (EQ_EXPR, integer_type_node,
3279 convert (integer_type_node, mbmo),
3280 integer_minus_one_node));
3282 /* Build the 2 possible end results. */
3283 gnu_expr64 = convert_vms_descriptor64 (gnu_type, gnu_expr, gnat_subprog);
3284 gnu_expr = fold_convert (gnu_expr_alt_type, gnu_expr);
3285 gnu_expr32 = convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);
/* Select between the 64-bit and 32-bit conversions at run time.  */
3287 return build3 (COND_EXPR, gnu_type, is64bit, gnu_expr64, gnu_expr32);
3290 /* Build a stub for the subprogram specified by the GCC tree GNU_SUBPROG
3291 and the GNAT node GNAT_SUBPROG. */
/* The stub converts any parameter passed by VMS descriptor into a
   by-reference one, then calls the internal subprogram and propagates
   its return value, if any.  */
3294 build_function_stub (tree gnu_subprog, Entity_Id gnat_subprog)
3296 tree gnu_subprog_type, gnu_subprog_addr, gnu_subprog_call;
3297 tree gnu_stub_param, gnu_param_list, gnu_arg_types, gnu_param;
3298 tree gnu_stub_decl = DECL_FUNCTION_STUB (gnu_subprog);
3301 gnu_subprog_type = TREE_TYPE (gnu_subprog);
3302 gnu_param_list = NULL_TREE;
3304 begin_subprog_body (gnu_stub_decl);
3307 start_stmt_group ();
3309 /* Loop over the parameters of the stub and translate any of them
3310 passed by descriptor into a by reference one. */
3311 for (gnu_stub_param = DECL_ARGUMENTS (gnu_stub_decl),
3312 gnu_arg_types = TYPE_ARG_TYPES (gnu_subprog_type);
3314 gnu_stub_param = TREE_CHAIN (gnu_stub_param),
3315 gnu_arg_types = TREE_CHAIN (gnu_arg_types))
3317 if (DECL_BY_DESCRIPTOR_P (gnu_stub_param))
3319 = convert_vms_descriptor (TREE_VALUE (gnu_arg_types),
3321 DECL_PARM_ALT_TYPE (gnu_stub_param),
/* Non-descriptor parameters are forwarded unchanged.  */
3324 gnu_param = gnu_stub_param;
/* Parameters are accumulated in reverse order here.  */
3326 gnu_param_list = tree_cons (NULL_TREE, gnu_param, gnu_param_list);
3329 gnu_body = end_stmt_group ();
3331 /* Invoke the internal subprogram. */
3332 gnu_subprog_addr = build1 (ADDR_EXPR, build_pointer_type (gnu_subprog_type),
/* nreverse restores the original parameter order for the call.  */
3334 gnu_subprog_call = build_call_list (TREE_TYPE (gnu_subprog_type),
3336 nreverse (gnu_param_list));
3338 /* Propagate the return value, if any. */
3339 if (VOID_TYPE_P (TREE_TYPE (gnu_subprog_type)))
3340 append_to_statement_list (gnu_subprog_call, &gnu_body);
3342 append_to_statement_list (build_return_expr (DECL_RESULT (gnu_stub_decl),
3348 allocate_struct_function (gnu_stub_decl, false);
3349 end_subprog_body (gnu_body);
3352 /* Build a type to be used to represent an aliased object whose nominal
3353 type is an unconstrained array. This consists of a RECORD_TYPE containing
3354 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
3355 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
3356 is used to represent an arbitrary unconstrained object. Use NAME
3357 as the name of the record. */
3360 build_unc_object_type (tree template_type, tree object_type, tree name)
3362 tree type = make_node (RECORD_TYPE);
/* The BOUNDS field (the template) comes first, the ARRAY field second;
   shift_unc_components_for_thin_pointers relies on this field order.  */
3363 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
3364 template_type, type, 0, 0, 0, 1);
3365 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
3368 TYPE_NAME (type) = name;
/* Mark the record as containing a bounds template so the rest of gigi
   can recognize it.  */
3369 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
3370 finish_record_type (type,
3371 chainon (chainon (NULL_TREE, template_field),
3378 /* Same, taking a thin or fat pointer type instead of a template type. */
3381 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
3386 gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
/* Extract the template type: for a fat pointer it is the designated type
   of the 2nd field (the bounds pointer); for a thin pointer it is the
   type of the 1st field of the designated record.  */
3389 = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
3390 ? TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
3391 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
3392 return build_unc_object_type (template_type, object_type, name);
3395 /* Shift the component offsets within an unconstrained object TYPE to make it
3396 suitable for use as a designated type for thin pointers. */
3399 shift_unc_components_for_thin_pointers (tree type)
3401 /* Thin pointer values designate the ARRAY data of an unconstrained object,
3402 allocated past the BOUNDS template. The designated type is adjusted to
3403 have ARRAY at position zero and the template at a negative offset, so
3404 that COMPONENT_REFs on (*thin_ptr) designate the proper location. */
3406 tree bounds_field = TYPE_FIELDS (type);
3407 tree array_field = TREE_CHAIN (TYPE_FIELDS (type));
/* Place BOUNDS at minus the (byte) position ARRAY had, i.e. just before
   the data the thin pointer designates.  */
3409 DECL_FIELD_OFFSET (bounds_field)
3410 = size_binop (MINUS_EXPR, size_zero_node, byte_position (array_field));
/* And put ARRAY itself at offset zero (both byte and bit offsets).  */
3412 DECL_FIELD_OFFSET (array_field) = size_zero_node;
3413 DECL_FIELD_BIT_OFFSET (array_field) = bitsize_zero_node;
3416 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
3417 In the normal case this is just two adjustments, but we have more to
3418 do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
3421 update_pointer_to (tree old_type, tree new_type)
3423 tree ptr = TYPE_POINTER_TO (old_type);
3424 tree ref = TYPE_REFERENCE_TO (old_type);
3428 /* If this is the main variant, process all the other variants first. */
3429 if (TYPE_MAIN_VARIANT (old_type) == old_type)
3430 for (type = TYPE_NEXT_VARIANT (old_type); type;
3431 type = TYPE_NEXT_VARIANT (type))
3432 update_pointer_to (type, new_type);
3434 /* If no pointers and no references, we are done. */
3438 /* Merge the old type qualifiers in the new type.
3440 Each old variant has qualifiers for specific reasons, and the new
3441 designated type as well. Each set of qualifiers represents useful
3442 information grabbed at some point, and merging the two simply unifies
3443 these inputs into the final type description.
3445 Consider for instance a volatile type frozen after an access to constant
3446 type designating it; after the designated type's freeze, we get here with
3447 a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
3448 when the access type was processed. We will make a volatile and readonly
3449 designated type, because that's what it really is.
3451 We might also get here for a non-dummy OLD_TYPE variant with different
3452 qualifiers than those of NEW_TYPE, for instance in some cases of pointers
3453 to private record type elaboration (see the comments around the call to
3454 this routine in gnat_to_gnu_entity <E_Access_Type>). We have to merge
3455 the qualifiers in those cases too, to avoid accidentally discarding the
3456 initial set, and will often end up with OLD_TYPE == NEW_TYPE then. */
3458 = build_qualified_type (new_type,
3459 TYPE_QUALS (old_type) | TYPE_QUALS (new_type));
3461 /* If old type and new type are identical, there is nothing to do. */
3462 if (old_type == new_type)
3465 /* Otherwise, first handle the simple case. */
3466 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
3468 TYPE_POINTER_TO (new_type) = ptr;
3469 TYPE_REFERENCE_TO (new_type) = ref;
/* Walk every pointer type to OLD_TYPE (and each of its variants) and
   retarget it at NEW_TYPE.  */
3471 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
3472 for (ptr1 = TYPE_MAIN_VARIANT (ptr); ptr1;
3473 ptr1 = TYPE_NEXT_VARIANT (ptr1))
3474 TREE_TYPE (ptr1) = new_type;
/* Likewise for every reference type to OLD_TYPE.  */
3476 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
3477 for (ref1 = TYPE_MAIN_VARIANT (ref); ref1;
3478 ref1 = TYPE_NEXT_VARIANT (ref1))
3479 TREE_TYPE (ref1) = new_type;
3482 /* Now deal with the unconstrained array case. In this case the "pointer"
3483 is actually a RECORD_TYPE where both fields are pointers to dummy nodes.
3484 Turn them into pointers to the correct types using update_pointer_to. */
3485 else if (!TYPE_IS_FAT_POINTER_P (ptr))
3490 tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
3491 tree array_field = TYPE_FIELDS (ptr);
3492 tree bounds_field = TREE_CHAIN (TYPE_FIELDS (ptr));
3493 tree new_ptr = TYPE_POINTER_TO (new_type);
3497 /* Make pointers to the dummy template point to the real template. */
3499 (TREE_TYPE (TREE_TYPE (bounds_field)),
3500 TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_ptr)))));
3502 /* The references to the template bounds present in the array type
3503 are made through a PLACEHOLDER_EXPR of type NEW_PTR. Since we
3504 are updating PTR to make it a full replacement for NEW_PTR as
3505 pointer to NEW_TYPE, we must rework the PLACEHOLDER_EXPR so as
3506 to make it of type PTR. */
3507 new_ref = build3 (COMPONENT_REF, TREE_TYPE (bounds_field),
3508 build0 (PLACEHOLDER_EXPR, ptr),
3509 bounds_field, NULL_TREE);
3511 /* Create the new array for the new PLACEHOLDER_EXPR and make pointers
3512 to the dummy array point to it. */
3514 (TREE_TYPE (TREE_TYPE (array_field)),
3515 substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))),
3516 TREE_CHAIN (TYPE_FIELDS (new_ptr)), new_ref));
3518 /* Make PTR the pointer to NEW_TYPE. */
3519 TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
3520 = TREE_TYPE (new_type) = ptr;
3522 /* And show the original pointer NEW_PTR to the debugger. This is the
3523 counterpart of the equivalent processing in gnat_pushdecl when the
3524 unconstrained array type is frozen after access types to it. Note
3525 that update_pointer_to can be invoked multiple times on the same
3526 couple of types because of the type variants. */
3528 && TREE_CODE (TYPE_NAME (ptr)) == TYPE_DECL
3529 && !DECL_ORIGINAL_TYPE (TYPE_NAME (ptr)))
3531 DECL_ORIGINAL_TYPE (TYPE_NAME (ptr)) = new_ptr;
3532 DECL_ARTIFICIAL (TYPE_NAME (ptr)) = 0;
/* Record NEW_TYPE as the unconstrained array type on every variant
   of PTR.  */
3534 for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
3535 SET_TYPE_UNCONSTRAINED_ARRAY (var, new_type);
3537 /* Now handle updating the allocation record, what the thin pointer
3538 points to. Update all pointers from the old record into the new
3539 one, update the type of the array field, and recompute the size. */
3540 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);
3542 TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
3543 = TREE_TYPE (TREE_TYPE (array_field));
3545 /* The size recomputation needs to account for alignment constraints, so
3546 we let layout_type work it out. This will reset the field offsets to
3547 what they would be in a regular record, so we shift them back to what
3548 we want them to be for a thin pointer designated type afterwards. */
3549 DECL_SIZE (TYPE_FIELDS (new_obj_rec)) = 0;
3550 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))) = 0;
3551 TYPE_SIZE (new_obj_rec) = 0;
3552 layout_type (new_obj_rec);
3554 shift_unc_components_for_thin_pointers (new_obj_rec);
3556 /* We are done, at last. */
3557 rest_of_record_type_compilation (ptr);
3561 /* Convert EXPR, a pointer to a constrained array, into a pointer to an
3562 unconstrained one. This involves making or finding a template. */
3565 convert_to_fat_pointer (tree type, tree expr)
3567 tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
3568 tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
3569 tree etype = TREE_TYPE (expr);
3572 /* If EXPR is null, make a fat pointer that contains null pointers to the
3573 template and array. */
3574 if (integer_zerop (expr))
3576 gnat_build_constructor
3578 tree_cons (TYPE_FIELDS (type),
3579 convert (p_array_type, expr),
3580 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3581 convert (build_pointer_type (template_type),
3585 /* If EXPR is a thin pointer, make template and data from the record.. */
3586 else if (TYPE_IS_THIN_POINTER_P (etype))
3588 tree fields = TYPE_FIELDS (TREE_TYPE (etype));
3590 expr = save_expr (expr);
/* If EXPR is the address of an object, operate on the object directly;
   otherwise dereference the thin pointer.  */
3591 if (TREE_CODE (expr) == ADDR_EXPR)
3592 expr = TREE_OPERAND (expr, 0);
3594 expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
/* The template is the 1st field of the designated record, the array data
   the 2nd; take the template by value and the data by address.  */
3596 template_tree = build_component_ref (expr, NULL_TREE, fields, false);
3597 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
3598 build_component_ref (expr, NULL_TREE,
3599 TREE_CHAIN (fields), false));
3602 /* Otherwise, build the constructor for the template. */
3604 template_tree = build_template (template_type, TREE_TYPE (etype), expr);
3606 /* The final result is a constructor for the fat pointer.
3608 If EXPR is an argument of a foreign convention subprogram, the type it
3609 points to is directly the component type. In this case, the expression
3610 type may not match the corresponding FIELD_DECL type at this point, so we
3611 call "convert" here to fix that up if necessary. This type consistency is
3612 required, for instance because it ensures that possible later folding of
3613 COMPONENT_REFs against this constructor always yields something of the
3614 same type as the initial reference.
3616 Note that the call to "build_template" above is still fine because it
3617 will only refer to the provided TEMPLATE_TYPE in this case. */
3619 gnat_build_constructor
3621 tree_cons (TYPE_FIELDS (type),
3622 convert (p_array_type, expr),
3623 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3624 build_unary_op (ADDR_EXPR, NULL_TREE,
3629 /* Convert to a thin pointer type, TYPE. The only thing we know how to convert
3630 is something that is a fat pointer, so convert to it first if it EXPR
3631 is not already a fat pointer. */
3634 convert_to_thin_pointer (tree type, tree expr)
/* First normalize EXPR to a fat pointer for the unconstrained array
   designated (indirectly) by TYPE.  */
3636 if (!TYPE_IS_FAT_POINTER_P (TREE_TYPE (expr)))
3638 = convert_to_fat_pointer
3639 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
3641 /* We get the pointer to the data and use a NOP_EXPR to make it the
/* Extract the data pointer (1st field of the fat pointer) and retype it
   in place; a thin pointer designates the array data directly.  */
3643 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)),
3645 expr = build1 (NOP_EXPR, type, expr);
3650 /* Create an expression whose value is that of EXPR,
3651 converted to type TYPE. The TREE_TYPE of the value
3652 is always TYPE. This function implements all reasonable
3653 conversions; callers should filter out those that are
3654 not permitted by the language being compiled. */
3657 convert (tree type, tree expr)
3659 enum tree_code code = TREE_CODE (type);
3660 tree etype = TREE_TYPE (expr);
3661 enum tree_code ecode = TREE_CODE (etype);
3663 /* If EXPR is already the right type, we are done. */
3667 /* If both input and output have padding and are of variable size, do this
3668 as an unchecked conversion. Likewise if one is a mere variant of the
3669 other, so we avoid a pointless unpad/repad sequence. */
3670 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
3671 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
3672 && (!TREE_CONSTANT (TYPE_SIZE (type))
3673 || !TREE_CONSTANT (TYPE_SIZE (etype))
3674 || gnat_types_compatible_p (type, etype)
3675 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
3676 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
3679 /* If the output type has padding, convert to the inner type and make a
3680 constructor to build the record, unless a variable size is involved. */
3681 else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
3683 /* If we previously converted from another type and our type is
3684 of variable size, remove the conversion to avoid the need for
3685 variable-sized temporaries. Likewise for a conversion between
3686 original and packable version. */
3687 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3688 && (!TREE_CONSTANT (TYPE_SIZE (type))
3689 || (ecode == RECORD_TYPE
3690 && TYPE_NAME (etype)
3691 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
3692 expr = TREE_OPERAND (expr, 0);
3694 /* If we are just removing the padding from expr, convert the original
3695 object if we have variable size in order to avoid the need for some
3696 variable-sized temporaries. Likewise if the padding is a variant
3697 of the other, so we avoid a pointless unpad/repad sequence. */
3698 if (TREE_CODE (expr) == COMPONENT_REF
3699 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
3700 && (!TREE_CONSTANT (TYPE_SIZE (type))
3701 || gnat_types_compatible_p (type,
3702 TREE_TYPE (TREE_OPERAND (expr, 0)))
3703 || (ecode == RECORD_TYPE
3704 && TYPE_NAME (etype)
3705 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
3706 return convert (type, TREE_OPERAND (expr, 0));
3708 /* If the inner type is of self-referential size and the expression type
3709 is a record, do this as an unchecked conversion. But first pad the
3710 expression if possible to have the same size on both sides. */
3711 if (TREE_CODE (etype) == RECORD_TYPE
3712 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
3714 if (TREE_CONSTANT (TYPE_SIZE (etype)))
3715 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
3716 false, false, false, true), expr);