1 /****************************************************************************
3 * GNAT COMPILER COMPONENTS *
7 * C Implementation File *
9 * Copyright (C) 1992-2012, Free Software Foundation, Inc. *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
24 ****************************************************************************/
28 #include "coretypes.h"
33 #include "diagnostic-core.h"
39 #include "common/common-target.h"
40 #include "langhooks.h"
42 #include "diagnostic.h"
43 #include "tree-dump.h"
44 #include "tree-inline.h"
45 #include "tree-iterator.h"
/* Provide a default if the target headers did not define it.  */
#ifndef MAX_BITS_PER_WORD
#define MAX_BITS_PER_WORD  BITS_PER_WORD
#endif
65 /* If nonzero, pretend we are allocating at global level. */
68 /* The default alignment of "double" floating-point types, i.e. floating
69 point types whose size is equal to 64 bits, or 0 if this alignment is
70 not specifically capped. */
71 int double_float_alignment;
73 /* The default alignment of "double" or larger scalar types, i.e. scalar
74 types whose size is greater or equal to 64 bits, or 0 if this alignment
75 is not specifically capped. */
76 int double_scalar_alignment;
78 /* Tree nodes for the various types and decls we create. */
79 tree gnat_std_decls[(int) ADT_LAST];
81 /* Functions to call for each of the possible raise reasons. */
82 tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
84 /* Likewise, but with extra info for each of the possible raise reasons. */
85 tree gnat_raise_decls_ext[(int) LAST_REASON_CODE + 1];
87 /* Forward declarations for handlers of attributes. */
88 static tree handle_const_attribute (tree *, tree, tree, int, bool *);
89 static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
90 static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
91 static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
92 static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
93 static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
94 static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
95 static tree handle_leaf_attribute (tree *, tree, tree, int, bool *);
96 static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
97 static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
98 static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
99 static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);
101 /* Fake handler for attributes we don't properly support, typically because
102 they'd require dragging a lot of the common-c front-end circuitry. */
103 static tree fake_attribute_handler (tree *, tree, tree, int, bool *);
105 /* Table of machine-independent internal attributes for Ada. We support
106 this minimal set of attributes to accommodate the needs of builtins. */
107 const struct attribute_spec gnat_internal_attribute_table[] =
109 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
110 affects_type_identity } */
111 { "const", 0, 0, true, false, false, handle_const_attribute,
113 { "nothrow", 0, 0, true, false, false, handle_nothrow_attribute,
115 { "pure", 0, 0, true, false, false, handle_pure_attribute,
117 { "no vops", 0, 0, true, false, false, handle_novops_attribute,
119 { "nonnull", 0, -1, false, true, true, handle_nonnull_attribute,
121 { "sentinel", 0, 1, false, true, true, handle_sentinel_attribute,
123 { "noreturn", 0, 0, true, false, false, handle_noreturn_attribute,
125 { "leaf", 0, 0, true, false, false, handle_leaf_attribute,
127 { "malloc", 0, 0, true, false, false, handle_malloc_attribute,
129 { "type generic", 0, 0, false, true, true, handle_type_generic_attribute,
132 { "vector_size", 1, 1, false, true, false, handle_vector_size_attribute,
134 { "vector_type", 0, 0, false, true, false, handle_vector_type_attribute,
136 { "may_alias", 0, 0, false, true, false, NULL, false },
138 /* ??? format and format_arg are heavy and not supported, which actually
139 prevents support for stdio builtins, which we however declare as part
140 of the common builtins.def contents. */
141 { "format", 3, 3, false, true, true, fake_attribute_handler, false },
142 { "format_arg", 1, 1, false, true, true, fake_attribute_handler, false },
144 { NULL, 0, 0, false, false, false, NULL, false }
147 /* Associates a GNAT tree node to a GCC tree node. It is used in
148 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
149 of `save_gnu_tree' for more info. */
150 static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
152 #define GET_GNU_TREE(GNAT_ENTITY) \
153 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
155 #define SET_GNU_TREE(GNAT_ENTITY,VAL) \
156 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
158 #define PRESENT_GNU_TREE(GNAT_ENTITY) \
159 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
161 /* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
162 static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;
164 #define GET_DUMMY_NODE(GNAT_ENTITY) \
165 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
167 #define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
168 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
170 #define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
171 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
173 /* This variable keeps a table for types for each precision so that we only
174 allocate each of them once. Signed and unsigned types are kept separate.
176 Note that these types are only used when fold-const requests something
177 special. Perhaps we should NOT share these types; we'll see how it
179 static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
181 /* Likewise for float types, but record these by mode. */
182 static GTY(()) tree float_types[NUM_MACHINE_MODES];
184 /* For each binding contour we allocate a binding_level structure to indicate
185 the binding depth. */
187 struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
188 /* The binding level containing this one (the enclosing binding level). */
189 struct gnat_binding_level *chain;
190 /* The BLOCK node for this level. */
192 /* If nonzero, the setjmp buffer that needs to be updated for any
193 variable-sized definition within this context. */
197 /* The binding level currently in effect. */
198 static GTY(()) struct gnat_binding_level *current_binding_level;
200 /* A chain of gnat_binding_level structures awaiting reuse. */
201 static GTY((deletable)) struct gnat_binding_level *free_binding_level;
203 /* The context to be used for global declarations. */
204 static GTY(()) tree global_context;
206 /* An array of global declarations. */
207 static GTY(()) VEC(tree,gc) *global_decls;
209 /* An array of builtin function declarations. */
210 static GTY(()) VEC(tree,gc) *builtin_decls;
212 /* An array of global renaming pointers. */
213 static GTY(()) VEC(tree,gc) *global_renaming_pointers;
215 /* A chain of unused BLOCK nodes. */
216 static GTY((deletable)) tree free_block_chain;
218 static tree merge_sizes (tree, tree, tree, bool, bool);
219 static tree compute_related_constant (tree, tree);
220 static tree split_plus (tree, tree *);
221 static tree float_type_for_precision (int, enum machine_mode);
222 static tree convert_to_fat_pointer (tree, tree);
223 static tree convert_to_thin_pointer (tree, tree);
224 static bool potential_alignment_gap (tree, tree, tree);
225 static void process_attributes (tree, struct attrib *);
227 /* Initialize the association of GNAT nodes to GCC trees. */
230 init_gnat_to_gnu (void)
232 associate_gnat_to_gnu = ggc_alloc_cleared_vec_tree (max_gnat_nodes);
235 /* GNAT_ENTITY is a GNAT tree node for an entity. Associate GNU_DECL, a GCC
236 tree node, with GNAT_ENTITY. If GNU_DECL is not a ..._DECL node, abort.
237 If NO_CHECK is true, the latter check is suppressed.
239 If GNU_DECL is zero, reset a previous association. */
242 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
244 /* Check that GNAT_ENTITY is not already defined and that it is being set
245 to something which is a decl. If that is not the case, this usually
246 means GNAT_ENTITY is defined twice, but occasionally is due to some
248 gcc_assert (!(gnu_decl
249 && (PRESENT_GNU_TREE (gnat_entity)
250 || (!no_check && !DECL_P (gnu_decl)))));
252 SET_GNU_TREE (gnat_entity, gnu_decl);
255 /* GNAT_ENTITY is a GNAT tree node for an entity. Return the GCC tree node
256 that was associated with it. If there is no such tree node, abort.
258 In some cases, such as delayed elaboration or expressions that need to
259 be elaborated only once, GNAT_ENTITY is really not an entity. */
262 get_gnu_tree (Entity_Id gnat_entity)
264 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
265 return GET_GNU_TREE (gnat_entity);
268 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
271 present_gnu_tree (Entity_Id gnat_entity)
273 return PRESENT_GNU_TREE (gnat_entity);
276 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
279 init_dummy_type (void)
281 dummy_node_table = ggc_alloc_cleared_vec_tree (max_gnat_nodes);
284 /* Make a dummy type corresponding to GNAT_TYPE. */
287 make_dummy_type (Entity_Id gnat_type)
289 Entity_Id gnat_underlying = Gigi_Equivalent_Type (gnat_type);
292 /* If there is an equivalent type, get its underlying type. */
293 if (Present (gnat_underlying))
294 gnat_underlying = Gigi_Equivalent_Type (Underlying_Type (gnat_underlying));
296 /* If there was no equivalent type (can only happen when just annotating
297 types) or underlying type, go back to the original type. */
298 if (No (gnat_underlying))
299 gnat_underlying = gnat_type;
301 /* If it there already a dummy type, use that one. Else make one. */
302 if (PRESENT_DUMMY_NODE (gnat_underlying))
303 return GET_DUMMY_NODE (gnat_underlying);
305 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
307 gnu_type = make_node (Is_Record_Type (gnat_underlying)
308 ? tree_code_for_record_type (gnat_underlying)
310 TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
311 TYPE_DUMMY_P (gnu_type) = 1;
312 TYPE_STUB_DECL (gnu_type)
313 = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
314 if (Is_By_Reference_Type (gnat_underlying))
315 TYPE_BY_REFERENCE_P (gnu_type) = 1;
317 SET_DUMMY_NODE (gnat_underlying, gnu_type);
322 /* Return the dummy type that was made for GNAT_TYPE, if any. */
325 get_dummy_type (Entity_Id gnat_type)
327 return GET_DUMMY_NODE (gnat_type);
330 /* Build dummy fat and thin pointer types whose designated type is specified
331 by GNAT_DESIG_TYPE/GNU_DESIG_TYPE and attach them to the latter. */
334 build_dummy_unc_pointer_types (Entity_Id gnat_desig_type, tree gnu_desig_type)
336 tree gnu_template_type, gnu_ptr_template, gnu_array_type, gnu_ptr_array;
337 tree gnu_fat_type, fields, gnu_object_type;
339 gnu_template_type = make_node (RECORD_TYPE);
340 TYPE_NAME (gnu_template_type) = create_concat_name (gnat_desig_type, "XUB");
341 TYPE_DUMMY_P (gnu_template_type) = 1;
342 gnu_ptr_template = build_pointer_type (gnu_template_type);
344 gnu_array_type = make_node (ENUMERAL_TYPE);
345 TYPE_NAME (gnu_array_type) = create_concat_name (gnat_desig_type, "XUA");
346 TYPE_DUMMY_P (gnu_array_type) = 1;
347 gnu_ptr_array = build_pointer_type (gnu_array_type);
349 gnu_fat_type = make_node (RECORD_TYPE);
350 /* Build a stub DECL to trigger the special processing for fat pointer types
352 TYPE_NAME (gnu_fat_type)
353 = create_type_stub_decl (create_concat_name (gnat_desig_type, "XUP"),
355 fields = create_field_decl (get_identifier ("P_ARRAY"), gnu_ptr_array,
356 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
358 = create_field_decl (get_identifier ("P_BOUNDS"), gnu_ptr_template,
359 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
360 finish_fat_pointer_type (gnu_fat_type, fields);
361 SET_TYPE_UNCONSTRAINED_ARRAY (gnu_fat_type, gnu_desig_type);
362 /* Suppress debug info until after the type is completed. */
363 TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (gnu_fat_type)) = 1;
365 gnu_object_type = make_node (RECORD_TYPE);
366 TYPE_NAME (gnu_object_type) = create_concat_name (gnat_desig_type, "XUT");
367 TYPE_DUMMY_P (gnu_object_type) = 1;
369 TYPE_POINTER_TO (gnu_desig_type) = gnu_fat_type;
370 TYPE_OBJECT_RECORD_TYPE (gnu_desig_type) = gnu_object_type;
373 /* Return true if we are in the global binding level. */
376 global_bindings_p (void)
378 return force_global || current_function_decl == NULL_TREE;
381 /* Enter a new binding level. */
384 gnat_pushlevel (void)
386 struct gnat_binding_level *newlevel = NULL;
388 /* Reuse a struct for this binding level, if there is one. */
389 if (free_binding_level)
391 newlevel = free_binding_level;
392 free_binding_level = free_binding_level->chain;
395 newlevel = ggc_alloc_gnat_binding_level ();
397 /* Use a free BLOCK, if any; otherwise, allocate one. */
398 if (free_block_chain)
400 newlevel->block = free_block_chain;
401 free_block_chain = BLOCK_CHAIN (free_block_chain);
402 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
405 newlevel->block = make_node (BLOCK);
407 /* Point the BLOCK we just made to its parent. */
408 if (current_binding_level)
409 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
411 BLOCK_VARS (newlevel->block) = NULL_TREE;
412 BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
413 TREE_USED (newlevel->block) = 1;
415 /* Add this level to the front of the chain (stack) of active levels. */
416 newlevel->chain = current_binding_level;
417 newlevel->jmpbuf_decl = NULL_TREE;
418 current_binding_level = newlevel;
421 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
422 and point FNDECL to this BLOCK. */
425 set_current_block_context (tree fndecl)
427 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
428 DECL_INITIAL (fndecl) = current_binding_level->block;
429 set_block_for_group (current_binding_level->block);
432 /* Set the jmpbuf_decl for the current binding level to DECL. */
435 set_block_jmpbuf_decl (tree decl)
437 current_binding_level->jmpbuf_decl = decl;
440 /* Get the jmpbuf_decl, if any, for the current binding level. */
443 get_block_jmpbuf_decl (void)
445 return current_binding_level->jmpbuf_decl;
448 /* Exit a binding level. Set any BLOCK into the current code group. */
453 struct gnat_binding_level *level = current_binding_level;
454 tree block = level->block;
456 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
457 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
459 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
460 are no variables free the block and merge its subblocks into those of its
461 parent block. Otherwise, add it to the list of its parent. */
462 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
464 else if (BLOCK_VARS (block) == NULL_TREE)
466 BLOCK_SUBBLOCKS (level->chain->block)
467 = block_chainon (BLOCK_SUBBLOCKS (block),
468 BLOCK_SUBBLOCKS (level->chain->block));
469 BLOCK_CHAIN (block) = free_block_chain;
470 free_block_chain = block;
474 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
475 BLOCK_SUBBLOCKS (level->chain->block) = block;
476 TREE_USED (block) = 1;
477 set_block_for_group (block);
480 /* Free this binding structure. */
481 current_binding_level = level->chain;
482 level->chain = free_binding_level;
483 free_binding_level = level;
486 /* Exit a binding level and discard the associated BLOCK. */
491 struct gnat_binding_level *level = current_binding_level;
492 tree block = level->block;
494 BLOCK_CHAIN (block) = free_block_chain;
495 free_block_chain = block;
497 /* Free this binding structure. */
498 current_binding_level = level->chain;
499 level->chain = free_binding_level;
500 free_binding_level = level;
503 /* Record DECL as belonging to the current lexical scope and use GNAT_NODE
504 for location information and flag propagation. */
507 gnat_pushdecl (tree decl, Node_Id gnat_node)
509 /* If DECL is public external or at top level, it has global context. */
510 if ((TREE_PUBLIC (decl) && DECL_EXTERNAL (decl)) || global_bindings_p ())
513 global_context = build_translation_unit_decl (NULL_TREE);
514 DECL_CONTEXT (decl) = global_context;
518 DECL_CONTEXT (decl) = current_function_decl;
520 /* Functions imported in another function are not really nested.
521 For really nested functions mark them initially as needing
522 a static chain for uses of that flag before unnesting;
523 lower_nested_functions will then recompute it. */
524 if (TREE_CODE (decl) == FUNCTION_DECL && !TREE_PUBLIC (decl))
525 DECL_STATIC_CHAIN (decl) = 1;
528 TREE_NO_WARNING (decl) = (No (gnat_node) || Warnings_Off (gnat_node));
530 /* Set the location of DECL and emit a declaration for it. */
531 if (Present (gnat_node))
532 Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
534 add_decl_expr (decl, gnat_node);
536 /* Put the declaration on the list. The list of declarations is in reverse
537 order. The list will be reversed later. Put global declarations in the
538 globals list and local ones in the current block. But skip TYPE_DECLs
539 for UNCONSTRAINED_ARRAY_TYPE in both cases, as they will cause trouble
540 with the debugger and aren't needed anyway. */
541 if (!(TREE_CODE (decl) == TYPE_DECL
542 && TREE_CODE (TREE_TYPE (decl)) == UNCONSTRAINED_ARRAY_TYPE))
544 if (global_bindings_p ())
546 VEC_safe_push (tree, gc, global_decls, decl);
548 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
549 VEC_safe_push (tree, gc, builtin_decls, decl);
551 else if (!DECL_EXTERNAL (decl))
553 DECL_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
554 BLOCK_VARS (current_binding_level->block) = decl;
558 /* For the declaration of a type, set its name if it either is not already
559 set or if the previous type name was not derived from a source name.
560 We'd rather have the type named with a real name and all the pointer
561 types to the same object have the same POINTER_TYPE node. Code in the
562 equivalent function of c-decl.c makes a copy of the type node here, but
563 that may cause us trouble with incomplete types. We make an exception
564 for fat pointer types because the compiler automatically builds them
565 for unconstrained array types and the debugger uses them to represent
566 both these and pointers to these. */
567 if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
569 tree t = TREE_TYPE (decl);
571 if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
573 /* Array and pointer types aren't "tagged" types so we force the
574 type to be associated with its typedef in the DWARF back-end,
575 in order to make sure that the latter is always preserved. */
576 if (!DECL_ARTIFICIAL (decl)
577 && (TREE_CODE (t) == ARRAY_TYPE
578 || TREE_CODE (t) == POINTER_TYPE))
580 tree tt = build_distinct_type_copy (t);
581 if (TREE_CODE (t) == POINTER_TYPE)
582 TYPE_NEXT_PTR_TO (t) = tt;
583 TYPE_NAME (tt) = DECL_NAME (decl);
584 TYPE_CONTEXT (tt) = DECL_CONTEXT (decl);
585 TYPE_STUB_DECL (tt) = TYPE_STUB_DECL (t);
586 DECL_ORIGINAL_TYPE (decl) = tt;
589 else if (TYPE_IS_FAT_POINTER_P (t))
591 /* We need a variant for the placeholder machinery to work. */
592 tree tt = build_variant_type_copy (t);
593 TYPE_NAME (tt) = decl;
594 TYPE_CONTEXT (tt) = DECL_CONTEXT (decl);
595 TREE_USED (tt) = TREE_USED (t);
596 TREE_TYPE (decl) = tt;
597 if (DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
598 DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
600 DECL_ORIGINAL_TYPE (decl) = t;
601 DECL_ARTIFICIAL (decl) = 0;
604 else if (DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
609 /* Propagate the name to all the anonymous variants. This is needed
610 for the type qualifiers machinery to work properly. */
612 for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
613 if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
615 TYPE_NAME (t) = decl;
616 TYPE_CONTEXT (t) = DECL_CONTEXT (decl);
621 /* Record TYPE as a builtin type for Ada. NAME is the name of the type.
622 ARTIFICIAL_P is true if it's a type that was generated by the compiler. */
625 record_builtin_type (const char *name, tree type, bool artificial_p)
627 tree type_decl = build_decl (input_location,
628 TYPE_DECL, get_identifier (name), type);
629 DECL_ARTIFICIAL (type_decl) = artificial_p;
630 TYPE_ARTIFICIAL (type) = artificial_p;
631 gnat_pushdecl (type_decl, Empty);
633 if (debug_hooks->type_decl)
634 debug_hooks->type_decl (type_decl, false);
637 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
638 finish constructing the record type as a fat pointer type. */
641 finish_fat_pointer_type (tree record_type, tree field_list)
643 /* Make sure we can put it into a register. */
644 TYPE_ALIGN (record_type) = MIN (BIGGEST_ALIGNMENT, 2 * POINTER_SIZE);
646 /* Show what it really is. */
647 TYPE_FAT_POINTER_P (record_type) = 1;
649 /* Do not emit debug info for it since the types of its fields may still be
650 incomplete at this point. */
651 finish_record_type (record_type, field_list, 0, false);
653 /* Force type_contains_placeholder_p to return true on it. Although the
654 PLACEHOLDER_EXPRs are referenced only indirectly, this isn't a pointer
655 type but the representation of the unconstrained array. */
656 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (record_type) = 2;
659 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
660 finish constructing the record or union type. If REP_LEVEL is zero, this
661 record has no representation clause and so will be entirely laid out here.
662 If REP_LEVEL is one, this record has a representation clause and has been
663 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
664 this record is derived from a parent record and thus inherits its layout;
665 only make a pass on the fields to finalize them. DEBUG_INFO_P is true if
666 we need to write debug information about this type. */
669 finish_record_type (tree record_type, tree field_list, int rep_level,
672 enum tree_code code = TREE_CODE (record_type);
673 tree name = TYPE_NAME (record_type);
674 tree ada_size = bitsize_zero_node;
675 tree size = bitsize_zero_node;
676 bool had_size = TYPE_SIZE (record_type) != 0;
677 bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
678 bool had_align = TYPE_ALIGN (record_type) != 0;
681 TYPE_FIELDS (record_type) = field_list;
683 /* Always attach the TYPE_STUB_DECL for a record type. It is required to
684 generate debug info and have a parallel type. */
685 if (name && TREE_CODE (name) == TYPE_DECL)
686 name = DECL_NAME (name);
687 TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);
689 /* Globally initialize the record first. If this is a rep'ed record,
690 that just means some initializations; otherwise, layout the record. */
693 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
696 TYPE_SIZE_UNIT (record_type) = size_zero_node;
699 TYPE_SIZE (record_type) = bitsize_zero_node;
701 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
702 out just like a UNION_TYPE, since the size will be fixed. */
703 else if (code == QUAL_UNION_TYPE)
708 /* Ensure there isn't a size already set. There can be in an error
709 case where there is a rep clause but all fields have errors and
710 no longer have a position. */
711 TYPE_SIZE (record_type) = 0;
713 /* Ensure we use the traditional GCC layout for bitfields when we need
714 to pack the record type or have a representation clause. The other
715 possible layout (Microsoft C compiler), if available, would prevent
716 efficient packing in almost all cases. */
717 #ifdef TARGET_MS_BITFIELD_LAYOUT
718 if (TARGET_MS_BITFIELD_LAYOUT && TYPE_PACKED (record_type))
719 decl_attributes (&record_type,
720 tree_cons (get_identifier ("gcc_struct"),
721 NULL_TREE, NULL_TREE),
722 ATTR_FLAG_TYPE_IN_PLACE);
725 layout_type (record_type);
728 /* At this point, the position and size of each field is known. It was
729 either set before entry by a rep clause, or by laying out the type above.
731 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
732 to compute the Ada size; the GCC size and alignment (for rep'ed records
733 that are not padding types); and the mode (for rep'ed records). We also
734 clear the DECL_BIT_FIELD indication for the cases we know have not been
735 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
737 if (code == QUAL_UNION_TYPE)
738 field_list = nreverse (field_list);
740 for (field = field_list; field; field = DECL_CHAIN (field))
742 tree type = TREE_TYPE (field);
743 tree pos = bit_position (field);
744 tree this_size = DECL_SIZE (field);
747 if (RECORD_OR_UNION_TYPE_P (type)
748 && !TYPE_FAT_POINTER_P (type)
749 && !TYPE_CONTAINS_TEMPLATE_P (type)
750 && TYPE_ADA_SIZE (type))
751 this_ada_size = TYPE_ADA_SIZE (type);
753 this_ada_size = this_size;
755 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
756 if (DECL_BIT_FIELD (field)
757 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
759 unsigned int align = TYPE_ALIGN (type);
761 /* In the general case, type alignment is required. */
762 if (value_factor_p (pos, align))
764 /* The enclosing record type must be sufficiently aligned.
765 Otherwise, if no alignment was specified for it and it
766 has been laid out already, bump its alignment to the
767 desired one if this is compatible with its size. */
768 if (TYPE_ALIGN (record_type) >= align)
770 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
771 DECL_BIT_FIELD (field) = 0;
775 && value_factor_p (TYPE_SIZE (record_type), align))
777 TYPE_ALIGN (record_type) = align;
778 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
779 DECL_BIT_FIELD (field) = 0;
783 /* In the non-strict alignment case, only byte alignment is. */
784 if (!STRICT_ALIGNMENT
785 && DECL_BIT_FIELD (field)
786 && value_factor_p (pos, BITS_PER_UNIT))
787 DECL_BIT_FIELD (field) = 0;
790 /* If we still have DECL_BIT_FIELD set at this point, we know that the
791 field is technically not addressable. Except that it can actually
792 be addressed if it is BLKmode and happens to be properly aligned. */
793 if (DECL_BIT_FIELD (field)
794 && !(DECL_MODE (field) == BLKmode
795 && value_factor_p (pos, BITS_PER_UNIT)))
796 DECL_NONADDRESSABLE_P (field) = 1;
798 /* A type must be as aligned as its most aligned field that is not
799 a bit-field. But this is already enforced by layout_type. */
800 if (rep_level > 0 && !DECL_BIT_FIELD (field))
801 TYPE_ALIGN (record_type)
802 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
807 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
808 size = size_binop (MAX_EXPR, size, this_size);
811 case QUAL_UNION_TYPE:
813 = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
814 this_ada_size, ada_size);
815 size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
820 /* Since we know here that all fields are sorted in order of
821 increasing bit position, the size of the record is one
822 higher than the ending bit of the last field processed
823 unless we have a rep clause, since in that case we might
824 have a field outside a QUAL_UNION_TYPE that has a higher ending
825 position. So use a MAX in that case. Also, if this field is a
826 QUAL_UNION_TYPE, we need to take into account the previous size in
827 the case of empty variants. */
829 = merge_sizes (ada_size, pos, this_ada_size,
830 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
832 = merge_sizes (size, pos, this_size,
833 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
841 if (code == QUAL_UNION_TYPE)
842 nreverse (field_list);
846 /* If this is a padding record, we never want to make the size smaller
847 than what was specified in it, if any. */
848 if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
849 size = TYPE_SIZE (record_type);
851 /* Now set any of the values we've just computed that apply. */
852 if (!TYPE_FAT_POINTER_P (record_type)
853 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
854 SET_TYPE_ADA_SIZE (record_type, ada_size);
858 tree size_unit = had_size_unit
859 ? TYPE_SIZE_UNIT (record_type)
861 size_binop (CEIL_DIV_EXPR, size,
863 unsigned int align = TYPE_ALIGN (record_type);
865 TYPE_SIZE (record_type) = variable_size (round_up (size, align));
866 TYPE_SIZE_UNIT (record_type)
867 = variable_size (round_up (size_unit, align / BITS_PER_UNIT));
869 compute_record_mode (record_type);
874 rest_of_record_type_compilation (record_type);
877 /* Wrap up compilation of RECORD_TYPE, i.e. output all the debug information
878 associated with it. It need not be invoked directly in most cases since
879 finish_record_type takes care of doing so, but this can be necessary if
880 a parallel type is to be attached to the record type. */
883 rest_of_record_type_compilation (tree record_type)
885 tree field_list = TYPE_FIELDS (record_type);
887 enum tree_code code = TREE_CODE (record_type);
888 bool var_size = false;
890 for (field = field_list; field; field = DECL_CHAIN (field))
892 /* We need to make an XVE/XVU record if any field has variable size,
893 whether or not the record does. For example, if we have a union,
894 it may be that all fields, rounded up to the alignment, have the
895 same size, in which case we'll use that size. But the debug
896 output routines (except Dwarf2) won't be able to output the fields,
897 so we need to make the special record. */
898 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
899 /* If a field has a non-constant qualifier, the record will have
900 variable size too. */
901 || (code == QUAL_UNION_TYPE
902 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
909 /* If this record is of variable size, rename it so that the
910 debugger knows it is and make a new, parallel, record
911 that tells the debugger how the record is laid out. See
912 exp_dbug.ads. But don't do this for records that are padding
913 since they confuse GDB. */
914 if (var_size && !TYPE_IS_PADDING_P (record_type))
917 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
918 ? UNION_TYPE : TREE_CODE (record_type));
919 tree orig_name = TYPE_NAME (record_type), new_name;
920 tree last_pos = bitsize_zero_node;
921 tree old_field, prev_old_field = NULL_TREE;
923 if (TREE_CODE (orig_name) == TYPE_DECL)
924 orig_name = DECL_NAME (orig_name);
927 = concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
929 TYPE_NAME (new_record_type) = new_name;
930 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
931 TYPE_STUB_DECL (new_record_type)
932 = create_type_stub_decl (new_name, new_record_type);
933 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
934 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
935 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
936 TYPE_SIZE_UNIT (new_record_type)
937 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
939 /* Now scan all the fields, replacing each field with a new
940 field corresponding to the new encoding. */
941 for (old_field = TYPE_FIELDS (record_type); old_field;
942 old_field = DECL_CHAIN (old_field))
944 tree field_type = TREE_TYPE (old_field);
945 tree field_name = DECL_NAME (old_field);
947 tree curpos = bit_position (old_field);
949 unsigned int align = 0;
952 /* See how the position was modified from the last position.
954 There are two basic cases we support: a value was added
955 to the last position or the last position was rounded to
956 a boundary and they something was added. Check for the
957 first case first. If not, see if there is any evidence
958 of rounding. If so, round the last position and try
961 If this is a union, the position can be taken as zero. */
963 /* Some computations depend on the shape of the position expression,
964 so strip conversions to make sure it's exposed. */
965 curpos = remove_conversions (curpos, true);
967 if (TREE_CODE (new_record_type) == UNION_TYPE)
968 pos = bitsize_zero_node, align = 0;
970 pos = compute_related_constant (curpos, last_pos);
972 if (!pos && TREE_CODE (curpos) == MULT_EXPR
973 && host_integerp (TREE_OPERAND (curpos, 1), 1))
975 tree offset = TREE_OPERAND (curpos, 0);
976 align = tree_low_cst (TREE_OPERAND (curpos, 1), 1);
978 /* An offset which is a bitwise AND with a negative power of 2
979 means an alignment corresponding to this power of 2. Note
980 that, as sizetype is sign-extended but nonetheless unsigned,
981 we don't directly use tree_int_cst_sgn. */
982 offset = remove_conversions (offset, true);
983 if (TREE_CODE (offset) == BIT_AND_EXPR
984 && host_integerp (TREE_OPERAND (offset, 1), 0)
985 && TREE_INT_CST_HIGH (TREE_OPERAND (offset, 1)) < 0)
988 = - tree_low_cst (TREE_OPERAND (offset, 1), 0);
989 if (exact_log2 (pow) > 0)
993 pos = compute_related_constant (curpos,
994 round_up (last_pos, align));
996 else if (!pos && TREE_CODE (curpos) == PLUS_EXPR
997 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
998 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
999 && host_integerp (TREE_OPERAND
1000 (TREE_OPERAND (curpos, 0), 1),
1005 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
1006 pos = compute_related_constant (curpos,
1007 round_up (last_pos, align));
1009 else if (potential_alignment_gap (prev_old_field, old_field,
1012 align = TYPE_ALIGN (field_type);
1013 pos = compute_related_constant (curpos,
1014 round_up (last_pos, align));
1017 /* If we can't compute a position, set it to zero.
1019 ??? We really should abort here, but it's too much work
1020 to get this correct for all cases. */
1023 pos = bitsize_zero_node;
1025 /* See if this type is variable-sized and make a pointer type
1026 and indicate the indirection if so. Beware that the debug
1027 back-end may adjust the position computed above according
1028 to the alignment of the field type, i.e. the pointer type
1029 in this case, if we don't preventively counter that. */
1030 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
1032 field_type = build_pointer_type (field_type);
1033 if (align != 0 && TYPE_ALIGN (field_type) > align)
1035 field_type = copy_node (field_type);
1036 TYPE_ALIGN (field_type) = align;
1041 /* Make a new field name, if necessary. */
1042 if (var || align != 0)
1047 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
1048 align / BITS_PER_UNIT);
1050 strcpy (suffix, "XVL");
1052 field_name = concat_name (field_name, suffix);
1056 = create_field_decl (field_name, field_type, new_record_type,
1057 DECL_SIZE (old_field), pos, 0, 0);
1058 DECL_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
1059 TYPE_FIELDS (new_record_type) = new_field;
1061 /* If old_field is a QUAL_UNION_TYPE, take its size as being
1062 zero. The only time it's not the last field of the record
1063 is when there are other components at fixed positions after
1064 it (meaning there was a rep clause for every field) and we
1065 want to be able to encode them. */
1066 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
1067 (TREE_CODE (TREE_TYPE (old_field))
1070 : DECL_SIZE (old_field));
1071 prev_old_field = old_field;
1074 TYPE_FIELDS (new_record_type)
1075 = nreverse (TYPE_FIELDS (new_record_type));
1077 /* We used to explicitly invoke rest_of_type_decl_compilation on the
1078 parallel type for the sake of STABS. We don't do it any more, so
1079 as to ensure that the parallel type be processed after the type
1080 by the debug back-end and, thus, prevent it from interfering with
1081 the processing of a recursive type. */
1082 add_parallel_type (TYPE_STUB_DECL (record_type), new_record_type);
1085 rest_of_type_decl_compilation (TYPE_STUB_DECL (record_type));
1088 /* Append PARALLEL_TYPE on the chain of parallel types for decl. */
1091 add_parallel_type (tree decl, tree parallel_type)
/* NOTE(review): the chain walker below uses a variable `d'; its declaration
   is elided in this chunk but presumably starts at DECL -- confirm against
   the full source.  */
/* Advance to the last entry already on the parallel-type chain so the new
   type is appended at the end, not spliced into the middle.  */
1095 while (DECL_PARALLEL_TYPE (d))
1096 d = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (d));
1098 SET_DECL_PARALLEL_TYPE (d, parallel_type);
1101 /* Utility function of above to merge LAST_SIZE, the previous size of a record
1102 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
1103 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
1104 replace a value of zero with the old size. If HAS_REP is true, we take the
1105 MAX of the end position of this field with LAST_SIZE. In all other cases,
1106 we use FIRST_BIT plus SIZE. Return an expression for the size. */
1109 merge_sizes (tree last_size, tree first_bit, tree size, bool special,
1112 tree type = TREE_TYPE (last_size);
/* Ordinary field: new size is FIRST_BIT + SIZE; with a rep clause we take
   the MAX with the size accumulated so far (selection between the two
   results is in lines elided from this chunk).  */
1115 if (!special || TREE_CODE (size) != COND_EXPR)
1117 new_size = size_binop (PLUS_EXPR, first_bit, size);
1119 new_size = size_binop (MAX_EXPR, last_size, new_size);
/* QUAL_UNION_TYPE variant: rebuild the COND_EXPR, recursing into each arm
   and substituting LAST_SIZE for any arm whose size is zero.  */
1123 new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
1124 integer_zerop (TREE_OPERAND (size, 1))
1125 ? last_size : merge_sizes (last_size, first_bit,
1126 TREE_OPERAND (size, 1),
1128 integer_zerop (TREE_OPERAND (size, 2))
1129 ? last_size : merge_sizes (last_size, first_bit,
1130 TREE_OPERAND (size, 2),
1133 /* We don't need any NON_VALUE_EXPRs and they can confuse us (especially
1134 when fed through substitute_in_expr) into thinking that a constant
1135 size is not constant. */
1136 while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
1137 new_size = TREE_OPERAND (new_size, 0);
1142 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1143 related by the addition of a constant. Return that constant if so. */
1146 compute_related_constant (tree op0, tree op1)
1148 tree op0_var, op1_var;
/* Split each operand into constant + variable parts, then subtract the
   constant parts; if the variable parts match, the difference is the
   sought constant.  The return statements themselves are elided here.  */
1149 tree op0_con = split_plus (op0, &op0_var);
1150 tree op1_con = split_plus (op1, &op1_var);
1151 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1153 if (operand_equal_p (op0_var, op1_var, 0))
1155 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1161 /* Utility function of above to split a tree OP which may be a sum, into a
1162 constant part, which is returned, and a variable part, which is stored
1163 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1167 split_plus (tree in, tree *pvar)
1169 /* Strip conversions in order to ease the tree traversal and maximize the
1170 potential for constant or plus/minus discovery. We need to be careful
1171 to always return and set *pvar to bitsizetype trees, but it's worth
1173 in = remove_conversions (in, false);
/* Default: the whole expression is the variable part, no constant part.  */
1175 *pvar = convert (bitsizetype, in);
1177 if (TREE_CODE (in) == INTEGER_CST)
1179 *pvar = bitsize_zero_node;
1180 return convert (bitsizetype, in);
1182 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1184 tree lhs_var, rhs_var;
/* Recurse on both operands; if neither side yielded a constant part,
   keep the default *pvar set above and report a zero constant.  */
1185 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1186 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
1188 if (lhs_var == TREE_OPERAND (in, 0)
1189 && rhs_var == TREE_OPERAND (in, 1))
1190 return bitsize_zero_node;
/* Recombine the variable parts with the original PLUS/MINUS code and
   fold the constant parts the same way.  */
1192 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1193 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
1196 return bitsize_zero_node;
1199 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1200 subprogram. If it is VOID_TYPE, then we are dealing with a procedure,
1201 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1202 PARM_DECL nodes that are the subprogram parameters. CICO_LIST is the
1203 copy-in/copy-out list to be stored into the TYPE_CICO_LIST field.
1204 RETURN_UNCONSTRAINED_P is true if the function returns an unconstrained
1205 object. RETURN_BY_DIRECT_REF_P is true if the function returns by direct
1206 reference. RETURN_BY_INVISI_REF_P is true if the function returns by
1207 invisible reference. */
1210 create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
1211 bool return_unconstrained_p, bool return_by_direct_ref_p,
1212 bool return_by_invisi_ref_p)
1214 /* A list of the data type nodes of the subprogram formal parameters.
1215 This list is generated by traversing the input list of PARM_DECL
1217 VEC(tree,gc) *param_type_list = NULL;
1220 for (t = param_decl_list; t; t = DECL_CHAIN (t))
1221 VEC_safe_push (tree, gc, param_type_list, TREE_TYPE (t));
1223 type = build_function_type_vec (return_type, param_type_list);
1225 /* TYPE may have been shared since GCC hashes types. If it has a different
1226 CICO_LIST, make a copy. Likewise for the various flags. */
1227 if (!fntype_same_flags_p (type, cico_list, return_unconstrained_p,
1228 return_by_direct_ref_p, return_by_invisi_ref_p))
1230 type = copy_type (type);
1231 TYPE_CI_CO_LIST (type) = cico_list;
1232 TYPE_RETURN_UNCONSTRAINED_P (type) = return_unconstrained_p;
1233 TYPE_RETURN_BY_DIRECT_REF_P (type) = return_by_direct_ref_p;
/* Return-by-invisible-reference is encoded via TREE_ADDRESSABLE on the
   FUNCTION_TYPE node.  */
1234 TREE_ADDRESSABLE (type) = return_by_invisi_ref_p;
1240 /* Return a copy of TYPE but safe to modify in any way. */
1243 copy_type (tree type)
1245 tree new_type = copy_node (type);
1247 /* Unshare the language-specific data. */
1248 if (TYPE_LANG_SPECIFIC (type))
1250 TYPE_LANG_SPECIFIC (new_type) = NULL;
1251 SET_TYPE_LANG_SPECIFIC (new_type, GET_TYPE_LANG_SPECIFIC (type));
1254 /* And the contents of the language-specific slot if needed. */
1255 if ((INTEGRAL_TYPE_P (type) || TREE_CODE (type) == REAL_TYPE)
1256 && TYPE_RM_VALUES (type))
/* Clear first so the SET_* macros allocate a fresh RM-values vector
   instead of writing through the shared one.  */
1258 TYPE_RM_VALUES (new_type) = NULL_TREE;
1259 SET_TYPE_RM_SIZE (new_type, TYPE_RM_SIZE (type));
1260 SET_TYPE_RM_MIN_VALUE (new_type, TYPE_RM_MIN_VALUE (type));
1261 SET_TYPE_RM_MAX_VALUE (new_type, TYPE_RM_MAX_VALUE (type));
1264 /* copy_node clears this field instead of copying it, because it is
1265 aliased with TREE_CHAIN. */
1266 TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);
/* Detach the copy from the original's pointer/variant bookkeeping so it
   can be modified independently.  */
1268 TYPE_POINTER_TO (new_type) = 0;
1269 TYPE_REFERENCE_TO (new_type) = 0;
1270 TYPE_MAIN_VARIANT (new_type) = new_type;
1271 TYPE_NEXT_VARIANT (new_type) = 0;
1276 /* Return a subtype of sizetype with range MIN to MAX and whose
1277 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
1278 of the associated TYPE_DECL. */
1281 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
1283 /* First build a type for the desired range. */
1284 tree type = build_nonshared_range_type (sizetype, min, max);
1286 /* Then set the index type. */
1287 SET_TYPE_INDEX_TYPE (type, index);
/* Attach an anonymous, artificial TYPE_DECL so the debugger sees it.  */
1288 create_type_decl (NULL_TREE, type, NULL, true, false, gnat_node);
1293 /* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
1294 sizetype is used. */
1297 create_range_type (tree type, tree min, tree max)
1301 if (type == NULL_TREE)
1304 /* First build a type with the base range. */
1305 range_type = build_nonshared_range_type (type, TYPE_MIN_VALUE (type),
1306 TYPE_MAX_VALUE (type));
1308 /* Then set the actual range. */
/* The RM (Reference Manual) bounds carry the subtype's actual range,
   distinct from the base-range bounds set just above.  */
1309 SET_TYPE_RM_MIN_VALUE (range_type, convert (type, min));
1310 SET_TYPE_RM_MAX_VALUE (range_type, convert (type, max));
1315 /* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of a type.
1316 TYPE_NAME gives the name of the type and TYPE is a ..._TYPE node giving
1320 create_type_stub_decl (tree type_name, tree type)
1322 /* Using a named TYPE_DECL ensures that a type name marker is emitted in
1323 STABS while setting DECL_ARTIFICIAL ensures that no DW_TAG_typedef is
1324 emitted in DWARF. */
1325 tree type_decl = build_decl (input_location,
1326 TYPE_DECL, type_name, type);
1327 DECL_ARTIFICIAL (type_decl) = 1;
1328 TYPE_ARTIFICIAL (type) = 1;
1332 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type and TYPE
1333 is a ..._TYPE node giving its data type. ARTIFICIAL_P is true if this
1334 is a declaration that was generated by the compiler. DEBUG_INFO_P is
1335 true if we need to write debug information about this type. GNAT_NODE
1336 is used for the position of the decl. */
1339 create_type_decl (tree type_name, tree type, struct attrib *attr_list,
1340 bool artificial_p, bool debug_info_p, Node_Id gnat_node)
1342 enum tree_code code = TREE_CODE (type);
/* NAMED is true when the type already carries a TYPE_DECL name; in that
   case we must not hijack its TYPE_STUB_DECL below.  */
1343 bool named = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
1346 /* Only the builtin TYPE_STUB_DECL should be used for dummy types. */
1347 gcc_assert (!TYPE_IS_DUMMY_P (type));
1349 /* If the type hasn't been named yet, we're naming it; preserve an existing
1350 TYPE_STUB_DECL that has been attached to it for some purpose. */
1351 if (!named && TYPE_STUB_DECL (type))
1353 type_decl = TYPE_STUB_DECL (type);
1354 DECL_NAME (type_decl) = type_name;
1357 type_decl = build_decl (input_location,
1358 TYPE_DECL, type_name, type);
1360 DECL_ARTIFICIAL (type_decl) = artificial_p;
1361 TYPE_ARTIFICIAL (type) = artificial_p;
1363 /* Add this decl to the current binding level. */
1364 gnat_pushdecl (type_decl, gnat_node);
1366 process_attributes (type_decl, attr_list);
1368 /* If we're naming the type, equate the TYPE_STUB_DECL to the name.
1369 This causes the name to be also viewed as a "tag" by the debug
1370 back-end, with the advantage that no DW_TAG_typedef is emitted
1371 for artificial "tagged" types in DWARF. */
1373 TYPE_STUB_DECL (type) = type_decl;
1375 /* Pass the type declaration to the debug back-end unless this is an
1376 UNCONSTRAINED_ARRAY_TYPE that the back-end does not support, or a
1377 type for which debugging information was not requested, or else an
1378 ENUMERAL_TYPE or RECORD_TYPE (except for fat pointers) which are
1379 handled separately. And do not pass dummy types either. */
1380 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
1381 DECL_IGNORED_P (type_decl) = 1;
1382 else if (code != ENUMERAL_TYPE
1383 && (code != RECORD_TYPE || TYPE_FAT_POINTER_P (type))
1384 && !((code == POINTER_TYPE || code == REFERENCE_TYPE)
1385 && TYPE_IS_DUMMY_P (TREE_TYPE (type)))
1386 && !(code == RECORD_TYPE
1388 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))))))
1389 rest_of_type_decl_compilation (type_decl);
1394 /* Return a VAR_DECL or CONST_DECL node.
1396 VAR_NAME gives the name of the variable. ASM_NAME is its assembler name
1397 (if provided). TYPE is its data type (a GCC ..._TYPE node). VAR_INIT is
1398 the GCC tree for an optional initial expression; NULL_TREE if none.
1400 CONST_FLAG is true if this variable is constant, in which case we might
1401 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
1403 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
1404 definition to be made visible outside of the current compilation unit, for
1405 instance variable definitions in a package specification.
1407 EXTERN_FLAG is true when processing an external variable declaration (as
1408 opposed to a definition: no storage is to be allocated for the variable).
1410 STATIC_FLAG is only relevant when not at top level. In that case
1411 it indicates whether to always allocate storage to the variable.
1413 GNAT_NODE is used for the position of the decl. */
1416 create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
1417 bool const_flag, bool public_flag, bool extern_flag,
1418 bool static_flag, bool const_decl_allowed_p,
1419 struct attrib *attr_list, Node_Id gnat_node)
1421 /* Whether the initializer is a constant initializer. At the global level
1422 or for an external object or an object to be allocated in static memory,
1423 we check that it is a valid constant expression for use in initializing
1424 a static variable; otherwise, we only check that it is constant. */
1427 && gnat_types_compatible_p (type, TREE_TYPE (var_init))
1428 && (global_bindings_p () || extern_flag || static_flag
1429 ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init)) != 0
1430 : TREE_CONSTANT (var_init)));
1432 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
1433 case the initializer may be used in-lieu of the DECL node (as done in
1434 Identifier_to_gnu). This is useful to prevent the need of elaboration
1435 code when an identifier for which such a decl is made is in turn used as
1436 an initializer. We used to rely on CONST vs VAR_DECL for this purpose,
1437 but extra constraints apply to this choice (see below) and are not
1438 relevant to the distinction we wish to make. */
1439 bool constant_p = const_flag && init_const;
1441 /* The actual DECL node. CONST_DECL was initially intended for enumerals
1442 and may be used for scalars in general but not for aggregates. */
1444 = build_decl (input_location,
1445 (constant_p && const_decl_allowed_p
1446 && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
1449 /* If this is external, throw away any initializations (they will be done
1450 elsewhere) unless this is a constant for which we would like to remain
1451 able to get the initializer. If we are defining a global here, leave a
1452 constant initialization and save any variable elaborations for the
1453 elaboration routine. If we are just annotating types, throw away the
1454 initialization if it isn't a constant. */
1455 if ((extern_flag && !constant_p)
1456 || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
1457 var_init = NULL_TREE;
1459 /* At the global level, an initializer requiring code to be generated
1460 produces elaboration statements. Check that such statements are allowed,
1461 that is, not violating a No_Elaboration_Code restriction. */
1462 if (global_bindings_p () && var_init != 0 && !init_const)
1463 Check_Elaboration_Code_Allowed (gnat_node);
1465 DECL_INITIAL (var_decl) = var_init;
1466 TREE_READONLY (var_decl) = const_flag;
1467 DECL_EXTERNAL (var_decl) = extern_flag;
1468 TREE_PUBLIC (var_decl) = public_flag || extern_flag;
1469 TREE_CONSTANT (var_decl) = constant_p;
1470 TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
1471 = TYPE_VOLATILE (type);
1473 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
1474 try to fiddle with DECL_COMMON. However, on platforms that don't
1475 support global BSS sections, uninitialized global variables would
1476 go in DATA instead, thus increasing the size of the executable. */
1478 && TREE_CODE (var_decl) == VAR_DECL
1479 && TREE_PUBLIC (var_decl)
1480 && !have_global_bss_p ())
1481 DECL_COMMON (var_decl) = 1;
1483 /* At the global binding level, we need to allocate static storage for the
1484 variable if it isn't external. Otherwise, we allocate automatic storage
1485 unless requested not to. */
1486 TREE_STATIC (var_decl)
1487 = !extern_flag && (static_flag || global_bindings_p ());
1489 /* For an external constant whose initializer is not absolute, do not emit
1490 debug info. In DWARF this would mean a global relocation in a read-only
1491 section which runs afoul of the PE-COFF run-time relocation mechanism. */
1495 && initializer_constant_valid_p (var_init, TREE_TYPE (var_init))
1496 != null_pointer_node)
1497 DECL_IGNORED_P (var_decl) = 1;
1499 /* Add this decl to the current binding level. */
1500 gnat_pushdecl (var_decl, gnat_node);
1502 if (TREE_SIDE_EFFECTS (var_decl))
1503 TREE_ADDRESSABLE (var_decl) = 1;
/* CONST_DECLs have no assembler name, attributes or RTL expansion, hence
   the VAR_DECL guard here.  */
1505 if (TREE_CODE (var_decl) == VAR_DECL)
1508 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
1509 process_attributes (var_decl, attr_list);
1510 if (global_bindings_p ())
1511 rest_of_decl_compilation (var_decl, true, 0);
1514 expand_decl (var_decl);
1519 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
1522 aggregate_type_contains_array_p (tree type)
1524 switch (TREE_CODE (type))
1528 case QUAL_UNION_TYPE:
/* Recurse into each aggregate field of the record/union looking for an
   array; the ARRAY_TYPE case and default are elided in this chunk.  */
1531 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
1532 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
1533 && aggregate_type_contains_array_p (TREE_TYPE (field)))
1546 /* Return a FIELD_DECL node. FIELD_NAME is the field's name, FIELD_TYPE is
1547 its type and RECORD_TYPE is the type of the enclosing record. If SIZE is
1548 nonzero, it is the specified size of the field. If POS is nonzero, it is
1549 the bit position. PACKED is 1 if the enclosing record is packed, -1 if it
1550 has Component_Alignment of Storage_Unit. If ADDRESSABLE is nonzero, it
1551 means we are allowed to take the address of the field; if it is negative,
1552 we should not make a bitfield, which is used by make_aligning_type. */
1555 create_field_decl (tree field_name, tree field_type, tree record_type,
1556 tree size, tree pos, int packed, int addressable)
1558 tree field_decl = build_decl (input_location,
1559 FIELD_DECL, field_name, field_type);
1561 DECL_CONTEXT (field_decl) = record_type;
1562 TREE_READONLY (field_decl) = TYPE_READONLY (field_type);
1564 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
1565 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
1566 Likewise for an aggregate without specified position that contains an
1567 array, because in this case slices of variable length of this array
1568 must be handled by GCC and variable-sized objects need to be aligned
1569 to at least a byte boundary. */
1570 if (packed && (TYPE_MODE (field_type) == BLKmode
1572 && AGGREGATE_TYPE_P (field_type)
1573 && aggregate_type_contains_array_p (field_type))))
1574 DECL_ALIGN (field_decl) = BITS_PER_UNIT;
1576 /* If a size is specified, use it. Otherwise, if the record type is packed
1577 compute a size to use, which may differ from the object's natural size.
1578 We always set a size in this case to trigger the checks for bitfield
1579 creation below, which is typically required when no position has been
1582 size = convert (bitsizetype, size);
1583 else if (packed == 1)
1585 size = rm_size (field_type);
/* BLKmode fields cannot be smaller than a byte, so round up.  */
1586 if (TYPE_MODE (field_type) == BLKmode)
1587 size = round_up (size, BITS_PER_UNIT);
1590 /* If we may, according to ADDRESSABLE, make a bitfield if a size is
1591 specified for two reasons: first if the size differs from the natural
1592 size. Second, if the alignment is insufficient. There are a number of
1593 ways the latter can be true.
1595 We never make a bitfield if the type of the field has a nonconstant size,
1596 because no such entity requiring bitfield operations should reach here.
1598 We do *preventively* make a bitfield when there might be the need for it
1599 but we don't have all the necessary information to decide, as is the case
1600 of a field with no specified position in a packed record.
1602 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
1603 in layout_decl or finish_record_type to clear the bit_field indication if
1604 it is in fact not needed. */
1605 if (addressable >= 0
1607 && TREE_CODE (size) == INTEGER_CST
1608 && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
1609 && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
1610 || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
1612 || (TYPE_ALIGN (record_type) != 0
1613 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
1615 DECL_BIT_FIELD (field_decl) = 1;
1616 DECL_SIZE (field_decl) = size;
1617 if (!packed && !pos)
1619 if (TYPE_ALIGN (record_type) != 0
1620 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))
1621 DECL_ALIGN (field_decl) = TYPE_ALIGN (record_type);
1623 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
/* With a user position, DECL_PACKED tracks the bitfield decision made
   above; otherwise it simply reflects the record's packedness.  */
1627 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
1629 /* Bump the alignment if need be, either for bitfield/packing purposes or
1630 to satisfy the type requirements if no such consideration applies. When
1631 we get the alignment from the type, indicate if this is from an explicit
1632 user request, which prevents stor-layout from lowering it later on. */
1634 unsigned int bit_align
1635 = (DECL_BIT_FIELD (field_decl) ? 1
1636 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT : 0);
1638 if (bit_align > DECL_ALIGN (field_decl))
1639 DECL_ALIGN (field_decl) = bit_align;
1640 else if (!bit_align && TYPE_ALIGN (field_type) > DECL_ALIGN (field_decl))
1642 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
1643 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (field_type);
1649 /* We need to pass in the alignment the DECL is known to have.
1650 This is the lowest-order bit set in POS, but no more than
1651 the alignment of the record, if one is specified. Note
1652 that an alignment of 0 is taken as infinite. */
1653 unsigned int known_align;
1655 if (host_integerp (pos, 1))
1656 known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
1658 known_align = BITS_PER_UNIT;
1660 if (TYPE_ALIGN (record_type)
1661 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
1662 known_align = TYPE_ALIGN (record_type);
1664 layout_decl (field_decl, known_align);
1665 SET_DECL_OFFSET_ALIGN (field_decl,
1666 host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
1668 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
1669 &DECL_FIELD_BIT_OFFSET (field_decl),
1670 DECL_OFFSET_ALIGN (field_decl), pos);
1673 /* In addition to what our caller says, claim the field is addressable if we
1674 know that its type is not suitable.
1676 The field may also be "technically" nonaddressable, meaning that even if
1677 we attempt to take the field's address we will actually get the address
1678 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
1679 value we have at this point is not accurate enough, so we don't account
1680 for this here and let finish_record_type decide. */
1681 if (!addressable && !type_for_nonaliased_component_p (field_type))
1684 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
1689 /* Return a PARM_DECL node. PARAM_NAME is the name of the parameter and
1690 PARAM_TYPE is its type. READONLY is true if the parameter is readonly
1691 (either an In parameter or an address of a pass-by-ref parameter). */
1694 create_param_decl (tree param_name, tree param_type, bool readonly)
1696 tree param_decl = build_decl (input_location,
1697 PARM_DECL, param_name, param_type);
1699 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
1700 can lead to various ABI violations. */
1701 if (targetm.calls.promote_prototypes (NULL_TREE)
1702 && INTEGRAL_TYPE_P (param_type)
1703 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
1705 /* We have to be careful about biased types here. Make a subtype
1706 of integer_type_node with the proper biasing. */
1707 if (TREE_CODE (param_type) == INTEGER_TYPE
1708 && TYPE_BIASED_REPRESENTATION_P (param_type))
1711 = make_unsigned_type (TYPE_PRECISION (integer_type_node));
1712 TREE_TYPE (subtype) = integer_type_node;
1713 TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
1714 SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (param_type));
1715 SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (param_type));
1716 param_type = subtype;
/* Non-biased narrow integral parameters promote straight to int.  */
1719 param_type = integer_type_node;
/* DECL_ARG_TYPE is the type used when passing the argument, which may
   differ from TREE_TYPE after the promotion above.  */
1722 DECL_ARG_TYPE (param_decl) = param_type;
1723 TREE_READONLY (param_decl) = readonly;
1727 /* Given a DECL and ATTR_LIST, process the listed attributes. */
1730 process_attributes (tree decl, struct attrib *attr_list)
1732 for (; attr_list; attr_list = attr_list->next)
1733 switch (attr_list->type)
1735 case ATTR_MACHINE_ATTRIBUTE:
/* Set input_location so diagnostics from decl_attributes point at the
   declaration being decorated.  */
1736 input_location = DECL_SOURCE_LOCATION (decl);
1737 decl_attributes (&decl, tree_cons (attr_list->name, attr_list->args,
1739 ATTR_FLAG_TYPE_IN_PLACE);
1742 case ATTR_LINK_ALIAS:
1743 if (! DECL_EXTERNAL (decl))
1745 TREE_STATIC (decl) = 1;
1746 assemble_alias (decl, attr_list->name);
1750 case ATTR_WEAK_EXTERNAL:
/* NOTE(review): the guard testing target support for weak symbols is
   elided from this chunk; declare_weak runs only when supported, else
   the error below is posted.  */
1752 declare_weak (decl);
1754 post_error ("?weak declarations not supported on this target",
1755 attr_list->error_point);
1758 case ATTR_LINK_SECTION:
1759 if (targetm_common.have_named_sections)
1761 DECL_SECTION_NAME (decl)
1762 = build_string (IDENTIFIER_LENGTH (attr_list->name),
1763 IDENTIFIER_POINTER (attr_list->name));
1764 DECL_COMMON (decl) = 0;
1767 post_error ("?section attributes are not supported for this target",
1768 attr_list->error_point);
1771 case ATTR_LINK_CONSTRUCTOR:
1772 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1773 TREE_USED (decl) = 1;
1776 case ATTR_LINK_DESTRUCTOR:
1777 DECL_STATIC_DESTRUCTOR (decl) = 1;
1778 TREE_USED (decl) = 1;
1781 case ATTR_THREAD_LOCAL_STORAGE:
1782 DECL_TLS_MODEL (decl) = decl_default_tls_model (decl);
1783 DECL_COMMON (decl) = 0;
1788 /* Record DECL as a global renaming pointer. */
1791 record_global_renaming_pointer (tree decl)
/* DECL must carry a renamed object and must not be a loop parameter.  */
1793 gcc_assert (!DECL_LOOP_PARM_P (decl) && DECL_RENAMED_OBJECT (decl));
1794 VEC_safe_push (tree, gc, global_renaming_pointers, decl);
1797 /* Invalidate the global renaming pointers. */
1800 invalidate_global_renaming_pointers (void)
/* Clear every recorded renaming, then release the vector itself.  */
1805 FOR_EACH_VEC_ELT (tree, global_renaming_pointers, i, iter)
1806 SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);
1808 VEC_free (tree, gc, global_renaming_pointers);
1811 /* Return true if VALUE is a known to be a multiple of FACTOR, which must be
1815 value_factor_p (tree value, HOST_WIDE_INT factor)
1817 if (host_integerp (value, 1))
1818 return tree_low_cst (value, 1) % factor == 0;
/* A product is a multiple of FACTOR if either operand is.  */
1820 if (TREE_CODE (value) == MULT_EXPR)
1821 return (value_factor_p (TREE_OPERAND (value, 0), factor)
1822 || value_factor_p (TREE_OPERAND (value, 1), factor));
1827 /* Given two consecutive field decls PREV_FIELD and CURR_FIELD, return true
1828 unless we can prove these 2 fields are laid out in such a way that no gap
1829 exist between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
1830 is the distance in bits between the end of PREV_FIELD and the starting
1831 position of CURR_FIELD. It is ignored if null. */
1834 potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
1836 /* If this is the first field of the record, there cannot be any gap */
1840 /* If the previous field is a union type, then return False: The only
1841 time when such a field is not the last field of the record is when
1842 there are other components at fixed positions after it (meaning there
1843 was a rep clause for every field), in which case we don't want the
1844 alignment constraint to override them. */
1845 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
1848 /* If the distance between the end of prev_field and the beginning of
1849 curr_field is constant, then there is a gap if the value of this
1850 constant is not null. */
1851 if (offset && host_integerp (offset, 1))
1852 return !integer_zerop (offset);
1854 /* If the size and position of the previous field are constant,
1855 then check the sum of this size and position. There will be a gap
1856 iff it is not multiple of the current field alignment. */
1857 if (host_integerp (DECL_SIZE (prev_field), 1)
1858 && host_integerp (bit_position (prev_field), 1))
1859 return ((tree_low_cst (bit_position (prev_field), 1)
1860 + tree_low_cst (DECL_SIZE (prev_field), 1))
1861 % DECL_ALIGN (curr_field) != 0);
1863 /* If both the position and size of the previous field are multiples
1864 of the current field alignment, there cannot be any gap. */
1865 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
1866 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
1869 /* Fallback, return that there may be a potential gap */
1873 /* Return a LABEL_DECL with LABEL_NAME. GNAT_NODE is used for the position
1877 create_label_decl (tree label_name, Node_Id gnat_node)
1880 = build_decl (input_location, LABEL_DECL, label_name, void_type_node);
/* Labels carry no value, hence VOIDmode.  */
1882 DECL_MODE (label_decl) = VOIDmode;
1884 /* Add this decl to the current binding level. */
1885 gnat_pushdecl (label_decl, gnat_node);
1890 /* Return a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
1891 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1892 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1893 PARM_DECL nodes chained through the DECL_CHAIN field).
1895 INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, ARTIFICIAL_FLAG and ATTR_LIST are
1896 used to set the appropriate fields in the FUNCTION_DECL. GNAT_NODE is
1897 used for the position of the decl. */
1900 create_subprog_decl (tree subprog_name, tree asm_name, tree subprog_type,
1901 tree param_decl_list, bool inline_flag, bool public_flag,
1902 bool extern_flag, bool artificial_flag,
1903 struct attrib *attr_list, Node_Id gnat_node)
1905 tree subprog_decl = build_decl (input_location, FUNCTION_DECL, subprog_name,
1907 tree result_decl = build_decl (input_location, RESULT_DECL, NULL_TREE,
1908 TREE_TYPE (subprog_type));
1909 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
1911 /* If this is a non-inline function nested inside an inlined external
1912 function, we cannot honor both requests without cloning the nested
1913 function in the current unit since it is private to the other unit.
1914 We could inline the nested function as well but it's probably better
1915 to err on the side of too little inlining. */
1918 && current_function_decl
1919 && DECL_DECLARED_INLINE_P (current_function_decl)
1920 && DECL_EXTERNAL (current_function_decl))
1921 DECL_DECLARED_INLINE_P (current_function_decl) = 0;
1923 DECL_ARTIFICIAL (subprog_decl) = artificial_flag;
1924 DECL_EXTERNAL (subprog_decl) = extern_flag;
1925 DECL_DECLARED_INLINE_P (subprog_decl) = inline_flag;
/* Suppress "function not inlined" warnings for compiler-generated
   (artificial) inline subprograms only.  */
1926 DECL_NO_INLINE_WARNING_P (subprog_decl) = inline_flag && artificial_flag;
1928 TREE_PUBLIC (subprog_decl) = public_flag;
/* Propagate the qualifiers of the FUNCTION_TYPE onto the decl; a
   volatile function is also marked as having side effects.  */
1929 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
1930 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
1931 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
/* The RESULT_DECL is synthesized here; DECL_BY_REFERENCE mirrors the
   invisible-reference return convention flagged on the type.  */
1933 DECL_ARTIFICIAL (result_decl) = 1;
1934 DECL_IGNORED_P (result_decl) = 1;
1935 DECL_BY_REFERENCE (result_decl) = TREE_ADDRESSABLE (subprog_type);
1936 DECL_RESULT (subprog_decl) = result_decl;
1940 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
1942 /* The expand_main_function circuitry expects "main_identifier_node" to
1943 designate the DECL_NAME of the 'main' entry point, in turn expected
1944 to be declared as the "main" function literally by default. Ada
1945 program entry points are typically declared with a different name
1946 within the binder generated file, exported as 'main' to satisfy the
1947 system expectations. Force main_identifier_node in this case. */
1948 if (asm_name == main_identifier_node)
1949 DECL_NAME (subprog_decl) = main_identifier_node;
1952 /* Add this decl to the current binding level. */
1953 gnat_pushdecl (subprog_decl, gnat_node);
1955 process_attributes (subprog_decl, attr_list);
1957 /* Output the assembler code and/or RTL for the declaration. */
1958 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
1960 return subprog_decl;
1963 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
1964 body. This routine needs to be invoked before processing the declarations
1965 appearing in the subprogram. */
1968 begin_subprog_body (tree subprog_decl)
1972 announce_function (subprog_decl);
1974 /* This function is being defined. */
1975 TREE_STATIC (subprog_decl) = 1;
/* Make this the function currently being compiled.  */
1977 current_function_decl = subprog_decl;
1979 /* Enter a new binding level and show that all the parameters belong to
1983 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
1984 param_decl = DECL_CHAIN (param_decl))
1985 DECL_CONTEXT (param_decl) = subprog_decl;
1987 make_decl_rtl (subprog_decl);
1990 /* Finish translating the current subprogram and set its BODY. */
1993 end_subprog_body (tree body)
1995 tree fndecl = current_function_decl;
1997 /* Attach the BLOCK for this level to the function and pop the level. */
1998 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
1999 DECL_INITIAL (fndecl) = current_binding_level->block;
2002 /* Mark the RESULT_DECL as being in this subprogram. */
2003 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
2005 /* The body should be a BIND_EXPR whose BLOCK is the top-level one. */
2006 if (TREE_CODE (body) == BIND_EXPR)
2008 BLOCK_SUPERCONTEXT (BIND_EXPR_BLOCK (body)) = fndecl;
2009 DECL_INITIAL (fndecl) = BIND_EXPR_BLOCK (body);
2012 DECL_SAVED_TREE (fndecl) = body;
/* Pop back to the enclosing function's context, or NULL_TREE if this
   subprogram is not nested.  */
2014 current_function_decl = decl_function_context (fndecl);
2017 /* Wrap up compilation of SUBPROG_DECL, a subprogram body. */
2020 rest_of_subprog_body_compilation (tree subprog_decl)
2022 /* We cannot track the location of errors past this point. */
2023 error_gnat_node = Empty;
2025 /* If we're only annotating types, don't actually compile this function. */
2026 if (type_annotate_only)
2029 /* Dump functions before gimplification. */
2030 dump_function (TDI_original, subprog_decl);
2032 /* ??? This special handling of nested functions is probably obsolete. */
2033 if (!decl_function_context (subprog_decl))
/* Top-level subprogram: hand it off to cgraph for full compilation.  */
2034 cgraph_finalize_function (subprog_decl, false);
2036 /* Register this function with cgraph just far enough to get it
2037 added to our parent's nested function list. */
2038 (void) cgraph_get_create_node (subprog_decl);
/* Record DECL, a builtin function decl, at the global binding level
   (Empty means no associated GNAT node for the position).  */
2042 gnat_builtin_function (tree decl)
2044 gnat_pushdecl (decl, Empty);
2048 /* Return an integer type with the number of bits of precision given by
2049 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
2050 it is a signed type. */
2053 gnat_type_for_size (unsigned precision, int unsignedp)
/* Types up to twice the word size are cached in
   signed_and_unsigned_types, keyed by precision and signedness.  */
2058 if (precision <= 2 * MAX_BITS_PER_WORD
2059 && signed_and_unsigned_types[precision][unsignedp])
2060 return signed_and_unsigned_types[precision][unsignedp];
2063 t = make_unsigned_type (precision);
2065 t = make_signed_type (precision);
2067 if (precision <= 2 * MAX_BITS_PER_WORD)
2068 signed_and_unsigned_types[precision][unsignedp] = t;
/* Name the type, e.g. "UNSIGNED_32" or "SIGNED_16".  */
2072 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
2073 TYPE_NAME (t) = get_identifier (type_name);
2079 /* Likewise for floating-point types: return a REAL_TYPE with the given
   PRECISION whose machine mode is MODE, caching one type per mode.  */
2082 float_type_for_precision (int precision, enum machine_mode mode)
2087 if (float_types[(int) mode])
2088 return float_types[(int) mode];
2090 float_types[(int) mode] = t = make_node (REAL_TYPE);
2091 TYPE_PRECISION (t) = precision;
/* The requested precision must indeed lay out to MODE.  */
2094 gcc_assert (TYPE_MODE (t) == mode);
2097 sprintf (type_name, "FLOAT_%d", precision);
2098 TYPE_NAME (t) = get_identifier (type_name);
2104 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
2105 an unsigned type; otherwise a signed type is returned. */
2108 gnat_type_for_mode (enum machine_mode mode, int unsignedp)
2110 if (mode == BLKmode)
2113 if (mode == VOIDmode)
2114 return void_type_node;
2116 if (COMPLEX_MODE_P (mode))
2119 if (SCALAR_FLOAT_MODE_P (mode))
2120 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
2122 if (SCALAR_INT_MODE_P (mode))
2123 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2125 if (VECTOR_MODE_P (mode))
/* Build the vector type from the type of its elements.  */
2127 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2128 tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
2130 return build_vector_type_for_mode (inner_type, mode);
2136 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
2139 gnat_unsigned_type (tree type_node)
2141 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
/* For a modular type, or a subtype of one, keep a link back to the
   modular base in TREE_TYPE of a copy of the generic unsigned type.  */
2143 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2145 type = copy_node (type);
2146 TREE_TYPE (type) = type_node;
2148 else if (TREE_TYPE (type_node)
2149 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2150 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2152 type = copy_node (type);
2153 TREE_TYPE (type) = TREE_TYPE (type_node);
2159 /* Return the signed version of a TYPE_NODE, a scalar type. */
2162 gnat_signed_type (tree type_node)
2164 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
/* Mirror of gnat_unsigned_type: preserve the link to a modular base
   type (or that of a subtype) in TREE_TYPE of a copied node.  */
2166 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2168 type = copy_node (type);
2169 TREE_TYPE (type) = type_node;
2171 else if (TREE_TYPE (type_node)
2172 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2173 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2175 type = copy_node (type);
2176 TREE_TYPE (type) = TREE_TYPE (type_node);
2182 /* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
2183 transparently converted to each other. */
2186 gnat_types_compatible_p (tree t1, tree t2)
2188 enum tree_code code;
2190 /* This is the default criterion. */
2191 if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
2194 /* We only check structural equivalence here. */
2195 if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
2198 /* Vector types are also compatible if they have the same number of subparts
2199 and the same form of (scalar) element type. */
2200 if (code == VECTOR_TYPE
2201 && TYPE_VECTOR_SUBPARTS (t1) == TYPE_VECTOR_SUBPARTS (t2)
2202 && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
2203 && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
2206 /* Array types are also compatible if they are constrained and have the same
2207 domain(s) and the same component type. */
2208 if (code == ARRAY_TYPE
2209 && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
2210 || (TYPE_DOMAIN (t1)
2212 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
2213 TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
2214 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
2215 TYPE_MAX_VALUE (TYPE_DOMAIN (t2)))))
/* Component types must match, possibly recursively for nested
   (multi-dimensional) array types.  */
2216 && (TREE_TYPE (t1) == TREE_TYPE (t2)
2217 || (TREE_CODE (TREE_TYPE (t1)) == ARRAY_TYPE
2218 && gnat_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))))
2221 /* Padding record types are also compatible if they pad the same
2222 type and have the same constant size. */
2223 if (code == RECORD_TYPE
2224 && TYPE_PADDING_P (t1) && TYPE_PADDING_P (t2)
2225 && TREE_TYPE (TYPE_FIELDS (t1)) == TREE_TYPE (TYPE_FIELDS (t2))
2226 && tree_int_cst_equal (TYPE_SIZE (t1), TYPE_SIZE (t2)))
2232 /* Return true if EXPR is a useless type conversion, i.e. a conversion
   between types that gnat_types_compatible_p deems interchangeable.  */
2235 gnat_useless_type_conversion (tree expr)
2237 if (CONVERT_EXPR_P (expr)
2238 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
2239 || TREE_CODE (expr) == NON_LVALUE_EXPR)
2240 return gnat_types_compatible_p (TREE_TYPE (expr),
2241 TREE_TYPE (TREE_OPERAND (expr, 0)));
2246 /* Return true if T, a FUNCTION_TYPE, has the specified list of flags. */
2249 fntype_same_flags_p (const_tree t, tree cico_list, bool return_unconstrained_p,
2250 bool return_by_direct_ref_p, bool return_by_invisi_ref_p)
/* TREE_ADDRESSABLE on a FUNCTION_TYPE encodes return by invisible
   reference in this front-end.  */
2252 return TYPE_CI_CO_LIST (t) == cico_list
2253 && TYPE_RETURN_UNCONSTRAINED_P (t) == return_unconstrained_p
2254 && TYPE_RETURN_BY_DIRECT_REF_P (t) == return_by_direct_ref_p
2255 && TREE_ADDRESSABLE (t) == return_by_invisi_ref_p;
2258 /* EXP is an expression for the size of an object. If this size contains
2259 discriminant references, replace them with the maximum (if MAX_P) or
2260 minimum (if !MAX_P) possible value of the discriminant. */
2263 max_size (tree exp, bool max_p)
2265 enum tree_code code = TREE_CODE (exp);
2266 tree type = TREE_TYPE (exp);
2268 switch (TREE_CODE_CLASS (code))
2270 case tcc_declaration:
2275 if (code == CALL_EXPR)
/* Try to inline the call; if that succeeds, recurse on the result,
   otherwise rebuild the call with max_size applied to each argument.  */
2280 t = maybe_inline_call_in_expr (exp);
2282 return max_size (t, max_p);
2284 n = call_expr_nargs (exp);
2286 argarray = XALLOCAVEC (tree, n);
2287 for (i = 0; i < n; i++)
2288 argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
2289 return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
2294 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
2295 modify. Otherwise, we treat it like a variable. */
2296 if (!CONTAINS_PLACEHOLDER_P (exp))
/* A discriminant reference: substitute the extreme value of its type.  */
2299 type = TREE_TYPE (TREE_OPERAND (exp, 1));
2301 max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), true);
2303 case tcc_comparison:
2304 return max_p ? size_one_node : size_zero_node;
2308 case tcc_expression:
2309 switch (TREE_CODE_LENGTH (code))
2312 if (code == SAVE_EXPR)
2314 else if (code == NON_LVALUE_EXPR)
2315 return max_size (TREE_OPERAND (exp, 0), max_p);
/* NEGATE_EXPR flips which extreme we are after in the operand.  */
2318 fold_build1 (code, type,
2319 max_size (TREE_OPERAND (exp, 0),
2320 code == NEGATE_EXPR ? !max_p : max_p));
2323 if (code == COMPOUND_EXPR)
2324 return max_size (TREE_OPERAND (exp, 1), max_p);
2327 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
2328 tree rhs = max_size (TREE_OPERAND (exp, 1),
2329 code == MINUS_EXPR ? !max_p : max_p);
2331 /* Special-case wanting the maximum value of a MIN_EXPR.
2332 In that case, if one side overflows, return the other.
2333 sizetype is signed, but we know sizes are non-negative.
2334 Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
2335 overflowing and the RHS a variable. */
2338 && TREE_CODE (rhs) == INTEGER_CST
2339 && TREE_OVERFLOW (rhs))
2343 && TREE_CODE (lhs) == INTEGER_CST
2344 && TREE_OVERFLOW (lhs))
2346 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
2347 && TREE_CODE (lhs) == INTEGER_CST
2348 && TREE_OVERFLOW (lhs)
2349 && !TREE_CONSTANT (rhs))
2352 return fold_build2 (code, type, lhs, rhs);
2356 if (code == COND_EXPR)
/* Take the extreme over both arms of the conditional.  */
2357 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2358 max_size (TREE_OPERAND (exp, 1), max_p),
2359 max_size (TREE_OPERAND (exp, 2), max_p));
2362 /* Other tree classes cannot happen. */
2370 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2371 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2372 Return a constructor for the template. */
2375 build_template (tree template_type, tree array_type, tree expr)
2377 VEC(constructor_elt,gc) *template_elts = NULL;
2378 tree bound_list = NULL_TREE;
/* Strip padding and justified-modular wrappers to reach the actual
   array type.  */
2381 while (TREE_CODE (array_type) == RECORD_TYPE
2382 && (TYPE_PADDING_P (array_type)
2383 || TYPE_JUSTIFIED_MODULAR_P (array_type)))
2384 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
2386 if (TREE_CODE (array_type) == ARRAY_TYPE
2387 || (TREE_CODE (array_type) == INTEGER_TYPE
2388 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
2389 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
2391 /* First make the list for a CONSTRUCTOR for the template. Go down the
2392 field list of the template instead of the type chain because this
2393 array might be an Ada array of arrays and we can't tell where the
2394 nested arrays stop being the underlying object. */
/* The template holds (low, high) pairs, hence the double DECL_CHAIN
   step per iteration.  */
2396 for (field = TYPE_FIELDS (template_type); field;
2398 ? (bound_list = TREE_CHAIN (bound_list))
2399 : (array_type = TREE_TYPE (array_type))),
2400 field = DECL_CHAIN (DECL_CHAIN (field)))
2402 tree bounds, min, max;
2404 /* If we have a bound list, get the bounds from there. Likewise
2405 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
2406 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
2407 This will give us a maximum range. */
2409 bounds = TREE_VALUE (bound_list);
2410 else if (TREE_CODE (array_type) == ARRAY_TYPE)
2411 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
2412 else if (expr && TREE_CODE (expr) == PARM_DECL
2413 && DECL_BY_COMPONENT_PTR_P (expr))
2414 bounds = TREE_TYPE (field);
2418 min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
2419 max = convert (TREE_TYPE (DECL_CHAIN (field)), TYPE_MAX_VALUE (bounds));
2421 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
2422 substitute it from OBJECT. */
2423 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
2424 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
2426 CONSTRUCTOR_APPEND_ELT (template_elts, field, min);
2427 CONSTRUCTOR_APPEND_ELT (template_elts, DECL_CHAIN (field), max);
2430 return gnat_build_constructor (template_type, template_elts);
2433 /* Helper routine to make a descriptor field. FIELD_LIST is the list of decls
2434 being built; the new decl is chained on to the front of the list.
   INITIAL is stored in DECL_INITIAL and used later when a constructor
   is built for the descriptor (see fill_vms_descriptor).  */
2437 make_descriptor_field (const char *name, tree type, tree rec_type,
2438 tree initial, tree field_list)
2441 = create_field_decl (get_identifier (name), type, rec_type, NULL_TREE,
2444 DECL_INITIAL (field) = initial;
2445 DECL_CHAIN (field) = field_list;
2449 /* Build a 32-bit VMS descriptor from a Mechanism_Type, which must specify a
2450 descriptor type, and the GCC type of an object. Each FIELD_DECL in the
2451 type contains in its DECL_INITIAL the expression to use when a constructor
2452 is made for the type. GNAT_ENTITY is an entity used to print out an error
2453 message if the mechanism cannot be applied to an object of that type and
2454 also for the name. */
2457 build_vms_descriptor32 (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2459 tree record_type = make_node (RECORD_TYPE);
2460 tree pointer32_type, pointer64_type;
2461 tree field_list = NULL_TREE;
2462 int klass, ndim, i, dtype = 0;
2463 tree inner_type, tem;
2466 /* If TYPE is an unconstrained array, use the underlying array type. */
2467 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2468 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2470 /* If this is an array, compute the number of dimensions in the array,
2471 get the index types, and point to the inner type. */
2472 if (TREE_CODE (type) != ARRAY_TYPE)
2475 for (ndim = 1, inner_type = type;
2476 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2477 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2478 ndim++, inner_type = TREE_TYPE (inner_type))
2481 idx_arr = XALLOCAVEC (tree, ndim);
/* For Fortran-convention arrays (except NCA descriptors), record the
   index types in reverse (column-major) order.  */
2483 if (mech != By_Descriptor_NCA && mech != By_Short_Descriptor_NCA
2484 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2485 for (i = ndim - 1, inner_type = type;
2487 i--, inner_type = TREE_TYPE (inner_type))
2488 idx_arr[i] = TYPE_DOMAIN (inner_type);
2490 for (i = 0, inner_type = type;
2492 i++, inner_type = TREE_TYPE (inner_type))
2493 idx_arr[i] = TYPE_DOMAIN (inner_type);
2495 /* Now get the DTYPE value. */
/* NOTE(review): the numeric dtype codes below follow the VMS DSC$K_DTYPE
   encoding — confirm against the OpenVMS calling standard.  */
2496 switch (TREE_CODE (type))
2501 if (TYPE_VAX_FLOATING_POINT_P (type))
2502 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2515 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2518 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2521 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2524 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2527 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2530 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2536 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2540 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2541 && TYPE_VAX_FLOATING_POINT_P (type))
2542 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2554 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2565 /* Get the CLASS value. */
2568 case By_Descriptor_A:
2569 case By_Short_Descriptor_A:
2572 case By_Descriptor_NCA:
2573 case By_Short_Descriptor_NCA:
2576 case By_Descriptor_SB:
2577 case By_Short_Descriptor_SB:
2581 case By_Short_Descriptor:
2582 case By_Descriptor_S:
2583 case By_Short_Descriptor_S:
2589 /* Make the type for a descriptor for VMS. The first four fields are the
2590 same for all types. */
2592 = make_descriptor_field ("LENGTH", gnat_type_for_size (16, 1), record_type,
2593 size_in_bytes ((mech == By_Descriptor_A
2594 || mech == By_Short_Descriptor_A)
2595 ? inner_type : type),
2598 = make_descriptor_field ("DTYPE", gnat_type_for_size (8, 1), record_type,
2599 size_int (dtype), field_list);
2601 = make_descriptor_field ("CLASS", gnat_type_for_size (8, 1), record_type,
2602 size_int (klass), field_list);
2604 pointer32_type = build_pointer_type_for_mode (type, SImode, false);
2605 pointer64_type = build_pointer_type_for_mode (type, DImode, false);
2607 /* Ensure that only 32-bit pointers are passed in 32-bit descriptors. Note
2608 that we cannot build a template call to the CE routine as it would get a
2609 wrong source location; instead we use a second placeholder for it. */
2610 tem = build_unary_op (ADDR_EXPR, pointer64_type,
2611 build0 (PLACEHOLDER_EXPR, type));
2612 tem = build3 (COND_EXPR, pointer32_type,
2614 ? build_binary_op (GE_EXPR, boolean_type_node, tem,
2615 build_int_cstu (pointer64_type, 0x80000000))
2616 : boolean_false_node,
2617 build0 (PLACEHOLDER_EXPR, void_type_node),
2618 convert (pointer32_type, tem));
2621 = make_descriptor_field ("POINTER", pointer32_type, record_type, tem,
/* Remaining fields depend on the descriptor class.  */
2627 case By_Short_Descriptor:
2628 case By_Descriptor_S:
2629 case By_Short_Descriptor_S:
2632 case By_Descriptor_SB:
2633 case By_Short_Descriptor_SB:
2635 = make_descriptor_field ("SB_L1", gnat_type_for_size (32, 1),
2637 (TREE_CODE (type) == ARRAY_TYPE
2638 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type))
2642 = make_descriptor_field ("SB_U1", gnat_type_for_size (32, 1),
2644 (TREE_CODE (type) == ARRAY_TYPE
2645 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type))
2650 case By_Descriptor_A:
2651 case By_Short_Descriptor_A:
2652 case By_Descriptor_NCA:
2653 case By_Short_Descriptor_NCA:
2655 = make_descriptor_field ("SCALE", gnat_type_for_size (8, 1),
2656 record_type, size_zero_node, field_list);
2659 = make_descriptor_field ("DIGITS", gnat_type_for_size (8, 1),
2660 record_type, size_zero_node, field_list);
2663 = make_descriptor_field ("AFLAGS", gnat_type_for_size (8, 1),
2665 size_int ((mech == By_Descriptor_NCA
2666 || mech == By_Short_Descriptor_NCA)
2668 /* Set FL_COLUMN, FL_COEFF, and
2670 : (TREE_CODE (type) == ARRAY_TYPE
2671 && TYPE_CONVENTION_FORTRAN_P
2677 = make_descriptor_field ("DIMCT", gnat_type_for_size (8, 1),
2678 record_type, size_int (ndim), field_list);
2681 = make_descriptor_field ("ARSIZE", gnat_type_for_size (32, 1),
2682 record_type, size_in_bytes (type),
2685 /* Now build a pointer to the 0,0,0... element. */
2686 tem = build0 (PLACEHOLDER_EXPR, type);
2687 for (i = 0, inner_type = type; i < ndim;
2688 i++, inner_type = TREE_TYPE (inner_type))
2689 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
2690 convert (TYPE_DOMAIN (inner_type), size_zero_node),
2691 NULL_TREE, NULL_TREE);
2694 = make_descriptor_field ("A0", pointer32_type, record_type,
2695 build1 (ADDR_EXPR, pointer32_type, tem),
2698 /* Next come the addressing coefficients. */
2699 tem = size_one_node;
2700 for (i = 0; i < ndim; i++)
/* Running product of the extents of dimensions 0..i.  */
2704 = size_binop (MULT_EXPR, tem,
2705 size_binop (PLUS_EXPR,
2706 size_binop (MINUS_EXPR,
2707 TYPE_MAX_VALUE (idx_arr[i]),
2708 TYPE_MIN_VALUE (idx_arr[i])),
2711 fname[0] = ((mech == By_Descriptor_NCA ||
2712 mech == By_Short_Descriptor_NCA) ? 'S' : 'M');
2713 fname[1] = '0' + i, fname[2] = 0;
2715 = make_descriptor_field (fname, gnat_type_for_size (32, 1),
2716 record_type, idx_length, field_list);
2718 if (mech == By_Descriptor_NCA || mech == By_Short_Descriptor_NCA)
2722 /* Finally here are the bounds. */
2723 for (i = 0; i < ndim; i++)
2727 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2729 = make_descriptor_field (fname, gnat_type_for_size (32, 1),
2730 record_type, TYPE_MIN_VALUE (idx_arr[i]),
2735 = make_descriptor_field (fname, gnat_type_for_size (32, 1),
2736 record_type, TYPE_MAX_VALUE (idx_arr[i]),
2742 post_error ("unsupported descriptor type for &", gnat_entity);
2745 TYPE_NAME (record_type) = create_concat_name (gnat_entity, "DESC");
2746 finish_record_type (record_type, nreverse (field_list), 0, false);
2750 /* Build a 64-bit VMS descriptor from a Mechanism_Type, which must specify a
2751 descriptor type, and the GCC type of an object. Each FIELD_DECL in the
2752 type contains in its DECL_INITIAL the expression to use when a constructor
2753 is made for the type. GNAT_ENTITY is an entity used to print out an error
2754 message if the mechanism cannot be applied to an object of that type and
2755 also for the name.
   64-bit counterpart of build_vms_descriptor32; the layout adds the
   MBO/MBMO sanity fields and uses 64-bit lengths and pointers.  */
2758 build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2760 tree record_type = make_node (RECORD_TYPE);
2761 tree pointer64_type;
2762 tree field_list = NULL_TREE;
2763 int klass, ndim, i, dtype = 0;
2764 tree inner_type, tem;
2767 /* If TYPE is an unconstrained array, use the underlying array type. */
2768 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2769 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2771 /* If this is an array, compute the number of dimensions in the array,
2772 get the index types, and point to the inner type. */
2773 if (TREE_CODE (type) != ARRAY_TYPE)
2776 for (ndim = 1, inner_type = type;
2777 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2778 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2779 ndim++, inner_type = TREE_TYPE (inner_type))
2782 idx_arr = XALLOCAVEC (tree, ndim);
/* For Fortran-convention arrays (except NCA), record the index types
   in reverse (column-major) order.  */
2784 if (mech != By_Descriptor_NCA
2785 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2786 for (i = ndim - 1, inner_type = type;
2788 i--, inner_type = TREE_TYPE (inner_type))
2789 idx_arr[i] = TYPE_DOMAIN (inner_type);
2791 for (i = 0, inner_type = type;
2793 i++, inner_type = TREE_TYPE (inner_type))
2794 idx_arr[i] = TYPE_DOMAIN (inner_type);
2796 /* Now get the DTYPE value. */
2797 switch (TREE_CODE (type))
2802 if (TYPE_VAX_FLOATING_POINT_P (type))
2803 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2816 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2819 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2822 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2825 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2828 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2831 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2837 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2841 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2842 && TYPE_VAX_FLOATING_POINT_P (type))
2843 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2855 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2866 /* Get the CLASS value. */
2869 case By_Descriptor_A:
2872 case By_Descriptor_NCA:
2875 case By_Descriptor_SB:
2879 case By_Descriptor_S:
2885 /* Make the type for a 64-bit descriptor for VMS. The first six fields
2886 are the same for all types. */
/* MBO ("must be one") and MBMO ("must be minus one") let the callee
   distinguish a 64-bit descriptor from a 32-bit one.  */
2888 = make_descriptor_field ("MBO", gnat_type_for_size (16, 1),
2889 record_type, size_int (1), field_list);
2891 = make_descriptor_field ("DTYPE", gnat_type_for_size (8, 1),
2892 record_type, size_int (dtype), field_list);
2894 = make_descriptor_field ("CLASS", gnat_type_for_size (8, 1),
2895 record_type, size_int (klass), field_list);
2897 = make_descriptor_field ("MBMO", gnat_type_for_size (32, 1),
2898 record_type, ssize_int (-1), field_list);
2900 = make_descriptor_field ("LENGTH", gnat_type_for_size (64, 1),
2902 size_in_bytes (mech == By_Descriptor_A
2903 ? inner_type : type),
2906 pointer64_type = build_pointer_type_for_mode (type, DImode, false);
2909 = make_descriptor_field ("POINTER", pointer64_type, record_type,
2910 build_unary_op (ADDR_EXPR, pointer64_type,
2911 build0 (PLACEHOLDER_EXPR, type)),
/* Remaining fields depend on the descriptor class.  */
2917 case By_Descriptor_S:
2920 case By_Descriptor_SB:
2922 = make_descriptor_field ("SB_L1", gnat_type_for_size (64, 1),
2924 (TREE_CODE (type) == ARRAY_TYPE
2925 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type))
2929 = make_descriptor_field ("SB_U1", gnat_type_for_size (64, 1),
2931 (TREE_CODE (type) == ARRAY_TYPE
2932 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type))
2937 case By_Descriptor_A:
2938 case By_Descriptor_NCA:
2940 = make_descriptor_field ("SCALE", gnat_type_for_size (8, 1),
2941 record_type, size_zero_node, field_list);
2944 = make_descriptor_field ("DIGITS", gnat_type_for_size (8, 1),
2945 record_type, size_zero_node, field_list);
/* DTYPE is reused here as a scratch variable for the AFLAGS value.  */
2947 dtype = (mech == By_Descriptor_NCA
2949 /* Set FL_COLUMN, FL_COEFF, and
2951 : (TREE_CODE (type) == ARRAY_TYPE
2952 && TYPE_CONVENTION_FORTRAN_P (type)
2955 = make_descriptor_field ("AFLAGS", gnat_type_for_size (8, 1),
2956 record_type, size_int (dtype),
2960 = make_descriptor_field ("DIMCT", gnat_type_for_size (8, 1),
2961 record_type, size_int (ndim), field_list);
2964 = make_descriptor_field ("MBZ", gnat_type_for_size (32, 1),
2965 record_type, size_int (0), field_list);
2967 = make_descriptor_field ("ARSIZE", gnat_type_for_size (64, 1),
2968 record_type, size_in_bytes (type),
2971 /* Now build a pointer to the 0,0,0... element. */
2972 tem = build0 (PLACEHOLDER_EXPR, type);
2973 for (i = 0, inner_type = type; i < ndim;
2974 i++, inner_type = TREE_TYPE (inner_type))
2975 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
2976 convert (TYPE_DOMAIN (inner_type), size_zero_node),
2977 NULL_TREE, NULL_TREE);
2980 = make_descriptor_field ("A0", pointer64_type, record_type,
2981 build1 (ADDR_EXPR, pointer64_type, tem),
2984 /* Next come the addressing coefficients. */
2985 tem = size_one_node;
2986 for (i = 0; i < ndim; i++)
/* Running product of the extents of dimensions 0..i.  */
2990 = size_binop (MULT_EXPR, tem,
2991 size_binop (PLUS_EXPR,
2992 size_binop (MINUS_EXPR,
2993 TYPE_MAX_VALUE (idx_arr[i]),
2994 TYPE_MIN_VALUE (idx_arr[i])),
2997 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
2998 fname[1] = '0' + i, fname[2] = 0;
3000 = make_descriptor_field (fname, gnat_type_for_size (64, 1),
3001 record_type, idx_length, field_list);
3003 if (mech == By_Descriptor_NCA)
3007 /* Finally here are the bounds. */
3008 for (i = 0; i < ndim; i++)
3012 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
3014 = make_descriptor_field (fname, gnat_type_for_size (64, 1),
3016 TYPE_MIN_VALUE (idx_arr[i]), field_list);
3020 = make_descriptor_field (fname, gnat_type_for_size (64, 1),
3022 TYPE_MAX_VALUE (idx_arr[i]), field_list);
3027 post_error ("unsupported descriptor type for &", gnat_entity);
3030 TYPE_NAME (record_type) = create_concat_name (gnat_entity, "DESC64");
3031 finish_record_type (record_type, nreverse (field_list), 0, false);
3035 /* Fill in a VMS descriptor of GNU_TYPE for GNU_EXPR and return the result.
3036 GNAT_ACTUAL is the actual parameter for which the descriptor is built. */
3039 fill_vms_descriptor (tree gnu_type, tree gnu_expr, Node_Id gnat_actual)
3041 VEC(constructor_elt,gc) *v = NULL;
/* Protect GNU_EXPR against multiple evaluation: it is substituted once
   per descriptor field below.  */
3044 gnu_expr = maybe_unconstrained_array (gnu_expr);
3045 gnu_expr = gnat_protect_expr (gnu_expr);
3046 gnat_mark_addressable (gnu_expr);
3048 /* We may need to substitute both GNU_EXPR and a CALL_EXPR to the raise CE
3049 routine in case we have a 32-bit descriptor. */
3050 gnu_expr = build2 (COMPOUND_EXPR, void_type_node,
3051 build_call_raise (CE_Range_Check_Failed, gnat_actual,
3052 N_Raise_Constraint_Error),
/* Each field's DECL_INITIAL holds the template expression set up by
   make_descriptor_field; instantiate it for this actual.  */
3055 for (field = TYPE_FIELDS (gnu_type); field; field = DECL_CHAIN (field))
3058 = convert (TREE_TYPE (field),
3059 SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_INITIAL (field),
3061 CONSTRUCTOR_APPEND_ELT (v, field, value);
3064 return gnat_build_constructor (gnu_type, v);
3067 /* Convert GNU_EXPR, a pointer to a 64bit VMS descriptor, to GNU_TYPE, a
3068 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3069 which the VMS descriptor is passed. */
/* NOTE(review): this excerpt is incomplete -- the return-type line
   (presumably "static tree"), the braces, the "switch (iklass)" statement
   and several statement left-hand sides are missing.  All code lines below
   are kept byte-identical; only comments were added.  */
3072 convert_vms_descriptor64 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
/* Dereference the descriptor pointer so its fields can be accessed.  */
3074 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3075 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3076 /* The CLASS field is the 3rd field in the descriptor. */
3077 tree klass = DECL_CHAIN (DECL_CHAIN (TYPE_FIELDS (desc_type)));
3078 /* The POINTER field is the 6th field in the descriptor. */
3079 tree pointer = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (klass)));
3081 /* Retrieve the value of the POINTER field. */
/* NOTE(review): the left-hand side of this assignment (used as GNU_EXPR64
   below) is on a line missing from this excerpt -- confirm against the
   full source.  */
3083 = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);
/* For a plain access type, the POINTER field converted to it is the whole
   result.  */
3085 if (POINTER_TYPE_P (gnu_type))
3086 return convert (gnu_type, gnu_expr64);
/* For a fat pointer, a bounds template must also be built.  */
3088 else if (TYPE_IS_FAT_POINTER_P (gnu_type))
3090 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
3091 tree p_bounds_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (gnu_type)));
3092 tree template_type = TREE_TYPE (p_bounds_type);
3093 tree min_field = TYPE_FIELDS (template_type);
3094 tree max_field = DECL_CHAIN (TYPE_FIELDS (template_type));
3095 tree template_tree, template_addr, aflags, dimct, t, u;
3096 /* See the head comment of build_vms_descriptor. */
3097 int iklass = TREE_INT_CST_LOW (DECL_INITIAL (klass));
3098 tree lfield, ufield;
3099 VEC(constructor_elt,gc) *v;
3101 /* Convert POINTER to the pointer-to-array type. */
3102 gnu_expr64 = convert (p_array_type, gnu_expr64);
/* NOTE(review): the enclosing "switch (iklass)" line is missing from this
   excerpt; the cases below dispatch on the descriptor class.  */
3106 case 1: /* Class S */
3107 case 15: /* Class SB */
3108 /* Build {1, LENGTH} template; LENGTH64 is the 5th field. */
3109 v = VEC_alloc (constructor_elt, gc, 2);
3110 t = DECL_CHAIN (DECL_CHAIN (klass));
3111 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3112 CONSTRUCTOR_APPEND_ELT (v, min_field,
3113 convert (TREE_TYPE (min_field),
3115 CONSTRUCTOR_APPEND_ELT (v, max_field,
3116 convert (TREE_TYPE (max_field), t));
3117 template_tree = gnat_build_constructor (template_type, v);
3118 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template_tree);
3120 /* For class S, we are done. */
3124 /* Test that we really have a SB descriptor, like DEC Ada. */
3125 t = build3 (COMPONENT_REF, TREE_TYPE (klass), desc, klass, NULL);
3126 u = convert (TREE_TYPE (klass), DECL_INITIAL (klass));
3127 u = build_binary_op (EQ_EXPR, boolean_type_node, t, u);
3128 /* If so, there is already a template in the descriptor and
3129 it is located right after the POINTER field. The fields are
3130 64bits so they must be repacked. */
3131 t = DECL_CHAIN (pointer);
3132 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3133 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3136 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
/* NOTE(review): the "ufield = convert" left-hand side for this conversion
   is on a missing line.  */
3138 (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (template_type))), ufield);
3140 /* Build the template in the form of a constructor. */
3141 v = VEC_alloc (constructor_elt, gc, 2);
3142 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (template_type), lfield);
3143 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (template_type)),
3145 template_tree = gnat_build_constructor (template_type, v);
3147 /* Otherwise use the {1, LENGTH} template we build above. */
3148 template_addr = build3 (COND_EXPR, p_bounds_type, u,
3149 build_unary_op (ADDR_EXPR, p_bounds_type,
3154 case 4: /* Class A */
3155 /* The AFLAGS field is the 3rd field after the pointer in the
3157 t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (pointer)));
3158 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3159 /* The DIMCT field is the next field in the descriptor after
3162 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3163 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
3164 or FL_COEFF or FL_BOUNDS not set. */
/* 192 = FL_COEFF | FL_BOUNDS mask per the comment above -- the combined
   check raises if DIMCT != 1 or either flag is clear.  */
3165 u = build_int_cst (TREE_TYPE (aflags), 192);
3166 u = build_binary_op (TRUTH_OR_EXPR, boolean_type_node,
3167 build_binary_op (NE_EXPR, boolean_type_node,
3169 convert (TREE_TYPE (dimct),
3171 build_binary_op (NE_EXPR, boolean_type_node,
3172 build2 (BIT_AND_EXPR,
3176 /* There is already a template in the descriptor and it is located
3177 in block 3. The fields are 64bits so they must be repacked. */
3178 t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN
3180 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3181 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3184 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3186 (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (template_type))), ufield);
3188 /* Build the template in the form of a constructor. */
3189 v = VEC_alloc (constructor_elt, gc, 2);
3190 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (template_type), lfield);
3191 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (template_type)),
3193 template_tree = gnat_build_constructor (template_type, v);
/* Wrap the template in a conditional raising Constraint_Error when the
   flag/dimension check above fails.  */
3194 template_tree = build3 (COND_EXPR, template_type, u,
3195 build_call_raise (CE_Length_Check_Failed, Empty,
3196 N_Raise_Constraint_Error),
3199 = build_unary_op (ADDR_EXPR, p_bounds_type, template_tree);
3202 case 10: /* Class NCA */
/* Unsupported classes: report the error on the subprogram and yield a
   dummy zero template address.  */
3204 post_error ("unsupported descriptor type for &", gnat_subprog);
3205 template_addr = integer_zero_node;
3209 /* Build the fat pointer in the form of a constructor. */
3210 v = VEC_alloc (constructor_elt, gc, 2);
3211 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (gnu_type), gnu_expr64);
3212 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (gnu_type)),
3214 return gnat_build_constructor (gnu_type, v);
3221 /* Convert GNU_EXPR, a pointer to a 32bit VMS descriptor, to GNU_TYPE, a
3222 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3223 which the VMS descriptor is passed. */
/* NOTE(review): 32-bit counterpart of convert_vms_descriptor64 above; the
   same lines are missing from this excerpt (return type, braces, the
   switch on IKLASS, some assignment left-hand sides).  Code lines are
   kept byte-identical.  */
3226 convert_vms_descriptor32 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
3228 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3229 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3230 /* The CLASS field is the 3rd field in the descriptor. */
3231 tree klass = DECL_CHAIN (DECL_CHAIN (TYPE_FIELDS (desc_type)));
3232 /* The POINTER field is the 4th field in the descriptor. */
3233 tree pointer = DECL_CHAIN (klass);
3235 /* Retrieve the value of the POINTER field. */
/* NOTE(review): left-hand side (used as GNU_EXPR32 below) is on a line
   missing from this excerpt.  */
3237 = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);
3239 if (POINTER_TYPE_P (gnu_type))
3240 return convert (gnu_type, gnu_expr32);
3242 else if (TYPE_IS_FAT_POINTER_P (gnu_type))
3244 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
3245 tree p_bounds_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (gnu_type)));
3246 tree template_type = TREE_TYPE (p_bounds_type);
3247 tree min_field = TYPE_FIELDS (template_type);
3248 tree max_field = DECL_CHAIN (TYPE_FIELDS (template_type));
3249 tree template_tree, template_addr, aflags, dimct, t, u;
3250 /* See the head comment of build_vms_descriptor. */
3251 int iklass = TREE_INT_CST_LOW (DECL_INITIAL (klass));
3252 VEC(constructor_elt,gc) *v;
3254 /* Convert POINTER to the pointer-to-array type. */
3255 gnu_expr32 = convert (p_array_type, gnu_expr32);
/* NOTE(review): the enclosing "switch (iklass)" line is missing.  */
3259 case 1: /* Class S */
3260 case 15: /* Class SB */
3261 /* Build {1, LENGTH} template; LENGTH is the 1st field. */
3262 v = VEC_alloc (constructor_elt, gc, 2);
3263 t = TYPE_FIELDS (desc_type);
3264 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3265 CONSTRUCTOR_APPEND_ELT (v, min_field,
3266 convert (TREE_TYPE (min_field),
3268 CONSTRUCTOR_APPEND_ELT (v, max_field,
3269 convert (TREE_TYPE (max_field), t));
3270 template_tree = gnat_build_constructor (template_type, v);
3271 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template_tree);
3273 /* For class S, we are done. */
3277 /* Test that we really have a SB descriptor, like DEC Ada. */
3278 t = build3 (COMPONENT_REF, TREE_TYPE (klass), desc, klass, NULL);
3279 u = convert (TREE_TYPE (klass), DECL_INITIAL (klass));
3280 u = build_binary_op (EQ_EXPR, boolean_type_node, t, u);
3281 /* If so, there is already a template in the descriptor and
3282 it is located right after the POINTER field. */
/* Unlike the 64-bit variant, no repacking is needed here.  */
3283 t = DECL_CHAIN (pointer);
3285 = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3286 /* Otherwise use the {1, LENGTH} template we build above. */
3287 template_addr = build3 (COND_EXPR, p_bounds_type, u,
3288 build_unary_op (ADDR_EXPR, p_bounds_type,
3293 case 4: /* Class A */
3294 /* The AFLAGS field is the 7th field in the descriptor. */
3295 t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (pointer)));
3296 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3297 /* The DIMCT field is the 8th field in the descriptor. */
3299 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3300 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
3301 or FL_COEFF or FL_BOUNDS not set. */
3302 u = build_int_cst (TREE_TYPE (aflags), 192);
3303 u = build_binary_op (TRUTH_OR_EXPR, boolean_type_node,
3304 build_binary_op (NE_EXPR, boolean_type_node,
3306 convert (TREE_TYPE (dimct),
3308 build_binary_op (NE_EXPR, boolean_type_node,
3309 build2 (BIT_AND_EXPR,
3313 /* There is already a template in the descriptor and it is
3314 located at the start of block 3 (12th field). */
3315 t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (t))));
3317 = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
/* Guard the template with the check built above; failure raises
   Constraint_Error via a length-check call.  */
3318 template_tree = build3 (COND_EXPR, TREE_TYPE (t), u,
3319 build_call_raise (CE_Length_Check_Failed, Empty,
3320 N_Raise_Constraint_Error),
3323 = build_unary_op (ADDR_EXPR, p_bounds_type, template_tree);
3326 case 10: /* Class NCA */
3328 post_error ("unsupported descriptor type for &", gnat_subprog);
3329 template_addr = integer_zero_node;
3333 /* Build the fat pointer in the form of a constructor. */
3334 v = VEC_alloc (constructor_elt, gc, 2);
3335 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (gnu_type), gnu_expr32);
3336 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (gnu_type)),
3339 return gnat_build_constructor (gnu_type, v);
3346 /* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
3347 pointer or fat pointer type. GNU_EXPR_ALT_TYPE is the alternate (32-bit)
3348 pointer type of GNU_EXPR. BY_REF is true if the result is to be used by
3349 reference. GNAT_SUBPROG is the subprogram to which the VMS descriptor is
/* NOTE(review): dispatcher that decides between the 32-bit and 64-bit
   descriptor conversions above.  Several guard lines ("if (by_ref)",
   braces, the IS64BIT left-hand side) are missing from this excerpt;
   code lines are kept byte-identical.  */
3353 convert_vms_descriptor (tree gnu_type, tree gnu_expr, tree gnu_expr_alt_type,
3354 bool by_ref, Entity_Id gnat_subprog)
3356 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3357 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3358 tree mbo = TYPE_FIELDS (desc_type);
3359 const char *mbostr = IDENTIFIER_POINTER (DECL_NAME (mbo));
3360 tree mbmo = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (mbo)));
3361 tree real_type, is64bit, gnu_expr32, gnu_expr64;
/* NOTE(review): the conditional selecting between these two assignments
   (presumably testing BY_REF) is on a missing line.  */
3364 real_type = TREE_TYPE (gnu_type);
3366 real_type = gnu_type;
3368 /* If the field name is not MBO, it must be 32-bit and no alternate.
3369 Otherwise primary must be 64-bit and alternate 32-bit. */
3370 if (strcmp (mbostr, "MBO") != 0)
3372 tree ret = convert_vms_descriptor32 (real_type, gnu_expr, gnat_subprog);
3374 ret = build_unary_op (ADDR_EXPR, gnu_type, ret);
3378 /* Build the test for 64-bit descriptor. */
3379 mbo = build3 (COMPONENT_REF, TREE_TYPE (mbo), desc, mbo, NULL_TREE);
3380 mbmo = build3 (COMPONENT_REF, TREE_TYPE (mbmo), desc, mbmo, NULL_TREE);
/* A 64-bit descriptor is recognized by MBO == 1 and MBMO == -1; the
   "is64bit" left-hand side is on a missing line.  */
3382 = build_binary_op (TRUTH_ANDIF_EXPR, boolean_type_node,
3383 build_binary_op (EQ_EXPR, boolean_type_node,
3384 convert (integer_type_node, mbo),
3386 build_binary_op (EQ_EXPR, boolean_type_node,
3387 convert (integer_type_node, mbmo),
3388 integer_minus_one_node));
3390 /* Build the 2 possible end results. */
3391 gnu_expr64 = convert_vms_descriptor64 (real_type, gnu_expr, gnat_subprog);
3393 gnu_expr64 = build_unary_op (ADDR_EXPR, gnu_type, gnu_expr64);
3394 gnu_expr = fold_convert (gnu_expr_alt_type, gnu_expr);
3395 gnu_expr32 = convert_vms_descriptor32 (real_type, gnu_expr, gnat_subprog);
3397 gnu_expr32 = build_unary_op (ADDR_EXPR, gnu_type, gnu_expr32);
/* Select between the two conversions at run time based on the test.  */
3399 return build3 (COND_EXPR, gnu_type, is64bit, gnu_expr64, gnu_expr32);
3402 /* Build a type to be used to represent an aliased object whose nominal type
3403 is an unconstrained array. This consists of a RECORD_TYPE containing a
3404 field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
3405 If ARRAY_TYPE is that of an unconstrained array, this is used to represent
3406 an arbitrary unconstrained object. Use NAME as the name of the record.
3407 DEBUG_INFO_P is true if we need to write debug information for the type. */
/* NOTE(review): the declarations "tree template_field" / "tree array_field"
   and the final "return type;" are on lines missing from this excerpt;
   code lines are kept byte-identical.  */
3410 build_unc_object_type (tree template_type, tree object_type, tree name,
3413 tree type = make_node (RECORD_TYPE);
3415 = create_field_decl (get_identifier ("BOUNDS"), template_type, type,
3416 NULL_TREE, NULL_TREE, 0, 1)
3418 = create_field_decl (get_identifier ("ARRAY"), object_type, type,
3419 NULL_TREE, NULL_TREE, 0, 1);
3421 TYPE_NAME (type) = name;
3422 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
/* Chain BOUNDS before ARRAY and lay the record out.  */
3423 DECL_CHAIN (template_field) = array_field;
3424 finish_record_type (type, template_field, 0, true);
3426 /* Declare it now since it will never be declared otherwise. This is
3427 necessary to ensure that its subtrees are properly marked. */
3428 create_type_decl (name, type, NULL, true, debug_info_p, Empty);
3433 /* Same, taking a thin or fat pointer type instead of a template type. */
/* NOTE(review): the "tree template_type" declaration and the "return"
   keyword before the final call are on missing lines; code lines kept
   byte-identical.  */
3436 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
3437 tree name, bool debug_info_p)
3441 gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
/* Extract the template type: from the second field's pointee for a fat
   pointer, from the first field of the pointee record for a thin one.  */
3444 = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
3445 ? TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
3446 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
3449 build_unc_object_type (template_type, object_type, name, debug_info_p);
3452 /* Shift the component offsets within an unconstrained object TYPE to make it
3453 suitable for use as a designated type for thin pointers. */
/* NOTE(review): this excerpt likely omits the matching
   DECL_FIELD_BIT_OFFSET assignment for BOUNDS_FIELD -- confirm against
   the full source.  Code lines kept byte-identical.  */
3456 shift_unc_components_for_thin_pointers (tree type)
3458 /* Thin pointer values designate the ARRAY data of an unconstrained object,
3459 allocated past the BOUNDS template. The designated type is adjusted to
3460 have ARRAY at position zero and the template at a negative offset, so
3461 that COMPONENT_REFs on (*thin_ptr) designate the proper location. */
3463 tree bounds_field = TYPE_FIELDS (type);
3464 tree array_field = DECL_CHAIN (TYPE_FIELDS (type));
/* BOUNDS moves to minus the ARRAY field's byte position.  */
3466 DECL_FIELD_OFFSET (bounds_field)
3467 = size_binop (MINUS_EXPR, size_zero_node, byte_position (array_field));
3469 DECL_FIELD_OFFSET (array_field) = size_zero_node;
3470 DECL_FIELD_BIT_OFFSET (array_field) = bitsize_zero_node;
3473 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
3474 In the normal case this is just two adjustments, but we have more to
3475 do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
/* NOTE(review): several lines are missing from this excerpt (the "tree t"
   declaration, the early-return when there are no pointers/references,
   some braces and assignment left-hand sides).  Code lines are kept
   byte-identical; only comments were added.  */
3478 update_pointer_to (tree old_type, tree new_type)
3480 tree ptr = TYPE_POINTER_TO (old_type);
3481 tree ref = TYPE_REFERENCE_TO (old_type);
3484 /* If this is the main variant, process all the other variants first. */
3485 if (TYPE_MAIN_VARIANT (old_type) == old_type)
3486 for (t = TYPE_NEXT_VARIANT (old_type); t; t = TYPE_NEXT_VARIANT (t))
3487 update_pointer_to (t, new_type);
3489 /* If no pointers and no references, we are done. */
3493 /* Merge the old type qualifiers in the new type.
3495 Each old variant has qualifiers for specific reasons, and the new
3496 designated type as well. Each set of qualifiers represents useful
3497 information grabbed at some point, and merging the two simply unifies
3498 these inputs into the final type description.
3500 Consider for instance a volatile type frozen after an access to constant
3501 type designating it; after the designated type's freeze, we get here with
3502 a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
3503 when the access type was processed. We will make a volatile and readonly
3504 designated type, because that's what it really is.
3506 We might also get here for a non-dummy OLD_TYPE variant with different
3507 qualifiers than those of NEW_TYPE, for instance in some cases of pointers
3508 to private record type elaboration (see the comments around the call to
3509 this routine in gnat_to_gnu_entity <E_Access_Type>). We have to merge
3510 the qualifiers in those cases too, to avoid accidentally discarding the
3511 initial set, and will often end up with OLD_TYPE == NEW_TYPE then. */
/* NOTE(review): the "new_type" left-hand side of this assignment is on a
   missing line.  */
3513 = build_qualified_type (new_type,
3514 TYPE_QUALS (old_type) | TYPE_QUALS (new_type));
3516 /* If old type and new type are identical, there is nothing to do. */
3517 if (old_type == new_type)
3520 /* Otherwise, first handle the simple case. */
3521 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
3523 tree new_ptr, new_ref;
3525 /* If pointer or reference already points to new type, nothing to do.
3526 This can happen as update_pointer_to can be invoked multiple times
3527 on the same couple of types because of the type variants. */
3528 if ((ptr && TREE_TYPE (ptr) == new_type)
3529 || (ref && TREE_TYPE (ref) == new_type))
3532 /* Chain PTR and its variants at the end. */
3533 new_ptr = TYPE_POINTER_TO (new_type);
3536 while (TYPE_NEXT_PTR_TO (new_ptr))
3537 new_ptr = TYPE_NEXT_PTR_TO (new_ptr);
3538 TYPE_NEXT_PTR_TO (new_ptr) = ptr;
3541 TYPE_POINTER_TO (new_type) = ptr;
3543 /* Now adjust them. */
/* Walk every pointer node and every variant of each, retargeting its
   pointee (and the pointee of any cached null-bounds constant).  */
3544 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
3545 for (t = TYPE_MAIN_VARIANT (ptr); t; t = TYPE_NEXT_VARIANT (t))
3547 TREE_TYPE (t) = new_type;
3548 if (TYPE_NULL_BOUNDS (t))
3549 TREE_TYPE (TREE_OPERAND (TYPE_NULL_BOUNDS (t), 0)) = new_type;
3552 /* If we have adjusted named types, finalize them. This is necessary
3553 since we had forced a DWARF typedef for them in gnat_pushdecl. */
3554 for (ptr = TYPE_POINTER_TO (old_type); ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
3555 if (TYPE_NAME (ptr) && TREE_CODE (TYPE_NAME (ptr)) == TYPE_DECL)
3556 rest_of_type_decl_compilation (TYPE_NAME (ptr));
3558 /* Chain REF and its variants at the end. */
3559 new_ref = TYPE_REFERENCE_TO (new_type);
3562 while (TYPE_NEXT_REF_TO (new_ref))
3563 new_ref = TYPE_NEXT_REF_TO (new_ref);
3564 TYPE_NEXT_REF_TO (new_ref) = ref;
3567 TYPE_REFERENCE_TO (new_type) = ref;
3569 /* Now adjust them. */
3570 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
3571 for (t = TYPE_MAIN_VARIANT (ref); t; t = TYPE_NEXT_VARIANT (t))
3572 TREE_TYPE (t) = new_type;
/* Detach the chains from the old type now that they belong to NEW_TYPE.  */
3574 TYPE_POINTER_TO (old_type) = NULL_TREE;
3575 TYPE_REFERENCE_TO (old_type) = NULL_TREE;
3578 /* Now deal with the unconstrained array case. In this case the pointer
3579 is actually a record where both fields are pointers to dummy nodes.
3580 Turn them into pointers to the correct types using update_pointer_to.
3581 Likewise for the pointer to the object record (thin pointer). */
3584 tree new_ptr = TYPE_POINTER_TO (new_type);
3586 gcc_assert (TYPE_IS_FAT_POINTER_P (ptr));
3588 /* If PTR already points to NEW_TYPE, nothing to do. This can happen
3589 since update_pointer_to can be invoked multiple times on the same
3590 couple of types because of the type variants. */
3591 if (TYPE_UNCONSTRAINED_ARRAY (ptr) == new_type)
/* NOTE(review): the recursive "update_pointer_to" call heads for these
   two argument lists are on missing lines -- they retarget the array
   pointer field and the bounds pointer field respectively.  */
3595 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
3596 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))));
3599 (TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (ptr)))),
3600 TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_ptr)))));
3602 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type),
3603 TYPE_OBJECT_RECORD_TYPE (new_type));
3605 TYPE_POINTER_TO (old_type) = NULL_TREE;
3609 /* Convert EXPR, a pointer to a constrained array, into a pointer to an
3610 unconstrained one. This involves making or finding a template. */
/* NOTE(review): the "tree template_tree" declaration, braces and some
   "return" lines (e.g. returning T in the null case) are missing from
   this excerpt; code lines kept byte-identical.  */
3613 convert_to_fat_pointer (tree type, tree expr)
3615 tree template_type = TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))));
3616 tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
3617 tree etype = TREE_TYPE (expr);
3619 VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, 2);
3621 /* If EXPR is null, make a fat pointer that contains a null pointer to the
3622 array (compare_fat_pointers ensures that this is the full discriminant)
3623 and a valid pointer to the bounds. This latter property is necessary
3624 since the compiler can hoist the load of the bounds done through it. */
3625 if (integer_zerop (expr))
3627 tree ptr_template_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
3628 tree null_bounds, t;
/* Reuse the cached null-bounds object for this pointer type if one was
   already created.  */
3630 if (TYPE_NULL_BOUNDS (ptr_template_type))
3631 null_bounds = TYPE_NULL_BOUNDS (ptr_template_type);
3634 /* The template type can still be dummy at this point so we build an
3635 empty constructor. The middle-end will fill it in with zeros. */
3636 t = build_constructor (template_type, NULL);
3637 TREE_CONSTANT (t) = TREE_STATIC (t) = 1;
3638 null_bounds = build_unary_op (ADDR_EXPR, NULL_TREE, t);
3639 SET_TYPE_NULL_BOUNDS (ptr_template_type, null_bounds);
3642 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
3643 fold_convert (p_array_type, null_pointer_node));
3644 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), null_bounds);
3645 t = build_constructor (type, v);
3646 /* Do not set TREE_CONSTANT so as to force T to static memory. */
3647 TREE_CONSTANT (t) = 0;
3648 TREE_STATIC (t) = 1;
3653 /* If EXPR is a thin pointer, make template and data from the record.. */
3654 else if (TYPE_IS_THIN_POINTER_P (etype))
3656 tree fields = TYPE_FIELDS (TREE_TYPE (etype));
3658 expr = gnat_protect_expr (expr);
/* Strip a direct ADDR_EXPR; otherwise dereference the thin pointer to
   reach the underlying record.  */
3659 if (TREE_CODE (expr) == ADDR_EXPR)
3660 expr = TREE_OPERAND (expr, 0);
3662 expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
3664 template_tree = build_component_ref (expr, NULL_TREE, fields, false);
3665 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
3666 build_component_ref (expr, NULL_TREE,
3667 DECL_CHAIN (fields), false));
3670 /* Otherwise, build the constructor for the template. */
3672 template_tree = build_template (template_type, TREE_TYPE (etype), expr);
3674 /* The final result is a constructor for the fat pointer.
3676 If EXPR is an argument of a foreign convention subprogram, the type it
3677 points to is directly the component type. In this case, the expression
3678 type may not match the corresponding FIELD_DECL type at this point, so we
3679 call "convert" here to fix that up if necessary. This type consistency is
3680 required, for instance because it ensures that possible later folding of
3681 COMPONENT_REFs against this constructor always yields something of the
3682 same type as the initial reference.
3684 Note that the call to "build_template" above is still fine because it
3685 will only refer to the provided TEMPLATE_TYPE in this case. */
3686 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
3687 convert (p_array_type, expr));
3688 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
3689 build_unary_op (ADDR_EXPR, NULL_TREE,
3691 return gnat_build_constructor (type, v);
3694 /* Convert to a thin pointer type, TYPE. The only thing we know how to convert
3695 is something that is a fat pointer, so convert to it first if it EXPR
3696 is not already a fat pointer. */
/* NOTE(review): the "expr" left-hand side of the fat-pointer conversion,
   the second argument of the build_component_ref call and the final
   "return expr;" are on lines missing from this excerpt; code lines kept
   byte-identical.  */
3699 convert_to_thin_pointer (tree type, tree expr)
3701 if (!TYPE_IS_FAT_POINTER_P (TREE_TYPE (expr)))
3703 = convert_to_fat_pointer
3704 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
3706 /* We get the pointer to the data and use a NOP_EXPR to make it the
3708 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)),
3710 expr = build1 (NOP_EXPR, type, expr);
3715 /* Create an expression whose value is that of EXPR,
3716 converted to type TYPE. The TREE_TYPE of the value
3717 is always TYPE. This function implements all reasonable
3718 conversions; callers should filter out those that are
3719 not permitted by the language being compiled. */
3722 convert (tree type, tree expr)
3724 tree etype = TREE_TYPE (expr);
3725 enum tree_code ecode = TREE_CODE (etype);
3726 enum tree_code code = TREE_CODE (type);
3728 /* If the expression is already of the right type, we are done. */
3732 /* If both input and output have padding and are of variable size, do this
3733 as an unchecked conversion. Likewise if one is a mere variant of the
3734 other, so we avoid a pointless unpad/repad sequence. */
3735 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
3736 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
3737 && (!TREE_CONSTANT (TYPE_SIZE (type))
3738 || !TREE_CONSTANT (TYPE_SIZE (etype))
3739 || gnat_types_compatible_p (type, etype)
3740 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
3741 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
3744 /* If the output type has padding, convert to the inner type and make a
3745 constructor to build the record, unless a variable size is involved. */
3746 else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
3748 VEC(constructor_elt,gc) *v;
3750 /* If we previously converted from another type and our type is
3751 of variable size, remove the conversion to avoid the need for
3752 variable-sized temporaries. Likewise for a conversion between
3753 original and packable version. */
3754 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3755 && (!TREE_CONSTANT (TYPE_SIZE (type))
3756 || (ecode == RECORD_TYPE
3757 && TYPE_NAME (etype)
3758 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
3759 expr = TREE_OPERAND (expr, 0);
3761 /* If we are just removing the padding from expr, convert the original
3762 object if we have variable size in order to avoid the need for some
3763 variable-sized temporaries. Likewise if the padding is a variant
3764 of the other, so we avoid a pointless unpad/repad sequence. */
3765 if (TREE_CODE (expr) == COMPONENT_REF
3766 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
3767 && (!TREE_CONSTANT (TYPE_SIZE (type))
3768 || gnat_types_compatible_p (type,
3769 TREE_TYPE (TREE_OPERAND (expr, 0)))
3770 || (ecode == RECORD_TYPE
3771 && TYPE_NAME (etype)
3772 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
3773 return convert (type, TREE_OPERAND (expr, 0));
3775 /* If the inner type is of self-referential size and the expression type
3776 is a record, do this as an unchecked conversion. But first pad the
3777 expression if possible to have the same size on both sides. */
3778 if (ecode == RECORD_TYPE
3779 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
3781 if (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST)
3782 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
3783 false, false, false, true),
3785 return unchecked_convert (type, expr, false);
3788 /* If we are converting between array types with variable size, do the
3789 final conversion as an unchecked conversion, again to avoid the need
3790 for some variable-sized temporaries. If valid, this conversion is
3791 very likely purely technical and without real effects. */
3792 if (ecode == ARRAY_TYPE
3793 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
3794 && !TREE_CONSTANT (TYPE_SIZE (etype))
3795 && !TREE_CONSTANT (TYPE_SIZE (type)))
3796 return unchecked_convert (type,
3797 convert (TREE_TYPE (TYPE_FIELDS (type)),
3801 v = VEC_alloc (constructor_elt, gc, 1);
3802 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
3803 convert (TREE_TYPE (TYPE_FIELDS (type)), expr));
3804 return gnat_build_constructor (type, v);
3807 /* If the input type has padding, remove it and convert to the output type.
3808 The conditions ordering is arranged to ensure that the output type is not
3809 a padding type here, as it is not clear whether the conversion would
3810 always be correct if this was to happen. */
3811 else if (ecode == RECORD_TYPE && TYPE_PADDING_P (etype))
3815 /* If we have just converted to this padded type, just get the
3816 inner expression. */
3817 if (TREE_CODE (expr) == CONSTRUCTOR
3818 && !VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (expr))
3819 && VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->index
3820 == TYPE_FIELDS (etype))
3822 = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->value;
3824 /* Otherwise, build an explicit component reference. */
3827 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
3829 return convert (type, unpadded);
3832 /* If the input is a biased type, adjust first. */
3833 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
3834 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
3835 fold_convert (TREE_TYPE (etype),
3837 TYPE_MIN_VALUE (etype)));
3839 /* If the input is a justified modular type, we need to extract the actual
3840 object before converting it to any other type with the exceptions of an
3841 unconstrained array or of a mere type variant. It is useful to avoid the
3842 extraction and conversion in the type variant case because it could end
3843 up replacing a VAR_DECL expr by a constructor and we might be about the
3844 take the address of the result. */
3845 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
3846 && code != UNCONSTRAINED_ARRAY_TYPE
3847 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
3848 return convert (type, build_component_ref (expr, NULL_TREE,
3849 TYPE_FIELDS (etype), false));
3851 /* If converting to a type that contains a template, convert to the data
3852 type and then build the template. */
3853 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
3855 tree obj_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
3856 VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, 2);
3858 /* If the source already has a template, get a reference to the
3859 associated array only, as we are going to rebuild a template
3860 for the target type anyway. */
3861 expr = maybe_unconstrained_array (expr);
3863 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
3864 build_template (TREE_TYPE (TYPE_FIELDS (type)),
3865 obj_type, NULL_TREE));
3866 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
3867 convert (obj_type, expr));
3868 return gnat_build_constructor (type, v);
3871 /* There are some cases of expressions that we process specially. */
3872 switch (TREE_CODE (expr))
3878 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
3879 conversion in gnat_expand_expr. NULL_EXPR does not represent
3880 and actual value, so no conversion is needed. */
3881 expr = copy_node (expr);
3882 TREE_TYPE (expr) = type;
3886 /* If we are converting a STRING_CST to another constrained array type,
3887 just make a new one in the proper type. */
3888 if (code == ecode && AGGREGATE_TYPE_P (etype)
3889 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
3890 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
3892 expr = copy_node (expr);
3893 TREE_TYPE (expr) = type;
3899 /* If we are converting a VECTOR_CST to a mere variant type, just make
3900 a new one in the proper type. */
3901 if (code == ecode && gnat_types_compatible_p (type, etype))
3903 expr = copy_node (expr);
3904 TREE_TYPE (expr) = type;
3909 /* If we are converting a CONSTRUCTOR to a mere variant type, just make
3910 a new one in the proper type. */
3911 if (code == ecode && gnat_types_compatible_p (type, etype))
3913 expr = copy_node (expr);
3914 TREE_TYPE (expr) = type;
3918 /* Likewise for a conversion between original and packable version, or
3919 conversion between types of the same size and with the same list of
3920 fields, but we have to work harder to preserve type consistency. */
3922 && code == RECORD_TYPE
3923 && (TYPE_NAME (type) == TYPE_NAME (etype)
3924 || tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (etype))))
3927 VEC(constructor_elt,gc) *e = CONSTRUCTOR_ELTS (expr);
3928 unsigned HOST_WIDE_INT len = VEC_length (constructor_elt, e);
3929 VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, len);
3930 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
3931 unsigned HOST_WIDE_INT idx;
3934 /* Whether we need to clear TREE_CONSTANT et al. on the output
3935 constructor when we convert in place. */
3936 bool clear_constant = false;
3938 FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
3940 constructor_elt *elt;
3941 /* We expect only simple constructors. */
3942 if (!SAME_FIELD_P (index, efield))
3944 /* The field must be the same. */
3945 if (!SAME_FIELD_P (efield, field))
3947 elt = VEC_quick_push (constructor_elt, v, NULL);
3949 elt->value = convert (TREE_TYPE (field), value);
3951 /* If packing has made this field a bitfield and the input
3952 value couldn't be emitted statically any more, we need to
3953 clear TREE_CONSTANT on our output. */
3955 && TREE_CONSTANT (expr)
3956 && !CONSTRUCTOR_BITFIELD_P (efield)
3957 && CONSTRUCTOR_BITFIELD_P (field)
3958 && !initializer_constant_valid_for_bitfield_p (value))
3959 clear_constant = true;
3961 efield = DECL_CHAIN (efield);
3962 field = DECL_CHAIN (field);
3965 /* If we have been able to match and convert all the input fields
3966 to their output type, convert in place now. We'll fallback to a
3967 view conversion downstream otherwise. */
3970 expr = copy_node (expr);
3971 TREE_TYPE (expr) = type;
3972 CONSTRUCTOR_ELTS (expr) = v;
3974 TREE_CONSTANT (expr) = TREE_STATIC (expr) = 0;
3979 /* Likewise for a conversion between array type and vector type with a
3980 compatible representative array. */
3981 else if (code == VECTOR_TYPE
3982 && ecode == ARRAY_TYPE
3983 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
3986 VEC(constructor_elt,gc) *e = CONSTRUCTOR_ELTS (expr);
3987 unsigned HOST_WIDE_INT len = VEC_length (constructor_elt, e);
3988 VEC(constructor_elt,gc) *v;
3989 unsigned HOST_WIDE_INT ix;
3992 /* Build a VECTOR_CST from a *constant* array constructor. */
3993 if (TREE_CONSTANT (expr))
3995 bool constant_p = true;
3997 /* Iterate through elements and check if all constructor
3998 elements are *_CSTs. */
3999 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4000 if (!CONSTANT_CLASS_P (value))
4007 return build_vector_from_ctor (type,
4008 CONSTRUCTOR_ELTS (expr));
4011 /* Otherwise, build a regular vector constructor. */
4012 v = VEC_alloc (constructor_elt, gc, len);
4013 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4015 constructor_elt *elt = VEC_quick_push (constructor_elt, v, NULL);
4016 elt->index = NULL_TREE;
4019 expr = copy_node (expr);
4020 TREE_TYPE (expr) = type;
4021 CONSTRUCTOR_ELTS (expr) = v;
4026 case UNCONSTRAINED_ARRAY_REF:
4027 /* First retrieve the underlying array. */
4028 expr = maybe_unconstrained_array (expr);
4029 etype = TREE_TYPE (expr);
4030 ecode = TREE_CODE (etype);
4033 case VIEW_CONVERT_EXPR:
4035 /* GCC 4.x is very sensitive to type consistency overall, and view
4036 conversions thus are very frequent. Even though just "convert"ing
4037 the inner operand to the output type is fine in most cases, it
4038 might expose unexpected input/output type mismatches in special
4039 circumstances so we avoid such recursive calls when we can. */
4040 tree op0 = TREE_OPERAND (expr, 0);
4042 /* If we are converting back to the original type, we can just
4043 lift the input conversion. This is a common occurrence with
4044 switches back-and-forth amongst type variants. */
4045 if (type == TREE_TYPE (op0))
4048 /* Otherwise, if we're converting between two aggregate or vector
4049 types, we might be allowed to substitute the VIEW_CONVERT_EXPR
4050 target type in place or to just convert the inner expression. */
4051 if ((AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
4052 || (VECTOR_TYPE_P (type) && VECTOR_TYPE_P (etype)))
4054 /* If we are converting between mere variants, we can just
4055 substitute the VIEW_CONVERT_EXPR in place. */
4056 if (gnat_types_compatible_p (type, etype))
4057 return build1 (VIEW_CONVERT_EXPR, type, op0);
4059 /* Otherwise, we may just bypass the input view conversion unless
4060 one of the types is a fat pointer, which is handled by
4061 specialized code below which relies on exact type matching. */
4062 else if (!TYPE_IS_FAT_POINTER_P (type)
4063 && !TYPE_IS_FAT_POINTER_P (etype))
4064 return convert (type, op0);
4074 /* Check for converting to a pointer to an unconstrained array. */
4075 if (TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
4076 return convert_to_fat_pointer (type, expr);
4078 /* If we are converting between two aggregate or vector types that are mere
4079 variants, just make a VIEW_CONVERT_EXPR. Likewise when we are converting
4080 to a vector type from its representative array type. */
4081 else if ((code == ecode
4082 && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
4083 && gnat_types_compatible_p (type, etype))
4084 || (code == VECTOR_TYPE
4085 && ecode == ARRAY_TYPE
4086 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4088 return build1 (VIEW_CONVERT_EXPR, type, expr);
4090 /* If we are converting between tagged types, try to upcast properly. */
4091 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4092 && TYPE_ALIGN_OK (etype) && TYPE_ALIGN_OK (type))
4094 tree child_etype = etype;
4096 tree field = TYPE_FIELDS (child_etype);
4097 if (DECL_NAME (field) == parent_name_id && TREE_TYPE (field) == type)
4098 return build_component_ref (expr, NULL_TREE, field, false);
4099 child_etype = TREE_TYPE (field);
4100 } while (TREE_CODE (child_etype) == RECORD_TYPE);
4103 /* If we are converting from a smaller form of record type back to it, just
4104 make a VIEW_CONVERT_EXPR. But first pad the expression to have the same
4105 size on both sides. */
4106 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4107 && smaller_form_type_p (etype, type))
4109 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4110 false, false, false, true),
4112 return build1 (VIEW_CONVERT_EXPR, type, expr);
4115 /* In all other cases of related types, make a NOP_EXPR. */
4116 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
4117 return fold_convert (type, expr);
4122 return fold_build1 (CONVERT_EXPR, type, expr);
4125 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4126 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4127 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4128 return unchecked_convert (type, expr, false);
4129 else if (TYPE_BIASED_REPRESENTATION_P (type))
4130 return fold_convert (type,
4131 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4132 convert (TREE_TYPE (type), expr),
4133 TYPE_MIN_VALUE (type)));
4135 /* ... fall through ... */
4139 /* If we are converting an additive expression to an integer type
4140 with lower precision, be wary of the optimization that can be
4141 applied by convert_to_integer. There are 2 problematic cases:
4142 - if the first operand was originally of a biased type,
4143 because we could be recursively called to convert it
4144 to an intermediate type and thus rematerialize the
4145 additive operator endlessly,
4146 - if the expression contains a placeholder, because an
4147 intermediate conversion that changes the sign could
4148 be inserted and thus introduce an artificial overflow
4149 at compile time when the placeholder is substituted. */
4150 if (code == INTEGER_TYPE
4151 && ecode == INTEGER_TYPE
4152 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4153 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4155 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4157 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4158 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4159 || CONTAINS_PLACEHOLDER_P (expr))
4160 return build1 (NOP_EXPR, type, expr);
4163 return fold (convert_to_integer (type, expr));
4166 case REFERENCE_TYPE:
4167 /* If converting between two pointers to records denoting
4168 both a template and type, adjust if needed to account
4169 for any differing offsets, since one might be negative. */
4170 if (TYPE_IS_THIN_POINTER_P (etype) && TYPE_IS_THIN_POINTER_P (type))
4173 = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
4174 bit_position (TYPE_FIELDS (TREE_TYPE (type))));
4176 = size_binop (CEIL_DIV_EXPR, bit_diff, sbitsize_unit_node);
4177 expr = build1 (NOP_EXPR, type, expr);
4178 TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
4179 if (integer_zerop (byte_diff))
4182 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
4183 fold (convert (sizetype, byte_diff)));
4186 /* If converting to a thin pointer, handle specially. */
4187 if (TYPE_IS_THIN_POINTER_P (type)
4188 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
4189 return convert_to_thin_pointer (type, expr);
4191 /* If converting fat pointer to normal pointer, get the pointer to the
4192 array and then convert it. */
4193 else if (TYPE_IS_FAT_POINTER_P (etype))
4195 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
4197 return fold (convert_to_pointer (type, expr));
4200 return fold (convert_to_real (type, expr));
4203 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
4205 VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, 1);
4207 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4208 convert (TREE_TYPE (TYPE_FIELDS (type)),
4210 return gnat_build_constructor (type, v);
4213 /* ... fall through ... */
4216 /* In these cases, assume the front-end has validated the conversion.
4217 If the conversion is valid, it will be a bit-wise conversion, so
4218 it can be viewed as an unchecked conversion. */
4219 return unchecked_convert (type, expr, false);
4222 /* This is either a conversion between a tagged type and some
4223 subtype, which we have to mark as a UNION_TYPE because of
4224 overlapping fields or a conversion of an Unchecked_Union. */
4225 return unchecked_convert (type, expr, false);
4227 case UNCONSTRAINED_ARRAY_TYPE:
4228 /* If the input is a VECTOR_TYPE, convert to the representative
4229 array type first. */
4230 if (ecode == VECTOR_TYPE)
4232 expr = convert (TYPE_REPRESENTATIVE_ARRAY (etype), expr);
4233 etype = TREE_TYPE (expr);
4234 ecode = TREE_CODE (etype);
4237 /* If EXPR is a constrained array, take its address, convert it to a
4238 fat pointer, and then dereference it. Likewise if EXPR is a
4239 record containing both a template and a constrained array.
4240 Note that a record representing a justified modular type
4241 always represents a packed constrained array. */
4242 if (ecode == ARRAY_TYPE
4243 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
4244 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
4245 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
4248 (INDIRECT_REF, NULL_TREE,
4249 convert_to_fat_pointer (TREE_TYPE (type),
4250 build_unary_op (ADDR_EXPR,
4253 /* Do something very similar for converting one unconstrained
4254 array to another. */
4255 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
4257 build_unary_op (INDIRECT_REF, NULL_TREE,
4258 convert (TREE_TYPE (type),
4259 build_unary_op (ADDR_EXPR,
4265 return fold (convert_to_complex (type, expr));
4272 /* Create an expression whose value is that of EXPR converted to the common
4273 index type, which is sizetype. EXPR is supposed to be in the base type
4274 of the GNAT index type. Calling it is equivalent to doing
4276 convert (sizetype, expr)
4278 but we try to distribute the type conversion with the knowledge that EXPR
4279 cannot overflow in its type. This is a best-effort approach and we fall
4280 back to the above expression as soon as difficulties are encountered.
4282 This is necessary to overcome issues that arise when the GNAT base index
4283 type and the GCC common index type (sizetype) don't have the same size,
4284 which is quite frequent on 64-bit architectures. In this case, and if
4285 the GNAT base index type is signed but the iteration type of the loop has
4286 been forced to unsigned, the loop scalar evolution engine cannot compute
4287 a simple evolution for the general induction variables associated with the
4288 array indices, because it will preserve the wrap-around semantics in the
4289 unsigned type of their "inner" part. As a result, many loop optimizations
4292 The solution is to use a special (basic) induction variable that is at
4293 least as large as sizetype, and to express the aforementioned general
4294 induction variables in terms of this induction variable, eliminating
4295 the problematic intermediate truncation to the GNAT base index type.
4296 This is possible as long as the original expression doesn't overflow
4297 and if the middle-end hasn't introduced artificial overflows in the
4298 course of the various simplification it can make to the expression. */
4301 convert_to_index_type (tree expr)
4303 enum tree_code code = TREE_CODE (expr);
4304 tree type = TREE_TYPE (expr);
4306 /* If the type is unsigned, overflow is allowed so we cannot be sure that
4307 EXPR doesn't overflow. Keep it simple if optimization is disabled. */
4308 if (TYPE_UNSIGNED (type) || !optimize)
4309 return convert (sizetype, expr);
4314 /* The main effect of the function: replace a loop parameter with its
4315 associated special induction variable. */
4316 if (DECL_LOOP_PARM_P (expr) && DECL_INDUCTION_VAR (expr))
4317 expr = DECL_INDUCTION_VAR (expr);
4322 tree otype = TREE_TYPE (TREE_OPERAND (expr, 0));
4323 /* Bail out as soon as we suspect some sort of type frobbing. */
4324 if (TYPE_PRECISION (type) != TYPE_PRECISION (otype)
4325 || TYPE_UNSIGNED (type) != TYPE_UNSIGNED (otype))
4329 /* ... fall through ... */
4331 case NON_LVALUE_EXPR:
4332 return fold_build1 (code, sizetype,
4333 convert_to_index_type (TREE_OPERAND (expr, 0)));
4338 return fold_build2 (code, sizetype,
4339 convert_to_index_type (TREE_OPERAND (expr, 0)),
4340 convert_to_index_type (TREE_OPERAND (expr, 1)));
4343 return fold_build2 (code, sizetype, TREE_OPERAND (expr, 0),
4344 convert_to_index_type (TREE_OPERAND (expr, 1)));
4347 return fold_build3 (code, sizetype, TREE_OPERAND (expr, 0),
4348 convert_to_index_type (TREE_OPERAND (expr, 1)),
4349 convert_to_index_type (TREE_OPERAND (expr, 2)));
4355 return convert (sizetype, expr);
4358 /* Remove all conversions that are done in EXP. This includes converting
4359 from a padded type or to a justified modular type. If TRUE_ADDRESS
4360 is true, always return the address of the containing object even if
4361 the address is not bit-aligned. */
4364 remove_conversions (tree exp, bool true_address)
4366 switch (TREE_CODE (exp))
4370 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4371 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
4373 remove_conversions (VEC_index (constructor_elt,
4374 CONSTRUCTOR_ELTS (exp), 0)->value,
4379 if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
4380 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4384 case VIEW_CONVERT_EXPR:
4385 case NON_LVALUE_EXPR:
4386 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4395 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
4396 refers to the underlying array. If it has TYPE_CONTAINS_TEMPLATE_P,
4397 likewise return an expression pointing to the underlying array. */
4400 maybe_unconstrained_array (tree exp)
4402 enum tree_code code = TREE_CODE (exp);
4403 tree type = TREE_TYPE (exp);
4405 switch (TREE_CODE (type))
4407 case UNCONSTRAINED_ARRAY_TYPE:
4408 if (code == UNCONSTRAINED_ARRAY_REF)
4410 const bool read_only = TREE_READONLY (exp);
4411 const bool no_trap = TREE_THIS_NOTRAP (exp);
4413 exp = TREE_OPERAND (exp, 0);
4414 type = TREE_TYPE (exp);
4416 if (TREE_CODE (exp) == COND_EXPR)
4419 = build_unary_op (INDIRECT_REF, NULL_TREE,
4420 build_component_ref (TREE_OPERAND (exp, 1),
4425 = build_unary_op (INDIRECT_REF, NULL_TREE,
4426 build_component_ref (TREE_OPERAND (exp, 2),
4431 exp = build3 (COND_EXPR,
4432 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))),
4433 TREE_OPERAND (exp, 0), op1, op2);
4437 exp = build_unary_op (INDIRECT_REF, NULL_TREE,
4438 build_component_ref (exp, NULL_TREE,
4441 TREE_READONLY (exp) = read_only;
4442 TREE_THIS_NOTRAP (exp) = no_trap;
4446 else if (code == NULL_EXPR)
4447 exp = build1 (NULL_EXPR,
4448 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type)))),
4449 TREE_OPERAND (exp, 0));
4453 /* If this is a padded type and it contains a template, convert to the
4454 unpadded type first. */
4455 if (TYPE_PADDING_P (type)
4456 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
4457 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
4459 exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
4460 type = TREE_TYPE (exp);
4463 if (TYPE_CONTAINS_TEMPLATE_P (type))
4465 exp = build_component_ref (exp, NULL_TREE,
4466 DECL_CHAIN (TYPE_FIELDS (type)),
4468 type = TREE_TYPE (exp);
4470 /* If the array type is padded, convert to the unpadded type. */
4471 if (TYPE_IS_PADDING_P (type))
4472 exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
4483 /* Return true if EXPR is an expression that can be folded as an operand
4484 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
4487 can_fold_for_view_convert_p (tree expr)
4491 /* The folder will fold NOP_EXPRs between integral types with the same
4492 precision (in the middle-end's sense). We cannot allow it if the
4493 types don't have the same precision in the Ada sense as well. */
4494 if (TREE_CODE (expr) != NOP_EXPR)
4497 t1 = TREE_TYPE (expr);
4498 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
4500 /* Defer to the folder for non-integral conversions. */
4501 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
4504 /* Only fold conversions that preserve both precisions. */
4505 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
4506 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
4512 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
4513 If NOTRUNC_P is true, truncation operations should be suppressed.
4515 Special care is required with (source or target) integral types whose
4516 precision is not equal to their size, to make sure we fetch or assign
4517 the value bits whose location might depend on the endianness, e.g.
4519 Rmsize : constant := 8;
4520 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
4522 type Bit_Array is array (1 .. Rmsize) of Boolean;
4523 pragma Pack (Bit_Array);
4525 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
4527 Value : Int := 2#1000_0001#;
4528 Vbits : Bit_Array := To_Bit_Array (Value);
4530 we expect the 8 bits at Vbits'Address to always contain Value, while
4531 their original location depends on the endianness, at Value'Address
4532 on a little-endian architecture but not on a big-endian one. */
4535 unchecked_convert (tree type, tree expr, bool notrunc_p)
4537 tree etype = TREE_TYPE (expr);
4538 enum tree_code ecode = TREE_CODE (etype);
4539 enum tree_code code = TREE_CODE (type);
4542 /* If the expression is already of the right type, we are done. */
4546 /* If both types types are integral just do a normal conversion.
4547 Likewise for a conversion to an unconstrained array. */
4548 if ((((INTEGRAL_TYPE_P (type)
4549 && !(code == INTEGER_TYPE && TYPE_VAX_FLOATING_POINT_P (type)))
4550 || (POINTER_TYPE_P (type) && ! TYPE_IS_THIN_POINTER_P (type))
4551 || (code == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (type)))
4552 && ((INTEGRAL_TYPE_P (etype)
4553 && !(ecode == INTEGER_TYPE && TYPE_VAX_FLOATING_POINT_P (etype)))
4554 || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype))
4555 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype))))
4556 || code == UNCONSTRAINED_ARRAY_TYPE)
4558 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
4560 tree ntype = copy_type (etype);
4561 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
4562 TYPE_MAIN_VARIANT (ntype) = ntype;
4563 expr = build1 (NOP_EXPR, ntype, expr);
4566 if (code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
4568 tree rtype = copy_type (type);
4569 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
4570 TYPE_MAIN_VARIANT (rtype) = rtype;
4571 expr = convert (rtype, expr);
4572 expr = build1 (NOP_EXPR, type, expr);
4575 expr = convert (type, expr);
4578 /* If we are converting to an integral type whose precision is not equal
4579 to its size, first unchecked convert to a record type that contains an
4580 field of the given precision. Then extract the field. */
4581 else if (INTEGRAL_TYPE_P (type)
4582 && TYPE_RM_SIZE (type)
4583 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4584 GET_MODE_BITSIZE (TYPE_MODE (type))))
4586 tree rec_type = make_node (RECORD_TYPE);
4587 unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (type));
4588 tree field_type, field;
4590 if (TYPE_UNSIGNED (type))
4591 field_type = make_unsigned_type (prec);
4593 field_type = make_signed_type (prec);
4594 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (type));
4596 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
4597 NULL_TREE, NULL_TREE, 1, 0);
4599 TYPE_FIELDS (rec_type) = field;
4600 layout_type (rec_type);
4602 expr = unchecked_convert (rec_type, expr, notrunc_p);
4603 expr = build_component_ref (expr, NULL_TREE, field, false);
4604 expr = fold_build1 (NOP_EXPR, type, expr);
4607 /* Similarly if we are converting from an integral type whose precision is
4608 not equal to its size, first copy into a field of the given precision
4609 and unchecked convert the record type. */
4610 else if (INTEGRAL_TYPE_P (etype)
4611 && TYPE_RM_SIZE (etype)
4612 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
4613 GET_MODE_BITSIZE (TYPE_MODE (etype))))
4615 tree rec_type = make_node (RECORD_TYPE);
4616 unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (etype));
4617 VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, 1);
4618 tree field_type, field;
4620 if (TYPE_UNSIGNED (etype))
4621 field_type = make_unsigned_type (prec);
4623 field_type = make_signed_type (prec);
4624 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (etype));
4626 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
4627 NULL_TREE, NULL_TREE, 1, 0);
4629 TYPE_FIELDS (rec_type) = field;
4630 layout_type (rec_type);
4632 expr = fold_build1 (NOP_EXPR, field_type, expr);
4633 CONSTRUCTOR_APPEND_ELT (v, field, expr);
4634 expr = gnat_build_constructor (rec_type, v);
4635 expr = unchecked_convert (type, expr, notrunc_p);
4638 /* If we are converting from a scalar type to a type with a different size,
4639 we need to pad to have the same size on both sides.
4641 ??? We cannot do it unconditionally because unchecked conversions are
4642 used liberally by the front-end to implement polymorphism, e.g. in:
4644 S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
4645 return p___size__4 (p__object!(S191s.all));
4647 so we skip all expressions that are references. */
4648 else if (!REFERENCE_CLASS_P (expr)
4649 && !AGGREGATE_TYPE_P (etype)
4650 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
4651 && (c = tree_int_cst_compare (TYPE_SIZE (etype), TYPE_SIZE (type))))
4655 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4656 false, false, false, true),
4658 expr = unchecked_convert (type, expr, notrunc_p);
4662 tree rec_type = maybe_pad_type (type, TYPE_SIZE (etype), 0, Empty,
4663 false, false, false, true);
4664 expr = unchecked_convert (rec_type, expr, notrunc_p);
4665 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (rec_type),
4670 /* We have a special case when we are converting between two unconstrained
4671 array types. In that case, take the address, convert the fat pointer
4672 types, and dereference. */
4673 else if (ecode == code && code == UNCONSTRAINED_ARRAY_TYPE)
4674 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
4675 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
4676 build_unary_op (ADDR_EXPR, NULL_TREE,
4679 /* Another special case is when we are converting to a vector type from its
4680 representative array type; this a regular conversion. */
4681 else if (code == VECTOR_TYPE
4682 && ecode == ARRAY_TYPE
4683 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4685 expr = convert (type, expr);
4689 expr = maybe_unconstrained_array (expr);
4690 etype = TREE_TYPE (expr);
4691 ecode = TREE_CODE (etype);
4692 if (can_fold_for_view_convert_p (expr))
4693 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
4695 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
4698 /* If the result is an integral type whose precision is not equal to its
4699 size, sign- or zero-extend the result. We need not do this if the input
4700 is an integral type of the same precision and signedness or if the output
4701 is a biased type or if both the input and output are unsigned. */
4703 && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
4704 && !(code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
4705 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4706 GET_MODE_BITSIZE (TYPE_MODE (type)))
4707 && !(INTEGRAL_TYPE_P (etype)
4708 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
4709 && operand_equal_p (TYPE_RM_SIZE (type),
4710 (TYPE_RM_SIZE (etype) != 0
4711 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
4713 && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
4716 = gnat_type_for_mode (TYPE_MODE (type), TYPE_UNSIGNED (type));
4718 = convert (base_type,
4719 size_binop (MINUS_EXPR,
4721 (GET_MODE_BITSIZE (TYPE_MODE (type))),
4722 TYPE_RM_SIZE (type)));
4725 build_binary_op (RSHIFT_EXPR, base_type,
4726 build_binary_op (LSHIFT_EXPR, base_type,
4727 convert (base_type, expr),
4732 /* An unchecked conversion should never raise Constraint_Error. The code
4733 below assumes that GCC's conversion routines overflow the same way that
4734 the underlying hardware does. This is probably true. In the rare case
4735 when it is false, we can rely on the fact that such conversions are
4736 erroneous anyway. */
4737 if (TREE_CODE (expr) == INTEGER_CST)
4738 TREE_OVERFLOW (expr) = 0;
4740 /* If the sizes of the types differ and this is an VIEW_CONVERT_EXPR,
4741 show no longer constant. */
4742 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4743 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
4745 TREE_CONSTANT (expr) = 0;
4750 /* Return the appropriate GCC tree code for the specified GNAT_TYPE,
4751 the latter being a record type as predicated by Is_Record_Type. */
4754 tree_code_for_record_type (Entity_Id gnat_type)
4756 Node_Id component_list, component;
4758 /* Return UNION_TYPE if it's an Unchecked_Union whose non-discriminant
4759 fields are all in the variant part. Otherwise, return RECORD_TYPE. */
4760 if (!Is_Unchecked_Union (gnat_type))
4763 gnat_type = Implementation_Base_Type (gnat_type);
4765 = Component_List (Type_Definition (Declaration_Node (gnat_type)));
4767 for (component = First_Non_Pragma (Component_Items (component_list));
4768 Present (component);
4769 component = Next_Non_Pragma (component))
4770 if (Ekind (Defining_Entity (component)) == E_Component)
4776 /* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
4777 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
4778 according to the presence of an alignment clause on the type or, if it
4779 is an array, on the component type. */
4782 is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
4784 gnat_type = Underlying_Type (gnat_type);
4786 *align_clause = Present (Alignment_Clause (gnat_type));
4788 if (Is_Array_Type (gnat_type))
4790 gnat_type = Underlying_Type (Component_Type (gnat_type));
4791 if (Present (Alignment_Clause (gnat_type)))
4792 *align_clause = true;
4795 if (!Is_Floating_Point_Type (gnat_type))
4798 if (UI_To_Int (Esize (gnat_type)) != 64)
4804 /* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
4805 size is greater or equal to 64 bits, or an array of such a type. Set
4806 ALIGN_CLAUSE according to the presence of an alignment clause on the
4807 type or, if it is an array, on the component type. */
4810 is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
4812 gnat_type = Underlying_Type (gnat_type);
4814 *align_clause = Present (Alignment_Clause (gnat_type));
4816 if (Is_Array_Type (gnat_type))
4818 gnat_type = Underlying_Type (Component_Type (gnat_type));
4819 if (Present (Alignment_Clause (gnat_type)))
4820 *align_clause = true;
4823 if (!Is_Scalar_Type (gnat_type))
4826 if (UI_To_Int (Esize (gnat_type)) < 64)
4832 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
4833 component of an aggregate type. */
4836 type_for_nonaliased_component_p (tree gnu_type)
4838 /* If the type is passed by reference, we may have pointers to the
4839 component so it cannot be made non-aliased. */
4840 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
4843 /* We used to say that any component of aggregate type is aliased
4844 because the front-end may take 'Reference of it. The front-end
4845 has been enhanced in the meantime so as to use a renaming instead
4846 in most cases, but the back-end can probably take the address of
4847 such a component too so we go for the conservative stance.
4849 For instance, we might need the address of any array type, even
4850 if normally passed by copy, to construct a fat pointer if the
4851 component is used as an actual for an unconstrained formal.
4853 Likewise for record types: even if a specific record subtype is
4854 passed by copy, the parent type might be passed by ref (e.g. if
4855 it's of variable size) and we might take the address of a child
4856 component to pass to a parent formal. We have no way to check
4857 for such conditions here. */
4858 if (AGGREGATE_TYPE_P (gnu_type))
4864 /* Return true if TYPE is a smaller form of ORIG_TYPE. */
4867 smaller_form_type_p (tree type, tree orig_type)
4871 /* We're not interested in variants here. */
4872 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig_type))
4875 /* Like a variant, a packable version keeps the original TYPE_NAME. */
4876 if (TYPE_NAME (type) != TYPE_NAME (orig_type))
4879 size = TYPE_SIZE (type);
4880 osize = TYPE_SIZE (orig_type);
4882 if (!(TREE_CODE (size) == INTEGER_CST && TREE_CODE (osize) == INTEGER_CST))
4885 return tree_int_cst_lt (size, osize) != 0;
4888 /* Perform final processing on global variables. */
4890 static GTY (()) tree dummy_global;
4893 gnat_write_global_declarations (void)
4898 /* If we have declared types as used at the global level, insert them in
4899 the global hash table. We use a dummy variable for this purpose. */
4900 if (!VEC_empty (tree, types_used_by_cur_var_decl))
4902 struct varpool_node *node;
4905 ASM_FORMAT_PRIVATE_NAME (label, first_global_object_name, 0);
4907 = build_decl (BUILTINS_LOCATION, VAR_DECL, get_identifier (label),
4909 TREE_STATIC (dummy_global) = 1;
4910 TREE_ASM_WRITTEN (dummy_global) = 1;
4911 node = varpool_node (dummy_global);
4912 node->force_output = 1;
4913 varpool_mark_needed_node (node);
4915 while (!VEC_empty (tree, types_used_by_cur_var_decl))
4917 tree t = VEC_pop (tree, types_used_by_cur_var_decl);
4918 types_used_by_var_decl_insert (t, dummy_global);
4922 /* Output debug information for all global type declarations first. This
4923 ensures that global types whose compilation hasn't been finalized yet,
4924 for example pointers to Taft amendment types, have their compilation
4925 finalized in the right context. */
4926 FOR_EACH_VEC_ELT (tree, global_decls, i, iter)
4927 if (TREE_CODE (iter) == TYPE_DECL)
4928 debug_hooks->global_decl (iter);
4930 /* Proceed to optimize and emit assembly.
4931 FIXME: shouldn't be the front end's responsibility to call this. */
4932 cgraph_finalize_compilation_unit ();
4934 /* After cgraph has had a chance to emit everything that's going to
4935 be emitted, output debug information for the rest of globals. */
4938 timevar_push (TV_SYMOUT);
4939 FOR_EACH_VEC_ELT (tree, global_decls, i, iter)
4940 if (TREE_CODE (iter) != TYPE_DECL)
4941 debug_hooks->global_decl (iter);
4942 timevar_pop (TV_SYMOUT);
4946 /* ************************************************************************
4947 * * GCC builtins support *
4948 * ************************************************************************ */
4950 /* The general scheme is fairly simple:
4952 For each builtin function/type to be declared, gnat_install_builtins calls
4953 internal facilities which eventually get to gnat_push_decl, which in turn
4954 tracks the so-declared builtin function decls in the 'builtin_decls' global
4955 data structure. When an Intrinsic subprogram declaration is processed, we
4956 search this global data structure to retrieve the associated BUILT_IN DECL
4959 /* Search the chain of currently available builtin declarations for a node
4960 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
4961 found, if any, or NULL_TREE otherwise. */
4963 builtin_decl_for (tree name)
4968 FOR_EACH_VEC_ELT (tree, builtin_decls, i, decl)
4969 if (DECL_NAME (decl) == name)
4975 /* The code below eventually exposes gnat_install_builtins, which declares
4976 the builtin types and functions we might need, either internally or as
4977 user accessible facilities.
4979 ??? This is a first implementation shot, still in rough shape. It is
4980 heavily inspired from the "C" family implementation, with chunks copied
4981 verbatim from there.
4983 Two obvious TODO candidates are
4984 o Use a more efficient name/decl mapping scheme
4985 o Devise a middle-end infrastructure to avoid having to copy
4986 pieces between front-ends. */
4988 /* ----------------------------------------------------------------------- *
4989 * BUILTIN ELEMENTARY TYPES *
4990 * ----------------------------------------------------------------------- */
4992 /* Standard data types to be used in builtin argument declarations. */
4996 CTI_SIGNED_SIZE_TYPE, /* For format checking only. */
4998 CTI_CONST_STRING_TYPE,
5003 static tree c_global_trees[CTI_MAX];
5005 #define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
5006 #define string_type_node c_global_trees[CTI_STRING_TYPE]
5007 #define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
5009 /* ??? In addition some attribute handlers, we currently don't support a
5010 (small) number of builtin-types, which in turns inhibits support for a
5011 number of builtin functions. */
5012 #define wint_type_node void_type_node
5013 #define intmax_type_node void_type_node
5014 #define uintmax_type_node void_type_node
5016 /* Build the void_list_node (void_type_node having been created). */
5019 build_void_list_node (void)
5021 tree t = build_tree_list (NULL_TREE, void_type_node);
5025 /* Used to help initialize the builtin-types.def table. When a type of
5026 the correct size doesn't exist, use error_mark_node instead of NULL.
5027 The later results in segfaults even when a decl using the type doesn't
5031 builtin_type_for_size (int size, bool unsignedp)
5033 tree type = gnat_type_for_size (size, unsignedp);
5034 return type ? type : error_mark_node;
5037 /* Build/push the elementary type decls that builtin functions/types
5041 install_builtin_elementary_types (void)
5043 signed_size_type_node = gnat_signed_type (size_type_node);
5044 pid_type_node = integer_type_node;
5045 void_list_node = build_void_list_node ();
5047 string_type_node = build_pointer_type (char_type_node);
5048 const_string_type_node
5049 = build_pointer_type (build_qualified_type
5050 (char_type_node, TYPE_QUAL_CONST));
5053 /* ----------------------------------------------------------------------- *
5054 * BUILTIN FUNCTION TYPES *
5055 * ----------------------------------------------------------------------- */
5057 /* Now, builtin function types per se. */
5061 #define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
5062 #define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
5063 #define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
5064 #define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
5065 #define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5066 #define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5067 #define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
5068 #define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6) NAME,
5069 #define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7) NAME,
5070 #define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
5071 #define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
5072 #define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
5073 #define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5074 #define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5075 #define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG6) \
5077 #define DEF_POINTER_TYPE(NAME, TYPE) NAME,
5078 #include "builtin-types.def"
5079 #undef DEF_PRIMITIVE_TYPE
5080 #undef DEF_FUNCTION_TYPE_0
5081 #undef DEF_FUNCTION_TYPE_1
5082 #undef DEF_FUNCTION_TYPE_2
5083 #undef DEF_FUNCTION_TYPE_3
5084 #undef DEF_FUNCTION_TYPE_4
5085 #undef DEF_FUNCTION_TYPE_5
5086 #undef DEF_FUNCTION_TYPE_6
5087 #undef DEF_FUNCTION_TYPE_7
5088 #undef DEF_FUNCTION_TYPE_VAR_0
5089 #undef DEF_FUNCTION_TYPE_VAR_1
5090 #undef DEF_FUNCTION_TYPE_VAR_2
5091 #undef DEF_FUNCTION_TYPE_VAR_3
5092 #undef DEF_FUNCTION_TYPE_VAR_4
5093 #undef DEF_FUNCTION_TYPE_VAR_5
5094 #undef DEF_POINTER_TYPE
5098 typedef enum c_builtin_type builtin_type;
5100 /* A temporary array used in communication with def_fn_type. */
5101 static GTY(()) tree builtin_types[(int) BT_LAST + 1];
5103 /* A helper function for install_builtin_types. Build function type
5104 for DEF with return type RET and N arguments. If VAR is true, then the
5105 function should be variadic after those N arguments.
5107 Takes special care not to ICE if any of the types involved are
5108 error_mark_node, which indicates that said type is not in fact available
5109 (see builtin_type_for_size). In which case the function type as a whole
5110 should be error_mark_node. */
5113 def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
5116 tree *args = XALLOCAVEC (tree, n);
5121 for (i = 0; i < n; ++i)
5123 builtin_type a = (builtin_type) va_arg (list, int);
5124 t = builtin_types[a];
5125 if (t == error_mark_node)
5130 t = builtin_types[ret];
5131 if (t == error_mark_node)
5134 t = build_varargs_function_type_array (t, n, args);
5136 t = build_function_type_array (t, n, args);
5139 builtin_types[def] = t;
5143 /* Build the builtin function types and install them in the builtin_types
5144 array for later use in builtin function decls. */
5147 install_builtin_function_types (void)
5149 tree va_list_ref_type_node;
5150 tree va_list_arg_type_node;
5152 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
5154 va_list_arg_type_node = va_list_ref_type_node =
5155 build_pointer_type (TREE_TYPE (va_list_type_node));
5159 va_list_arg_type_node = va_list_type_node;
5160 va_list_ref_type_node = build_reference_type (va_list_type_node);
5163 #define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
5164 builtin_types[ENUM] = VALUE;
5165 #define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
5166 def_fn_type (ENUM, RETURN, 0, 0);
5167 #define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
5168 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
5169 #define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
5170 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
5171 #define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5172 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
5173 #define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5174 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
5175 #define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5176 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5177 #define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5179 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
5180 #define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5182 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
5183 #define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
5184 def_fn_type (ENUM, RETURN, 1, 0);
5185 #define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
5186 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
5187 #define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
5188 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
5189 #define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5190 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
5191 #define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5192 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
5193 #define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5194 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5195 #define DEF_POINTER_TYPE(ENUM, TYPE) \
5196 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
5198 #include "builtin-types.def"
5200 #undef DEF_PRIMITIVE_TYPE
5201 #undef DEF_FUNCTION_TYPE_1
5202 #undef DEF_FUNCTION_TYPE_2
5203 #undef DEF_FUNCTION_TYPE_3
5204 #undef DEF_FUNCTION_TYPE_4
5205 #undef DEF_FUNCTION_TYPE_5
5206 #undef DEF_FUNCTION_TYPE_6
5207 #undef DEF_FUNCTION_TYPE_VAR_0
5208 #undef DEF_FUNCTION_TYPE_VAR_1
5209 #undef DEF_FUNCTION_TYPE_VAR_2
5210 #undef DEF_FUNCTION_TYPE_VAR_3
5211 #undef DEF_FUNCTION_TYPE_VAR_4
5212 #undef DEF_FUNCTION_TYPE_VAR_5
5213 #undef DEF_POINTER_TYPE
5214 builtin_types[(int) BT_LAST] = NULL_TREE;
5217 /* ----------------------------------------------------------------------- *
5218 * BUILTIN ATTRIBUTES *
5219 * ----------------------------------------------------------------------- */
5221 enum built_in_attribute
5223 #define DEF_ATTR_NULL_TREE(ENUM) ENUM,
5224 #define DEF_ATTR_INT(ENUM, VALUE) ENUM,
5225 #define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
5226 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
5227 #include "builtin-attrs.def"
5228 #undef DEF_ATTR_NULL_TREE
5230 #undef DEF_ATTR_IDENT
5231 #undef DEF_ATTR_TREE_LIST
5235 static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
5238 install_builtin_attributes (void)
5240 /* Fill in the built_in_attributes array. */
5241 #define DEF_ATTR_NULL_TREE(ENUM) \
5242 built_in_attributes[(int) ENUM] = NULL_TREE;
5243 #define DEF_ATTR_INT(ENUM, VALUE) \
5244 built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
5245 #define DEF_ATTR_IDENT(ENUM, STRING) \
5246 built_in_attributes[(int) ENUM] = get_identifier (STRING);
5247 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \
5248 built_in_attributes[(int) ENUM] \
5249 = tree_cons (built_in_attributes[(int) PURPOSE], \
5250 built_in_attributes[(int) VALUE], \
5251 built_in_attributes[(int) CHAIN]);
5252 #include "builtin-attrs.def"
5253 #undef DEF_ATTR_NULL_TREE
5255 #undef DEF_ATTR_IDENT
5256 #undef DEF_ATTR_TREE_LIST
5259 /* Handle a "const" attribute; arguments as in
5260 struct attribute_spec.handler. */
5263 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
5264 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5267 if (TREE_CODE (*node) == FUNCTION_DECL)
5268 TREE_READONLY (*node) = 1;
5270 *no_add_attrs = true;
5275 /* Handle a "nothrow" attribute; arguments as in
5276 struct attribute_spec.handler. */
5279 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
5280 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5283 if (TREE_CODE (*node) == FUNCTION_DECL)
5284 TREE_NOTHROW (*node) = 1;
5286 *no_add_attrs = true;
5291 /* Handle a "pure" attribute; arguments as in
5292 struct attribute_spec.handler. */
5295 handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5296 int ARG_UNUSED (flags), bool *no_add_attrs)
5298 if (TREE_CODE (*node) == FUNCTION_DECL)
5299 DECL_PURE_P (*node) = 1;
5300 /* ??? TODO: Support types. */
5303 warning (OPT_Wattributes, "%qs attribute ignored",
5304 IDENTIFIER_POINTER (name));
5305 *no_add_attrs = true;
5311 /* Handle a "no vops" attribute; arguments as in
5312 struct attribute_spec.handler. */
5315 handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
5316 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5317 bool *ARG_UNUSED (no_add_attrs))
5319 gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
5320 DECL_IS_NOVOPS (*node) = 1;
5324 /* Helper for nonnull attribute handling; fetch the operand number
5325 from the attribute argument list. */
5328 get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
5330 /* Verify the arg number is a constant. */
5331 if (TREE_CODE (arg_num_expr) != INTEGER_CST
5332 || TREE_INT_CST_HIGH (arg_num_expr) != 0)
5335 *valp = TREE_INT_CST_LOW (arg_num_expr);
5339 /* Handle the "nonnull" attribute. */
5341 handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
5342 tree args, int ARG_UNUSED (flags),
5346 unsigned HOST_WIDE_INT attr_arg_num;
5348 /* If no arguments are specified, all pointer arguments should be
5349 non-null. Verify a full prototype is given so that the arguments
5350 will have the correct types when we actually check them later. */
5353 if (!prototype_p (type))
5355 error ("nonnull attribute without arguments on a non-prototype");
5356 *no_add_attrs = true;
5361 /* Argument list specified. Verify that each argument number references
5362 a pointer argument. */
5363 for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
5365 unsigned HOST_WIDE_INT arg_num = 0, ck_num;
5367 if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
5369 error ("nonnull argument has invalid operand number (argument %lu)",
5370 (unsigned long) attr_arg_num);
5371 *no_add_attrs = true;
5375 if (prototype_p (type))
5377 function_args_iterator iter;
5380 function_args_iter_init (&iter, type);
5381 for (ck_num = 1; ; ck_num++, function_args_iter_next (&iter))
5383 argument = function_args_iter_cond (&iter);
5384 if (!argument || ck_num == arg_num)
5389 || TREE_CODE (argument) == VOID_TYPE)
5391 error ("nonnull argument with out-of-range operand number "
5392 "(argument %lu, operand %lu)",
5393 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5394 *no_add_attrs = true;
5398 if (TREE_CODE (argument) != POINTER_TYPE)
5400 error ("nonnull argument references non-pointer operand "
5401 "(argument %lu, operand %lu)",
5402 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5403 *no_add_attrs = true;
5412 /* Handle a "sentinel" attribute. */
5415 handle_sentinel_attribute (tree *node, tree name, tree args,
5416 int ARG_UNUSED (flags), bool *no_add_attrs)
5418 if (!prototype_p (*node))
5420 warning (OPT_Wattributes,
5421 "%qs attribute requires prototypes with named arguments",
5422 IDENTIFIER_POINTER (name));
5423 *no_add_attrs = true;
5427 if (!stdarg_p (*node))
5429 warning (OPT_Wattributes,
5430 "%qs attribute only applies to variadic functions",
5431 IDENTIFIER_POINTER (name));
5432 *no_add_attrs = true;
5438 tree position = TREE_VALUE (args);
5440 if (TREE_CODE (position) != INTEGER_CST)
5442 warning (0, "requested position is not an integer constant");
5443 *no_add_attrs = true;
5447 if (tree_int_cst_lt (position, integer_zero_node))
5449 warning (0, "requested position is less than zero");
5450 *no_add_attrs = true;
5458 /* Handle a "noreturn" attribute; arguments as in
5459 struct attribute_spec.handler. */
5462 handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5463 int ARG_UNUSED (flags), bool *no_add_attrs)
5465 tree type = TREE_TYPE (*node);
5467 /* See FIXME comment in c_common_attribute_table. */
5468 if (TREE_CODE (*node) == FUNCTION_DECL)
5469 TREE_THIS_VOLATILE (*node) = 1;
5470 else if (TREE_CODE (type) == POINTER_TYPE
5471 && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
5473 = build_pointer_type
5474 (build_type_variant (TREE_TYPE (type),
5475 TYPE_READONLY (TREE_TYPE (type)), 1));
5478 warning (OPT_Wattributes, "%qs attribute ignored",
5479 IDENTIFIER_POINTER (name));
5480 *no_add_attrs = true;
5486 /* Handle a "leaf" attribute; arguments as in
5487 struct attribute_spec.handler. */
5490 handle_leaf_attribute (tree *node, tree name,
5491 tree ARG_UNUSED (args),
5492 int ARG_UNUSED (flags), bool *no_add_attrs)
5494 if (TREE_CODE (*node) != FUNCTION_DECL)
5496 warning (OPT_Wattributes, "%qE attribute ignored", name);
5497 *no_add_attrs = true;
5499 if (!TREE_PUBLIC (*node))
5501 warning (OPT_Wattributes, "%qE attribute has no effect", name);
5502 *no_add_attrs = true;
5508 /* Handle a "malloc" attribute; arguments as in
5509 struct attribute_spec.handler. */
5512 handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5513 int ARG_UNUSED (flags), bool *no_add_attrs)
5515 if (TREE_CODE (*node) == FUNCTION_DECL
5516 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
5517 DECL_IS_MALLOC (*node) = 1;
5520 warning (OPT_Wattributes, "%qs attribute ignored",
5521 IDENTIFIER_POINTER (name));
5522 *no_add_attrs = true;
5528 /* Fake handler for attributes we don't properly support. */
5531 fake_attribute_handler (tree * ARG_UNUSED (node),
5532 tree ARG_UNUSED (name),
5533 tree ARG_UNUSED (args),
5534 int ARG_UNUSED (flags),
5535 bool * ARG_UNUSED (no_add_attrs))
5540 /* Handle a "type_generic" attribute. */
5543 handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
5544 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5545 bool * ARG_UNUSED (no_add_attrs))
5547 /* Ensure we have a function type. */
5548 gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);
5550 /* Ensure we have a variadic function. */
5551 gcc_assert (!prototype_p (*node) || stdarg_p (*node));
5556 /* Handle a "vector_size" attribute; arguments as in
5557 struct attribute_spec.handler. */
5560 handle_vector_size_attribute (tree *node, tree name, tree args,
5561 int ARG_UNUSED (flags),
5564 unsigned HOST_WIDE_INT vecsize, nunits;
5565 enum machine_mode orig_mode;
5566 tree type = *node, new_type, size;
5568 *no_add_attrs = true;
5570 size = TREE_VALUE (args);
5572 if (!host_integerp (size, 1))
5574 warning (OPT_Wattributes, "%qs attribute ignored",
5575 IDENTIFIER_POINTER (name));
5579 /* Get the vector size (in bytes). */
5580 vecsize = tree_low_cst (size, 1);
5582 /* We need to provide for vector pointers, vector arrays, and
5583 functions returning vectors. For example:
5585 __attribute__((vector_size(16))) short *foo;
5587 In this case, the mode is SI, but the type being modified is
5588 HI, so we need to look further. */
5590 while (POINTER_TYPE_P (type)
5591 || TREE_CODE (type) == FUNCTION_TYPE
5592 || TREE_CODE (type) == ARRAY_TYPE)
5593 type = TREE_TYPE (type);
5595 /* Get the mode of the type being modified. */
5596 orig_mode = TYPE_MODE (type);
5598 if ((!INTEGRAL_TYPE_P (type)
5599 && !SCALAR_FLOAT_TYPE_P (type)
5600 && !FIXED_POINT_TYPE_P (type))
5601 || (!SCALAR_FLOAT_MODE_P (orig_mode)
5602 && GET_MODE_CLASS (orig_mode) != MODE_INT
5603 && !ALL_SCALAR_FIXED_POINT_MODE_P (orig_mode))
5604 || !host_integerp (TYPE_SIZE_UNIT (type), 1)
5605 || TREE_CODE (type) == BOOLEAN_TYPE)
5607 error ("invalid vector type for attribute %qs",
5608 IDENTIFIER_POINTER (name));
5612 if (vecsize % tree_low_cst (TYPE_SIZE_UNIT (type), 1))
5614 error ("vector size not an integral multiple of component size");
5620 error ("zero vector size");
5624 /* Calculate how many units fit in the vector. */
5625 nunits = vecsize / tree_low_cst (TYPE_SIZE_UNIT (type), 1);
5626 if (nunits & (nunits - 1))
5628 error ("number of components of the vector not a power of two");
5632 new_type = build_vector_type (type, nunits);
5634 /* Build back pointers if needed. */
5635 *node = reconstruct_complex_type (*node, new_type);
5640 /* Handle a "vector_type" attribute; arguments as in
5641 struct attribute_spec.handler. */
5644 handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5645 int ARG_UNUSED (flags),
5648 /* Vector representative type and size. */
5649 tree rep_type = *node;
5650 tree rep_size = TYPE_SIZE_UNIT (rep_type);
5653 /* Vector size in bytes and number of units. */
5654 unsigned HOST_WIDE_INT vec_bytes, vec_units;
5656 /* Vector element type and mode. */
5658 enum machine_mode elem_mode;
5660 *no_add_attrs = true;
5662 /* Get the representative array type, possibly nested within a
5663 padding record e.g. for alignment purposes. */
5665 if (TYPE_IS_PADDING_P (rep_type))
5666 rep_type = TREE_TYPE (TYPE_FIELDS (rep_type));
5668 if (TREE_CODE (rep_type) != ARRAY_TYPE)
5670 error ("attribute %qs applies to array types only",
5671 IDENTIFIER_POINTER (name));
5675 /* Silently punt on variable sizes. We can't make vector types for them,
5676 need to ignore them on front-end generated subtypes of unconstrained
5677 bases, and this attribute is for binding implementors, not end-users, so
5678 we should never get there from legitimate explicit uses. */
5680 if (!host_integerp (rep_size, 1))
5683 /* Get the element type/mode and check this is something we know
5684 how to make vectors of. */
5686 elem_type = TREE_TYPE (rep_type);
5687 elem_mode = TYPE_MODE (elem_type);
5689 if ((!INTEGRAL_TYPE_P (elem_type)
5690 && !SCALAR_FLOAT_TYPE_P (elem_type)
5691 && !FIXED_POINT_TYPE_P (elem_type))
5692 || (!SCALAR_FLOAT_MODE_P (elem_mode)
5693 && GET_MODE_CLASS (elem_mode) != MODE_INT
5694 && !ALL_SCALAR_FIXED_POINT_MODE_P (elem_mode))
5695 || !host_integerp (TYPE_SIZE_UNIT (elem_type), 1))
5697 error ("invalid element type for attribute %qs",
5698 IDENTIFIER_POINTER (name));
5702 /* Sanity check the vector size and element type consistency. */
5704 vec_bytes = tree_low_cst (rep_size, 1);
5706 if (vec_bytes % tree_low_cst (TYPE_SIZE_UNIT (elem_type), 1))
5708 error ("vector size not an integral multiple of component size");
5714 error ("zero vector size");
5718 vec_units = vec_bytes / tree_low_cst (TYPE_SIZE_UNIT (elem_type), 1);
5719 if (vec_units & (vec_units - 1))
5721 error ("number of components of the vector not a power of two");
5725 /* Build the vector type and replace. */
5727 *node = build_vector_type (elem_type, vec_units);
5728 rep_name = TYPE_NAME (rep_type);
5729 if (TREE_CODE (rep_name) == TYPE_DECL)
5730 rep_name = DECL_NAME (rep_name);
5731 TYPE_NAME (*node) = rep_name;
5732 TYPE_REPRESENTATIVE_ARRAY (*node) = rep_type;
5737 /* ----------------------------------------------------------------------- *
5738 * BUILTIN FUNCTIONS *
5739 * ----------------------------------------------------------------------- */
5741 /* Worker for DEF_BUILTIN. Possibly define a builtin function with one or two
5742 names. Does not declare a non-__builtin_ function if flag_no_builtin, or
5743 if nonansi_p and flag_no_nonansi_builtin. */
5746 def_builtin_1 (enum built_in_function fncode,
5748 enum built_in_class fnclass,
5749 tree fntype, tree libtype,
5750 bool both_p, bool fallback_p,
5751 bool nonansi_p ATTRIBUTE_UNUSED,
5752 tree fnattrs, bool implicit_p)
5755 const char *libname;
5757 /* Preserve an already installed decl. It most likely was setup in advance
5758 (e.g. as part of the internal builtins) for specific reasons. */
5759 if (builtin_decl_explicit (fncode) != NULL_TREE)
5762 gcc_assert ((!both_p && !fallback_p)
5763 || !strncmp (name, "__builtin_",
5764 strlen ("__builtin_")));
5766 libname = name + strlen ("__builtin_");
5767 decl = add_builtin_function (name, fntype, fncode, fnclass,
5768 (fallback_p ? libname : NULL),
5771 /* ??? This is normally further controlled by command-line options
5772 like -fno-builtin, but we don't have them for Ada. */
5773 add_builtin_function (libname, libtype, fncode, fnclass,
5776 set_builtin_decl (fncode, decl, implicit_p);
/* Flags referenced by the COND expressions in builtins.def.  Ada has no
   -std=c9x style options, so the ISO C94/C99-only builtins stay disabled.  */
static int flag_isoc94 = 0;
static int flag_isoc99 = 0;
5782 /* Install what the common builtins.def offers. */
5785 install_builtin_functions (void)
5787 #define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
5788 NONANSI_P, ATTRS, IMPLICIT, COND) \
5790 def_builtin_1 (ENUM, NAME, CLASS, \
5791 builtin_types[(int) TYPE], \
5792 builtin_types[(int) LIBTYPE], \
5793 BOTH_P, FALLBACK_P, NONANSI_P, \
5794 built_in_attributes[(int) ATTRS], IMPLICIT);
5795 #include "builtins.def"
5799 /* ----------------------------------------------------------------------- *
5800 * BUILTIN FUNCTIONS *
5801 * ----------------------------------------------------------------------- */
5803 /* Install the builtin functions we might need. */
5806 gnat_install_builtins (void)
5808 install_builtin_elementary_types ();
5809 install_builtin_function_types ();
5810 install_builtin_attributes ();
5812 /* Install builtins used by generic middle-end pieces first. Some of these
5813 know about internal specificities and control attributes accordingly, for
5814 instance __builtin_alloca vs no-throw and -fstack-check. We will ignore
5815 the generic definition from builtins.def. */
5816 build_common_builtin_nodes ();
5818 /* Now, install the target specific builtins, such as the AltiVec family on
5819 ppc, and the common set as exposed by builtins.def. */
5820 targetm.init_builtins ();
5821 install_builtin_functions ();
5824 #include "gt-ada-utils.h"
5825 #include "gtype-ada.h"