1 /****************************************************************************
3 * GNAT COMPILER COMPONENTS *
7 * C Implementation File *
9 * Copyright (C) 1992-2007, Free Software Foundation, Inc. *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 2, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License distributed with GNAT; see file COPYING. If not, write *
19 * to the Free Software Foundation, 51 Franklin Street, Fifth Floor, *
20 * Boston, MA 02110-1301, USA. *
22 * GNAT was originally developed by the GNAT team at New York University. *
23 * Extensive contributions were provided by Ada Core Technologies Inc. *
25 ****************************************************************************/
29 #include "coretypes.h"
42 #include "tree-inline.h"
43 #include "tree-gimple.h"
44 #include "tree-dump.h"
45 #include "pointer-set.h"
61 #ifndef MAX_FIXED_MODE_SIZE
62 #define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
65 #ifndef MAX_BITS_PER_WORD
66 #define MAX_BITS_PER_WORD BITS_PER_WORD
69 /* If nonzero, pretend we are allocating at global level. */
72 /* Tree nodes for the various types and decls we create. */
73 tree gnat_std_decls[(int) ADT_LAST];
75 /* Functions to call for each of the possible raise reasons. */
76 tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
78 /* Forward declarations for handlers of attributes. */
79 static tree handle_const_attribute (tree *, tree, tree, int, bool *);
80 static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
82 /* Table of machine-independent internal attributes for Ada. We support
83 this minimal set of attributes to accommodate the Alpha back-end which
84 unconditionally puts them on its builtins. */
85 const struct attribute_spec gnat_internal_attribute_table[] =
87 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
88 { "const", 0, 0, true, false, false, handle_const_attribute },
89 { "nothrow", 0, 0, true, false, false, handle_nothrow_attribute },
90 { NULL, 0, 0, false, false, false, NULL }
93 /* Associates a GNAT tree node to a GCC tree node. It is used in
94 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
95 of `save_gnu_tree' for more info. */
96 static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
98 #define GET_GNU_TREE(GNAT_ENTITY) \
99 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
101 #define SET_GNU_TREE(GNAT_ENTITY,VAL) \
102 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
104 #define PRESENT_GNU_TREE(GNAT_ENTITY) \
105 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
107 /* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
108 static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;
110 #define GET_DUMMY_NODE(GNAT_ENTITY) \
111 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
113 #define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
114 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
116 #define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
117 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
119 /* This variable keeps a table for types for each precision so that we only
120 allocate each of them once. Signed and unsigned types are kept separate.
122 Note that these types are only used when fold-const requests something
123 special. Perhaps we should NOT share these types; we'll see how it
125 static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
127 /* Likewise for float types, but record these by mode. */
128 static GTY(()) tree float_types[NUM_MACHINE_MODES];
130 /* For each binding contour we allocate a binding_level structure to indicate
131 the binding depth. */
133 struct gnat_binding_level GTY((chain_next ("%h.chain")))
135 /* The binding level containing this one (the enclosing binding level). */
136 struct gnat_binding_level *chain;
137 /* The BLOCK node for this level. */
139 /* If nonzero, the setjmp buffer that needs to be updated for any
140 variable-sized definition within this context. */
144 /* The binding level currently in effect. */
145 static GTY(()) struct gnat_binding_level *current_binding_level;
147 /* A chain of gnat_binding_level structures awaiting reuse. */
148 static GTY((deletable)) struct gnat_binding_level *free_binding_level;
150 /* An array of global declarations. */
151 static GTY(()) VEC(tree,gc) *global_decls;
153 /* An array of builtin declarations. */
154 static GTY(()) VEC(tree,gc) *builtin_decls;
156 /* An array of global renaming pointers. */
157 static GTY(()) VEC(tree,gc) *global_renaming_pointers;
159 /* A chain of unused BLOCK nodes. */
160 static GTY((deletable)) tree free_block_chain;
162 static void gnat_install_builtins (void);
163 static tree merge_sizes (tree, tree, tree, bool, bool);
164 static tree compute_related_constant (tree, tree);
165 static tree split_plus (tree, tree *);
166 static void gnat_gimplify_function (tree);
167 static tree float_type_for_precision (int, enum machine_mode);
168 static tree convert_to_fat_pointer (tree, tree);
169 static tree convert_to_thin_pointer (tree, tree);
170 static tree make_descriptor_field (const char *,tree, tree, tree);
171 static bool potential_alignment_gap (tree, tree, tree);
173 /* Initialize the association of GNAT nodes to GCC trees. */
176 init_gnat_to_gnu (void)
/* Allocate the zero-initialized, GC-scanned table with one slot per GNAT
   node; it is accessed through the GET/SET/PRESENT_GNU_TREE macros above.  */
178 associate_gnat_to_gnu
179 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
182 /* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
183 which is to be associated with GNAT_ENTITY. Such GCC tree node is always
184 a ..._DECL node. If NO_CHECK is nonzero, the latter check is suppressed.
186 If GNU_DECL is zero, a previous association is to be reset. */
189 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
191 /* Check that GNAT_ENTITY is not already defined and that it is being set
192 to something which is a decl. Raise gigi 401 if not. Usually, this
193 means GNAT_ENTITY is defined twice, but occasionally is due to some
195 gcc_assert (!(gnu_decl
196 && (PRESENT_GNU_TREE (gnat_entity)
197 || (!no_check && !DECL_P (gnu_decl)))));
/* A null GNU_DECL clears any previous association (see the function
   comment above); a non-null one installs the new mapping.  */
199 SET_GNU_TREE (gnat_entity, gnu_decl);
202 /* GNAT_ENTITY is a GNAT tree node for a defining identifier.
203 Return the ..._DECL node that was associated with it. If there is no tree
204 node associated with GNAT_ENTITY, abort.
206 In some cases, such as delayed elaboration or expressions that need to
207 be elaborated only once, GNAT_ENTITY is really not an entity. */
210 get_gnu_tree (Entity_Id gnat_entity)
/* Aborts (via gcc_assert) if no GCC tree was previously saved for
   GNAT_ENTITY by save_gnu_tree.  */
212 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
213 return GET_GNU_TREE (gnat_entity);
216 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
219 present_gnu_tree (Entity_Id gnat_entity)
/* Pure table lookup; never allocates or aborts.  */
221 return PRESENT_GNU_TREE (gnat_entity);
224 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
227 init_dummy_type (void)
/* Allocate the zero-initialized dummy-node table, one slot per GNAT node,
   accessed through the GET/SET/PRESENT_DUMMY_NODE macros above.  */
230 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
233 /* Make a dummy type corresponding to GNAT_TYPE. */
236 make_dummy_type (Entity_Id gnat_type)
238 Entity_Id gnat_underlying = Gigi_Equivalent_Type (gnat_type);
241 /* If there is an equivalent type, get its underlying type. */
242 if (Present (gnat_underlying))
243 gnat_underlying = Underlying_Type (gnat_underlying);
245 /* If there was no equivalent type (can only happen when just annotating
246 types) or underlying type, go back to the original type. */
247 if (No (gnat_underlying))
248 gnat_underlying = gnat_type;
250 /* If there is already a dummy type, use that one. Else make one. */
251 if (PRESENT_DUMMY_NODE (gnat_underlying))
252 return GET_DUMMY_NODE (gnat_underlying);
254 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
256 gnu_type = make_node (Is_Record_Type (gnat_underlying)
257 ? tree_code_for_record_type (gnat_underlying)
259 TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
260 TYPE_DUMMY_P (gnu_type) = 1;
/* Aggregate dummies get a stub TYPE_DECL, presumably for debug output
   machinery -- NOTE(review): confirm against the rest of gigi.  */
261 if (AGGREGATE_TYPE_P (gnu_type))
263 TYPE_STUB_DECL (gnu_type) = build_decl (TYPE_DECL, NULL_TREE, gnu_type);
264 TYPE_BY_REFERENCE_P (gnu_type) = Is_By_Reference_Type (gnat_type);
/* Cache the dummy so later requests for the same underlying type reuse it.  */
267 SET_DUMMY_NODE (gnat_underlying, gnu_type);
272 /* Return nonzero if we are currently in the global binding level. */
275 global_bindings_p (void)
/* Returns -1 (not 1) for "at global level"; callers are expected to test
   for nonzero only.  FORCE_GLOBAL (see its comment above) lets gigi
   pretend it is allocating at global level inside a function.  */
277 return ((force_global || !current_function_decl) ? -1 : 0);
280 /* Enter a new binding level. */
285 struct gnat_binding_level *newlevel = NULL;
287 /* Reuse a struct for this binding level, if there is one. */
288 if (free_binding_level)
290 newlevel = free_binding_level;
291 free_binding_level = free_binding_level->chain;
/* Otherwise allocate a fresh, GC-managed binding level.  */
295 = (struct gnat_binding_level *)
296 ggc_alloc (sizeof (struct gnat_binding_level));
298 /* Use a free BLOCK, if any; otherwise, allocate one. */
299 if (free_block_chain)
301 newlevel->block = free_block_chain;
302 free_block_chain = TREE_CHAIN (free_block_chain);
303 TREE_CHAIN (newlevel->block) = NULL_TREE;
306 newlevel->block = make_node (BLOCK);
308 /* Point the BLOCK we just made to its parent. */
309 if (current_binding_level)
310 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
/* Start with empty variable and subblock lists; mark the BLOCK used.  */
312 BLOCK_VARS (newlevel->block) = BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
313 TREE_USED (newlevel->block) = 1;
315 /* Add this level to the front of the chain (stack) of levels that are
317 newlevel->chain = current_binding_level;
318 newlevel->jmpbuf_decl = NULL_TREE;
319 current_binding_level = newlevel;
322 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
323 and point FNDECL to this BLOCK. */
326 set_current_block_context (tree fndecl)
/* Link the current level's BLOCK and FNDECL together in both directions.  */
328 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
329 DECL_INITIAL (fndecl) = current_binding_level->block;
332 /* Set the jmpbuf_decl for the current binding level to DECL. */
335 set_block_jmpbuf_decl (tree decl)
/* DECL is the setjmp buffer to refresh for variable-sized definitions in
   this context (see the jmpbuf comment on struct gnat_binding_level).  */
337 current_binding_level->jmpbuf_decl = decl;
340 /* Get the jmpbuf_decl, if any, for the current binding level. */
343 get_block_jmpbuf_decl ()
/* May return NULL_TREE if no jmpbuf decl was registered for this level.  */
345 return current_binding_level->jmpbuf_decl;
348 /* Exit a binding level. Set any BLOCK into the current code group. */
353 struct gnat_binding_level *level = current_binding_level;
354 tree block = level->block;
/* Variable and subblock lists were built by prepending; restore source
   order before handing the BLOCK off.  */
356 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
357 BLOCK_SUBBLOCKS (block) = nreverse (BLOCK_SUBBLOCKS (block));
359 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
360 are no variables free the block and merge its subblocks into those of its
361 parent block. Otherwise, add it to the list of its parent. */
362 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
364 else if (BLOCK_VARS (block) == NULL_TREE)
366 BLOCK_SUBBLOCKS (level->chain->block)
367 = chainon (BLOCK_SUBBLOCKS (block),
368 BLOCK_SUBBLOCKS (level->chain->block));
/* Recycle the emptied BLOCK node via free_block_chain.  */
369 TREE_CHAIN (block) = free_block_chain;
370 free_block_chain = block;
374 TREE_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
375 BLOCK_SUBBLOCKS (level->chain->block) = block;
376 TREE_USED (block) = 1;
377 set_block_for_group (block);
380 /* Free this binding structure. */
381 current_binding_level = level->chain;
382 level->chain = free_binding_level;
383 free_binding_level = level;
386 /* Insert BLOCK at the end of the list of subblocks of the
387 current binding level. This is used when a BIND_EXPR is expanded,
388 to handle the BLOCK node inside the BIND_EXPR. */
391 insert_block (tree block)
393 TREE_USED (block) = 1;
/* Prepend to the subblock list; the list is nreverse'd back into source
   order when the level is popped.  */
394 TREE_CHAIN (block) = BLOCK_SUBBLOCKS (current_binding_level->block);
395 BLOCK_SUBBLOCKS (current_binding_level->block) = block;
398 /* Records a ..._DECL node DECL as belonging to the current lexical scope
399 and uses GNAT_NODE for location information and propagating flags. */
402 gnat_pushdecl (tree decl, Node_Id gnat_node)
404 /* If at top level, there is no context. But PARM_DECLs always go in the
405 level of its function. */
406 if (global_bindings_p () && TREE_CODE (decl) != PARM_DECL)
407 DECL_CONTEXT (decl) = 0;
410 DECL_CONTEXT (decl) = current_function_decl;
412 /* Functions imported in another function are not really nested. */
413 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_PUBLIC (decl))
414 DECL_NO_STATIC_CHAIN (decl) = 1;
/* Suppress warnings when GNAT_NODE is absent or has Warnings_Off set.  */
417 TREE_NO_WARNING (decl) = (gnat_node == Empty || Warnings_Off (gnat_node));
419 /* Set the location of DECL and emit a declaration for it. */
420 if (Present (gnat_node))
421 Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
422 add_decl_expr (decl, gnat_node);
424 /* Put the declaration on the list. The list of declarations is in reverse
425 order. The list will be reversed later. Put global variables in the
426 globals list and builtin functions in a dedicated list to speed up
427 further lookups. Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into
428 the list, as they will cause trouble with the debugger and aren't needed
430 if (TREE_CODE (decl) != TYPE_DECL
431 || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
433 if (global_bindings_p ())
435 VEC_safe_push (tree, gc, global_decls, decl);
437 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
438 VEC_safe_push (tree, gc, builtin_decls, decl);
/* Local decl: prepend to the current BLOCK's variable list.  */
442 TREE_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
443 BLOCK_VARS (current_binding_level->block) = decl;
447 /* For the declaration of a type, set its name if it either is not already
448 set, was set to an IDENTIFIER_NODE, indicating an internal name,
449 or if the previous type name was not derived from a source name.
450 We'd rather have the type named with a real name and all the pointer
451 types to the same object have the same POINTER_TYPE node. Code in the
452 equivalent function of c-decl.c makes a copy of the type node here, but
453 that may cause us trouble with incomplete types. We make an exception
454 for fat pointer types because the compiler automatically builds them
455 for unconstrained array types and the debugger uses them to represent
456 both these and pointers to these. */
457 if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
459 tree t = TREE_TYPE (decl);
461 if (!TYPE_NAME (t) || TREE_CODE (TYPE_NAME (t)) == IDENTIFIER_NODE)
462 TYPE_NAME (t) = decl;
/* Fat pointers: name a variant copy instead of the shared node itself.  */
463 else if (TYPE_FAT_POINTER_P (t))
465 tree tt = build_variant_type_copy (t);
466 TYPE_NAME (tt) = decl;
467 TREE_USED (tt) = TREE_USED (t);
468 TREE_TYPE (decl) = tt;
469 DECL_ORIGINAL_TYPE (decl) = t;
471 else if (DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
472 TYPE_NAME (t) = decl;
476 /* Do little here. Set up the standard declarations later after the
477 front end has been run. */
480 gnat_init_decl_processing (void)
484 /* Make the binding_level structure for global names. */
485 current_function_decl = 0;
486 current_binding_level = 0;
487 free_binding_level = 0;
490 build_common_tree_nodes (true, true);
492 /* In Ada, we use a signed type for SIZETYPE. Use the signed type
493 corresponding to the size of Pmode. In most cases when ptr_mode and
494 Pmode differ, C will use the width of ptr_mode as sizetype. But we get
495 far better code using the width of Pmode. Make this here since we need
496 this before we can expand the GNAT types. */
497 size_type_node = gnat_type_for_size (GET_MODE_BITSIZE (Pmode), 0);
498 set_sizetype (size_type_node);
499 build_common_tree_nodes_2 (0);
501 /* Give names and make TYPE_DECLs for common types. */
502 create_type_decl (get_identifier (SIZE_TYPE), sizetype,
503 NULL, false, true, Empty);
504 create_type_decl (get_identifier ("integer"), integer_type_node,
505 NULL, false, true, Empty);
506 create_type_decl (get_identifier ("unsigned char"), char_type_node,
507 NULL, false, true, Empty);
508 create_type_decl (get_identifier ("long integer"), long_integer_type_node,
509 NULL, false, true, Empty);
/* Generic void* used throughout gigi (e.g. for __gnat_malloc below).  */
511 ptr_void_type_node = build_pointer_type (void_type_node);
/* Finally register the builtins; must follow the common-type setup.  */
513 gnat_install_builtins ();
516 /* Install the builtin functions we might need. */
519 gnat_install_builtins ()
/* Called from gnat_init_decl_processing once the standard types exist.  */
521 /* Builtins used by generic middle-end optimizers. */
522 build_common_builtin_nodes ();
524 /* Target specific builtins, such as the AltiVec family on ppc. */
525 targetm.init_builtins ();
528 /* Create the predefined scalar types such as `integer_type_node' needed
529 in the gcc back-end and initialize the global binding level. */
532 init_gigi_decls (tree long_long_float_type, tree exception_type)
537 /* Set the types that GCC and Gigi use from the front end. We would like
538 to do this for char_type_node, but it needs to correspond to the C
540 if (TREE_CODE (TREE_TYPE (long_long_float_type)) == INTEGER_TYPE)
542 /* In this case, the builtin floating point types are VAX float,
543 so make up a type for use. */
544 longest_float_type_node = make_node (REAL_TYPE);
545 TYPE_PRECISION (longest_float_type_node) = LONG_DOUBLE_TYPE_SIZE;
546 layout_type (longest_float_type_node);
547 create_type_decl (get_identifier ("longest float type"),
548 longest_float_type_node, NULL, false, true, Empty);
551 longest_float_type_node = TREE_TYPE (long_long_float_type);
553 except_type_node = TREE_TYPE (exception_type);
555 unsigned_type_node = gnat_type_for_size (INT_TYPE_SIZE, 1);
556 create_type_decl (get_identifier ("unsigned int"), unsigned_type_node,
557 NULL, false, true, Empty);
559 void_type_decl_node = create_type_decl (get_identifier ("void"),
560 void_type_node, NULL, false, true,
/* Common function-type nodes reused by the runtime declarations below.  */
563 void_ftype = build_function_type (void_type_node, NULL_TREE);
564 ptr_void_ftype = build_pointer_type (void_ftype);
566 /* Now declare runtime functions. */
567 endlink = tree_cons (NULL_TREE, void_type_node, NULL_TREE);
569 /* malloc is a function declaration tree for a function to allocate
571 malloc_decl = create_subprog_decl (get_identifier ("__gnat_malloc"),
573 build_function_type (ptr_void_type_node,
574 tree_cons (NULL_TREE,
577 NULL_TREE, false, true, true, NULL,
/* Tell the optimizers the result does not alias anything else.  */
579 DECL_IS_MALLOC (malloc_decl) = 1;
581 /* free is a function declaration tree for a function to free memory. */
583 = create_subprog_decl (get_identifier ("__gnat_free"), NULL_TREE,
584 build_function_type (void_type_node,
585 tree_cons (NULL_TREE,
588 NULL_TREE, false, true, true, NULL, Empty);
590 /* Make the types and functions used for exception processing. */
592 = build_array_type (gnat_type_for_mode (Pmode, 0),
593 build_index_type (build_int_cst (NULL_TREE, 5)));
594 create_type_decl (get_identifier ("JMPBUF_T"), jmpbuf_type, NULL,
596 jmpbuf_ptr_type = build_pointer_type (jmpbuf_type);
598 /* Functions to get and set the jumpbuf pointer for the current thread. */
600 = create_subprog_decl
601 (get_identifier ("system__soft_links__get_jmpbuf_address_soft"),
602 NULL_TREE, build_function_type (jmpbuf_ptr_type, NULL_TREE),
603 NULL_TREE, false, true, true, NULL, Empty);
604 /* Avoid creating superfluous edges to __builtin_setjmp receivers. */
605 DECL_IS_PURE (get_jmpbuf_decl) = 1;
608 = create_subprog_decl
609 (get_identifier ("system__soft_links__set_jmpbuf_address_soft"),
611 build_function_type (void_type_node,
612 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
613 NULL_TREE, false, true, true, NULL, Empty);
615 /* Function to get the current exception. */
617 = create_subprog_decl
618 (get_identifier ("system__soft_links__get_gnat_exception"),
620 build_function_type (build_pointer_type (except_type_node), NULL_TREE),
621 NULL_TREE, false, true, true, NULL, Empty);
622 /* Avoid creating superfluous edges to __builtin_setjmp receivers. */
623 DECL_IS_PURE (get_excptr_decl) = 1;
625 /* Functions that raise exceptions. */
627 = create_subprog_decl
628 (get_identifier ("__gnat_raise_nodefer_with_msg"), NULL_TREE,
629 build_function_type (void_type_node,
630 tree_cons (NULL_TREE,
631 build_pointer_type (except_type_node),
633 NULL_TREE, false, true, true, NULL, Empty);
635 /* Dummy objects to materialize "others" and "all others" in the exception
636 tables. These are exported by a-exexpr.adb, so see this unit for the
640 = create_var_decl (get_identifier ("OTHERS"),
641 get_identifier ("__gnat_others_value"),
642 integer_type_node, 0, 1, 0, 1, 1, 0, Empty);
645 = create_var_decl (get_identifier ("ALL_OTHERS"),
646 get_identifier ("__gnat_all_others_value"),
647 integer_type_node, 0, 1, 0, 1, 1, 0, Empty);
649 /* Hooks to call when entering/leaving an exception handler. */
651 = create_subprog_decl (get_identifier ("__gnat_begin_handler"), NULL_TREE,
652 build_function_type (void_type_node,
653 tree_cons (NULL_TREE,
656 NULL_TREE, false, true, true, NULL, Empty);
659 = create_subprog_decl (get_identifier ("__gnat_end_handler"), NULL_TREE,
660 build_function_type (void_type_node,
661 tree_cons (NULL_TREE,
664 NULL_TREE, false, true, true, NULL, Empty);
666 /* If in no exception handlers mode, all raise statements are redirected to
667 __gnat_last_chance_handler. No need to redefine raise_nodefer_decl, since
668 this procedure will never be called in this mode. */
669 if (No_Exception_Handlers_Set ())
672 = create_subprog_decl
673 (get_identifier ("__gnat_last_chance_handler"), NULL_TREE,
674 build_function_type (void_type_node,
675 tree_cons (NULL_TREE,
676 build_pointer_type (char_type_node),
677 tree_cons (NULL_TREE,
680 NULL_TREE, false, true, true, NULL, Empty);
/* Every raise-reason slot shares the single last-chance handler decl.  */
682 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
683 gnat_raise_decls[i] = decl;
686 /* Otherwise, make one decl for each exception reason. */
687 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
/* Decl names follow the __gnat_rcheck_NN runtime naming scheme.  */
691 sprintf (name, "__gnat_rcheck_%.2d", i);
693 = create_subprog_decl
694 (get_identifier (name), NULL_TREE,
695 build_function_type (void_type_node,
696 tree_cons (NULL_TREE,
699 tree_cons (NULL_TREE,
702 NULL_TREE, false, true, true, NULL, Empty);
705 /* Indicate that these never return. */
706 TREE_THIS_VOLATILE (raise_nodefer_decl) = 1;
707 TREE_SIDE_EFFECTS (raise_nodefer_decl) = 1;
708 TREE_TYPE (raise_nodefer_decl)
709 = build_qualified_type (TREE_TYPE (raise_nodefer_decl),
712 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
714 TREE_THIS_VOLATILE (gnat_raise_decls[i]) = 1;
715 TREE_SIDE_EFFECTS (gnat_raise_decls[i]) = 1;
716 TREE_TYPE (gnat_raise_decls[i])
717 = build_qualified_type (TREE_TYPE (gnat_raise_decls[i]),
721 /* setjmp returns an integer and has one operand, which is a pointer to
724 = create_subprog_decl
725 (get_identifier ("__builtin_setjmp"), NULL_TREE,
726 build_function_type (integer_type_node,
727 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
728 NULL_TREE, false, true, true, NULL, Empty);
/* Mark as a real builtin so the middle end expands it specially.  */
730 DECL_BUILT_IN_CLASS (setjmp_decl) = BUILT_IN_NORMAL;
731 DECL_FUNCTION_CODE (setjmp_decl) = BUILT_IN_SETJMP;
733 /* update_setjmp_buf updates a setjmp buffer from the current stack pointer
735 update_setjmp_buf_decl
736 = create_subprog_decl
737 (get_identifier ("__builtin_update_setjmp_buf"), NULL_TREE,
738 build_function_type (void_type_node,
739 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
740 NULL_TREE, false, true, true, NULL, Empty);
742 DECL_BUILT_IN_CLASS (update_setjmp_buf_decl) = BUILT_IN_NORMAL;
743 DECL_FUNCTION_CODE (update_setjmp_buf_decl) = BUILT_IN_UPDATE_SETJMP_BUF;
745 main_identifier_node = get_identifier ("main");
748 /* Given a record type RECORD_TYPE and a chain of FIELD_DECL nodes FIELDLIST,
749 finish constructing the record or union type. If REP_LEVEL is zero, this
750 record has no representation clause and so will be entirely laid out here.
751 If REP_LEVEL is one, this record has a representation clause and has been
752 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
753 this record is derived from a parent record and thus inherits its layout;
754 only make a pass on the fields to finalize them. If DO_NOT_FINALIZE is
755 true, the record type is expected to be modified afterwards so it will
756 not be sent to the back-end for finalization. */
759 finish_record_type (tree record_type, tree fieldlist, int rep_level,
760 bool do_not_finalize)
762 enum tree_code code = TREE_CODE (record_type);
763 tree ada_size = bitsize_zero_node;
764 tree size = bitsize_zero_node;
765 bool var_size = false;
/* Remember whether sizes were already set so we can preserve them.  */
766 bool had_size = TYPE_SIZE (record_type) != 0;
767 bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
770 TYPE_FIELDS (record_type) = fieldlist;
771 TYPE_STUB_DECL (record_type)
772 = build_decl (TYPE_DECL, TYPE_NAME (record_type), record_type);
774 /* We don't need both the typedef name and the record name output in
775 the debugging information, since they are the same. */
776 DECL_ARTIFICIAL (TYPE_STUB_DECL (record_type)) = 1;
778 /* Globally initialize the record first. If this is a rep'ed record,
779 that just means some initializations; otherwise, layout the record. */
782 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
783 TYPE_MODE (record_type) = BLKmode;
786 TYPE_SIZE_UNIT (record_type) = size_zero_node;
788 TYPE_SIZE (record_type) = bitsize_zero_node;
790 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
791 out just like a UNION_TYPE, since the size will be fixed. */
792 else if (code == QUAL_UNION_TYPE)
797 /* Ensure there isn't a size already set. There can be in an error
798 case where there is a rep clause but all fields have errors and
799 no longer have a position. */
800 TYPE_SIZE (record_type) = 0;
801 layout_type (record_type);
804 /* At this point, the position and size of each field is known. It was
805 either set before entry by a rep clause, or by laying out the type above.
807 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
808 to compute the Ada size; the GCC size and alignment (for rep'ed records
809 that are not padding types); and the mode (for rep'ed records). We also
810 clear the DECL_BIT_FIELD indication for the cases we know have not been
811 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
813 if (code == QUAL_UNION_TYPE)
814 fieldlist = nreverse (fieldlist);
816 for (field = fieldlist; field; field = TREE_CHAIN (field))
818 tree pos = bit_position (field);
820 tree type = TREE_TYPE (field);
821 tree this_size = DECL_SIZE (field);
822 tree this_ada_size = DECL_SIZE (field);
824 /* We need to make an XVE/XVU record if any field has variable size,
825 whether or not the record does. For example, if we have a union,
826 it may be that all fields, rounded up to the alignment, have the
827 same size, in which case we'll use that size. But the debug
828 output routines (except Dwarf2) won't be able to output the fields,
829 so we need to make the special record. */
830 if (TREE_CODE (this_size) != INTEGER_CST)
/* Nested aggregates may carry their own Ada size; prefer it.  */
833 if ((TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
834 || TREE_CODE (type) == QUAL_UNION_TYPE)
835 && !TYPE_IS_FAT_POINTER_P (type)
836 && !TYPE_CONTAINS_TEMPLATE_P (type)
837 && TYPE_ADA_SIZE (type))
838 this_ada_size = TYPE_ADA_SIZE (type);
840 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
841 if (DECL_BIT_FIELD (field) && !STRICT_ALIGNMENT
842 && value_factor_p (pos, BITS_PER_UNIT)
843 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
844 DECL_BIT_FIELD (field) = 0;
846 /* If we still have DECL_BIT_FIELD set at this point, we know the field
847 is technically not addressable. Except that it can actually be
848 addressed if the field is BLKmode and happens to be properly
850 DECL_NONADDRESSABLE_P (field)
851 |= DECL_BIT_FIELD (field) && DECL_MODE (field) != BLKmode;
853 if ((rep_level > 0) && !DECL_BIT_FIELD (field))
854 TYPE_ALIGN (record_type)
855 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
/* Unions: the size is the maximum over all members.  */
860 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
861 size = size_binop (MAX_EXPR, size, this_size);
864 case QUAL_UNION_TYPE:
/* Qualified unions: sizes are selected by the field qualifiers.  */
866 = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
867 this_ada_size, ada_size);
868 size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
873 /* Since we know here that all fields are sorted in order of
874 increasing bit position, the size of the record is one
875 higher than the ending bit of the last field processed
876 unless we have a rep clause, since in that case we might
877 have a field outside a QUAL_UNION_TYPE that has a higher ending
878 position. So use a MAX in that case. Also, if this field is a
879 QUAL_UNION_TYPE, we need to take into account the previous size in
880 the case of empty variants. */
882 = merge_sizes (ada_size, pos, this_ada_size,
883 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
885 = merge_sizes (size, pos, this_size,
886 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
/* Undo the reversal done before the loop for QUAL_UNION_TYPEs.  */
894 if (code == QUAL_UNION_TYPE)
895 nreverse (fieldlist);
899 /* If this is a padding record, we never want to make the size smaller
900 than what was specified in it, if any. */
901 if (TREE_CODE (record_type) == RECORD_TYPE
902 && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
903 size = TYPE_SIZE (record_type);
905 /* Now set any of the values we've just computed that apply. */
906 if (!TYPE_IS_FAT_POINTER_P (record_type)
907 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
908 SET_TYPE_ADA_SIZE (record_type, ada_size);
912 tree size_unit = had_size_unit
913 ? TYPE_SIZE_UNIT (record_type)
915 size_binop (CEIL_DIV_EXPR, size,
917 unsigned int align = TYPE_ALIGN (record_type);
/* Round both sizes up to the alignment before installing them.  */
919 TYPE_SIZE (record_type) = variable_size (round_up (size, align));
920 TYPE_SIZE_UNIT (record_type)
921 = variable_size (round_up (size_unit, align / BITS_PER_UNIT));
923 compute_record_mode (record_type);
/* Unless the caller asked to defer (DO_NOT_FINALIZE), emit debug info
   and finish the type now.  */
927 if (!do_not_finalize)
928 rest_of_record_type_compilation (record_type);
931 /* Wrap up compilation of RECORD_TYPE, i.e. most notably output all
932 the debug information associated with it. It need not be invoked
933 directly in most cases since finish_record_type takes care of doing
/* NOTE(review): this excerpt is line-sampled -- braces, local declarations
   and some statements between the numbered lines below are missing from
   view, so the code is kept byte-identical and only comments are added.
   Visible intent: scan RECORD_TYPE's fields for any variable-sized field
   (or non-constant QUAL_UNION_TYPE qualifier) and, if one is found, build
   a parallel debug-only record whose fields carry "XV*" encoded names
   (see exp_dbug.ads) and constant positions, so debug back-ends other
   than Dwarf2 can describe the layout.  */
934 so, unless explicitly requested not to through DO_NOT_FINALIZE. */
937 rest_of_record_type_compilation (tree record_type)
939 tree fieldlist = TYPE_FIELDS (record_type);
941 enum tree_code code = TREE_CODE (record_type);
942 bool var_size = false;
944 for (field = fieldlist; field; field = TREE_CHAIN (field))
946 /* We need to make an XVE/XVU record if any field has variable size,
947 whether or not the record does. For example, if we have a union,
948 it may be that all fields, rounded up to the alignment, have the
949 same size, in which case we'll use that size. But the debug
950 output routines (except Dwarf2) won't be able to output the fields,
951 so we need to make the special record. */
952 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
953 /* If a field has a non-constant qualifier, the record will have
954 variable size too. */
955 || (code == QUAL_UNION_TYPE
956 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
963 /* If this record is of variable size, rename it so that the
964 debugger knows it is and make a new, parallel, record
965 that tells the debugger how the record is laid out. See
966 exp_dbug.ads. But don't do this for records that are padding
967 since they confuse GDB. */
969 && !(TREE_CODE (record_type) == RECORD_TYPE
970 && TYPE_IS_PADDING_P (record_type)))
/* A variable-sized QUAL_UNION_TYPE is encoded as a plain UNION_TYPE in
   the parallel record; other codes are kept as-is.  */
973 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
974 ? UNION_TYPE : TREE_CODE (record_type))
975 tree orig_name = TYPE_NAME (record_type);
977 = (TREE_CODE (orig_name) == TYPE_DECL ? DECL_NAME (orig_name)
980 = concat_id_with_name (orig_id,
981 TREE_CODE (record_type) == QUAL_UNION_TYPE
983 tree last_pos = bitsize_zero_node;
985 tree prev_old_field = 0;
987 TYPE_NAME (new_record_type) = new_id;
988 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
989 TYPE_STUB_DECL (new_record_type)
990 = build_decl (TYPE_DECL, new_id, new_record_type);
991 DECL_ARTIFICIAL (TYPE_STUB_DECL (new_record_type)) = 1;
992 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
993 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
/* The parallel record is given a dummy constant size: the alignment of
   the original record, in bits and in units respectively.  */
994 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
995 TYPE_SIZE_UNIT (new_record_type)
996 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
998 /* Now scan all the fields, replacing each field with a new
999 field corresponding to the new encoding. */
1000 for (old_field = TYPE_FIELDS (record_type); old_field;
1001 old_field = TREE_CHAIN (old_field))
1003 tree field_type = TREE_TYPE (old_field);
1004 tree field_name = DECL_NAME (old_field);
1006 tree curpos = bit_position (old_field);
1008 unsigned int align = 0;
1011 /* See how the position was modified from the last position.
1013 There are two basic cases we support: a value was added
1014 to the last position or the last position was rounded to
1015 a boundary and they something was added. Check for the
1016 first case first. If not, see if there is any evidence
1017 of rounding. If so, round the last position and try
1020 If this is a union, the position can be taken as zero. */
1022 if (TREE_CODE (new_record_type) == UNION_TYPE)
1023 pos = bitsize_zero_node, align = 0;
1025 pos = compute_related_constant (curpos, last_pos);
/* A position of the form (offset * const) suggests rounding to a
   "const"-bit boundary; try to recover the alignment from it.  */
1027 if (!pos && TREE_CODE (curpos) == MULT_EXPR
1028 && host_integerp (TREE_OPERAND (curpos, 1), 1))
1030 tree offset = TREE_OPERAND (curpos, 0);
1031 align = tree_low_cst (TREE_OPERAND (curpos, 1), 1);
1033 /* Strip off any conversions. */
1034 while (TREE_CODE (offset) == NON_LVALUE_EXPR
1035 || TREE_CODE (offset) == NOP_EXPR
1036 || TREE_CODE (offset) == CONVERT_EXPR)
1037 offset = TREE_OPERAND (offset, 0);
1039 /* An offset which is a bitwise AND with a negative power of 2
1040 means an alignment corresponding to this power of 2. */
1041 if (TREE_CODE (offset) == BIT_AND_EXPR
1042 && host_integerp (TREE_OPERAND (offset, 1), 0)
1043 && tree_int_cst_sgn (TREE_OPERAND (offset, 1)) < 0)
1046 = - tree_low_cst (TREE_OPERAND (offset, 1), 0);
1047 if (exact_log2 (pow) > 0)
1051 pos = compute_related_constant (curpos,
1052 round_up (last_pos, align));
/* Second pattern: (offset * const) + const, again treated as a
   rounded position plus a constant increment.  */
1054 else if (!pos && TREE_CODE (curpos) == PLUS_EXPR
1055 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
1056 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
1057 && host_integerp (TREE_OPERAND
1058 (TREE_OPERAND (curpos, 0), 1),
1063 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
1064 pos = compute_related_constant (curpos,
1065 round_up (last_pos, align));
/* Last resort: rely on the natural alignment of the field type if
   there may be an alignment gap after the previous field.  */
1067 else if (potential_alignment_gap (prev_old_field, old_field,
1070 align = TYPE_ALIGN (field_type);
1071 pos = compute_related_constant (curpos,
1072 round_up (last_pos, align));
1075 /* If we can't compute a position, set it to zero.
1077 ??? We really should abort here, but it's too much work
1078 to get this correct for all cases. */
1081 pos = bitsize_zero_node;
1083 /* See if this type is variable-sized and make a pointer type
1084 and indicate the indirection if so. Beware that the debug
1085 back-end may adjust the position computed above according
1086 to the alignment of the field type, i.e. the pointer type
1087 in this case, if we don't preventively counter that. */
1088 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
1090 field_type = build_pointer_type (field_type);
1091 if (align != 0 && TYPE_ALIGN (field_type) > align)
1093 field_type = copy_node (field_type);
1094 TYPE_ALIGN (field_type) = align;
1099 /* Make a new field name, if necessary. */
/* Encode the variability/alignment in the name: "XVL<n>"/"XVA<n>"
   with the alignment in units, or plain "XVL" (see exp_dbug.ads).  */
1100 if (var || align != 0)
1105 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
1106 align / BITS_PER_UNIT);
1108 strcpy (suffix, "XVL");
1110 field_name = concat_id_with_name (field_name, suffix);
1113 new_field = create_field_decl (field_name, field_type,
1115 DECL_SIZE (old_field), pos, 0);
/* Fields are prepended here and reversed after the loop.  */
1116 TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
1117 TYPE_FIELDS (new_record_type) = new_field;
1119 /* If old_field is a QUAL_UNION_TYPE, take its size as being
1120 zero. The only time it's not the last field of the record
1121 is when there are other components at fixed positions after
1122 it (meaning there was a rep clause for every field) and we
1123 want to be able to encode them. */
1124 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
1125 (TREE_CODE (TREE_TYPE (old_field))
1128 : DECL_SIZE (old_field));
1129 prev_old_field = old_field;
1132 TYPE_FIELDS (new_record_type)
1133 = nreverse (TYPE_FIELDS (new_record_type));
1135 rest_of_type_decl_compilation (TYPE_STUB_DECL (new_record_type));
1138 rest_of_type_decl_compilation (TYPE_STUB_DECL (record_type));
/* NOTE(review): line-sampled excerpt; some original lines (including the
   non-COND_EXPR branch structure and the final return) are not visible.
   Code kept byte-identical; comments only.  */
1141 /* Utility function of above to merge LAST_SIZE, the previous size of a record
1142 with FIRST_BIT and SIZE that describe a field. SPECIAL is nonzero
1143 if this represents a QUAL_UNION_TYPE in which case we must look for
1144 COND_EXPRs and replace a value of zero with the old size. If HAS_REP
1145 is nonzero, we must take the MAX of the end position of this field
1146 with LAST_SIZE. In all other cases, we use FIRST_BIT plus SIZE.
1148 We return an expression for the size. */
1151 merge_sizes (tree last_size, tree first_bit, tree size, bool special,
1154 tree type = TREE_TYPE (last_size);
/* Ordinary field: end position is FIRST_BIT + SIZE, possibly MAXed
   with LAST_SIZE (rep-clause case, per the header comment above).  */
1157 if (!special || TREE_CODE (size) != COND_EXPR)
1159 new = size_binop (PLUS_EXPR, first_bit, size);
1161 new = size_binop (MAX_EXPR, last_size, new);
/* QUAL_UNION_TYPE variant size: recurse into each COND_EXPR arm,
   keeping LAST_SIZE wherever an arm's size is zero.  */
1165 new = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
1166 integer_zerop (TREE_OPERAND (size, 1))
1167 ? last_size : merge_sizes (last_size, first_bit,
1168 TREE_OPERAND (size, 1),
1170 integer_zerop (TREE_OPERAND (size, 2))
1171 ? last_size : merge_sizes (last_size, first_bit,
1172 TREE_OPERAND (size, 2),
1175 /* We don't need any NON_VALUE_EXPRs and they can confuse us (especially
1176 when fed through substitute_in_expr) into thinking that a constant
1177 size is not constant. */
1178 while (TREE_CODE (new) == NON_LVALUE_EXPR)
1179 new = TREE_OPERAND (new, 0);
/* NOTE(review): line-sampled excerpt; the return statements between the
   visible conditions are missing from view.  Code kept byte-identical;
   comments only.  */
1184 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1185 related by the addition of a constant. Return that constant if so. */
1188 compute_related_constant (tree op0, tree op1)
1190 tree op0_var, op1_var;
/* Split each operand into constant + variable parts and compare the
   variable parts; RESULT is the difference of the constant parts.  */
1191 tree op0_con = split_plus (op0, &op0_var);
1192 tree op1_con = split_plus (op1, &op1_var);
1193 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1195 if (operand_equal_p (op0_var, op1_var, 0))
/* Fallback check: OP0 may literally equal OP1_VAR + RESULT.  */
1197 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
/* NOTE(review): line-sampled excerpt; some original lines (braces, the
   STRIP_NOPS call presumably preceding line 1217) are missing from view.
   Code kept byte-identical; comments only.  */
1203 /* Utility function of above to split a tree OP which may be a sum, into a
1204 constant part, which is returned, and a variable part, which is stored
1205 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1209 split_plus (tree in, tree *pvar)
1211 /* Strip NOPS in order to ease the tree traversal and maximize the
1212 potential for constant or plus/minus discovery. We need to be careful
1213 to always return and set *pvar to bitsizetype trees, but it's worth
/* Default: the whole expression is the variable part.  */
1217 *pvar = convert (bitsizetype, in);
1219 if (TREE_CODE (in) == INTEGER_CST)
1221 *pvar = bitsize_zero_node;
1222 return convert (bitsizetype, in);
1224 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
/* Recurse on both operands and recombine the constant and variable
   parts with the same PLUS/MINUS operation.  */
1226 tree lhs_var, rhs_var;
1227 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1228 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
1230 if (lhs_var == TREE_OPERAND (in, 0)
1231 && rhs_var == TREE_OPERAND (in, 1))
1232 return bitsize_zero_node;
1234 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1235 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
1238 return bitsize_zero_node;
/* NOTE(review): line-sampled excerpt; local declarations and the final
   return are missing from view.  Code kept byte-identical; comments only.  */
1241 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1242 subprogram. If it is void_type_node, then we are dealing with a procedure,
1243 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1244 PARM_DECL nodes that are the subprogram arguments. CICO_LIST is the
1245 copy-in/copy-out list to be stored into TYPE_CICO_LIST.
1246 RETURNS_UNCONSTRAINED is nonzero if the function returns an unconstrained
1247 object. RETURNS_BY_REF is nonzero if the function returns by reference.
1248 RETURNS_WITH_DSP is nonzero if the function is to return with a
1249 depressed stack pointer. RETURNS_BY_TARGET_PTR is true if the function
1250 is to be passed (as its first parameter) the address of the place to copy
1254 create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
1255 bool returns_unconstrained, bool returns_by_ref,
1256 bool returns_with_dsp, bool returns_by_target_ptr)
1258 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1259 the subprogram formal parameters. This list is generated by traversing the
1260 input list of PARM_DECL nodes. */
1261 tree param_type_list = NULL;
1265 for (param_decl = param_decl_list; param_decl;
1266 param_decl = TREE_CHAIN (param_decl))
1267 param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
1270 /* The list of the function parameter types has to be terminated by the void
1271 type to signal to the back-end that we are not dealing with a variable
1272 parameter subprogram, but that the subprogram has a fixed number of
1274 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1276 /* The list of argument types has been created in reverse
1278 param_type_list = nreverse (param_type_list);
1280 type = build_function_type (return_type, param_type_list);
1282 /* TYPE may have been shared since GCC hashes types. If it has a CICO_LIST
1283 or the new type should, make a copy of TYPE. Likewise for
1284 RETURNS_UNCONSTRAINED and RETURNS_BY_REF. */
1285 if (TYPE_CI_CO_LIST (type) || cico_list
1286 || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
1287 || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref
1288 || TYPE_RETURNS_BY_TARGET_PTR_P (type) != returns_by_target_ptr)
1289 type = copy_type (type);
/* Stamp the Ada-specific calling-convention flags on the (possibly
   copied) FUNCTION_TYPE.  */
1291 TYPE_CI_CO_LIST (type) = cico_list;
1292 TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
1293 TYPE_RETURNS_STACK_DEPRESSED (type) = returns_with_dsp;
1294 TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
1295 TYPE_RETURNS_BY_TARGET_PTR_P (type) = returns_by_target_ptr;
/* NOTE(review): line-sampled excerpt; the final return of NEW is not
   visible.  Code kept byte-identical; comments only.  */
1299 /* Return a copy of TYPE but safe to modify in any way. */
1302 copy_type (tree type)
1304 tree new = copy_node (type);
1306 /* copy_node clears this field instead of copying it, because it is
1307 aliased with TREE_CHAIN. */
1308 TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type);
/* Detach the copy from the original's pointer/variant chains so that
   modifying it cannot affect TYPE or its variants.  */
1310 TYPE_POINTER_TO (new) = 0;
1311 TYPE_REFERENCE_TO (new) = 0;
1312 TYPE_MAIN_VARIANT (new) = new;
1313 TYPE_NEXT_VARIANT (new) = 0;
/* NOTE(review): line-sampled excerpt; the early-return and the final
   return of TYPE are not visible.  Code kept byte-identical; comments
   only.  */
1318 /* Return an INTEGER_TYPE of SIZETYPE with range MIN to MAX and whose
1319 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position of
1323 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
1325 /* First build a type for the desired range. */
1326 tree type = build_index_2_type (min, max);
1328 /* If this type has the TYPE_INDEX_TYPE we want, return it. Otherwise, if it
1329 doesn't have TYPE_INDEX_TYPE set, set it to INDEX. If TYPE_INDEX_TYPE
1330 is set, but not to INDEX, make a copy of this type with the requested
1331 index type. Note that we have no way of sharing these types, but that's
1332 only a small hole. */
1333 if (TYPE_INDEX_TYPE (type) == index)
1335 else if (TYPE_INDEX_TYPE (type))
1336 type = copy_type (type);
1338 SET_TYPE_INDEX_TYPE (type, index);
/* An artificial TYPE_DECL is created so the type is entered in the
   current binding level (no debug info requested).  */
1339 create_type_decl (NULL_TREE, type, NULL, true, false, gnat_node);
/* NOTE(review): line-sampled excerpt; braces and the final return of
   TYPE_DECL are not visible.  Code kept byte-identical; comments only.  */
1343 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type (a character
1344 string) and TYPE is a ..._TYPE node giving its data type.
1345 ARTIFICIAL_P is true if this is a declaration that was generated
1346 by the compiler. DEBUG_INFO_P is true if we need to write debugging
1347 information about this type. GNAT_NODE is used for the position of
1351 create_type_decl (tree type_name, tree type, struct attrib *attr_list,
1352 bool artificial_p, bool debug_info_p, Node_Id gnat_node)
1354 tree type_decl = build_decl (TYPE_DECL, type_name, type);
1355 enum tree_code code = TREE_CODE (type);
1357 DECL_ARTIFICIAL (type_decl) = artificial_p;
/* Dummy (not-yet-elaborated) types are not pushed into the current
   binding level.  */
1359 if (!TYPE_IS_DUMMY_P (type))
1360 gnat_pushdecl (type_decl, gnat_node);
1362 process_attributes (type_decl, attr_list);
1364 /* Pass type declaration information to the debugger unless this is an
1365 UNCONSTRAINED_ARRAY_TYPE, which the debugger does not support,
1366 and ENUMERAL_TYPE or RECORD_TYPE which is handled separately, or
1367 type for which debugging information was not requested. */
1368 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
1369 DECL_IGNORED_P (type_decl) = 1;
1370 else if (code != ENUMERAL_TYPE
1371 && (code != RECORD_TYPE || TYPE_IS_FAT_POINTER_P (type))
1372 && !((code == POINTER_TYPE || code == REFERENCE_TYPE)
1373 && TYPE_IS_DUMMY_P (TREE_TYPE (type))))
1374 rest_of_type_decl_compilation (type_decl);
/* NOTE(review): line-sampled excerpt; several original lines (the
   INIT_CONST computation head, braces, the final return of VAR_DECL) are
   not visible.  Code kept byte-identical; comments only.  */
1379 /* Helper for create_var_decl and create_true_var_decl. Returns a GCC VAR_DECL
1382 VAR_NAME gives the name of the variable. ASM_NAME is its assembler name
1383 (if provided). TYPE is its data type (a GCC ..._TYPE node). VAR_INIT is
1384 the GCC tree for an optional initial expression; NULL_TREE if none.
1386 CONST_FLAG is true if this variable is constant, in which case we might
1387 return a CONST_DECL node unless CONST_DECL_ALLOWED_FLAG is false.
1389 PUBLIC_FLAG is true if this definition is to be made visible outside of
1390 the current compilation unit. This flag should be set when processing the
1391 variable definitions in a package specification. EXTERN_FLAG is nonzero
1392 when processing an external variable declaration (as opposed to a
1393 definition: no storage is to be allocated for the variable here).
1395 STATIC_FLAG is only relevant when not at top level. In that case
1396 it indicates whether to always allocate storage to the variable.
1398 GNAT_NODE is used for the position of the decl. */
1401 create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
1402 bool const_flag, bool const_decl_allowed_flag,
1403 bool public_flag, bool extern_flag, bool static_flag,
1404 struct attrib *attr_list, Node_Id gnat_node)
/* The initializer counts as constant only if its type matches TYPE and
   it is valid as a static initializer (global/static case) or simply
   TREE_CONSTANT (automatic case).  */
1408 && TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
1409 && (global_bindings_p () || static_flag
1410 ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init)) != 0
1411 : TREE_CONSTANT (var_init)));
1413 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
1414 case the initializer may be used in-lieu of the DECL node (as done in
1415 Identifier_to_gnu). This is useful to prevent the need of elaboration
1416 code when an identifier for which such a decl is made is in turn used as
1417 an initializer. We used to rely on CONST vs VAR_DECL for this purpose,
1418 but extra constraints apply to this choice (see below) and are not
1419 relevant to the distinction we wish to make. */
1420 bool constant_p = const_flag && init_const;
1422 /* The actual DECL node. CONST_DECL was initially intended for enumerals
1423 and may be used for scalars in general but not for aggregates. */
1425 = build_decl ((constant_p && const_decl_allowed_flag
1426 && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
1429 /* If this is external, throw away any initializations (they will be done
1430 elsewhere) unless this is a a constant for which we would like to remain
1431 able to get the initializer. If we are defining a global here, leave a
1432 constant initialization and save any variable elaborations for the
1433 elaboration routine. If we are just annotating types, throw away the
1434 initialization if it isn't a constant. */
1435 if ((extern_flag && !constant_p)
1436 || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
1437 var_init = NULL_TREE;
1439 /* At the global level, an initializer requiring code to be generated
1440 produces elaboration statements. Check that such statements are allowed,
1441 that is, not violating a No_Elaboration_Code restriction. */
1442 if (global_bindings_p () && var_init != 0 && ! init_const)
1443 Check_Elaboration_Code_Allowed (gnat_node);
1445 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
1446 try to fiddle with DECL_COMMON. However, on platforms that don't
1447 support global BSS sections, uninitialized global variables would
1448 go in DATA instead, thus increasing the size of the executable. */
1450 && TREE_CODE (var_decl) == VAR_DECL
1451 && !have_global_bss_p ())
1452 DECL_COMMON (var_decl) = 1;
1453 DECL_INITIAL (var_decl) = var_init;
1454 TREE_READONLY (var_decl) = const_flag;
1455 DECL_EXTERNAL (var_decl) = extern_flag;
1456 TREE_PUBLIC (var_decl) = public_flag || extern_flag;
1457 TREE_CONSTANT (var_decl) = constant_p;
1458 TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
1459 = TYPE_VOLATILE (type);
1461 /* If it's public and not external, always allocate storage for it.
1462 At the global binding level we need to allocate static storage for the
1463 variable if and only if it's not external. If we are not at the top level
1464 we allocate automatic storage unless requested not to. */
1465 TREE_STATIC (var_decl)
1466 = public_flag || (global_bindings_p () ? !extern_flag : static_flag);
1468 if (asm_name && VAR_OR_FUNCTION_DECL_P (var_decl))
1469 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
1471 process_attributes (var_decl, attr_list);
1473 /* Add this decl to the current binding level. */
1474 gnat_pushdecl (var_decl, gnat_node);
1476 if (TREE_SIDE_EFFECTS (var_decl))
1477 TREE_ADDRESSABLE (var_decl) = 1;
/* CONST_DECLs need no assembler output; everything else goes through
   the normal back-end path.  */
1479 if (TREE_CODE (var_decl) != CONST_DECL)
1480 rest_of_decl_compilation (var_decl, global_bindings_p (), 0);
1482 expand_decl (var_decl);
/* NOTE(review): line-sampled excerpt; the const_decl_allowed argument line
   between lines 1496 and 1498 is not visible (presumably passing true,
   given the wrapper's stated purpose -- confirm against the full file).
   Code kept byte-identical; comments only.  */
1487 /* Wrapper around create_var_decl_1 for cases where we don't care whether
1488 a VAR or a CONST decl node is created. */
1491 create_var_decl (tree var_name, tree asm_name, tree type, tree var_init,
1492 bool const_flag, bool public_flag, bool extern_flag,
1493 bool static_flag, struct attrib *attr_list,
1496 return create_var_decl_1 (var_name, asm_name, type, var_init,
1498 public_flag, extern_flag, static_flag,
1499 attr_list, gnat_node);
/* NOTE(review): line-sampled excerpt; the const_decl_allowed argument line
   between lines 1513 and 1515 is not visible (presumably passing false so
   a VAR_DECL is forced -- confirm against the full file).  Code kept
   byte-identical; comments only.  */
1502 /* Wrapper around create_var_decl_1 for cases where a VAR_DECL node is
1503 required. The primary intent is for DECL_CONST_CORRESPONDING_VARs, which
1504 must be VAR_DECLs and on which we want TREE_READONLY set to have them
1505 possibly assigned to a readonly data section. */
1508 create_true_var_decl (tree var_name, tree asm_name, tree type, tree var_init,
1509 bool const_flag, bool public_flag, bool extern_flag,
1510 bool static_flag, struct attrib *attr_list,
1513 return create_var_decl_1 (var_name, asm_name, type, var_init,
1515 public_flag, extern_flag, static_flag,
1516 attr_list, gnat_node);
/* NOTE(review): line-sampled excerpt; braces, some conditions and the
   final return of FIELD_DECL are not visible.  Code kept byte-identical;
   comments only.  */
1519 /* Returns a FIELD_DECL node. FIELD_NAME the field name, FIELD_TYPE is its
1520 type, and RECORD_TYPE is the type of the parent. PACKED is nonzero if
1521 this field is in a record type with a "pragma pack". If SIZE is nonzero
1522 it is the specified size for this field. If POS is nonzero, it is the bit
1523 position. If ADDRESSABLE is nonzero, it means we are allowed to take
1524 the address of this field for aliasing purposes. If it is negative, we
1525 should not make a bitfield, which is used by make_aligning_type. */
1528 create_field_decl (tree field_name, tree field_type, tree record_type,
1529 int packed, tree size, tree pos, int addressable)
1531 tree field_decl = build_decl (FIELD_DECL, field_name, field_type);
1533 DECL_CONTEXT (field_decl) = record_type;
1534 TREE_READONLY (field_decl) = TYPE_READONLY (field_type);
1536 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
1537 byte boundary since GCC cannot handle less-aligned BLKmode bitfields. */
1538 if (packed && TYPE_MODE (field_type) == BLKmode)
1539 DECL_ALIGN (field_decl) = BITS_PER_UNIT;
1541 /* If a size is specified, use it. Otherwise, if the record type is packed
1542 compute a size to use, which may differ from the object's natural size.
1543 We always set a size in this case to trigger the checks for bitfield
1544 creation below, which is typically required when no position has been
1547 size = convert (bitsizetype, size);
1548 else if (packed == 1)
1550 size = rm_size (field_type);
1552 /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
1554 if (TREE_CODE (size) == INTEGER_CST
1555 && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
1556 size = round_up (size, BITS_PER_UNIT);
1559 /* If we may, according to ADDRESSABLE, make a bitfield if a size is
1560 specified for two reasons: first if the size differs from the natural
1561 size. Second, if the alignment is insufficient. There are a number of
1562 ways the latter can be true.
1564 We never make a bitfield if the type of the field has a nonconstant size,
1565 because no such entity requiring bitfield operations should reach here.
1567 We do *preventively* make a bitfield when there might be the need for it
1568 but we don't have all the necessary information to decide, as is the case
1569 of a field with no specified position in a packed record.
1571 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
1572 in layout_decl or finish_record_type to clear the bit_field indication if
1573 it is in fact not needed. */
1574 if (addressable >= 0
1576 && TREE_CODE (size) == INTEGER_CST
1577 && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
1578 && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
1579 || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
1581 || (TYPE_ALIGN (record_type) != 0
1582 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
1584 DECL_BIT_FIELD (field_decl) = 1;
1585 DECL_SIZE (field_decl) = size;
/* For an unpacked, unpositioned bitfield, cap the alignment at the
   record's alignment when one is set.  */
1586 if (!packed && !pos)
1587 DECL_ALIGN (field_decl)
1588 = (TYPE_ALIGN (record_type) != 0
1589 ? MIN (TYPE_ALIGN (record_type), TYPE_ALIGN (field_type))
1590 : TYPE_ALIGN (field_type));
1593 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
1594 DECL_ALIGN (field_decl)
1595 = MAX (DECL_ALIGN (field_decl),
1596 DECL_BIT_FIELD (field_decl) ? 1
1597 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT
1598 : TYPE_ALIGN (field_type));
1602 /* We need to pass in the alignment the DECL is known to have.
1603 This is the lowest-order bit set in POS, but no more than
1604 the alignment of the record, if one is specified. Note
1605 that an alignment of 0 is taken as infinite. */
1606 unsigned int known_align;
1608 if (host_integerp (pos, 1))
1609 known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
1611 known_align = BITS_PER_UNIT;
1613 if (TYPE_ALIGN (record_type)
1614 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
1615 known_align = TYPE_ALIGN (record_type);
1617 layout_decl (field_decl, known_align);
1618 SET_DECL_OFFSET_ALIGN (field_decl,
1619 host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
1621 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
1622 &DECL_FIELD_BIT_OFFSET (field_decl),
1623 DECL_OFFSET_ALIGN (field_decl), pos);
1625 DECL_HAS_REP_P (field_decl) = 1;
1628 /* In addition to what our caller says, claim the field is addressable if we
1629 know we might ever attempt to take its address, then mark the decl as
1630 nonaddressable accordingly.
1632 The field may also be "technically" nonaddressable, meaning that even if
1633 we attempt to take the field's address we will actually get the address
1634 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
1635 value we have at this point is not accurate enough, so we don't account
1636 for this here and let finish_record_type decide. */
1638 /* We will take the address in any argument passing sequence if the field
1639 type is passed by reference, and we might need the address for any array
1640 type, even if normally passed by-copy, to construct a fat pointer if the
1641 field is used as an actual for an unconstrained formal. */
1642 if (TREE_CODE (field_type) == ARRAY_TYPE
1643 || must_pass_by_ref (field_type) || default_pass_by_ref (field_type))
1646 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
/* NOTE(review): line-sampled excerpt; braces and the final return of
   PARAM_DECL are not visible.  Code kept byte-identical; comments only.  */
1651 /* Returns a PARM_DECL node. PARAM_NAME is the name of the parameter,
1652 PARAM_TYPE is its type. READONLY is true if the parameter is
1653 readonly (either an IN parameter or an address of a pass-by-ref
1657 create_param_decl (tree param_name, tree param_type, bool readonly)
1659 tree param_decl = build_decl (PARM_DECL, param_name, param_type);
1661 /* Honor targetm.calls.promote_prototypes(), as not doing so can
1662 lead to various ABI violations. */
1663 if (targetm.calls.promote_prototypes (param_type)
1664 && (TREE_CODE (param_type) == INTEGER_TYPE
1665 || TREE_CODE (param_type) == ENUMERAL_TYPE)
1666 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
1668 /* We have to be careful about biased types here. Make a subtype
1669 of integer_type_node with the proper biasing. */
1670 if (TREE_CODE (param_type) == INTEGER_TYPE
1671 && TYPE_BIASED_REPRESENTATION_P (param_type))
/* Copy the range type so the biased-representation flag set just
   below cannot leak onto a shared type.  */
1674 = copy_type (build_range_type (integer_type_node,
1675 TYPE_MIN_VALUE (param_type),
1676 TYPE_MAX_VALUE (param_type)));
1678 TYPE_BIASED_REPRESENTATION_P (param_type) = 1;
/* Non-biased narrow integers are simply promoted to int.  */
1681 param_type = integer_type_node;
1684 DECL_ARG_TYPE (param_decl) = param_type;
1685 TREE_READONLY (param_decl) = readonly;
/* NOTE(review): line-sampled excerpt; braces, break statements and some
   case bodies between the numbered lines are not visible.  Code kept
   byte-identical; comments only.  */
1689 /* Given a DECL and ATTR_LIST, process the listed attributes. */
1692 process_attributes (tree decl, struct attrib *attr_list)
1694 for (; attr_list; attr_list = attr_list->next)
1695 switch (attr_list->type)
1697 case ATTR_MACHINE_ATTRIBUTE:
1698 decl_attributes (&decl, tree_cons (attr_list->name, attr_list->args,
1700 ATTR_FLAG_TYPE_IN_PLACE);
1703 case ATTR_LINK_ALIAS:
1704 if (! DECL_EXTERNAL (decl))
1706 TREE_STATIC (decl) = 1;
1707 assemble_alias (decl, attr_list->name);
1711 case ATTR_WEAK_EXTERNAL:
/* Weak declarations are only honored where the target supports
   them; otherwise a warning is posted at the attribute's sloc.  */
1713 declare_weak (decl);
1715 post_error ("?weak declarations not supported on this target",
1716 attr_list->error_point);
1719 case ATTR_LINK_SECTION:
1720 if (targetm.have_named_sections)
1722 DECL_SECTION_NAME (decl)
1723 = build_string (IDENTIFIER_LENGTH (attr_list->name),
1724 IDENTIFIER_POINTER (attr_list->name));
1725 DECL_COMMON (decl) = 0;
1728 post_error ("?section attributes are not supported for this target",
1729 attr_list->error_point);
1732 case ATTR_LINK_CONSTRUCTOR:
1733 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1734 TREE_USED (decl) = 1;
1737 case ATTR_LINK_DESTRUCTOR:
1738 DECL_STATIC_DESTRUCTOR (decl) = 1;
1739 TREE_USED (decl) = 1;
/* NOTE(review): line-sampled excerpt; the return type line and braces are
   not visible.  Code kept byte-identical; comments only.  DECL must
   already carry a renamed object (asserted), and is appended to the
   GC-managed global_renaming_pointers vector.  */
1744 /* Record a global renaming pointer. */
1747 record_global_renaming_pointer (tree decl)
1749 gcc_assert (DECL_RENAMED_OBJECT (decl));
1750 VEC_safe_push (tree, gc, global_renaming_pointers, decl);
/* NOTE(review): line-sampled excerpt; local declarations of I and ITER are
   not visible.  Code kept byte-identical; comments only.  Clears the
   renamed-object link of every recorded decl, then frees the vector.  */
1753 /* Invalidate the global renaming pointers. */
1756 invalidate_global_renaming_pointers (void)
1761 for (i = 0; VEC_iterate(tree, global_renaming_pointers, i, iter); i++)
1762 SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);
1764 VEC_free (tree, gc, global_renaming_pointers);
/* NOTE(review): line-sampled excerpt; the header comment is truncated
   (presumably "...which must be a power of 2" -- confirm against the full
   file), and the fallback return is not visible.  Code kept byte-identical;
   comments only.  */
1767 /* Return true if VALUE is a known to be a multiple of FACTOR, which must be
1771 value_factor_p (tree value, HOST_WIDE_INT factor)
1773 if (host_integerp (value, 1))
1774 return tree_low_cst (value, 1) % factor == 0;
/* For a product, it suffices that either operand be a multiple.  */
1776 if (TREE_CODE (value) == MULT_EXPR)
1777 return (value_factor_p (TREE_OPERAND (value, 0), factor)
1778 || value_factor_p (TREE_OPERAND (value, 1), factor));
/* NOTE(review): line-sampled excerpt; the early return for a null
   PREV_FIELD and the final fallback return are not visible.  Code kept
   byte-identical; comments only.  */
1783 /* Given 2 consecutive field decls PREV_FIELD and CURR_FIELD, return true
1784 unless we can prove these 2 fields are laid out in such a way that no gap
1785 exist between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
1786 is the distance in bits between the end of PREV_FIELD and the starting
1787 position of CURR_FIELD. It is ignored if null. */
1790 potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
1792 /* If this is the first field of the record, there cannot be any gap */
1796 /* If the previous field is a union type, then return False: The only
1797 time when such a field is not the last field of the record is when
1798 there are other components at fixed positions after it (meaning there
1799 was a rep clause for every field), in which case we don't want the
1800 alignment constraint to override them. */
1801 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
1804 /* If the distance between the end of prev_field and the beginning of
1805 curr_field is constant, then there is a gap if the value of this
1806 constant is not null. */
1807 if (offset && host_integerp (offset, 1))
1808 return !integer_zerop (offset);
1810 /* If the size and position of the previous field are constant,
1811 then check the sum of this size and position. There will be a gap
1812 iff it is not multiple of the current field alignment. */
1813 if (host_integerp (DECL_SIZE (prev_field), 1)
1814 && host_integerp (bit_position (prev_field), 1))
1815 return ((tree_low_cst (bit_position (prev_field), 1)
1816 + tree_low_cst (DECL_SIZE (prev_field), 1))
1817 % DECL_ALIGN (curr_field) != 0);
1819 /* If both the position and size of the previous field are multiples
1820 of the current field alignment, there cannot be any gap. */
1821 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
1822 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
1825 /* Fallback, return that there may be a potential gap */
/* NOTE(review): line-sampled excerpt; braces and the return of LABEL_DECL
   are not visible.  Code kept byte-identical; comments only.  The label is
   attached to the current function and given the current source location.  */
1829 /* Returns a LABEL_DECL node for LABEL_NAME. */
1832 create_label_decl (tree label_name)
1834 tree label_decl = build_decl (LABEL_DECL, label_name, void_type_node);
1836 DECL_CONTEXT (label_decl) = current_function_decl;
1837 DECL_MODE (label_decl) = VOIDmode;
1838 DECL_SOURCE_LOCATION (label_decl) = input_location;
/* NOTE(review): line-sampled excerpt; braces and some statements (e.g. the
   DECL_EXTERNAL propagation body at line ~1866, the inline/asm-name guard
   conditions) are not visible.  Code kept byte-identical; comments only.  */
1843 /* Returns a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
1844 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1845 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1846 PARM_DECL nodes chained through the TREE_CHAIN field).
1848 INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
1849 appropriate fields in the FUNCTION_DECL. GNAT_NODE gives the location. */
1852 create_subprog_decl (tree subprog_name, tree asm_name,
1853 tree subprog_type, tree param_decl_list, bool inline_flag,
1854 bool public_flag, bool extern_flag,
1855 struct attrib *attr_list, Node_Id gnat_node)
1857 tree return_type = TREE_TYPE (subprog_type);
1858 tree subprog_decl = build_decl (FUNCTION_DECL, subprog_name, subprog_type);
1860 /* If this is a function nested inside an inlined external function, it
1861 means we aren't going to compile the outer function unless it is
1862 actually inlined, so do the same for us. */
1863 if (current_function_decl && DECL_INLINE (current_function_decl)
1864 && DECL_EXTERNAL (current_function_decl))
1867 DECL_EXTERNAL (subprog_decl) = extern_flag;
1868 TREE_PUBLIC (subprog_decl) = public_flag;
1869 TREE_STATIC (subprog_decl) = 1;
/* Readonly/volatile function types map to the const/noreturn-style
   bits on the decl.  */
1870 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
1871 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
1872 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
1873 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
1874 DECL_RESULT (subprog_decl) = build_decl (RESULT_DECL, 0, return_type);
1875 DECL_ARTIFICIAL (DECL_RESULT (subprog_decl)) = 1;
1876 DECL_IGNORED_P (DECL_RESULT (subprog_decl)) = 1;
1878 /* TREE_ADDRESSABLE is set on the result type to request the use of the
1879 target by-reference return mechanism. This is not supported all the
1880 way down to RTL expansion with GCC 4, which ICEs on temporary creation
1881 attempts with such a type and expects DECL_BY_REFERENCE to be set on
1882 the RESULT_DECL instead - see gnat_genericize for more details. */
1883 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (subprog_decl))))
1885 tree result_decl = DECL_RESULT (subprog_decl);
1887 TREE_ADDRESSABLE (TREE_TYPE (result_decl)) = 0;
1888 DECL_BY_REFERENCE (result_decl) = 1;
1892 DECL_DECLARED_INLINE_P (subprog_decl) = 1;
1895 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
1897 process_attributes (subprog_decl, attr_list);
1899 /* Add this decl to the current binding level. */
1900 gnat_pushdecl (subprog_decl, gnat_node);
1902 /* Output the assembler code and/or RTL for the declaration. */
1903 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
1905 return subprog_decl;
1908 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
1909 body. This routine needs to be invoked before processing the declarations
1910 appearing in the subprogram. */
/* NOTE(review): this chunk is elided — the return type, braces and the
   binding-level entry code are not visible here.  */
1913 begin_subprog_body (tree subprog_decl)
/* Make SUBPROG_DECL the function currently being compiled and let the
   back-end announce it (e.g. in verbose mode). */
1917 current_function_decl = subprog_decl;
1918 announce_function (subprog_decl);
1920 /* Enter a new binding level and show that all the parameters belong to
/* Each PARM_DECL's context is the subprogram itself. */
1923 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
1924 param_decl = TREE_CHAIN (param_decl))
1925 DECL_CONTEXT (param_decl) = subprog_decl;
/* Create the RTL for the function declaration now that it is defined. */
1927 make_decl_rtl (subprog_decl);
1929 /* We handle pending sizes via the elaboration of types, so we don't need to
1930 save them. This causes them to be marked as part of the outer function
1931 and then discarded. */
1932 get_pending_sizes ();
1936 /* Helper for the genericization callback. Return a dereference of VAL
1937 if it is of a reference type. */
1940 convert_from_reference (tree val)
1942 tree value_type, ref;
/* Nothing to do unless VAL has reference type.  (The early-return body is
   elided in this extraction.)  */
1944 if (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE)
/* Build *VAL with the referenced type. */
1947 value_type = TREE_TYPE (TREE_TYPE (val));
1948 ref = build1 (INDIRECT_REF, value_type, val);
1950 /* See if what we reference is CONST or VOLATILE, which requires
1951 looking into array types to get to the component type. */
1953 while (TREE_CODE (value_type) == ARRAY_TYPE)
1954 value_type = TREE_TYPE (value_type);
/* Propagate qualifiers of the component type onto the dereference;
   a volatile access, or a side-effecting base, makes the whole
   INDIRECT_REF side-effecting. */
1957 = (TYPE_QUALS (value_type) & TYPE_QUAL_CONST);
1958 TREE_THIS_VOLATILE (ref)
1959 = (TYPE_QUALS (value_type) & TYPE_QUAL_VOLATILE);
1961 TREE_SIDE_EFFECTS (ref)
1962 = (TREE_THIS_VOLATILE (ref) || TREE_SIDE_EFFECTS (val));
1967 /* Helper for the genericization callback. Returns true if T denotes
1968 a RESULT_DECL with DECL_BY_REFERENCE set. */
1971 is_byref_result (tree t)
/* Such a decl is the one created by create_subprog_decl for a function
   returning by invisible reference; see gnat_genericize below. */
1973 return (TREE_CODE (t) == RESULT_DECL && DECL_BY_REFERENCE (t));
1977 /* Tree walking callback for gnat_genericize. Currently ...
1979 o Adjust references to the function's DECL_RESULT if it is marked
1980 DECL_BY_REFERENCE and so has had its type turned into a reference
1981 type at the end of the function compilation. */
1984 gnat_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
1986 /* This implementation is modeled after what the C++ front-end is
1987 doing, basis of the downstream passes behavior. */
1989 tree stmt = *stmt_p;
/* DATA is the pointer set recording the nodes already visited. */
1990 struct pointer_set_t *p_set = (struct pointer_set_t*) data;
1992 /* If we have a direct mention of the result decl, dereference. */
1993 if (is_byref_result (stmt))
1995 *stmt_p = convert_from_reference (stmt);
2000 /* Otherwise, no need to walk the same tree twice. */
2001 if (pointer_set_contains (p_set, stmt))
2007 /* If we are taking the address of what now is a reference, just get the
2009 if (TREE_CODE (stmt) == ADDR_EXPR
2010 && is_byref_result (TREE_OPERAND (stmt, 0)))
2012 *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
2016 /* Don't dereference a by-reference RESULT_DECL inside a RETURN_EXPR. */
2017 else if (TREE_CODE (stmt) == RETURN_EXPR
2018 && TREE_OPERAND (stmt, 0)
2019 && is_byref_result (TREE_OPERAND (stmt, 0)))
2022 /* Don't look inside trees that cannot embed references of interest. */
2023 else if (IS_TYPE_OR_DECL_P (stmt))
/* Remember we have processed this node so it is not rewritten again. */
2026 pointer_set_insert (p_set, *stmt_p);
2031 /* Perform lowering of Ada trees to GENERIC. In particular:
2033 o Turn a DECL_BY_REFERENCE RESULT_DECL into a real by-reference decl
2034 and adjust all the references to this decl accordingly. */
2037 gnat_genericize (tree fndecl)
2039 /* Prior to GCC 4, an explicit By_Reference result mechanism for a function
2040 was handled by simply setting TREE_ADDRESSABLE on the result type.
2041 Everything required to actually pass by invisible ref using the target
2042 mechanism (e.g. extra parameter) was handled at RTL expansion time.
2044 This doesn't work with GCC 4 any more for several reasons. First, the
2045 gimplification process might need the creation of temporaries of this
2046 type, and the gimplifier ICEs on such attempts. Second, the middle-end
2047 now relies on a different attribute for such cases (DECL_BY_REFERENCE on
2048 RESULT/PARM_DECLs), and expects the user invisible by-reference-ness to
2049 be explicitly accounted for by the front-end in the function body.
2051 We achieve the complete transformation in two steps:
2053 1/ create_subprog_decl performs early attribute tweaks: it clears
2054 TREE_ADDRESSABLE from the result type and sets DECL_BY_REFERENCE on
2055 the result decl. The former ensures that the bit isn't set in the GCC
2056 tree saved for the function, so prevents ICEs on temporary creation.
2057 The latter we use here to trigger the rest of the processing.
2059 2/ This function performs the type transformation on the result decl
2060 and adjusts all the references to this decl from the function body
2063 Clearing TREE_ADDRESSABLE from the type differs from the C++ front-end
2064 strategy, which escapes the gimplifier temporary creation issues by
2065 creating its own temporaries using TARGET_EXPR nodes. Our way relies
2066 on simple specific support code in aggregate_value_p to look at the
2067 target function result decl explicitly. */
2069 struct pointer_set_t *p_set;
2070 tree decl_result = DECL_RESULT (fndecl);
/* Nothing to do unless create_subprog_decl flagged this result decl. */
2072 if (!DECL_BY_REFERENCE (decl_result))
2075 /* Make the DECL_RESULT explicitly by-reference and adjust all the
2076 occurrences in the function body using the common tree-walking facility.
2077 We want to see every occurrence of the result decl to adjust the
2078 referencing tree, so need to use our own pointer set to control which
2079 trees should be visited again or not. */
2081 p_set = pointer_set_create ();
/* Turn the result type into a reference type and recompute the decl's
   layout accordingly. */
2083 TREE_TYPE (decl_result) = build_reference_type (TREE_TYPE (decl_result));
2084 TREE_ADDRESSABLE (decl_result) = 0;
2085 relayout_decl (decl_result);
2087 walk_tree (&DECL_SAVED_TREE (fndecl), gnat_genericize_r, p_set, NULL);
2089 pointer_set_destroy (p_set);
2092 /* Finish the definition of the current subprogram and compile it all the way
2093 to assembler language output. BODY is the tree corresponding to
2097 end_subprog_body (tree body)
2099 tree fndecl = current_function_decl;
2101 /* Mark the BLOCK for this level as being for this function and pop the
2102 level. Since the vars in it are the parameters, clear them. */
2103 BLOCK_VARS (current_binding_level->block) = 0;
2104 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
2105 DECL_INITIAL (fndecl) = current_binding_level->block;
2108 /* Deal with inline. If declared inline or we should default to inline,
2109 set the flag in the decl. */
2110 DECL_INLINE (fndecl)
2111 = DECL_DECLARED_INLINE_P (fndecl) || flag_inline_trees == 2;
2113 /* We handle pending sizes via the elaboration of types, so we don't
2114 need to save them. */
2115 get_pending_sizes ();
2117 /* Mark the RESULT_DECL as being in this subprogram. */
2118 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
/* Attach the completed body and pop back to the enclosing function,
   if any (DECL_CONTEXT is NULL for a top-level subprogram). */
2120 DECL_SAVED_TREE (fndecl) = body;
2122 current_function_decl = DECL_CONTEXT (fndecl);
2125 /* We cannot track the location of errors past this point. */
2126 error_gnat_node = Empty;
2128 /* If we're only annotating types, don't actually compile this function. */
2129 if (type_annotate_only)
2132 /* Perform the required pre-gimplification transformations on the tree. */
2133 gnat_genericize (fndecl);
2135 /* We do different things for nested and non-nested functions.
2136 ??? This should be in cgraph. */
2137 if (!DECL_CONTEXT (fndecl))
2139 gnat_gimplify_function (fndecl);
2140 cgraph_finalize_function (fndecl, false);
2143 /* Register this function with cgraph just far enough to get it
2144 added to our parent's nested function list. */
2145 (void) cgraph_node (fndecl);
2148 /* Convert FNDECL's code to GIMPLE and handle any nested functions. */
2151 gnat_gimplify_function (tree fndecl)
2153 struct cgraph_node *cgn;
/* Dump the GENERIC form, gimplify, then dump the GIMPLE form, so both
   are available with -fdump-tree-original / -fdump-tree-generic. */
2155 dump_function (TDI_original, fndecl);
2156 gimplify_function_tree (fndecl);
2157 dump_function (TDI_generic, fndecl);
2159 /* Convert all nested functions to GIMPLE now. We do things in this order
2160 so that items like VLA sizes are expanded properly in the context of the
2161 correct function. */
2162 cgn = cgraph_node (fndecl);
2163 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
2164 gnat_gimplify_function (cgn->decl);
/* Register builtin DECL in the current binding context; Empty means the
   decl is not associated with any specific Ada source node.  (Return
   statement elided in this extraction — presumably returns DECL.)  */
2169 gnat_builtin_function (tree decl)
2171 gnat_pushdecl (decl, Empty);
2175 /* Handle a "const" attribute; arguments as in
2176 struct attribute_spec.handler. */
2179 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
2180 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
/* "const" on a function means no side effects: modeled with
   TREE_READONLY on the FUNCTION_DECL.  The non-function branch
   (which sets *no_add_attrs) is elided in this extraction. */
2183 if (TREE_CODE (*node) == FUNCTION_DECL)
2184 TREE_READONLY (*node) = 1;
2186 *no_add_attrs = true;
2191 /* Handle a "nothrow" attribute; arguments as in
2192 struct attribute_spec.handler. */
2195 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
2196 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
/* Mark the function as never raising an exception.  The non-function
   branch (which sets *no_add_attrs) is elided in this extraction. */
2199 if (TREE_CODE (*node) == FUNCTION_DECL)
2200 TREE_NOTHROW (*node) = 1;
2202 *no_add_attrs = true;
2207 /* Return an integer type with the number of bits of precision given by
2208 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
2209 it is a signed type. */
2212 gnat_type_for_size (unsigned precision, int unsignedp)
/* Types up to 2 * MAX_BITS_PER_WORD bits are cached in
   signed_and_unsigned_types, indexed by precision and signedness. */
2217 if (precision <= 2 * MAX_BITS_PER_WORD
2218 && signed_and_unsigned_types[precision][unsignedp])
2219 return signed_and_unsigned_types[precision][unsignedp];
2222 t = make_unsigned_type (precision);
2224 t = make_signed_type (precision);
2226 if (precision <= 2 * MAX_BITS_PER_WORD)
2227 signed_and_unsigned_types[precision][unsignedp] = t;
/* Give the new type a name of the form SIGNED_nn or UNSIGNED_nn. */
2231 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
2232 TYPE_NAME (t) = get_identifier (type_name);
2238 /* Likewise for floating-point types. */
2241 float_type_for_precision (int precision, enum machine_mode mode)
/* One float type per machine mode, cached in float_types[]. */
2246 if (float_types[(int) mode])
2247 return float_types[(int) mode];
2249 float_types[(int) mode] = t = make_node (REAL_TYPE);
2250 TYPE_PRECISION (t) = precision;
/* Layout (elided here) must have selected exactly MODE for this
   precision, otherwise the cache entry would be wrong. */
2253 gcc_assert (TYPE_MODE (t) == mode);
2256 sprintf (type_name, "FLOAT_%d", precision);
2257 TYPE_NAME (t) = get_identifier (type_name);
2263 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
2264 an unsigned type; otherwise a signed type is returned. */
2267 gnat_type_for_mode (enum machine_mode mode, int unsignedp)
/* Dispatch on the mode class; the BLKmode and complex-mode branches
   are elided in this extraction. */
2269 if (mode == BLKmode)
2271 else if (mode == VOIDmode)
2272 return void_type_node;
2273 else if (COMPLEX_MODE_P (mode))
2275 else if (SCALAR_FLOAT_MODE_P (mode))
2276 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
2277 else if (SCALAR_INT_MODE_P (mode))
2278 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2283 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
2286 gnat_unsigned_type (tree type_node)
2288 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
/* For an Ada modular type (or a type whose base is modular), copy the
   generic unsigned type so we can record the modular type in TREE_TYPE
   without disturbing the cached node. */
2290 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2292 type = copy_node (type);
2293 TREE_TYPE (type) = type_node;
2295 else if (TREE_TYPE (type_node)
2296 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2297 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2299 type = copy_node (type);
2300 TREE_TYPE (type) = TREE_TYPE (type_node);
2306 /* Return the signed version of a TYPE_NODE, a scalar type. */
2309 gnat_signed_type (tree type_node)
2311 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
/* Mirror of gnat_unsigned_type: preserve modularity information in
   TREE_TYPE by copying the generic signed type. */
2313 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2315 type = copy_node (type);
2316 TREE_TYPE (type) = type_node;
2318 else if (TREE_TYPE (type_node)
2319 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2320 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2322 type = copy_node (type);
2323 TREE_TYPE (type) = TREE_TYPE (type_node);
2330 /* EXP is an expression for the size of an object. If this size contains
2331 discriminant references, replace them with the maximum (if MAX_P) or
2332 minimum (if !MAX_P) possible value of the discriminant. */
2335 max_size (tree exp, bool max_p)
2337 enum tree_code code = TREE_CODE (exp);
2338 tree type = TREE_TYPE (exp);
/* Recurse on EXP according to its tree class; leaves (constants, decls)
   and the elided branches return EXP-derived values directly. */
2340 switch (TREE_CODE_CLASS (code))
2342 case tcc_declaration:
2347 if (code == CALL_EXPR)
/* Rebuild the call with each argument replaced by its max/min size. */
2350 int i, n = call_expr_nargs (exp);
2353 argarray = (tree *) alloca (n * sizeof (tree));
2354 for (i = 0; i < n; i++)
2355 argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
2356 return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
2361 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
2362 modify. Otherwise, we treat it like a variable. */
2363 if (!CONTAINS_PLACEHOLDER_P (exp))
/* A placeholder reference is replaced by the bound of the
   discriminant's type: TYPE_MAX_VALUE for MAX_P, else TYPE_MIN_VALUE. */
2366 type = TREE_TYPE (TREE_OPERAND (exp, 1));
2368 max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), true);
/* A comparison's size contribution is at most 1. */
2370 case tcc_comparison:
2371 return max_p ? size_one_node : size_zero_node;
2375 case tcc_expression:
2376 switch (TREE_CODE_LENGTH (code))
2379 if (code == NON_LVALUE_EXPR)
2380 return max_size (TREE_OPERAND (exp, 0), max_p);
/* NEGATE flips the direction of the bound we are looking for. */
2383 fold_build1 (code, type,
2384 max_size (TREE_OPERAND (exp, 0),
2385 code == NEGATE_EXPR ? !max_p : max_p));
2388 if (code == COMPOUND_EXPR)
2389 return max_size (TREE_OPERAND (exp, 1), max_p);
2391 /* Calculate "(A ? B : C) - D" as "A ? B - D : C - D" which
2392 may provide a tighter bound on max_size. */
2393 if (code == MINUS_EXPR
2394 && TREE_CODE (TREE_OPERAND (exp, 0)) == COND_EXPR)
2396 tree lhs = fold_build2 (MINUS_EXPR, type,
2397 TREE_OPERAND (TREE_OPERAND (exp, 0), 1),
2398 TREE_OPERAND (exp, 1));
2399 tree rhs = fold_build2 (MINUS_EXPR, type,
2400 TREE_OPERAND (TREE_OPERAND (exp, 0), 2),
2401 TREE_OPERAND (exp, 1));
2402 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2403 max_size (lhs, max_p),
2404 max_size (rhs, max_p));
/* Generic binary case; for MINUS the RHS bound direction is flipped. */
2408 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
2409 tree rhs = max_size (TREE_OPERAND (exp, 1),
2410 code == MINUS_EXPR ? !max_p : max_p);
2412 /* Special-case wanting the maximum value of a MIN_EXPR.
2413 In that case, if one side overflows, return the other.
2414 sizetype is signed, but we know sizes are non-negative.
2415 Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
2416 overflowing or the maximum possible value and the RHS
2420 && TREE_CODE (rhs) == INTEGER_CST
2421 && TREE_OVERFLOW (rhs))
2425 && TREE_CODE (lhs) == INTEGER_CST
2426 && TREE_OVERFLOW (lhs))
2428 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
2429 && ((TREE_CODE (lhs) == INTEGER_CST
2430 && TREE_OVERFLOW (lhs))
2431 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
2432 && !TREE_CONSTANT (rhs))
2435 return fold_build2 (code, type, lhs, rhs);
2439 if (code == SAVE_EXPR)
/* A conditional's bound is the max (or min) of both arms. */
2441 else if (code == COND_EXPR)
2442 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2443 max_size (TREE_OPERAND (exp, 1), max_p),
2444 max_size (TREE_OPERAND (exp, 2), max_p));
2447 /* Other tree classes cannot happen. */
2455 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2456 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2457 Return a constructor for the template. */
2460 build_template (tree template_type, tree array_type, tree expr)
2462 tree template_elts = NULL_TREE;
2463 tree bound_list = NULL_TREE;
/* Strip a padding or justified-modular wrapper to reach the array. */
2466 if (TREE_CODE (array_type) == RECORD_TYPE
2467 && (TYPE_IS_PADDING_P (array_type)
2468 || TYPE_JUSTIFIED_MODULAR_P (array_type)))
2469 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
2471 if (TREE_CODE (array_type) == ARRAY_TYPE
2472 || (TREE_CODE (array_type) == INTEGER_TYPE
2473 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
2474 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
2476 /* First make the list for a CONSTRUCTOR for the template. Go down the
2477 field list of the template instead of the type chain because this
2478 array might be an Ada array of arrays and we can't tell where the
2479 nested arrays stop being the underlying object. */
/* Template fields come in (min, max) pairs, hence the double
   TREE_CHAIN step per iteration. */
2481 for (field = TYPE_FIELDS (template_type); field;
2483 ? (bound_list = TREE_CHAIN (bound_list))
2484 : (array_type = TREE_TYPE (array_type))),
2485 field = TREE_CHAIN (TREE_CHAIN (field)))
2487 tree bounds, min, max;
2489 /* If we have a bound list, get the bounds from there. Likewise
2490 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
2491 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
2492 This will give us a maximum range. */
2494 bounds = TREE_VALUE (bound_list);
2495 else if (TREE_CODE (array_type) == ARRAY_TYPE)
2496 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
2497 else if (expr && TREE_CODE (expr) == PARM_DECL
2498 && DECL_BY_COMPONENT_PTR_P (expr))
2499 bounds = TREE_TYPE (field);
2503 min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
2504 max = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MAX_VALUE (bounds));
2506 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
2507 substitute it from OBJECT. */
2508 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
2509 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
/* Accumulate (field, min) then (chain-field, max); the list is built
   backwards and reversed below. */
2511 template_elts = tree_cons (TREE_CHAIN (field), max,
2512 tree_cons (field, min, template_elts));
2515 return gnat_build_constructor (template_type, nreverse (template_elts));
2518 /* Build a VMS descriptor from a Mechanism_Type, which must specify
2519 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2520 in the type contains in its DECL_INITIAL the expression to use when
2521 a constructor is made for the type. GNAT_ENTITY is an entity used
2522 to print out an error message if the mechanism cannot be applied to
2523 an object of that type and also for the name. */
2526 build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2528 tree record_type = make_node (RECORD_TYPE);
2529 tree pointer32_type;
2530 tree field_list = 0;
2539 /* If TYPE is an unconstrained array, use the underlying array type. */
2540 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2541 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2543 /* If this is an array, compute the number of dimensions in the array,
2544 get the index types, and point to the inner type. */
2545 if (TREE_CODE (type) != ARRAY_TYPE)
2548 for (ndim = 1, inner_type = type;
2549 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2550 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2551 ndim++, inner_type = TREE_TYPE (inner_type))
2554 idx_arr = (tree *) alloca (ndim * sizeof (tree));
/* Fortran convention stores dimensions in reverse order, except for
   NCA descriptors. */
2556 if (mech != By_Descriptor_NCA
2557 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2558 for (i = ndim - 1, inner_type = type;
2560 i--, inner_type = TREE_TYPE (inner_type))
2561 idx_arr[i] = TYPE_DOMAIN (inner_type);
2563 for (i = 0, inner_type = type;
2565 i++, inner_type = TREE_TYPE (inner_type))
2566 idx_arr[i] = TYPE_DOMAIN (inner_type);
2568 /* Now get the DTYPE value. */
/* DTYPE is the VMS data-type code stored in the descriptor; the
   numeric codes below follow the OpenVMS calling standard. */
2569 switch (TREE_CODE (type))
2573 if (TYPE_VAX_FLOATING_POINT_P (type))
2574 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2587 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2590 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2593 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2596 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2599 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2602 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2608 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2612 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2613 && TYPE_VAX_FLOATING_POINT_P (type))
2614 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2626 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2637 /* Get the CLASS value. */
/* CLASS distinguishes the descriptor layouts: A (array), NCA
   (non-contiguous array), SB (string with bounds), S (string). */
2640 case By_Descriptor_A:
2643 case By_Descriptor_NCA:
2646 case By_Descriptor_SB:
2650 case By_Descriptor_S:
2656 /* Make the type for a descriptor for VMS. The first four fields
2657 are the same for all types. */
2660 = chainon (field_list,
2661 make_descriptor_field
2662 ("LENGTH", gnat_type_for_size (16, 1), record_type,
2663 size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));
2665 field_list = chainon (field_list,
2666 make_descriptor_field ("DTYPE",
2667 gnat_type_for_size (8, 1),
2668 record_type, size_int (dtype)));
2669 field_list = chainon (field_list,
2670 make_descriptor_field ("CLASS",
2671 gnat_type_for_size (8, 1),
2672 record_type, size_int (class)));
2674 /* Of course this will crash at run-time if the address space is not
2675 within the low 32 bits, but there is nothing else we can do. */
2676 pointer32_type = build_pointer_type_for_mode (type, SImode, false);
2679 = chainon (field_list,
2680 make_descriptor_field
2681 ("POINTER", pointer32_type, record_type,
2682 build_unary_op (ADDR_EXPR,
2684 build0 (PLACEHOLDER_EXPR, type))));
/* The remaining fields depend on the descriptor class. */
2689 case By_Descriptor_S:
2692 case By_Descriptor_SB:
2694 = chainon (field_list,
2695 make_descriptor_field
2696 ("SB_L1", gnat_type_for_size (32, 1), record_type,
2697 TREE_CODE (type) == ARRAY_TYPE
2698 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2700 = chainon (field_list,
2701 make_descriptor_field
2702 ("SB_U1", gnat_type_for_size (32, 1), record_type,
2703 TREE_CODE (type) == ARRAY_TYPE
2704 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2707 case By_Descriptor_A:
2708 case By_Descriptor_NCA:
2709 field_list = chainon (field_list,
2710 make_descriptor_field ("SCALE",
2711 gnat_type_for_size (8, 1),
2715 field_list = chainon (field_list,
2716 make_descriptor_field ("DIGITS",
2717 gnat_type_for_size (8, 1),
2722 = chainon (field_list,
2723 make_descriptor_field
2724 ("AFLAGS", gnat_type_for_size (8, 1), record_type,
2725 size_int (mech == By_Descriptor_NCA
2727 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2728 : (TREE_CODE (type) == ARRAY_TYPE
2729 && TYPE_CONVENTION_FORTRAN_P (type)
2732 field_list = chainon (field_list,
2733 make_descriptor_field ("DIMCT",
2734 gnat_type_for_size (8, 1),
2738 field_list = chainon (field_list,
2739 make_descriptor_field ("ARSIZE",
2740 gnat_type_for_size (32, 1),
2742 size_in_bytes (type)));
2744 /* Now build a pointer to the 0,0,0... element. */
2745 tem = build0 (PLACEHOLDER_EXPR, type);
2746 for (i = 0, inner_type = type; i < ndim;
2747 i++, inner_type = TREE_TYPE (inner_type))
2748 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
2749 convert (TYPE_DOMAIN (inner_type), size_zero_node),
2750 NULL_TREE, NULL_TREE);
2753 = chainon (field_list,
2754 make_descriptor_field
2756 build_pointer_type_for_mode (inner_type, SImode, false),
2759 build_pointer_type_for_mode (inner_type, SImode,
2763 /* Next come the addressing coefficients. */
/* Each coefficient is the product of the extents of the previous
   dimensions; field names are Sn for NCA, Mn otherwise. */
2764 tem = size_one_node;
2765 for (i = 0; i < ndim; i++)
2769 = size_binop (MULT_EXPR, tem,
2770 size_binop (PLUS_EXPR,
2771 size_binop (MINUS_EXPR,
2772 TYPE_MAX_VALUE (idx_arr[i]),
2773 TYPE_MIN_VALUE (idx_arr[i])),
2776 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
2777 fname[1] = '0' + i, fname[2] = 0;
2779 = chainon (field_list,
2780 make_descriptor_field (fname,
2781 gnat_type_for_size (32, 1),
2782 record_type, idx_length));
2784 if (mech == By_Descriptor_NCA)
2788 /* Finally here are the bounds. */
2789 for (i = 0; i < ndim; i++)
2793 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2795 = chainon (field_list,
2796 make_descriptor_field
2797 (fname, gnat_type_for_size (32, 1), record_type,
2798 TYPE_MIN_VALUE (idx_arr[i])));
2802 = chainon (field_list,
2803 make_descriptor_field
2804 (fname, gnat_type_for_size (32, 1), record_type,
2805 TYPE_MAX_VALUE (idx_arr[i])));
2810 post_error ("unsupported descriptor type for &", gnat_entity);
/* Wrap up the record and declare it with a "DESC"-suffixed name. */
2813 finish_record_type (record_type, field_list, 0, true);
2814 create_type_decl (create_concat_name (gnat_entity, "DESC"), record_type,
2815 NULL, true, false, gnat_entity);
2820 /* Utility routine for above code to make a field. */
2823 make_descriptor_field (const char *name, tree type,
2824 tree rec_type, tree initial)
2827 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
/* DECL_INITIAL carries the expression to use when a constructor is
   built for the descriptor type (see build_vms_descriptor). */
2829 DECL_INITIAL (field) = initial;
2833 /* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
2834 pointer or fat pointer type. GNAT_SUBPROG is the subprogram to which
2835 the VMS descriptor is passed. */
2838 convert_vms_descriptor (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
2840 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
2841 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
2842 /* The CLASS field is the 3rd field in the descriptor. */
2843 tree class = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type)));
2844 /* The POINTER field is the 4th field in the descriptor. */
2845 tree pointer = TREE_CHAIN (class);
2847 /* Retrieve the value of the POINTER field. */
2849 = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);
/* A thin pointer target: just convert the data pointer. */
2851 if (POINTER_TYPE_P (gnu_type))
2852 return convert (gnu_type, gnu_expr);
2854 else if (TYPE_FAT_POINTER_P (gnu_type))
2856 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
2857 tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
2858 tree template_type = TREE_TYPE (p_bounds_type);
2859 tree min_field = TYPE_FIELDS (template_type);
2860 tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
2861 tree template, template_addr, aflags, dimct, t, u;
2862 /* See the head comment of build_vms_descriptor. */
2863 int iclass = TREE_INT_CST_LOW (DECL_INITIAL (class));
2865 /* Convert POINTER to the type of the P_ARRAY field. */
2866 gnu_expr = convert (p_array_type, gnu_expr);
/* Build the template (bounds) part according to the descriptor class. */
2870 case 1: /* Class S */
2871 case 15: /* Class SB */
2872 /* Build {1, LENGTH} template; LENGTH is the 1st field. */
2873 t = TYPE_FIELDS (desc_type);
2874 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
2875 t = tree_cons (min_field,
2876 convert (TREE_TYPE (min_field), integer_one_node),
2877 tree_cons (max_field,
2878 convert (TREE_TYPE (max_field), t),
2880 template = gnat_build_constructor (template_type, t);
2881 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);
2883 /* For class S, we are done. */
2887 /* Test that we really have a SB descriptor, like DEC Ada. */
2888 t = build3 (COMPONENT_REF, TREE_TYPE (class), desc, class, NULL);
2889 u = convert (TREE_TYPE (class), DECL_INITIAL (class));
2890 u = build_binary_op (EQ_EXPR, integer_type_node, t, u);
2891 /* If so, there is already a template in the descriptor and
2892 it is located right after the POINTER field. */
2893 t = TREE_CHAIN (pointer);
2894 template = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
2895 /* Otherwise use the {1, LENGTH} template built above. */
2896 template_addr = build3 (COND_EXPR, p_bounds_type, u,
2897 build_unary_op (ADDR_EXPR, p_bounds_type,
2902 case 4: /* Class A */
2903 /* The AFLAGS field is the 7th field in the descriptor. */
2904 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer)));
2905 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
2906 /* The DIMCT field is the 8th field in the descriptor. */
2908 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
2909 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
2910 or FL_COEFF or FL_BOUNDS not set. */
/* 192 is the AFLAGS mask for the FL_COEFF and FL_BOUNDS bits. */
2911 u = build_int_cst (TREE_TYPE (aflags), 192);
2912 u = build_binary_op (TRUTH_OR_EXPR, integer_type_node,
2913 build_binary_op (NE_EXPR, integer_type_node,
2915 convert (TREE_TYPE (dimct),
2917 build_binary_op (NE_EXPR, integer_type_node,
2918 build2 (BIT_AND_EXPR,
2922 add_stmt (build3 (COND_EXPR, void_type_node, u,
2923 build_call_raise (CE_Length_Check_Failed, Empty,
2924 N_Raise_Constraint_Error),
2926 /* There is already a template in the descriptor and it is
2927 located at the start of block 3 (12th field). */
2928 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (t))));
2929 template = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
2930 template_addr = build_unary_op (ADDR_EXPR, p_bounds_type, template);
2933 case 10: /* Class NCA */
2935 post_error ("unsupported descriptor type for &", gnat_subprog);
2936 template_addr = integer_zero_node;
2940 /* Build the fat pointer in the form of a constructor. */
2941 t = tree_cons (TYPE_FIELDS (gnu_type), gnu_expr,
2942 tree_cons (TREE_CHAIN (TYPE_FIELDS (gnu_type)),
2943 template_addr, NULL_TREE));
2944 return gnat_build_constructor (gnu_type, t);
2951 /* Build a stub for the subprogram specified by the GCC tree GNU_SUBPROG
2952 and the GNAT node GNAT_SUBPROG. */
2955 build_function_stub (tree gnu_subprog, Entity_Id gnat_subprog)
2957 tree gnu_subprog_type, gnu_subprog_addr, gnu_subprog_call;
2958 tree gnu_stub_param, gnu_param_list, gnu_arg_types, gnu_param;
2959 tree gnu_stub_decl = DECL_FUNCTION_STUB (gnu_subprog);
2962 gnu_subprog_type = TREE_TYPE (gnu_subprog);
2963 gnu_param_list = NULL_TREE;
/* Open the stub's body and a statement group to collect the
   descriptor-conversion code. */
2965 begin_subprog_body (gnu_stub_decl);
2968 start_stmt_group ();
2970 /* Loop over the parameters of the stub and translate any of them
2971 passed by descriptor into a by reference one. */
2972 for (gnu_stub_param = DECL_ARGUMENTS (gnu_stub_decl),
2973 gnu_arg_types = TYPE_ARG_TYPES (gnu_subprog_type);
2975 gnu_stub_param = TREE_CHAIN (gnu_stub_param),
2976 gnu_arg_types = TREE_CHAIN (gnu_arg_types))
2978 if (DECL_BY_DESCRIPTOR_P (gnu_stub_param))
2979 gnu_param = convert_vms_descriptor (TREE_VALUE (gnu_arg_types),
2980 gnu_stub_param, gnat_subprog)
2982 gnu_param = gnu_stub_param;
/* Arguments are accumulated in reverse; nreverse'd before the call. */
2984 gnu_param_list = tree_cons (NULL_TREE, gnu_param, gnu_param_list);
2987 gnu_body = end_stmt_group ();
2989 /* Invoke the internal subprogram. */
2990 gnu_subprog_addr = build1 (ADDR_EXPR, build_pointer_type (gnu_subprog_type),
2992 gnu_subprog_call = build3 (CALL_EXPR, TREE_TYPE (gnu_subprog_type),
2993 gnu_subprog_addr, nreverse (gnu_param_list),
2996 /* Propagate the return value, if any. */
2997 if (VOID_TYPE_P (TREE_TYPE (gnu_subprog_type)))
2998 append_to_statement_list (gnu_subprog_call, &gnu_body);
3000 append_to_statement_list (build_return_expr (DECL_RESULT (gnu_stub_decl),
3006 allocate_struct_function (gnu_stub_decl);
3007 end_subprog_body (gnu_body);
3010 /* Build a type to be used to represent an aliased object whose nominal
3011 type is an unconstrained array. This consists of a RECORD_TYPE containing
3012 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
3013 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
3014 is used to represent an arbitrary unconstrained object. Use NAME
3015 as the name of the record. */
3018 build_unc_object_type (tree template_type, tree object_type, tree name)
3020 tree type = make_node (RECORD_TYPE);
/* Field "BOUNDS" holds the template, field "ARRAY" the data. */
3021 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
3022 template_type, type, 0, 0, 0, 1);
3023 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
3026 TYPE_NAME (type) = name;
3027 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
3028 finish_record_type (type,
3029 chainon (chainon (NULL_TREE, template_field),
3036 /* Same, taking a thin or fat pointer type instead of a template type. */
3039 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
3044 gcc_assert (TYPE_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
/* Extract the template type: for a fat pointer, from the second field
   (pointer to bounds); for a thin pointer, from the first field of the
   designated record. */
3047 = (TYPE_FAT_POINTER_P (thin_fat_ptr_type)
3048 ? TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
3049 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
3050 return build_unc_object_type (template_type, object_type, name);
3053 /* Shift the component offsets within an unconstrained object TYPE to make it
3054 suitable for use as a designated type for thin pointers. */
3057 shift_unc_components_for_thin_pointers (tree type)
3059 /* Thin pointer values designate the ARRAY data of an unconstrained object,
3060 allocated past the BOUNDS template. The designated type is adjusted to
3061 have ARRAY at position zero and the template at a negative offset, so
3062 that COMPONENT_REFs on (*thin_ptr) designate the proper location. */
3064 tree bounds_field = TYPE_FIELDS (type);
3065 tree array_field = TREE_CHAIN (TYPE_FIELDS (type));
/* BOUNDS moves to -sizeof(ARRAY-offset); ARRAY moves to offset zero. */
3067 DECL_FIELD_OFFSET (bounds_field)
3068 = size_binop (MINUS_EXPR, size_zero_node, byte_position (array_field));
3070 DECL_FIELD_OFFSET (array_field) = size_zero_node;
3071 DECL_FIELD_BIT_OFFSET (array_field) = bitsize_zero_node;
3074 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE. In
3075 the normal case this is just two adjustments, but we have more to do
3076 if NEW is an UNCONSTRAINED_ARRAY_TYPE. */
3079 update_pointer_to (tree old_type, tree new_type)
3081 tree ptr = TYPE_POINTER_TO (old_type);
3082 tree ref = TYPE_REFERENCE_TO (old_type);
3086 /* If this is the main variant, process all the other variants first. */
3087 if (TYPE_MAIN_VARIANT (old_type) == old_type)
3088 for (type = TYPE_NEXT_VARIANT (old_type); type;
3089 type = TYPE_NEXT_VARIANT (type))
3090 update_pointer_to (type, new_type);
3092 /* If no pointer or reference, we are done. */
3096 /* Merge the old type qualifiers in the new type.
3098 Each old variant has qualifiers for specific reasons, and the new
3099 designated type as well. Each set of qualifiers represents useful
3100 information grabbed at some point, and merging the two simply unifies
3101 these inputs into the final type description.
3103 Consider for instance a volatile type frozen after an access to constant
3104 type designating it. After the designated type freeze, we get here with a
3105 volatile new_type and a dummy old_type with a readonly variant, created
3106 when the access type was processed. We shall make a volatile and readonly
3107 designated type, because that's what it really is.
3109 We might also get here for a non-dummy old_type variant with different
3110 qualifiers than the new_type ones, for instance in some cases of pointers
3111 to private record type elaboration (see the comments around the call to
3112 this routine from gnat_to_gnu_entity/E_Access_Type). We have to merge the
3113 qualifiers in those cases too, to avoid accidentally discarding the
3114 initial set, and will often end up with old_type == new_type then. */
3115 new_type = build_qualified_type (new_type,
3116 TYPE_QUALS (old_type)
3117 | TYPE_QUALS (new_type));
3119 /* If the new type and the old one are identical, there is nothing to
3121 if (old_type == new_type)
3124 /* Otherwise, first handle the simple case. */
3125 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
3127 TYPE_POINTER_TO (new_type) = ptr;
3128 TYPE_REFERENCE_TO (new_type) = ref;
/* Retarget every pointer (and each of its variants) at the new type.  */
3130 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
3131 for (ptr1 = TYPE_MAIN_VARIANT (ptr); ptr1;
3132 ptr1 = TYPE_NEXT_VARIANT (ptr1))
3133 TREE_TYPE (ptr1) = new_type;
/* Likewise for every reference and its variants.  */
3135 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
3136 for (ref1 = TYPE_MAIN_VARIANT (ref); ref1;
3137 ref1 = TYPE_NEXT_VARIANT (ref1))
3138 TREE_TYPE (ref1) = new_type;
3141 /* Now deal with the unconstrained array case. In this case the "pointer"
3142 is actually a RECORD_TYPE where both fields are pointers to dummy nodes.
3143 Turn them into pointers to the correct types using update_pointer_to. */
3144 else if (TREE_CODE (ptr) != RECORD_TYPE || !TYPE_IS_FAT_POINTER_P (ptr))
3149 tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
3150 tree array_field = TYPE_FIELDS (ptr);
3151 tree bounds_field = TREE_CHAIN (TYPE_FIELDS (ptr));
3152 tree new_ptr = TYPE_POINTER_TO (new_type);
3156 /* Make pointers to the dummy template point to the real template. */
3158 (TREE_TYPE (TREE_TYPE (bounds_field)),
3159 TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_ptr)))));
3161 /* The references to the template bounds present in the array type
3162 are made through a PLACEHOLDER_EXPR of type new_ptr. Since we
3163 are updating ptr to make it a full replacement for new_ptr as
3164 pointer to new_type, we must rework the PLACEHOLDER_EXPR so as
3165 to make it of type ptr. */
3166 new_ref = build3 (COMPONENT_REF, TREE_TYPE (bounds_field),
3167 build0 (PLACEHOLDER_EXPR, ptr),
3168 bounds_field, NULL_TREE);
3170 /* Create the new array for the new PLACEHOLDER_EXPR and make
3171 pointers to the dummy array point to it.
3173 ??? This is now the only use of substitute_in_type,
3174 which is a very "heavy" routine to do this, so it
3175 should be replaced at some point. */
3177 (TREE_TYPE (TREE_TYPE (array_field)),
3178 substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))),
3179 TREE_CHAIN (TYPE_FIELDS (new_ptr)), new_ref));
3181 /* Make ptr the pointer to new_type. */
3182 TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
3183 = TREE_TYPE (new_type) = ptr;
3185 for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
3186 SET_TYPE_UNCONSTRAINED_ARRAY (var, new_type);
3188 /* Now handle updating the allocation record, what the thin pointer
3189 points to. Update all pointers from the old record into the new
3190 one, update the type of the array field, and recompute the size. */
3191 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);
3193 TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
3194 = TREE_TYPE (TREE_TYPE (array_field));
3196 /* The size recomputation needs to account for alignment constraints, so
3197 we let layout_type work it out. This will reset the field offsets to
3198 what they would be in a regular record, so we shift them back to what
3199 we want them to be for a thin pointer designated type afterwards. */
3200 DECL_SIZE (TYPE_FIELDS (new_obj_rec)) = 0;
3201 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))) = 0;
3202 TYPE_SIZE (new_obj_rec) = 0;
3203 layout_type (new_obj_rec);
3205 shift_unc_components_for_thin_pointers (new_obj_rec);
3207 /* We are done, at last. */
3208 rest_of_record_type_compilation (ptr);
3212 /* Convert a pointer to a constrained array into a pointer to a fat
3213 pointer. This involves making or finding a template. */
3216 convert_to_fat_pointer (tree type, tree expr)
/* The template type is the designated type of the second fat-pointer field.  */
3218 tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
3219 tree template, template_addr;
3220 tree etype = TREE_TYPE (expr);
3222 /* If EXPR is a constant of zero, we make a fat pointer that has a null
3223 pointer to the template and array. */
3224 if (integer_zerop (expr))
3226 gnat_build_constructor
3228 tree_cons (TYPE_FIELDS (type),
3229 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
3230 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3231 convert (build_pointer_type (template_type),
3235 /* If EXPR is a thin pointer, make the template and data from the record. */
3237 else if (TYPE_THIN_POINTER_P (etype))
3239 tree fields = TYPE_FIELDS (TREE_TYPE (etype));
3241 expr = save_expr (expr);
/* If we already have the address of an object, use it directly rather
   than dereferencing through the pointer.  */
3242 if (TREE_CODE (expr) == ADDR_EXPR)
3243 expr = TREE_OPERAND (expr, 0);
3245 expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
3247 template = build_component_ref (expr, NULL_TREE, fields, false);
3248 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
3249 build_component_ref (expr, NULL_TREE,
3250 TREE_CHAIN (fields), false));
3253 /* Otherwise, build the constructor for the template. */
3254 template = build_template (template_type, TREE_TYPE (etype), expr);
3256 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);
3258 /* The result is a CONSTRUCTOR for the fat pointer.
3260 If expr is an argument of a foreign convention subprogram, the type it
3261 points to is directly the component type. In this case, the expression
3262 type may not match the corresponding FIELD_DECL type at this point, so we
3263 call "convert" here to fix that up if necessary. This type consistency is
3264 required, for instance because it ensures that possible later folding of
3265 component_refs against this constructor always yields something of the
3266 same type as the initial reference.
3268 Note that the call to "build_template" above is still fine, because it
3269 will only refer to the provided template_type in this case. */
3271 gnat_build_constructor
3272 (type, tree_cons (TYPE_FIELDS (type),
3273 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
3274 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3275 template_addr, NULL_TREE)));
3278 /* Convert to a thin pointer type, TYPE. The only thing we know how to convert
3279 is something that is a fat pointer, so convert to it first if EXPR
3280 is not already a fat pointer. */
3283 convert_to_thin_pointer (tree type, tree expr)
3285 if (!TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
3287 = convert_to_fat_pointer
3288 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
3290 /* We get the pointer to the data and use a NOP_EXPR to make it the
/* The first fat-pointer field is the pointer to the array data.  */
3292 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)),
3294 expr = build1 (NOP_EXPR, type, expr);
3299 /* Create an expression whose value is that of EXPR,
3300 converted to type TYPE. The TREE_TYPE of the value
3301 is always TYPE. This function implements all reasonable
3302 conversions; callers should filter out those that are
3303 not permitted by the language being compiled. */
3306 convert (tree type, tree expr)
3308 enum tree_code code = TREE_CODE (type);
3309 tree etype = TREE_TYPE (expr);
3310 enum tree_code ecode = TREE_CODE (etype);
3312 /* If EXPR is already the right type, we are done. */
3316 /* If both input and output have padding and are of variable size, do this
3317 as an unchecked conversion. Likewise if one is a mere variant of the
3318 other, so we avoid a pointless unpad/repad sequence. */
3319 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
3320 && TYPE_IS_PADDING_P (type) && TYPE_IS_PADDING_P (etype)
3321 && (!TREE_CONSTANT (TYPE_SIZE (type))
3322 || !TREE_CONSTANT (TYPE_SIZE (etype))
3323 || TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)))
3326 /* If the output type has padding, make a constructor to build the
3328 else if (code == RECORD_TYPE && TYPE_IS_PADDING_P (type))
3330 /* If we previously converted from another type and our type is
3331 of variable size, remove the conversion to avoid the need for
3332 variable-size temporaries. */
3333 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3334 && !TREE_CONSTANT (TYPE_SIZE (type)))
3335 expr = TREE_OPERAND (expr, 0);
3337 /* If we are just removing the padding from expr, convert the original
3338 object if we have variable size. That will avoid the need
3339 for some variable-size temporaries. */
3340 if (TREE_CODE (expr) == COMPONENT_REF
3341 && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE
3342 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
3343 && !TREE_CONSTANT (TYPE_SIZE (type)))
3344 return convert (type, TREE_OPERAND (expr, 0));
3346 /* If the result type is a padded type with a self-referentially-sized
3347 field and the expression type is a record, do this as an
3348 unchecked conversion. */
3349 else if (TREE_CODE (etype) == RECORD_TYPE
3350 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
3351 return unchecked_convert (type, expr, false);
3355 gnat_build_constructor (type,
3356 tree_cons (TYPE_FIELDS (type),
3358 (TYPE_FIELDS (type)),
3363 /* If the input type has padding, remove it and convert to the output type.
3364 The conditions ordering is arranged to ensure that the output type is not
3365 a padding type here, as it is not clear whether the conversion would
3366 always be correct if this was to happen. */
3367 else if (ecode == RECORD_TYPE && TYPE_IS_PADDING_P (etype))
3371 /* If we have just converted to this padded type, just get the
3372 inner expression. */
3373 if (TREE_CODE (expr) == CONSTRUCTOR
3374 && !VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (expr))
3375 && VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->index
3376 == TYPE_FIELDS (etype))
3378 = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->value;
3380 /* Otherwise, build an explicit component reference. */
3383 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
3385 return convert (type, unpadded);
3388 /* If the input is a biased type, adjust first. */
3389 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
3390 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
3391 fold_convert (TREE_TYPE (etype),
3393 TYPE_MIN_VALUE (etype)));
3395 /* If the input is a justified modular type, we need to extract the actual
3396 object before converting it to any other type with the exceptions of an
3397 unconstrained array or of a mere type variant. It is useful to avoid the
3398 extraction and conversion in the type variant case because it could end
3399 up replacing a VAR_DECL expr by a constructor and we might be about the
3400 take the address of the result. */
3401 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
3402 && code != UNCONSTRAINED_ARRAY_TYPE
3403 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
3404 return convert (type, build_component_ref (expr, NULL_TREE,
3405 TYPE_FIELDS (etype), false));
3407 /* If converting to a type that contains a template, convert to the data
3408 type and then build the template. */
3409 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
3411 tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));
3413 /* If the source already has a template, get a reference to the
3414 associated array only, as we are going to rebuild a template
3415 for the target type anyway. */
3416 expr = maybe_unconstrained_array (expr);
3419 gnat_build_constructor
3421 tree_cons (TYPE_FIELDS (type),
3422 build_template (TREE_TYPE (TYPE_FIELDS (type)),
3423 obj_type, NULL_TREE),
3424 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3425 convert (obj_type, expr), NULL_TREE)));
3428 /* There are some special cases of expressions that we process
3430 switch (TREE_CODE (expr))
3436 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
3437 conversion in gnat_expand_expr. NULL_EXPR does not represent
3438 an actual value, so no conversion is needed. */
3439 expr = copy_node (expr);
3440 TREE_TYPE (expr) = type;
3444 /* If we are converting a STRING_CST to another constrained array type,
3445 just make a new one in the proper type. */
3446 if (code == ecode && AGGREGATE_TYPE_P (etype)
3447 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
3448 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
3450 expr = copy_node (expr);
3451 TREE_TYPE (expr) = type;
3457 /* If we are converting a CONSTRUCTOR to another constrained array type
3458 with the same domain, just make a new one in the proper type. */
3459 if (code == ecode && code == ARRAY_TYPE
3460 && TREE_TYPE (type) == TREE_TYPE (etype)
3461 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (type)),
3462 TYPE_MIN_VALUE (TYPE_DOMAIN (etype)))
3463 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (type)),
3464 TYPE_MAX_VALUE (TYPE_DOMAIN (etype))))
3466 expr = copy_node (expr);
3467 TREE_TYPE (expr) = type;
3472 case UNCONSTRAINED_ARRAY_REF:
3473 /* Convert this to the type of the inner array by getting the address of
3474 the array from the template. */
3475 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
3476 build_component_ref (TREE_OPERAND (expr, 0),
3477 get_identifier ("P_ARRAY"),
3479 etype = TREE_TYPE (expr);
3480 ecode = TREE_CODE (etype);
3483 case VIEW_CONVERT_EXPR:
3485 /* GCC 4.x is very sensitive to type consistency overall, and view
3486 conversions thus are very frequent. Even though just "convert"ing
3487 the inner operand to the output type is fine in most cases, it
3488 might expose unexpected input/output type mismatches in special
3489 circumstances so we avoid such recursive calls when we can. */
3491 tree op0 = TREE_OPERAND (expr, 0);
3493 /* If we are converting back to the original type, we can just
3494 lift the input conversion. This is a common occurrence with
3495 switches back-and-forth amongst type variants. */
3496 if (type == TREE_TYPE (op0))
3499 /* Otherwise, if we're converting between two aggregate types, we
3500 might be allowed to substitute the VIEW_CONVERT target type in
3501 place or to just convert the inner expression. */
3502 if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
3504 /* If we are converting between type variants, we can just
3505 substitute the VIEW_CONVERT in place. */
3506 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
3507 return build1 (VIEW_CONVERT_EXPR, type, op0);
3509 /* Otherwise, we may just bypass the input view conversion unless
3510 one of the types is a fat pointer, which is handled by
3511 specialized code below which relies on exact type matching. */
3512 else if (!TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
3513 return convert (type, op0);
3519 /* If both types are record types, just convert the pointer and
3520 make a new INDIRECT_REF.
3522 ??? Disable this for now since it causes problems with the
3523 code in build_binary_op for MODIFY_EXPR which wants to
3524 strip off conversions. But that code really is a mess and
3525 we need to do this a much better way some time. */
3527 && (TREE_CODE (type) == RECORD_TYPE
3528 || TREE_CODE (type) == UNION_TYPE)
3529 && (TREE_CODE (etype) == RECORD_TYPE
3530 || TREE_CODE (etype) == UNION_TYPE)
3531 && !TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
3532 return build_unary_op (INDIRECT_REF, NULL_TREE,
3533 convert (build_pointer_type (type),
3534 TREE_OPERAND (expr, 0)));
3541 /* Check for converting to a pointer to an unconstrained array. */
3542 if (TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
3543 return convert_to_fat_pointer (type, expr);
3545 /* If we're converting between two aggregate types that have the same main
3546 variant, just make a VIEW_CONVERT_EXPR. */
3547 else if (AGGREGATE_TYPE_P (type)
3548 && TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
3549 return build1 (VIEW_CONVERT_EXPR, type, expr);
3551 /* In all other cases of related types, make a NOP_EXPR. */
3552 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
3553 || (code == INTEGER_CST && ecode == INTEGER_CST
3554 && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
3555 return fold_convert (type, expr);
3560 return fold_build1 (CONVERT_EXPR, type, expr);
3563 return fold_convert (type, gnat_truthvalue_conversion (expr));
3566 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
3567 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
3568 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
3569 return unchecked_convert (type, expr, false);
3570 else if (TYPE_BIASED_REPRESENTATION_P (type))
/* Converting to a biased type: subtract the bias from the value.  */
3571 return fold_convert (type,
3572 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
3573 convert (TREE_TYPE (type), expr),
3574 TYPE_MIN_VALUE (type)));
3576 /* ... fall through ... */
3579 return fold (convert_to_integer (type, expr));
3582 case REFERENCE_TYPE:
3583 /* If converting between two pointers to records denoting
3584 both a template and type, adjust if needed to account
3585 for any differing offsets, since one might be negative. */
3586 if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
3589 = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
3590 bit_position (TYPE_FIELDS (TREE_TYPE (type))));
3591 tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
3592 sbitsize_int (BITS_PER_UNIT));
3594 expr = build1 (NOP_EXPR, type, expr);
3595 TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
3596 if (integer_zerop (byte_diff))
3599 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
3600 fold (convert (sizetype, byte_diff)));
3603 /* If converting to a thin pointer, handle specially. */
3604 if (TYPE_THIN_POINTER_P (type)
3605 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
3606 return convert_to_thin_pointer (type, expr);
3608 /* If converting fat pointer to normal pointer, get the pointer to the
3609 array and then convert it. */
3610 else if (TYPE_FAT_POINTER_P (etype))
3611 expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
3614 return fold (convert_to_pointer (type, expr));
3617 return fold (convert_to_real (type, expr));
3620 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
3622 gnat_build_constructor
3623 (type, tree_cons (TYPE_FIELDS (type),
3624 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
3627 /* ... fall through ... */
3630 /* In these cases, assume the front-end has validated the conversion.
3631 If the conversion is valid, it will be a bit-wise conversion, so
3632 it can be viewed as an unchecked conversion. */
3633 return unchecked_convert (type, expr, false);
3636 /* This is either a conversion between a tagged type and some
3637 subtype, which we have to mark as a UNION_TYPE because of
3638 overlapping fields or a conversion of an Unchecked_Union. */
3639 return unchecked_convert (type, expr, false);
3641 case UNCONSTRAINED_ARRAY_TYPE:
3642 /* If EXPR is a constrained array, take its address, convert it to a
3643 fat pointer, and then dereference it. Likewise if EXPR is a
3644 record containing both a template and a constrained array.
3645 Note that a record representing a justified modular type
3646 always represents a packed constrained array. */
3647 if (ecode == ARRAY_TYPE
3648 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
3649 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
3650 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
3653 (INDIRECT_REF, NULL_TREE,
3654 convert_to_fat_pointer (TREE_TYPE (type),
3655 build_unary_op (ADDR_EXPR,
3658 /* Do something very similar for converting one unconstrained
3659 array to another. */
3660 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
3662 build_unary_op (INDIRECT_REF, NULL_TREE,
3663 convert (TREE_TYPE (type),
3664 build_unary_op (ADDR_EXPR,
3670 return fold (convert_to_complex (type, expr));
3677 /* Remove all conversions that are done in EXP. This includes converting
3678 from a padded type or to a justified modular type. If TRUE_ADDRESS
3679 is true, always return the address of the containing object even if
3680 the address is not bit-aligned. */
3683 remove_conversions (tree exp, bool true_address)
3685 switch (TREE_CODE (exp))
/* A constructor for a justified modular type wraps the real object in
   its single field; unwrap that field's value.  */
3689 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3690 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
3692 remove_conversions (VEC_index (constructor_elt,
3693 CONSTRUCTOR_ELTS (exp), 0)->value,
/* A COMPONENT_REF into a padding record is just an unpadding step.  */
3698 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
3699 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
3700 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3703 case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
3704 case NOP_EXPR: case CONVERT_EXPR:
3705 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3714 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
3715 refers to the underlying array. If its type has TYPE_CONTAINS_TEMPLATE_P,
3716 likewise return an expression pointing to the underlying array. */
3719 maybe_unconstrained_array (tree exp)
3721 enum tree_code code = TREE_CODE (exp);
3724 switch (TREE_CODE (TREE_TYPE (exp)))
3726 case UNCONSTRAINED_ARRAY_TYPE:
3727 if (code == UNCONSTRAINED_ARRAY_REF)
/* Dereference the P_ARRAY pointer of the fat pointer to reach the data.  */
3730 = build_unary_op (INDIRECT_REF, NULL_TREE,
3731 build_component_ref (TREE_OPERAND (exp, 0),
3732 get_identifier ("P_ARRAY"),
3734 TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp);
3738 else if (code == NULL_EXPR)
3739 return build1 (NULL_EXPR,
3740 TREE_TYPE (TREE_TYPE (TYPE_FIELDS
3741 (TREE_TYPE (TREE_TYPE (exp))))),
3742 TREE_OPERAND (exp, 0));
3745 /* If this is a padded type, convert to the unpadded type and see if
3746 it contains a template. */
3747 if (TYPE_IS_PADDING_P (TREE_TYPE (exp)))
3749 new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp))), exp);
3750 if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
3751 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (new)))
3753 build_component_ref (new, NULL_TREE,
3754 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (new))),
3757 else if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
3759 build_component_ref (exp, NULL_TREE,
3760 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))), 0);
3770 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
3771 If NOTRUNC_P is true, truncation operations should be suppressed. */
3774 unchecked_convert (tree type, tree expr, bool notrunc_p)
3776 tree etype = TREE_TYPE (expr);
3778 /* If the expression is already the right type, we are done. */
3782 /* If both types are integral just do a normal conversion.
3783 Likewise for a conversion to an unconstrained array. */
3784 if ((((INTEGRAL_TYPE_P (type)
3785 && !(TREE_CODE (type) == INTEGER_TYPE
3786 && TYPE_VAX_FLOATING_POINT_P (type)))
3787 || (POINTER_TYPE_P (type) && ! TYPE_THIN_POINTER_P (type))
3788 || (TREE_CODE (type) == RECORD_TYPE
3789 && TYPE_JUSTIFIED_MODULAR_P (type)))
3790 && ((INTEGRAL_TYPE_P (etype)
3791 && !(TREE_CODE (etype) == INTEGER_TYPE
3792 && TYPE_VAX_FLOATING_POINT_P (etype)))
3793 || (POINTER_TYPE_P (etype) && !TYPE_THIN_POINTER_P (etype))
3794 || (TREE_CODE (etype) == RECORD_TYPE
3795 && TYPE_JUSTIFIED_MODULAR_P (etype))))
3796 || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3799 bool final_unchecked = false;
/* Strip a biased representation on the input by copying the type with
   the bias flag cleared, so the conversion math is done unbiased.  */
3801 if (TREE_CODE (etype) == INTEGER_TYPE
3802 && TYPE_BIASED_REPRESENTATION_P (etype))
3804 tree ntype = copy_type (etype);
3806 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
3807 TYPE_MAIN_VARIANT (ntype) = ntype;
3808 expr = build1 (NOP_EXPR, ntype, expr);
/* Likewise strip a biased representation on the output type.  */
3811 if (TREE_CODE (type) == INTEGER_TYPE
3812 && TYPE_BIASED_REPRESENTATION_P (type))
3814 rtype = copy_type (type);
3815 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
3816 TYPE_MAIN_VARIANT (rtype) = rtype;
3819 /* We have another special case. If we are unchecked converting subtype
3820 into a base type, we need to ensure that VRP doesn't propagate range
3821 information since this conversion may be done precisely to validate
3822 that the object is within the range it is supposed to have. */
3823 else if (TREE_CODE (expr) != INTEGER_CST
3824 && TREE_CODE (type) == INTEGER_TYPE && !TREE_TYPE (type)
3825 && ((TREE_CODE (etype) == INTEGER_TYPE && TREE_TYPE (etype))
3826 || TREE_CODE (etype) == ENUMERAL_TYPE
3827 || TREE_CODE (etype) == BOOLEAN_TYPE))
3829 /* ??? The pattern to be "preserved" by the middle-end and the
3830 optimizers is a VIEW_CONVERT_EXPR between a pair of different
3831 "base" types (integer types without TREE_TYPE). But this may
3832 raise addressability/aliasing issues because VIEW_CONVERT_EXPR
3833 gets gimplified as an lvalue, thus causing the address of its
3834 operand to be taken if it is deemed addressable and not already
3836 rtype = gnat_type_for_mode (TYPE_MODE (type), TYPE_UNSIGNED (type));
3840 rtype = copy_type (rtype);
3841 TYPE_MAIN_VARIANT (rtype) = rtype;
3844 final_unchecked = true;
3847 expr = convert (rtype, expr);
3849 expr = build1 (final_unchecked ? VIEW_CONVERT_EXPR : NOP_EXPR,
3853 /* If we are converting TO an integral type whose precision is not the
3854 same as its size, first unchecked convert to a record that contains
3855 an object of the output type. Then extract the field. */
3856 else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
3857 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
3858 GET_MODE_BITSIZE (TYPE_MODE (type))))
3860 tree rec_type = make_node (RECORD_TYPE);
3861 tree field = create_field_decl (get_identifier ("OBJ"), type,
3862 rec_type, 1, 0, 0, 0);
3864 TYPE_FIELDS (rec_type) = field;
3865 layout_type (rec_type);
3867 expr = unchecked_convert (rec_type, expr, notrunc_p);
3868 expr = build_component_ref (expr, NULL_TREE, field, 0);
3871 /* Similarly for integral input type whose precision is not equal to its
3873 else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype)
3874 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
3875 GET_MODE_BITSIZE (TYPE_MODE (etype))))
3877 tree rec_type = make_node (RECORD_TYPE);
3879 = create_field_decl (get_identifier ("OBJ"), etype, rec_type,
3882 TYPE_FIELDS (rec_type) = field;
3883 layout_type (rec_type);
3885 expr = gnat_build_constructor (rec_type, build_tree_list (field, expr));
3886 expr = unchecked_convert (type, expr, notrunc_p);
3889 /* We have a special case when we are converting between two
3890 unconstrained array types. In that case, take the address,
3891 convert the fat pointer types, and dereference. */
3892 else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
3893 && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3894 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
3895 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
3896 build_unary_op (ADDR_EXPR, NULL_TREE,
3900 expr = maybe_unconstrained_array (expr);
3902 /* There's no point in doing two unchecked conversions in a row. */
3903 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3904 expr = TREE_OPERAND (expr, 0);
3906 etype = TREE_TYPE (expr);
3907 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
3910 /* If the result is an integral type whose size is not equal to
3911 the size of the underlying machine type, sign- or zero-extend
3912 the result. We need not do this in the case where the input is
3913 an integral type of the same precision and signedness or if the output
3914 is a biased type or if both the input and output are unsigned. */
3916 && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
3917 && !(TREE_CODE (type) == INTEGER_TYPE
3918 && TYPE_BIASED_REPRESENTATION_P (type))
3919 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
3920 GET_MODE_BITSIZE (TYPE_MODE (type)))
3921 && !(INTEGRAL_TYPE_P (etype)
3922 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
3923 && operand_equal_p (TYPE_RM_SIZE (type),
3924 (TYPE_RM_SIZE (etype) != 0
3925 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
3927 && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
3929 tree base_type = gnat_type_for_mode (TYPE_MODE (type),
3930 TYPE_UNSIGNED (type));
/* Shift count: the number of padding bits above the RM size.  */
3932 = convert (base_type,
3933 size_binop (MINUS_EXPR,
3935 (GET_MODE_BITSIZE (TYPE_MODE (type))),
3936 TYPE_RM_SIZE (type)));
/* Shift left then right to sign- or zero-extend within base_type.  */
3939 build_binary_op (RSHIFT_EXPR, base_type,
3940 build_binary_op (LSHIFT_EXPR, base_type,
3941 convert (base_type, expr),
3946 /* An unchecked conversion should never raise Constraint_Error. The code
3947 below assumes that GCC's conversion routines overflow the same way that
3948 the underlying hardware does. This is probably true. In the rare case
3949 when it is false, we can rely on the fact that such conversions are
3950 erroneous anyway. */
3951 if (TREE_CODE (expr) == INTEGER_CST)
3952 TREE_OVERFLOW (expr) = 0;
3954 /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
3955 show no longer constant. */
3956 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3957 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
3959 TREE_CONSTANT (expr) = 0;
3964 /* Search the chain of currently available builtin declarations for a node
3965 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
3966 found, if any, or NULL_TREE otherwise. */
3968 builtin_decl_for (tree name)
/* Linear scan of the builtin_decls vector; identifiers are unique nodes,
   so pointer comparison of DECL_NAME suffices.  */
3973 for (i = 0; VEC_iterate(tree, builtin_decls, i, decl); i++)
3974 if (DECL_NAME (decl) == name)
3980 /* Return the appropriate GCC tree code for the specified GNAT type,
3981 the latter being a record type as predicated by Is_Record_Type. */
3984 tree_code_for_record_type (Entity_Id gnat_type)
3986 Node_Id component_list
3987 = Component_List (Type_Definition
3989 (Implementation_Base_Type (gnat_type))));
3992 /* Make this a UNION_TYPE unless it's either not an Unchecked_Union or
3993 we have a non-discriminant field outside a variant. In either case,
3994 it's a RECORD_TYPE. */
3996 if (!Is_Unchecked_Union (gnat_type))
/* Scan the non-pragma component items for a regular component, which
   forces RECORD_TYPE even for an Unchecked_Union.  */
3999 for (component = First_Non_Pragma (Component_Items (component_list));
4000 Present (component);
4001 component = Next_Non_Pragma (component))
4002 if (Ekind (Defining_Entity (component)) == E_Component)
4008 /* Perform final processing on global variables. */
4011 gnat_write_global_declarations (void)
4013 /* Proceed to optimize and emit assembly.
4014 FIXME: shouldn't be the front end's responsibility to call this. */
4017 /* Emit debug info for all global declarations. */
4018 emit_debug_global_declarations (VEC_address (tree, global_decls),
4019 VEC_length (tree, global_decls));
4022 #include "gt-ada-utils.h"
4023 #include "gtype-ada.h"