1 /****************************************************************************
3 * GNAT COMPILER COMPONENTS *
7 * C Implementation File *
9 * Copyright (C) 1992-2007, Free Software Foundation, Inc. *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
24 ****************************************************************************/
28 #include "coretypes.h"
41 #include "tree-inline.h"
42 #include "tree-gimple.h"
43 #include "tree-dump.h"
44 #include "pointer-set.h"
/* Provide default values for target macros that the target headers may
   not define.  */
#ifndef MAX_FIXED_MODE_SIZE
#define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
#endif

#ifndef MAX_BITS_PER_WORD
#define MAX_BITS_PER_WORD  BITS_PER_WORD
#endif
68 /* If nonzero, pretend we are allocating at global level. */
71 /* Tree nodes for the various types and decls we create. */
72 tree gnat_std_decls[(int) ADT_LAST];
74 /* Functions to call for each of the possible raise reasons. */
75 tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
77 /* Forward declarations for handlers of attributes. */
78 static tree handle_const_attribute (tree *, tree, tree, int, bool *);
79 static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
81 /* Table of machine-independent internal attributes for Ada. We support
82 this minimal set of attributes to accommodate the Alpha back-end which
83 unconditionally puts them on its builtins. */
84 const struct attribute_spec gnat_internal_attribute_table[] =
86 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
87 { "const", 0, 0, true, false, false, handle_const_attribute },
88 { "nothrow", 0, 0, true, false, false, handle_nothrow_attribute },
89 { NULL, 0, 0, false, false, false, NULL }
92 /* Associates a GNAT tree node to a GCC tree node. It is used in
93 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
94 of `save_gnu_tree' for more info. */
95 static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
97 #define GET_GNU_TREE(GNAT_ENTITY) \
98 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
100 #define SET_GNU_TREE(GNAT_ENTITY,VAL) \
101 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
103 #define PRESENT_GNU_TREE(GNAT_ENTITY) \
104 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
106 /* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
107 static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;
109 #define GET_DUMMY_NODE(GNAT_ENTITY) \
110 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
112 #define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
113 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
115 #define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
116 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
118 /* This variable keeps a table for types for each precision so that we only
119 allocate each of them once. Signed and unsigned types are kept separate.
121 Note that these types are only used when fold-const requests something
122 special. Perhaps we should NOT share these types; we'll see how it
124 static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
126 /* Likewise for float types, but record these by mode. */
127 static GTY(()) tree float_types[NUM_MACHINE_MODES];
129 /* For each binding contour we allocate a binding_level structure to indicate
130 the binding depth. */
132 struct gnat_binding_level GTY((chain_next ("%h.chain")))
134 /* The binding level containing this one (the enclosing binding level). */
135 struct gnat_binding_level *chain;
136 /* The BLOCK node for this level. */
138 /* If nonzero, the setjmp buffer that needs to be updated for any
139 variable-sized definition within this context. */
143 /* The binding level currently in effect. */
144 static GTY(()) struct gnat_binding_level *current_binding_level;
146 /* A chain of gnat_binding_level structures awaiting reuse. */
147 static GTY((deletable)) struct gnat_binding_level *free_binding_level;
149 /* An array of global declarations. */
150 static GTY(()) VEC(tree,gc) *global_decls;
152 /* An array of builtin declarations. */
153 static GTY(()) VEC(tree,gc) *builtin_decls;
155 /* An array of global renaming pointers. */
156 static GTY(()) VEC(tree,gc) *global_renaming_pointers;
158 /* A chain of unused BLOCK nodes. */
159 static GTY((deletable)) tree free_block_chain;
161 static void gnat_install_builtins (void);
162 static tree merge_sizes (tree, tree, tree, bool, bool);
163 static tree compute_related_constant (tree, tree);
164 static tree split_plus (tree, tree *);
165 static void gnat_gimplify_function (tree);
166 static tree float_type_for_precision (int, enum machine_mode);
167 static tree convert_to_fat_pointer (tree, tree);
168 static tree convert_to_thin_pointer (tree, tree);
169 static tree make_descriptor_field (const char *,tree, tree, tree);
170 static bool potential_alignment_gap (tree, tree, tree);
172 /* Initialize the association of GNAT nodes to GCC trees. */
175 init_gnat_to_gnu (void)
177 associate_gnat_to_gnu
178 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
181 /* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
182 which is to be associated with GNAT_ENTITY. Such GCC tree node is always
183 a ..._DECL node. If NO_CHECK is nonzero, the latter check is suppressed.
185 If GNU_DECL is zero, a previous association is to be reset. */
188 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
190 /* Check that GNAT_ENTITY is not already defined and that it is being set
191 to something which is a decl. Raise gigi 401 if not. Usually, this
192 means GNAT_ENTITY is defined twice, but occasionally is due to some
194 gcc_assert (!(gnu_decl
195 && (PRESENT_GNU_TREE (gnat_entity)
196 || (!no_check && !DECL_P (gnu_decl)))));
198 SET_GNU_TREE (gnat_entity, gnu_decl);
201 /* GNAT_ENTITY is a GNAT tree node for a defining identifier.
202 Return the ..._DECL node that was associated with it. If there is no tree
203 node associated with GNAT_ENTITY, abort.
205 In some cases, such as delayed elaboration or expressions that need to
206 be elaborated only once, GNAT_ENTITY is really not an entity. */
209 get_gnu_tree (Entity_Id gnat_entity)
211 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
212 return GET_GNU_TREE (gnat_entity);
215 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
218 present_gnu_tree (Entity_Id gnat_entity)
220 return PRESENT_GNU_TREE (gnat_entity);
223 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
226 init_dummy_type (void)
229 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
232 /* Make a dummy type corresponding to GNAT_TYPE. */
235 make_dummy_type (Entity_Id gnat_type)
237 Entity_Id gnat_underlying = Gigi_Equivalent_Type (gnat_type);
240 /* If there is an equivalent type, get its underlying type. */
241 if (Present (gnat_underlying))
242 gnat_underlying = Underlying_Type (gnat_underlying);
244 /* If there was no equivalent type (can only happen when just annotating
245 types) or underlying type, go back to the original type. */
246 if (No (gnat_underlying))
247 gnat_underlying = gnat_type;
249 /* If it there already a dummy type, use that one. Else make one. */
250 if (PRESENT_DUMMY_NODE (gnat_underlying))
251 return GET_DUMMY_NODE (gnat_underlying);
253 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
255 gnu_type = make_node (Is_Record_Type (gnat_underlying)
256 ? tree_code_for_record_type (gnat_underlying)
258 TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
259 TYPE_DUMMY_P (gnu_type) = 1;
260 if (AGGREGATE_TYPE_P (gnu_type))
262 TYPE_STUB_DECL (gnu_type) = build_decl (TYPE_DECL, NULL_TREE, gnu_type);
263 TYPE_BY_REFERENCE_P (gnu_type) = Is_By_Reference_Type (gnat_type);
266 SET_DUMMY_NODE (gnat_underlying, gnu_type);
271 /* Return nonzero if we are currently in the global binding level. */
274 global_bindings_p (void)
276 return ((force_global || !current_function_decl) ? -1 : 0);
279 /* Enter a new binding level. */
284 struct gnat_binding_level *newlevel = NULL;
286 /* Reuse a struct for this binding level, if there is one. */
287 if (free_binding_level)
289 newlevel = free_binding_level;
290 free_binding_level = free_binding_level->chain;
294 = (struct gnat_binding_level *)
295 ggc_alloc (sizeof (struct gnat_binding_level));
297 /* Use a free BLOCK, if any; otherwise, allocate one. */
298 if (free_block_chain)
300 newlevel->block = free_block_chain;
301 free_block_chain = TREE_CHAIN (free_block_chain);
302 TREE_CHAIN (newlevel->block) = NULL_TREE;
305 newlevel->block = make_node (BLOCK);
307 /* Point the BLOCK we just made to its parent. */
308 if (current_binding_level)
309 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
311 BLOCK_VARS (newlevel->block) = BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
312 TREE_USED (newlevel->block) = 1;
314 /* Add this level to the front of the chain (stack) of levels that are
316 newlevel->chain = current_binding_level;
317 newlevel->jmpbuf_decl = NULL_TREE;
318 current_binding_level = newlevel;
321 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
322 and point FNDECL to this BLOCK. */
325 set_current_block_context (tree fndecl)
327 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
328 DECL_INITIAL (fndecl) = current_binding_level->block;
331 /* Set the jmpbuf_decl for the current binding level to DECL. */
334 set_block_jmpbuf_decl (tree decl)
336 current_binding_level->jmpbuf_decl = decl;
339 /* Get the jmpbuf_decl, if any, for the current binding level. */
342 get_block_jmpbuf_decl ()
344 return current_binding_level->jmpbuf_decl;
347 /* Exit a binding level. Set any BLOCK into the current code group. */
352 struct gnat_binding_level *level = current_binding_level;
353 tree block = level->block;
355 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
356 BLOCK_SUBBLOCKS (block) = nreverse (BLOCK_SUBBLOCKS (block));
358 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
359 are no variables free the block and merge its subblocks into those of its
360 parent block. Otherwise, add it to the list of its parent. */
361 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
363 else if (BLOCK_VARS (block) == NULL_TREE)
365 BLOCK_SUBBLOCKS (level->chain->block)
366 = chainon (BLOCK_SUBBLOCKS (block),
367 BLOCK_SUBBLOCKS (level->chain->block));
368 TREE_CHAIN (block) = free_block_chain;
369 free_block_chain = block;
373 TREE_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
374 BLOCK_SUBBLOCKS (level->chain->block) = block;
375 TREE_USED (block) = 1;
376 set_block_for_group (block);
379 /* Free this binding structure. */
380 current_binding_level = level->chain;
381 level->chain = free_binding_level;
382 free_binding_level = level;
385 /* Insert BLOCK at the end of the list of subblocks of the
386 current binding level. This is used when a BIND_EXPR is expanded,
387 to handle the BLOCK node inside the BIND_EXPR. */
390 insert_block (tree block)
392 TREE_USED (block) = 1;
393 TREE_CHAIN (block) = BLOCK_SUBBLOCKS (current_binding_level->block);
394 BLOCK_SUBBLOCKS (current_binding_level->block) = block;
397 /* Records a ..._DECL node DECL as belonging to the current lexical scope
398 and uses GNAT_NODE for location information and propagating flags. */
401 gnat_pushdecl (tree decl, Node_Id gnat_node)
403 /* If at top level, there is no context. But PARM_DECLs always go in the
404 level of its function. */
405 if (global_bindings_p () && TREE_CODE (decl) != PARM_DECL)
406 DECL_CONTEXT (decl) = 0;
409 DECL_CONTEXT (decl) = current_function_decl;
411 /* Functions imported in another function are not really nested. */
412 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_PUBLIC (decl))
413 DECL_NO_STATIC_CHAIN (decl) = 1;
416 TREE_NO_WARNING (decl) = (gnat_node == Empty || Warnings_Off (gnat_node));
418 /* Set the location of DECL and emit a declaration for it. */
419 if (Present (gnat_node))
420 Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
421 add_decl_expr (decl, gnat_node);
423 /* Put the declaration on the list. The list of declarations is in reverse
424 order. The list will be reversed later. Put global variables in the
425 globals list and builtin functions in a dedicated list to speed up
426 further lookups. Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into
427 the list, as they will cause trouble with the debugger and aren't needed
429 if (TREE_CODE (decl) != TYPE_DECL
430 || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
432 if (global_bindings_p ())
434 VEC_safe_push (tree, gc, global_decls, decl);
436 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
437 VEC_safe_push (tree, gc, builtin_decls, decl);
441 TREE_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
442 BLOCK_VARS (current_binding_level->block) = decl;
446 /* For the declaration of a type, set its name if it either is not already
447 set, was set to an IDENTIFIER_NODE, indicating an internal name,
448 or if the previous type name was not derived from a source name.
449 We'd rather have the type named with a real name and all the pointer
450 types to the same object have the same POINTER_TYPE node. Code in the
451 equivalent function of c-decl.c makes a copy of the type node here, but
452 that may cause us trouble with incomplete types. We make an exception
453 for fat pointer types because the compiler automatically builds them
454 for unconstrained array types and the debugger uses them to represent
455 both these and pointers to these. */
456 if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
458 tree t = TREE_TYPE (decl);
460 if (!TYPE_NAME (t) || TREE_CODE (TYPE_NAME (t)) == IDENTIFIER_NODE)
461 TYPE_NAME (t) = decl;
462 else if (TYPE_FAT_POINTER_P (t))
464 tree tt = build_variant_type_copy (t);
465 TYPE_NAME (tt) = decl;
466 TREE_USED (tt) = TREE_USED (t);
467 TREE_TYPE (decl) = tt;
468 DECL_ORIGINAL_TYPE (decl) = t;
470 else if (DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
471 TYPE_NAME (t) = decl;
475 /* Do little here. Set up the standard declarations later after the
476 front end has been run. */
479 gnat_init_decl_processing (void)
483 /* Make the binding_level structure for global names. */
484 current_function_decl = 0;
485 current_binding_level = 0;
486 free_binding_level = 0;
489 build_common_tree_nodes (true, true);
491 /* In Ada, we use a signed type for SIZETYPE. Use the signed type
492 corresponding to the size of Pmode. In most cases when ptr_mode and
493 Pmode differ, C will use the width of ptr_mode as sizetype. But we get
494 far better code using the width of Pmode. Make this here since we need
495 this before we can expand the GNAT types. */
496 size_type_node = gnat_type_for_size (GET_MODE_BITSIZE (Pmode), 0);
497 set_sizetype (size_type_node);
498 build_common_tree_nodes_2 (0);
500 /* Give names and make TYPE_DECLs for common types. */
501 create_type_decl (get_identifier (SIZE_TYPE), sizetype,
502 NULL, false, true, Empty);
503 create_type_decl (get_identifier ("integer"), integer_type_node,
504 NULL, false, true, Empty);
505 create_type_decl (get_identifier ("unsigned char"), char_type_node,
506 NULL, false, true, Empty);
507 create_type_decl (get_identifier ("long integer"), long_integer_type_node,
508 NULL, false, true, Empty);
510 ptr_void_type_node = build_pointer_type (void_type_node);
512 gnat_install_builtins ();
515 /* Install the builtin functions we might need. */
518 gnat_install_builtins ()
520 /* Builtins used by generic middle-end optimizers. */
521 build_common_builtin_nodes ();
523 /* Target specific builtins, such as the AltiVec family on ppc. */
524 targetm.init_builtins ();
527 /* Create the predefined scalar types such as `integer_type_node' needed
528 in the gcc back-end and initialize the global binding level. */
/* NOTE(review): this numbered listing is gapped -- the return type line,
   the local declarations (e.g. the loop index and scratch decls) and
   several argument-list continuation lines fall in the elided ranges and
   are not visible here; the visible lines are kept byte-identical.  */
531 init_gigi_decls (tree long_long_float_type, tree exception_type)
536 /* Set the types that GCC and Gigi use from the front end. We would like
537 to do this for char_type_node, but it needs to correspond to the C
539 if (TREE_CODE (TREE_TYPE (long_long_float_type)) == INTEGER_TYPE)
541 /* In this case, the builtin floating point types are VAX float,
542 so make up a type for use. */
543 longest_float_type_node = make_node (REAL_TYPE);
544 TYPE_PRECISION (longest_float_type_node) = LONG_DOUBLE_TYPE_SIZE;
545 layout_type (longest_float_type_node);
546 create_type_decl (get_identifier ("longest float type"),
547 longest_float_type_node, NULL, false, true, Empty);
550 longest_float_type_node = TREE_TYPE (long_long_float_type);
552 except_type_node = TREE_TYPE (exception_type);
554 unsigned_type_node = gnat_type_for_size (INT_TYPE_SIZE, 1);
555 create_type_decl (get_identifier ("unsigned int"), unsigned_type_node,
556 NULL, false, true, Empty);
558 void_type_decl_node = create_type_decl (get_identifier ("void"),
559 void_type_node, NULL, false, true,
562 void_ftype = build_function_type (void_type_node, NULL_TREE);
563 ptr_void_ftype = build_pointer_type (void_ftype);
565 /* Now declare runtime functions. */
/* endlink terminates the tree_cons argument chains built below.  */
566 endlink = tree_cons (NULL_TREE, void_type_node, NULL_TREE);
568 /* malloc is a function declaration tree for a function to allocate
570 malloc_decl = create_subprog_decl (get_identifier ("__gnat_malloc"),
572 build_function_type (ptr_void_type_node,
573 tree_cons (NULL_TREE,
576 NULL_TREE, false, true, true, NULL,
/* DECL_IS_MALLOC lets alias analysis know the result points to fresh
   storage.  */
578 DECL_IS_MALLOC (malloc_decl) = 1;
580 /* free is a function declaration tree for a function to free memory. */
582 = create_subprog_decl (get_identifier ("__gnat_free"), NULL_TREE,
583 build_function_type (void_type_node,
584 tree_cons (NULL_TREE,
587 NULL_TREE, false, true, true, NULL, Empty);
589 /* Make the types and functions used for exception processing. */
591 = build_array_type (gnat_type_for_mode (Pmode, 0),
592 build_index_type (build_int_cst (NULL_TREE, 5)));
593 create_type_decl (get_identifier ("JMPBUF_T"), jmpbuf_type, NULL,
595 jmpbuf_ptr_type = build_pointer_type (jmpbuf_type);
597 /* Functions to get and set the jumpbuf pointer for the current thread. */
599 = create_subprog_decl
600 (get_identifier ("system__soft_links__get_jmpbuf_address_soft"),
601 NULL_TREE, build_function_type (jmpbuf_ptr_type, NULL_TREE),
602 NULL_TREE, false, true, true, NULL, Empty);
603 /* Avoid creating superfluous edges to __builtin_setjmp receivers. */
604 DECL_IS_PURE (get_jmpbuf_decl) = 1;
607 = create_subprog_decl
608 (get_identifier ("system__soft_links__set_jmpbuf_address_soft"),
610 build_function_type (void_type_node,
611 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
612 NULL_TREE, false, true, true, NULL, Empty);
614 /* Function to get the current exception. */
616 = create_subprog_decl
617 (get_identifier ("system__soft_links__get_gnat_exception"),
619 build_function_type (build_pointer_type (except_type_node), NULL_TREE),
620 NULL_TREE, false, true, true, NULL, Empty);
621 /* Avoid creating superfluous edges to __builtin_setjmp receivers. */
622 DECL_IS_PURE (get_excptr_decl) = 1;
624 /* Functions that raise exceptions. */
626 = create_subprog_decl
627 (get_identifier ("__gnat_raise_nodefer_with_msg"), NULL_TREE,
628 build_function_type (void_type_node,
629 tree_cons (NULL_TREE,
630 build_pointer_type (except_type_node),
632 NULL_TREE, false, true, true, NULL, Empty);
634 /* Dummy objects to materialize "others" and "all others" in the exception
635 tables. These are exported by a-exexpr.adb, so see this unit for the
639 = create_var_decl (get_identifier ("OTHERS"),
640 get_identifier ("__gnat_others_value"),
641 integer_type_node, 0, 1, 0, 1, 1, 0, Empty);
644 = create_var_decl (get_identifier ("ALL_OTHERS"),
645 get_identifier ("__gnat_all_others_value"),
646 integer_type_node, 0, 1, 0, 1, 1, 0, Empty);
648 /* Hooks to call when entering/leaving an exception handler. */
650 = create_subprog_decl (get_identifier ("__gnat_begin_handler"), NULL_TREE,
651 build_function_type (void_type_node,
652 tree_cons (NULL_TREE,
655 NULL_TREE, false, true, true, NULL, Empty);
658 = create_subprog_decl (get_identifier ("__gnat_end_handler"), NULL_TREE,
659 build_function_type (void_type_node,
660 tree_cons (NULL_TREE,
663 NULL_TREE, false, true, true, NULL, Empty);
665 /* If in no exception handlers mode, all raise statements are redirected to
666 __gnat_last_chance_handler. No need to redefine raise_nodefer_decl, since
667 this procedure will never be called in this mode. */
668 if (No_Exception_Handlers_Set ())
671 = create_subprog_decl
672 (get_identifier ("__gnat_last_chance_handler"), NULL_TREE,
673 build_function_type (void_type_node,
674 tree_cons (NULL_TREE,
675 build_pointer_type (char_type_node),
676 tree_cons (NULL_TREE,
679 NULL_TREE, false, true, true, NULL, Empty);
/* Every raise reason shares the single last-chance handler decl.  */
681 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
682 gnat_raise_decls[i] = decl;
685 /* Otherwise, make one decl for each exception reason. */
686 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
/* One __gnat_rcheck_NN runtime-check routine per exception reason code.  */
690 sprintf (name, "__gnat_rcheck_%.2d", i);
692 = create_subprog_decl
693 (get_identifier (name), NULL_TREE,
694 build_function_type (void_type_node,
695 tree_cons (NULL_TREE,
698 tree_cons (NULL_TREE,
701 NULL_TREE, false, true, true, NULL, Empty);
704 /* Indicate that these never return. */
705 TREE_THIS_VOLATILE (raise_nodefer_decl) = 1;
706 TREE_SIDE_EFFECTS (raise_nodefer_decl) = 1;
707 TREE_TYPE (raise_nodefer_decl)
708 = build_qualified_type (TREE_TYPE (raise_nodefer_decl),
711 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
713 TREE_THIS_VOLATILE (gnat_raise_decls[i]) = 1;
714 TREE_SIDE_EFFECTS (gnat_raise_decls[i]) = 1;
715 TREE_TYPE (gnat_raise_decls[i])
716 = build_qualified_type (TREE_TYPE (gnat_raise_decls[i]),
720 /* setjmp returns an integer and has one operand, which is a pointer to
723 = create_subprog_decl
724 (get_identifier ("__builtin_setjmp"), NULL_TREE,
725 build_function_type (integer_type_node,
726 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
727 NULL_TREE, false, true, true, NULL, Empty);
/* Mark the decl as the real BUILT_IN_SETJMP builtin so the middle end
   expands it specially.  */
729 DECL_BUILT_IN_CLASS (setjmp_decl) = BUILT_IN_NORMAL;
730 DECL_FUNCTION_CODE (setjmp_decl) = BUILT_IN_SETJMP;
732 /* update_setjmp_buf updates a setjmp buffer from the current stack pointer
734 update_setjmp_buf_decl
735 = create_subprog_decl
736 (get_identifier ("__builtin_update_setjmp_buf"), NULL_TREE,
737 build_function_type (void_type_node,
738 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
739 NULL_TREE, false, true, true, NULL, Empty);
741 DECL_BUILT_IN_CLASS (update_setjmp_buf_decl) = BUILT_IN_NORMAL;
742 DECL_FUNCTION_CODE (update_setjmp_buf_decl) = BUILT_IN_UPDATE_SETJMP_BUF;
744 main_identifier_node = get_identifier ("main");
747 /* Given a record type RECORD_TYPE and a chain of FIELD_DECL nodes FIELDLIST,
748 finish constructing the record or union type. If REP_LEVEL is zero, this
749 record has no representation clause and so will be entirely laid out here.
750 If REP_LEVEL is one, this record has a representation clause and has been
751 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
752 this record is derived from a parent record and thus inherits its layout;
753 only make a pass on the fields to finalize them. If DO_NOT_FINALIZE is
754 true, the record type is expected to be modified afterwards so it will
755 not be sent to the back-end for finalization. */
/* NOTE(review): this numbered listing is gapped -- the return type line,
   local declarations, several braces and the skeleton of the per-kind
   switch over `code' fall in the elided ranges; the visible lines are
   kept byte-identical.  */
758 finish_record_type (tree record_type, tree fieldlist, int rep_level,
759 bool do_not_finalize)
761 enum tree_code code = TREE_CODE (record_type);
762 tree ada_size = bitsize_zero_node;
763 tree size = bitsize_zero_node;
764 bool var_size = false;
/* Remember whether sizes were already set so they can be preserved by the
   rep'ed paths below.  */
765 bool had_size = TYPE_SIZE (record_type) != 0;
766 bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
769 TYPE_FIELDS (record_type) = fieldlist;
770 TYPE_STUB_DECL (record_type)
771 = build_decl (TYPE_DECL, TYPE_NAME (record_type), record_type);
773 /* We don't need both the typedef name and the record name output in
774 the debugging information, since they are the same. */
775 DECL_ARTIFICIAL (TYPE_STUB_DECL (record_type)) = 1;
777 /* Globally initialize the record first. If this is a rep'ed record,
778 that just means some initializations; otherwise, layout the record. */
781 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
782 TYPE_MODE (record_type) = BLKmode;
785 TYPE_SIZE_UNIT (record_type) = size_zero_node;
787 TYPE_SIZE (record_type) = bitsize_zero_node;
789 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
790 out just like a UNION_TYPE, since the size will be fixed. */
791 else if (code == QUAL_UNION_TYPE)
796 /* Ensure there isn't a size already set. There can be in an error
797 case where there is a rep clause but all fields have errors and
798 no longer have a position. */
799 TYPE_SIZE (record_type) = 0;
800 layout_type (record_type);
803 /* At this point, the position and size of each field is known. It was
804 either set before entry by a rep clause, or by laying out the type above.
806 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
807 to compute the Ada size; the GCC size and alignment (for rep'ed records
808 that are not padding types); and the mode (for rep'ed records). We also
809 clear the DECL_BIT_FIELD indication for the cases we know have not been
810 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
812 if (code == QUAL_UNION_TYPE)
813 fieldlist = nreverse (fieldlist);
815 for (field = fieldlist; field; field = TREE_CHAIN (field))
817 tree pos = bit_position (field);
819 tree type = TREE_TYPE (field);
820 tree this_size = DECL_SIZE (field);
821 tree this_ada_size = DECL_SIZE (field);
823 /* We need to make an XVE/XVU record if any field has variable size,
824 whether or not the record does. For example, if we have a union,
825 it may be that all fields, rounded up to the alignment, have the
826 same size, in which case we'll use that size. But the debug
827 output routines (except Dwarf2) won't be able to output the fields,
828 so we need to make the special record. */
829 if (TREE_CODE (this_size) != INTEGER_CST)
/* For a composite field with a known Ada size, use that size rather
   than the GCC DECL_SIZE when accumulating the record's Ada size.  */
832 if ((TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
833 || TREE_CODE (type) == QUAL_UNION_TYPE)
834 && !TYPE_IS_FAT_POINTER_P (type)
835 && !TYPE_CONTAINS_TEMPLATE_P (type)
836 && TYPE_ADA_SIZE (type))
837 this_ada_size = TYPE_ADA_SIZE (type);
839 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
840 if (DECL_BIT_FIELD (field) && !STRICT_ALIGNMENT
841 && value_factor_p (pos, BITS_PER_UNIT)
842 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
843 DECL_BIT_FIELD (field) = 0;
845 /* If we still have DECL_BIT_FIELD set at this point, we know the field
846 is technically not addressable. Except that it can actually be
847 addressed if the field is BLKmode and happens to be properly
849 DECL_NONADDRESSABLE_P (field)
850 |= DECL_BIT_FIELD (field) && DECL_MODE (field) != BLKmode;
/* With a rep clause (rep_level > 0) the record's alignment is
   accumulated here from the non-bitfield members.  */
852 if ((rep_level > 0) && !DECL_BIT_FIELD (field))
853 TYPE_ALIGN (record_type)
854 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
/* UNION_TYPE arm of the elided switch: the size is the max over all
   members.  */
859 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
860 size = size_binop (MAX_EXPR, size, this_size);
863 case QUAL_UNION_TYPE:
865 = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
866 this_ada_size, ada_size);
867 size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
872 /* Since we know here that all fields are sorted in order of
873 increasing bit position, the size of the record is one
874 higher than the ending bit of the last field processed
875 unless we have a rep clause, since in that case we might
876 have a field outside a QUAL_UNION_TYPE that has a higher ending
877 position. So use a MAX in that case. Also, if this field is a
878 QUAL_UNION_TYPE, we need to take into account the previous size in
879 the case of empty variants. */
881 = merge_sizes (ada_size, pos, this_ada_size,
882 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
884 = merge_sizes (size, pos, this_size,
885 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
/* Restore the original field order reversed before the loop.  */
893 if (code == QUAL_UNION_TYPE)
894 nreverse (fieldlist);
898 /* If this is a padding record, we never want to make the size smaller
899 than what was specified in it, if any. */
900 if (TREE_CODE (record_type) == RECORD_TYPE
901 && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
902 size = TYPE_SIZE (record_type);
904 /* Now set any of the values we've just computed that apply. */
905 if (!TYPE_IS_FAT_POINTER_P (record_type)
906 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
907 SET_TYPE_ADA_SIZE (record_type, ada_size);
911 tree size_unit = had_size_unit
912 ? TYPE_SIZE_UNIT (record_type)
914 size_binop (CEIL_DIV_EXPR, size,
916 unsigned int align = TYPE_ALIGN (record_type);
918 TYPE_SIZE (record_type) = variable_size (round_up (size, align));
919 TYPE_SIZE_UNIT (record_type)
920 = variable_size (round_up (size_unit, align / BITS_PER_UNIT))
922 compute_record_mode (record_type);
926 if (!do_not_finalize)
927 rest_of_record_type_compilation (record_type);
930 /* Wrap up compilation of RECORD_TYPE, i.e. most notably output all
931 the debug information associated with it. It need not be invoked
932 directly in most cases since finish_record_type takes care of doing
933 so, unless explicitly requested not to through DO_NOT_FINALIZE. */
936 rest_of_record_type_compilation (tree record_type)
938 tree fieldlist = TYPE_FIELDS (record_type);
940 enum tree_code code = TREE_CODE (record_type);
941 bool var_size = false;
943 for (field = fieldlist; field; field = TREE_CHAIN (field))
945 /* We need to make an XVE/XVU record if any field has variable size,
946 whether or not the record does. For example, if we have a union,
947 it may be that all fields, rounded up to the alignment, have the
948 same size, in which case we'll use that size. But the debug
949 output routines (except Dwarf2) won't be able to output the fields,
950 so we need to make the special record. */
951 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
952 /* If a field has a non-constant qualifier, the record will have
953 variable size too. */
954 || (code == QUAL_UNION_TYPE
955 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
962 /* If this record is of variable size, rename it so that the
963 debugger knows it is and make a new, parallel, record
964 that tells the debugger how the record is laid out. See
965 exp_dbug.ads. But don't do this for records that are padding
966 since they confuse GDB. */
968 && !(TREE_CODE (record_type) == RECORD_TYPE
969 && TYPE_IS_PADDING_P (record_type)))
972 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
973 ? UNION_TYPE : TREE_CODE (record_type));
974 tree orig_name = TYPE_NAME (record_type);
976 = (TREE_CODE (orig_name) == TYPE_DECL ? DECL_NAME (orig_name)
979 = concat_id_with_name (orig_id,
980 TREE_CODE (record_type) == QUAL_UNION_TYPE
982 tree last_pos = bitsize_zero_node;
984 tree prev_old_field = 0;
986 TYPE_NAME (new_record_type) = new_id;
987 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
988 TYPE_STUB_DECL (new_record_type)
989 = build_decl (TYPE_DECL, new_id, new_record_type);
990 DECL_ARTIFICIAL (TYPE_STUB_DECL (new_record_type)) = 1;
991 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
992 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
993 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
994 TYPE_SIZE_UNIT (new_record_type)
995 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
997 /* Now scan all the fields, replacing each field with a new
998 field corresponding to the new encoding. */
999 for (old_field = TYPE_FIELDS (record_type); old_field;
1000 old_field = TREE_CHAIN (old_field))
1002 tree field_type = TREE_TYPE (old_field);
1003 tree field_name = DECL_NAME (old_field);
1005 tree curpos = bit_position (old_field);
1007 unsigned int align = 0;
1010 /* See how the position was modified from the last position.
1012 There are two basic cases we support: a value was added
1013 to the last position or the last position was rounded to
1014 a boundary and they something was added. Check for the
1015 first case first. If not, see if there is any evidence
1016 of rounding. If so, round the last position and try
1019 If this is a union, the position can be taken as zero. */
1021 if (TREE_CODE (new_record_type) == UNION_TYPE)
1022 pos = bitsize_zero_node, align = 0;
1024 pos = compute_related_constant (curpos, last_pos);
1026 if (!pos && TREE_CODE (curpos) == MULT_EXPR
1027 && host_integerp (TREE_OPERAND (curpos, 1), 1))
1029 tree offset = TREE_OPERAND (curpos, 0);
1030 align = tree_low_cst (TREE_OPERAND (curpos, 1), 1);
1032 /* Strip off any conversions. */
1033 while (TREE_CODE (offset) == NON_LVALUE_EXPR
1034 || TREE_CODE (offset) == NOP_EXPR
1035 || TREE_CODE (offset) == CONVERT_EXPR)
1036 offset = TREE_OPERAND (offset, 0);
1038 /* An offset which is a bitwise AND with a negative power of 2
1039 means an alignment corresponding to this power of 2. */
1040 if (TREE_CODE (offset) == BIT_AND_EXPR
1041 && host_integerp (TREE_OPERAND (offset, 1), 0)
1042 && tree_int_cst_sgn (TREE_OPERAND (offset, 1)) < 0)
1045 = - tree_low_cst (TREE_OPERAND (offset, 1), 0);
1046 if (exact_log2 (pow) > 0)
1050 pos = compute_related_constant (curpos,
1051 round_up (last_pos, align));
1053 else if (!pos && TREE_CODE (curpos) == PLUS_EXPR
1054 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
1055 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
1056 && host_integerp (TREE_OPERAND
1057 (TREE_OPERAND (curpos, 0), 1),
1062 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
1063 pos = compute_related_constant (curpos,
1064 round_up (last_pos, align));
1066 else if (potential_alignment_gap (prev_old_field, old_field,
1069 align = TYPE_ALIGN (field_type);
1070 pos = compute_related_constant (curpos,
1071 round_up (last_pos, align));
1074 /* If we can't compute a position, set it to zero.
1076 ??? We really should abort here, but it's too much work
1077 to get this correct for all cases. */
1080 pos = bitsize_zero_node;
1082 /* See if this type is variable-sized and make a pointer type
1083 and indicate the indirection if so. Beware that the debug
1084 back-end may adjust the position computed above according
1085 to the alignment of the field type, i.e. the pointer type
1086 in this case, if we don't preventively counter that. */
1087 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
1089 field_type = build_pointer_type (field_type);
1090 if (align != 0 && TYPE_ALIGN (field_type) > align)
1092 field_type = copy_node (field_type);
1093 TYPE_ALIGN (field_type) = align;
1098 /* Make a new field name, if necessary. */
1099 if (var || align != 0)
1104 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
1105 align / BITS_PER_UNIT);
1107 strcpy (suffix, "XVL");
1109 field_name = concat_id_with_name (field_name, suffix);
1112 new_field = create_field_decl (field_name, field_type,
1114 DECL_SIZE (old_field), pos, 0);
1115 TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
1116 TYPE_FIELDS (new_record_type) = new_field;
1118 /* If old_field is a QUAL_UNION_TYPE, take its size as being
1119 zero. The only time it's not the last field of the record
1120 is when there are other components at fixed positions after
1121 it (meaning there was a rep clause for every field) and we
1122 want to be able to encode them. */
1123 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
1124 (TREE_CODE (TREE_TYPE (old_field))
1127 : DECL_SIZE (old_field));
1128 prev_old_field = old_field;
1131 TYPE_FIELDS (new_record_type)
1132 = nreverse (TYPE_FIELDS (new_record_type));
1134 rest_of_type_decl_compilation (TYPE_STUB_DECL (new_record_type));
1137 rest_of_type_decl_compilation (TYPE_STUB_DECL (record_type));
1140 /* Utility function of above to merge LAST_SIZE, the previous size of a record
1141 with FIRST_BIT and SIZE that describe a field. SPECIAL is nonzero
1142 if this represents a QUAL_UNION_TYPE in which case we must look for
1143 COND_EXPRs and replace a value of zero with the old size. If HAS_REP
1144 is nonzero, we must take the MAX of the end position of this field
1145 with LAST_SIZE. In all other cases, we use FIRST_BIT plus SIZE.
1147 We return an expression for the size. */
1150 merge_sizes (tree last_size, tree first_bit, tree size, bool special,
1153 tree type = TREE_TYPE (last_size);
/* Plain case: the new size is the end position of this field, MAXed
   with the previous size when a rep clause is present.  */
1156 if (!special || TREE_CODE (size) != COND_EXPR)
1158 new = size_binop (PLUS_EXPR, first_bit, size);
1160 new = size_binop (MAX_EXPR, last_size, new);
/* QUAL_UNION_TYPE variant: recurse into both arms of the COND_EXPR,
   keeping the previous size for any arm that is zero.  */
1164 new = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
1165 integer_zerop (TREE_OPERAND (size, 1))
1166 ? last_size : merge_sizes (last_size, first_bit,
1167 TREE_OPERAND (size, 1),
1169 integer_zerop (TREE_OPERAND (size, 2))
1170 ? last_size : merge_sizes (last_size, first_bit,
1171 TREE_OPERAND (size, 2),
1174 /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
1175 when fed through substitute_in_expr) into thinking that a constant
1176 size is not constant. */
1177 while (TREE_CODE (new) == NON_LVALUE_EXPR)
1178 new = TREE_OPERAND (new, 0);
1183 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1184 related by the addition of a constant. Return that constant if so. */
1187 compute_related_constant (tree op0, tree op1)
1189 tree op0_var, op1_var;
/* Split each operand into constant + variable parts, then compare the
   variable parts: if they match, OP0 - OP1 is the constant difference.  */
1190 tree op0_con = split_plus (op0, &op0_var);
1191 tree op1_con = split_plus (op1, &op1_var);
1192 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1194 if (operand_equal_p (op0_var, op1_var, 0))
/* Second chance: check the recombined form directly.  */
1196 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1202 /* Utility function of above to split a tree OP which may be a sum, into a
1203 constant part, which is returned, and a variable part, which is stored
1204 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1208 split_plus (tree in, tree *pvar)
1210 /* Strip NOPS in order to ease the tree traversal and maximize the
1211 potential for constant or plus/minus discovery. We need to be careful
1212 to always return and set *pvar to bitsizetype trees, but it's worth
/* Default: the whole expression is the variable part, constant is zero.  */
1216 *pvar = convert (bitsizetype, in);
1218 if (TREE_CODE (in) == INTEGER_CST)
1220 *pvar = bitsize_zero_node;
1221 return convert (bitsizetype, in);
1223 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
/* Recurse on both operands and recombine constant and variable parts
   with the same PLUS/MINUS operation.  */
1225 tree lhs_var, rhs_var;
1226 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1227 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
1229 if (lhs_var == TREE_OPERAND (in, 0)
1230 && rhs_var == TREE_OPERAND (in, 1))
1231 return bitsize_zero_node;
1233 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1234 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
1237 return bitsize_zero_node;
1240 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1241 subprogram. If it is void_type_node, then we are dealing with a procedure,
1242 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1243 PARM_DECL nodes that are the subprogram arguments. CICO_LIST is the
1244 copy-in/copy-out list to be stored into TYPE_CICO_LIST.
1245 RETURNS_UNCONSTRAINED is nonzero if the function returns an unconstrained
1246 object. RETURNS_BY_REF is nonzero if the function returns by reference.
1247 RETURNS_WITH_DSP is nonzero if the function is to return with a
1248 depressed stack pointer. RETURNS_BY_TARGET_PTR is true if the function
1249 is to be passed (as its first parameter) the address of the place to copy
1253 create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
1254 bool returns_unconstrained, bool returns_by_ref,
1255 bool returns_with_dsp, bool returns_by_target_ptr)
1257 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1258 the subprogram formal parameters. This list is generated by traversing the
1259 input list of PARM_DECL nodes. */
1260 tree param_type_list = NULL;
1264 for (param_decl = param_decl_list; param_decl;
1265 param_decl = TREE_CHAIN (param_decl))
1266 param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
1269 /* The list of the function parameter types has to be terminated by the void
1270 type to signal to the back-end that we are not dealing with a variable
1271 parameter subprogram, but that the subprogram has a fixed number of
1273 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1275 /* The list of argument types has been created in reverse
1277 param_type_list = nreverse (param_type_list);
1279 type = build_function_type (return_type, param_type_list);
1281 /* TYPE may have been shared since GCC hashes types. If it has a CICO_LIST
1282 or the new type should, make a copy of TYPE. Likewise for
1283 RETURNS_UNCONSTRAINED and RETURNS_BY_REF. */
1284 if (TYPE_CI_CO_LIST (type) || cico_list
1285 || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
1286 || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref
1287 || TYPE_RETURNS_BY_TARGET_PTR_P (type) != returns_by_target_ptr)
1288 type = copy_type (type);
/* Record the Ada-specific calling-convention flags on the (possibly
   copied) FUNCTION_TYPE.  */
1290 TYPE_CI_CO_LIST (type) = cico_list;
1291 TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
1292 TYPE_RETURNS_STACK_DEPRESSED (type) = returns_with_dsp;
1293 TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
1294 TYPE_RETURNS_BY_TARGET_PTR_P (type) = returns_by_target_ptr;
1298 /* Return a copy of TYPE but safe to modify in any way. */
1301 copy_type (tree type)
1303 tree new = copy_node (type);
1305 /* copy_node clears this field instead of copying it, because it is
1306 aliased with TREE_CHAIN. */
1307 TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type);
/* Detach the copy from the original's pointer/reference caches and make
   it its own main variant so later modifications cannot leak back.  */
1309 TYPE_POINTER_TO (new) = 0;
1310 TYPE_REFERENCE_TO (new) = 0;
1311 TYPE_MAIN_VARIANT (new) = new;
1312 TYPE_NEXT_VARIANT (new) = 0;
1317 /* Return an INTEGER_TYPE of SIZETYPE with range MIN to MAX and whose
1318 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position of
1322 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
1324 /* First build a type for the desired range. */
1325 tree type = build_index_2_type (min, max);
1327 /* If this type has the TYPE_INDEX_TYPE we want, return it. Otherwise, if it
1328 doesn't have TYPE_INDEX_TYPE set, set it to INDEX. If TYPE_INDEX_TYPE
1329 is set, but not to INDEX, make a copy of this type with the requested
1330 index type. Note that we have no way of sharing these types, but that's
1331 only a small hole. */
1332 if (TYPE_INDEX_TYPE (type) == index)
1334 else if (TYPE_INDEX_TYPE (type))
1335 type = copy_type (type);
1337 SET_TYPE_INDEX_TYPE (type, index);
/* Emit a TYPE_DECL so debug information is generated for the new type.  */
1338 create_type_decl (NULL_TREE, type, NULL, true, false, gnat_node);
1342 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type (a character
1343 string) and TYPE is a ..._TYPE node giving its data type.
1344 ARTIFICIAL_P is true if this is a declaration that was generated
1345 by the compiler. DEBUG_INFO_P is true if we need to write debugging
1346 information about this type. GNAT_NODE is used for the position of
1350 create_type_decl (tree type_name, tree type, struct attrib *attr_list,
1351 bool artificial_p, bool debug_info_p, Node_Id gnat_node)
1353 tree type_decl = build_decl (TYPE_DECL, type_name, type);
1354 enum tree_code code = TREE_CODE (type);
1356 DECL_ARTIFICIAL (type_decl) = artificial_p;
/* Dummy types are placeholders; don't enter their decl in the current
   binding level.  */
1358 if (!TYPE_IS_DUMMY_P (type))
1359 gnat_pushdecl (type_decl, gnat_node);
1361 process_attributes (type_decl, attr_list);
1363 /* Pass type declaration information to the debugger unless this is an
1364 UNCONSTRAINED_ARRAY_TYPE, which the debugger does not support,
1365 and ENUMERAL_TYPE or RECORD_TYPE which is handled separately, or
1366 type for which debugging information was not requested. */
1367 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
1368 DECL_IGNORED_P (type_decl) = 1;
1369 else if (code != ENUMERAL_TYPE
1370 && (code != RECORD_TYPE || TYPE_IS_FAT_POINTER_P (type))
1371 && !((code == POINTER_TYPE || code == REFERENCE_TYPE)
1372 && TYPE_IS_DUMMY_P (TREE_TYPE (type))))
1373 rest_of_type_decl_compilation (type_decl);
1378 /* Helper for create_var_decl and create_true_var_decl. Returns a GCC VAR_DECL
1381 VAR_NAME gives the name of the variable. ASM_NAME is its assembler name
1382 (if provided). TYPE is its data type (a GCC ..._TYPE node). VAR_INIT is
1383 the GCC tree for an optional initial expression; NULL_TREE if none.
1385 CONST_FLAG is true if this variable is constant, in which case we might
1386 return a CONST_DECL node unless CONST_DECL_ALLOWED_FLAG is false.
1388 PUBLIC_FLAG is true if this definition is to be made visible outside of
1389 the current compilation unit. This flag should be set when processing the
1390 variable definitions in a package specification. EXTERN_FLAG is nonzero
1391 when processing an external variable declaration (as opposed to a
1392 definition: no storage is to be allocated for the variable here).
1394 STATIC_FLAG is only relevant when not at top level. In that case
1395 it indicates whether to always allocate storage to the variable.
1397 GNAT_NODE is used for the position of the decl. */
1400 create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
1401 bool const_flag, bool const_decl_allowed_flag,
1402 bool public_flag, bool extern_flag, bool static_flag,
1403 struct attrib *attr_list, Node_Id gnat_node)
/* Whether the initializer counts as constant: at global/static level it
   must be valid for static initialization; otherwise TREE_CONSTANT
   suffices.  (NOTE(review): surrounding lines of this condition are
   elided in this listing.)  */
1407 && TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
1408 && (global_bindings_p () || static_flag
1409 ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init)) != 0
1410 : TREE_CONSTANT (var_init)));
1412 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
1413 case the initializer may be used in-lieu of the DECL node (as done in
1414 Identifier_to_gnu). This is useful to prevent the need of elaboration
1415 code when an identifier for which such a decl is made is in turn used as
1416 an initializer. We used to rely on CONST vs VAR_DECL for this purpose,
1417 but extra constraints apply to this choice (see below) and are not
1418 relevant to the distinction we wish to make. */
1419 bool constant_p = const_flag && init_const;
1421 /* The actual DECL node. CONST_DECL was initially intended for enumerals
1422 and may be used for scalars in general but not for aggregates. */
1424 = build_decl ((constant_p && const_decl_allowed_flag
1425 && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
1428 /* If this is external, throw away any initializations (they will be done
1429 elsewhere) unless this is a constant for which we would like to remain
1430 able to get the initializer. If we are defining a global here, leave a
1431 constant initialization and save any variable elaborations for the
1432 elaboration routine. If we are just annotating types, throw away the
1433 initialization if it isn't a constant. */
1434 if ((extern_flag && !constant_p)
1435 || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
1436 var_init = NULL_TREE;
1438 /* At the global level, an initializer requiring code to be generated
1439 produces elaboration statements. Check that such statements are allowed,
1440 that is, not violating a No_Elaboration_Code restriction. */
1441 if (global_bindings_p () && var_init != 0 && ! init_const)
1442 Check_Elaboration_Code_Allowed (gnat_node);
1444 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
1445 try to fiddle with DECL_COMMON. However, on platforms that don't
1446 support global BSS sections, uninitialized global variables would
1447 go in DATA instead, thus increasing the size of the executable. */
1449 && TREE_CODE (var_decl) == VAR_DECL
1450 && !have_global_bss_p ())
1451 DECL_COMMON (var_decl) = 1;
1452 DECL_INITIAL (var_decl) = var_init;
1453 TREE_READONLY (var_decl) = const_flag;
1454 DECL_EXTERNAL (var_decl) = extern_flag;
1455 TREE_PUBLIC (var_decl) = public_flag || extern_flag;
1456 TREE_CONSTANT (var_decl) = constant_p;
1457 TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
1458 = TYPE_VOLATILE (type);
1460 /* If it's public and not external, always allocate storage for it.
1461 At the global binding level we need to allocate static storage for the
1462 variable if and only if it's not external. If we are not at the top level
1463 we allocate automatic storage unless requested not to. */
1464 TREE_STATIC (var_decl)
1465 = public_flag || (global_bindings_p () ? !extern_flag : static_flag);
1467 if (asm_name && VAR_OR_FUNCTION_DECL_P (var_decl))
1468 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
1470 process_attributes (var_decl, attr_list);
1472 /* Add this decl to the current binding level. */
1473 gnat_pushdecl (var_decl, gnat_node);
1475 if (TREE_SIDE_EFFECTS (var_decl))
1476 TREE_ADDRESSABLE (var_decl) = 1;
/* CONST_DECLs generate no code; only VAR_DECLs need back-end output.  */
1478 if (TREE_CODE (var_decl) != CONST_DECL)
1479 rest_of_decl_compilation (var_decl, global_bindings_p (), 0);
1481 expand_decl (var_decl);
1486 /* Wrapper around create_var_decl_1 for cases where we don't care whether
1487 a VAR or a CONST decl node is created. */
1490 create_var_decl (tree var_name, tree asm_name, tree type, tree var_init,
1491 bool const_flag, bool public_flag, bool extern_flag,
1492 bool static_flag, struct attrib *attr_list,
/* Forward everything; CONST_DECL_ALLOWED_FLAG is passed true here
   (the elided argument), letting the helper pick CONST vs VAR.  */
1495 return create_var_decl_1 (var_name, asm_name, type, var_init,
1497 public_flag, extern_flag, static_flag,
1498 attr_list, gnat_node);
1501 /* Wrapper around create_var_decl_1 for cases where a VAR_DECL node is
1502 required. The primary intent is for DECL_CONST_CORRESPONDING_VARs, which
1503 must be VAR_DECLs and on which we want TREE_READONLY set to have them
1504 possibly assigned to a readonly data section. */
1507 create_true_var_decl (tree var_name, tree asm_name, tree type, tree var_init,
1508 bool const_flag, bool public_flag, bool extern_flag,
1509 bool static_flag, struct attrib *attr_list,
/* Same as create_var_decl but with CONST_DECL_ALLOWED_FLAG false
   (the elided argument), forcing a VAR_DECL result.  */
1512 return create_var_decl_1 (var_name, asm_name, type, var_init,
1514 public_flag, extern_flag, static_flag,
1515 attr_list, gnat_node);
1518 /* Returns a FIELD_DECL node. FIELD_NAME the field name, FIELD_TYPE is its
1519 type, and RECORD_TYPE is the type of the parent. PACKED is nonzero if
1520 this field is in a record type with a "pragma pack". If SIZE is nonzero
1521 it is the specified size for this field. If POS is nonzero, it is the bit
1522 position. If ADDRESSABLE is nonzero, it means we are allowed to take
1523 the address of this field for aliasing purposes. If it is negative, we
1524 should not make a bitfield, which is used by make_aligning_type. */
1527 create_field_decl (tree field_name, tree field_type, tree record_type,
1528 int packed, tree size, tree pos, int addressable)
1530 tree field_decl = build_decl (FIELD_DECL, field_name, field_type);
1532 DECL_CONTEXT (field_decl) = record_type;
1533 TREE_READONLY (field_decl) = TYPE_READONLY (field_type);
1535 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
1536 byte boundary since GCC cannot handle less-aligned BLKmode bitfields. */
1537 if (packed && TYPE_MODE (field_type) == BLKmode)
1538 DECL_ALIGN (field_decl) = BITS_PER_UNIT;
1540 /* If a size is specified, use it. Otherwise, if the record type is packed
1541 compute a size to use, which may differ from the object's natural size.
1542 We always set a size in this case to trigger the checks for bitfield
1543 creation below, which is typically required when no position has been
1546 size = convert (bitsizetype, size);
1547 else if (packed == 1)
1549 size = rm_size (field_type);
1551 /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
1553 if (TREE_CODE (size) == INTEGER_CST
1554 && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
1555 size = round_up (size, BITS_PER_UNIT);
1558 /* If we may, according to ADDRESSABLE, make a bitfield if a size is
1559 specified for two reasons: first if the size differs from the natural
1560 size. Second, if the alignment is insufficient. There are a number of
1561 ways the latter can be true.
1563 We never make a bitfield if the type of the field has a nonconstant size,
1564 because no such entity requiring bitfield operations should reach here.
1566 We do *preventively* make a bitfield when there might be the need for it
1567 but we don't have all the necessary information to decide, as is the case
1568 of a field with no specified position in a packed record.
1570 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
1571 in layout_decl or finish_record_type to clear the bit_field indication if
1572 it is in fact not needed. */
1573 if (addressable >= 0
1575 && TREE_CODE (size) == INTEGER_CST
1576 && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
1577 && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
1578 || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
1580 || (TYPE_ALIGN (record_type) != 0
1581 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
1583 DECL_BIT_FIELD (field_decl) = 1;
1584 DECL_SIZE (field_decl) = size;
1585 if (!packed && !pos)
1586 DECL_ALIGN (field_decl)
1587 = (TYPE_ALIGN (record_type) != 0
1588 ? MIN (TYPE_ALIGN (record_type), TYPE_ALIGN (field_type))
1589 : TYPE_ALIGN (field_type));
/* A positioned bitfield is marked packed; otherwise inherit PACKED.  */
1592 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
1593 DECL_ALIGN (field_decl)
1594 = MAX (DECL_ALIGN (field_decl),
1595 DECL_BIT_FIELD (field_decl) ? 1
1596 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT
1597 : TYPE_ALIGN (field_type));
1601 /* We need to pass in the alignment the DECL is known to have.
1602 This is the lowest-order bit set in POS, but no more than
1603 the alignment of the record, if one is specified. Note
1604 that an alignment of 0 is taken as infinite. */
1605 unsigned int known_align;
1607 if (host_integerp (pos, 1))
1608 known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
1610 known_align = BITS_PER_UNIT;
1612 if (TYPE_ALIGN (record_type)
1613 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
1614 known_align = TYPE_ALIGN (record_type);
1616 layout_decl (field_decl, known_align);
1617 SET_DECL_OFFSET_ALIGN (field_decl,
1618 host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
1620 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
1621 &DECL_FIELD_BIT_OFFSET (field_decl),
1622 DECL_OFFSET_ALIGN (field_decl), pos);
/* Remember that an explicit position (rep clause) was applied.  */
1624 DECL_HAS_REP_P (field_decl) = 1;
1627 /* In addition to what our caller says, claim the field is addressable if we
1628 know that its type is not suitable.
1630 The field may also be "technically" nonaddressable, meaning that even if
1631 we attempt to take the field's address we will actually get the address
1632 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
1633 value we have at this point is not accurate enough, so we don't account
1634 for this here and let finish_record_type decide. */
1635 if (!type_for_nonaliased_component_p (field_type))
1638 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
1643 /* Returns a PARM_DECL node. PARAM_NAME is the name of the parameter,
1644 PARAM_TYPE is its type. READONLY is true if the parameter is
1645 readonly (either an IN parameter or an address of a pass-by-ref
1649 create_param_decl (tree param_name, tree param_type, bool readonly)
1651 tree param_decl = build_decl (PARM_DECL, param_name, param_type);
1653 /* Honor targetm.calls.promote_prototypes(), as not doing so can
1654 lead to various ABI violations. */
1655 if (targetm.calls.promote_prototypes (param_type)
1656 && (TREE_CODE (param_type) == INTEGER_TYPE
1657 || TREE_CODE (param_type) == ENUMERAL_TYPE)
1658 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
1660 /* We have to be careful about biased types here. Make a subtype
1661 of integer_type_node with the proper biasing. */
1662 if (TREE_CODE (param_type) == INTEGER_TYPE
1663 && TYPE_BIASED_REPRESENTATION_P (param_type))
/* Copy so the biased-representation flag set below cannot affect a
   shared range type.  */
1666 = copy_type (build_range_type (integer_type_node,
1667 TYPE_MIN_VALUE (param_type),
1668 TYPE_MAX_VALUE (param_type)));
1670 TYPE_BIASED_REPRESENTATION_P (param_type) = 1;
1673 param_type = integer_type_node;
/* DECL_ARG_TYPE is the type as passed, i.e. after promotion.  */
1676 DECL_ARG_TYPE (param_decl) = param_type;
1677 TREE_READONLY (param_decl) = readonly;
1681 /* Given a DECL and ATTR_LIST, process the listed attributes. */
1684 process_attributes (tree decl, struct attrib *attr_list)
1686 for (; attr_list; attr_list = attr_list->next)
1687 switch (attr_list->type)
1689 case ATTR_MACHINE_ATTRIBUTE:
/* Hand a generic machine attribute to the common attribute machinery.  */
1690 decl_attributes (&decl, tree_cons (attr_list->name, attr_list->args,
1692 ATTR_FLAG_TYPE_IN_PLACE);
1695 case ATTR_LINK_ALIAS:
1696 if (! DECL_EXTERNAL (decl))
1698 TREE_STATIC (decl) = 1;
1699 assemble_alias (decl, attr_list->name);
1703 case ATTR_WEAK_EXTERNAL:
/* Only emit a weak declaration when the target supports it; otherwise
   warn at the attribute's source position.  */
1705 declare_weak (decl);
1707 post_error ("?weak declarations not supported on this target",
1708 attr_list->error_point);
1711 case ATTR_LINK_SECTION:
1712 if (targetm.have_named_sections)
1714 DECL_SECTION_NAME (decl)
1715 = build_string (IDENTIFIER_LENGTH (attr_list->name),
1716 IDENTIFIER_POINTER (attr_list->name));
1717 DECL_COMMON (decl) = 0;
1720 post_error ("?section attributes are not supported for this target",
1721 attr_list->error_point);
1724 case ATTR_LINK_CONSTRUCTOR:
1725 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1726 TREE_USED (decl) = 1;
1729 case ATTR_LINK_DESTRUCTOR:
1730 DECL_STATIC_DESTRUCTOR (decl) = 1;
1731 TREE_USED (decl) = 1;
1736 /* Record a global renaming pointer. */
1739 record_global_renaming_pointer (tree decl)
/* DECL must already carry its renamed object; remember it so it can be
   invalidated later by invalidate_global_renaming_pointers.  */
1741 gcc_assert (DECL_RENAMED_OBJECT (decl));
1742 VEC_safe_push (tree, gc, global_renaming_pointers, decl);
1745 /* Invalidate the global renaming pointers. */
1748 invalidate_global_renaming_pointers (void)
/* Clear the renamed object of every recorded decl, then release the
   vector itself.  */
1753 for (i = 0; VEC_iterate(tree, global_renaming_pointers, i, iter); i++)
1754 SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);
1756 VEC_free (tree, gc, global_renaming_pointers);
1759 /* Return true if VALUE is a known to be a multiple of FACTOR, which must be
1763 value_factor_p (tree value, HOST_WIDE_INT factor)
1765 if (host_integerp (value, 1))
1766 return tree_low_cst (value, 1) % factor == 0;
/* A product is a multiple of FACTOR if either operand provably is.  */
1768 if (TREE_CODE (value) == MULT_EXPR)
1769 return (value_factor_p (TREE_OPERAND (value, 0), factor)
1770 || value_factor_p (TREE_OPERAND (value, 1), factor));
1775 /* Given 2 consecutive field decls PREV_FIELD and CURR_FIELD, return true
1776 unless we can prove these 2 fields are laid out in such a way that no gap
1777 exist between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
1778 is the distance in bits between the end of PREV_FIELD and the starting
1779 position of CURR_FIELD. It is ignored if null. */
1782 potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
1784 /* If this is the first field of the record, there cannot be any gap */
1788 /* If the previous field is a union type, then return False: The only
1789 time when such a field is not the last field of the record is when
1790 there are other components at fixed positions after it (meaning there
1791 was a rep clause for every field), in which case we don't want the
1792 alignment constraint to override them. */
1793 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
1796 /* If the distance between the end of prev_field and the beginning of
1797 curr_field is constant, then there is a gap if the value of this
1798 constant is not null. */
1799 if (offset && host_integerp (offset, 1))
1800 return !integer_zerop (offset);
1802 /* If the size and position of the previous field are constant,
1803 then check the sum of this size and position. There will be a gap
1804 iff it is not multiple of the current field alignment. */
1805 if (host_integerp (DECL_SIZE (prev_field), 1)
1806 && host_integerp (bit_position (prev_field), 1))
1807 return ((tree_low_cst (bit_position (prev_field), 1)
1808 + tree_low_cst (DECL_SIZE (prev_field), 1))
1809 % DECL_ALIGN (curr_field) != 0);
1811 /* If both the position and size of the previous field are multiples
1812 of the current field alignment, there cannot be any gap. */
1813 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
1814 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
1817 /* Fallback, return that there may be a potential gap */
1821 /* Returns a LABEL_DECL node for LABEL_NAME. */
1824 create_label_decl (tree label_name)
1826 tree label_decl = build_decl (LABEL_DECL, label_name, void_type_node);
/* Labels belong to the function being compiled and carry no value.  */
1828 DECL_CONTEXT (label_decl) = current_function_decl;
1829 DECL_MODE (label_decl) = VOIDmode;
1830 DECL_SOURCE_LOCATION (label_decl) = input_location;
1835 /* Returns a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
1836 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1837 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1838 PARM_DECL nodes chained through the TREE_CHAIN field).
1840 INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
1841 appropriate fields in the FUNCTION_DECL. GNAT_NODE gives the location. */
1844 create_subprog_decl (tree subprog_name, tree asm_name,
1845 tree subprog_type, tree param_decl_list, bool inline_flag,
1846 bool public_flag, bool extern_flag,
1847 struct attrib *attr_list, Node_Id gnat_node)
1849 tree return_type = TREE_TYPE (subprog_type);
1850 tree subprog_decl = build_decl (FUNCTION_DECL, subprog_name, subprog_type);
1852 /* If this is a function nested inside an inlined external function, it
1853 means we aren't going to compile the outer function unless it is
1854 actually inlined, so do the same for us. */
1855 if (current_function_decl && DECL_INLINE (current_function_decl)
1856 && DECL_EXTERNAL (current_function_decl))
/* Copy the flags from the FUNCTION_TYPE onto the decl.  */
1859 DECL_EXTERNAL (subprog_decl) = extern_flag;
1860 TREE_PUBLIC (subprog_decl) = public_flag;
1861 TREE_STATIC (subprog_decl) = 1;
1862 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
1863 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
1864 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
1865 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
1866 DECL_RESULT (subprog_decl) = build_decl (RESULT_DECL, 0, return_type);
1867 DECL_ARTIFICIAL (DECL_RESULT (subprog_decl)) = 1;
1868 DECL_IGNORED_P (DECL_RESULT (subprog_decl)) = 1;
1870 /* TREE_ADDRESSABLE is set on the result type to request the use of the
1871 target by-reference return mechanism. This is not supported all the
1872 way down to RTL expansion with GCC 4, which ICEs on temporary creation
1873 attempts with such a type and expects DECL_BY_REFERENCE to be set on
1874 the RESULT_DECL instead - see gnat_genericize for more details. */
1875 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (subprog_decl))))
1877 tree result_decl = DECL_RESULT (subprog_decl);
1879 TREE_ADDRESSABLE (TREE_TYPE (result_decl)) = 0;
1880 DECL_BY_REFERENCE (result_decl) = 1;
1884 DECL_DECLARED_INLINE_P (subprog_decl) = 1;
1887 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
1889 process_attributes (subprog_decl, attr_list);
1891 /* Add this decl to the current binding level. */
1892 gnat_pushdecl (subprog_decl, gnat_node);
1894 /* Output the assembler code and/or RTL for the declaration. */
1895 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
1897 return subprog_decl;
1900 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
1901 body. This routine needs to be invoked before processing the declarations
1902 appearing in the subprogram. */
1905 begin_subprog_body (tree subprog_decl)
1909 current_function_decl = subprog_decl;
1910 announce_function (subprog_decl);
1912 /* Enter a new binding level and show that all the parameters belong to
1915 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
1916 param_decl = TREE_CHAIN (param_decl))
1917 DECL_CONTEXT (param_decl) = subprog_decl;
1919 make_decl_rtl (subprog_decl);
1921 /* We handle pending sizes via the elaboration of types, so we don't need to
1922 save them. This causes them to be marked as part of the outer function
1923 and then discarded. */
1924 get_pending_sizes ();
1928 /* Helper for the genericization callback. Return a dereference of VAL
1929 if it is of a reference type. */
/* NOTE(review): incomplete extraction -- the return type, braces and the
   early "return val" for the non-reference case are missing here.  The
   visible code builds an INDIRECT_REF and copies const/volatile/side-effect
   bits from the designated type onto the new reference node.  */
1932 convert_from_reference (tree val)
1934 tree value_type, ref;
1936 if (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE)
1939 value_type = TREE_TYPE (TREE_TYPE (val));
1940 ref = build1 (INDIRECT_REF, value_type, val);
1942 /* See if what we reference is CONST or VOLATILE, which requires
1943 looking into array types to get to the component type. */
1945 while (TREE_CODE (value_type) == ARRAY_TYPE)
1946 value_type = TREE_TYPE (value_type);
1949 = (TYPE_QUALS (value_type) & TYPE_QUAL_CONST);
1950 TREE_THIS_VOLATILE (ref)
1951 = (TYPE_QUALS (value_type) & TYPE_QUAL_VOLATILE);
1953 TREE_SIDE_EFFECTS (ref)
1954 = (TREE_THIS_VOLATILE (ref) || TREE_SIDE_EFFECTS (val));
1959 /* Helper for the genericization callback. Returns true if T denotes
1960 a RESULT_DECL with DECL_BY_REFERENCE set. */
/* NOTE(review): return type line and braces lost in extraction; the body
   is a single boolean predicate.  */
1963 is_byref_result (tree t)
1965 return (TREE_CODE (t) == RESULT_DECL && DECL_BY_REFERENCE (t));
1969 /* Tree walking callback for gnat_genericize. Currently ...
1971 o Adjust references to the function's DECL_RESULT if it is marked
1972 DECL_BY_REFERENCE and so has had its type turned into a reference
1973 type at the end of the function compilation. */
/* NOTE(review): incomplete extraction -- the return type, braces,
   *walk_subtrees assignments and return statements of the branches are
   missing from the visible lines.  */
1976 gnat_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
1978 /* This implementation is modeled after what the C++ front-end is
1979 doing, basis of the downstream passes behavior. */
1981 tree stmt = *stmt_p;
1982 struct pointer_set_t *p_set = (struct pointer_set_t*) data;
1984 /* If we have a direct mention of the result decl, dereference. */
1985 if (is_byref_result (stmt))
1987 *stmt_p = convert_from_reference (stmt);
1992 /* Otherwise, no need to walk the same tree twice. */
1993 if (pointer_set_contains (p_set, stmt))
1999 /* If we are taking the address of what now is a reference, just get the
2001 if (TREE_CODE (stmt) == ADDR_EXPR
2002 && is_byref_result (TREE_OPERAND (stmt, 0)))
2004 *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
2008 /* Don't dereference a by-reference RESULT_DECL inside a RETURN_EXPR. */
2009 else if (TREE_CODE (stmt) == RETURN_EXPR
2010 && TREE_OPERAND (stmt, 0)
2011 && is_byref_result (TREE_OPERAND (stmt, 0)))
2014 /* Don't look inside trees that cannot embed references of interest. */
2015 else if (IS_TYPE_OR_DECL_P (stmt))
/* Remember this node so the walk does not process it again.  */
2018 pointer_set_insert (p_set, *stmt_p);
2023 /* Perform lowering of Ada trees to GENERIC. In particular:
2025 o Turn a DECL_BY_REFERENCE RESULT_DECL into a real by-reference decl
2026 and adjust all the references to this decl accordingly. */
/* NOTE(review): incomplete extraction -- the return type, braces and the
   early return for the !DECL_BY_REFERENCE case are missing from the
   visible lines.  */
2029 gnat_genericize (tree fndecl)
2031 /* Prior to GCC 4, an explicit By_Reference result mechanism for a function
2032 was handled by simply setting TREE_ADDRESSABLE on the result type.
2033 Everything required to actually pass by invisible ref using the target
2034 mechanism (e.g. extra parameter) was handled at RTL expansion time.
2036 This doesn't work with GCC 4 any more for several reasons. First, the
2037 gimplification process might need the creation of temporaries of this
2038 type, and the gimplifier ICEs on such attempts. Second, the middle-end
2039 now relies on a different attribute for such cases (DECL_BY_REFERENCE on
2040 RESULT/PARM_DECLs), and expects the user invisible by-reference-ness to
2041 be explicitly accounted for by the front-end in the function body.
2043 We achieve the complete transformation in two steps:
2045 1/ create_subprog_decl performs early attribute tweaks: it clears
2046 TREE_ADDRESSABLE from the result type and sets DECL_BY_REFERENCE on
2047 the result decl. The former ensures that the bit isn't set in the GCC
2048 tree saved for the function, so prevents ICEs on temporary creation.
2049 The latter we use here to trigger the rest of the processing.
2051 2/ This function performs the type transformation on the result decl
2052 and adjusts all the references to this decl from the function body
2055 Clearing TREE_ADDRESSABLE from the type differs from the C++ front-end
2056 strategy, which escapes the gimplifier temporary creation issues by
2057 creating its own temporaries using TARGET_EXPR nodes. Our way relies
2058 on simple specific support code in aggregate_value_p to look at the
2059 target function result decl explicitly. */
2061 struct pointer_set_t *p_set;
2062 tree decl_result = DECL_RESULT (fndecl);
2064 if (!DECL_BY_REFERENCE (decl_result))
2067 /* Make the DECL_RESULT explicitly by-reference and adjust all the
2068 occurrences in the function body using the common tree-walking facility.
2069 We want to see every occurrence of the result decl to adjust the
2070 referencing tree, so need to use our own pointer set to control which
2071 trees should be visited again or not. */
2073 p_set = pointer_set_create ();
2075 TREE_TYPE (decl_result) = build_reference_type (TREE_TYPE (decl_result));
2076 TREE_ADDRESSABLE (decl_result) = 0;
2077 relayout_decl (decl_result);
2079 walk_tree (&DECL_SAVED_TREE (fndecl), gnat_genericize_r, p_set, NULL);
2081 pointer_set_destroy (p_set);
2084 /* Finish the definition of the current subprogram and compile it all the way
2085 to assembler language output. BODY is the tree corresponding to
/* NOTE(review): incomplete extraction -- the return type, braces,
   gnat_poplevel call, the early return for type_annotate_only and the
   nested/non-nested else structure are missing from the visible lines.  */
2089 end_subprog_body (tree body)
2091 tree fndecl = current_function_decl;
2093 /* Mark the BLOCK for this level as being for this function and pop the
2094 level. Since the vars in it are the parameters, clear them. */
2095 BLOCK_VARS (current_binding_level->block) = 0;
2096 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
2097 DECL_INITIAL (fndecl) = current_binding_level->block;
2100 /* Deal with inline. If declared inline or we should default to inline,
2101 set the flag in the decl. */
2102 DECL_INLINE (fndecl)
2103 = DECL_DECLARED_INLINE_P (fndecl) || flag_inline_trees == 2;
2105 /* We handle pending sizes via the elaboration of types, so we don't
2106 need to save them. */
2107 get_pending_sizes ();
2109 /* Mark the RESULT_DECL as being in this subprogram. */
2110 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
2112 DECL_SAVED_TREE (fndecl) = body;
/* Restore the enclosing function (NULL_TREE for non-nested bodies).  */
2114 current_function_decl = DECL_CONTEXT (fndecl);
2117 /* We cannot track the location of errors past this point. */
2118 error_gnat_node = Empty;
2120 /* If we're only annotating types, don't actually compile this function. */
2121 if (type_annotate_only)
2124 /* Perform the required pre-gimplification transformations on the tree. */
2125 gnat_genericize (fndecl);
2127 /* We do different things for nested and non-nested functions.
2128 ??? This should be in cgraph. */
2129 if (!DECL_CONTEXT (fndecl))
2131 gnat_gimplify_function (fndecl);
2132 cgraph_finalize_function (fndecl, false);
2135 /* Register this function with cgraph just far enough to get it
2136 added to our parent's nested function list. */
2137 (void) cgraph_node (fndecl);
2140 /* Convert FNDECL's code to GIMPLE and handle any nested functions. */
/* NOTE(review): incomplete extraction -- the static return type line and
   braces are missing from the visible lines.  */
2143 gnat_gimplify_function (tree fndecl)
2145 struct cgraph_node *cgn;
/* Dump the pre-gimplification tree, gimplify, then dump the result.  */
2147 dump_function (TDI_original, fndecl);
2148 gimplify_function_tree (fndecl);
2149 dump_function (TDI_generic, fndecl);
2151 /* Convert all nested functions to GIMPLE now. We do things in this order
2152 so that items like VLA sizes are expanded properly in the context of the
2153 correct function. */
2154 cgn = cgraph_node (fndecl);
2155 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
2156 gnat_gimplify_function (cgn->decl);
/* Record DECL, a builtin function decl, at global level by pushing it on
   the current binding level with no associated GNAT node.
   NOTE(review): incomplete extraction -- the return type, braces and the
   trailing "return decl" are missing from the visible lines.  */
2161 gnat_builtin_function (tree decl)
2163 gnat_pushdecl (decl, Empty);
2167 /* Handle a "const" attribute; arguments as in
2168 struct attribute_spec.handler. */
/* NOTE(review): incomplete extraction -- the static return type line, the
   final parameter (bool *no_add_attrs), braces, the else branch and the
   return NULL_TREE are missing from the visible lines.  */
2171 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
2172 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
/* Only function decls can be marked const (TREE_READONLY on a decl).  */
2175 if (TREE_CODE (*node) == FUNCTION_DECL)
2176 TREE_READONLY (*node) = 1;
2178 *no_add_attrs = true;
2183 /* Handle a "nothrow" attribute; arguments as in
2184 struct attribute_spec.handler. */
/* NOTE(review): incomplete extraction -- same missing pieces as
   handle_const_attribute above (return type, no_add_attrs parameter,
   braces, else branch, return statement).  */
2187 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
2188 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
/* Only function decls can be marked nothrow.  */
2191 if (TREE_CODE (*node) == FUNCTION_DECL)
2192 TREE_NOTHROW (*node) = 1;
2194 *no_add_attrs = true;
2199 /* Return an integer type with the number of bits of precision given by
2200 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
2201 it is a signed type. */
/* NOTE(review): incomplete extraction -- the return type, braces, local
   declarations (t, type_name), the if/else around make_unsigned_type vs
   make_signed_type, the TYPE_NAME guard and the final return are missing
   from the visible lines.  The visible code shows a small cache indexed
   by [precision][unsignedp] for types up to 2 * MAX_BITS_PER_WORD.  */
2204 gnat_type_for_size (unsigned precision, int unsignedp)
2209 if (precision <= 2 * MAX_BITS_PER_WORD
2210 && signed_and_unsigned_types[precision][unsignedp])
2211 return signed_and_unsigned_types[precision][unsignedp];
2214 t = make_unsigned_type (precision);
2216 t = make_signed_type (precision);
2218 if (precision <= 2 * MAX_BITS_PER_WORD)
2219 signed_and_unsigned_types[precision][unsignedp] = t;
/* Give the new type a name like SIGNED_31 or UNSIGNED_31.  */
2223 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
2224 TYPE_NAME (t) = get_identifier (type_name);
2230 /* Likewise for floating-point types. */
/* NOTE(review): incomplete extraction -- the return type, braces, local
   declarations, the layout_type call and the final return are missing
   from the visible lines.  Results are cached per machine mode in the
   float_types array.  */
2233 float_type_for_precision (int precision, enum machine_mode mode)
2238 if (float_types[(int) mode])
2239 return float_types[(int) mode];
2241 float_types[(int) mode] = t = make_node (REAL_TYPE);
2242 TYPE_PRECISION (t) = precision;
/* Laying out the type must have produced exactly the requested mode.  */
2245 gcc_assert (TYPE_MODE (t) == mode);
2248 sprintf (type_name, "FLOAT_%d", precision);
2249 TYPE_NAME (t) = get_identifier (type_name);
2255 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
2256 an unsigned type; otherwise a signed type is returned. */
/* NOTE(review): incomplete extraction -- the return type, braces and the
   NULL_TREE returns for the BLKmode / COMPLEX / unhandled cases are
   missing from the visible lines.  */
2259 gnat_type_for_mode (enum machine_mode mode, int unsignedp)
2261 if (mode == BLKmode)
2263 else if (mode == VOIDmode)
2264 return void_type_node;
2265 else if (COMPLEX_MODE_P (mode))
2267 else if (SCALAR_FLOAT_MODE_P (mode))
2268 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
2269 else if (SCALAR_INT_MODE_P (mode))
2270 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2275 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
/* NOTE(review): incomplete extraction -- the return type, braces,
   TYPE_MODULAR_P copy on the duplicated node and the final return are
   missing.  The visible code preserves modular-type information by
   copying the cached type and pointing its TREE_TYPE at the modular
   (sub)type -- presumably so Ada modular semantics survive the
   conversion; confirm against the full original.  */
2278 gnat_unsigned_type (tree type_node)
2280 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
2282 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2284 type = copy_node (type);
2285 TREE_TYPE (type) = type_node;
2287 else if (TREE_TYPE (type_node)
2288 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2289 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2291 type = copy_node (type);
2292 TREE_TYPE (type) = TREE_TYPE (type_node);
2298 /* Return the signed version of a TYPE_NODE, a scalar type. */
/* NOTE(review): incomplete extraction -- mirrors gnat_unsigned_type
   above; the return type, braces and final return are missing from the
   visible lines.  */
2301 gnat_signed_type (tree type_node)
2303 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
2305 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2307 type = copy_node (type);
2308 TREE_TYPE (type) = type_node;
2310 else if (TREE_TYPE (type_node)
2311 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2312 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2314 type = copy_node (type);
2315 TREE_TYPE (type) = TREE_TYPE (type_node);
2322 /* EXP is an expression for the size of an object. If this size contains
2323 discriminant references, replace them with the maximum (if MAX_P) or
2324 minimum (if !MAX_P) possible value of the discriminant. */
/* NOTE(review): incomplete extraction -- the return type, braces, several
   case labels (tcc_constant, tcc_reference, tcc_unary, tcc_binary),
   intermediate returns and the gcc_unreachable at the end are missing
   from the visible lines.  The structure below is a recursive walk over
   TREE_CODE_CLASS (code).  */
2327 max_size (tree exp, bool max_p)
2329 enum tree_code code = TREE_CODE (exp);
2330 tree type = TREE_TYPE (exp);
2332 switch (TREE_CODE_CLASS (code))
2334 case tcc_declaration:
/* A call's size bound is the call rebuilt over the bounds of its
   arguments.  */
2339 if (code == CALL_EXPR)
2342 int i, n = call_expr_nargs (exp);
2345 argarray = (tree *) alloca (n * sizeof (tree));
2346 for (i = 0; i < n; i++)
2347 argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
2348 return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
2353 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
2354 modify. Otherwise, we treat it like a variable. */
2355 if (!CONTAINS_PLACEHOLDER_P (exp))
/* A discriminant reference: substitute the extreme value of its type.  */
2358 type = TREE_TYPE (TREE_OPERAND (exp, 1));
2360 max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), true);
2362 case tcc_comparison:
2363 return max_p ? size_one_node : size_zero_node;
2367 case tcc_expression:
2368 switch (TREE_CODE_LENGTH (code))
2371 if (code == NON_LVALUE_EXPR)
2372 return max_size (TREE_OPERAND (exp, 0), max_p);
/* NEGATE flips which bound we are chasing on its operand.  */
2375 fold_build1 (code, type,
2376 max_size (TREE_OPERAND (exp, 0),
2377 code == NEGATE_EXPR ? !max_p : max_p));
2380 if (code == COMPOUND_EXPR)
2381 return max_size (TREE_OPERAND (exp, 1), max_p);
2383 /* Calculate "(A ? B : C) - D" as "A ? B - D : C - D" which
2384 may provide a tighter bound on max_size. */
2385 if (code == MINUS_EXPR
2386 && TREE_CODE (TREE_OPERAND (exp, 0)) == COND_EXPR)
2388 tree lhs = fold_build2 (MINUS_EXPR, type,
2389 TREE_OPERAND (TREE_OPERAND (exp, 0), 1),
2390 TREE_OPERAND (exp, 1));
2391 tree rhs = fold_build2 (MINUS_EXPR, type,
2392 TREE_OPERAND (TREE_OPERAND (exp, 0), 2),
2393 TREE_OPERAND (exp, 1));
2394 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2395 max_size (lhs, max_p),
2396 max_size (rhs, max_p));
/* Subtraction flips the bound chased on its right operand.  */
2400 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
2401 tree rhs = max_size (TREE_OPERAND (exp, 1),
2402 code == MINUS_EXPR ? !max_p : max_p);
2404 /* Special-case wanting the maximum value of a MIN_EXPR.
2405 In that case, if one side overflows, return the other.
2406 sizetype is signed, but we know sizes are non-negative.
2407 Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
2408 overflowing or the maximum possible value and the RHS
2412 && TREE_CODE (rhs) == INTEGER_CST
2413 && TREE_OVERFLOW (rhs))
2417 && TREE_CODE (lhs) == INTEGER_CST
2418 && TREE_OVERFLOW (lhs))
2420 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
2421 && ((TREE_CODE (lhs) == INTEGER_CST
2422 && TREE_OVERFLOW (lhs))
2423 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
2424 && !TREE_CONSTANT (rhs))
2427 return fold_build2 (code, type, lhs, rhs);
2431 if (code == SAVE_EXPR)
2433 else if (code == COND_EXPR)
2434 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2435 max_size (TREE_OPERAND (exp, 1), max_p),
2436 max_size (TREE_OPERAND (exp, 2), max_p));
2439 /* Other tree classes cannot happen. */
2447 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2448 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2449 Return a constructor for the template. */
/* NOTE(review): incomplete extraction -- the return type, braces, the
   declaration of field, the loop's bound_list test and the gcc_unreachable
   fallback when no bounds can be found are missing from the visible
   lines.  */
2452 build_template (tree template_type, tree array_type, tree expr)
2454 tree template_elts = NULL_TREE;
2455 tree bound_list = NULL_TREE;
/* Strip padding / justified-modular wrappers to get at the real array.  */
2458 if (TREE_CODE (array_type) == RECORD_TYPE
2459 && (TYPE_IS_PADDING_P (array_type)
2460 || TYPE_JUSTIFIED_MODULAR_P (array_type)))
2461 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
2463 if (TREE_CODE (array_type) == ARRAY_TYPE
2464 || (TREE_CODE (array_type) == INTEGER_TYPE
2465 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
2466 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
2468 /* First make the list for a CONSTRUCTOR for the template. Go down the
2469 field list of the template instead of the type chain because this
2470 array might be an Ada array of arrays and we can't tell where the
2471 nested arrays stop being the underlying object. */
2473 for (field = TYPE_FIELDS (template_type); field;
2475 ? (bound_list = TREE_CHAIN (bound_list))
2476 : (array_type = TREE_TYPE (array_type))),
2477 field = TREE_CHAIN (TREE_CHAIN (field)))
2479 tree bounds, min, max;
2481 /* If we have a bound list, get the bounds from there. Likewise
2482 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
2483 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
2484 This will give us a maximum range. */
2486 bounds = TREE_VALUE (bound_list);
2487 else if (TREE_CODE (array_type) == ARRAY_TYPE)
2488 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
2489 else if (expr && TREE_CODE (expr) == PARM_DECL
2490 && DECL_BY_COMPONENT_PTR_P (expr))
2491 bounds = TREE_TYPE (field);
/* Each dimension contributes a (min, max) pair of template fields.  */
2495 min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
2496 max = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MAX_VALUE (bounds));
2498 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
2499 substitute it from OBJECT. */
2500 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
2501 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
/* Elements are consed in reverse; nreverse below restores order.  */
2503 template_elts = tree_cons (TREE_CHAIN (field), max,
2504 tree_cons (field, min, template_elts));
2507 return gnat_build_constructor (template_type, nreverse (template_elts));
2510 /* Build a VMS descriptor from a Mechanism_Type, which must specify
2511 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2512 in the type contains in its DECL_INITIAL the expression to use when
2513 a constructor is made for the type. GNAT_ENTITY is an entity used
2514 to print out an error message if the mechanism cannot be applied to
2515 an object of that type and also for the name. */
/* NOTE(review): incomplete extraction -- the return type, braces, several
   local declarations (ndim, i, idx_arr, inner_type, dtype, class, fname,
   tem), many case labels / break statements, the DTYPE values for the
   VAX-float digits, and the final return are missing from the visible
   lines.  The overall shape: compute array dimensions, derive the VMS
   DTYPE and CLASS codes, then chain the descriptor FIELD_DECLs.  */
2518 build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2520 tree record_type = make_node (RECORD_TYPE);
2521 tree pointer32_type;
2522 tree field_list = 0;
2531 /* If TYPE is an unconstrained array, use the underlying array type. */
2532 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2533 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2535 /* If this is an array, compute the number of dimensions in the array,
2536 get the index types, and point to the inner type. */
2537 if (TREE_CODE (type) != ARRAY_TYPE)
2540 for (ndim = 1, inner_type = type;
2541 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2542 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2543 ndim++, inner_type = TREE_TYPE (inner_type))
2546 idx_arr = (tree *) alloca (ndim * sizeof (tree));
/* Fortran convention stores dimensions in reverse order, except for
   non-contiguous-array descriptors.  */
2548 if (mech != By_Descriptor_NCA
2549 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2550 for (i = ndim - 1, inner_type = type;
2552 i--, inner_type = TREE_TYPE (inner_type))
2553 idx_arr[i] = TYPE_DOMAIN (inner_type);
2555 for (i = 0, inner_type = type;
2557 i++, inner_type = TREE_TYPE (inner_type))
2558 idx_arr[i] = TYPE_DOMAIN (inner_type);
2560 /* Now get the DTYPE value. */
2561 switch (TREE_CODE (type))
2565 if (TYPE_VAX_FLOATING_POINT_P (type))
2566 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
/* Non-VAX integer DTYPEs keyed off mode size and signedness.  */
2579 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2582 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2585 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2588 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2591 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2594 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2600 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2604 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2605 && TYPE_VAX_FLOATING_POINT_P (type))
2606 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2618 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2629 /* Get the CLASS value. */
2632 case By_Descriptor_A:
2635 case By_Descriptor_NCA:
2638 case By_Descriptor_SB:
2642 case By_Descriptor_S:
2648 /* Make the type for a descriptor for VMS. The first four fields
2649 are the same for all types. */
2652 = chainon (field_list,
2653 make_descriptor_field
2654 ("LENGTH", gnat_type_for_size (16, 1), record_type,
2655 size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));
2657 field_list = chainon (field_list,
2658 make_descriptor_field ("DTYPE",
2659 gnat_type_for_size (8, 1),
2660 record_type, size_int (dtype)));
2661 field_list = chainon (field_list,
2662 make_descriptor_field ("CLASS",
2663 gnat_type_for_size (8, 1),
2664 record_type, size_int (class)));
2666 /* Of course this will crash at run-time if the address space is not
2667 within the low 32 bits, but there is nothing else we can do. */
2668 pointer32_type = build_pointer_type_for_mode (type, SImode, false);
2671 = chainon (field_list,
2672 make_descriptor_field
2673 ("POINTER", pointer32_type, record_type,
2674 build_unary_op (ADDR_EXPR,
2676 build0 (PLACEHOLDER_EXPR, type))));
/* Remaining fields depend on the descriptor class.  */
2681 case By_Descriptor_S:
2684 case By_Descriptor_SB:
2686 = chainon (field_list,
2687 make_descriptor_field
2688 ("SB_L1", gnat_type_for_size (32, 1), record_type,
2689 TREE_CODE (type) == ARRAY_TYPE
2690 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2692 = chainon (field_list,
2693 make_descriptor_field
2694 ("SB_U1", gnat_type_for_size (32, 1), record_type,
2695 TREE_CODE (type) == ARRAY_TYPE
2696 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2699 case By_Descriptor_A:
2700 case By_Descriptor_NCA:
2701 field_list = chainon (field_list,
2702 make_descriptor_field ("SCALE",
2703 gnat_type_for_size (8, 1),
2707 field_list = chainon (field_list,
2708 make_descriptor_field ("DIGITS",
2709 gnat_type_for_size (8, 1),
2714 = chainon (field_list,
2715 make_descriptor_field
2716 ("AFLAGS", gnat_type_for_size (8, 1), record_type,
2717 size_int (mech == By_Descriptor_NCA
2719 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2720 : (TREE_CODE (type) == ARRAY_TYPE
2721 && TYPE_CONVENTION_FORTRAN_P (type)
2724 field_list = chainon (field_list,
2725 make_descriptor_field ("DIMCT",
2726 gnat_type_for_size (8, 1),
2730 field_list = chainon (field_list,
2731 make_descriptor_field ("ARSIZE",
2732 gnat_type_for_size (32, 1),
2734 size_in_bytes (type)));
2736 /* Now build a pointer to the 0,0,0... element. */
2737 tem = build0 (PLACEHOLDER_EXPR, type);
2738 for (i = 0, inner_type = type; i < ndim;
2739 i++, inner_type = TREE_TYPE (inner_type))
2740 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
2741 convert (TYPE_DOMAIN (inner_type), size_zero_node),
2742 NULL_TREE, NULL_TREE);
2745 = chainon (field_list,
2746 make_descriptor_field
2748 build_pointer_type_for_mode (inner_type, SImode, false),
2751 build_pointer_type_for_mode (inner_type, SImode,
2755 /* Next come the addressing coefficients. */
2756 tem = size_one_node;
2757 for (i = 0; i < ndim; i++)
/* Stride for dimension i: product of the extents of dimensions below.  */
2761 = size_binop (MULT_EXPR, tem,
2762 size_binop (PLUS_EXPR,
2763 size_binop (MINUS_EXPR,
2764 TYPE_MAX_VALUE (idx_arr[i]),
2765 TYPE_MIN_VALUE (idx_arr[i])),
2768 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
2769 fname[1] = '0' + i, fname[2] = 0;
2771 = chainon (field_list,
2772 make_descriptor_field (fname,
2773 gnat_type_for_size (32, 1),
2774 record_type, idx_length));
2776 if (mech == By_Descriptor_NCA)
2780 /* Finally here are the bounds. */
2781 for (i = 0; i < ndim; i++)
2785 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2787 = chainon (field_list,
2788 make_descriptor_field
2789 (fname, gnat_type_for_size (32, 1), record_type,
2790 TYPE_MIN_VALUE (idx_arr[i])));
2794 = chainon (field_list,
2795 make_descriptor_field
2796 (fname, gnat_type_for_size (32, 1), record_type,
2797 TYPE_MAX_VALUE (idx_arr[i])));
2802 post_error ("unsupported descriptor type for &", gnat_entity);
2805 finish_record_type (record_type, field_list, 0, true);
2806 create_type_decl (create_concat_name (gnat_entity, "DESC"), record_type,
2807 NULL, true, false, gnat_entity);
2812 /* Utility routine for above code to make a field. */
/* NOTE(review): incomplete extraction -- the static return type, braces
   and the trailing "return field" are missing.  The field's DECL_INITIAL
   holds the expression used when constructing a descriptor (see the head
   comment of build_vms_descriptor above).  */
2815 make_descriptor_field (const char *name, tree type,
2816 tree rec_type, tree initial)
2819 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
2821 DECL_INITIAL (field) = initial;
2825 /* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
2826 pointer or fat pointer type. GNAT_SUBPROG is the subprogram to which
2827 the VMS descriptor is passed. */
/* NOTE(review): incomplete extraction -- the return type, braces, the
   switch head on iclass, several break statements and intermediate
   expressions are missing from the visible lines.  */
2830 convert_vms_descriptor (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
2832 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
2833 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
2834 /* The CLASS field is the 3rd field in the descriptor. */
2835 tree class = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type)));
2836 /* The POINTER field is the 4th field in the descriptor. */
2837 tree pointer = TREE_CHAIN (class);
2839 /* Retrieve the value of the POINTER field. */
2841 = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);
/* Thin pointer target: a plain conversion suffices.  */
2843 if (POINTER_TYPE_P (gnu_type))
2844 return convert (gnu_type, gnu_expr);
2846 else if (TYPE_FAT_POINTER_P (gnu_type))
2848 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
2849 tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
2850 tree template_type = TREE_TYPE (p_bounds_type);
2851 tree min_field = TYPE_FIELDS (template_type);
2852 tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
2853 tree template, template_addr, aflags, dimct, t, u;
2854 /* See the head comment of build_vms_descriptor. */
2855 int iclass = TREE_INT_CST_LOW (DECL_INITIAL (class));
2857 /* Convert POINTER to the type of the P_ARRAY field. */
2858 gnu_expr = convert (p_array_type, gnu_expr);
2862 case 1: /* Class S */
2863 case 15: /* Class SB */
2864 /* Build {1, LENGTH} template; LENGTH is the 1st field. */
2865 t = TYPE_FIELDS (desc_type);
2866 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
2867 t = tree_cons (min_field,
2868 convert (TREE_TYPE (min_field), integer_one_node),
2869 tree_cons (max_field,
2870 convert (TREE_TYPE (max_field), t),
2872 template = gnat_build_constructor (template_type, t);
2873 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);
2875 /* For class S, we are done. */
2879 /* Test that we really have a SB descriptor, like DEC Ada. */
2880 t = build3 (COMPONENT_REF, TREE_TYPE (class), desc, class, NULL);
2881 u = convert (TREE_TYPE (class), DECL_INITIAL (class));
2882 u = build_binary_op (EQ_EXPR, integer_type_node, t, u);
2883 /* If so, there is already a template in the descriptor and
2884 it is located right after the POINTER field. */
2885 t = TREE_CHAIN (pointer);
2886 template = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
2887 /* Otherwise use the {1, LENGTH} template we build above. */
2888 template_addr = build3 (COND_EXPR, p_bounds_type, u,
2889 build_unary_op (ADDR_EXPR, p_bounds_type,
2894 case 4: /* Class A */
2895 /* The AFLAGS field is the 7th field in the descriptor. */
2896 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer)));
2897 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
2898 /* The DIMCT field is the 8th field in the descriptor. */
2900 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
2901 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
2902 or FL_COEFF or FL_BOUNDS not set. */
2903 u = build_int_cst (TREE_TYPE (aflags), 192);
2904 u = build_binary_op (TRUTH_OR_EXPR, integer_type_node,
2905 build_binary_op (NE_EXPR, integer_type_node,
2907 convert (TREE_TYPE (dimct),
2909 build_binary_op (NE_EXPR, integer_type_node,
2910 build2 (BIT_AND_EXPR,
2914 add_stmt (build3 (COND_EXPR, void_type_node, u,
2915 build_call_raise (CE_Length_Check_Failed, Empty,
2916 N_Raise_Constraint_Error),
2918 /* There is already a template in the descriptor and it is
2919 located at the start of block 3 (12th field). */
2920 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (t))));
2921 template = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
2922 template_addr = build_unary_op (ADDR_EXPR, p_bounds_type, template);
2925 case 10: /* Class NCA */
2927 post_error ("unsupported descriptor type for &", gnat_subprog);
2928 template_addr = integer_zero_node;
2932 /* Build the fat pointer in the form of a constructor. */
2933 t = tree_cons (TYPE_FIELDS (gnu_type), gnu_expr,
2934 tree_cons (TREE_CHAIN (TYPE_FIELDS (gnu_type)),
2935 template_addr, NULL_TREE));
2936 return gnat_build_constructor (gnu_type, t);
2943 /* Build a stub for the subprogram specified by the GCC tree GNU_SUBPROG
2944 and the GNAT node GNAT_SUBPROG. */
/* NOTE(review): incomplete extraction -- the return type, braces, the
   declaration of gnu_body, the loop's void_type_node guard and the
   arguments of the ADDR_EXPR/CALL_EXPR builds are partially missing
   from the visible lines.  */
2947 build_function_stub (tree gnu_subprog, Entity_Id gnat_subprog)
2949 tree gnu_subprog_type, gnu_subprog_addr, gnu_subprog_call;
2950 tree gnu_stub_param, gnu_param_list, gnu_arg_types, gnu_param;
2951 tree gnu_stub_decl = DECL_FUNCTION_STUB (gnu_subprog);
2954 gnu_subprog_type = TREE_TYPE (gnu_subprog);
2955 gnu_param_list = NULL_TREE;
2957 begin_subprog_body (gnu_stub_decl);
2960 start_stmt_group ();
2962 /* Loop over the parameters of the stub and translate any of them
2963 passed by descriptor into a by reference one. */
2964 for (gnu_stub_param = DECL_ARGUMENTS (gnu_stub_decl),
2965 gnu_arg_types = TYPE_ARG_TYPES (gnu_subprog_type);
2967 gnu_stub_param = TREE_CHAIN (gnu_stub_param),
2968 gnu_arg_types = TREE_CHAIN (gnu_arg_types))
2970 if (DECL_BY_DESCRIPTOR_P (gnu_stub_param))
2971 gnu_param = convert_vms_descriptor (TREE_VALUE (gnu_arg_types),
2972 gnu_stub_param, gnat_subprog)
2974 gnu_param = gnu_stub_param;
/* Arguments are consed in reverse; nreverse below restores order.  */
2976 gnu_param_list = tree_cons (NULL_TREE, gnu_param, gnu_param_list);
2979 gnu_body = end_stmt_group ();
2981 /* Invoke the internal subprogram. */
2982 gnu_subprog_addr = build1 (ADDR_EXPR, build_pointer_type (gnu_subprog_type),
2984 gnu_subprog_call = build3 (CALL_EXPR, TREE_TYPE (gnu_subprog_type),
2985 gnu_subprog_addr, nreverse (gnu_param_list),
2988 /* Propagate the return value, if any. */
2989 if (VOID_TYPE_P (TREE_TYPE (gnu_subprog_type)))
2990 append_to_statement_list (gnu_subprog_call, &gnu_body);
2992 append_to_statement_list (build_return_expr (DECL_RESULT (gnu_stub_decl),
2998 allocate_struct_function (gnu_stub_decl);
2999 end_subprog_body (gnu_body);
3002 /* Build a type to be used to represent an aliased object whose nominal
3003 type is an unconstrained array. This consists of a RECORD_TYPE containing
3004 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
3005 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
3006 is used to represent an arbitrary unconstrained object. Use NAME
3007 as the name of the record. */
/* NOTE(review): incomplete extraction -- the return type, braces, part of
   the array_field creation and the trailing "return type" are missing
   from the visible lines.  */
3010 build_unc_object_type (tree template_type, tree object_type, tree name)
3012 tree type = make_node (RECORD_TYPE);
3013 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
3014 template_type, type, 0, 0, 0, 1);
3015 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
3018 TYPE_NAME (type) = name;
3019 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
3020 finish_record_type (type,
3021 chainon (chainon (NULL_TREE, template_field),
3028 /* Same, taking a thin or fat pointer type instead of a template type. */
/* NOTE(review): incomplete extraction -- the return type, the name
   parameter line, braces and the template_type declaration line are
   missing from the visible lines.  For a fat pointer the template type
   is reached through the second field; for a thin pointer through the
   first field of the designated record.  */
3031 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
3036 gcc_assert (TYPE_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
3039 = (TYPE_FAT_POINTER_P (thin_fat_ptr_type)
3040 ? TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
3041 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
3042 return build_unc_object_type (template_type, object_type, name);
3045 /* Shift the component offsets within an unconstrained object TYPE to make it
3046 suitable for use as a designated type for thin pointers. */
/* NOTE(review): incomplete extraction -- the return type, braces and the
   DECL_FIELD_BIT_OFFSET adjustment for bounds_field are missing from the
   visible lines.  */
3049 shift_unc_components_for_thin_pointers (tree type)
3051 /* Thin pointer values designate the ARRAY data of an unconstrained object,
3052 allocated past the BOUNDS template. The designated type is adjusted to
3053 have ARRAY at position zero and the template at a negative offset, so
3054 that COMPONENT_REFs on (*thin_ptr) designate the proper location. */
3056 tree bounds_field = TYPE_FIELDS (type);
3057 tree array_field = TREE_CHAIN (TYPE_FIELDS (type));
/* Place the template just before the data: offset is minus the old
   byte position of the ARRAY field.  */
3059 DECL_FIELD_OFFSET (bounds_field)
3060 = size_binop (MINUS_EXPR, size_zero_node, byte_position (array_field));
3062 DECL_FIELD_OFFSET (array_field) = size_zero_node;
3063 DECL_FIELD_BIT_OFFSET (array_field) = bitsize_zero_node;
3066 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE. In
3067 the normal case this is just two adjustments, but we have more to do
3068 if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
3071 update_pointer_to (tree old_type, tree new_type)
3073 tree ptr = TYPE_POINTER_TO (old_type);
3074 tree ref = TYPE_REFERENCE_TO (old_type);
3078 /* If this is the main variant, process all the other variants first. */
3079 if (TYPE_MAIN_VARIANT (old_type) == old_type)
3080 for (type = TYPE_NEXT_VARIANT (old_type); type;
3081 type = TYPE_NEXT_VARIANT (type))
3082 update_pointer_to (type, new_type);
3084 /* If no pointer or reference, we are done. */
3088 /* Merge the old type qualifiers in the new type.
3090 Each old variant has qualifiers for specific reasons, and the new
3091 designated type as well. Each set of qualifiers represents useful
3092 information grabbed at some point, and merging the two simply unifies
3093 these inputs into the final type description.
3095 Consider for instance a volatile type frozen after an access to constant
3096 type designating it. After the designated type freeze, we get here with a
3097 volatile new_type and a dummy old_type with a readonly variant, created
3098 when the access type was processed. We shall make a volatile and readonly
3099 designated type, because that's what it really is.
3101 We might also get here for a non-dummy old_type variant with different
3102 qualifiers than the new_type ones, for instance in some cases of pointers
3103 to private record type elaboration (see the comments around the call to
3104 this routine from gnat_to_gnu_entity/E_Access_Type). We have to merge the
3105 qualifiers in those cases too, to avoid accidentally discarding the
3106 initial set, and will often end up with old_type == new_type then. */
3107 new_type = build_qualified_type (new_type,
3108 TYPE_QUALS (old_type)
3109 | TYPE_QUALS (new_type));
3111 /* If the new type and the old one are identical, there is nothing to
3113 if (old_type == new_type)
3116 /* Otherwise, first handle the simple case. */
3117 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
3119 TYPE_POINTER_TO (new_type) = ptr;
3120 TYPE_REFERENCE_TO (new_type) = ref;
     /* Redirect every pointer node in the TYPE_NEXT_PTR_TO chain, and every
        variant of each such pointer, to the new designated type.  */
3122 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
3123 for (ptr1 = TYPE_MAIN_VARIANT (ptr); ptr1;
3124 ptr1 = TYPE_NEXT_VARIANT (ptr1))
3125 TREE_TYPE (ptr1) = new_type;
     /* Likewise for the reference chain.  */
3127 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
3128 for (ref1 = TYPE_MAIN_VARIANT (ref); ref1;
3129 ref1 = TYPE_NEXT_VARIANT (ref1))
3130 TREE_TYPE (ref1) = new_type;
3133 /* Now deal with the unconstrained array case. In this case the "pointer"
3134 is actually a RECORD_TYPE where both fields are pointers to dummy nodes.
3135 Turn them into pointers to the correct types using update_pointer_to. */
3136 else if (TREE_CODE (ptr) != RECORD_TYPE || !TYPE_IS_FAT_POINTER_P (ptr))
3141 tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
3142 tree array_field = TYPE_FIELDS (ptr);
3143 tree bounds_field = TREE_CHAIN (TYPE_FIELDS (ptr));
3144 tree new_ptr = TYPE_POINTER_TO (new_type);
3148 /* Make pointers to the dummy template point to the real template. */
3150 (TREE_TYPE (TREE_TYPE (bounds_field)),
3151 TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_ptr)))));
3153 /* The references to the template bounds present in the array type
3154 are made through a PLACEHOLDER_EXPR of type new_ptr. Since we
3155 are updating ptr to make it a full replacement for new_ptr as
3156 pointer to new_type, we must rework the PLACEHOLDER_EXPR so as
3157 to make it of type ptr. */
3158 new_ref = build3 (COMPONENT_REF, TREE_TYPE (bounds_field),
3159 build0 (PLACEHOLDER_EXPR, ptr),
3160 bounds_field, NULL_TREE);
3162 /* Create the new array for the new PLACEHOLDER_EXPR and make
3163 pointers to the dummy array point to it.
3165 ??? This is now the only use of substitute_in_type,
3166 which is a very "heavy" routine to do this, so it
3167 should be replaced at some point. */
3169 (TREE_TYPE (TREE_TYPE (array_field)),
3170 substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))),
3171 TREE_CHAIN (TYPE_FIELDS (new_ptr)), new_ref));
3173 /* Make ptr the pointer to new_type. */
3174 TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
3175 = TREE_TYPE (new_type) = ptr;
     /* Record the unconstrained array type on every variant of the fat
        pointer record.  */
3177 for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
3178 SET_TYPE_UNCONSTRAINED_ARRAY (var, new_type);
3180 /* Now handle updating the allocation record, what the thin pointer
3181 points to. Update all pointers from the old record into the new
3182 one, update the type of the array field, and recompute the size. */
3183 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);
3185 TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
3186 = TREE_TYPE (TREE_TYPE (array_field));
3188 /* The size recomputation needs to account for alignment constraints, so
3189 we let layout_type work it out. This will reset the field offsets to
3190 what they would be in a regular record, so we shift them back to what
3191 we want them to be for a thin pointer designated type afterwards. */
3192 DECL_SIZE (TYPE_FIELDS (new_obj_rec)) = 0;
3193 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))) = 0;
3194 TYPE_SIZE (new_obj_rec) = 0;
3195 layout_type (new_obj_rec);
3197 shift_unc_components_for_thin_pointers (new_obj_rec);
3199 /* We are done, at last. */
3200 rest_of_record_type_compilation (ptr);
3204 /* Convert a pointer to a constrained array into a pointer to a fat
3205 pointer. This involves making or finding a template. */
3208 convert_to_fat_pointer (tree type, tree expr)
     /* The template type is the designated type of the fat pointer's second
        field (the bounds pointer).  */
3210 tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
3211 tree template, template_addr;
3212 tree etype = TREE_TYPE (expr);
3214 /* If EXPR is a constant of zero, we make a fat pointer that has a null
3215 pointer to the template and array. */
3216 if (integer_zerop (expr))
3218 gnat_build_constructor
3220 tree_cons (TYPE_FIELDS (type),
3221 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
3222 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3223 convert (build_pointer_type (template_type),
3227 /* If EXPR is a thin pointer, make the template and data from the record. */
3229 else if (TYPE_THIN_POINTER_P (etype))
3231 tree fields = TYPE_FIELDS (TREE_TYPE (etype));
3233 expr = save_expr (expr);
     /* Strip a top-level ADDR_EXPR so we reference the object directly
        instead of dereferencing its address.  */
3234 if (TREE_CODE (expr) == ADDR_EXPR)
3235 expr = TREE_OPERAND (expr, 0);
3237 expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
     /* The first field of the designated record is the template; the
        second is the array data whose address we take.  */
3239 template = build_component_ref (expr, NULL_TREE, fields, false);
3240 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
3241 build_component_ref (expr, NULL_TREE,
3242 TREE_CHAIN (fields), false));
3245 /* Otherwise, build the constructor for the template. */
3246 template = build_template (template_type, TREE_TYPE (etype), expr);
3248 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);
3250 /* The result is a CONSTRUCTOR for the fat pointer.
3252 If expr is an argument of a foreign convention subprogram, the type it
3253 points to is directly the component type. In this case, the expression
3254 type may not match the corresponding FIELD_DECL type at this point, so we
3255 call "convert" here to fix that up if necessary. This type consistency is
3256 required, for instance because it ensures that possible later folding of
3257 component_refs against this constructor always yields something of the
3258 same type as the initial reference.
3260 Note that the call to "build_template" above is still fine, because it
3261 will only refer to the provided template_type in this case. */
3263 gnat_build_constructor
3264 (type, tree_cons (TYPE_FIELDS (type),
3265 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
3266 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3267 template_addr, NULL_TREE)));
3270 /* Convert to a thin pointer type, TYPE. The only thing we know how to convert
3271 is something that is a fat pointer, so convert to it first if EXPR
3272 is not already a fat pointer. */
3275 convert_to_thin_pointer (tree type, tree expr)
3277 if (!TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
3279 = convert_to_fat_pointer
3280 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
3282 /* We get the pointer to the data and use a NOP_EXPR to make it the
3284 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)),
3286 expr = build1 (NOP_EXPR, type, expr);
3291 /* Create an expression whose value is that of EXPR,
3292 converted to type TYPE. The TREE_TYPE of the value
3293 is always TYPE. This function implements all reasonable
3294 conversions; callers should filter out those that are
3295 not permitted by the language being compiled. */
3298 convert (tree type, tree expr)
3300 enum tree_code code = TREE_CODE (type);
3301 tree etype = TREE_TYPE (expr);
3302 enum tree_code ecode = TREE_CODE (etype);
3304 /* If EXPR is already the right type, we are done. */
3308 /* If both input and output have padding and are of variable size, do this
3309 as an unchecked conversion. Likewise if one is a mere variant of the
3310 other, so we avoid a pointless unpad/repad sequence. */
3311 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
3312 && TYPE_IS_PADDING_P (type) && TYPE_IS_PADDING_P (etype)
3313 && (!TREE_CONSTANT (TYPE_SIZE (type))
3314 || !TREE_CONSTANT (TYPE_SIZE (etype))
3315 || TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)))
3318 /* If the output type has padding, make a constructor to build the
3320 else if (code == RECORD_TYPE && TYPE_IS_PADDING_P (type))
3322 /* If we previously converted from another type and our type is
3323 of variable size, remove the conversion to avoid the need for
3324 variable-size temporaries. */
3325 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3326 && !TREE_CONSTANT (TYPE_SIZE (type)))
3327 expr = TREE_OPERAND (expr, 0);
3329 /* If we are just removing the padding from expr, convert the original
3330 object if we have variable size. That will avoid the need
3331 for some variable-size temporaries. */
3332 if (TREE_CODE (expr) == COMPONENT_REF
3333 && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE
3334 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
3335 && !TREE_CONSTANT (TYPE_SIZE (type)))
3336 return convert (type, TREE_OPERAND (expr, 0));
3338 /* If the result type is a padded type with a self-referentially-sized
3339 field and the expression type is a record, do this as an
3340 unchecked conversion. */
3341 else if (TREE_CODE (etype) == RECORD_TYPE
3342 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
3343 return unchecked_convert (type, expr, false);
3347 gnat_build_constructor (type,
3348 tree_cons (TYPE_FIELDS (type),
3350 (TYPE_FIELDS (type)),
3355 /* If the input type has padding, remove it and convert to the output type.
3356 The conditions ordering is arranged to ensure that the output type is not
3357 a padding type here, as it is not clear whether the conversion would
3358 always be correct if this was to happen. */
3359 else if (ecode == RECORD_TYPE && TYPE_IS_PADDING_P (etype))
3363 /* If we have just converted to this padded type, just get the
3364 inner expression. */
3365 if (TREE_CODE (expr) == CONSTRUCTOR
3366 && !VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (expr))
3367 && VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->index
3368 == TYPE_FIELDS (etype))
3370 = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->value;
3372 /* Otherwise, build an explicit component reference. */
3375 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
3377 return convert (type, unpadded);
3380 /* If the input is a biased type, adjust first. */
3381 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
3382 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
3383 fold_convert (TREE_TYPE (etype),
3385 TYPE_MIN_VALUE (etype)));
3387 /* If the input is a justified modular type, we need to extract the actual
3388 object before converting it to any other type with the exceptions of an
3389 unconstrained array or of a mere type variant. It is useful to avoid the
3390 extraction and conversion in the type variant case because it could end
3391 up replacing a VAR_DECL expr by a constructor and we might be about to
3392 take the address of the result. */
3393 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
3394 && code != UNCONSTRAINED_ARRAY_TYPE
3395 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
3396 return convert (type, build_component_ref (expr, NULL_TREE,
3397 TYPE_FIELDS (etype), false));
3399 /* If converting to a type that contains a template, convert to the data
3400 type and then build the template. */
3401 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
3403 tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));
3405 /* If the source already has a template, get a reference to the
3406 associated array only, as we are going to rebuild a template
3407 for the target type anyway. */
3408 expr = maybe_unconstrained_array (expr);
3411 gnat_build_constructor
3413 tree_cons (TYPE_FIELDS (type),
3414 build_template (TREE_TYPE (TYPE_FIELDS (type)),
3415 obj_type, NULL_TREE),
3416 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3417 convert (obj_type, expr), NULL_TREE)));
3420 /* There are some special cases of expressions that we process
3422 switch (TREE_CODE (expr))
3428 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
3429 conversion in gnat_expand_expr. NULL_EXPR does not represent
3430 an actual value, so no conversion is needed. */
3431 expr = copy_node (expr);
3432 TREE_TYPE (expr) = type;
3436 /* If we are converting a STRING_CST to another constrained array type,
3437 just make a new one in the proper type. */
3438 if (code == ecode && AGGREGATE_TYPE_P (etype)
3439 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
3440 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
3442 expr = copy_node (expr);
3443 TREE_TYPE (expr) = type;
3449 /* If we are converting a CONSTRUCTOR to another constrained array type
3450 with the same domain, just make a new one in the proper type. */
3451 if (code == ecode && code == ARRAY_TYPE
3452 && TREE_TYPE (type) == TREE_TYPE (etype)
3453 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (type)),
3454 TYPE_MIN_VALUE (TYPE_DOMAIN (etype)))
3455 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (type)),
3456 TYPE_MAX_VALUE (TYPE_DOMAIN (etype))))
3458 expr = copy_node (expr);
3459 TREE_TYPE (expr) = type;
3464 case UNCONSTRAINED_ARRAY_REF:
3465 /* Convert this to the type of the inner array by getting the address of
3466 the array from the template. */
3467 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
3468 build_component_ref (TREE_OPERAND (expr, 0),
3469 get_identifier ("P_ARRAY"),
     /* Refresh the cached type/code of EXPR before falling through to the
        generic handling below.  */
3471 etype = TREE_TYPE (expr);
3472 ecode = TREE_CODE (etype);
3475 case VIEW_CONVERT_EXPR:
3477 /* GCC 4.x is very sensitive to type consistency overall, and view
3478 conversions thus are very frequent. Even though just "convert"ing
3479 the inner operand to the output type is fine in most cases, it
3480 might expose unexpected input/output type mismatches in special
3481 circumstances so we avoid such recursive calls when we can. */
3483 tree op0 = TREE_OPERAND (expr, 0);
3485 /* If we are converting back to the original type, we can just
3486 lift the input conversion. This is a common occurrence with
3487 switches back-and-forth amongst type variants. */
3488 if (type == TREE_TYPE (op0))
3491 /* Otherwise, if we're converting between two aggregate types, we
3492 might be allowed to substitute the VIEW_CONVERT target type in
3493 place or to just convert the inner expression. */
3494 if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
3496 /* If we are converting between type variants, we can just
3497 substitute the VIEW_CONVERT in place. */
3498 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
3499 return build1 (VIEW_CONVERT_EXPR, type, op0);
3501 /* Otherwise, we may just bypass the input view conversion unless
3502 one of the types is a fat pointer, which is handled by
3503 specialized code below which relies on exact type matching. */
3504 else if (!TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
3505 return convert (type, op0);
3511 /* If both types are record types, just convert the pointer and
3512 make a new INDIRECT_REF.
3514 ??? Disable this for now since it causes problems with the
3515 code in build_binary_op for MODIFY_EXPR which wants to
3516 strip off conversions. But that code really is a mess and
3517 we need to do this a much better way some time. */
3519 && (TREE_CODE (type) == RECORD_TYPE
3520 || TREE_CODE (type) == UNION_TYPE)
3521 && (TREE_CODE (etype) == RECORD_TYPE
3522 || TREE_CODE (etype) == UNION_TYPE)
3523 && !TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
3524 return build_unary_op (INDIRECT_REF, NULL_TREE,
3525 convert (build_pointer_type (type),
3526 TREE_OPERAND (expr, 0)));
3533 /* Check for converting to a pointer to an unconstrained array. */
3534 if (TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
3535 return convert_to_fat_pointer (type, expr);
3537 /* If we're converting between two aggregate types that have the same main
3538 variant, just make a VIEW_CONVERT_EXPR. */
3539 else if (AGGREGATE_TYPE_P (type)
3540 && TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
3541 return build1 (VIEW_CONVERT_EXPR, type, expr);
3543 /* In all other cases of related types, make a NOP_EXPR. */
3544 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
3545 || (code == INTEGER_CST && ecode == INTEGER_CST
3546 && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
3547 return fold_convert (type, expr);
3552 return fold_build1 (CONVERT_EXPR, type, expr);
3555 return fold_convert (type, gnat_truthvalue_conversion (expr));
3558 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
3559 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
3560 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
3561 return unchecked_convert (type, expr, false);
     /* Converting to a biased type: convert to the base type first, then
        subtract the bias (the type's minimum value).  */
3562 else if (TYPE_BIASED_REPRESENTATION_P (type))
3563 return fold_convert (type,
3564 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
3565 convert (TREE_TYPE (type), expr),
3566 TYPE_MIN_VALUE (type)));
3568 /* ... fall through ... */
3571 return fold (convert_to_integer (type, expr));
3574 case REFERENCE_TYPE:
3575 /* If converting between two pointers to records denoting
3576 both a template and type, adjust if needed to account
3577 for any differing offsets, since one might be negative. */
3578 if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
3581 = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
3582 bit_position (TYPE_FIELDS (TREE_TYPE (type))));
3583 tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
3584 sbitsize_int (BITS_PER_UNIT));
3586 expr = build1 (NOP_EXPR, type, expr);
3587 TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
3588 if (integer_zerop (byte_diff))
3591 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
3592 fold (convert (sizetype, byte_diff)));
3595 /* If converting to a thin pointer, handle specially. */
3596 if (TYPE_THIN_POINTER_P (type)
3597 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
3598 return convert_to_thin_pointer (type, expr);
3600 /* If converting fat pointer to normal pointer, get the pointer to the
3601 array and then convert it. */
3602 else if (TYPE_FAT_POINTER_P (etype))
3603 expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
3606 return fold (convert_to_pointer (type, expr));
3609 return fold (convert_to_real (type, expr));
3612 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
3614 gnat_build_constructor
3615 (type, tree_cons (TYPE_FIELDS (type),
3616 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
3619 /* ... fall through ... */
3622 /* In these cases, assume the front-end has validated the conversion.
3623 If the conversion is valid, it will be a bit-wise conversion, so
3624 it can be viewed as an unchecked conversion. */
3625 return unchecked_convert (type, expr, false);
3628 /* This is either a conversion between a tagged type and some
3629 subtype, which we have to mark as a UNION_TYPE because of
3630 overlapping fields or a conversion of an Unchecked_Union. */
3631 return unchecked_convert (type, expr, false);
3633 case UNCONSTRAINED_ARRAY_TYPE:
3634 /* If EXPR is a constrained array, take its address, convert it to a
3635 fat pointer, and then dereference it. Likewise if EXPR is a
3636 record containing both a template and a constrained array.
3637 Note that a record representing a justified modular type
3638 always represents a packed constrained array. */
3639 if (ecode == ARRAY_TYPE
3640 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
3641 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
3642 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
3645 (INDIRECT_REF, NULL_TREE,
3646 convert_to_fat_pointer (TREE_TYPE (type),
3647 build_unary_op (ADDR_EXPR,
3650 /* Do something very similar for converting one unconstrained
3651 array to another. */
3652 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
3654 build_unary_op (INDIRECT_REF, NULL_TREE,
3655 convert (TREE_TYPE (type),
3656 build_unary_op (ADDR_EXPR,
3662 return fold (convert_to_complex (type, expr));
3669 /* Remove all conversions that are done in EXP. This includes converting
3670 from a padded type or to a justified modular type. If TRUE_ADDRESS
3671 is true, always return the address of the containing object even if
3672 the address is not bit-aligned. */
3675 remove_conversions (tree exp, bool true_address)
3677 switch (TREE_CODE (exp))
     /* A constructor for a justified modular record wraps the real value in
        its single field; recurse into that value.  */
3681 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3682 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
3684 remove_conversions (VEC_index (constructor_elt,
3685 CONSTRUCTOR_ELTS (exp), 0)->value,
     /* A COMPONENT_REF into a padding record is just an unpadding
        operation; strip it and recurse on the padded object.  */
3690 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
3691 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
3692 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3695 case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
3696 case NOP_EXPR: case CONVERT_EXPR:
3697 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3706 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
3707 refers to the underlying array. If its type has TYPE_CONTAINS_TEMPLATE_P,
3708 likewise return an expression pointing to the underlying array. */
3711 maybe_unconstrained_array (tree exp)
3713 enum tree_code code = TREE_CODE (exp);
3716 switch (TREE_CODE (TREE_TYPE (exp)))
3718 case UNCONSTRAINED_ARRAY_TYPE:
3719 if (code == UNCONSTRAINED_ARRAY_REF)
     /* Dereference the P_ARRAY pointer held in the fat pointer to get at
        the array data itself.  */
3722 = build_unary_op (INDIRECT_REF, NULL_TREE,
3723 build_component_ref (TREE_OPERAND (exp, 0),
3724 get_identifier ("P_ARRAY"),
3726 TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp);
3730 else if (code == NULL_EXPR)
     /* For a NULL_EXPR, rebuild it with the array type extracted from the
        fat pointer's P_ARRAY field type.  */
3731 return build1 (NULL_EXPR,
3732 TREE_TYPE (TREE_TYPE (TYPE_FIELDS
3733 (TREE_TYPE (TREE_TYPE (exp))))),
3734 TREE_OPERAND (exp, 0));
3737 /* If this is a padded type, convert to the unpadded type and see if
3738 it contains a template. */
3739 if (TYPE_IS_PADDING_P (TREE_TYPE (exp)))
3741 new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp))), exp);
3742 if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
3743 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (new)))
3745 build_component_ref (new, NULL_TREE,
3746 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (new))),
     /* Not padded: if the record directly contains a template, the array
        is its second field.  */
3749 else if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
3751 build_component_ref (exp, NULL_TREE,
3752 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))), 0);
3762 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
3763 If NOTRUNC_P is true, truncation operations should be suppressed. */
3766 unchecked_convert (tree type, tree expr, bool notrunc_p)
3768 tree etype = TREE_TYPE (expr);
3770 /* If the expression is already the right type, we are done. */
3774 /* If both types are integral just do a normal conversion.
3775 Likewise for a conversion to an unconstrained array. */
3776 if ((((INTEGRAL_TYPE_P (type)
3777 && !(TREE_CODE (type) == INTEGER_TYPE
3778 && TYPE_VAX_FLOATING_POINT_P (type)))
3779 || (POINTER_TYPE_P (type) && ! TYPE_THIN_POINTER_P (type))
3780 || (TREE_CODE (type) == RECORD_TYPE
3781 && TYPE_JUSTIFIED_MODULAR_P (type)))
3782 && ((INTEGRAL_TYPE_P (etype)
3783 && !(TREE_CODE (etype) == INTEGER_TYPE
3784 && TYPE_VAX_FLOATING_POINT_P (etype)))
3785 || (POINTER_TYPE_P (etype) && !TYPE_THIN_POINTER_P (etype))
3786 || (TREE_CODE (etype) == RECORD_TYPE
3787 && TYPE_JUSTIFIED_MODULAR_P (etype))))
3788 || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3791 bool final_unchecked = false;
     /* A biased source must first be viewed as its unbiased copy so the
        conversion does not reapply the bias.  */
3793 if (TREE_CODE (etype) == INTEGER_TYPE
3794 && TYPE_BIASED_REPRESENTATION_P (etype))
3796 tree ntype = copy_type (etype);
3798 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
3799 TYPE_MAIN_VARIANT (ntype) = ntype;
3800 expr = build1 (NOP_EXPR, ntype, expr);
     /* Likewise, a biased target is reached through an unbiased copy.  */
3803 if (TREE_CODE (type) == INTEGER_TYPE
3804 && TYPE_BIASED_REPRESENTATION_P (type))
3806 rtype = copy_type (type);
3807 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
3808 TYPE_MAIN_VARIANT (rtype) = rtype;
3811 /* We have another special case: if we are unchecked converting subtype
3812 into a base type, we need to ensure that VRP doesn't propagate range
3813 information since this conversion may be done precisely to validate
3814 that the object is within the range it is supposed to have. */
3815 else if (TREE_CODE (expr) != INTEGER_CST
3816 && TREE_CODE (type) == INTEGER_TYPE && !TREE_TYPE (type)
3817 && ((TREE_CODE (etype) == INTEGER_TYPE && TREE_TYPE (etype))
3818 || TREE_CODE (etype) == ENUMERAL_TYPE
3819 || TREE_CODE (etype) == BOOLEAN_TYPE))
3821 /* The optimization barrier is a VIEW_CONVERT_EXPR node; moreover,
3822 in order not to be deemed a useless type conversion, it must
3823 be from subtype to base type.
3825 ??? This may raise addressability and/or aliasing issues because
3826 VIEW_CONVERT_EXPR gets gimplified as an lvalue, thus causing the
3827 address of its operand to be taken if it is deemed addressable
3828 and not already in GIMPLE form. */
3829 rtype = gnat_type_for_mode (TYPE_MODE (type), TYPE_UNSIGNED (type));
3830 rtype = copy_type (rtype);
3831 TYPE_MAIN_VARIANT (rtype) = rtype;
3832 TREE_TYPE (rtype) = type;
3833 final_unchecked = true;
3836 expr = convert (rtype, expr);
3838 expr = build1 (final_unchecked ? VIEW_CONVERT_EXPR : NOP_EXPR,
3842 /* If we are converting TO an integral type whose precision is not the
3843 same as its size, first unchecked convert to a record that contains
3844 an object of the output type. Then extract the field. */
3845 else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
3846 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
3847 GET_MODE_BITSIZE (TYPE_MODE (type))))
3849 tree rec_type = make_node (RECORD_TYPE);
3850 tree field = create_field_decl (get_identifier ("OBJ"), type,
3851 rec_type, 1, 0, 0, 0);
3853 TYPE_FIELDS (rec_type) = field;
3854 layout_type (rec_type);
3856 expr = unchecked_convert (rec_type, expr, notrunc_p);
3857 expr = build_component_ref (expr, NULL_TREE, field, 0);
3860 /* Similarly for integral input type whose precision is not equal to its
3862 else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype)
3863 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
3864 GET_MODE_BITSIZE (TYPE_MODE (etype))))
3866 tree rec_type = make_node (RECORD_TYPE);
3868 = create_field_decl (get_identifier ("OBJ"), etype, rec_type,
3871 TYPE_FIELDS (rec_type) = field;
3872 layout_type (rec_type);
     /* Wrap the value in a one-field record, then unchecked convert the
        whole record to the target type.  */
3874 expr = gnat_build_constructor (rec_type, build_tree_list (field, expr));
3875 expr = unchecked_convert (type, expr, notrunc_p);
3878 /* We have a special case when we are converting between two
3879 unconstrained array types. In that case, take the address,
3880 convert the fat pointer types, and dereference. */
3881 else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
3882 && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3883 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
3884 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
3885 build_unary_op (ADDR_EXPR, NULL_TREE,
3889 expr = maybe_unconstrained_array (expr);
3891 /* There's no point in doing two unchecked conversions in a row. */
3892 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3893 expr = TREE_OPERAND (expr, 0);
3895 etype = TREE_TYPE (expr);
3896 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
3899 /* If the result is an integral type whose size is not equal to
3900 the size of the underlying machine type, sign- or zero-extend
3901 the result. We need not do this in the case where the input is
3902 an integral type of the same precision and signedness or if the output
3903 is a biased type or if both the input and output are unsigned. */
3905 && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
3906 && !(TREE_CODE (type) == INTEGER_TYPE
3907 && TYPE_BIASED_REPRESENTATION_P (type))
3908 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
3909 GET_MODE_BITSIZE (TYPE_MODE (type)))
3910 && !(INTEGRAL_TYPE_P (etype)
3911 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
3912 && operand_equal_p (TYPE_RM_SIZE (type),
3913 (TYPE_RM_SIZE (etype) != 0
3914 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
3916 && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
3918 tree base_type = gnat_type_for_mode (TYPE_MODE (type),
3919 TYPE_UNSIGNED (type));
     /* Shift count = machine size - RM size: shifting left then right by
        this amount performs the sign- or zero-extension.  */
3921 = convert (base_type,
3922 size_binop (MINUS_EXPR,
3924 (GET_MODE_BITSIZE (TYPE_MODE (type))),
3925 TYPE_RM_SIZE (type)));
3928 build_binary_op (RSHIFT_EXPR, base_type,
3929 build_binary_op (LSHIFT_EXPR, base_type,
3930 convert (base_type, expr),
3935 /* An unchecked conversion should never raise Constraint_Error. The code
3936 below assumes that GCC's conversion routines overflow the same way that
3937 the underlying hardware does. This is probably true. In the rare case
3938 when it is false, we can rely on the fact that such conversions are
3939 erroneous anyway. */
3940 if (TREE_CODE (expr) == INTEGER_CST)
3941 TREE_OVERFLOW (expr) = 0;
3943 /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
3944 show no longer constant. */
3945 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3946 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
3948 TREE_CONSTANT (expr) = 0;
3953 /* Search the chain of currently available builtin declarations for a node
3954 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
3955 found, if any, or NULL_TREE otherwise. */
3957 builtin_decl_for (tree name)
     /* Linear scan of the builtin_decls vector; identifier nodes are unique,
        so pointer comparison on DECL_NAME suffices.  */
3962 for (i = 0; VEC_iterate(tree, builtin_decls, i, decl); i++)
3963 if (DECL_NAME (decl) == name)
3969 /* Return the appropriate GCC tree code for the specified GNAT type,
3970 the latter being a record type as predicated by Is_Record_Type. */
3973 tree_code_for_record_type (Entity_Id gnat_type)
3975 Node_Id component_list
3976 = Component_List (Type_Definition
3978 (Implementation_Base_Type (gnat_type))));
3981 /* Make this a UNION_TYPE unless it's either not an Unchecked_Union or
3982 we have a non-discriminant field outside a variant. In either case,
3983 it's a RECORD_TYPE. */
3985 if (!Is_Unchecked_Union (gnat_type))
     /* Scan the non-pragma component items: any E_Component outside a
        variant part forces RECORD_TYPE.  */
3988 for (component = First_Non_Pragma (Component_Items (component_list));
3989 Present (component);
3990 component = Next_Non_Pragma (component))
3991 if (Ekind (Defining_Entity (component)) == E_Component)
3997 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
3998 component of an aggregate type. */
4001 type_for_nonaliased_component_p (tree gnu_type)
4003 /* If the type is passed by reference, we may have pointers to the
4004 component so it cannot be made non-aliased. */
4005 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
4008 /* We used to say that any component of aggregate type is aliased
4009 because the front-end may take 'Reference of it. The front-end
4010 has been enhanced in the meantime so as to use a renaming instead
4011 in most cases, but the back-end can probably take the address of
4012 such a component too so we go for the conservative stance.
4014 For instance, we might need the address of any array type, even
4015 if normally passed by copy, to construct a fat pointer if the
4016 component is used as an actual for an unconstrained formal.
4018 Likewise for record types: even if a specific record subtype is
4019 passed by copy, the parent type might be passed by ref (e.g. if
4020 it's of variable size) and we might take the address of a child
4021 component to pass to a parent formal. We have no way to check
4022 for such conditions here. */
4023 if (AGGREGATE_TYPE_P (gnu_type))
4029 /* Perform final processing on global variables. */
4032 gnat_write_global_declarations (void)
4034 /* Proceed to optimize and emit assembly.
4035 FIXME: shouldn't be the front end's responsibility to call this. */
4038 /* Emit debug info for all global declarations. */
4039 emit_debug_global_declarations (VEC_address (tree, global_decls),
4040 VEC_length (tree, global_decls));
4043 #include "gt-ada-utils.h"
4044 #include "gtype-ada.h"