1 /****************************************************************************
3 * GNAT COMPILER COMPONENTS *
7 * C Implementation File *
9 * Copyright (C) 1992-2008, Free Software Foundation, Inc. *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
24 ****************************************************************************/
28 #include "coretypes.h"
41 #include "tree-inline.h"
42 #include "tree-gimple.h"
43 #include "tree-dump.h"
44 #include "pointer-set.h"
/* Provide default values for target macros some back-ends omit.  */

#ifndef MAX_FIXED_MODE_SIZE
#define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
#endif

#ifndef MAX_BITS_PER_WORD
#define MAX_BITS_PER_WORD  BITS_PER_WORD
#endif
68 /* If nonzero, pretend we are allocating at global level. */
71 /* Tree nodes for the various types and decls we create. */
72 tree gnat_std_decls[(int) ADT_LAST];
74 /* Functions to call for each of the possible raise reasons. */
75 tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
77 /* Forward declarations for handlers of attributes. */
78 static tree handle_const_attribute (tree *, tree, tree, int, bool *);
79 static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
81 /* Table of machine-independent internal attributes for Ada. We support
82 this minimal set of attributes to accommodate the Alpha back-end which
83 unconditionally puts them on its builtins. */
84 const struct attribute_spec gnat_internal_attribute_table[] =
86 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
87 { "const", 0, 0, true, false, false, handle_const_attribute },
88 { "nothrow", 0, 0, true, false, false, handle_nothrow_attribute },
89 { NULL, 0, 0, false, false, false, NULL }
92 /* Associates a GNAT tree node to a GCC tree node. It is used in
93 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
94 of `save_gnu_tree' for more info. */
95 static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
97 #define GET_GNU_TREE(GNAT_ENTITY) \
98 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
100 #define SET_GNU_TREE(GNAT_ENTITY,VAL) \
101 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
103 #define PRESENT_GNU_TREE(GNAT_ENTITY) \
104 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
106 /* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
107 static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;
109 #define GET_DUMMY_NODE(GNAT_ENTITY) \
110 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
112 #define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
113 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
115 #define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
116 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
118 /* This variable keeps a table for types for each precision so that we only
119 allocate each of them once. Signed and unsigned types are kept separate.
121 Note that these types are only used when fold-const requests something
122 special. Perhaps we should NOT share these types; we'll see how it
124 static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
126 /* Likewise for float types, but record these by mode. */
127 static GTY(()) tree float_types[NUM_MACHINE_MODES];
129 /* For each binding contour we allocate a binding_level structure to indicate
130 the binding depth. */
132 struct gnat_binding_level GTY((chain_next ("%h.chain")))
134 /* The binding level containing this one (the enclosing binding level). */
135 struct gnat_binding_level *chain;
136 /* The BLOCK node for this level. */
138 /* If nonzero, the setjmp buffer that needs to be updated for any
139 variable-sized definition within this context. */
143 /* The binding level currently in effect. */
144 static GTY(()) struct gnat_binding_level *current_binding_level;
146 /* A chain of gnat_binding_level structures awaiting reuse. */
147 static GTY((deletable)) struct gnat_binding_level *free_binding_level;
149 /* An array of global declarations. */
150 static GTY(()) VEC(tree,gc) *global_decls;
152 /* An array of builtin declarations. */
153 static GTY(()) VEC(tree,gc) *builtin_decls;
155 /* An array of global renaming pointers. */
156 static GTY(()) VEC(tree,gc) *global_renaming_pointers;
158 /* A chain of unused BLOCK nodes. */
159 static GTY((deletable)) tree free_block_chain;
161 static void gnat_install_builtins (void);
162 static tree merge_sizes (tree, tree, tree, bool, bool);
163 static tree compute_related_constant (tree, tree);
164 static tree split_plus (tree, tree *);
165 static void gnat_gimplify_function (tree);
166 static tree float_type_for_precision (int, enum machine_mode);
167 static tree convert_to_fat_pointer (tree, tree);
168 static tree convert_to_thin_pointer (tree, tree);
169 static tree make_descriptor_field (const char *,tree, tree, tree);
170 static bool potential_alignment_gap (tree, tree, tree);
172 /* Initialize the association of GNAT nodes to GCC trees. */
175 init_gnat_to_gnu (void)
177 associate_gnat_to_gnu
178 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
181 /* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
182 which is to be associated with GNAT_ENTITY. Such GCC tree node is always
183 a ..._DECL node. If NO_CHECK is nonzero, the latter check is suppressed.
185 If GNU_DECL is zero, a previous association is to be reset. */
188 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
190 /* Check that GNAT_ENTITY is not already defined and that it is being set
191 to something which is a decl. Raise gigi 401 if not. Usually, this
192 means GNAT_ENTITY is defined twice, but occasionally is due to some
194 gcc_assert (!(gnu_decl
195 && (PRESENT_GNU_TREE (gnat_entity)
196 || (!no_check && !DECL_P (gnu_decl)))));
198 SET_GNU_TREE (gnat_entity, gnu_decl);
201 /* GNAT_ENTITY is a GNAT tree node for a defining identifier.
202 Return the ..._DECL node that was associated with it. If there is no tree
203 node associated with GNAT_ENTITY, abort.
205 In some cases, such as delayed elaboration or expressions that need to
206 be elaborated only once, GNAT_ENTITY is really not an entity. */
209 get_gnu_tree (Entity_Id gnat_entity)
211 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
212 return GET_GNU_TREE (gnat_entity);
215 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
218 present_gnu_tree (Entity_Id gnat_entity)
220 return PRESENT_GNU_TREE (gnat_entity);
223 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
226 init_dummy_type (void)
229 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
232 /* Make a dummy type corresponding to GNAT_TYPE. */
235 make_dummy_type (Entity_Id gnat_type)
237 Entity_Id gnat_underlying = Gigi_Equivalent_Type (gnat_type);
240 /* If there is an equivalent type, get its underlying type. */
241 if (Present (gnat_underlying))
242 gnat_underlying = Underlying_Type (gnat_underlying);
244 /* If there was no equivalent type (can only happen when just annotating
245 types) or underlying type, go back to the original type. */
246 if (No (gnat_underlying))
247 gnat_underlying = gnat_type;
249 /* If it there already a dummy type, use that one. Else make one. */
250 if (PRESENT_DUMMY_NODE (gnat_underlying))
251 return GET_DUMMY_NODE (gnat_underlying);
253 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
255 gnu_type = make_node (Is_Record_Type (gnat_underlying)
256 ? tree_code_for_record_type (gnat_underlying)
258 TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
259 TYPE_DUMMY_P (gnu_type) = 1;
260 if (AGGREGATE_TYPE_P (gnu_type))
262 TYPE_STUB_DECL (gnu_type) = build_decl (TYPE_DECL, NULL_TREE, gnu_type);
263 TYPE_BY_REFERENCE_P (gnu_type) = Is_By_Reference_Type (gnat_type);
266 SET_DUMMY_NODE (gnat_underlying, gnu_type);
271 /* Return nonzero if we are currently in the global binding level. */
274 global_bindings_p (void)
276 return ((force_global || !current_function_decl) ? -1 : 0);
279 /* Enter a new binding level. */
284 struct gnat_binding_level *newlevel = NULL;
286 /* Reuse a struct for this binding level, if there is one. */
287 if (free_binding_level)
289 newlevel = free_binding_level;
290 free_binding_level = free_binding_level->chain;
294 = (struct gnat_binding_level *)
295 ggc_alloc (sizeof (struct gnat_binding_level));
297 /* Use a free BLOCK, if any; otherwise, allocate one. */
298 if (free_block_chain)
300 newlevel->block = free_block_chain;
301 free_block_chain = BLOCK_CHAIN (free_block_chain);
302 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
305 newlevel->block = make_node (BLOCK);
307 /* Point the BLOCK we just made to its parent. */
308 if (current_binding_level)
309 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
311 BLOCK_VARS (newlevel->block) = BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
312 TREE_USED (newlevel->block) = 1;
314 /* Add this level to the front of the chain (stack) of levels that are
316 newlevel->chain = current_binding_level;
317 newlevel->jmpbuf_decl = NULL_TREE;
318 current_binding_level = newlevel;
321 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
322 and point FNDECL to this BLOCK. */
325 set_current_block_context (tree fndecl)
327 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
328 DECL_INITIAL (fndecl) = current_binding_level->block;
331 /* Set the jmpbuf_decl for the current binding level to DECL. */
334 set_block_jmpbuf_decl (tree decl)
336 current_binding_level->jmpbuf_decl = decl;
339 /* Get the jmpbuf_decl, if any, for the current binding level. */
342 get_block_jmpbuf_decl ()
344 return current_binding_level->jmpbuf_decl;
347 /* Exit a binding level. Set any BLOCK into the current code group. */
352 struct gnat_binding_level *level = current_binding_level;
353 tree block = level->block;
355 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
356 BLOCK_SUBBLOCKS (block) = nreverse (BLOCK_SUBBLOCKS (block));
358 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
359 are no variables free the block and merge its subblocks into those of its
360 parent block. Otherwise, add it to the list of its parent. */
361 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
363 else if (BLOCK_VARS (block) == NULL_TREE)
365 BLOCK_SUBBLOCKS (level->chain->block)
366 = chainon (BLOCK_SUBBLOCKS (block),
367 BLOCK_SUBBLOCKS (level->chain->block));
368 BLOCK_CHAIN (block) = free_block_chain;
369 free_block_chain = block;
373 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
374 BLOCK_SUBBLOCKS (level->chain->block) = block;
375 TREE_USED (block) = 1;
376 set_block_for_group (block);
379 /* Free this binding structure. */
380 current_binding_level = level->chain;
381 level->chain = free_binding_level;
382 free_binding_level = level;
385 /* Insert BLOCK at the end of the list of subblocks of the
386 current binding level. This is used when a BIND_EXPR is expanded,
387 to handle the BLOCK node inside the BIND_EXPR. */
390 insert_block (tree block)
392 TREE_USED (block) = 1;
393 TREE_CHAIN (block) = BLOCK_SUBBLOCKS (current_binding_level->block);
394 BLOCK_SUBBLOCKS (current_binding_level->block) = block;
397 /* Records a ..._DECL node DECL as belonging to the current lexical scope
398 and uses GNAT_NODE for location information and propagating flags. */
401 gnat_pushdecl (tree decl, Node_Id gnat_node)
403 /* If at top level, there is no context. But PARM_DECLs always go in the
404 level of its function. */
405 if (global_bindings_p () && TREE_CODE (decl) != PARM_DECL)
406 DECL_CONTEXT (decl) = 0;
409 DECL_CONTEXT (decl) = current_function_decl;
411 /* Functions imported in another function are not really nested. */
412 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_PUBLIC (decl))
413 DECL_NO_STATIC_CHAIN (decl) = 1;
416 TREE_NO_WARNING (decl) = (gnat_node == Empty || Warnings_Off (gnat_node));
418 /* Set the location of DECL and emit a declaration for it. */
419 if (Present (gnat_node))
420 Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
421 add_decl_expr (decl, gnat_node);
423 /* Put the declaration on the list. The list of declarations is in reverse
424 order. The list will be reversed later. Put global variables in the
425 globals list and builtin functions in a dedicated list to speed up
426 further lookups. Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into
427 the list, as they will cause trouble with the debugger and aren't needed
429 if (TREE_CODE (decl) != TYPE_DECL
430 || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
432 if (global_bindings_p ())
434 VEC_safe_push (tree, gc, global_decls, decl);
436 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
437 VEC_safe_push (tree, gc, builtin_decls, decl);
441 TREE_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
442 BLOCK_VARS (current_binding_level->block) = decl;
446 /* For the declaration of a type, set its name if it either is not already
447 set, was set to an IDENTIFIER_NODE, indicating an internal name,
448 or if the previous type name was not derived from a source name.
449 We'd rather have the type named with a real name and all the pointer
450 types to the same object have the same POINTER_TYPE node. Code in the
451 equivalent function of c-decl.c makes a copy of the type node here, but
452 that may cause us trouble with incomplete types. We make an exception
453 for fat pointer types because the compiler automatically builds them
454 for unconstrained array types and the debugger uses them to represent
455 both these and pointers to these. */
456 if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
458 tree t = TREE_TYPE (decl);
460 if (!TYPE_NAME (t) || TREE_CODE (TYPE_NAME (t)) == IDENTIFIER_NODE)
461 TYPE_NAME (t) = decl;
462 else if (TYPE_FAT_POINTER_P (t))
464 tree tt = build_variant_type_copy (t);
465 TYPE_NAME (tt) = decl;
466 TREE_USED (tt) = TREE_USED (t);
467 TREE_TYPE (decl) = tt;
468 DECL_ORIGINAL_TYPE (decl) = t;
470 else if (DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
471 TYPE_NAME (t) = decl;
475 /* Do little here. Set up the standard declarations later after the
476 front end has been run. */
479 gnat_init_decl_processing (void)
481 /* Make the binding_level structure for global names. */
482 current_function_decl = 0;
483 current_binding_level = 0;
484 free_binding_level = 0;
487 build_common_tree_nodes (true, true);
489 /* In Ada, we use a signed type for SIZETYPE. Use the signed type
490 corresponding to the size of Pmode. In most cases when ptr_mode and
491 Pmode differ, C will use the width of ptr_mode as sizetype. But we get
492 far better code using the width of Pmode. Make this here since we need
493 this before we can expand the GNAT types. */
494 size_type_node = gnat_type_for_size (GET_MODE_BITSIZE (Pmode), 0);
495 set_sizetype (size_type_node);
496 build_common_tree_nodes_2 (0);
498 ptr_void_type_node = build_pointer_type (void_type_node);
500 gnat_install_builtins ();
503 /* Install the builtin functions we might need. */
506 gnat_install_builtins ()
508 /* Builtins used by generic middle-end optimizers. */
509 build_common_builtin_nodes ();
511 /* Target specific builtins, such as the AltiVec family on ppc. */
512 targetm.init_builtins ();
515 /* Create the predefined scalar types such as `integer_type_node' needed
516 in the gcc back-end and initialize the global binding level. */
519 init_gigi_decls (tree long_long_float_type, tree exception_type)
524 /* Set the types that GCC and Gigi use from the front end. We would like
525 to do this for char_type_node, but it needs to correspond to the C
527 if (TREE_CODE (TREE_TYPE (long_long_float_type)) == INTEGER_TYPE)
529 /* In this case, the builtin floating point types are VAX float,
530 so make up a type for use. */
531 longest_float_type_node = make_node (REAL_TYPE);
532 TYPE_PRECISION (longest_float_type_node) = LONG_DOUBLE_TYPE_SIZE;
533 layout_type (longest_float_type_node);
534 create_type_decl (get_identifier ("longest float type"),
535 longest_float_type_node, NULL, false, true, Empty);
538 longest_float_type_node = TREE_TYPE (long_long_float_type);
540 except_type_node = TREE_TYPE (exception_type);
542 unsigned_type_node = gnat_type_for_size (INT_TYPE_SIZE, 1);
543 create_type_decl (get_identifier ("unsigned int"), unsigned_type_node,
544 NULL, false, true, Empty);
546 void_type_decl_node = create_type_decl (get_identifier ("void"),
547 void_type_node, NULL, false, true,
550 void_ftype = build_function_type (void_type_node, NULL_TREE);
551 ptr_void_ftype = build_pointer_type (void_ftype);
553 /* Now declare runtime functions. */
554 endlink = tree_cons (NULL_TREE, void_type_node, NULL_TREE);
556 /* malloc is a function declaration tree for a function to allocate
558 malloc_decl = create_subprog_decl (get_identifier ("__gnat_malloc"),
560 build_function_type (ptr_void_type_node,
561 tree_cons (NULL_TREE,
564 NULL_TREE, false, true, true, NULL,
566 DECL_IS_MALLOC (malloc_decl) = 1;
568 /* free is a function declaration tree for a function to free memory. */
570 = create_subprog_decl (get_identifier ("__gnat_free"), NULL_TREE,
571 build_function_type (void_type_node,
572 tree_cons (NULL_TREE,
575 NULL_TREE, false, true, true, NULL, Empty);
577 /* Make the types and functions used for exception processing. */
579 = build_array_type (gnat_type_for_mode (Pmode, 0),
580 build_index_type (build_int_cst (NULL_TREE, 5)));
581 create_type_decl (get_identifier ("JMPBUF_T"), jmpbuf_type, NULL,
583 jmpbuf_ptr_type = build_pointer_type (jmpbuf_type);
585 /* Functions to get and set the jumpbuf pointer for the current thread. */
587 = create_subprog_decl
588 (get_identifier ("system__soft_links__get_jmpbuf_address_soft"),
589 NULL_TREE, build_function_type (jmpbuf_ptr_type, NULL_TREE),
590 NULL_TREE, false, true, true, NULL, Empty);
591 /* Avoid creating superfluous edges to __builtin_setjmp receivers. */
592 DECL_IS_PURE (get_jmpbuf_decl) = 1;
595 = create_subprog_decl
596 (get_identifier ("system__soft_links__set_jmpbuf_address_soft"),
598 build_function_type (void_type_node,
599 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
600 NULL_TREE, false, true, true, NULL, Empty);
602 /* Function to get the current exception. */
604 = create_subprog_decl
605 (get_identifier ("system__soft_links__get_gnat_exception"),
607 build_function_type (build_pointer_type (except_type_node), NULL_TREE),
608 NULL_TREE, false, true, true, NULL, Empty);
609 /* Avoid creating superfluous edges to __builtin_setjmp receivers. */
610 DECL_IS_PURE (get_excptr_decl) = 1;
612 /* Functions that raise exceptions. */
614 = create_subprog_decl
615 (get_identifier ("__gnat_raise_nodefer_with_msg"), NULL_TREE,
616 build_function_type (void_type_node,
617 tree_cons (NULL_TREE,
618 build_pointer_type (except_type_node),
620 NULL_TREE, false, true, true, NULL, Empty);
622 /* Dummy objects to materialize "others" and "all others" in the exception
623 tables. These are exported by a-exexpr.adb, so see this unit for the
627 = create_var_decl (get_identifier ("OTHERS"),
628 get_identifier ("__gnat_others_value"),
629 integer_type_node, 0, 1, 0, 1, 1, 0, Empty);
632 = create_var_decl (get_identifier ("ALL_OTHERS"),
633 get_identifier ("__gnat_all_others_value"),
634 integer_type_node, 0, 1, 0, 1, 1, 0, Empty);
636 /* Hooks to call when entering/leaving an exception handler. */
638 = create_subprog_decl (get_identifier ("__gnat_begin_handler"), NULL_TREE,
639 build_function_type (void_type_node,
640 tree_cons (NULL_TREE,
643 NULL_TREE, false, true, true, NULL, Empty);
646 = create_subprog_decl (get_identifier ("__gnat_end_handler"), NULL_TREE,
647 build_function_type (void_type_node,
648 tree_cons (NULL_TREE,
651 NULL_TREE, false, true, true, NULL, Empty);
653 /* If in no exception handlers mode, all raise statements are redirected to
654 __gnat_last_chance_handler. No need to redefine raise_nodefer_decl, since
655 this procedure will never be called in this mode. */
656 if (No_Exception_Handlers_Set ())
659 = create_subprog_decl
660 (get_identifier ("__gnat_last_chance_handler"), NULL_TREE,
661 build_function_type (void_type_node,
662 tree_cons (NULL_TREE,
663 build_pointer_type (char_type_node),
664 tree_cons (NULL_TREE,
667 NULL_TREE, false, true, true, NULL, Empty);
669 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
670 gnat_raise_decls[i] = decl;
673 /* Otherwise, make one decl for each exception reason. */
674 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
678 sprintf (name, "__gnat_rcheck_%.2d", i);
680 = create_subprog_decl
681 (get_identifier (name), NULL_TREE,
682 build_function_type (void_type_node,
683 tree_cons (NULL_TREE,
686 tree_cons (NULL_TREE,
689 NULL_TREE, false, true, true, NULL, Empty);
692 /* Indicate that these never return. */
693 TREE_THIS_VOLATILE (raise_nodefer_decl) = 1;
694 TREE_SIDE_EFFECTS (raise_nodefer_decl) = 1;
695 TREE_TYPE (raise_nodefer_decl)
696 = build_qualified_type (TREE_TYPE (raise_nodefer_decl),
699 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
701 TREE_THIS_VOLATILE (gnat_raise_decls[i]) = 1;
702 TREE_SIDE_EFFECTS (gnat_raise_decls[i]) = 1;
703 TREE_TYPE (gnat_raise_decls[i])
704 = build_qualified_type (TREE_TYPE (gnat_raise_decls[i]),
708 /* setjmp returns an integer and has one operand, which is a pointer to
711 = create_subprog_decl
712 (get_identifier ("__builtin_setjmp"), NULL_TREE,
713 build_function_type (integer_type_node,
714 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
715 NULL_TREE, false, true, true, NULL, Empty);
717 DECL_BUILT_IN_CLASS (setjmp_decl) = BUILT_IN_NORMAL;
718 DECL_FUNCTION_CODE (setjmp_decl) = BUILT_IN_SETJMP;
720 /* update_setjmp_buf updates a setjmp buffer from the current stack pointer
722 update_setjmp_buf_decl
723 = create_subprog_decl
724 (get_identifier ("__builtin_update_setjmp_buf"), NULL_TREE,
725 build_function_type (void_type_node,
726 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
727 NULL_TREE, false, true, true, NULL, Empty);
729 DECL_BUILT_IN_CLASS (update_setjmp_buf_decl) = BUILT_IN_NORMAL;
730 DECL_FUNCTION_CODE (update_setjmp_buf_decl) = BUILT_IN_UPDATE_SETJMP_BUF;
732 main_identifier_node = get_identifier ("main");
735 /* Given a record type RECORD_TYPE and a chain of FIELD_DECL nodes FIELDLIST,
736 finish constructing the record or union type. If REP_LEVEL is zero, this
737 record has no representation clause and so will be entirely laid out here.
738 If REP_LEVEL is one, this record has a representation clause and has been
739 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
740 this record is derived from a parent record and thus inherits its layout;
741 only make a pass on the fields to finalize them. If DO_NOT_FINALIZE is
742 true, the record type is expected to be modified afterwards so it will
743 not be sent to the back-end for finalization. */
746 finish_record_type (tree record_type, tree fieldlist, int rep_level,
747 bool do_not_finalize)
749 enum tree_code code = TREE_CODE (record_type);
750 tree name = TYPE_NAME (record_type);
751 tree ada_size = bitsize_zero_node;
752 tree size = bitsize_zero_node;
753 bool had_size = TYPE_SIZE (record_type) != 0;
754 bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
757 if (name && TREE_CODE (name) == TYPE_DECL)
758 name = DECL_NAME (name);
760 TYPE_FIELDS (record_type) = fieldlist;
761 TYPE_STUB_DECL (record_type) = build_decl (TYPE_DECL, name, record_type);
763 /* We don't need both the typedef name and the record name output in
764 the debugging information, since they are the same. */
765 DECL_ARTIFICIAL (TYPE_STUB_DECL (record_type)) = 1;
767 /* Globally initialize the record first. If this is a rep'ed record,
768 that just means some initializations; otherwise, layout the record. */
771 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
772 TYPE_MODE (record_type) = BLKmode;
775 TYPE_SIZE_UNIT (record_type) = size_zero_node;
777 TYPE_SIZE (record_type) = bitsize_zero_node;
779 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
780 out just like a UNION_TYPE, since the size will be fixed. */
781 else if (code == QUAL_UNION_TYPE)
786 /* Ensure there isn't a size already set. There can be in an error
787 case where there is a rep clause but all fields have errors and
788 no longer have a position. */
789 TYPE_SIZE (record_type) = 0;
790 layout_type (record_type);
793 /* At this point, the position and size of each field is known. It was
794 either set before entry by a rep clause, or by laying out the type above.
796 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
797 to compute the Ada size; the GCC size and alignment (for rep'ed records
798 that are not padding types); and the mode (for rep'ed records). We also
799 clear the DECL_BIT_FIELD indication for the cases we know have not been
800 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
802 if (code == QUAL_UNION_TYPE)
803 fieldlist = nreverse (fieldlist);
805 for (field = fieldlist; field; field = TREE_CHAIN (field))
807 tree pos = bit_position (field);
809 tree type = TREE_TYPE (field);
810 tree this_size = DECL_SIZE (field);
811 tree this_ada_size = DECL_SIZE (field);
813 if ((TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
814 || TREE_CODE (type) == QUAL_UNION_TYPE)
815 && !TYPE_IS_FAT_POINTER_P (type)
816 && !TYPE_CONTAINS_TEMPLATE_P (type)
817 && TYPE_ADA_SIZE (type))
818 this_ada_size = TYPE_ADA_SIZE (type);
820 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
821 if (DECL_BIT_FIELD (field) && !STRICT_ALIGNMENT
822 && value_factor_p (pos, BITS_PER_UNIT)
823 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
824 DECL_BIT_FIELD (field) = 0;
826 /* If we still have DECL_BIT_FIELD set at this point, we know the field
827 is technically not addressable. Except that it can actually be
828 addressed if the field is BLKmode and happens to be properly
830 DECL_NONADDRESSABLE_P (field)
831 |= DECL_BIT_FIELD (field) && DECL_MODE (field) != BLKmode;
833 if ((rep_level > 0) && !DECL_BIT_FIELD (field))
834 TYPE_ALIGN (record_type)
835 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
840 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
841 size = size_binop (MAX_EXPR, size, this_size);
844 case QUAL_UNION_TYPE:
846 = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
847 this_ada_size, ada_size);
848 size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
853 /* Since we know here that all fields are sorted in order of
854 increasing bit position, the size of the record is one
855 higher than the ending bit of the last field processed
856 unless we have a rep clause, since in that case we might
857 have a field outside a QUAL_UNION_TYPE that has a higher ending
858 position. So use a MAX in that case. Also, if this field is a
859 QUAL_UNION_TYPE, we need to take into account the previous size in
860 the case of empty variants. */
862 = merge_sizes (ada_size, pos, this_ada_size,
863 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
865 = merge_sizes (size, pos, this_size,
866 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
874 if (code == QUAL_UNION_TYPE)
875 nreverse (fieldlist);
879 /* If this is a padding record, we never want to make the size smaller
880 than what was specified in it, if any. */
881 if (TREE_CODE (record_type) == RECORD_TYPE
882 && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
883 size = TYPE_SIZE (record_type);
885 /* Now set any of the values we've just computed that apply. */
886 if (!TYPE_IS_FAT_POINTER_P (record_type)
887 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
888 SET_TYPE_ADA_SIZE (record_type, ada_size);
892 tree size_unit = had_size_unit
893 ? TYPE_SIZE_UNIT (record_type)
895 size_binop (CEIL_DIV_EXPR, size,
897 unsigned int align = TYPE_ALIGN (record_type);
899 TYPE_SIZE (record_type) = variable_size (round_up (size, align));
900 TYPE_SIZE_UNIT (record_type)
901 = variable_size (round_up (size_unit, align / BITS_PER_UNIT));
903 compute_record_mode (record_type);
907 if (!do_not_finalize)
908 rest_of_record_type_compilation (record_type);
911 /* Wrap up compilation of RECORD_TYPE, i.e. most notably output all
912 the debug information associated with it. It need not be invoked
913 directly in most cases since finish_record_type takes care of doing
914 so, unless explicitly requested not to through DO_NOT_FINALIZE. */
917 rest_of_record_type_compilation (tree record_type)
919 tree fieldlist = TYPE_FIELDS (record_type);
921 enum tree_code code = TREE_CODE (record_type);
922 bool var_size = false;
924 for (field = fieldlist; field; field = TREE_CHAIN (field))
926 /* We need to make an XVE/XVU record if any field has variable size,
927 whether or not the record does. For example, if we have a union,
928 it may be that all fields, rounded up to the alignment, have the
929 same size, in which case we'll use that size. But the debug
930 output routines (except Dwarf2) won't be able to output the fields,
931 so we need to make the special record. */
932 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
933 /* If a field has a non-constant qualifier, the record will have
934 variable size too. */
935 || (code == QUAL_UNION_TYPE
936 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
943 /* If this record is of variable size, rename it so that the
944 debugger knows it is and make a new, parallel, record
945 that tells the debugger how the record is laid out. See
946 exp_dbug.ads. But don't do this for records that are padding
947 since they confuse GDB. */
949 && !(TREE_CODE (record_type) == RECORD_TYPE
950 && TYPE_IS_PADDING_P (record_type)))
953 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
954 ? UNION_TYPE : TREE_CODE (record_type));
955 tree orig_name = TYPE_NAME (record_type);
957 = (TREE_CODE (orig_name) == TYPE_DECL ? DECL_NAME (orig_name)
960 = concat_id_with_name (orig_id,
961 TREE_CODE (record_type) == QUAL_UNION_TYPE
963 tree last_pos = bitsize_zero_node;
965 tree prev_old_field = 0;
967 TYPE_NAME (new_record_type) = new_id;
968 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
969 TYPE_STUB_DECL (new_record_type)
970 = build_decl (TYPE_DECL, new_id, new_record_type);
971 DECL_ARTIFICIAL (TYPE_STUB_DECL (new_record_type)) = 1;
972 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
973 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
974 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
975 TYPE_SIZE_UNIT (new_record_type)
976 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
978 /* Now scan all the fields, replacing each field with a new
979 field corresponding to the new encoding. */
980 for (old_field = TYPE_FIELDS (record_type); old_field;
981 old_field = TREE_CHAIN (old_field))
983 tree field_type = TREE_TYPE (old_field);
984 tree field_name = DECL_NAME (old_field);
986 tree curpos = bit_position (old_field);
988 unsigned int align = 0;
991 /* See how the position was modified from the last position.
993 There are two basic cases we support: a value was added
994 to the last position or the last position was rounded to
995 a boundary and they something was added. Check for the
996 first case first. If not, see if there is any evidence
997 of rounding. If so, round the last position and try
1000 If this is a union, the position can be taken as zero. */
1002 if (TREE_CODE (new_record_type) == UNION_TYPE)
1003 pos = bitsize_zero_node, align = 0;
1005 pos = compute_related_constant (curpos, last_pos);
1007 if (!pos && TREE_CODE (curpos) == MULT_EXPR
1008 && host_integerp (TREE_OPERAND (curpos, 1), 1))
1010 tree offset = TREE_OPERAND (curpos, 0);
1011 align = tree_low_cst (TREE_OPERAND (curpos, 1), 1);
1013 /* Strip off any conversions. */
1014 while (TREE_CODE (offset) == NON_LVALUE_EXPR
1015 || TREE_CODE (offset) == NOP_EXPR
1016 || TREE_CODE (offset) == CONVERT_EXPR)
1017 offset = TREE_OPERAND (offset, 0);
1019 /* An offset which is a bitwise AND with a negative power of 2
1020 means an alignment corresponding to this power of 2. */
1021 if (TREE_CODE (offset) == BIT_AND_EXPR
1022 && host_integerp (TREE_OPERAND (offset, 1), 0)
1023 && tree_int_cst_sgn (TREE_OPERAND (offset, 1)) < 0)
1026 = - tree_low_cst (TREE_OPERAND (offset, 1), 0);
1027 if (exact_log2 (pow) > 0)
1031 pos = compute_related_constant (curpos,
1032 round_up (last_pos, align));
1034 else if (!pos && TREE_CODE (curpos) == PLUS_EXPR
1035 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
1036 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
1037 && host_integerp (TREE_OPERAND
1038 (TREE_OPERAND (curpos, 0), 1),
1043 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
1044 pos = compute_related_constant (curpos,
1045 round_up (last_pos, align));
1047 else if (potential_alignment_gap (prev_old_field, old_field,
1050 align = TYPE_ALIGN (field_type);
1051 pos = compute_related_constant (curpos,
1052 round_up (last_pos, align));
1055 /* If we can't compute a position, set it to zero.
1057 ??? We really should abort here, but it's too much work
1058 to get this correct for all cases. */
1061 pos = bitsize_zero_node;
1063 /* See if this type is variable-sized and make a pointer type
1064 and indicate the indirection if so. Beware that the debug
1065 back-end may adjust the position computed above according
1066 to the alignment of the field type, i.e. the pointer type
1067 in this case, if we don't preventively counter that. */
1068 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
1070 field_type = build_pointer_type (field_type);
1071 if (align != 0 && TYPE_ALIGN (field_type) > align)
1073 field_type = copy_node (field_type);
1074 TYPE_ALIGN (field_type) = align;
1079 /* Make a new field name, if necessary. */
1080 if (var || align != 0)
1085 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
1086 align / BITS_PER_UNIT);
1088 strcpy (suffix, "XVL");
1090 field_name = concat_id_with_name (field_name, suffix);
1093 new_field = create_field_decl (field_name, field_type,
1095 DECL_SIZE (old_field), pos, 0);
1096 TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
1097 TYPE_FIELDS (new_record_type) = new_field;
1099 /* If old_field is a QUAL_UNION_TYPE, take its size as being
1100 zero. The only time it's not the last field of the record
1101 is when there are other components at fixed positions after
1102 it (meaning there was a rep clause for every field) and we
1103 want to be able to encode them. */
1104 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
1105 (TREE_CODE (TREE_TYPE (old_field))
1108 : DECL_SIZE (old_field));
1109 prev_old_field = old_field;
1112 TYPE_FIELDS (new_record_type)
1113 = nreverse (TYPE_FIELDS (new_record_type));
1115 rest_of_type_decl_compilation (TYPE_STUB_DECL (new_record_type));
1118 rest_of_type_decl_compilation (TYPE_STUB_DECL (record_type));
1121 /* Utility function of above to merge LAST_SIZE, the previous size of a record
1122 with FIRST_BIT and SIZE that describe a field. SPECIAL is nonzero
1123 if this represents a QUAL_UNION_TYPE in which case we must look for
1124 COND_EXPRs and replace a value of zero with the old size. If HAS_REP
1125 is nonzero, we must take the MAX of the end position of this field
1126 with LAST_SIZE. In all other cases, we use FIRST_BIT plus SIZE.
1128 We return an expression for the size. */
1131 merge_sizes (tree last_size, tree first_bit, tree size, bool special,
1134 tree type = TREE_TYPE (last_size);
1137 if (!special || TREE_CODE (size) != COND_EXPR)
1139 new = size_binop (PLUS_EXPR, first_bit, size);
1141 new = size_binop (MAX_EXPR, last_size, new);
1145 new = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
1146 integer_zerop (TREE_OPERAND (size, 1))
1147 ? last_size : merge_sizes (last_size, first_bit,
1148 TREE_OPERAND (size, 1),
1150 integer_zerop (TREE_OPERAND (size, 2))
1151 ? last_size : merge_sizes (last_size, first_bit,
1152 TREE_OPERAND (size, 2),
1155 /* We don't need any NON_VALUE_EXPRs and they can confuse us (especially
1156 when fed through substitute_in_expr) into thinking that a constant
1157 size is not constant. */
1158 while (TREE_CODE (new) == NON_LVALUE_EXPR)
1159 new = TREE_OPERAND (new, 0);
1164 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1165 related by the addition of a constant. Return that constant if so. */
1168 compute_related_constant (tree op0, tree op1)
1170 tree op0_var, op1_var;
1171 tree op0_con = split_plus (op0, &op0_var);
1172 tree op1_con = split_plus (op1, &op1_var);
1173 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1175 if (operand_equal_p (op0_var, op1_var, 0))
1177 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1183 /* Utility function of above to split a tree OP which may be a sum, into a
1184 constant part, which is returned, and a variable part, which is stored
1185 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1189 split_plus (tree in, tree *pvar)
1191 /* Strip NOPS in order to ease the tree traversal and maximize the
1192 potential for constant or plus/minus discovery. We need to be careful
1193 to always return and set *pvar to bitsizetype trees, but it's worth
1197 *pvar = convert (bitsizetype, in);
1199 if (TREE_CODE (in) == INTEGER_CST)
1201 *pvar = bitsize_zero_node;
1202 return convert (bitsizetype, in);
1204 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1206 tree lhs_var, rhs_var;
1207 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1208 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
1210 if (lhs_var == TREE_OPERAND (in, 0)
1211 && rhs_var == TREE_OPERAND (in, 1))
1212 return bitsize_zero_node;
1214 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1215 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
1218 return bitsize_zero_node;
1221 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1222 subprogram. If it is void_type_node, then we are dealing with a procedure,
1223 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1224 PARM_DECL nodes that are the subprogram arguments. CICO_LIST is the
1225 copy-in/copy-out list to be stored into TYPE_CICO_LIST.
1226 RETURNS_UNCONSTRAINED is nonzero if the function returns an unconstrained
1227 object. RETURNS_BY_REF is nonzero if the function returns by reference.
1228 RETURNS_WITH_DSP is nonzero if the function is to return with a
1229 depressed stack pointer. RETURNS_BY_TARGET_PTR is true if the function
1230 is to be passed (as its first parameter) the address of the place to copy
1234 create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
1235 bool returns_unconstrained, bool returns_by_ref,
1236 bool returns_with_dsp, bool returns_by_target_ptr)
1238 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1239 the subprogram formal parameters. This list is generated by traversing the
1240 input list of PARM_DECL nodes. */
1241 tree param_type_list = NULL;
1245 for (param_decl = param_decl_list; param_decl;
1246 param_decl = TREE_CHAIN (param_decl))
1247 param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
1250 /* The list of the function parameter types has to be terminated by the void
1251 type to signal to the back-end that we are not dealing with a variable
1252 parameter subprogram, but that the subprogram has a fixed number of
1254 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1256 /* The list of argument types has been created in reverse
1258 param_type_list = nreverse (param_type_list);
1260 type = build_function_type (return_type, param_type_list);
1262 /* TYPE may have been shared since GCC hashes types. If it has a CICO_LIST
1263 or the new type should, make a copy of TYPE. Likewise for
1264 RETURNS_UNCONSTRAINED and RETURNS_BY_REF. */
1265 if (TYPE_CI_CO_LIST (type) || cico_list
1266 || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
1267 || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref
1268 || TYPE_RETURNS_BY_TARGET_PTR_P (type) != returns_by_target_ptr)
1269 type = copy_type (type);
1271 TYPE_CI_CO_LIST (type) = cico_list;
1272 TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
1273 TYPE_RETURNS_STACK_DEPRESSED (type) = returns_with_dsp;
1274 TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
1275 TYPE_RETURNS_BY_TARGET_PTR_P (type) = returns_by_target_ptr;
1279 /* Return a copy of TYPE but safe to modify in any way. */
1282 copy_type (tree type)
1284 tree new = copy_node (type);
1286 /* copy_node clears this field instead of copying it, because it is
1287 aliased with TREE_CHAIN. */
1288 TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type);
1290 TYPE_POINTER_TO (new) = 0;
1291 TYPE_REFERENCE_TO (new) = 0;
1292 TYPE_MAIN_VARIANT (new) = new;
1293 TYPE_NEXT_VARIANT (new) = 0;
1298 /* Return an INTEGER_TYPE of SIZETYPE with range MIN to MAX and whose
1299 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position of
1303 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
1305 /* First build a type for the desired range. */
1306 tree type = build_index_2_type (min, max);
1308 /* If this type has the TYPE_INDEX_TYPE we want, return it. Otherwise, if it
1309 doesn't have TYPE_INDEX_TYPE set, set it to INDEX. If TYPE_INDEX_TYPE
1310 is set, but not to INDEX, make a copy of this type with the requested
1311 index type. Note that we have no way of sharing these types, but that's
1312 only a small hole. */
1313 if (TYPE_INDEX_TYPE (type) == index)
1315 else if (TYPE_INDEX_TYPE (type))
1316 type = copy_type (type);
1318 SET_TYPE_INDEX_TYPE (type, index);
1319 create_type_decl (NULL_TREE, type, NULL, true, false, gnat_node);
1323 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type (a character
1324 string) and TYPE is a ..._TYPE node giving its data type.
1325 ARTIFICIAL_P is true if this is a declaration that was generated
1326 by the compiler. DEBUG_INFO_P is true if we need to write debugging
1327 information about this type. GNAT_NODE is used for the position of
1331 create_type_decl (tree type_name, tree type, struct attrib *attr_list,
1332 bool artificial_p, bool debug_info_p, Node_Id gnat_node)
1334 tree type_decl = build_decl (TYPE_DECL, type_name, type);
1335 enum tree_code code = TREE_CODE (type);
1337 DECL_ARTIFICIAL (type_decl) = artificial_p;
1339 if (!TYPE_IS_DUMMY_P (type))
1340 gnat_pushdecl (type_decl, gnat_node);
1342 process_attributes (type_decl, attr_list);
1344 /* Pass type declaration information to the debugger unless this is an
1345 UNCONSTRAINED_ARRAY_TYPE, which the debugger does not support,
1346 and ENUMERAL_TYPE or RECORD_TYPE which is handled separately, or
1347 type for which debugging information was not requested. */
1348 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
1349 DECL_IGNORED_P (type_decl) = 1;
1350 else if (code != ENUMERAL_TYPE
1351 && (code != RECORD_TYPE || TYPE_IS_FAT_POINTER_P (type))
1352 && !((code == POINTER_TYPE || code == REFERENCE_TYPE)
1353 && TYPE_IS_DUMMY_P (TREE_TYPE (type))))
1354 rest_of_type_decl_compilation (type_decl);
1359 /* Helper for create_var_decl and create_true_var_decl. Returns a GCC VAR_DECL
1362 VAR_NAME gives the name of the variable. ASM_NAME is its assembler name
1363 (if provided). TYPE is its data type (a GCC ..._TYPE node). VAR_INIT is
1364 the GCC tree for an optional initial expression; NULL_TREE if none.
1366 CONST_FLAG is true if this variable is constant, in which case we might
1367 return a CONST_DECL node unless CONST_DECL_ALLOWED_FLAG is false.
1369 PUBLIC_FLAG is true if this definition is to be made visible outside of
1370 the current compilation unit. This flag should be set when processing the
1371 variable definitions in a package specification. EXTERN_FLAG is nonzero
1372 when processing an external variable declaration (as opposed to a
1373 definition: no storage is to be allocated for the variable here).
1375 STATIC_FLAG is only relevant when not at top level. In that case
1376 it indicates whether to always allocate storage to the variable.
1378 GNAT_NODE is used for the position of the decl. */
1381 create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
1382 bool const_flag, bool const_decl_allowed_flag,
1383 bool public_flag, bool extern_flag, bool static_flag,
1384 struct attrib *attr_list, Node_Id gnat_node)
1388 && TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
1389 && (global_bindings_p () || static_flag
1390 ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init)) != 0
1391 : TREE_CONSTANT (var_init)));
1393 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
1394 case the initializer may be used in-lieu of the DECL node (as done in
1395 Identifier_to_gnu). This is useful to prevent the need of elaboration
1396 code when an identifier for which such a decl is made is in turn used as
1397 an initializer. We used to rely on CONST vs VAR_DECL for this purpose,
1398 but extra constraints apply to this choice (see below) and are not
1399 relevant to the distinction we wish to make. */
1400 bool constant_p = const_flag && init_const;
1402 /* The actual DECL node. CONST_DECL was initially intended for enumerals
1403 and may be used for scalars in general but not for aggregates. */
1405 = build_decl ((constant_p && const_decl_allowed_flag
1406 && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
1409 /* If this is external, throw away any initializations (they will be done
1410 elsewhere) unless this is a a constant for which we would like to remain
1411 able to get the initializer. If we are defining a global here, leave a
1412 constant initialization and save any variable elaborations for the
1413 elaboration routine. If we are just annotating types, throw away the
1414 initialization if it isn't a constant. */
1415 if ((extern_flag && !constant_p)
1416 || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
1417 var_init = NULL_TREE;
1419 /* At the global level, an initializer requiring code to be generated
1420 produces elaboration statements. Check that such statements are allowed,
1421 that is, not violating a No_Elaboration_Code restriction. */
1422 if (global_bindings_p () && var_init != 0 && ! init_const)
1423 Check_Elaboration_Code_Allowed (gnat_node);
1425 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
1426 try to fiddle with DECL_COMMON. However, on platforms that don't
1427 support global BSS sections, uninitialized global variables would
1428 go in DATA instead, thus increasing the size of the executable. */
1430 && TREE_CODE (var_decl) == VAR_DECL
1431 && !have_global_bss_p ())
1432 DECL_COMMON (var_decl) = 1;
1433 DECL_INITIAL (var_decl) = var_init;
1434 TREE_READONLY (var_decl) = const_flag;
1435 DECL_EXTERNAL (var_decl) = extern_flag;
1436 TREE_PUBLIC (var_decl) = public_flag || extern_flag;
1437 TREE_CONSTANT (var_decl) = constant_p;
1438 TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
1439 = TYPE_VOLATILE (type);
1441 /* If it's public and not external, always allocate storage for it.
1442 At the global binding level we need to allocate static storage for the
1443 variable if and only if it's not external. If we are not at the top level
1444 we allocate automatic storage unless requested not to. */
1445 TREE_STATIC (var_decl)
1446 = public_flag || (global_bindings_p () ? !extern_flag : static_flag);
1448 if (asm_name && VAR_OR_FUNCTION_DECL_P (var_decl))
1449 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
1451 process_attributes (var_decl, attr_list);
1453 /* Add this decl to the current binding level. */
1454 gnat_pushdecl (var_decl, gnat_node);
1456 if (TREE_SIDE_EFFECTS (var_decl))
1457 TREE_ADDRESSABLE (var_decl) = 1;
1459 if (TREE_CODE (var_decl) != CONST_DECL)
1461 if (global_bindings_p ())
1462 rest_of_decl_compilation (var_decl, true, 0);
1465 expand_decl (var_decl);
1470 /* Wrapper around create_var_decl_1 for cases where we don't care whether
1471 a VAR or a CONST decl node is created. */
1474 create_var_decl (tree var_name, tree asm_name, tree type, tree var_init,
1475 bool const_flag, bool public_flag, bool extern_flag,
1476 bool static_flag, struct attrib *attr_list,
1479 return create_var_decl_1 (var_name, asm_name, type, var_init,
1481 public_flag, extern_flag, static_flag,
1482 attr_list, gnat_node);
1485 /* Wrapper around create_var_decl_1 for cases where a VAR_DECL node is
1486 required. The primary intent is for DECL_CONST_CORRESPONDING_VARs, which
1487 must be VAR_DECLs and on which we want TREE_READONLY set to have them
1488 possibly assigned to a readonly data section. */
1491 create_true_var_decl (tree var_name, tree asm_name, tree type, tree var_init,
1492 bool const_flag, bool public_flag, bool extern_flag,
1493 bool static_flag, struct attrib *attr_list,
1496 return create_var_decl_1 (var_name, asm_name, type, var_init,
1498 public_flag, extern_flag, static_flag,
1499 attr_list, gnat_node);
1502 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
1505 aggregate_type_contains_array_p (tree type)
1507 switch (TREE_CODE (type))
1511 case QUAL_UNION_TYPE:
1514 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1515 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
1516 && aggregate_type_contains_array_p (TREE_TYPE (field)))
1529 /* Returns a FIELD_DECL node. FIELD_NAME the field name, FIELD_TYPE is its
1530 type, and RECORD_TYPE is the type of the parent. PACKED is nonzero if
1531 this field is in a record type with a "pragma pack". If SIZE is nonzero
1532 it is the specified size for this field. If POS is nonzero, it is the bit
1533 position. If ADDRESSABLE is nonzero, it means we are allowed to take
1534 the address of this field for aliasing purposes. If it is negative, we
1535 should not make a bitfield, which is used by make_aligning_type. */
1538 create_field_decl (tree field_name, tree field_type, tree record_type,
1539 int packed, tree size, tree pos, int addressable)
1541 tree field_decl = build_decl (FIELD_DECL, field_name, field_type);
1543 DECL_CONTEXT (field_decl) = record_type;
1544 TREE_READONLY (field_decl) = TYPE_READONLY (field_type);
1546 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
1547 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
1548 Likewise for an aggregate without specified position that contains an
1549 array, because in this case slices of variable length of this array
1550 must be handled by GCC and variable-sized objects need to be aligned
1551 to at least a byte boundary. */
1552 if (packed && (TYPE_MODE (field_type) == BLKmode
1554 && AGGREGATE_TYPE_P (field_type)
1555 && aggregate_type_contains_array_p (field_type))))
1556 DECL_ALIGN (field_decl) = BITS_PER_UNIT;
1558 /* If a size is specified, use it. Otherwise, if the record type is packed
1559 compute a size to use, which may differ from the object's natural size.
1560 We always set a size in this case to trigger the checks for bitfield
1561 creation below, which is typically required when no position has been
1564 size = convert (bitsizetype, size);
1565 else if (packed == 1)
1567 size = rm_size (field_type);
1569 /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
1571 if (TREE_CODE (size) == INTEGER_CST
1572 && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
1573 size = round_up (size, BITS_PER_UNIT);
1576 /* If we may, according to ADDRESSABLE, make a bitfield if a size is
1577 specified for two reasons: first if the size differs from the natural
1578 size. Second, if the alignment is insufficient. There are a number of
1579 ways the latter can be true.
1581 We never make a bitfield if the type of the field has a nonconstant size,
1582 because no such entity requiring bitfield operations should reach here.
1584 We do *preventively* make a bitfield when there might be the need for it
1585 but we don't have all the necessary information to decide, as is the case
1586 of a field with no specified position in a packed record.
1588 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
1589 in layout_decl or finish_record_type to clear the bit_field indication if
1590 it is in fact not needed. */
1591 if (addressable >= 0
1593 && TREE_CODE (size) == INTEGER_CST
1594 && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
1595 && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
1596 || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
1598 || (TYPE_ALIGN (record_type) != 0
1599 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
1601 DECL_BIT_FIELD (field_decl) = 1;
1602 DECL_SIZE (field_decl) = size;
1603 if (!packed && !pos)
1604 DECL_ALIGN (field_decl)
1605 = (TYPE_ALIGN (record_type) != 0
1606 ? MIN (TYPE_ALIGN (record_type), TYPE_ALIGN (field_type))
1607 : TYPE_ALIGN (field_type));
1610 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
1612 /* Bump the alignment if need be, either for bitfield/packing purposes or
1613 to satisfy the type requirements if no such consideration applies. When
1614 we get the alignment from the type, indicate if this is from an explicit
1615 user request, which prevents stor-layout from lowering it later on. */
1618 = (DECL_BIT_FIELD (field_decl) ? 1
1619 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT : 0);
1621 if (bit_align > DECL_ALIGN (field_decl))
1622 DECL_ALIGN (field_decl) = bit_align;
1623 else if (!bit_align && TYPE_ALIGN (field_type) > DECL_ALIGN (field_decl))
1625 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
1626 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (field_type);
1632 /* We need to pass in the alignment the DECL is known to have.
1633 This is the lowest-order bit set in POS, but no more than
1634 the alignment of the record, if one is specified. Note
1635 that an alignment of 0 is taken as infinite. */
1636 unsigned int known_align;
1638 if (host_integerp (pos, 1))
1639 known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
1641 known_align = BITS_PER_UNIT;
1643 if (TYPE_ALIGN (record_type)
1644 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
1645 known_align = TYPE_ALIGN (record_type);
1647 layout_decl (field_decl, known_align);
1648 SET_DECL_OFFSET_ALIGN (field_decl,
1649 host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
1651 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
1652 &DECL_FIELD_BIT_OFFSET (field_decl),
1653 DECL_OFFSET_ALIGN (field_decl), pos);
1655 DECL_HAS_REP_P (field_decl) = 1;
1658 /* In addition to what our caller says, claim the field is addressable if we
1659 know that its type is not suitable.
1661 The field may also be "technically" nonaddressable, meaning that even if
1662 we attempt to take the field's address we will actually get the address
1663 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
1664 value we have at this point is not accurate enough, so we don't account
1665 for this here and let finish_record_type decide. */
1666 if (!type_for_nonaliased_component_p (field_type))
1669 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
1674 /* Returns a PARM_DECL node. PARAM_NAME is the name of the parameter,
1675 PARAM_TYPE is its type. READONLY is true if the parameter is
1676 readonly (either an In parameter or an address of a pass-by-ref
1680 create_param_decl (tree param_name, tree param_type, bool readonly)
1682 tree param_decl = build_decl (PARM_DECL, param_name, param_type);
1684 /* Honor targetm.calls.promote_prototypes(), as not doing so can
1685 lead to various ABI violations. */
1686 if (targetm.calls.promote_prototypes (param_type)
1687 && (TREE_CODE (param_type) == INTEGER_TYPE
1688 || TREE_CODE (param_type) == ENUMERAL_TYPE)
1689 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
1691 /* We have to be careful about biased types here. Make a subtype
1692 of integer_type_node with the proper biasing. */
1693 if (TREE_CODE (param_type) == INTEGER_TYPE
1694 && TYPE_BIASED_REPRESENTATION_P (param_type))
1697 = copy_type (build_range_type (integer_type_node,
1698 TYPE_MIN_VALUE (param_type),
1699 TYPE_MAX_VALUE (param_type)));
1701 TYPE_BIASED_REPRESENTATION_P (param_type) = 1;
1704 param_type = integer_type_node;
1707 DECL_ARG_TYPE (param_decl) = param_type;
1708 TREE_READONLY (param_decl) = readonly;
1712 /* Given a DECL and ATTR_LIST, process the listed attributes. */
1715 process_attributes (tree decl, struct attrib *attr_list)
1717 for (; attr_list; attr_list = attr_list->next)
1718 switch (attr_list->type)
1720 case ATTR_MACHINE_ATTRIBUTE:
1721 decl_attributes (&decl, tree_cons (attr_list->name, attr_list->args,
1723 ATTR_FLAG_TYPE_IN_PLACE);
1726 case ATTR_LINK_ALIAS:
1727 if (! DECL_EXTERNAL (decl))
1729 TREE_STATIC (decl) = 1;
1730 assemble_alias (decl, attr_list->name);
1734 case ATTR_WEAK_EXTERNAL:
1736 declare_weak (decl);
1738 post_error ("?weak declarations not supported on this target",
1739 attr_list->error_point);
1742 case ATTR_LINK_SECTION:
1743 if (targetm.have_named_sections)
1745 DECL_SECTION_NAME (decl)
1746 = build_string (IDENTIFIER_LENGTH (attr_list->name),
1747 IDENTIFIER_POINTER (attr_list->name));
1748 DECL_COMMON (decl) = 0;
1751 post_error ("?section attributes are not supported for this target",
1752 attr_list->error_point);
1755 case ATTR_LINK_CONSTRUCTOR:
1756 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1757 TREE_USED (decl) = 1;
1760 case ATTR_LINK_DESTRUCTOR:
1761 DECL_STATIC_DESTRUCTOR (decl) = 1;
1762 TREE_USED (decl) = 1;
1767 /* Record a global renaming pointer. */
1770 record_global_renaming_pointer (tree decl)
1772 gcc_assert (DECL_RENAMED_OBJECT (decl));
1773 VEC_safe_push (tree, gc, global_renaming_pointers, decl);
1776 /* Invalidate the global renaming pointers. */
1779 invalidate_global_renaming_pointers (void)
1784 for (i = 0; VEC_iterate(tree, global_renaming_pointers, i, iter); i++)
1785 SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);
1787 VEC_free (tree, gc, global_renaming_pointers);
1790 /* Return true if VALUE is a known to be a multiple of FACTOR, which must be
1794 value_factor_p (tree value, HOST_WIDE_INT factor)
1796 if (host_integerp (value, 1))
1797 return tree_low_cst (value, 1) % factor == 0;
1799 if (TREE_CODE (value) == MULT_EXPR)
1800 return (value_factor_p (TREE_OPERAND (value, 0), factor)
1801 || value_factor_p (TREE_OPERAND (value, 1), factor));
1806 /* Given 2 consecutive field decls PREV_FIELD and CURR_FIELD, return true
1807 unless we can prove these 2 fields are laid out in such a way that no gap
1808 exist between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
1809 is the distance in bits between the end of PREV_FIELD and the starting
1810 position of CURR_FIELD. It is ignored if null. */
1813 potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
1815 /* If this is the first field of the record, there cannot be any gap */
1819 /* If the previous field is a union type, then return False: The only
1820 time when such a field is not the last field of the record is when
1821 there are other components at fixed positions after it (meaning there
1822 was a rep clause for every field), in which case we don't want the
1823 alignment constraint to override them. */
1824 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
1827 /* If the distance between the end of prev_field and the beginning of
1828 curr_field is constant, then there is a gap if the value of this
1829 constant is not null. */
1830 if (offset && host_integerp (offset, 1))
1831 return !integer_zerop (offset);
1833 /* If the size and position of the previous field are constant,
1834 then check the sum of this size and position. There will be a gap
1835 iff it is not multiple of the current field alignment. */
1836 if (host_integerp (DECL_SIZE (prev_field), 1)
1837 && host_integerp (bit_position (prev_field), 1))
1838 return ((tree_low_cst (bit_position (prev_field), 1)
1839 + tree_low_cst (DECL_SIZE (prev_field), 1))
1840 % DECL_ALIGN (curr_field) != 0);
1842 /* If both the position and size of the previous field are multiples
1843 of the current field alignment, there cannot be any gap. */
1844 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
1845 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
1848 /* Fallback, return that there may be a potential gap */
1852 /* Returns a LABEL_DECL node for LABEL_NAME. */
1855 create_label_decl (tree label_name)
1857 tree label_decl = build_decl (LABEL_DECL, label_name, void_type_node);
1859 DECL_CONTEXT (label_decl) = current_function_decl;
1860 DECL_MODE (label_decl) = VOIDmode;
1861 DECL_SOURCE_LOCATION (label_decl) = input_location;
1866 /* Returns a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
1867 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1868 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1869 PARM_DECL nodes chained through the TREE_CHAIN field).
1871 INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
1872 appropriate fields in the FUNCTION_DECL. GNAT_NODE gives the location. */
1875 create_subprog_decl (tree subprog_name, tree asm_name,
1876 tree subprog_type, tree param_decl_list, bool inline_flag,
1877 bool public_flag, bool extern_flag,
1878 struct attrib *attr_list, Node_Id gnat_node)
1880 tree return_type = TREE_TYPE (subprog_type);
1881 tree subprog_decl = build_decl (FUNCTION_DECL, subprog_name, subprog_type);
1883 /* If this is a function nested inside an inlined external function, it
1884 means we aren't going to compile the outer function unless it is
1885 actually inlined, so do the same for us. */
1886 if (current_function_decl && DECL_INLINE (current_function_decl)
1887 && DECL_EXTERNAL (current_function_decl))
1890 DECL_EXTERNAL (subprog_decl) = extern_flag;
1891 TREE_PUBLIC (subprog_decl) = public_flag;
1892 TREE_STATIC (subprog_decl) = 1;
1893 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
1894 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
1895 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
1896 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
1897 DECL_RESULT (subprog_decl) = build_decl (RESULT_DECL, 0, return_type);
1898 DECL_ARTIFICIAL (DECL_RESULT (subprog_decl)) = 1;
1899 DECL_IGNORED_P (DECL_RESULT (subprog_decl)) = 1;
1901 /* TREE_ADDRESSABLE is set on the result type to request the use of the
1902 target by-reference return mechanism. This is not supported all the
1903 way down to RTL expansion with GCC 4, which ICEs on temporary creation
1904 attempts with such a type and expects DECL_BY_REFERENCE to be set on
1905 the RESULT_DECL instead - see gnat_genericize for more details. */
1906 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (subprog_decl))))
1908 tree result_decl = DECL_RESULT (subprog_decl);
1910 TREE_ADDRESSABLE (TREE_TYPE (result_decl)) = 0;
1911 DECL_BY_REFERENCE (result_decl) = 1;
1915 DECL_DECLARED_INLINE_P (subprog_decl) = 1;
1918 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
1920 process_attributes (subprog_decl, attr_list);
1922 /* Add this decl to the current binding level. */
1923 gnat_pushdecl (subprog_decl, gnat_node);
1925 /* Output the assembler code and/or RTL for the declaration. */
1926 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
1928 return subprog_decl;
1931 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
1932 body. This routine needs to be invoked before processing the declarations
1933 appearing in the subprogram. */
/* NOTE(review): the return type line and braces are elided in this view;
   the routine appears to return void.  */
1936 begin_subprog_body (tree subprog_decl)
/* Make SUBPROG_DECL the function being compiled and announce it to the
   toplevel diagnostic machinery.  */
1940 current_function_decl = subprog_decl;
1941 announce_function (subprog_decl);
1943 /* Enter a new binding level and show that all the parameters belong to
   this function by giving each one SUBPROG_DECL as its DECL_CONTEXT.
   NOTE(review): the binding-level push and the declaration of param_decl
   are on elided lines -- confirm against the full source.  */
1946 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
1947 param_decl = TREE_CHAIN (param_decl))
1948 DECL_CONTEXT (param_decl) = subprog_decl;
/* Create the RTL for the function decl itself.  */
1950 make_decl_rtl (subprog_decl);
1952 /* We handle pending sizes via the elaboration of types, so we don't need to
1953 save them. This causes them to be marked as part of the outer function
1954 and then discarded. */
1955 get_pending_sizes ();
1959 /* Helper for the genericization callback. Return a dereference of VAL
1960 if it is of a reference type. */
1963 convert_from_reference (tree val)
1965 tree value_type, ref;
/* NOTE(review): the early "return val;" for the non-reference case is on an
   elided line right after this test -- confirm against the full source.  */
1967 if (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE)
/* Build *VAL, typed as what the reference designates.  */
1970 value_type = TREE_TYPE (TREE_TYPE (val));
1971 ref = build1 (INDIRECT_REF, value_type, val);
1973 /* See if what we reference is CONST or VOLATILE, which requires
1974 looking into array types to get to the component type. */
1976 while (TREE_CODE (value_type) == ARRAY_TYPE)
1977 value_type = TREE_TYPE (value_type);
/* NOTE(review): the left-hand side of this assignment, presumably
   TREE_READONLY (ref), is on an elided line.  */
1980 = (TYPE_QUALS (value_type) & TYPE_QUAL_CONST);
1981 TREE_THIS_VOLATILE (ref)
1982 = (TYPE_QUALS (value_type) & TYPE_QUAL_VOLATILE);
/* A volatile dereference has side effects, as does dereferencing a value
   that itself has side effects.  */
1984 TREE_SIDE_EFFECTS (ref)
1985 = (TREE_THIS_VOLATILE (ref) || TREE_SIDE_EFFECTS (val));
1990 /* Helper for the genericization callback. Returns true if T denotes
1991 a RESULT_DECL with DECL_BY_REFERENCE set. */
/* NOTE(review): the return-type line (presumably static bool) and braces
   are elided in this view.  */
1994 is_byref_result (tree t)
1996 return (TREE_CODE (t) == RESULT_DECL && DECL_BY_REFERENCE (t));
2000 /* Tree walking callback for gnat_genericize. Currently ...
2002 o Adjust references to the function's DECL_RESULT if it is marked
2003 DECL_BY_REFERENCE and so has had its type turned into a reference
2004 type at the end of the function compilation. */
2007 gnat_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
2009 /* This implementation is modeled after what the C++ front-end is
2010 doing, basis of the downstream passes behavior. */
2012 tree stmt = *stmt_p;
2013 struct pointer_set_t *p_set = (struct pointer_set_t*) data;
2015 /* If we have a direct mention of the result decl, dereference. */
2016 if (is_byref_result (stmt))
2018 *stmt_p = convert_from_reference (stmt);
2023 /* Otherwise, no need to walk the same tree twice. */
2024 if (pointer_set_contains (p_set, stmt))
2030 /* If we are taking the address of what now is a reference, just get the
   reference value itself, converted to the expected pointer type.  */
2032 if (TREE_CODE (stmt) == ADDR_EXPR
2033 && is_byref_result (TREE_OPERAND (stmt, 0)))
2035 *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
2039 /* Don't dereference a by-reference RESULT_DECL inside a RETURN_EXPR. */
2040 else if (TREE_CODE (stmt) == RETURN_EXPR
2041 && TREE_OPERAND (stmt, 0)
2042 && is_byref_result (TREE_OPERAND (stmt, 0)))
2045 /* Don't look inside trees that cannot embed references of interest. */
2046 else if (IS_TYPE_OR_DECL_P (stmt))
/* Remember that this subtree has been visited.  NOTE(review): the
   walk_subtrees adjustments in the branches above are on elided lines.  */
2049 pointer_set_insert (p_set, *stmt_p);
2054 /* Perform lowering of Ada trees to GENERIC. In particular:
2056 o Turn a DECL_BY_REFERENCE RESULT_DECL into a real by-reference decl
2057 and adjust all the references to this decl accordingly. */
2060 gnat_genericize (tree fndecl)
2062 /* Prior to GCC 4, an explicit By_Reference result mechanism for a function
2063 was handled by simply setting TREE_ADDRESSABLE on the result type.
2064 Everything required to actually pass by invisible ref using the target
2065 mechanism (e.g. extra parameter) was handled at RTL expansion time.
2067 This doesn't work with GCC 4 any more for several reasons. First, the
2068 gimplification process might need the creation of temporaries of this
2069 type, and the gimplifier ICEs on such attempts. Second, the middle-end
2070 now relies on a different attribute for such cases (DECL_BY_REFERENCE on
2071 RESULT/PARM_DECLs), and expects the user invisible by-reference-ness to
2072 be explicitly accounted for by the front-end in the function body.
2074 We achieve the complete transformation in two steps:
2076 1/ create_subprog_decl performs early attribute tweaks: it clears
2077 TREE_ADDRESSABLE from the result type and sets DECL_BY_REFERENCE on
2078 the result decl. The former ensures that the bit isn't set in the GCC
2079 tree saved for the function, so prevents ICEs on temporary creation.
2080 The latter we use here to trigger the rest of the processing.
2082 2/ This function performs the type transformation on the result decl
2083 and adjusts all the references to this decl from the function body
2086 Clearing TREE_ADDRESSABLE from the type differs from the C++ front-end
2087 strategy, which escapes the gimplifier temporary creation issues by
2088 creating its own temporaries using TARGET_EXPR nodes. Our way relies
2089 on simple specific support code in aggregate_value_p to look at the
2090 target function result decl explicitly. */
2092 struct pointer_set_t *p_set;
2093 tree decl_result = DECL_RESULT (fndecl);
/* Nothing to do unless step 1/ above flagged the result decl.
   NOTE(review): the early "return" for this case is on an elided line.  */
2095 if (!DECL_BY_REFERENCE (decl_result))
2098 /* Make the DECL_RESULT explicitly by-reference and adjust all the
2099 occurrences in the function body using the common tree-walking facility.
2100 We want to see every occurrence of the result decl to adjust the
2101 referencing tree, so need to use our own pointer set to control which
2102 trees should be visited again or not. */
2104 p_set = pointer_set_create ();
/* Turn the result type into a reference type and re-lay out the decl.  */
2106 TREE_TYPE (decl_result) = build_reference_type (TREE_TYPE (decl_result));
2107 TREE_ADDRESSABLE (decl_result) = 0;
2108 relayout_decl (decl_result);
2110 walk_tree (&DECL_SAVED_TREE (fndecl), gnat_genericize_r, p_set, NULL);
2112 pointer_set_destroy (p_set);
2115 /* Finish the definition of the current subprogram and compile it all the way
2116 to assembler language output. BODY is the tree corresponding to
   the subprogram body.  */
2120 end_subprog_body (tree body)
2122 tree fndecl = current_function_decl;
2124 /* Mark the BLOCK for this level as being for this function and pop the
2125 level. Since the vars in it are the parameters, clear them. */
2126 BLOCK_VARS (current_binding_level->block) = 0;
2127 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
2128 DECL_INITIAL (fndecl) = current_binding_level->block;
2131 /* Deal with inline. If declared inline or we should default to inline,
2132 set the flag in the decl. */
2133 DECL_INLINE (fndecl)
2134 = DECL_DECLARED_INLINE_P (fndecl) || flag_inline_trees == 2;
2136 /* We handle pending sizes via the elaboration of types, so we don't
2137 need to save them. */
2138 get_pending_sizes ();
2140 /* Mark the RESULT_DECL as being in this subprogram. */
2141 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
2143 DECL_SAVED_TREE (fndecl) = body;
/* Restore the enclosing function (or NULL at top level) as current.  */
2145 current_function_decl = DECL_CONTEXT (fndecl);
2148 /* We cannot track the location of errors past this point. */
2149 error_gnat_node = Empty;
2151 /* If we're only annotating types, don't actually compile this function. */
2152 if (type_annotate_only)
2155 /* Perform the required pre-gimplification transformations on the tree. */
2156 gnat_genericize (fndecl);
2158 /* We do different things for nested and non-nested functions.
2159 ??? This should be in cgraph. */
2160 if (!DECL_CONTEXT (fndecl))
2162 gnat_gimplify_function (fndecl);
2163 cgraph_finalize_function (fndecl, false);
2166 /* Register this function with cgraph just far enough to get it
2167 added to our parent's nested function list. */
2168 (void) cgraph_node (fndecl);
2171 /* Convert FNDECL's code to GIMPLE and handle any nested functions. */
2174 gnat_gimplify_function (tree fndecl)
2176 struct cgraph_node *cgn;
/* Dump the GENERIC form, gimplify, then dump the GIMPLE form.  */
2178 dump_function (TDI_original, fndecl);
2179 gimplify_function_tree (fndecl);
2180 dump_function (TDI_generic, fndecl);
2182 /* Convert all nested functions to GIMPLE now. We do things in this order
2183 so that items like VLA sizes are expanded properly in the context of the
2184 correct function. */
2185 cgn = cgraph_node (fndecl);
2186 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
2187 gnat_gimplify_function (cgn->decl);
/* Register DECL as a builtin by pushing it into the current binding level.
   NOTE(review): the header comment, return type and "return decl;" of this
   routine are on elided lines -- confirm against the full source.  */
2192 gnat_builtin_function (tree decl)
2194 gnat_pushdecl (decl, Empty);
2198 /* Handle a "const" attribute; arguments as in
2199 struct attribute_spec.handler. */
2202 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
2203 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
/* A "const" function is modeled by TREE_READONLY on the FUNCTION_DECL.  */
2206 if (TREE_CODE (*node) == FUNCTION_DECL)
2207 TREE_READONLY (*node) = 1;
/* NOTE(review): an else-branch (presumably a diagnostic) is elided before
   this line; *no_add_attrs appears to be set only on that path.  */
2209 *no_add_attrs = true;
2214 /* Handle a "nothrow" attribute; arguments as in
2215 struct attribute_spec.handler. */
2218 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
2219 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
/* A "nothrow" function is modeled by TREE_NOTHROW on the FUNCTION_DECL.  */
2222 if (TREE_CODE (*node) == FUNCTION_DECL)
2223 TREE_NOTHROW (*node) = 1;
/* NOTE(review): an else-branch (presumably a diagnostic) is elided before
   this line; *no_add_attrs appears to be set only on that path.  */
2225 *no_add_attrs = true;
2230 /* Return an integer type with the number of bits of precision given by
2231 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
2232 it is a signed type. */
2235 gnat_type_for_size (unsigned precision, int unsignedp)
/* Serve cached types for precisions up to twice the word size.  */
2240 if (precision <= 2 * MAX_BITS_PER_WORD
2241 && signed_and_unsigned_types[precision][unsignedp])
2242 return signed_and_unsigned_types[precision][unsignedp];
/* NOTE(review): the "if (unsignedp) ... else" framing of these two calls is
   on elided lines -- only one of them executes per call.  */
2245 t = make_unsigned_type (precision);
2247 t = make_signed_type (precision);
/* Cache the freshly made type when it fits in the table.  */
2249 if (precision <= 2 * MAX_BITS_PER_WORD)
2250 signed_and_unsigned_types[precision][unsignedp] = t;
/* Give the new type a name such as "SIGNED_32" or "UNSIGNED_32".  */
2254 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
2255 TYPE_NAME (t) = get_identifier (type_name);
2261 /* Likewise for floating-point types. */
2264 float_type_for_precision (int precision, enum machine_mode mode)
/* One cached REAL_TYPE per machine mode.  */
2269 if (float_types[(int) mode])
2270 return float_types[(int) mode];
2272 float_types[(int) mode] = t = make_node (REAL_TYPE);
2273 TYPE_PRECISION (t) = precision;
/* Laying out the type (on an elided line) must yield exactly MODE.  */
2276 gcc_assert (TYPE_MODE (t) == mode);
2279 sprintf (type_name, "FLOAT_%d", precision);
2280 TYPE_NAME (t) = get_identifier (type_name);
2286 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
2287 an unsigned type; otherwise a signed type is returned. */
2290 gnat_type_for_mode (enum machine_mode mode, int unsignedp)
/* NOTE(review): the return values for the BLKmode and complex-mode branches
   are on elided lines.  */
2292 if (mode == BLKmode)
2294 else if (mode == VOIDmode)
2295 return void_type_node;
2296 else if (COMPLEX_MODE_P (mode))
2298 else if (SCALAR_FLOAT_MODE_P (mode))
2299 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
2300 else if (SCALAR_INT_MODE_P (mode))
2301 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2306 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
2309 gnat_unsigned_type (tree type_node)
2311 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
/* For a modular type, link the unsigned copy back to the original via
   TREE_TYPE so the modulus information stays reachable.  */
2313 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2315 type = copy_node (type);
2316 TREE_TYPE (type) = type_node;
/* Likewise when TYPE_NODE is a subtype of a modular type.  */
2318 else if (TREE_TYPE (type_node)
2319 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2320 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2322 type = copy_node (type);
2323 TREE_TYPE (type) = TREE_TYPE (type_node);
2329 /* Return the signed version of a TYPE_NODE, a scalar type. */
2332 gnat_signed_type (tree type_node)
2334 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
/* Mirror of gnat_unsigned_type: keep modular types linked to the original
   through TREE_TYPE on a private copy.  */
2336 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2338 type = copy_node (type);
2339 TREE_TYPE (type) = type_node;
2341 else if (TREE_TYPE (type_node)
2342 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2343 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2345 type = copy_node (type);
2346 TREE_TYPE (type) = TREE_TYPE (type_node);
2353 /* EXP is an expression for the size of an object. If this size contains
2354 discriminant references, replace them with the maximum (if MAX_P) or
2355 minimum (if !MAX_P) possible value of the discriminant. */
2358 max_size (tree exp, bool max_p)
2360 enum tree_code code = TREE_CODE (exp);
2361 tree type = TREE_TYPE (exp);
2363 switch (TREE_CODE_CLASS (code))
2365 case tcc_declaration:
/* For a call, recurse into each argument and rebuild the call.  */
2370 if (code == CALL_EXPR)
2373 int i, n = call_expr_nargs (exp);
2376 argarray = (tree *) alloca (n * sizeof (tree));
2377 for (i = 0; i < n; i++)
2378 argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
2379 return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
2384 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
2385 modify. Otherwise, we treat it like a variable. */
2386 if (!CONTAINS_PLACEHOLDER_P (exp))
/* Replace the discriminant reference by the extreme value of its type.  */
2389 type = TREE_TYPE (TREE_OPERAND (exp, 1));
2391 max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), true);
/* A comparison's size contribution is at most 1 and at least 0.  */
2393 case tcc_comparison:
2394 return max_p ? size_one_node : size_zero_node;
2398 case tcc_expression:
2399 switch (TREE_CODE_LENGTH (code))
2402 if (code == NON_LVALUE_EXPR)
2403 return max_size (TREE_OPERAND (exp, 0), max_p);
/* Unary case: NEGATE_EXPR flips which bound we are chasing.  */
2406 fold_build1 (code, type,
2407 max_size (TREE_OPERAND (exp, 0),
2408 code == NEGATE_EXPR ? !max_p : max_p));
2411 if (code == COMPOUND_EXPR)
2412 return max_size (TREE_OPERAND (exp, 1), max_p);
2414 /* Calculate "(A ? B : C) - D" as "A ? B - D : C - D" which
2415 may provide a tighter bound on max_size. */
2416 if (code == MINUS_EXPR
2417 && TREE_CODE (TREE_OPERAND (exp, 0)) == COND_EXPR)
2419 tree lhs = fold_build2 (MINUS_EXPR, type,
2420 TREE_OPERAND (TREE_OPERAND (exp, 0), 1),
2421 TREE_OPERAND (exp, 1));
2422 tree rhs = fold_build2 (MINUS_EXPR, type,
2423 TREE_OPERAND (TREE_OPERAND (exp, 0), 2),
2424 TREE_OPERAND (exp, 1));
2425 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2426 max_size (lhs, max_p),
2427 max_size (rhs, max_p));
/* Generic binary case; MINUS_EXPR flips the bound for its RHS.  */
2431 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
2432 tree rhs = max_size (TREE_OPERAND (exp, 1),
2433 code == MINUS_EXPR ? !max_p : max_p);
2435 /* Special-case wanting the maximum value of a MIN_EXPR.
2436 In that case, if one side overflows, return the other.
2437 sizetype is signed, but we know sizes are non-negative.
2438 Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
2439 overflowing or the maximum possible value and the RHS
   a variable.  NOTE(review): the guarding conditions for the branches
   below are partially on elided lines.  */
2443 && TREE_CODE (rhs) == INTEGER_CST
2444 && TREE_OVERFLOW (rhs))
2448 && TREE_CODE (lhs) == INTEGER_CST
2449 && TREE_OVERFLOW (lhs))
2451 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
2452 && ((TREE_CODE (lhs) == INTEGER_CST
2453 && TREE_OVERFLOW (lhs))
2454 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
2455 && !TREE_CONSTANT (rhs))
2458 return fold_build2 (code, type, lhs, rhs);
/* Ternary case: SAVE_EXPR is handled specially (elided); a COND_EXPR
   takes the extreme of its two arms.  */
2462 if (code == SAVE_EXPR)
2464 else if (code == COND_EXPR)
2465 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2466 max_size (TREE_OPERAND (exp, 1), max_p),
2467 max_size (TREE_OPERAND (exp, 2), max_p));
2470 /* Other tree classes cannot happen. */
2478 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2479 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2480 Return a constructor for the template. */
2483 build_template (tree template_type, tree array_type, tree expr)
2485 tree template_elts = NULL_TREE;
2486 tree bound_list = NULL_TREE;
/* Strip padding and justified-modular wrappers to reach the real array.  */
2489 while (TREE_CODE (array_type) == RECORD_TYPE
2490 && (TYPE_IS_PADDING_P (array_type)
2491 || TYPE_JUSTIFIED_MODULAR_P (array_type)))
2492 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
2494 if (TREE_CODE (array_type) == ARRAY_TYPE
2495 || (TREE_CODE (array_type) == INTEGER_TYPE
2496 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
2497 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
2499 /* First make the list for a CONSTRUCTOR for the template. Go down the
2500 field list of the template instead of the type chain because this
2501 array might be an Ada array of arrays and we can't tell where the
2502 nested arrays stop being the underlying object. */
2504 for (field = TYPE_FIELDS (template_type); field;
2506 ? (bound_list = TREE_CHAIN (bound_list))
2507 : (array_type = TREE_TYPE (array_type))),
2508 field = TREE_CHAIN (TREE_CHAIN (field)))
2510 tree bounds, min, max;
2512 /* If we have a bound list, get the bounds from there. Likewise
2513 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
2514 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
2515 This will give us a maximum range. */
2517 bounds = TREE_VALUE (bound_list);
2518 else if (TREE_CODE (array_type) == ARRAY_TYPE)
2519 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type))
2520 else if (expr && TREE_CODE (expr) == PARM_DECL
2521 && DECL_BY_COMPONENT_PTR_P (expr))
2522 bounds = TREE_TYPE (field);
/* Convert each bound to the type of the matching template field
   ("min" field, then its chained "max" field).  */
2526 min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
2527 max = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MAX_VALUE (bounds));
2529 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
2530 substitute it from OBJECT. */
2531 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
2532 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
/* Accumulate (max, min) pairs in reverse; nreverse fixes the order.  */
2534 template_elts = tree_cons (TREE_CHAIN (field), max,
2535 tree_cons (field, min, template_elts));
2538 return gnat_build_constructor (template_type, nreverse (template_elts));
2541 /* Build a VMS descriptor from a Mechanism_Type, which must specify
2542 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2543 in the type contains in its DECL_INITIAL the expression to use when
2544 a constructor is made for the type. GNAT_ENTITY is an entity used
2545 to print out an error message if the mechanism cannot be applied to
2546 an object of that type and also for the name. */
2549 build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2551 tree record_type = make_node (RECORD_TYPE);
2552 tree pointer32_type;
2553 tree field_list = 0;
2562 /* If TYPE is an unconstrained array, use the underlying array type. */
2563 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2564 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2566 /* If this is an array, compute the number of dimensions in the array,
2567 get the index types, and point to the inner type. */
2568 if (TREE_CODE (type) != ARRAY_TYPE)
2571 for (ndim = 1, inner_type = type;
2572 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2573 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2574 ndim++, inner_type = TREE_TYPE (inner_type))
2577 idx_arr = (tree *) alloca (ndim * sizeof (tree));
/* Fortran convention stores indices in reverse dimension order unless a
   non-contiguous (NCA) descriptor was requested.  */
2579 if (mech != By_Descriptor_NCA
2580 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2581 for (i = ndim - 1, inner_type = type;
2583 i--, inner_type = TREE_TYPE (inner_type))
2584 idx_arr[i] = TYPE_DOMAIN (inner_type);
2586 for (i = 0, inner_type = type;
2588 i++, inner_type = TREE_TYPE (inner_type))
2589 idx_arr[i] = TYPE_DOMAIN (inner_type);
2591 /* Now get the DTYPE value. */
2592 switch (TREE_CODE (type))
2596 if (TYPE_VAX_FLOATING_POINT_P (type))
2597 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
/* Integer-like types: encode size and signedness per the VMS DTYPE
   codes (e.g. 2=BU, 6=B, 3=WU, 7=W, ...).  */
2610 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2613 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2616 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2619 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2622 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2625 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
/* IEEE single/double float codes.  */
2631 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2635 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2636 && TYPE_VAX_FLOATING_POINT_P (type))
2637 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2649 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2660 /* Get the CLASS value. */
2663 case By_Descriptor_A:
2666 case By_Descriptor_NCA:
2669 case By_Descriptor_SB:
2673 case By_Descriptor_S:
2679 /* Make the type for a descriptor for VMS. The first four fields
2680 are the same for all types. */
2683 = chainon (field_list,
2684 make_descriptor_field
2685 ("LENGTH", gnat_type_for_size (16, 1), record_type,
2686 size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));
2688 field_list = chainon (field_list,
2689 make_descriptor_field ("DTYPE",
2690 gnat_type_for_size (8, 1),
2691 record_type, size_int (dtype)));
2692 field_list = chainon (field_list,
2693 make_descriptor_field ("CLASS",
2694 gnat_type_for_size (8, 1),
2695 record_type, size_int (class)));
2697 /* Of course this will crash at run-time if the address space is not
2698 within the low 32 bits, but there is nothing else we can do. */
2699 pointer32_type = build_pointer_type_for_mode (type, SImode, false);
2702 = chainon (field_list,
2703 make_descriptor_field
2704 ("POINTER", pointer32_type, record_type,
2705 build_unary_op (ADDR_EXPR,
2707 build0 (PLACEHOLDER_EXPR, type))));
/* Then the class-specific fields.  */
2712 case By_Descriptor_S:
2715 case By_Descriptor_SB:
/* SB adds the lower/upper bound of a one-dimensional array.  */
2717 = chainon (field_list,
2718 make_descriptor_field
2719 ("SB_L1", gnat_type_for_size (32, 1), record_type,
2720 TREE_CODE (type) == ARRAY_TYPE
2721 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2723 = chainon (field_list,
2724 make_descriptor_field
2725 ("SB_U1", gnat_type_for_size (32, 1), record_type,
2726 TREE_CODE (type) == ARRAY_TYPE
2727 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2730 case By_Descriptor_A:
2731 case By_Descriptor_NCA:
2732 field_list = chainon (field_list,
2733 make_descriptor_field ("SCALE",
2734 gnat_type_for_size (8, 1),
2738 field_list = chainon (field_list,
2739 make_descriptor_field ("DIGITS",
2740 gnat_type_for_size (8, 1),
2745 = chainon (field_list,
2746 make_descriptor_field
2747 ("AFLAGS", gnat_type_for_size (8, 1), record_type,
2748 size_int (mech == By_Descriptor_NCA
2750 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2751 : (TREE_CODE (type) == ARRAY_TYPE
2752 && TYPE_CONVENTION_FORTRAN_P (type)
2755 field_list = chainon (field_list,
2756 make_descriptor_field ("DIMCT",
2757 gnat_type_for_size (8, 1),
2761 field_list = chainon (field_list,
2762 make_descriptor_field ("ARSIZE",
2763 gnat_type_for_size (32, 1),
2765 size_in_bytes (type)));
2767 /* Now build a pointer to the 0,0,0... element. */
2768 tem = build0 (PLACEHOLDER_EXPR, type);
2769 for (i = 0, inner_type = type; i < ndim;
2770 i++, inner_type = TREE_TYPE (inner_type))
2771 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
2772 convert (TYPE_DOMAIN (inner_type), size_zero_node),
2773 NULL_TREE, NULL_TREE);
2776 = chainon (field_list,
2777 make_descriptor_field
2779 build_pointer_type_for_mode (inner_type, SImode, false),
2782 build_pointer_type_for_mode (inner_type, SImode,
2786 /* Next come the addressing coefficients. */
2787 tem = size_one_node;
2788 for (i = 0; i < ndim; i++)
/* idx_length = tem * (extent of dimension i); accumulated product gives
   the stride for each dimension.  */
2792 = size_binop (MULT_EXPR, tem,
2793 size_binop (PLUS_EXPR,
2794 size_binop (MINUS_EXPR,
2795 TYPE_MAX_VALUE (idx_arr[i]),
2796 TYPE_MIN_VALUE (idx_arr[i])),
/* NCA uses "Sn" stride fields, A uses "Mn" multiplier fields.  */
2799 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
2800 fname[1] = '0' + i, fname[2] = 0;
2802 = chainon (field_list,
2803 make_descriptor_field (fname,
2804 gnat_type_for_size (32, 1),
2805 record_type, idx_length));
2807 if (mech == By_Descriptor_NCA)
2811 /* Finally here are the bounds. */
2812 for (i = 0; i < ndim; i++)
2816 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2818 = chainon (field_list,
2819 make_descriptor_field
2820 (fname, gnat_type_for_size (32, 1), record_type,
2821 TYPE_MIN_VALUE (idx_arr[i])));
/* NOTE(review): the assignment of 'U' into fname for the upper bound is
   on an elided line just above.  */
2825 = chainon (field_list,
2826 make_descriptor_field
2827 (fname, gnat_type_for_size (32, 1), record_type,
2828 TYPE_MAX_VALUE (idx_arr[i])));
2833 post_error ("unsupported descriptor type for &", gnat_entity);
2836 finish_record_type (record_type, field_list, 0, true);
2837 create_type_decl (create_concat_name (gnat_entity, "DESC"), record_type,
2838 NULL, true, false, gnat_entity);
2843 /* Utility routine for above code to make a field. */
2846 make_descriptor_field (const char *name, tree type,
2847 tree rec_type, tree initial)
2850 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
/* Stash the constructor expression in DECL_INITIAL, as documented in the
   head comment of build_vms_descriptor.  */
2852 DECL_INITIAL (field) = initial;
2856 /* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
2857 pointer or fat pointer type. GNAT_SUBPROG is the subprogram to which
2858 the VMS descriptor is passed. */
2861 convert_vms_descriptor (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
2863 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
2864 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
2865 /* The CLASS field is the 3rd field in the descriptor. */
2866 tree class = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type)));
2867 /* The POINTER field is the 4th field in the descriptor. */
2868 tree pointer = TREE_CHAIN (class);
2870 /* Retrieve the value of the POINTER field. */
2872 = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);
/* For a plain pointer target type, the data pointer is all we need.  */
2874 if (POINTER_TYPE_P (gnu_type))
2875 return convert (gnu_type, gnu_expr);
2877 else if (TYPE_FAT_POINTER_P (gnu_type))
2879 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
2880 tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
2881 tree template_type = TREE_TYPE (p_bounds_type);
2882 tree min_field = TYPE_FIELDS (template_type);
2883 tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
2884 tree template, template_addr, aflags, dimct, t, u;
2885 /* See the head comment of build_vms_descriptor. */
2886 int iclass = TREE_INT_CST_LOW (DECL_INITIAL (class));
2888 /* Convert POINTER to the type of the P_ARRAY field. */
2889 gnu_expr = convert (p_array_type, gnu_expr);
/* Dispatch on the descriptor CLASS recorded at build time.  */
2893 case 1: /* Class S */
2894 case 15: /* Class SB */
2895 /* Build {1, LENGTH} template; LENGTH is the 1st field. */
2896 t = TYPE_FIELDS (desc_type);
2897 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
2898 t = tree_cons (min_field,
2899 convert (TREE_TYPE (min_field), integer_one_node),
2900 tree_cons (max_field,
2901 convert (TREE_TYPE (max_field), t),
2903 template = gnat_build_constructor (template_type, t);
2904 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);
2906 /* For class S, we are done. */
2910 /* Test that we really have a SB descriptor, like DEC Ada. */
2911 t = build3 (COMPONENT_REF, TREE_TYPE (class), desc, class, NULL);
2912 u = convert (TREE_TYPE (class), DECL_INITIAL (class));
2913 u = build_binary_op (EQ_EXPR, integer_type_node, t, u);
2914 /* If so, there is already a template in the descriptor and
2915 it is located right after the POINTER field. */
2916 t = TREE_CHAIN (pointer);
2917 template = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
2918 /* Otherwise use the {1, LENGTH} template we build above. */
2919 template_addr = build3 (COND_EXPR, p_bounds_type, u,
2920 build_unary_op (ADDR_EXPR, p_bounds_type,
2925 case 4: /* Class A */
2926 /* The AFLAGS field is the 7th field in the descriptor. */
2927 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer)));
2928 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
2929 /* The DIMCT field is the 8th field in the descriptor. */
2931 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
2932 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
2933 or FL_COEFF or FL_BOUNDS not set. */
2934 u = build_int_cst (TREE_TYPE (aflags), 192);
2935 u = build_binary_op (TRUTH_OR_EXPR, integer_type_node,
2936 build_binary_op (NE_EXPR, integer_type_node,
2938 convert (TREE_TYPE (dimct),
2940 build_binary_op (NE_EXPR, integer_type_node,
2941 build2 (BIT_AND_EXPR,
2945 add_stmt (build3 (COND_EXPR, void_type_node, u,
2946 build_call_raise (CE_Length_Check_Failed, Empty,
2947 N_Raise_Constraint_Error),
2949 /* There is already a template in the descriptor and it is
2950 located at the start of block 3 (12th field). */
2951 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (t))));
2952 template = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
2953 template_addr = build_unary_op (ADDR_EXPR, p_bounds_type, template);
/* Class NCA and anything unrecognized are not supported.  */
2956 case 10: /* Class NCA */
2958 post_error ("unsupported descriptor type for &", gnat_subprog);
2959 template_addr = integer_zero_node;
2963 /* Build the fat pointer in the form of a constructor. */
2964 t = tree_cons (TYPE_FIELDS (gnu_type), gnu_expr,
2965 tree_cons (TREE_CHAIN (TYPE_FIELDS (gnu_type)),
2966 template_addr, NULL_TREE));
2967 return gnat_build_constructor (gnu_type, t);
2974 /* Build a stub for the subprogram specified by the GCC tree GNU_SUBPROG
2975 and the GNAT node GNAT_SUBPROG. */
2978 build_function_stub (tree gnu_subprog, Entity_Id gnat_subprog)
2980 tree gnu_subprog_type, gnu_subprog_addr, gnu_subprog_call;
2981 tree gnu_stub_param, gnu_param_list, gnu_arg_types, gnu_param;
2982 tree gnu_stub_decl = DECL_FUNCTION_STUB (gnu_subprog);
2985 gnu_subprog_type = TREE_TYPE (gnu_subprog);
2986 gnu_param_list = NULL_TREE;
/* Open the stub function body and start collecting its statements.  */
2988 begin_subprog_body (gnu_stub_decl);
2991 start_stmt_group ();
2993 /* Loop over the parameters of the stub and translate any of them
2994 passed by descriptor into a by reference one. */
2995 for (gnu_stub_param = DECL_ARGUMENTS (gnu_stub_decl),
2996 gnu_arg_types = TYPE_ARG_TYPES (gnu_subprog_type);
2998 gnu_stub_param = TREE_CHAIN (gnu_stub_param),
2999 gnu_arg_types = TREE_CHAIN (gnu_arg_types))
3001 if (DECL_BY_DESCRIPTOR_P (gnu_stub_param))
3002 gnu_param = convert_vms_descriptor (TREE_VALUE (gnu_arg_types),
3003 gnu_stub_param, gnat_subprog)
3005 gnu_param = gnu_stub_param;
3007 gnu_param_list = tree_cons (NULL_TREE, gnu_param, gnu_param_list);
3010 gnu_body = end_stmt_group ();
3012 /* Invoke the internal subprogram. */
3013 gnu_subprog_addr = build1 (ADDR_EXPR, build_pointer_type (gnu_subprog_type),
3015 gnu_subprog_call = build_call_list (TREE_TYPE (gnu_subprog_type),
3017 nreverse (gnu_param_list));
3019 /* Propagate the return value, if any. */
3020 if (VOID_TYPE_P (TREE_TYPE (gnu_subprog_type)))
3021 append_to_statement_list (gnu_subprog_call, &gnu_body);
3023 append_to_statement_list (build_return_expr (DECL_RESULT (gnu_stub_decl),
/* Materialize a struct function for the stub and finish compiling it.  */
3029 allocate_struct_function (gnu_stub_decl, false);
3030 end_subprog_body (gnu_body);
3033 /* Build a type to be used to represent an aliased object whose nominal
3034 type is an unconstrained array. This consists of a RECORD_TYPE containing
3035 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
3036 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
3037 is used to represent an arbitrary unconstrained object. Use NAME
3038 as the name of the record. */
3041 build_unc_object_type (tree template_type, tree object_type, tree name)
3043 tree type = make_node (RECORD_TYPE);
/* Field 1: the bounds template.  */
3044 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
3045 template_type, type, 0, 0, 0, 1);
/* Field 2: the array data itself.  */
3046 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
3049 TYPE_NAME (type) = name;
3050 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
3051 finish_record_type (type,
3052 chainon (chainon (NULL_TREE, template_field),
3059 /* Same, taking a thin or fat pointer type instead of a template type. */
3062 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
3067 gcc_assert (TYPE_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
/* Extract the template type: from the second field of a fat pointer, or
   from the first field of the record a thin pointer designates.  */
3070 = (TYPE_FAT_POINTER_P (thin_fat_ptr_type)
3071 ? TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
3072 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
3073 return build_unc_object_type (template_type, object_type, name);
3076 /* Shift the component offsets within an unconstrained object TYPE to make it
3077 suitable for use as a designated type for thin pointers. */
3080 shift_unc_components_for_thin_pointers (tree type)
3082 /* Thin pointer values designate the ARRAY data of an unconstrained object,
3083 allocated past the BOUNDS template. The designated type is adjusted to
3084 have ARRAY at position zero and the template at a negative offset, so
3085 that COMPONENT_REFs on (*thin_ptr) designate the proper location. */
3087 tree bounds_field = TYPE_FIELDS (type);
3088 tree array_field = TREE_CHAIN (TYPE_FIELDS (type));
/* Place BOUNDS at -sizeof(ARRAY-position) and ARRAY at offset zero.  */
3090 DECL_FIELD_OFFSET (bounds_field)
3091 = size_binop (MINUS_EXPR, size_zero_node, byte_position (array_field));
3093 DECL_FIELD_OFFSET (array_field) = size_zero_node;
3094 DECL_FIELD_BIT_OFFSET (array_field) = bitsize_zero_node;
3097 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE. In
3098 the normal case this is just two adjustments, but we have more to do
3099 if NEW is an UNCONSTRAINED_ARRAY_TYPE. */
3102 update_pointer_to (tree old_type, tree new_type)
3104 tree ptr = TYPE_POINTER_TO (old_type);
3105 tree ref = TYPE_REFERENCE_TO (old_type);
3109 /* If this is the main variant, process all the other variants first. */
3110 if (TYPE_MAIN_VARIANT (old_type) == old_type)
3111 for (type = TYPE_NEXT_VARIANT (old_type); type;
3112 type = TYPE_NEXT_VARIANT (type))
3113 update_pointer_to (type, new_type);
3115 /* If no pointer or reference, we are done. */
3119 /* Merge the old type qualifiers in the new type.
3121 Each old variant has qualifiers for specific reasons, and the new
3122 designated type as well. Each set of qualifiers represents useful
3123 information grabbed at some point, and merging the two simply unifies
3124 these inputs into the final type description.
3126 Consider for instance a volatile type frozen after an access to constant
3127 type designating it. After the designated type freeze, we get here with a
3128 volatile new_type and a dummy old_type with a readonly variant, created
3129 when the access type was processed. We shall make a volatile and readonly
3130 designated type, because that's what it really is.
3132 We might also get here for a non-dummy old_type variant with different
3133 qualifiers than the new_type ones, for instance in some cases of pointers
3134 to private record type elaboration (see the comments around the call to
3135 this routine from gnat_to_gnu_entity/E_Access_Type). We have to merge the
3136 qualifiers in those cases too, to avoid accidentally discarding the
3137 initial set, and will often end up with old_type == new_type then. */
3138 new_type = build_qualified_type (new_type,
3139 TYPE_QUALS (old_type)
3140 | TYPE_QUALS (new_type));
3142 /* If the new type and the old one are identical, there is nothing to
3144 if (old_type == new_type)
3147 /* Otherwise, first handle the simple case. */
3148 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
3150 TYPE_POINTER_TO (new_type) = ptr;
3151 TYPE_REFERENCE_TO (new_type) = ref;
/* Walk every pointer type (and each of its variants) to the old type and
   redirect it to the new designated type; likewise for references.  */
3153 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
3154 for (ptr1 = TYPE_MAIN_VARIANT (ptr); ptr1;
3155 ptr1 = TYPE_NEXT_VARIANT (ptr1))
3156 TREE_TYPE (ptr1) = new_type;
3158 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
3159 for (ref1 = TYPE_MAIN_VARIANT (ref); ref1;
3160 ref1 = TYPE_NEXT_VARIANT (ref1))
3161 TREE_TYPE (ref1) = new_type;
3164 /* Now deal with the unconstrained array case. In this case the "pointer"
3165 is actually a RECORD_TYPE where both fields are pointers to dummy nodes.
3166 Turn them into pointers to the correct types using update_pointer_to. */
3167 else if (TREE_CODE (ptr) != RECORD_TYPE || !TYPE_IS_FAT_POINTER_P (ptr))
3172 tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
3173 tree array_field = TYPE_FIELDS (ptr);
3174 tree bounds_field = TREE_CHAIN (TYPE_FIELDS (ptr));
3175 tree new_ptr = TYPE_POINTER_TO (new_type);
3179 /* Make pointers to the dummy template point to the real template. */
3181 (TREE_TYPE (TREE_TYPE (bounds_field)),
3182 TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_ptr)))));
3184 /* The references to the template bounds present in the array type
3185 are made through a PLACEHOLDER_EXPR of type new_ptr. Since we
3186 are updating ptr to make it a full replacement for new_ptr as
3187 pointer to new_type, we must rework the PLACEHOLDER_EXPR so as
3188 to make it of type ptr. */
3189 new_ref = build3 (COMPONENT_REF, TREE_TYPE (bounds_field),
3190 build0 (PLACEHOLDER_EXPR, ptr),
3191 bounds_field, NULL_TREE);
3193 /* Create the new array for the new PLACEHOLDER_EXPR and make
3194 pointers to the dummy array point to it.
3196 ??? This is now the only use of substitute_in_type,
3197 which is a very "heavy" routine to do this, so it
3198 should be replaced at some point. */
3200 (TREE_TYPE (TREE_TYPE (array_field)),
3201 substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))),
3202 TREE_CHAIN (TYPE_FIELDS (new_ptr)), new_ref));
3204 /* Make ptr the pointer to new_type. */
3205 TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
3206 = TREE_TYPE (new_type) = ptr;
3208 for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
3209 SET_TYPE_UNCONSTRAINED_ARRAY (var, new_type);
3211 /* Now handle updating the allocation record, what the thin pointer
3212 points to. Update all pointers from the old record into the new
3213 one, update the type of the array field, and recompute the size. */
3214 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);
3216 TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
3217 = TREE_TYPE (TREE_TYPE (array_field));
3219 /* The size recomputation needs to account for alignment constraints, so
3220 we let layout_type work it out. This will reset the field offsets to
3221 what they would be in a regular record, so we shift them back to what
3222 we want them to be for a thin pointer designated type afterwards. */
3223 DECL_SIZE (TYPE_FIELDS (new_obj_rec)) = 0;
3224 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))) = 0;
3225 TYPE_SIZE (new_obj_rec) = 0;
3226 layout_type (new_obj_rec);
3228 shift_unc_components_for_thin_pointers (new_obj_rec);
3230 /* We are done, at last. */
3231 rest_of_record_type_compilation (ptr);
3235 /* Convert a pointer to a constrained array into a pointer to a fat
3236 pointer. This involves making or finding a template. */
3239 convert_to_fat_pointer (tree type, tree expr)
3241 tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
3242 tree template, template_addr;
3243 tree etype = TREE_TYPE (expr);
3245 /* If EXPR is a constant of zero, we make a fat pointer that has a null
3246 pointer to the template and array. */
3247 if (integer_zerop (expr))
3249 gnat_build_constructor
3251 tree_cons (TYPE_FIELDS (type),
3252 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
3253 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3254 convert (build_pointer_type (template_type),
3258 /* If EXPR is a thin pointer, make the template and data from the record. */
3260 else if (TYPE_THIN_POINTER_P (etype))
3262 tree fields = TYPE_FIELDS (TREE_TYPE (etype));
3264 expr = save_expr (expr);
/* Strip a trivial &x so we can reference the object's components directly
   instead of going through a dereference.  */
3265 if (TREE_CODE (expr) == ADDR_EXPR)
3266 expr = TREE_OPERAND (expr, 0);
3268 expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
3270 template = build_component_ref (expr, NULL_TREE, fields, false);
3271 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
3272 build_component_ref (expr, NULL_TREE,
3273 TREE_CHAIN (fields), false));
3276 /* Otherwise, build the constructor for the template. */
3277 template = build_template (template_type, TREE_TYPE (etype), expr);
3279 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);
3281 /* The result is a CONSTRUCTOR for the fat pointer.
3283 If expr is an argument of a foreign convention subprogram, the type it
3284 points to is directly the component type. In this case, the expression
3285 type may not match the corresponding FIELD_DECL type at this point, so we
3286 call "convert" here to fix that up if necessary. This type consistency is
3287 required, for instance because it ensures that possible later folding of
3288 component_refs against this constructor always yields something of the
3289 same type as the initial reference.
3291 Note that the call to "build_template" above is still fine, because it
3292 will only refer to the provided template_type in this case. */
3294 gnat_build_constructor
3295 (type, tree_cons (TYPE_FIELDS (type),
3296 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
3297 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3298 template_addr, NULL_TREE)));
3301 /* Convert to a thin pointer type, TYPE. The only thing we know how to convert
3302 is something that is a fat pointer, so convert to it first if EXPR
3303 is not already a fat pointer. */
3306 convert_to_thin_pointer (tree type, tree expr)
3308 if (!TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
3310 = convert_to_fat_pointer
3311 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
3313 /* We get the pointer to the data and use a NOP_EXPR to make it the
3315 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)),
3317 expr = build1 (NOP_EXPR, type, expr);
3322 /* Create an expression whose value is that of EXPR,
3323 converted to type TYPE. The TREE_TYPE of the value
3324 is always TYPE. This function implements all reasonable
3325 conversions; callers should filter out those that are
3326 not permitted by the language being compiled. */
3329 convert (tree type, tree expr)
3331 enum tree_code code = TREE_CODE (type);
3332 tree etype = TREE_TYPE (expr);
3333 enum tree_code ecode = TREE_CODE (etype);
3335 /* If EXPR is already the right type, we are done. */
3339 /* If both input and output have padding and are of variable size, do this
3340 as an unchecked conversion. Likewise if one is a mere variant of the
3341 other, so we avoid a pointless unpad/repad sequence. */
3342 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
3343 && TYPE_IS_PADDING_P (type) && TYPE_IS_PADDING_P (etype)
3344 && (!TREE_CONSTANT (TYPE_SIZE (type))
3345 || !TREE_CONSTANT (TYPE_SIZE (etype))
3346 || TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)))
3349 /* If the output type has padding, make a constructor to build the
3351 else if (code == RECORD_TYPE && TYPE_IS_PADDING_P (type))
3353 /* If we previously converted from another type and our type is
3354 of variable size, remove the conversion to avoid the need for
3355 variable-size temporaries. */
3356 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3357 && !TREE_CONSTANT (TYPE_SIZE (type)))
3358 expr = TREE_OPERAND (expr, 0);
3360 /* If we are just removing the padding from expr, convert the original
3361 object if we have variable size. That will avoid the need
3362 for some variable-size temporaries. */
3363 if (TREE_CODE (expr) == COMPONENT_REF
3364 && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE
3365 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
3366 && !TREE_CONSTANT (TYPE_SIZE (type)))
3367 return convert (type, TREE_OPERAND (expr, 0));
3369 /* If the result type is a padded type with a self-referentially-sized
3370 field and the expression type is a record, do this as an
3371 unchecked conversion. */
3372 else if (TREE_CODE (etype) == RECORD_TYPE
3373 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
3374 return unchecked_convert (type, expr, false);
3378 gnat_build_constructor (type,
3379 tree_cons (TYPE_FIELDS (type),
3381 (TYPE_FIELDS (type)),
3386 /* If the input type has padding, remove it and convert to the output type.
3387 The conditions ordering is arranged to ensure that the output type is not
3388 a padding type here, as it is not clear whether the conversion would
3389 always be correct if this was to happen. */
3390 else if (ecode == RECORD_TYPE && TYPE_IS_PADDING_P (etype))
3394 /* If we have just converted to this padded type, just get the
3395 inner expression. */
3396 if (TREE_CODE (expr) == CONSTRUCTOR
3397 && !VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (expr))
3398 && VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->index
3399 == TYPE_FIELDS (etype))
3401 = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->value;
3403 /* Otherwise, build an explicit component reference. */
3406 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
3408 return convert (type, unpadded);
3411 /* If the input is a biased type, adjust first. */
3412 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
3413 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
3414 fold_convert (TREE_TYPE (etype),
3416 TYPE_MIN_VALUE (etype)));
3418 /* If the input is a justified modular type, we need to extract the actual
3419 object before converting it to any other type with the exceptions of an
3420 unconstrained array or of a mere type variant. It is useful to avoid the
3421 extraction and conversion in the type variant case because it could end
3422 up replacing a VAR_DECL expr by a constructor and we might be about to
3423 take the address of the result. */
3424 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
3425 && code != UNCONSTRAINED_ARRAY_TYPE
3426 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
3427 return convert (type, build_component_ref (expr, NULL_TREE,
3428 TYPE_FIELDS (etype), false));
3430 /* If converting to a type that contains a template, convert to the data
3431 type and then build the template. */
3432 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
3434 tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));
3436 /* If the source already has a template, get a reference to the
3437 associated array only, as we are going to rebuild a template
3438 for the target type anyway. */
3439 expr = maybe_unconstrained_array (expr);
3442 gnat_build_constructor
3444 tree_cons (TYPE_FIELDS (type),
3445 build_template (TREE_TYPE (TYPE_FIELDS (type)),
3446 obj_type, NULL_TREE),
3447 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3448 convert (obj_type, expr), NULL_TREE)));
3451 /* There are some special cases of expressions that we process
3453 switch (TREE_CODE (expr))
3459 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
3460 conversion in gnat_expand_expr. NULL_EXPR does not represent
3461 an actual value, so no conversion is needed. */
3462 expr = copy_node (expr);
3463 TREE_TYPE (expr) = type;
3467 /* If we are converting a STRING_CST to another constrained array type,
3468 just make a new one in the proper type. */
3469 if (code == ecode && AGGREGATE_TYPE_P (etype)
3470 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
3471 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
3473 expr = copy_node (expr);
3474 TREE_TYPE (expr) = type;
3480 /* If we are converting a CONSTRUCTOR to another constrained array type
3481 with the same domain, just make a new one in the proper type. */
3482 if (code == ecode && code == ARRAY_TYPE
3483 && TREE_TYPE (type) == TREE_TYPE (etype)
3484 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (type)),
3485 TYPE_MIN_VALUE (TYPE_DOMAIN (etype)))
3486 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (type)),
3487 TYPE_MAX_VALUE (TYPE_DOMAIN (etype))))
3489 expr = copy_node (expr);
3490 TREE_TYPE (expr) = type;
3495 case UNCONSTRAINED_ARRAY_REF:
3496 /* Convert this to the type of the inner array by getting the address of
3497 the array from the template. */
3498 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
3499 build_component_ref (TREE_OPERAND (expr, 0),
3500 get_identifier ("P_ARRAY"),
3502 etype = TREE_TYPE (expr);
3503 ecode = TREE_CODE (etype);
3506 case VIEW_CONVERT_EXPR:
3508 /* GCC 4.x is very sensitive to type consistency overall, and view
3509 conversions thus are very frequent. Even though just "convert"ing
3510 the inner operand to the output type is fine in most cases, it
3511 might expose unexpected input/output type mismatches in special
3512 circumstances so we avoid such recursive calls when we can. */
3514 tree op0 = TREE_OPERAND (expr, 0);
3516 /* If we are converting back to the original type, we can just
3517 lift the input conversion. This is a common occurrence with
3518 switches back-and-forth amongst type variants. */
3519 if (type == TREE_TYPE (op0))
3522 /* Otherwise, if we're converting between two aggregate types, we
3523 might be allowed to substitute the VIEW_CONVERT target type in
3524 place or to just convert the inner expression. */
3525 if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
3527 /* If we are converting between type variants, we can just
3528 substitute the VIEW_CONVERT in place. */
3529 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
3530 return build1 (VIEW_CONVERT_EXPR, type, op0);
3532 /* Otherwise, we may just bypass the input view conversion unless
3533 one of the types is a fat pointer, which is handled by
3534 specialized code below which relies on exact type matching. */
3535 else if (!TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
3536 return convert (type, op0);
3542 /* If both types are record types, just convert the pointer and
3543 make a new INDIRECT_REF.
3545 ??? Disable this for now since it causes problems with the
3546 code in build_binary_op for MODIFY_EXPR which wants to
3547 strip off conversions. But that code really is a mess and
3548 we need to do this a much better way some time. */
3550 && (TREE_CODE (type) == RECORD_TYPE
3551 || TREE_CODE (type) == UNION_TYPE)
3552 && (TREE_CODE (etype) == RECORD_TYPE
3553 || TREE_CODE (etype) == UNION_TYPE)
3554 && !TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
3555 return build_unary_op (INDIRECT_REF, NULL_TREE,
3556 convert (build_pointer_type (type),
3557 TREE_OPERAND (expr, 0)));
3564 /* Check for converting to a pointer to an unconstrained array. */
3565 if (TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
3566 return convert_to_fat_pointer (type, expr);
3568 /* If we're converting between two aggregate types that have the same main
3569 variant, just make a VIEW_CONVERT_EXPR. */
3570 else if (AGGREGATE_TYPE_P (type)
3571 && TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
3572 return build1 (VIEW_CONVERT_EXPR, type, expr);
3574 /* In all other cases of related types, make a NOP_EXPR. */
3575 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
3576 || (code == INTEGER_CST && ecode == INTEGER_CST
3577 && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
3578 return fold_convert (type, expr);
3583 return fold_build1 (CONVERT_EXPR, type, expr);
3586 return fold_convert (type, gnat_truthvalue_conversion (expr));
3589 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
3590 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
3591 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
3592 return unchecked_convert (type, expr, false);
3593 else if (TYPE_BIASED_REPRESENTATION_P (type))
3594 return fold_convert (type,
3595 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
3596 convert (TREE_TYPE (type), expr),
3597 TYPE_MIN_VALUE (type)));
3599 /* ... fall through ... */
3602 return fold (convert_to_integer (type, expr));
3605 case REFERENCE_TYPE:
3606 /* If converting between two pointers to records denoting
3607 both a template and type, adjust if needed to account
3608 for any differing offsets, since one might be negative. */
3609 if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
3612 = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
3613 bit_position (TYPE_FIELDS (TREE_TYPE (type))));
3614 tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
3615 sbitsize_int (BITS_PER_UNIT));
3617 expr = build1 (NOP_EXPR, type, expr);
3618 TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
3619 if (integer_zerop (byte_diff))
3622 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
3623 fold (convert (sizetype, byte_diff)));
3626 /* If converting to a thin pointer, handle specially. */
3627 if (TYPE_THIN_POINTER_P (type)
3628 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
3629 return convert_to_thin_pointer (type, expr);
3631 /* If converting fat pointer to normal pointer, get the pointer to the
3632 array and then convert it. */
3633 else if (TYPE_FAT_POINTER_P (etype))
3634 expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
3637 return fold (convert_to_pointer (type, expr));
3640 return fold (convert_to_real (type, expr));
3643 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
3645 gnat_build_constructor
3646 (type, tree_cons (TYPE_FIELDS (type),
3647 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
3650 /* ... fall through ... */
3653 /* In these cases, assume the front-end has validated the conversion.
3654 If the conversion is valid, it will be a bit-wise conversion, so
3655 it can be viewed as an unchecked conversion. */
3656 return unchecked_convert (type, expr, false);
3659 /* This is either a conversion between a tagged type and some
3660 subtype, which we have to mark as a UNION_TYPE because of
3661 overlapping fields or a conversion of an Unchecked_Union. */
3662 return unchecked_convert (type, expr, false);
3664 case UNCONSTRAINED_ARRAY_TYPE:
3665 /* If EXPR is a constrained array, take its address, convert it to a
3666 fat pointer, and then dereference it. Likewise if EXPR is a
3667 record containing both a template and a constrained array.
3668 Note that a record representing a justified modular type
3669 always represents a packed constrained array. */
3670 if (ecode == ARRAY_TYPE
3671 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
3672 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
3673 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
3676 (INDIRECT_REF, NULL_TREE,
3677 convert_to_fat_pointer (TREE_TYPE (type),
3678 build_unary_op (ADDR_EXPR,
3681 /* Do something very similar for converting one unconstrained
3682 array to another. */
3683 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
3685 build_unary_op (INDIRECT_REF, NULL_TREE,
3686 convert (TREE_TYPE (type),
3687 build_unary_op (ADDR_EXPR,
3693 return fold (convert_to_complex (type, expr));
3700 /* Remove all conversions that are done in EXP. This includes converting
3701 from a padded type or to a justified modular type. If TRUE_ADDRESS
3702 is true, always return the address of the containing object even if
3703 the address is not bit-aligned. */
3706 remove_conversions (tree exp, bool true_address)
3708 switch (TREE_CODE (exp))
/* A CONSTRUCTOR for a justified modular type wraps the real value as its
   first element; recurse into it.  */
3712 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3713 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp))
3715 remove_conversions (VEC_index (constructor_elt,
3716 CONSTRUCTOR_ELTS (exp), 0)->value,
3721 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
3722 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
3723 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3726 case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
3727 case NOP_EXPR: case CONVERT_EXPR:
3728 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3737 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
3738 refers to the underlying array. If its type has TYPE_CONTAINS_TEMPLATE_P,
3739 likewise return an expression pointing to the underlying array. */
3742 maybe_unconstrained_array (tree exp)
3744 enum tree_code code = TREE_CODE (exp);
3747 switch (TREE_CODE (TREE_TYPE (exp)))
3749 case UNCONSTRAINED_ARRAY_TYPE:
3750 if (code == UNCONSTRAINED_ARRAY_REF)
/* Dereference the P_ARRAY pointer of the fat pointer to reach the data,
   propagating the read-only flag of the original reference.  */
3753 = build_unary_op (INDIRECT_REF, NULL_TREE,
3754 build_component_ref (TREE_OPERAND (exp, 0),
3755 get_identifier ("P_ARRAY"),
3757 TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp);
3761 else if (code == NULL_EXPR)
3762 return build1 (NULL_EXPR,
3763 TREE_TYPE (TREE_TYPE (TYPE_FIELDS
3764 (TREE_TYPE (TREE_TYPE (exp))))),
3765 TREE_OPERAND (exp, 0));
3768 /* If this is a padded type, convert to the unpadded type and see if
3769 it contains a template. */
3770 if (TYPE_IS_PADDING_P (TREE_TYPE (exp)))
3772 new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp))), exp);
3773 if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
3774 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (new)))
3776 build_component_ref (new, NULL_TREE,
3777 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (new))),
3780 else if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
3782 build_component_ref (exp, NULL_TREE,
3783 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))), 0);
3793 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
3794 If NOTRUNC_P is true, truncation operations should be suppressed. */
3797 unchecked_convert (tree type, tree expr, bool notrunc_p)
3799 tree etype = TREE_TYPE (expr);
3801 /* If the expression is already the right type, we are done. */
3805 /* If both types are integral just do a normal conversion.
3806 Likewise for a conversion to an unconstrained array. */
3807 if ((((INTEGRAL_TYPE_P (type)
3808 && !(TREE_CODE (type) == INTEGER_TYPE
3809 && TYPE_VAX_FLOATING_POINT_P (type)))
3810 || (POINTER_TYPE_P (type) && ! TYPE_THIN_POINTER_P (type))
3811 || (TREE_CODE (type) == RECORD_TYPE
3812 && TYPE_JUSTIFIED_MODULAR_P (type)))
3813 && ((INTEGRAL_TYPE_P (etype)
3814 && !(TREE_CODE (etype) == INTEGER_TYPE
3815 && TYPE_VAX_FLOATING_POINT_P (etype)))
3816 || (POINTER_TYPE_P (etype) && !TYPE_THIN_POINTER_P (etype))
3817 || (TREE_CODE (etype) == RECORD_TYPE
3818 && TYPE_JUSTIFIED_MODULAR_P (etype))))
3819 || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE
3822 bool final_unchecked = false;
/* Strip a biased representation off the source by converting through an
   unbiased copy of its type.  */
3824 if (TREE_CODE (etype) == INTEGER_TYPE
3825 && TYPE_BIASED_REPRESENTATION_P (etype))
3827 tree ntype = copy_type (etype);
3829 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
3830 TYPE_MAIN_VARIANT (ntype) = ntype;
3831 expr = build1 (NOP_EXPR, ntype, expr);
3834 if (TREE_CODE (type) == INTEGER_TYPE
3835 && TYPE_BIASED_REPRESENTATION_P (type))
3837 rtype = copy_type (type);
3838 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
3839 TYPE_MAIN_VARIANT (rtype) = rtype;
3842 /* We have another special case: if we are unchecked converting subtype
3843 into a base type, we need to ensure that VRP doesn't propagate range
3844 information since this conversion may be done precisely to validate
3845 that the object is within the range it is supposed to have. */
3846 else if (TREE_CODE (expr) != INTEGER_CST
3847 && TREE_CODE (type) == INTEGER_TYPE && !TREE_TYPE (type)
3848 && ((TREE_CODE (etype) == INTEGER_TYPE && TREE_TYPE (etype))
3849 || TREE_CODE (etype) == ENUMERAL_TYPE
3850 || TREE_CODE (etype) == BOOLEAN_TYPE))
3852 /* The optimization barrier is a VIEW_CONVERT_EXPR node; moreover,
3853 in order not to be deemed a useless type conversion, it must
3854 be from subtype to base type.
3856 ??? This may raise addressability and/or aliasing issues because
3857 VIEW_CONVERT_EXPR gets gimplified as an lvalue, thus causing the
3858 address of its operand to be taken if it is deemed addressable
3859 and not already in GIMPLE form. */
3860 rtype = gnat_type_for_mode (TYPE_MODE (type), TYPE_UNSIGNED (type));
3861 rtype = copy_type (rtype);
3862 TYPE_MAIN_VARIANT (rtype) = rtype;
3863 TREE_TYPE (rtype) = type;
3864 final_unchecked = true;
3867 expr = convert (rtype, expr);
3869 expr = fold_build1 (final_unchecked ? VIEW_CONVERT_EXPR : NOP_EXPR,
3873 /* If we are converting TO an integral type whose precision is not the
3874 same as its size, first unchecked convert to a record that contains
3875 an object of the output type. Then extract the field. */
3876 else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
3877 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
3878 GET_MODE_BITSIZE (TYPE_MODE (type))))
3880 tree rec_type = make_node (RECORD_TYPE);
3881 tree field = create_field_decl (get_identifier ("OBJ"), type,
3882 rec_type, 1, 0, 0, 0);
3884 TYPE_FIELDS (rec_type) = field;
3885 layout_type (rec_type);
3887 expr = unchecked_convert (rec_type, expr, notrunc_p);
3888 expr = build_component_ref (expr, NULL_TREE, field, 0);
3891 /* Similarly for integral input type whose precision is not equal to its
3893 else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype)
3894 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
3895 GET_MODE_BITSIZE (TYPE_MODE (etype))))
3897 tree rec_type = make_node (RECORD_TYPE);
3899 = create_field_decl (get_identifier ("OBJ"), etype, rec_type,
3902 TYPE_FIELDS (rec_type) = field;
3903 layout_type (rec_type);
3905 expr = gnat_build_constructor (rec_type, build_tree_list (field, expr));
3906 expr = unchecked_convert (type, expr, notrunc_p);
3909 /* We have a special case when we are converting between two
3910 unconstrained array types. In that case, take the address,
3911 convert the fat pointer types, and dereference. */
3912 else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
3913 && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3914 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
3915 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
3916 build_unary_op (ADDR_EXPR, NULL_TREE,
3920 expr = maybe_unconstrained_array (expr);
3921 etype = TREE_TYPE (expr);
3922 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
3925 /* If the result is an integral type whose size is not equal to
3926 the size of the underlying machine type, sign- or zero-extend
3927 the result. We need not do this in the case where the input is
3928 an integral type of the same precision and signedness or if the output
3929 is a biased type or if both the input and output are unsigned. */
3931 && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
3932 && !(TREE_CODE (type) == INTEGER_TYPE
3933 && TYPE_BIASED_REPRESENTATION_P (type))
3934 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
3935 GET_MODE_BITSIZE (TYPE_MODE (type)))
3936 && !(INTEGRAL_TYPE_P (etype)
3937 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
3938 && operand_equal_p (TYPE_RM_SIZE (type),
3939 (TYPE_RM_SIZE (etype) != 0
3940 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
3942 && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
3944 tree base_type = gnat_type_for_mode (TYPE_MODE (type),
3945 TYPE_UNSIGNED (type));
3947 = convert (base_type,
3948 size_binop (MINUS_EXPR,
3950 (GET_MODE_BITSIZE (TYPE_MODE (type))),
3951 TYPE_RM_SIZE (type)));
/* Left-then-right shift by the excess bits performs the sign or zero
   extension in the machine mode.  */
3954 build_binary_op (RSHIFT_EXPR, base_type,
3955 build_binary_op (LSHIFT_EXPR, base_type,
3956 convert (base_type, expr),
3961 /* An unchecked conversion should never raise Constraint_Error. The code
3962 below assumes that GCC's conversion routines overflow the same way that
3963 the underlying hardware does. This is probably true. In the rare case
3964 when it is false, we can rely on the fact that such conversions are
3965 erroneous anyway. */
3966 if (TREE_CODE (expr) == INTEGER_CST)
3967 TREE_OVERFLOW (expr) = 0;
3969 /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
3970 show no longer constant. */
3971 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3972 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
3974 TREE_CONSTANT (expr) = 0;
3979 /* Search the chain of currently available builtin declarations for a node
3980 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
3981 found, if any, or NULL_TREE otherwise. */
3983 builtin_decl_for (tree name)
/* Linear scan; identifier nodes are unique, so pointer comparison suffices. */
3988 for (i = 0; VEC_iterate(tree, builtin_decls, i, decl); i++)
3989 if (DECL_NAME (decl) == name)
3995 /* Return the appropriate GCC tree code for the specified GNAT type,
3996 the latter being a record type as predicated by Is_Record_Type. */
3999 tree_code_for_record_type (Entity_Id gnat_type)
4001 Node_Id component_list
4002 = Component_List (Type_Definition
4004 (Implementation_Base_Type (gnat_type))));
4007 /* Make this a UNION_TYPE unless it's either not an Unchecked_Union or
4008 we have a non-discriminant field outside a variant. In either case,
4009 it's a RECORD_TYPE. */
4011 if (!Is_Unchecked_Union (gnat_type))
/* Scan the non-pragma component items for an E_Component, which would
   force the RECORD_TYPE representation.  */
4014 for (component = First_Non_Pragma (Component_Items (component_list));
4015 Present (component);
4016 component = Next_Non_Pragma (component))
4017 if (Ekind (Defining_Entity (component)) == E_Component)
4023 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
4024 component of an aggregate type. */
4027 type_for_nonaliased_component_p (tree gnu_type)
4029 /* If the type is passed by reference, we may have pointers to the
4030 component so it cannot be made non-aliased. */
4031 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
4034 /* We used to say that any component of aggregate type is aliased
4035 because the front-end may take 'Reference of it. The front-end
4036 has been enhanced in the meantime so as to use a renaming instead
4037 in most cases, but the back-end can probably take the address of
4038 such a component too so we go for the conservative stance.
4040 For instance, we might need the address of any array type, even
4041 if normally passed by copy, to construct a fat pointer if the
4042 component is used as an actual for an unconstrained formal.
4044 Likewise for record types: even if a specific record subtype is
4045 passed by copy, the parent type might be passed by ref (e.g. if
4046 it's of variable size) and we might take the address of a child
4047 component to pass to a parent formal. We have no way to check
4048 for such conditions here. */
4049 if (AGGREGATE_TYPE_P (gnu_type))
4055 /* Perform final processing on global variables. */
4058 gnat_write_global_declarations (void)
4060 /* Proceed to optimize and emit assembly.
4061 FIXME: shouldn't be the front end's responsibility to call this. */
4064 /* Emit debug info for all global declarations. */
4065 emit_debug_global_declarations (VEC_address (tree, global_decls),
4066 VEC_length (tree, global_decls));
4069 #include "gt-ada-utils.h"
4070 #include "gtype-ada.h"