1 /****************************************************************************
3 * GNAT COMPILER COMPONENTS *
7 * C Implementation File *
9 * Copyright (C) 1992-2007, Free Software Foundation, Inc. *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 2, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License distributed with GNAT; see file COPYING. If not, write *
19 * to the Free Software Foundation, 51 Franklin Street, Fifth Floor, *
20 * Boston, MA 02110-1301, USA. *
22 * GNAT was originally developed by the GNAT team at New York University. *
23 * Extensive contributions were provided by Ada Core Technologies Inc. *
25 ****************************************************************************/
29 #include "coretypes.h"
42 #include "tree-inline.h"
43 #include "tree-gimple.h"
44 #include "tree-dump.h"
45 #include "pointer-set.h"
61 #ifndef MAX_FIXED_MODE_SIZE
62 #define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
65 #ifndef MAX_BITS_PER_WORD
66 #define MAX_BITS_PER_WORD BITS_PER_WORD
69 /* If nonzero, pretend we are allocating at global level. */
72 /* Tree nodes for the various types and decls we create. */
73 tree gnat_std_decls[(int) ADT_LAST];
75 /* Functions to call for each of the possible raise reasons. */
76 tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
78 /* Forward declarations for handlers of attributes. */
79 static tree handle_const_attribute (tree *, tree, tree, int, bool *);
80 static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
82 /* Table of machine-independent internal attributes for Ada. We support
83 this minimal set of attributes to accommodate the Alpha back-end which
84 unconditionally puts them on its builtins. */
85 const struct attribute_spec gnat_internal_attribute_table[] =
87 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
88 { "const", 0, 0, true, false, false, handle_const_attribute },
89 { "nothrow", 0, 0, true, false, false, handle_nothrow_attribute },
90 { NULL, 0, 0, false, false, false, NULL }
93 /* Associates a GNAT tree node to a GCC tree node. It is used in
94 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
95 of `save_gnu_tree' for more info. */
96 static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
98 #define GET_GNU_TREE(GNAT_ENTITY) \
99 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
101 #define SET_GNU_TREE(GNAT_ENTITY,VAL) \
102 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
104 #define PRESENT_GNU_TREE(GNAT_ENTITY) \
105 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
107 /* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
108 static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;
110 #define GET_DUMMY_NODE(GNAT_ENTITY) \
111 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
113 #define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
114 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
116 #define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
117 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
119 /* This variable keeps a table for types for each precision so that we only
120 allocate each of them once. Signed and unsigned types are kept separate.
122 Note that these types are only used when fold-const requests something
123 special. Perhaps we should NOT share these types; we'll see how it
125 static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
127 /* Likewise for float types, but record these by mode. */
128 static GTY(()) tree float_types[NUM_MACHINE_MODES];
130 /* For each binding contour we allocate a binding_level structure to indicate
131 the binding depth. */
133 struct gnat_binding_level GTY((chain_next ("%h.chain")))
135 /* The binding level containing this one (the enclosing binding level). */
136 struct gnat_binding_level *chain;
137 /* The BLOCK node for this level. */
139 /* If nonzero, the setjmp buffer that needs to be updated for any
140 variable-sized definition within this context. */
144 /* The binding level currently in effect. */
145 static GTY(()) struct gnat_binding_level *current_binding_level;
147 /* A chain of gnat_binding_level structures awaiting reuse. */
148 static GTY((deletable)) struct gnat_binding_level *free_binding_level;
150 /* An array of global declarations. */
151 static GTY(()) VEC(tree,gc) *global_decls;
153 /* An array of builtin declarations. */
154 static GTY(()) VEC(tree,gc) *builtin_decls;
156 /* An array of global renaming pointers. */
157 static GTY(()) VEC(tree,gc) *global_renaming_pointers;
159 /* Arrays of functions called automatically at the beginning and
160 end of execution, on targets without .ctors/.dtors sections. */
161 static GTY(()) VEC(tree,gc) *static_ctors;
162 static GTY(()) VEC(tree,gc) *static_dtors;
164 /* A chain of unused BLOCK nodes. */
165 static GTY((deletable)) tree free_block_chain;
167 static void gnat_install_builtins (void);
168 static tree merge_sizes (tree, tree, tree, bool, bool);
169 static tree compute_related_constant (tree, tree);
170 static tree split_plus (tree, tree *);
171 static bool value_zerop (tree);
172 static void gnat_gimplify_function (tree);
173 static tree float_type_for_precision (int, enum machine_mode);
174 static tree convert_to_fat_pointer (tree, tree);
175 static tree convert_to_thin_pointer (tree, tree);
176 static tree make_descriptor_field (const char *,tree, tree, tree);
177 static bool potential_alignment_gap (tree, tree, tree);
179 /* Initialize the association of GNAT nodes to GCC trees. */
/* Allocate the GNAT-entity -> GCC-tree association table.  The storage is
   GC-allocated and zero-cleared, so every entry starts out as NULL_TREE,
   i.e. "no tree associated yet".  */
182 init_gnat_to_gnu (void)
184 associate_gnat_to_gnu
185 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
188 /* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
189 which is to be associated with GNAT_ENTITY. Such GCC tree node is always
190 a ..._DECL node. If NO_CHECK is nonzero, the latter check is suppressed.
192 If GNU_DECL is zero, a previous association is to be reset. */
195 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
197 /* Check that GNAT_ENTITY is not already defined and that it is being set
198 to something which is a decl. Raise gigi 401 if not. Usually, this
199 means GNAT_ENTITY is defined twice, but occasionally is due to some
/* Note the assertion only constrains a non-null GNU_DECL: resetting an
   association (GNU_DECL == 0) is always allowed.  The DECL_P check is
   skipped when NO_CHECK is true.  */
201 gcc_assert (!(gnu_decl
202 && (PRESENT_GNU_TREE (gnat_entity)
203 || (!no_check && !DECL_P (gnu_decl)))));
205 SET_GNU_TREE (gnat_entity, gnu_decl);
208 /* GNAT_ENTITY is a GNAT tree node for a defining identifier.
209 Return the ..._DECL node that was associated with it. If there is no tree
210 node associated with GNAT_ENTITY, abort.
212 In some cases, such as delayed elaboration or expressions that need to
213 be elaborated only once, GNAT_ENTITY is really not an entity. */
216 get_gnu_tree (Entity_Id gnat_entity)
/* Abort (via gcc_assert) if no tree was ever associated with GNAT_ENTITY;
   otherwise return the recorded association.  */
218 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
219 return GET_GNU_TREE (gnat_entity);
222 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
225 present_gnu_tree (Entity_Id gnat_entity)
/* Thin predicate wrapper around the PRESENT_GNU_TREE macro.  */
227 return PRESENT_GNU_TREE (gnat_entity);
230 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
233 init_dummy_type (void)
/* NOTE(review): the left-hand side of this assignment (presumably
   dummy_node_table, declared above) is on a line elided from this extract
   — confirm against the full source.  The allocation mirrors
   init_gnat_to_gnu: GC-allocated, zero-cleared, one slot per GNAT node.  */
236 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
239 /* Make a dummy type corresponding to GNAT_TYPE. */
242 make_dummy_type (Entity_Id gnat_type)
244 Entity_Id gnat_underlying = Gigi_Equivalent_Type (gnat_type);
247 /* If there is an equivalent type, get its underlying type. */
248 if (Present (gnat_underlying))
249 gnat_underlying = Underlying_Type (gnat_underlying);
251 /* If there was no equivalent type (can only happen when just annotating
252 types) or underlying type, go back to the original type. */
253 if (No (gnat_underlying))
254 gnat_underlying = gnat_type;
256 /* If there is already a dummy type, use that one. Else make one. */
257 if (PRESENT_DUMMY_NODE (gnat_underlying))
258 return GET_DUMMY_NODE (gnat_underlying);
260 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
262 gnu_type = make_node (Is_Record_Type (gnat_underlying)
263 ? tree_code_for_record_type (gnat_underlying)
/* Name the dummy after the ORIGINAL entity (not the equivalent type's
   entity) so diagnostics and debug info show the source-level name.  */
265 TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
266 TYPE_DUMMY_P (gnu_type) = 1;
267 if (AGGREGATE_TYPE_P (gnu_type))
269 TYPE_STUB_DECL (gnu_type) = build_decl (TYPE_DECL, NULL_TREE, gnu_type);
270 TYPE_BY_REFERENCE_P (gnu_type) = Is_By_Reference_Type (gnat_type);
/* Record the dummy under the underlying entity so later requests for the
   same entity reuse it (see the PRESENT_DUMMY_NODE check above).  */
273 SET_DUMMY_NODE (gnat_underlying, gnu_type);
278 /* Return nonzero if we are currently in the global binding level. */
281 global_bindings_p (void)
/* Nonzero (-1) when at library level: either global allocation is being
   forced via force_global, or no function is currently being compiled.  */
283 return ((force_global || !current_function_decl) ? -1 : 0);
286 /* Enter a new binding level. */
/* NOTE(review): the function header is elided in this extract; per the
   comment above, this is the body of the "enter a new binding level"
   routine.  It recycles binding-level structs and BLOCK nodes from the
   free lists when possible, then pushes the new level on the stack.  */
291 struct gnat_binding_level *newlevel = NULL;
293 /* Reuse a struct for this binding level, if there is one. */
294 if (free_binding_level)
296 newlevel = free_binding_level;
297 free_binding_level = free_binding_level->chain;
301 = (struct gnat_binding_level *)
302 ggc_alloc (sizeof (struct gnat_binding_level));
304 /* Use a free BLOCK, if any; otherwise, allocate one. */
305 if (free_block_chain)
307 newlevel->block = free_block_chain;
308 free_block_chain = TREE_CHAIN (free_block_chain);
309 TREE_CHAIN (newlevel->block) = NULL_TREE;
312 newlevel->block = make_node (BLOCK);
314 /* Point the BLOCK we just made to its parent. */
315 if (current_binding_level)
316 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
/* A recycled BLOCK may carry stale chains; clear its vars/subblocks.  */
318 BLOCK_VARS (newlevel->block) = BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
319 TREE_USED (newlevel->block) = 1;
321 /* Add this level to the front of the chain (stack) of levels that are
323 newlevel->chain = current_binding_level;
324 newlevel->jmpbuf_decl = NULL_TREE;
325 current_binding_level = newlevel;
328 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
329 and point FNDECL to this BLOCK. */
332 set_current_block_context (tree fndecl)
/* Tie the current level's BLOCK and FNDECL together in both directions:
   the BLOCK's supercontext is the function, and the function's initial
   (outermost) block is this BLOCK.  */
334 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
335 DECL_INITIAL (fndecl) = current_binding_level->block;
338 /* Set the jmpbuf_decl for the current binding level to DECL. */
341 set_block_jmpbuf_decl (tree decl)
/* Record DECL as the setjmp buffer to update for variable-sized objects
   defined in this binding level (see the struct field's comment above).  */
343 current_binding_level->jmpbuf_decl = decl;
346 /* Get the jmpbuf_decl, if any, for the current binding level. */
349 get_block_jmpbuf_decl ()
/* Returns NULL_TREE when no jmpbuf was recorded for the current level.  */
351 return current_binding_level->jmpbuf_decl;
354 /* Exit a binding level. Set any BLOCK into the current code group. */
/* NOTE(review): the function header is elided in this extract; per the
   comment above, this is the body of the "exit a binding level" routine.  */
359 struct gnat_binding_level *level = current_binding_level;
360 tree block = level->block;
/* Vars and subblocks were chained in reverse as they were added; restore
   source order before deciding what to do with the BLOCK.  */
362 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
363 BLOCK_SUBBLOCKS (block) = nreverse (BLOCK_SUBBLOCKS (block));
365 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
366 are no variables free the block and merge its subblocks into those of its
367 parent block. Otherwise, add it to the list of its parent. */
368 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
370 else if (BLOCK_VARS (block) == NULL_TREE)
372 BLOCK_SUBBLOCKS (level->chain->block)
373 = chainon (BLOCK_SUBBLOCKS (block),
374 BLOCK_SUBBLOCKS (level->chain->block))
375 TREE_CHAIN (block) = free_block_chain;
376 free_block_chain = block;
380 TREE_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
381 BLOCK_SUBBLOCKS (level->chain->block) = block;
382 TREE_USED (block) = 1;
383 set_block_for_group (block);
/* Return the binding-level struct to the free list for reuse by
   gnat_pushlevel.  */
386 /* Free this binding structure. */
387 current_binding_level = level->chain;
388 level->chain = free_binding_level;
389 free_binding_level = level;
392 /* Insert BLOCK at the end of the list of subblocks of the
393 current binding level. This is used when a BIND_EXPR is expanded,
394 to handle the BLOCK node inside the BIND_EXPR. */
397 insert_block (tree block)
399 TREE_USED (block) = 1;
/* Prepend to the current level's subblock chain; the chain is kept in
   reverse order and nreversed when the level is popped.  */
400 TREE_CHAIN (block) = BLOCK_SUBBLOCKS (current_binding_level->block);
401 BLOCK_SUBBLOCKS (current_binding_level->block) = block;
404 /* Records a ..._DECL node DECL as belonging to the current lexical scope
405 and uses GNAT_NODE for location information and propagating flags. */
408 gnat_pushdecl (tree decl, Node_Id gnat_node)
410 /* If at top level, there is no context. But PARM_DECLs always go in the
411 level of its function. */
412 if (global_bindings_p () && TREE_CODE (decl) != PARM_DECL)
413 DECL_CONTEXT (decl) = 0;
416 DECL_CONTEXT (decl) = current_function_decl;
418 /* Functions imported in another function are not really nested. */
419 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_PUBLIC (decl))
420 DECL_NO_STATIC_CHAIN (decl) = 1;
/* Suppress warnings for this decl when there is no GNAT node or when
   warnings are explicitly off for it.  */
423 TREE_NO_WARNING (decl) = (gnat_node == Empty || Warnings_Off (gnat_node));
425 /* Set the location of DECL and emit a declaration for it. */
426 if (Present (gnat_node))
427 Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
428 add_decl_expr (decl, gnat_node);
430 /* Put the declaration on the list. The list of declarations is in reverse
431 order. The list will be reversed later. Put global variables in the
432 globals list and builtin functions in a dedicated list to speed up
433 further lookups. Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into
434 the list, as they will cause trouble with the debugger and aren't needed
436 if (TREE_CODE (decl) != TYPE_DECL
437 || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
439 if (global_bindings_p ())
441 VEC_safe_push (tree, gc, global_decls, decl);
443 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
444 VEC_safe_push (tree, gc, builtin_decls, decl);
/* Not global: chain onto the current binding level's BLOCK_VARS.  */
448 TREE_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
449 BLOCK_VARS (current_binding_level->block) = decl;
453 /* For the declaration of a type, set its name if it either is not already
454 set, was set to an IDENTIFIER_NODE, indicating an internal name,
455 or if the previous type name was not derived from a source name.
456 We'd rather have the type named with a real name and all the pointer
457 types to the same object have the same POINTER_TYPE node. Code in the
458 equivalent function of c-decl.c makes a copy of the type node here, but
459 that may cause us trouble with incomplete types. We make an exception
460 for fat pointer types because the compiler automatically builds them
461 for unconstrained array types and the debugger uses them to represent
462 both these and pointers to these. */
463 if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
465 tree t = TREE_TYPE (decl);
467 if (!TYPE_NAME (t) || TREE_CODE (TYPE_NAME (t)) == IDENTIFIER_NODE)
468 TYPE_NAME (t) = decl;
/* Fat pointers: name a variant copy instead, so the shared node keeps
   its identity (see the long comment above for the rationale).  */
469 else if (TYPE_FAT_POINTER_P (t))
471 tree tt = build_variant_type_copy (t);
472 TYPE_NAME (tt) = decl;
473 TREE_USED (tt) = TREE_USED (t);
474 TREE_TYPE (decl) = tt;
475 DECL_ORIGINAL_TYPE (decl) = t;
477 else if (DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
478 TYPE_NAME (t) = decl;
482 /* Do little here. Set up the standard declarations later after the
483 front end has been run. */
486 gnat_init_decl_processing (void)
490 /* Make the binding_level structure for global names. */
491 current_function_decl = 0;
492 current_binding_level = 0;
493 free_binding_level = 0;
496 build_common_tree_nodes (true, true);
498 /* In Ada, we use a signed type for SIZETYPE. Use the signed type
499 corresponding to the size of Pmode. In most cases when ptr_mode and
500 Pmode differ, C will use the width of ptr_mode as sizetype. But we get
501 far better code using the width of Pmode. Make this here since we need
502 this before we can expand the GNAT types. */
503 size_type_node = gnat_type_for_size (GET_MODE_BITSIZE (Pmode), 0);
504 set_sizetype (size_type_node);
505 build_common_tree_nodes_2 (0);
507 /* Give names and make TYPE_DECLs for common types. */
/* NOTE(review): the second argument (the GNAT node) of these
   gnat_pushdecl calls sits on lines elided from this extract.  */
508 gnat_pushdecl (build_decl (TYPE_DECL, get_identifier (SIZE_TYPE), sizetype),
510 gnat_pushdecl (build_decl (TYPE_DECL, get_identifier ("integer"),
513 gnat_pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned char"),
516 gnat_pushdecl (build_decl (TYPE_DECL, get_identifier ("long integer"),
517 long_integer_type_node),
520 ptr_void_type_node = build_pointer_type (void_type_node);
522 gnat_install_builtins ();
525 /* Install the builtin functions we might need. */
/* NOTE(review): the definition uses an old-style `()` parameter list while
   the forward declaration above says `(void)`; harmless here, but `(void)`
   would be consistent.  */
528 gnat_install_builtins ()
530 /* Builtins used by generic middle-end optimizers. */
531 build_common_builtin_nodes ();
533 /* Target specific builtins, such as the AltiVec family on ppc. */
534 targetm.init_builtins ();
537 /* Create the predefined scalar types such as `integer_type_node' needed
538 in the gcc back-end and initialize the global binding level. */
/* NOTE(review): this function is heavily elided in this extract — many
   declaration names on the left of `= create_subprog_decl` lines, several
   argument-list continuation lines, and the closing braces are missing.
   The comments below only describe what the visible lines establish.  */
541 init_gigi_decls (tree long_long_float_type, tree exception_type)
546 /* Set the types that GCC and Gigi use from the front end. We would like
547 to do this for char_type_node, but it needs to correspond to the C
549 if (TREE_CODE (TREE_TYPE (long_long_float_type)) == INTEGER_TYPE)
551 /* In this case, the builtin floating point types are VAX float,
552 so make up a type for use. */
553 longest_float_type_node = make_node (REAL_TYPE);
554 TYPE_PRECISION (longest_float_type_node) = LONG_DOUBLE_TYPE_SIZE;
555 layout_type (longest_float_type_node);
556 create_type_decl (get_identifier ("longest float type"),
557 longest_float_type_node, NULL, false, true, Empty);
560 longest_float_type_node = TREE_TYPE (long_long_float_type);
562 except_type_node = TREE_TYPE (exception_type);
564 unsigned_type_node = gnat_type_for_size (INT_TYPE_SIZE, 1);
565 create_type_decl (get_identifier ("unsigned int"), unsigned_type_node,
566 NULL, false, true, Empty);
568 void_type_decl_node = create_type_decl (get_identifier ("void"),
569 void_type_node, NULL, false, true,
572 void_ftype = build_function_type (void_type_node, NULL_TREE);
573 ptr_void_ftype = build_pointer_type (void_ftype);
575 /* Now declare runtime functions. */
576 endlink = tree_cons (NULL_TREE, void_type_node, NULL_TREE);
578 /* malloc is a function declaration tree for a function to allocate
580 malloc_decl = create_subprog_decl (get_identifier ("__gnat_malloc"),
582 build_function_type (ptr_void_type_node,
583 tree_cons (NULL_TREE,
586 NULL_TREE, false, true, true, NULL,
/* Lets the optimizers assume the returned pointer does not alias.  */
588 DECL_IS_MALLOC (malloc_decl) = 1;
590 /* free is a function declaration tree for a function to free memory. */
592 = create_subprog_decl (get_identifier ("__gnat_free"), NULL_TREE,
593 build_function_type (void_type_node,
594 tree_cons (NULL_TREE,
597 NULL_TREE, false, true, true, NULL, Empty);
599 /* Make the types and functions used for exception processing. */
/* JMPBUF_T: an array of 6 Pmode words (index type 0..5).  */
601 = build_array_type (gnat_type_for_mode (Pmode, 0),
602 build_index_type (build_int_cst (NULL_TREE, 5)));
603 create_type_decl (get_identifier ("JMPBUF_T"), jmpbuf_type, NULL,
605 jmpbuf_ptr_type = build_pointer_type (jmpbuf_type);
607 /* Functions to get and set the jumpbuf pointer for the current thread. */
609 = create_subprog_decl
610 (get_identifier ("system__soft_links__get_jmpbuf_address_soft"),
611 NULL_TREE, build_function_type (jmpbuf_ptr_type, NULL_TREE),
612 NULL_TREE, false, true, true, NULL, Empty);
613 /* Avoid creating superfluous edges to __builtin_setjmp receivers. */
614 DECL_IS_PURE (get_jmpbuf_decl) = 1;
617 = create_subprog_decl
618 (get_identifier ("system__soft_links__set_jmpbuf_address_soft"),
620 build_function_type (void_type_node,
621 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
622 NULL_TREE, false, true, true, NULL, Empty);
624 /* Function to get the current exception. */
626 = create_subprog_decl
627 (get_identifier ("system__soft_links__get_gnat_exception"),
629 build_function_type (build_pointer_type (except_type_node), NULL_TREE),
630 NULL_TREE, false, true, true, NULL, Empty);
631 /* Avoid creating superfluous edges to __builtin_setjmp receivers. */
632 DECL_IS_PURE (get_excptr_decl) = 1;
634 /* Functions that raise exceptions. */
636 = create_subprog_decl
637 (get_identifier ("__gnat_raise_nodefer_with_msg"), NULL_TREE,
638 build_function_type (void_type_node,
639 tree_cons (NULL_TREE,
640 build_pointer_type (except_type_node),
642 NULL_TREE, false, true, true, NULL, Empty);
644 /* Dummy objects to materialize "others" and "all others" in the exception
645 tables. These are exported by a-exexpr.adb, so see this unit for the
649 = create_var_decl (get_identifier ("OTHERS"),
650 get_identifier ("__gnat_others_value"),
651 integer_type_node, 0, 1, 0, 1, 1, 0, Empty);
654 = create_var_decl (get_identifier ("ALL_OTHERS"),
655 get_identifier ("__gnat_all_others_value"),
656 integer_type_node, 0, 1, 0, 1, 1, 0, Empty);
658 /* Hooks to call when entering/leaving an exception handler. */
660 = create_subprog_decl (get_identifier ("__gnat_begin_handler"), NULL_TREE,
661 build_function_type (void_type_node,
662 tree_cons (NULL_TREE,
665 NULL_TREE, false, true, true, NULL, Empty);
668 = create_subprog_decl (get_identifier ("__gnat_end_handler"), NULL_TREE,
669 build_function_type (void_type_node,
670 tree_cons (NULL_TREE,
673 NULL_TREE, false, true, true, NULL, Empty);
675 /* If in no exception handlers mode, all raise statements are redirected to
676 __gnat_last_chance_handler. No need to redefine raise_nodefer_decl, since
677 this procedure will never be called in this mode. */
678 if (No_Exception_Handlers_Set ())
681 = create_subprog_decl
682 (get_identifier ("__gnat_last_chance_handler"), NULL_TREE,
683 build_function_type (void_type_node,
684 tree_cons (NULL_TREE,
685 build_pointer_type (char_type_node),
686 tree_cons (NULL_TREE,
689 NULL_TREE, false, true, true, NULL, Empty);
/* In that mode every raise reason maps to the same last-chance decl.  */
691 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
692 gnat_raise_decls[i] = decl;
695 /* Otherwise, make one decl for each exception reason. */
696 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
/* Runtime check routines are named __gnat_rcheck_NN by reason code.  */
700 sprintf (name, "__gnat_rcheck_%.2d", i);
702 = create_subprog_decl
703 (get_identifier (name), NULL_TREE,
704 build_function_type (void_type_node,
705 tree_cons (NULL_TREE,
708 tree_cons (NULL_TREE,
711 NULL_TREE, false, true, true, NULL, Empty);
714 /* Indicate that these never return. */
715 TREE_THIS_VOLATILE (raise_nodefer_decl) = 1;
716 TREE_SIDE_EFFECTS (raise_nodefer_decl) = 1;
717 TREE_TYPE (raise_nodefer_decl)
718 = build_qualified_type (TREE_TYPE (raise_nodefer_decl),
721 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
723 TREE_THIS_VOLATILE (gnat_raise_decls[i]) = 1;
724 TREE_SIDE_EFFECTS (gnat_raise_decls[i]) = 1;
725 TREE_TYPE (gnat_raise_decls[i])
726 = build_qualified_type (TREE_TYPE (gnat_raise_decls[i]),
730 /* setjmp returns an integer and has one operand, which is a pointer to
733 = create_subprog_decl
734 (get_identifier ("__builtin_setjmp"), NULL_TREE,
735 build_function_type (integer_type_node,
736 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
737 NULL_TREE, false, true, true, NULL, Empty);
/* Mark as the real builtin so the middle end expands it specially.  */
739 DECL_BUILT_IN_CLASS (setjmp_decl) = BUILT_IN_NORMAL;
740 DECL_FUNCTION_CODE (setjmp_decl) = BUILT_IN_SETJMP;
742 /* update_setjmp_buf updates a setjmp buffer from the current stack pointer
744 update_setjmp_buf_decl
745 = create_subprog_decl
746 (get_identifier ("__builtin_update_setjmp_buf"), NULL_TREE,
747 build_function_type (void_type_node,
748 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
749 NULL_TREE, false, true, true, NULL, Empty);
751 DECL_BUILT_IN_CLASS (update_setjmp_buf_decl) = BUILT_IN_NORMAL;
752 DECL_FUNCTION_CODE (update_setjmp_buf_decl) = BUILT_IN_UPDATE_SETJMP_BUF;
754 main_identifier_node = get_identifier ("main");
757 /* Given a record type RECORD_TYPE and a chain of FIELD_DECL nodes FIELDLIST,
758 finish constructing the record or union type. If REP_LEVEL is zero, this
759 record has no representation clause and so will be entirely laid out here.
760 If REP_LEVEL is one, this record has a representation clause and has been
761 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
762 this record is derived from a parent record and thus inherits its layout;
763 only make a pass on the fields to finalize them. If DO_NOT_FINALIZE is
764 true, the record type is expected to be modified afterwards so it will
765 not be sent to the back-end for finalization. */
/* NOTE(review): several interior lines of this function are elided in this
   extract (switch header, some case labels, continuation lines, braces);
   comments below describe only what the visible lines establish.  */
768 finish_record_type (tree record_type, tree fieldlist, int rep_level,
769 bool do_not_finalize)
771 enum tree_code code = TREE_CODE (record_type);
/* Running totals: SIZE is the GCC size, ADA_SIZE the Ada ('Size) size.  */
772 tree ada_size = bitsize_zero_node;
773 tree size = bitsize_zero_node;
774 bool var_size = false;
/* Remember whether sizes were already set (e.g. by a rep clause).  */
775 bool had_size = TYPE_SIZE (record_type) != 0;
776 bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
779 TYPE_FIELDS (record_type) = fieldlist;
780 TYPE_STUB_DECL (record_type)
781 = build_decl (TYPE_DECL, NULL_TREE, record_type);
783 /* We don't need both the typedef name and the record name output in
784 the debugging information, since they are the same. */
785 DECL_ARTIFICIAL (TYPE_STUB_DECL (record_type)) = 1;
787 /* Globally initialize the record first. If this is a rep'ed record,
788 that just means some initializations; otherwise, layout the record. */
791 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
792 TYPE_MODE (record_type) = BLKmode;
795 TYPE_SIZE_UNIT (record_type) = size_zero_node;
797 TYPE_SIZE (record_type) = bitsize_zero_node;
799 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
800 out just like a UNION_TYPE, since the size will be fixed. */
801 else if (code == QUAL_UNION_TYPE)
806 /* Ensure there isn't a size already set. There can be in an error
807 case where there is a rep clause but all fields have errors and
808 no longer have a position. */
809 TYPE_SIZE (record_type) = 0;
810 layout_type (record_type);
813 /* At this point, the position and size of each field is known. It was
814 either set before entry by a rep clause, or by laying out the type above.
816 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
817 to compute the Ada size; the GCC size and alignment (for rep'ed records
818 that are not padding types); and the mode (for rep'ed records). We also
819 clear the DECL_BIT_FIELD indication for the cases we know have not been
820 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
822 if (code == QUAL_UNION_TYPE)
823 fieldlist = nreverse (fieldlist);
825 for (field = fieldlist; field; field = TREE_CHAIN (field))
827 tree pos = bit_position (field);
829 tree type = TREE_TYPE (field);
830 tree this_size = DECL_SIZE (field);
831 tree this_ada_size = DECL_SIZE (field);
833 /* We need to make an XVE/XVU record if any field has variable size,
834 whether or not the record does. For example, if we have a union,
835 it may be that all fields, rounded up to the alignment, have the
836 same size, in which case we'll use that size. But the debug
837 output routines (except Dwarf2) won't be able to output the fields,
838 so we need to make the special record. */
839 if (TREE_CODE (this_size) != INTEGER_CST)
/* Inner records contribute their Ada size, if one was computed, instead
   of their GCC size — except fat pointers and template-bearing types.  */
842 if ((TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
843 || TREE_CODE (type) == QUAL_UNION_TYPE)
844 && !TYPE_IS_FAT_POINTER_P (type)
845 && !TYPE_CONTAINS_TEMPLATE_P (type)
846 && TYPE_ADA_SIZE (type))
847 this_ada_size = TYPE_ADA_SIZE (type);
849 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
850 if (DECL_BIT_FIELD (field) && !STRICT_ALIGNMENT
851 && value_factor_p (pos, BITS_PER_UNIT)
852 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
853 DECL_BIT_FIELD (field) = 0;
855 /* If we still have DECL_BIT_FIELD set at this point, we know the field
856 is technically not addressable. Except that it can actually be
857 addressed if the field is BLKmode and happens to be properly
859 DECL_NONADDRESSABLE_P (field)
860 |= DECL_BIT_FIELD (field) && DECL_MODE (field) != BLKmode;
862 if ((rep_level > 0) && !DECL_BIT_FIELD (field))
863 TYPE_ALIGN (record_type)
864 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
/* Plain union: the size is the MAX over all members.  */
869 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
870 size = size_binop (MAX_EXPR, size, this_size);
873 case QUAL_UNION_TYPE:
/* Qualified union: pick this field's size when its qualifier holds,
   otherwise keep the size accumulated so far.  */
875 = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
876 this_ada_size, ada_size);
877 size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
882 /* Since we know here that all fields are sorted in order of
883 increasing bit position, the size of the record is one
884 higher than the ending bit of the last field processed
885 unless we have a rep clause, since in that case we might
886 have a field outside a QUAL_UNION_TYPE that has a higher ending
887 position. So use a MAX in that case. Also, if this field is a
888 QUAL_UNION_TYPE, we need to take into account the previous size in
889 the case of empty variants. */
891 = merge_sizes (ada_size, pos, this_ada_size,
892 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
894 = merge_sizes (size, pos, this_size,
895 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
/* Undo the reversal done before the loop so the field list is back in
   its original order.  */
903 if (code == QUAL_UNION_TYPE)
904 nreverse (fieldlist);
908 /* If this is a padding record, we never want to make the size smaller
909 than what was specified in it, if any. */
910 if (TREE_CODE (record_type) == RECORD_TYPE
911 && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
912 size = TYPE_SIZE (record_type);
914 /* Now set any of the values we've just computed that apply. */
915 if (!TYPE_IS_FAT_POINTER_P (record_type)
916 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
917 SET_TYPE_ADA_SIZE (record_type, ada_size);
921 tree size_unit = had_size_unit
922 ? TYPE_SIZE_UNIT (record_type)
924 size_binop (CEIL_DIV_EXPR, size,
926 unsigned int align = TYPE_ALIGN (record_type);
/* Round both sizes up to the alignment before installing them.  */
928 TYPE_SIZE (record_type) = variable_size (round_up (size, align));
929 TYPE_SIZE_UNIT (record_type)
930 = variable_size (round_up (size_unit, align / BITS_PER_UNIT));
932 compute_record_mode (record_type);
936 if (!do_not_finalize)
937 rest_of_record_type_compilation (record_type);
940 /* Wrap up compilation of RECORD_TYPE, i.e. most notably output all
941 the debug information associated with it. It need not be invoked
942 directly in most cases since finish_record_type takes care of doing
943 so, unless explicitly requested not to through DO_NOT_FINALIZE. */
946 rest_of_record_type_compilation (tree record_type)
948 tree fieldlist = TYPE_FIELDS (record_type);
950 bool var_size = false;
952 for (field = fieldlist; field; field = TREE_CHAIN (field))
954 /* We need to make an XVE/XVU record if any field has variable size,
955 whether or not the record does. For example, if we have a union,
956 it may be that all fields, rounded up to the alignment, have the
957 same size, in which case we'll use that size. But the debug
958 output routines (except Dwarf2) won't be able to output the fields,
959 so we need to make the special record. */
960 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST)
967 /* If this record is of variable size, rename it so that the
968 debugger knows it is and make a new, parallel, record
969 that tells the debugger how the record is laid out. See
970 exp_dbug.ads. But don't do this for records that are padding
971 since they confuse GDB. */
973 && !(TREE_CODE (record_type) == RECORD_TYPE
974 && TYPE_IS_PADDING_P (record_type)))
977 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
978 ? UNION_TYPE : TREE_CODE (record_type));
979 tree orig_name = TYPE_NAME (record_type);
981 = (TREE_CODE (orig_name) == TYPE_DECL ? DECL_NAME (orig_name)
984 = concat_id_with_name (orig_id,
985 TREE_CODE (record_type) == QUAL_UNION_TYPE
987 tree last_pos = bitsize_zero_node;
989 tree prev_old_field = 0;
991 TYPE_NAME (new_record_type) = new_id;
992 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
993 TYPE_STUB_DECL (new_record_type)
994 = build_decl (TYPE_DECL, NULL_TREE, new_record_type);
995 DECL_ARTIFICIAL (TYPE_STUB_DECL (new_record_type)) = 1;
996 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
997 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
998 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
999 TYPE_SIZE_UNIT (new_record_type)
1000 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
1002 /* Now scan all the fields, replacing each field with a new
1003 field corresponding to the new encoding. */
1004 for (old_field = TYPE_FIELDS (record_type); old_field;
1005 old_field = TREE_CHAIN (old_field))
1007 tree field_type = TREE_TYPE (old_field);
1008 tree field_name = DECL_NAME (old_field);
1010 tree curpos = bit_position (old_field);
1012 unsigned int align = 0;
1015 /* See how the position was modified from the last position.
1017 There are two basic cases we support: a value was added
1018 to the last position or the last position was rounded to
1019 a boundary and they something was added. Check for the
1020 first case first. If not, see if there is any evidence
1021 of rounding. If so, round the last position and try
1024 If this is a union, the position can be taken as zero. */
1026 if (TREE_CODE (new_record_type) == UNION_TYPE)
1027 pos = bitsize_zero_node, align = 0;
1029 pos = compute_related_constant (curpos, last_pos);
1031 if (!pos && TREE_CODE (curpos) == MULT_EXPR
1032 && host_integerp (TREE_OPERAND (curpos, 1), 1))
1034 tree offset = TREE_OPERAND (curpos, 0);
1035 align = tree_low_cst (TREE_OPERAND (curpos, 1), 1);
1037 /* Strip off any conversions. */
1038 while (TREE_CODE (offset) == NON_LVALUE_EXPR
1039 || TREE_CODE (offset) == NOP_EXPR
1040 || TREE_CODE (offset) == CONVERT_EXPR)
1041 offset = TREE_OPERAND (offset, 0);
1043 /* An offset which is a bitwise AND with a negative power of 2
1044 means an alignment corresponding to this power of 2. */
1045 if (TREE_CODE (offset) == BIT_AND_EXPR
1046 && host_integerp (TREE_OPERAND (offset, 1), 0)
1047 && tree_int_cst_sgn (TREE_OPERAND (offset, 1)) < 0)
1050 = - tree_low_cst (TREE_OPERAND (offset, 1), 0);
1051 if (exact_log2 (pow) > 0)
1055 pos = compute_related_constant (curpos,
1056 round_up (last_pos, align));
1058 else if (!pos && TREE_CODE (curpos) == PLUS_EXPR
1059 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
1060 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
1061 && host_integerp (TREE_OPERAND
1062 (TREE_OPERAND (curpos, 0), 1),
1067 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
1068 pos = compute_related_constant (curpos,
1069 round_up (last_pos, align));
1071 else if (potential_alignment_gap (prev_old_field, old_field,
1074 align = TYPE_ALIGN (field_type);
1075 pos = compute_related_constant (curpos,
1076 round_up (last_pos, align));
1079 /* If we can't compute a position, set it to zero.
1081 ??? We really should abort here, but it's too much work
1082 to get this correct for all cases. */
1085 pos = bitsize_zero_node;
1087 /* See if this type is variable-sized and make a pointer type
1088 and indicate the indirection if so. Beware that the debug
1089 back-end may adjust the position computed above according
1090 to the alignment of the field type, i.e. the pointer type
1091 in this case, if we don't preventively counter that. */
1092 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
1094 field_type = build_pointer_type (field_type);
1095 if (align != 0 && TYPE_ALIGN (field_type) > align)
1097 field_type = copy_node (field_type);
1098 TYPE_ALIGN (field_type) = align;
1103 /* Make a new field name, if necessary. */
1104 if (var || align != 0)
1109 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
1110 align / BITS_PER_UNIT);
1112 strcpy (suffix, "XVL");
1114 field_name = concat_id_with_name (field_name, suffix);
1117 new_field = create_field_decl (field_name, field_type,
1119 DECL_SIZE (old_field), pos, 0);
1120 TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
1121 TYPE_FIELDS (new_record_type) = new_field;
1123 /* If old_field is a QUAL_UNION_TYPE, take its size as being
1124 zero. The only time it's not the last field of the record
1125 is when there are other components at fixed positions after
1126 it (meaning there was a rep clause for every field) and we
1127 want to be able to encode them. */
1128 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
1129 (TREE_CODE (TREE_TYPE (old_field))
1132 : DECL_SIZE (old_field));
1133 prev_old_field = old_field;
1136 TYPE_FIELDS (new_record_type)
1137 = nreverse (TYPE_FIELDS (new_record_type));
1139 rest_of_type_decl_compilation (TYPE_STUB_DECL (new_record_type));
1142 rest_of_type_decl_compilation (TYPE_STUB_DECL (record_type));
1145 /* Utility function of above to merge LAST_SIZE, the previous size of a record
1146 with FIRST_BIT and SIZE that describe a field. SPECIAL is nonzero
1147 if this represents a QUAL_UNION_TYPE in which case we must look for
1148 COND_EXPRs and replace a value of zero with the old size. If HAS_REP
1149 is nonzero, we must take the MAX of the end position of this field
1150 with LAST_SIZE. In all other cases, we use FIRST_BIT plus SIZE.
1152 We return an expression for the size. */
1155 merge_sizes (tree last_size, tree first_bit, tree size, bool special,
1158 tree type = TREE_TYPE (last_size);
/* Ordinary field: the running size is FIRST_BIT + SIZE, or its MAX
   with the previous size when a rep clause is in effect.  */
1161 if (!special || TREE_CODE (size) != COND_EXPR)
1163 new = size_binop (PLUS_EXPR, first_bit, size);
1165 new = size_binop (MAX_EXPR, last_size, new);
/* QUAL_UNION variant: recurse into both arms of the COND_EXPR,
   mapping a zero size back to LAST_SIZE as documented above.  */
1169 new = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
1170 integer_zerop (TREE_OPERAND (size, 1))
1171 ? last_size : merge_sizes (last_size, first_bit,
1172 TREE_OPERAND (size, 1),
1174 integer_zerop (TREE_OPERAND (size, 2))
1175 ? last_size : merge_sizes (last_size, first_bit,
1176 TREE_OPERAND (size, 2),
1179 /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
1180 when fed through substitute_in_expr) into thinking that a constant
1181 size is not constant. */
1182 while (TREE_CODE (new) == NON_LVALUE_EXPR)
1183 new = TREE_OPERAND (new, 0);
1188 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1189 related by the addition of a constant. Return that constant if so. */
1192 compute_related_constant (tree op0, tree op1)
1194 tree op0_var, op1_var;
/* Split each operand into constant + variable parts, then compare
   the variable parts; the difference of the constants is the answer.  */
1195 tree op0_con = split_plus (op0, &op0_var);
1196 tree op1_con = split_plus (op1, &op1_var);
1197 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1199 if (operand_equal_p (op0_var, op1_var, 0))
/* Fallback: check the relation directly on the original operands.  */
1201 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1207 /* Utility function of above to split a tree OP which may be a sum, into a
1208 constant part, which is returned, and a variable part, which is stored
1209 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1213 split_plus (tree in, tree *pvar)
1215 /* Strip NOPS in order to ease the tree traversal and maximize the
1216 potential for constant or plus/minus discovery. We need to be careful
1217 to always return and set *pvar to bitsizetype trees, but it's worth
/* Default: the whole expression is the variable part, constant is 0.  */
1221 *pvar = convert (bitsizetype, in);
1223 if (TREE_CODE (in) == INTEGER_CST)
1225 *pvar = bitsize_zero_node;
1226 return convert (bitsizetype, in);
/* For a sum or difference, split both operands recursively and
   recombine the constant and variable parts with the same operator.  */
1228 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1230 tree lhs_var, rhs_var;
1231 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1232 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
/* Neither side contributed a constant: nothing to split out.  */
1234 if (lhs_var == TREE_OPERAND (in, 0)
1235 && rhs_var == TREE_OPERAND (in, 1))
1236 return bitsize_zero_node;
1238 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1239 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
1242 return bitsize_zero_node;
1245 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1246 subprogram. If it is void_type_node, then we are dealing with a procedure,
1247 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1248 PARM_DECL nodes that are the subprogram arguments. CICO_LIST is the
1249 copy-in/copy-out list to be stored into TYPE_CICO_LIST.
1250 RETURNS_UNCONSTRAINED is nonzero if the function returns an unconstrained
1251 object. RETURNS_BY_REF is nonzero if the function returns by reference.
1252 RETURNS_WITH_DSP is nonzero if the function is to return with a
1253 depressed stack pointer. RETURNS_BY_TARGET_PTR is true if the function
1254 is to be passed (as its first parameter) the address of the place to copy
1258 create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
1259 bool returns_unconstrained, bool returns_by_ref,
1260 bool returns_with_dsp, bool returns_by_target_ptr)
1262 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1263 the subprogram formal parameters. This list is generated by traversing the
1264 input list of PARM_DECL nodes. */
1265 tree param_type_list = NULL;
1269 for (param_decl = param_decl_list; param_decl;
1270 param_decl = TREE_CHAIN (param_decl))
1271 param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
1274 /* The list of the function parameter types has to be terminated by the void
1275 type to signal to the back-end that we are not dealing with a variable
1276 parameter subprogram, but that the subprogram has a fixed number of
1278 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1280 /* The list of argument types has been created in reverse
1282 param_type_list = nreverse (param_type_list);
1284 type = build_function_type (return_type, param_type_list);
1286 /* TYPE may have been shared since GCC hashes types. If it has a CICO_LIST
1287 or the new type should, make a copy of TYPE. Likewise for
1288 RETURNS_UNCONSTRAINED and RETURNS_BY_REF. */
1289 if (TYPE_CI_CO_LIST (type) || cico_list
1290 || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
1291 || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref
1292 || TYPE_RETURNS_BY_TARGET_PTR_P (type) != returns_by_target_ptr)
1293 type = copy_type (type);
/* Record the Ada-specific calling-convention flags on the (possibly
   copied) type.  */
1295 TYPE_CI_CO_LIST (type) = cico_list;
1296 TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
1297 TYPE_RETURNS_STACK_DEPRESSED (type) = returns_with_dsp;
1298 TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
1299 TYPE_RETURNS_BY_TARGET_PTR_P (type) = returns_by_target_ptr;
1303 /* Return a copy of TYPE but safe to modify in any way. */
1306 copy_type (tree type)
1308 tree new = copy_node (type);
1310 /* copy_node clears this field instead of copying it, because it is
1311 aliased with TREE_CHAIN. */
1312 TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type);
/* Detach the copy from the original's pointer/variant chains so that
   modifying it cannot affect the original type.  */
1314 TYPE_POINTER_TO (new) = 0;
1315 TYPE_REFERENCE_TO (new) = 0;
1316 TYPE_MAIN_VARIANT (new) = new;
1317 TYPE_NEXT_VARIANT (new) = 0;
1322 /* Return an INTEGER_TYPE of SIZETYPE with range MIN to MAX and whose
1323 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position of
1327 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
1329 /* First build a type for the desired range. */
1330 tree type = build_index_2_type (min, max);
1332 /* If this type has the TYPE_INDEX_TYPE we want, return it. Otherwise, if it
1333 doesn't have TYPE_INDEX_TYPE set, set it to INDEX. If TYPE_INDEX_TYPE
1334 is set, but not to INDEX, make a copy of this type with the requested
1335 index type. Note that we have no way of sharing these types, but that's
1336 only a small hole. */
1337 if (TYPE_INDEX_TYPE (type) == index)
1339 else if (TYPE_INDEX_TYPE (type))
1340 type = copy_type (type);
1342 SET_TYPE_INDEX_TYPE (type, index);
/* Emit an artificial TYPE_DECL so debug info is generated for it.  */
1343 create_type_decl (NULL_TREE, type, NULL, true, false, gnat_node);
1347 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type (a character
1348 string) and TYPE is a ..._TYPE node giving its data type.
1349 ARTIFICIAL_P is true if this is a declaration that was generated
1350 by the compiler. DEBUG_INFO_P is true if we need to write debugging
1351 information about this type. GNAT_NODE is used for the position of
1355 create_type_decl (tree type_name, tree type, struct attrib *attr_list,
1356 bool artificial_p, bool debug_info_p, Node_Id gnat_node)
1358 tree type_decl = build_decl (TYPE_DECL, type_name, type);
1359 enum tree_code code = TREE_CODE (type);
1361 DECL_ARTIFICIAL (type_decl) = artificial_p;
/* Dummy types are placeholders; don't enter them into a binding level.  */
1363 if (!TYPE_IS_DUMMY_P (type))
1364 gnat_pushdecl (type_decl, gnat_node);
1366 process_attributes (type_decl, attr_list);
1368 /* Pass type declaration information to the debugger unless this is an
1369 UNCONSTRAINED_ARRAY_TYPE, which the debugger does not support,
1370 and ENUMERAL_TYPE or RECORD_TYPE which is handled separately, or
1371 type for which debugging information was not requested. */
1372 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
1373 DECL_IGNORED_P (type_decl) = 1;
1374 else if (code != ENUMERAL_TYPE
1375 && (code != RECORD_TYPE || TYPE_IS_FAT_POINTER_P (type))
1376 && !((code == POINTER_TYPE || code == REFERENCE_TYPE)
1377 && TYPE_IS_DUMMY_P (TREE_TYPE (type))))
1378 rest_of_type_decl_compilation (type_decl);
1383 /* Helper for create_var_decl and create_true_var_decl. Returns a GCC VAR_DECL
1386 VAR_NAME gives the name of the variable. ASM_NAME is its assembler name
1387 (if provided). TYPE is its data type (a GCC ..._TYPE node). VAR_INIT is
1388 the GCC tree for an optional initial expression; NULL_TREE if none.
1390 CONST_FLAG is true if this variable is constant, in which case we might
1391 return a CONST_DECL node unless CONST_DECL_ALLOWED_FLAG is false.
1393 PUBLIC_FLAG is true if this definition is to be made visible outside of
1394 the current compilation unit. This flag should be set when processing the
1395 variable definitions in a package specification. EXTERN_FLAG is nonzero
1396 when processing an external variable declaration (as opposed to a
1397 definition: no storage is to be allocated for the variable here).
1399 STATIC_FLAG is only relevant when not at top level. In that case
1400 it indicates whether to always allocate storage to the variable.
1402 GNAT_NODE is used for the position of the decl. */
1405 create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
1406 bool const_flag, bool const_decl_allowed_flag,
1407 bool public_flag, bool extern_flag, bool static_flag,
1408 struct attrib *attr_list, Node_Id gnat_node)
/* Whether the initializer is constant for the decl's storage duration:
   at global/static level it must be a valid static initializer.  */
1412 && TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
1413 && (global_bindings_p () || static_flag
1414 ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init)) != 0
1415 : TREE_CONSTANT (var_init)));
1417 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
1418 case the initializer may be used in-lieu of the DECL node (as done in
1419 Identifier_to_gnu). This is useful to prevent the need of elaboration
1420 code when an identifier for which such a decl is made is in turn used as
1421 an initializer. We used to rely on CONST vs VAR_DECL for this purpose,
1422 but extra constraints apply to this choice (see below) and are not
1423 relevant to the distinction we wish to make. */
1424 bool constant_p = const_flag && init_const;
1426 /* The actual DECL node. CONST_DECL was initially intended for enumerals
1427 and may be used for scalars in general but not for aggregates. */
1429 = build_decl ((constant_p && const_decl_allowed_flag
1430 && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
1433 /* If this is external, throw away any initializations (they will be done
1434 elsewhere) unless this is a constant for which we would like to remain
1435 able to get the initializer. If we are defining a global here, leave a
1436 constant initialization and save any variable elaborations for the
1437 elaboration routine. If we are just annotating types, throw away the
1438 initialization if it isn't a constant. */
1439 if ((extern_flag && !constant_p)
1440 || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
1441 var_init = NULL_TREE;
1443 /* At the global level, an initializer requiring code to be generated
1444 produces elaboration statements. Check that such statements are allowed,
1445 that is, not violating a No_Elaboration_Code restriction. */
1446 if (global_bindings_p () && var_init != 0 && ! init_const)
1447 Check_Elaboration_Code_Allowed (gnat_node);
1449 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
1450 try to fiddle with DECL_COMMON. However, on platforms that don't
1451 support global BSS sections, uninitialized global variables would
1452 go in DATA instead, thus increasing the size of the executable. */
1454 && TREE_CODE (var_decl) == VAR_DECL
1455 && !have_global_bss_p ())
1456 DECL_COMMON (var_decl) = 1;
1457 DECL_INITIAL (var_decl) = var_init;
1458 TREE_READONLY (var_decl) = const_flag;
1459 DECL_EXTERNAL (var_decl) = extern_flag;
1460 TREE_PUBLIC (var_decl) = public_flag || extern_flag;
1461 TREE_CONSTANT (var_decl) = constant_p;
/* Volatile objects must not have their accesses optimized away.  */
1462 TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
1463 = TYPE_VOLATILE (type);
1465 /* If it's public and not external, always allocate storage for it.
1466 At the global binding level we need to allocate static storage for the
1467 variable if and only if it's not external. If we are not at the top level
1468 we allocate automatic storage unless requested not to. */
1469 TREE_STATIC (var_decl)
1470 = public_flag || (global_bindings_p () ? !extern_flag : static_flag);
1472 if (asm_name && VAR_OR_FUNCTION_DECL_P (var_decl))
1473 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
1475 process_attributes (var_decl, attr_list);
1477 /* Add this decl to the current binding level. */
1478 gnat_pushdecl (var_decl, gnat_node);
1480 if (TREE_SIDE_EFFECTS (var_decl))
1481 TREE_ADDRESSABLE (var_decl) = 1;
1483 if (TREE_CODE (var_decl) != CONST_DECL)
1484 rest_of_decl_compilation (var_decl, global_bindings_p (), 0);
1486 /* expand CONST_DECLs to set their MODE, ALIGN, SIZE and SIZE_UNIT,
1487 which we need for later back-annotations. */
1488 expand_decl (var_decl);
1493 /* Wrapper around create_var_decl_1 for cases where we don't care whether
1494 a VAR or a CONST decl node is created. */
1497 create_var_decl (tree var_name, tree asm_name, tree type, tree var_init,
1498 bool const_flag, bool public_flag, bool extern_flag,
1499 bool static_flag, struct attrib *attr_list,
/* Simple forwarder: all heavy lifting is in create_var_decl_1.  */
1502 return create_var_decl_1 (var_name, asm_name, type, var_init,
1504 public_flag, extern_flag, static_flag,
1505 attr_list, gnat_node);
1508 /* Wrapper around create_var_decl_1 for cases where a VAR_DECL node is
1509 required. The primary intent is for DECL_CONST_CORRESPONDING_VARs, which
1510 must be VAR_DECLs and on which we want TREE_READONLY set to have them
1511 possibly assigned to a readonly data section. */
1514 create_true_var_decl (tree var_name, tree asm_name, tree type, tree var_init,
1515 bool const_flag, bool public_flag, bool extern_flag,
1516 bool static_flag, struct attrib *attr_list,
/* Same as create_var_decl but forbids the CONST_DECL alternative.  */
1519 return create_var_decl_1 (var_name, asm_name, type, var_init,
1521 public_flag, extern_flag, static_flag,
1522 attr_list, gnat_node);
1525 /* Returns a FIELD_DECL node. FIELD_NAME the field name, FIELD_TYPE is its
1526 type, and RECORD_TYPE is the type of the parent. PACKED is nonzero if
1527 this field is in a record type with a "pragma pack". If SIZE is nonzero
1528 it is the specified size for this field. If POS is nonzero, it is the bit
1529 position. If ADDRESSABLE is nonzero, it means we are allowed to take
1530 the address of this field for aliasing purposes. If it is negative, we
1531 should not make a bitfield, which is used by make_aligning_type. */
1534 create_field_decl (tree field_name, tree field_type, tree record_type,
1535 int packed, tree size, tree pos, int addressable)
1537 tree field_decl = build_decl (FIELD_DECL, field_name, field_type);
1539 DECL_CONTEXT (field_decl) = record_type;
1540 TREE_READONLY (field_decl) = TYPE_READONLY (field_type);
1542 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
1543 byte boundary since GCC cannot handle less-aligned BLKmode bitfields. */
1544 if (packed && TYPE_MODE (field_type) == BLKmode)
1545 DECL_ALIGN (field_decl) = BITS_PER_UNIT;
1547 /* If a size is specified, use it. Otherwise, if the record type is packed
1548 compute a size to use, which may differ from the object's natural size.
1549 We always set a size in this case to trigger the checks for bitfield
1550 creation below, which is typically required when no position has been
1553 size = convert (bitsizetype, size);
1554 else if (packed == 1)
1556 size = rm_size (field_type);
1558 /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
1560 if (TREE_CODE (size) == INTEGER_CST
1561 && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
1562 size = round_up (size, BITS_PER_UNIT);
1565 /* If we may, according to ADDRESSABLE, make a bitfield if a size is
1566 specified for two reasons: first if the size differs from the natural
1567 size. Second, if the alignment is insufficient. There are a number of
1568 ways the latter can be true.
1570 We never make a bitfield if the type of the field has a nonconstant size,
1571 because no such entity requiring bitfield operations should reach here.
1573 We do *preventively* make a bitfield when there might be the need for it
1574 but we don't have all the necessary information to decide, as is the case
1575 of a field with no specified position in a packed record.
1577 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
1578 in layout_decl or finish_record_type to clear the bit_field indication if
1579 it is in fact not needed. */
1580 if (addressable >= 0
1582 && TREE_CODE (size) == INTEGER_CST
1583 && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
1584 && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
1585 || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
1587 || (TYPE_ALIGN (record_type) != 0
1588 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
1590 DECL_BIT_FIELD (field_decl) = 1;
1591 DECL_SIZE (field_decl) = size;
/* Without an explicit position or packing, keep the natural alignment
   but never exceed the record's own alignment.  */
1592 if (!packed && !pos)
1593 DECL_ALIGN (field_decl)
1594 = (TYPE_ALIGN (record_type) != 0
1595 ? MIN (TYPE_ALIGN (record_type), TYPE_ALIGN (field_type))
1596 : TYPE_ALIGN (field_type));
1599 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
1600 DECL_ALIGN (field_decl)
1601 = MAX (DECL_ALIGN (field_decl),
1602 DECL_BIT_FIELD (field_decl) ? 1
1603 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT
1604 : TYPE_ALIGN (field_type));
1608 /* We need to pass in the alignment the DECL is known to have.
1609 This is the lowest-order bit set in POS, but no more than
1610 the alignment of the record, if one is specified. Note
1611 that an alignment of 0 is taken as infinite. */
1612 unsigned int known_align;
1614 if (host_integerp (pos, 1))
1615 known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
1617 known_align = BITS_PER_UNIT;
1619 if (TYPE_ALIGN (record_type)
1620 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
1621 known_align = TYPE_ALIGN (record_type);
1623 layout_decl (field_decl, known_align);
1624 SET_DECL_OFFSET_ALIGN (field_decl,
1625 host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
1627 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
1628 &DECL_FIELD_BIT_OFFSET (field_decl),
1629 DECL_OFFSET_ALIGN (field_decl), pos);
/* Remember that the field's position came from a rep clause.  */
1631 DECL_HAS_REP_P (field_decl) = 1;
1634 /* If the field type is passed by reference, we will have pointers to the
1635 field, so it is addressable. */
1636 if (must_pass_by_ref (field_type) || default_pass_by_ref (field_type))
1639 /* Mark the decl as nonaddressable if it is indicated so semantically,
1640 meaning we won't ever attempt to take the address of the field.
1642 It may also be "technically" nonaddressable, meaning that even if we
1643 attempt to take the field's address we will actually get the address of a
1644 copy. This is the case for true bitfields, but the DECL_BIT_FIELD value
1645 we have at this point is not accurate enough, so we don't account for
1646 this here and let finish_record_type decide. */
1647 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
1652 /* Subroutine of previous function: return nonzero if EXP, ignoring any side
1653 effects, has the value of zero. */
1656 value_zerop (tree exp)
/* For a COMPOUND_EXPR the value is that of the second operand.  */
1658 if (TREE_CODE (exp) == COMPOUND_EXPR)
1659 return value_zerop (TREE_OPERAND (exp, 1));
1661 return integer_zerop (exp);
1664 /* Returns a PARM_DECL node. PARAM_NAME is the name of the parameter,
1665 PARAM_TYPE is its type. READONLY is true if the parameter is
1666 readonly (either an IN parameter or an address of a pass-by-ref
1670 create_param_decl (tree param_name, tree param_type, bool readonly)
1672 tree param_decl = build_decl (PARM_DECL, param_name, param_type);
1674 /* Honor targetm.calls.promote_prototypes(), as not doing so can
1675 lead to various ABI violations. */
1676 if (targetm.calls.promote_prototypes (param_type)
1677 && (TREE_CODE (param_type) == INTEGER_TYPE
1678 || TREE_CODE (param_type) == ENUMERAL_TYPE)
1679 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
1681 /* We have to be careful about biased types here. Make a subtype
1682 of integer_type_node with the proper biasing. */
1683 if (TREE_CODE (param_type) == INTEGER_TYPE
1684 && TYPE_BIASED_REPRESENTATION_P (param_type))
/* Copy so setting the biased flag below cannot affect a shared type.  */
1687 = copy_type (build_range_type (integer_type_node,
1688 TYPE_MIN_VALUE (param_type),
1689 TYPE_MAX_VALUE (param_type)));
1691 TYPE_BIASED_REPRESENTATION_P (param_type) = 1;
1694 param_type = integer_type_node;
/* DECL_ARG_TYPE is the (possibly promoted) type used when passing.  */
1697 DECL_ARG_TYPE (param_decl) = param_type;
1698 TREE_READONLY (param_decl) = readonly;
1702 /* Given a DECL and ATTR_LIST, process the listed attributes. */
1705 process_attributes (tree decl, struct attrib *attr_list)
1707 for (; attr_list; attr_list = attr_list->next)
1708 switch (attr_list->type)
1710 case ATTR_MACHINE_ATTRIBUTE:
1711 decl_attributes (&decl, tree_cons (attr_list->name, attr_list->args,
1713 ATTR_FLAG_TYPE_IN_PLACE);
1716 case ATTR_LINK_ALIAS:
/* An alias needs a definition in this unit, hence not external.  */
1717 if (! DECL_EXTERNAL (decl))
1719 TREE_STATIC (decl) = 1;
1720 assemble_alias (decl, attr_list->name);
1724 case ATTR_WEAK_EXTERNAL:
1726 declare_weak (decl);
/* Weak symbols are target-dependent; diagnose when unsupported.  */
1728 post_error ("?weak declarations not supported on this target",
1729 attr_list->error_point);
1732 case ATTR_LINK_SECTION:
1733 if (targetm.have_named_sections)
1735 DECL_SECTION_NAME (decl)
1736 = build_string (IDENTIFIER_LENGTH (attr_list->name),
1737 IDENTIFIER_POINTER (attr_list->name));
/* A decl placed in a named section cannot also be common.  */
1738 DECL_COMMON (decl) = 0;
1741 post_error ("?section attributes are not supported for this target",
1742 attr_list->error_point);
1745 case ATTR_LINK_CONSTRUCTOR:
1746 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1747 TREE_USED (decl) = 1;
1750 case ATTR_LINK_DESTRUCTOR:
1751 DECL_STATIC_DESTRUCTOR (decl) = 1;
1752 TREE_USED (decl) = 1;
1757 /* Record a global renaming pointer. */
1760 record_global_renaming_pointer (tree decl)
/* Only decls that actually rename an object belong in the vector.  */
1762 gcc_assert (DECL_RENAMED_OBJECT (decl));
1763 VEC_safe_push (tree, gc, global_renaming_pointers, decl);
1766 /* Invalidate the global renaming pointers. */
1769 invalidate_global_renaming_pointers (void)
/* Clear each recorded renaming, then release the whole vector.  */
1774 for (i = 0; VEC_iterate(tree, global_renaming_pointers, i, iter); i++)
1775 SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);
1777 VEC_free (tree, gc, global_renaming_pointers);
1780 /* Return true if VALUE is a known to be a multiple of FACTOR, which must be
1784 value_factor_p (tree value, HOST_WIDE_INT factor)
1786 if (host_integerp (value, 1))
1787 return tree_low_cst (value, 1) % factor == 0;
/* A product is a multiple of FACTOR if either operand is.  */
1789 if (TREE_CODE (value) == MULT_EXPR)
1790 return (value_factor_p (TREE_OPERAND (value, 0), factor)
1791 || value_factor_p (TREE_OPERAND (value, 1), factor));
1796 /* Given 2 consecutive field decls PREV_FIELD and CURR_FIELD, return true
1797 unless we can prove these 2 fields are laid out in such a way that no gap
1798 exist between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
1799 is the distance in bits between the end of PREV_FIELD and the starting
1800 position of CURR_FIELD. It is ignored if null. */
1803 potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
1805 /* If this is the first field of the record, there cannot be any gap */
1809 /* If the previous field is a union type, then return False: The only
1810 time when such a field is not the last field of the record is when
1811 there are other components at fixed positions after it (meaning there
1812 was a rep clause for every field), in which case we don't want the
1813 alignment constraint to override them. */
1814 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
1817 /* If the distance between the end of prev_field and the beginning of
1818 curr_field is constant, then there is a gap if the value of this
1819 constant is not null. */
1820 if (offset && host_integerp (offset, 1))
1821 return !integer_zerop (offset);
1823 /* If the size and position of the previous field are constant,
1824 then check the sum of this size and position. There will be a gap
1825 iff it is not multiple of the current field alignment. */
1826 if (host_integerp (DECL_SIZE (prev_field), 1)
1827 && host_integerp (bit_position (prev_field), 1))
1828 return ((tree_low_cst (bit_position (prev_field), 1)
1829 + tree_low_cst (DECL_SIZE (prev_field), 1))
1830 % DECL_ALIGN (curr_field) != 0);
1832 /* If both the position and size of the previous field are multiples
1833 of the current field alignment, there cannot be any gap. */
1834 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
1835 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
1838 /* Fallback, return that there may be a potential gap */
1842 /* Returns a LABEL_DECL node for LABEL_NAME. */
1845 create_label_decl (tree label_name)
1847 tree label_decl = build_decl (LABEL_DECL, label_name, void_type_node);
/* Labels belong to the function being compiled and carry no value.  */
1849 DECL_CONTEXT (label_decl) = current_function_decl;
1850 DECL_MODE (label_decl) = VOIDmode;
1851 DECL_SOURCE_LOCATION (label_decl) = input_location;
1856 /* Returns a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
1857 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1858 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1859 PARM_DECL nodes chained through the TREE_CHAIN field).
1861 INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
1862 appropriate fields in the FUNCTION_DECL. GNAT_NODE gives the location. */
1865 create_subprog_decl (tree subprog_name, tree asm_name,
1866 tree subprog_type, tree param_decl_list, bool inline_flag,
1867 bool public_flag, bool extern_flag,
1868 struct attrib *attr_list, Node_Id gnat_node)
1870 tree return_type = TREE_TYPE (subprog_type);
1871 tree subprog_decl = build_decl (FUNCTION_DECL, subprog_name, subprog_type);
1873 /* If this is a function nested inside an inlined external function, it
1874 means we aren't going to compile the outer function unless it is
1875 actually inlined, so do the same for us. */
1876 if (current_function_decl && DECL_INLINE (current_function_decl)
1877 && DECL_EXTERNAL (current_function_decl))
1880 DECL_EXTERNAL (subprog_decl) = extern_flag;
1881 TREE_PUBLIC (subprog_decl) = public_flag;
1882 TREE_STATIC (subprog_decl) = 1;
/* Propagate const/volatile function-type properties to the decl.  */
1883 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
1884 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
1885 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
1886 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
1887 DECL_RESULT (subprog_decl) = build_decl (RESULT_DECL, 0, return_type);
1888 DECL_ARTIFICIAL (DECL_RESULT (subprog_decl)) = 1;
1889 DECL_IGNORED_P (DECL_RESULT (subprog_decl)) = 1;
1891 /* TREE_ADDRESSABLE is set on the result type to request the use of the
1892 target by-reference return mechanism. This is not supported all the
1893 way down to RTL expansion with GCC 4, which ICEs on temporary creation
1894 attempts with such a type and expects DECL_BY_REFERENCE to be set on
1895 the RESULT_DECL instead - see gnat_genericize for more details. */
1896 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (subprog_decl))))
1898 tree result_decl = DECL_RESULT (subprog_decl);
1900 TREE_ADDRESSABLE (TREE_TYPE (result_decl)) = 0;
1901 DECL_BY_REFERENCE (result_decl) = 1;
1905 DECL_DECLARED_INLINE_P (subprog_decl) = 1;
1908 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
1910 process_attributes (subprog_decl, attr_list);
1912 /* Add this decl to the current binding level. */
1913 gnat_pushdecl (subprog_decl, gnat_node);
1915 /* Output the assembler code and/or RTL for the declaration. */
1916 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
1918 return subprog_decl;
1921 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
1922 body. This routine needs to be invoked before processing the declarations
1923 appearing in the subprogram. */
1926 begin_subprog_body (tree subprog_decl)
1930 current_function_decl = subprog_decl;
1931 announce_function (subprog_decl);
1933 /* Enter a new binding level and show that all the parameters belong to
1936 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
1937 param_decl = TREE_CHAIN (param_decl))
1938 DECL_CONTEXT (param_decl) = subprog_decl;
1940 make_decl_rtl (subprog_decl);
1942 /* We handle pending sizes via the elaboration of types, so we don't need to
1943 save them. This causes them to be marked as part of the outer function
1944 and then discarded. */
1945 get_pending_sizes ();
1949 /* Helper for the genericization callback. Return a dereference of VAL
1950 if it is of a reference type. */
1953 convert_from_reference (tree val)
1955 tree value_type, ref;
1957 if (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE)
1960 value_type = TREE_TYPE (TREE_TYPE (val));
1961 ref = build1 (INDIRECT_REF, value_type, val);
1963 /* See if what we reference is CONST or VOLATILE, which requires
1964 looking into array types to get to the component type. */
1966 while (TREE_CODE (value_type) == ARRAY_TYPE)
1967 value_type = TREE_TYPE (value_type);
1970 = (TYPE_QUALS (value_type) & TYPE_QUAL_CONST);
1971 TREE_THIS_VOLATILE (ref)
1972 = (TYPE_QUALS (value_type) & TYPE_QUAL_VOLATILE);
1974 TREE_SIDE_EFFECTS (ref)
1975 = (TREE_THIS_VOLATILE (ref) || TREE_SIDE_EFFECTS (val));
1980 /* Helper for the genericization callback. Returns true if T denotes
1981 a RESULT_DECL with DECL_BY_REFERENCE set. */
1984 is_byref_result (tree t)
1986 return (TREE_CODE (t) == RESULT_DECL && DECL_BY_REFERENCE (t));
1990 /* Tree walking callback for gnat_genericize. Currently ...
1992 o Adjust references to the function's DECL_RESULT if it is marked
1993 DECL_BY_REFERENCE and so has had its type turned into a reference
1994 type at the end of the function compilation. */
1997 gnat_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
1999 /* This implementation is modeled after what the C++ front-end is
2000 doing, basis of the downstream passes behavior. */
2002 tree stmt = *stmt_p;
2003 struct pointer_set_t *p_set = (struct pointer_set_t*) data;
2005 /* If we have a direct mention of the result decl, dereference. */
2006 if (is_byref_result (stmt))
2008 *stmt_p = convert_from_reference (stmt);
2013 /* Otherwise, no need to walk the the same tree twice. */
2014 if (pointer_set_contains (p_set, stmt))
2020 /* If we are taking the address of what now is a reference, just get the
2022 if (TREE_CODE (stmt) == ADDR_EXPR
2023 && is_byref_result (TREE_OPERAND (stmt, 0)))
2025 *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
2029 /* Don't dereference an by-reference RESULT_DECL inside a RETURN_EXPR. */
2030 else if (TREE_CODE (stmt) == RETURN_EXPR
2031 && TREE_OPERAND (stmt, 0)
2032 && is_byref_result (TREE_OPERAND (stmt, 0)))
2035 /* Don't look inside trees that cannot embed references of interest. */
2036 else if (IS_TYPE_OR_DECL_P (stmt))
2039 pointer_set_insert (p_set, *stmt_p);
2044 /* Perform lowering of Ada trees to GENERIC. In particular:
2046 o Turn a DECL_BY_REFERENCE RESULT_DECL into a real by-reference decl
2047 and adjust all the references to this decl accordingly. */
2050 gnat_genericize (tree fndecl)
2052 /* Prior to GCC 4, an explicit By_Reference result mechanism for a function
2053 was handled by simply setting TREE_ADDRESSABLE on the result type.
2054 Everything required to actually pass by invisible ref using the target
2055 mechanism (e.g. extra parameter) was handled at RTL expansion time.
2057 This doesn't work with GCC 4 any more for several reasons. First, the
2058 gimplification process might need the creation of temporaries of this
2059 type, and the gimplifier ICEs on such attempts. Second, the middle-end
2060 now relies on a different attribute for such cases (DECL_BY_REFERENCE on
2061 RESULT/PARM_DECLs), and expects the user invisible by-reference-ness to
2062 be explicitely accounted for by the front-end in the function body.
2064 We achieve the complete transformation in two steps:
2066 1/ create_subprog_decl performs early attribute tweaks: it clears
2067 TREE_ADDRESSABLE from the result type and sets DECL_BY_REFERENCE on
2068 the result decl. The former ensures that the bit isn't set in the GCC
2069 tree saved for the function, so prevents ICEs on temporary creation.
2070 The latter we use here to trigger the rest of the processing.
2072 2/ This function performs the type transformation on the result decl
2073 and adjusts all the references to this decl from the function body
2076 Clearing TREE_ADDRESSABLE from the type differs from the C++ front-end
2077 strategy, which escapes the gimplifier temporary creation issues by
2078 creating it's own temporaries using TARGET_EXPR nodes. Our way relies
2079 on simple specific support code in aggregate_value_p to look at the
2080 target function result decl explicitely. */
2082 struct pointer_set_t *p_set;
2083 tree decl_result = DECL_RESULT (fndecl);
2085 if (!DECL_BY_REFERENCE (decl_result))
2088 /* Make the DECL_RESULT explicitely by-reference and adjust all the
2089 occurrences in the function body using the common tree-walking facility.
2090 We want to see every occurrence of the result decl to adjust the
2091 referencing tree, so need to use our own pointer set to control which
2092 trees should be visited again or not. */
2094 p_set = pointer_set_create ();
2096 TREE_TYPE (decl_result) = build_reference_type (TREE_TYPE (decl_result));
2097 TREE_ADDRESSABLE (decl_result) = 0;
2098 relayout_decl (decl_result);
2100 walk_tree (&DECL_SAVED_TREE (fndecl), gnat_genericize_r, p_set, NULL);
2102 pointer_set_destroy (p_set);
2105 /* Finish the definition of the current subprogram and compile it all the way
2106 to assembler language output. BODY is the tree corresponding to
2110 end_subprog_body (tree body)
2112 tree fndecl = current_function_decl;
2114 /* Mark the BLOCK for this level as being for this function and pop the
2115 level. Since the vars in it are the parameters, clear them. */
2116 BLOCK_VARS (current_binding_level->block) = 0;
2117 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
2118 DECL_INITIAL (fndecl) = current_binding_level->block;
2121 /* Deal with inline. If declared inline or we should default to inline,
2122 set the flag in the decl. */
2123 DECL_INLINE (fndecl)
2124 = DECL_DECLARED_INLINE_P (fndecl) || flag_inline_trees == 2;
2126 /* We handle pending sizes via the elaboration of types, so we don't
2127 need to save them. */
2128 get_pending_sizes ();
2130 /* Mark the RESULT_DECL as being in this subprogram. */
2131 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
2133 DECL_SAVED_TREE (fndecl) = body;
2135 current_function_decl = DECL_CONTEXT (fndecl);
2138 /* We cannot track the location of errors past this point. */
2139 error_gnat_node = Empty;
2141 /* If we're only annotating types, don't actually compile this function. */
2142 if (type_annotate_only)
2145 /* If we don't have .ctors/.dtors sections, and this is a static
2146 constructor or destructor, it must be recorded now. */
2147 if (DECL_STATIC_CONSTRUCTOR (fndecl) && !targetm.have_ctors_dtors)
2148 VEC_safe_push (tree, gc, static_ctors, fndecl);
2150 if (DECL_STATIC_DESTRUCTOR (fndecl) && !targetm.have_ctors_dtors)
2151 VEC_safe_push (tree, gc, static_dtors, fndecl);
2153 /* Perform the required pre-gimplfication transformations on the tree. */
2154 gnat_genericize (fndecl);
2156 /* We do different things for nested and non-nested functions.
2157 ??? This should be in cgraph. */
2158 if (!DECL_CONTEXT (fndecl))
2160 gnat_gimplify_function (fndecl);
2161 cgraph_finalize_function (fndecl, false);
2164 /* Register this function with cgraph just far enough to get it
2165 added to our parent's nested function list. */
2166 (void) cgraph_node (fndecl);
2169 /* Convert FNDECL's code to GIMPLE and handle any nested functions. */
2172 gnat_gimplify_function (tree fndecl)
2174 struct cgraph_node *cgn;
2176 dump_function (TDI_original, fndecl);
2177 gimplify_function_tree (fndecl);
2178 dump_function (TDI_generic, fndecl);
2180 /* Convert all nested functions to GIMPLE now. We do things in this order
2181 so that items like VLA sizes are expanded properly in the context of the
2182 correct function. */
2183 cgn = cgraph_node (fndecl);
2184 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
2185 gnat_gimplify_function (cgn->decl);
2190 gnat_builtin_function (tree decl)
2192 gnat_pushdecl (decl, Empty);
2196 /* Handle a "const" attribute; arguments as in
2197 struct attribute_spec.handler. */
2200 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
2201 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
2204 if (TREE_CODE (*node) == FUNCTION_DECL)
2205 TREE_READONLY (*node) = 1;
2207 *no_add_attrs = true;
2212 /* Handle a "nothrow" attribute; arguments as in
2213 struct attribute_spec.handler. */
2216 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
2217 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
2220 if (TREE_CODE (*node) == FUNCTION_DECL)
2221 TREE_NOTHROW (*node) = 1;
2223 *no_add_attrs = true;
2228 /* Return an integer type with the number of bits of precision given by
2229 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
2230 it is a signed type. */
2233 gnat_type_for_size (unsigned precision, int unsignedp)
2238 if (precision <= 2 * MAX_BITS_PER_WORD
2239 && signed_and_unsigned_types[precision][unsignedp])
2240 return signed_and_unsigned_types[precision][unsignedp];
2243 t = make_unsigned_type (precision);
2245 t = make_signed_type (precision);
2247 if (precision <= 2 * MAX_BITS_PER_WORD)
2248 signed_and_unsigned_types[precision][unsignedp] = t;
2252 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
2253 TYPE_NAME (t) = get_identifier (type_name);
2259 /* Likewise for floating-point types. */
2262 float_type_for_precision (int precision, enum machine_mode mode)
2267 if (float_types[(int) mode])
2268 return float_types[(int) mode];
2270 float_types[(int) mode] = t = make_node (REAL_TYPE);
2271 TYPE_PRECISION (t) = precision;
2274 gcc_assert (TYPE_MODE (t) == mode);
2277 sprintf (type_name, "FLOAT_%d", precision);
2278 TYPE_NAME (t) = get_identifier (type_name);
2284 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
2285 an unsigned type; otherwise a signed type is returned. */
2288 gnat_type_for_mode (enum machine_mode mode, int unsignedp)
2290 if (mode == BLKmode)
2292 else if (mode == VOIDmode)
2293 return void_type_node;
2294 else if (COMPLEX_MODE_P (mode))
2296 else if (SCALAR_FLOAT_MODE_P (mode))
2297 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
2298 else if (SCALAR_INT_MODE_P (mode))
2299 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2304 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
2307 gnat_unsigned_type (tree type_node)
2309 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
2311 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2313 type = copy_node (type);
2314 TREE_TYPE (type) = type_node;
2316 else if (TREE_TYPE (type_node)
2317 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2318 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2320 type = copy_node (type);
2321 TREE_TYPE (type) = TREE_TYPE (type_node);
2327 /* Return the signed version of a TYPE_NODE, a scalar type. */
2330 gnat_signed_type (tree type_node)
2332 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
2334 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2336 type = copy_node (type);
2337 TREE_TYPE (type) = type_node;
2339 else if (TREE_TYPE (type_node)
2340 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2341 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2343 type = copy_node (type);
2344 TREE_TYPE (type) = TREE_TYPE (type_node);
2351 /* EXP is an expression for the size of an object. If this size contains
2352 discriminant references, replace them with the maximum (if MAX_P) or
2353 minimum (if !MAX_P) possible value of the discriminant. */
2356 max_size (tree exp, bool max_p)
2358 enum tree_code code = TREE_CODE (exp);
2359 tree type = TREE_TYPE (exp);
2361 switch (TREE_CODE_CLASS (code))
2363 case tcc_declaration:
2368 if (code == CALL_EXPR)
2371 int i, n = call_expr_nargs (exp);
2374 argarray = (tree *) alloca (n * sizeof (tree));
2375 for (i = 0; i < n; i++)
2376 argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
2377 return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
2382 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
2383 modify. Otherwise, we treat it like a variable. */
2384 if (!CONTAINS_PLACEHOLDER_P (exp))
2387 type = TREE_TYPE (TREE_OPERAND (exp, 1));
2389 max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), true);
2391 case tcc_comparison:
2392 return max_p ? size_one_node : size_zero_node;
2396 case tcc_expression:
2397 switch (TREE_CODE_LENGTH (code))
2400 if (code == NON_LVALUE_EXPR)
2401 return max_size (TREE_OPERAND (exp, 0), max_p);
2404 fold_build1 (code, type,
2405 max_size (TREE_OPERAND (exp, 0),
2406 code == NEGATE_EXPR ? !max_p : max_p));
2409 if (code == COMPOUND_EXPR)
2410 return max_size (TREE_OPERAND (exp, 1), max_p);
2412 /* Calculate "(A ? B : C) - D" as "A ? B - D : C - D" which
2413 may provide a tighter bound on max_size. */
2414 if (code == MINUS_EXPR
2415 && TREE_CODE (TREE_OPERAND (exp, 0)) == COND_EXPR)
2417 tree lhs = fold_build2 (MINUS_EXPR, type,
2418 TREE_OPERAND (TREE_OPERAND (exp, 0), 1),
2419 TREE_OPERAND (exp, 1));
2420 tree rhs = fold_build2 (MINUS_EXPR, type,
2421 TREE_OPERAND (TREE_OPERAND (exp, 0), 2),
2422 TREE_OPERAND (exp, 1));
2423 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2424 max_size (lhs, max_p),
2425 max_size (rhs, max_p));
2429 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
2430 tree rhs = max_size (TREE_OPERAND (exp, 1),
2431 code == MINUS_EXPR ? !max_p : max_p);
2433 /* Special-case wanting the maximum value of a MIN_EXPR.
2434 In that case, if one side overflows, return the other.
2435 sizetype is signed, but we know sizes are non-negative.
2436 Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
2437 overflowing or the maximum possible value and the RHS
2441 && TREE_CODE (rhs) == INTEGER_CST
2442 && TREE_OVERFLOW (rhs))
2446 && TREE_CODE (lhs) == INTEGER_CST
2447 && TREE_OVERFLOW (lhs))
2449 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
2450 && ((TREE_CODE (lhs) == INTEGER_CST
2451 && TREE_OVERFLOW (lhs))
2452 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
2453 && !TREE_CONSTANT (rhs))
2456 return fold_build2 (code, type, lhs, rhs);
2460 if (code == SAVE_EXPR)
2462 else if (code == COND_EXPR)
2463 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2464 max_size (TREE_OPERAND (exp, 1), max_p),
2465 max_size (TREE_OPERAND (exp, 2), max_p));
2468 /* Other tree classes cannot happen. */
2476 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2477 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2478 Return a constructor for the template. */
2481 build_template (tree template_type, tree array_type, tree expr)
2483 tree template_elts = NULL_TREE;
2484 tree bound_list = NULL_TREE;
2487 if (TREE_CODE (array_type) == RECORD_TYPE
2488 && (TYPE_IS_PADDING_P (array_type)
2489 || TYPE_JUSTIFIED_MODULAR_P (array_type)))
2490 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
2492 if (TREE_CODE (array_type) == ARRAY_TYPE
2493 || (TREE_CODE (array_type) == INTEGER_TYPE
2494 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
2495 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
2497 /* First make the list for a CONSTRUCTOR for the template. Go down the
2498 field list of the template instead of the type chain because this
2499 array might be an Ada array of arrays and we can't tell where the
2500 nested arrays stop being the underlying object. */
2502 for (field = TYPE_FIELDS (template_type); field;
2504 ? (bound_list = TREE_CHAIN (bound_list))
2505 : (array_type = TREE_TYPE (array_type))),
2506 field = TREE_CHAIN (TREE_CHAIN (field)))
2508 tree bounds, min, max;
2510 /* If we have a bound list, get the bounds from there. Likewise
2511 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
2512 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
2513 This will give us a maximum range. */
2515 bounds = TREE_VALUE (bound_list);
2516 else if (TREE_CODE (array_type) == ARRAY_TYPE)
2517 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
2518 else if (expr && TREE_CODE (expr) == PARM_DECL
2519 && DECL_BY_COMPONENT_PTR_P (expr))
2520 bounds = TREE_TYPE (field);
2524 min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
2525 max = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MAX_VALUE (bounds));
2527 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
2528 substitute it from OBJECT. */
2529 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
2530 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
2532 template_elts = tree_cons (TREE_CHAIN (field), max,
2533 tree_cons (field, min, template_elts));
2536 return gnat_build_constructor (template_type, nreverse (template_elts));
2539 /* Build a VMS descriptor from a Mechanism_Type, which must specify
2540 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2541 in the type contains in its DECL_INITIAL the expression to use when
2542 a constructor is made for the type. GNAT_ENTITY is an entity used
2543 to print out an error message if the mechanism cannot be applied to
2544 an object of that type and also for the name. */
2547 build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2549 tree record_type = make_node (RECORD_TYPE);
2550 tree pointer32_type;
2551 tree field_list = 0;
2560 /* If TYPE is an unconstrained array, use the underlying array type. */
2561 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2562 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2564 /* If this is an array, compute the number of dimensions in the array,
2565 get the index types, and point to the inner type. */
2566 if (TREE_CODE (type) != ARRAY_TYPE)
2569 for (ndim = 1, inner_type = type;
2570 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2571 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2572 ndim++, inner_type = TREE_TYPE (inner_type))
2575 idx_arr = (tree *) alloca (ndim * sizeof (tree));
2577 if (mech != By_Descriptor_NCA
2578 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2579 for (i = ndim - 1, inner_type = type;
2581 i--, inner_type = TREE_TYPE (inner_type))
2582 idx_arr[i] = TYPE_DOMAIN (inner_type);
2584 for (i = 0, inner_type = type;
2586 i++, inner_type = TREE_TYPE (inner_type))
2587 idx_arr[i] = TYPE_DOMAIN (inner_type);
2589 /* Now get the DTYPE value. */
2590 switch (TREE_CODE (type))
2594 if (TYPE_VAX_FLOATING_POINT_P (type))
2595 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2608 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2611 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2614 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2617 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2620 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2623 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2629 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2633 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2634 && TYPE_VAX_FLOATING_POINT_P (type))
2635 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2647 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2658 /* Get the CLASS value. */
2661 case By_Descriptor_A:
2664 case By_Descriptor_NCA:
2667 case By_Descriptor_SB:
2671 case By_Descriptor_S:
2677 /* Make the type for a descriptor for VMS. The first four fields
2678 are the same for all types. */
2681 = chainon (field_list,
2682 make_descriptor_field
2683 ("LENGTH", gnat_type_for_size (16, 1), record_type,
2684 size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));
2686 field_list = chainon (field_list,
2687 make_descriptor_field ("DTYPE",
2688 gnat_type_for_size (8, 1),
2689 record_type, size_int (dtype)));
2690 field_list = chainon (field_list,
2691 make_descriptor_field ("CLASS",
2692 gnat_type_for_size (8, 1),
2693 record_type, size_int (class)));
2695 /* Of course this will crash at run-time if the address space is not
2696 within the low 32 bits, but there is nothing else we can do. */
2697 pointer32_type = build_pointer_type_for_mode (type, SImode, false);
2700 = chainon (field_list,
2701 make_descriptor_field
2702 ("POINTER", pointer32_type, record_type,
2703 build_unary_op (ADDR_EXPR,
2705 build0 (PLACEHOLDER_EXPR, type))));
2710 case By_Descriptor_S:
2713 case By_Descriptor_SB:
2715 = chainon (field_list,
2716 make_descriptor_field
2717 ("SB_L1", gnat_type_for_size (32, 1), record_type,
2718 TREE_CODE (type) == ARRAY_TYPE
2719 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2721 = chainon (field_list,
2722 make_descriptor_field
2723 ("SB_U1", gnat_type_for_size (32, 1), record_type,
2724 TREE_CODE (type) == ARRAY_TYPE
2725 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2728 case By_Descriptor_A:
2729 case By_Descriptor_NCA:
2730 field_list = chainon (field_list,
2731 make_descriptor_field ("SCALE",
2732 gnat_type_for_size (8, 1),
2736 field_list = chainon (field_list,
2737 make_descriptor_field ("DIGITS",
2738 gnat_type_for_size (8, 1),
2743 = chainon (field_list,
2744 make_descriptor_field
2745 ("AFLAGS", gnat_type_for_size (8, 1), record_type,
2746 size_int (mech == By_Descriptor_NCA
2748 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2749 : (TREE_CODE (type) == ARRAY_TYPE
2750 && TYPE_CONVENTION_FORTRAN_P (type)
2753 field_list = chainon (field_list,
2754 make_descriptor_field ("DIMCT",
2755 gnat_type_for_size (8, 1),
2759 field_list = chainon (field_list,
2760 make_descriptor_field ("ARSIZE",
2761 gnat_type_for_size (32, 1),
2763 size_in_bytes (type)));
2765 /* Now build a pointer to the 0,0,0... element. */
2766 tem = build0 (PLACEHOLDER_EXPR, type);
2767 for (i = 0, inner_type = type; i < ndim;
2768 i++, inner_type = TREE_TYPE (inner_type))
2769 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
2770 convert (TYPE_DOMAIN (inner_type), size_zero_node),
2771 NULL_TREE, NULL_TREE);
2774 = chainon (field_list,
2775 make_descriptor_field
2777 build_pointer_type_for_mode (inner_type, SImode, false),
2780 build_pointer_type_for_mode (inner_type, SImode,
2784 /* Next come the addressing coefficients. */
2785 tem = size_one_node;
2786 for (i = 0; i < ndim; i++)
2790 = size_binop (MULT_EXPR, tem,
2791 size_binop (PLUS_EXPR,
2792 size_binop (MINUS_EXPR,
2793 TYPE_MAX_VALUE (idx_arr[i]),
2794 TYPE_MIN_VALUE (idx_arr[i])),
2797 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
2798 fname[1] = '0' + i, fname[2] = 0;
2800 = chainon (field_list,
2801 make_descriptor_field (fname,
2802 gnat_type_for_size (32, 1),
2803 record_type, idx_length));
2805 if (mech == By_Descriptor_NCA)
2809 /* Finally here are the bounds. */
2810 for (i = 0; i < ndim; i++)
2814 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2816 = chainon (field_list,
2817 make_descriptor_field
2818 (fname, gnat_type_for_size (32, 1), record_type,
2819 TYPE_MIN_VALUE (idx_arr[i])));
2823 = chainon (field_list,
2824 make_descriptor_field
2825 (fname, gnat_type_for_size (32, 1), record_type,
2826 TYPE_MAX_VALUE (idx_arr[i])));
2831 post_error ("unsupported descriptor type for &", gnat_entity);
2834 finish_record_type (record_type, field_list, 0, true);
2835 create_type_decl (create_concat_name (gnat_entity, "DESC"), record_type,
2836 NULL, true, false, gnat_entity);
2841 /* Utility routine for above code to make a field. */
2844 make_descriptor_field (const char *name, tree type,
2845 tree rec_type, tree initial)
2848 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
2850 DECL_INITIAL (field) = initial;
2854 /* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
2855 pointer or fat pointer type. GNAT_SUBPROG is the subprogram to which
2856 the VMS descriptor is passed. */
2859 convert_vms_descriptor (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
2861 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
2862 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
2863 /* The CLASS field is the 3rd field in the descriptor. */
2864 tree class = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type)));
2865 /* The POINTER field is the 4th field in the descriptor. */
2866 tree pointer = TREE_CHAIN (class);
2868 /* Retrieve the value of the POINTER field. */
2870 = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);
2872 if (POINTER_TYPE_P (gnu_type))
2873 return convert (gnu_type, gnu_expr);
2875 else if (TYPE_FAT_POINTER_P (gnu_type))
2877 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
2878 tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
2879 tree template_type = TREE_TYPE (p_bounds_type);
2880 tree min_field = TYPE_FIELDS (template_type);
2881 tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
2882 tree template, template_addr, aflags, dimct, t, u;
2883 /* See the head comment of build_vms_descriptor. */
2884 int iclass = TREE_INT_CST_LOW (DECL_INITIAL (class));
2886 /* Convert POINTER to the type of the P_ARRAY field. */
2887 gnu_expr = convert (p_array_type, gnu_expr);
2891 case 1: /* Class S */
2892 case 15: /* Class SB */
2893 /* Build {1, LENGTH} template; LENGTH is the 1st field. */
2894 t = TYPE_FIELDS (desc_type);
2895 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
2896 t = tree_cons (min_field,
2897 convert (TREE_TYPE (min_field), integer_one_node),
2898 tree_cons (max_field,
2899 convert (TREE_TYPE (max_field), t),
2901 template = gnat_build_constructor (template_type, t);
2902 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);
2904 /* For class S, we are done. */
2908 /* Test that we really have a SB descriptor, like DEC Ada. */
2909 t = build3 (COMPONENT_REF, TREE_TYPE (class), desc, class, NULL);
2910 u = convert (TREE_TYPE (class), DECL_INITIAL (class));
2911 u = build_binary_op (EQ_EXPR, integer_type_node, t, u);
2912 /* If so, there is already a template in the descriptor and
2913 it is located right after the POINTER field. */
2914 t = TREE_CHAIN (pointer);
2915 template = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
2916 /* Otherwise use the {1, LENGTH} template we build above. */
2917 template_addr = build3 (COND_EXPR, p_bounds_type, u,
2918 build_unary_op (ADDR_EXPR, p_bounds_type,
2923 case 4: /* Class A */
2924 /* The AFLAGS field is the 7th field in the descriptor. */
2925 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer)));
2926 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
2927 /* The DIMCT field is the 8th field in the descriptor. */
2929 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
2930 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
2931 or FL_COEFF or FL_BOUNDS not set. */
2932 u = build_int_cst (TREE_TYPE (aflags), 192);
2933 u = build_binary_op (TRUTH_OR_EXPR, integer_type_node,
2934 build_binary_op (NE_EXPR, integer_type_node,
2936 convert (TREE_TYPE (dimct),
2938 build_binary_op (NE_EXPR, integer_type_node,
2939 build2 (BIT_AND_EXPR,
2943 add_stmt (build3 (COND_EXPR, void_type_node, u,
2944 build_call_raise (CE_Length_Check_Failed, Empty,
2945 N_Raise_Constraint_Error),
2947 /* There is already a template in the descriptor and it is
2948 located at the start of block 3 (12th field). */
2949 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (t))));
2950 template = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
2951 template_addr = build_unary_op (ADDR_EXPR, p_bounds_type, template);
2954 case 10: /* Class NCA */
2956 post_error ("unsupported descriptor type for &", gnat_subprog);
2957 template_addr = integer_zero_node;
2961 /* Build the fat pointer in the form of a constructor. */
2962 t = tree_cons (TYPE_FIELDS (gnu_type), gnu_expr,
2963 tree_cons (TREE_CHAIN (TYPE_FIELDS (gnu_type)),
2964 template_addr, NULL_TREE));
2965 return gnat_build_constructor (gnu_type, t);
2972 /* Build a stub for the subprogram specified by the GCC tree GNU_SUBPROG
2973 and the GNAT node GNAT_SUBPROG. */
2976 build_function_stub (tree gnu_subprog, Entity_Id gnat_subprog)
2978 tree gnu_subprog_type, gnu_subprog_addr, gnu_subprog_call;
2979 tree gnu_stub_param, gnu_param_list, gnu_arg_types, gnu_param;
2980 tree gnu_stub_decl = DECL_FUNCTION_STUB (gnu_subprog);
2983 gnu_subprog_type = TREE_TYPE (gnu_subprog);
2984 gnu_param_list = NULL_TREE;
2986 begin_subprog_body (gnu_stub_decl);
2989 start_stmt_group ();
2991 /* Loop over the parameters of the stub and translate any of them
2992 passed by descriptor into a by reference one. */
2993 for (gnu_stub_param = DECL_ARGUMENTS (gnu_stub_decl),
2994 gnu_arg_types = TYPE_ARG_TYPES (gnu_subprog_type);
2996 gnu_stub_param = TREE_CHAIN (gnu_stub_param),
2997 gnu_arg_types = TREE_CHAIN (gnu_arg_types))
2999 if (DECL_BY_DESCRIPTOR_P (gnu_stub_param))
3000 gnu_param = convert_vms_descriptor (TREE_VALUE (gnu_arg_types),
3001 gnu_stub_param, gnat_subprog);
3003 gnu_param = gnu_stub_param;
3005 gnu_param_list = tree_cons (NULL_TREE, gnu_param, gnu_param_list);
3008 gnu_body = end_stmt_group ();
3010 /* Invoke the internal subprogram. */
3011 gnu_subprog_addr = build1 (ADDR_EXPR, build_pointer_type (gnu_subprog_type),
3013 gnu_subprog_call = build3 (CALL_EXPR, TREE_TYPE (gnu_subprog_type),
3014 gnu_subprog_addr, nreverse (gnu_param_list),
3017 /* Propagate the return value, if any. */
3018 if (VOID_TYPE_P (TREE_TYPE (gnu_subprog_type)))
3019 append_to_statement_list (gnu_subprog_call, &gnu_body);
3021 append_to_statement_list (build_return_expr (DECL_RESULT (gnu_stub_decl),
3027 allocate_struct_function (gnu_stub_decl);
3028 end_subprog_body (gnu_body);
3031 /* Build a type to be used to represent an aliased object whose nominal
3032 type is an unconstrained array. This consists of a RECORD_TYPE containing
3033 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
3034 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
3035 is used to represent an arbitrary unconstrained object. Use NAME
3036 as the name of the record. */
3039 build_unc_object_type (tree template_type, tree object_type, tree name)
3041 tree type = make_node (RECORD_TYPE);
3042 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
3043 template_type, type, 0, 0, 0, 1);
3044 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
3047 TYPE_NAME (type) = name;
3048 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
3049 finish_record_type (type,
3050 chainon (chainon (NULL_TREE, template_field),
3057 /* Same, taking a thin or fat pointer type instead of a template type. */
3060 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
3065 gcc_assert (TYPE_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
3068 = (TYPE_FAT_POINTER_P (thin_fat_ptr_type)
3069 ? TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
3070 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
3071 return build_unc_object_type (template_type, object_type, name);
3074 /* Shift the component offsets within an unconstrained object TYPE to make it
3075 suitable for use as a designated type for thin pointers. */
3078 shift_unc_components_for_thin_pointers (tree type)
3080 /* Thin pointer values designate the ARRAY data of an unconstrained object,
3081 allocated past the BOUNDS template. The designated type is adjusted to
3082 have ARRAY at position zero and the template at a negative offset, so
3083 that COMPONENT_REFs on (*thin_ptr) designate the proper location. */
3085 tree bounds_field = TYPE_FIELDS (type);
3086 tree array_field = TREE_CHAIN (TYPE_FIELDS (type));
/* BOUNDS ends up at -byte_position(ARRAY), i.e. just before the data.  */
3088 DECL_FIELD_OFFSET (bounds_field)
3089 = size_binop (MINUS_EXPR, size_zero_node, byte_position (array_field));
3091 DECL_FIELD_OFFSET (array_field) = size_zero_node;
3092 DECL_FIELD_BIT_OFFSET (array_field) = bitsize_zero_node;
3095 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE. In
3096 the normal case this is just two adjustments, but we have more to do
3097 if NEW is an UNCONSTRAINED_ARRAY_TYPE. */
3100 update_pointer_to (tree old_type, tree new_type)
3102 tree ptr = TYPE_POINTER_TO (old_type);
3103 tree ref = TYPE_REFERENCE_TO (old_type);
3107 /* If this is the main variant, process all the other variants first. */
3108 if (TYPE_MAIN_VARIANT (old_type) == old_type)
3109 for (type = TYPE_NEXT_VARIANT (old_type); type;
3110 type = TYPE_NEXT_VARIANT (type))
3111 update_pointer_to (type, new_type);
3113 /* If no pointer or reference, we are done. */
3117 /* Merge the old type qualifiers in the new type.
3119 Each old variant has qualifiers for specific reasons, and the new
3120 designated type as well. Each set of qualifiers represents useful
3121 information grabbed at some point, and merging the two simply unifies
3122 these inputs into the final type description.
3124 Consider for instance a volatile type frozen after an access to constant
3125 type designating it. After the designated type freeze, we get here with a
3126 volatile new_type and a dummy old_type with a readonly variant, created
3127 when the access type was processed. We shall make a volatile and readonly
3128 designated type, because that's what it really is.
3130 We might also get here for a non-dummy old_type variant with different
3131 qualifiers than the new_type ones, for instance in some cases of pointers
3132 to private record type elaboration (see the comments around the call to
3133 this routine from gnat_to_gnu_entity/E_Access_Type). We have to merge the
3134 qualifiers in those cases too, to avoid accidentally discarding the
3135 initial set, and will often end up with old_type == new_type then. */
3136 new_type = build_qualified_type (new_type,
3137 TYPE_QUALS (old_type)
3138 | TYPE_QUALS (new_type));
3140 /* If the new type and the old one are identical, there is nothing to
3142 if (old_type == new_type)
3145 /* Otherwise, first handle the simple case. */
3146 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
3148 TYPE_POINTER_TO (new_type) = ptr;
3149 TYPE_REFERENCE_TO (new_type) = ref;
/* Walk every pointer (and reference) type to the old type, including all
   their variants, and redirect their designated type to NEW_TYPE.  */
3151 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
3152 for (ptr1 = TYPE_MAIN_VARIANT (ptr); ptr1;
3153 ptr1 = TYPE_NEXT_VARIANT (ptr1))
3154 TREE_TYPE (ptr1) = new_type;
3156 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
3157 for (ref1 = TYPE_MAIN_VARIANT (ref); ref1;
3158 ref1 = TYPE_NEXT_VARIANT (ref1))
3159 TREE_TYPE (ref1) = new_type;
3162 /* Now deal with the unconstrained array case. In this case the "pointer"
3163 is actually a RECORD_TYPE where both fields are pointers to dummy nodes.
3164 Turn them into pointers to the correct types using update_pointer_to. */
3165 else if (TREE_CODE (ptr) != RECORD_TYPE || !TYPE_IS_FAT_POINTER_P (ptr))
3170 tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
3171 tree array_field = TYPE_FIELDS (ptr);
3172 tree bounds_field = TREE_CHAIN (TYPE_FIELDS (ptr));
3173 tree new_ptr = TYPE_POINTER_TO (new_type);
3177 /* Make pointers to the dummy template point to the real template. */
3179 (TREE_TYPE (TREE_TYPE (bounds_field)),
3180 TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_ptr)))));
3182 /* The references to the template bounds present in the array type
3183 are made through a PLACEHOLDER_EXPR of type new_ptr. Since we
3184 are updating ptr to make it a full replacement for new_ptr as
3185 pointer to new_type, we must rework the PLACEHOLDER_EXPR so as
3186 to make it of type ptr. */
3187 new_ref = build3 (COMPONENT_REF, TREE_TYPE (bounds_field),
3188 build0 (PLACEHOLDER_EXPR, ptr),
3189 bounds_field, NULL_TREE);
3191 /* Create the new array for the new PLACEHOLDER_EXPR and make
3192 pointers to the dummy array point to it.
3194 ??? This is now the only use of substitute_in_type,
3195 which is a very "heavy" routine to do this, so it
3196 should be replaced at some point. */
3198 (TREE_TYPE (TREE_TYPE (array_field)),
3199 substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))),
3200 TREE_CHAIN (TYPE_FIELDS (new_ptr)), new_ref));
3202 /* Make ptr the pointer to new_type. */
3203 TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
3204 = TREE_TYPE (new_type) = ptr;
3206 for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
3207 SET_TYPE_UNCONSTRAINED_ARRAY (var, new_type);
3209 /* Now handle updating the allocation record, what the thin pointer
3210 points to. Update all pointers from the old record into the new
3211 one, update the type of the array field, and recompute the size. */
3212 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);
3214 TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
3215 = TREE_TYPE (TREE_TYPE (array_field));
3217 /* The size recomputation needs to account for alignment constraints, so
3218 we let layout_type work it out. This will reset the field offsets to
3219 what they would be in a regular record, so we shift them back to what
3220 we want them to be for a thin pointer designated type afterwards. */
3221 DECL_SIZE (TYPE_FIELDS (new_obj_rec)) = 0;
3222 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))) = 0;
3223 TYPE_SIZE (new_obj_rec) = 0;
3224 layout_type (new_obj_rec);
3226 shift_unc_components_for_thin_pointers (new_obj_rec);
3228 /* We are done, at last. */
3229 rest_of_record_type_compilation (ptr);
3233 /* Convert a pointer to a constrained array into a pointer to a fat
3234 pointer. This involves making or finding a template. */
3237 convert_to_fat_pointer (tree type, tree expr)
3239 tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
3240 tree template, template_addr;
3241 tree etype = TREE_TYPE (expr);
3243 /* If EXPR is a constant of zero, we make a fat pointer that has a null
3244 pointer to the template and array. */
3245 if (integer_zerop (expr))
3247 gnat_build_constructor
3249 tree_cons (TYPE_FIELDS (type),
3250 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
3251 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3252 convert (build_pointer_type (template_type),
3256 /* If EXPR is a thin pointer, make the template and data from the record. */
3258 else if (TYPE_THIN_POINTER_P (etype))
3260 tree fields = TYPE_FIELDS (TREE_TYPE (etype));
/* Stabilize EXPR since it is dereferenced twice below (once for the
   template, once for the data).  Strip a direct ADDR_EXPR so we can
   reference the underlying object without a spurious indirection.  */
3262 expr = save_expr (expr);
3263 if (TREE_CODE (expr) == ADDR_EXPR)
3264 expr = TREE_OPERAND (expr, 0);
3266 expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
3268 template = build_component_ref (expr, NULL_TREE, fields, false);
3269 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
3270 build_component_ref (expr, NULL_TREE,
3271 TREE_CHAIN (fields), false));
3274 /* Otherwise, build the constructor for the template. */
3275 template = build_template (template_type, TREE_TYPE (etype), expr);
3277 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);
3279 /* The result is a CONSTRUCTOR for the fat pointer.
3281 If expr is an argument of a foreign convention subprogram, the type it
3282 points to is directly the component type. In this case, the expression
3283 type may not match the corresponding FIELD_DECL type at this point, so we
3284 call "convert" here to fix that up if necessary. This type consistency is
3285 required, for instance because it ensures that possible later folding of
3286 component_refs against this constructor always yields something of the
3287 same type as the initial reference.
3289 Note that the call to "build_template" above is still fine, because it
3290 will only refer to the provided template_type in this case. */
3292 gnat_build_constructor
3293 (type, tree_cons (TYPE_FIELDS (type),
3294 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
3295 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3296 template_addr, NULL_TREE)));
3299 /* Convert to a thin pointer type, TYPE. The only thing we know how to convert
3300 is something that is a fat pointer, so convert to it first if EXPR
3301 is not already a fat pointer. */
3304 convert_to_thin_pointer (tree type, tree expr)
3306 if (!TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
3308 = convert_to_fat_pointer
3309 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
3311 /* We get the pointer to the data and use a NOP_EXPR to make it the
/* The first field of a fat pointer is the pointer to the array data.  */
3313 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)),
3315 expr = build1 (NOP_EXPR, type, expr);
3320 /* Create an expression whose value is that of EXPR,
3321 converted to type TYPE. The TREE_TYPE of the value
3322 is always TYPE. This function implements all reasonable
3323 conversions; callers should filter out those that are
3324 not permitted by the language being compiled. */
3327 convert (tree type, tree expr)
3329 enum tree_code code = TREE_CODE (type);
3330 tree etype = TREE_TYPE (expr);
3331 enum tree_code ecode = TREE_CODE (etype);
3333 /* If EXPR is already the right type, we are done. */
3337 /* If both input and output have padding and are of variable size, do this
3338 as an unchecked conversion. Likewise if one is a mere variant of the
3339 other, so we avoid a pointless unpad/repad sequence. */
3340 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
3341 && TYPE_IS_PADDING_P (type) && TYPE_IS_PADDING_P (etype)
3342 && (!TREE_CONSTANT (TYPE_SIZE (type))
3343 || !TREE_CONSTANT (TYPE_SIZE (etype))
3344 || TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)))
3347 /* If the output type has padding, make a constructor to build the
3349 else if (code == RECORD_TYPE && TYPE_IS_PADDING_P (type))
3351 /* If we previously converted from another type and our type is
3352 of variable size, remove the conversion to avoid the need for
3353 variable-size temporaries. */
3354 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3355 && !TREE_CONSTANT (TYPE_SIZE (type)))
3356 expr = TREE_OPERAND (expr, 0);
3358 /* If we are just removing the padding from expr, convert the original
3359 object if we have variable size. That will avoid the need
3360 for some variable-size temporaries. */
3361 if (TREE_CODE (expr) == COMPONENT_REF
3362 && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE
3363 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
3364 && !TREE_CONSTANT (TYPE_SIZE (type)))
3365 return convert (type, TREE_OPERAND (expr, 0));
3367 /* If the result type is a padded type with a self-referentially-sized
3368 field and the expression type is a record, do this as an
3369 unchecked conversion. */
3370 else if (TREE_CODE (etype) == RECORD_TYPE
3371 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
3372 return unchecked_convert (type, expr, false);
3376 gnat_build_constructor (type,
3377 tree_cons (TYPE_FIELDS (type),
3379 (TYPE_FIELDS (type)),
3384 /* If the input type has padding, remove it and convert to the output type.
3385 The conditions ordering is arranged to ensure that the output type is not
3386 a padding type here, as it is not clear whether the conversion would
3387 always be correct if this was to happen. */
3388 else if (ecode == RECORD_TYPE && TYPE_IS_PADDING_P (etype))
3392 /* If we have just converted to this padded type, just get the
3393 inner expression. */
3394 if (TREE_CODE (expr) == CONSTRUCTOR
3395 && !VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (expr))
3396 && VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->index
3397 == TYPE_FIELDS (etype))
3399 = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->value;
3401 /* Otherwise, build an explicit component reference. */
3404 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
3406 return convert (type, unpadded);
3409 /* If the input is a biased type, adjust first. */
3410 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
3411 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
3412 fold_convert (TREE_TYPE (etype),
3414 TYPE_MIN_VALUE (etype)));
3416 /* If the input is a justified modular type, we need to extract the actual
3417 object before converting it to any other type with the exceptions of an
3418 unconstrained array or of a mere type variant. It is useful to avoid the
3419 extraction and conversion in the type variant case because it could end
3420 up replacing a VAR_DECL expr by a constructor and we might be about to
3421 take the address of the result. */
3422 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
3423 && code != UNCONSTRAINED_ARRAY_TYPE
3424 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
3425 return convert (type, build_component_ref (expr, NULL_TREE,
3426 TYPE_FIELDS (etype), false));
3428 /* If converting to a type that contains a template, convert to the data
3429 type and then build the template. */
3430 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
3432 tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));
3434 /* If the source already has a template, get a reference to the
3435 associated array only, as we are going to rebuild a template
3436 for the target type anyway. */
3437 expr = maybe_unconstrained_array (expr);
3440 gnat_build_constructor
3442 tree_cons (TYPE_FIELDS (type),
3443 build_template (TREE_TYPE (TYPE_FIELDS (type)),
3444 obj_type, NULL_TREE),
3445 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3446 convert (obj_type, expr), NULL_TREE)));
3449 /* There are some special cases of expressions that we process
3451 switch (TREE_CODE (expr))
3457 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
3458 conversion in gnat_expand_expr. NULL_EXPR does not represent
3459 an actual value, so no conversion is needed. */
3460 expr = copy_node (expr);
3461 TREE_TYPE (expr) = type;
3465 /* If we are converting a STRING_CST to another constrained array type,
3466 just make a new one in the proper type. */
3467 if (code == ecode && AGGREGATE_TYPE_P (etype)
3468 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
3469 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
3471 expr = copy_node (expr);
3472 TREE_TYPE (expr) = type;
3477 case UNCONSTRAINED_ARRAY_REF:
3478 /* Convert this to the type of the inner array by getting the address of
3479 the array from the template. */
3480 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
3481 build_component_ref (TREE_OPERAND (expr, 0),
3482 get_identifier ("P_ARRAY"),
/* Refresh the cached source type/code: EXPR was just replaced.  */
3484 etype = TREE_TYPE (expr);
3485 ecode = TREE_CODE (etype);
3488 case VIEW_CONVERT_EXPR:
3490 /* GCC 4.x is very sensitive to type consistency overall, and view
3491 conversions thus are very frequent. Even though just "convert"ing
3492 the inner operand to the output type is fine in most cases, it
3493 might expose unexpected input/output type mismatches in special
3494 circumstances so we avoid such recursive calls when we can. */
3496 tree op0 = TREE_OPERAND (expr, 0);
3498 /* If we are converting back to the original type, we can just
3499 lift the input conversion. This is a common occurrence with
3500 switches back-and-forth amongst type variants. */
3501 if (type == TREE_TYPE (op0))
3504 /* Otherwise, if we're converting between two aggregate types, we
3505 might be allowed to substitute the VIEW_CONVERT target type in
3506 place or to just convert the inner expression. */
3507 if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
3509 /* If we are converting between type variants, we can just
3510 substitute the VIEW_CONVERT in place. */
3511 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
3512 return build1 (VIEW_CONVERT_EXPR, type, op0);
3514 /* Otherwise, we may just bypass the input view conversion unless
3515 one of the types is a fat pointer, which is handled by
3516 specialized code below which relies on exact type matching. */
3517 else if (!TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
3518 return convert (type, op0);
3524 /* If both types are record types, just convert the pointer and
3525 make a new INDIRECT_REF.
3527 ??? Disable this for now since it causes problems with the
3528 code in build_binary_op for MODIFY_EXPR which wants to
3529 strip off conversions. But that code really is a mess and
3530 we need to do this a much better way some time. */
3532 && (TREE_CODE (type) == RECORD_TYPE
3533 || TREE_CODE (type) == UNION_TYPE)
3534 && (TREE_CODE (etype) == RECORD_TYPE
3535 || TREE_CODE (etype) == UNION_TYPE)
3536 && !TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
3537 return build_unary_op (INDIRECT_REF, NULL_TREE,
3538 convert (build_pointer_type (type),
3539 TREE_OPERAND (expr, 0)));
3546 /* Check for converting to a pointer to an unconstrained array. */
3547 if (TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
3548 return convert_to_fat_pointer (type, expr);
3550 /* If we're converting between two aggregate types that have the same main
3551 variant, just make a VIEW_CONVERT_EXPR. */
3552 else if (AGGREGATE_TYPE_P (type)
3553 && TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
3554 return build1 (VIEW_CONVERT_EXPR, type, expr);
3556 /* In all other cases of related types, make a NOP_EXPR. */
3557 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
3558 || (code == INTEGER_CST && ecode == INTEGER_CST
3559 && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
3560 return fold_convert (type, expr);
3565 return fold_build1 (CONVERT_EXPR, type, expr);
3568 return fold_convert (type, gnat_truthvalue_conversion (expr));
3571 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
3572 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
3573 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
3574 return unchecked_convert (type, expr, false);
3575 else if (TYPE_BIASED_REPRESENTATION_P (type))
3576 return fold_convert (type,
3577 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
3578 convert (TREE_TYPE (type), expr),
3579 TYPE_MIN_VALUE (type)));
3581 /* ... fall through ... */
3584 return fold (convert_to_integer (type, expr));
3587 case REFERENCE_TYPE:
3588 /* If converting between two pointers to records denoting
3589 both a template and type, adjust if needed to account
3590 for any differing offsets, since one might be negative. */
3591 if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
3594 = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
3595 bit_position (TYPE_FIELDS (TREE_TYPE (type))));
3596 tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
3597 sbitsize_int (BITS_PER_UNIT));
3599 expr = build1 (NOP_EXPR, type, expr);
3600 TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
3601 if (integer_zerop (byte_diff))
3604 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
3605 fold (convert (sizetype, byte_diff)));
3608 /* If converting to a thin pointer, handle specially. */
3609 if (TYPE_THIN_POINTER_P (type)
3610 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
3611 return convert_to_thin_pointer (type, expr);
3613 /* If converting fat pointer to normal pointer, get the pointer to the
3614 array and then convert it. */
3615 else if (TYPE_FAT_POINTER_P (etype))
3616 expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
3619 return fold (convert_to_pointer (type, expr));
3622 return fold (convert_to_real (type, expr));
3625 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
3627 gnat_build_constructor
3628 (type, tree_cons (TYPE_FIELDS (type),
3629 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
3632 /* ... fall through ... */
3635 /* In these cases, assume the front-end has validated the conversion.
3636 If the conversion is valid, it will be a bit-wise conversion, so
3637 it can be viewed as an unchecked conversion. */
3638 return unchecked_convert (type, expr, false);
3641 /* This is either a conversion between a tagged type and some
3642 subtype, which we have to mark as a UNION_TYPE because of
3643 overlapping fields or a conversion of an Unchecked_Union. */
3644 return unchecked_convert (type, expr, false);
3646 case UNCONSTRAINED_ARRAY_TYPE:
3647 /* If EXPR is a constrained array, take its address, convert it to a
3648 fat pointer, and then dereference it. Likewise if EXPR is a
3649 record containing both a template and a constrained array.
3650 Note that a record representing a justified modular type
3651 always represents a packed constrained array. */
3652 if (ecode == ARRAY_TYPE
3653 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
3654 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
3655 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
3658 (INDIRECT_REF, NULL_TREE,
3659 convert_to_fat_pointer (TREE_TYPE (type),
3660 build_unary_op (ADDR_EXPR,
3663 /* Do something very similar for converting one unconstrained
3664 array to another. */
3665 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
3667 build_unary_op (INDIRECT_REF, NULL_TREE,
3668 convert (TREE_TYPE (type),
3669 build_unary_op (ADDR_EXPR,
3675 return fold (convert_to_complex (type, expr));
3682 /* Remove all conversions that are done in EXP. This includes converting
3683 from a padded type or to a justified modular type. If TRUE_ADDRESS
3684 is true, always return the address of the containing object even if
3685 the address is not bit-aligned. */
3688 remove_conversions (tree exp, bool true_address)
3690 switch (TREE_CODE (exp))
/* A constructor for a justified modular record wraps the real value in
   its single field; recurse into that value.  */
3694 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3695 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
3697 remove_conversions (VEC_index (constructor_elt,
3698 CONSTRUCTOR_ELTS (exp), 0)->value,
/* A COMPONENT_REF into a padding record is just an unpadding step.  */
3703 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
3704 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
3705 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3708 case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
3709 case NOP_EXPR: case CONVERT_EXPR:
3710 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3719 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
3720 refers to the underlying array. If its type has TYPE_CONTAINS_TEMPLATE_P,
3721 likewise return an expression pointing to the underlying array. */
3724 maybe_unconstrained_array (tree exp)
3726 enum tree_code code = TREE_CODE (exp);
3729 switch (TREE_CODE (TREE_TYPE (exp)))
3731 case UNCONSTRAINED_ARRAY_TYPE:
3732 if (code == UNCONSTRAINED_ARRAY_REF)
/* Dereference the P_ARRAY pointer found in the fat pointer operand.  */
3735 = build_unary_op (INDIRECT_REF, NULL_TREE,
3736 build_component_ref (TREE_OPERAND (exp, 0),
3737 get_identifier ("P_ARRAY"),
3739 TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp);
3743 else if (code == NULL_EXPR)
3744 return build1 (NULL_EXPR,
3745 TREE_TYPE (TREE_TYPE (TYPE_FIELDS
3746 (TREE_TYPE (TREE_TYPE (exp))))),
3747 TREE_OPERAND (exp, 0));
3750 /* If this is a padded type, convert to the unpadded type and see if
3751 it contains a template. */
3752 if (TYPE_IS_PADDING_P (TREE_TYPE (exp)))
3754 new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp))), exp);
3755 if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
3756 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (new)))
/* The array is the second field, after the template.  */
3758 build_component_ref (new, NULL_TREE,
3759 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (new))),
3762 else if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
3764 build_component_ref (exp, NULL_TREE,
3765 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))), 0);
3775 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
3776 If NOTRUNC_P is true, truncation operations should be suppressed. */
3779 unchecked_convert (tree type, tree expr, bool notrunc_p)
3781 tree etype = TREE_TYPE (expr);
3783 /* If the expression is already the right type, we are done. */
3787 /* If both types are integral just do a normal conversion.
3788 Likewise for a conversion to an unconstrained array. */
3789 if ((((INTEGRAL_TYPE_P (type)
3790 && !(TREE_CODE (type) == INTEGER_TYPE
3791 && TYPE_VAX_FLOATING_POINT_P (type)))
3792 || (POINTER_TYPE_P (type) && ! TYPE_THIN_POINTER_P (type))
3793 || (TREE_CODE (type) == RECORD_TYPE
3794 && TYPE_JUSTIFIED_MODULAR_P (type)))
3795 && ((INTEGRAL_TYPE_P (etype)
3796 && !(TREE_CODE (etype) == INTEGER_TYPE
3797 && TYPE_VAX_FLOATING_POINT_P (etype)))
3798 || (POINTER_TYPE_P (etype) && !TYPE_THIN_POINTER_P (etype))
3799 || (TREE_CODE (etype) == RECORD_TYPE
3800 && TYPE_JUSTIFIED_MODULAR_P (etype))))
3801 || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3804 bool final_unchecked = false;
/* Strip biased representation from the source: convert via an unbiased
   copy of the type so no bias adjustment is performed.  */
3806 if (TREE_CODE (etype) == INTEGER_TYPE
3807 && TYPE_BIASED_REPRESENTATION_P (etype))
3809 tree ntype = copy_type (etype);
3811 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
3812 TYPE_MAIN_VARIANT (ntype) = ntype;
3813 expr = build1 (NOP_EXPR, ntype, expr);
3816 if (TREE_CODE (type) == INTEGER_TYPE
3817 && TYPE_BIASED_REPRESENTATION_P (type))
3819 rtype = copy_type (type);
3820 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
3821 TYPE_MAIN_VARIANT (rtype) = rtype;
3824 /* We have another special case. If we are unchecked converting subtype
3825 into a base type, we need to ensure that VRP doesn't propagate range
3826 information since this conversion may be done precisely to validate
3827 that the object is within the range it is supposed to have. */
3828 else if (TREE_CODE (expr) != INTEGER_CST
3829 && TREE_CODE (type) == INTEGER_TYPE && !TREE_TYPE (type)
3830 && ((TREE_CODE (etype) == INTEGER_TYPE && TREE_TYPE (etype))
3831 || TREE_CODE (etype) == ENUMERAL_TYPE
3832 || TREE_CODE (etype) == BOOLEAN_TYPE))
3834 /* ??? The pattern to be "preserved" by the middle-end and the
3835 optimizers is a VIEW_CONVERT_EXPR between a pair of different
3836 "base" types (integer types without TREE_TYPE). But this may
3837 raise addressability/aliasing issues because VIEW_CONVERT_EXPR
3838 gets gimplified as an lvalue, thus causing the address of its
3839 operand to be taken if it is deemed addressable and not already
3841 rtype = gnat_type_for_mode (TYPE_MODE (type), TYPE_UNSIGNED (type));
3845 rtype = copy_type (rtype);
3846 TYPE_MAIN_VARIANT (rtype) = rtype;
3849 final_unchecked = true;
3852 expr = convert (rtype, expr);
3854 expr = build1 (final_unchecked ? VIEW_CONVERT_EXPR : NOP_EXPR,
3858 /* If we are converting TO an integral type whose precision is not the
3859 same as its size, first unchecked convert to a record that contains
3860 an object of the output type. Then extract the field. */
3861 else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
3862 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
3863 GET_MODE_BITSIZE (TYPE_MODE (type))))
3865 tree rec_type = make_node (RECORD_TYPE);
3866 tree field = create_field_decl (get_identifier ("OBJ"), type,
3867 rec_type, 1, 0, 0, 0);
3869 TYPE_FIELDS (rec_type) = field;
3870 layout_type (rec_type);
3872 expr = unchecked_convert (rec_type, expr, notrunc_p);
3873 expr = build_component_ref (expr, NULL_TREE, field, 0);
3876 /* Similarly for integral input type whose precision is not equal to its
3878 else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype)
3879 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
3880 GET_MODE_BITSIZE (TYPE_MODE (etype))))
3882 tree rec_type = make_node (RECORD_TYPE);
3884 = create_field_decl (get_identifier ("OBJ"), etype, rec_type,
3887 TYPE_FIELDS (rec_type) = field;
3888 layout_type (rec_type);
3890 expr = gnat_build_constructor (rec_type, build_tree_list (field, expr));
3891 expr = unchecked_convert (type, expr, notrunc_p);
3894 /* We have a special case when we are converting between two
3895 unconstrained array types. In that case, take the address,
3896 convert the fat pointer types, and dereference. */
3897 else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
3898 && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3899 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
3900 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
3901 build_unary_op (ADDR_EXPR, NULL_TREE,
3905 expr = maybe_unconstrained_array (expr);
3907 /* There's no point in doing two unchecked conversions in a row. */
3908 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3909 expr = TREE_OPERAND (expr, 0);
3911 etype = TREE_TYPE (expr);
3912 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
3915 /* If the result is an integral type whose size is not equal to
3916 the size of the underlying machine type, sign- or zero-extend
3917 the result. We need not do this in the case where the input is
3918 an integral type of the same precision and signedness or if the output
3919 is a biased type or if both the input and output are unsigned. */
3921 && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
3922 && !(TREE_CODE (type) == INTEGER_TYPE
3923 && TYPE_BIASED_REPRESENTATION_P (type))
3924 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
3925 GET_MODE_BITSIZE (TYPE_MODE (type)))
3926 && !(INTEGRAL_TYPE_P (etype)
3927 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
3928 && operand_equal_p (TYPE_RM_SIZE (type),
3929 (TYPE_RM_SIZE (etype) != 0
3930 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
3932 && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
3934 tree base_type = gnat_type_for_mode (TYPE_MODE (type),
3935 TYPE_UNSIGNED (type));
/* Shift left then right by (machine size - RM size) bits so the top
   bits are dropped and the value is sign- or zero-extended back.  */
3937 = convert (base_type,
3938 size_binop (MINUS_EXPR,
3940 (GET_MODE_BITSIZE (TYPE_MODE (type))),
3941 TYPE_RM_SIZE (type)));
3944 build_binary_op (RSHIFT_EXPR, base_type,
3945 build_binary_op (LSHIFT_EXPR, base_type,
3946 convert (base_type, expr),
3951 /* An unchecked conversion should never raise Constraint_Error. The code
3952 below assumes that GCC's conversion routines overflow the same way that
3953 the underlying hardware does. This is probably true. In the rare case
3954 when it is false, we can rely on the fact that such conversions are
3955 erroneous anyway. */
3956 if (TREE_CODE (expr) == INTEGER_CST)
3957 TREE_OVERFLOW (expr) = 0;
3959 /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
3960 show no longer constant. */
3961 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3962 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
3964 TREE_CONSTANT (expr) = 0;
3969 /* Search the chain of currently available builtin declarations for a node
3970 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
3971 found, if any, or NULL_TREE otherwise. */
3973 builtin_decl_for (tree name)
/* Linear scan of the builtin_decls vector; identifier nodes are unique,
   so pointer comparison of DECL_NAME suffices.  */
3978 for (i = 0; VEC_iterate(tree, builtin_decls, i, decl); i++)
3979 if (DECL_NAME (decl) == name)
3985 /* Return the appropriate GCC tree code for the specified GNAT type,
3986 the latter being a record type as predicated by Is_Record_Type. */
3989 tree_code_for_record_type (Entity_Id gnat_type)
3991 Node_Id component_list
3992 = Component_List (Type_Definition
3994 (Implementation_Base_Type (gnat_type))));
3997 /* Make this a UNION_TYPE unless it's either not an Unchecked_Union or
3998 we have a non-discriminant field outside a variant. In either case,
3999 it's a RECORD_TYPE. */
4001 if (!Is_Unchecked_Union (gnat_type))
/* Scan the component items; any E_Component found here lies outside the
   variant part and forces RECORD_TYPE.  */
4004 for (component = First_Non_Pragma (Component_Items (component_list));
4005 Present (component);
4006 component = Next_Non_Pragma (component))
4007 if (Ekind (Defining_Entity (component)) == E_Component)
4013 /* Build a global constructor or destructor function. METHOD_TYPE gives
4014 the type of the function and VEC points to the vector of constructor
4015 or destructor functions to be invoked. FIXME: Migrate into cgraph. */
4018 build_global_cdtor (int method_type, tree *vec, int len)
4020 tree body = NULL_TREE;
/* Emit one zero-argument call through the address of each function in
   VEC, accumulating them into a single statement list.  */
4023 for (i = 0; i < len; i++)
4025 tree fntype = TREE_TYPE (vec[i]);
4026 tree fnaddr = build1 (ADDR_EXPR, build_pointer_type (fntype), vec[i]);
4027 tree fncall = build_call_nary (TREE_TYPE (fntype), fnaddr, 0);
4028 append_to_statement_list (fncall, &body);
4032 cgraph_build_static_cdtor (method_type, body, DEFAULT_INIT_PRIORITY);
4035 /* Perform final processing on global variables. */
4038 gnat_write_global_declarations (void)
4040 /* Generate functions to call static constructors and destructors
4041 for targets that do not support .ctors/.dtors sections. These
4042 functions have magic names which are detected by collect2. */
/* 'I' builds the initializer ("constructor") function, 'D' the
   finalizer ("destructor") function.  */
4043 build_global_cdtor ('I', VEC_address (tree, static_ctors),
4044 VEC_length (tree, static_ctors));
4045 build_global_cdtor ('D', VEC_address (tree, static_dtors),
4046 VEC_length (tree, static_dtors));
4048 /* Proceed to optimize and emit assembly.
4049 FIXME: shouldn't be the front end's responsibility to call this. */
4052 /* Emit debug info for all global declarations. */
4053 emit_debug_global_declarations (VEC_address (tree, global_decls),
4054 VEC_length (tree, global_decls));
4057 #include "gt-ada-utils.h"
4058 #include "gtype-ada.h"