1 /****************************************************************************
3 * GNAT COMPILER COMPONENTS *
7 * C Implementation File *
9 * Copyright (C) 1992-2008, Free Software Foundation, Inc. *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
24 ****************************************************************************/
28 #include "coretypes.h"
41 #include "tree-inline.h"
42 #include "tree-gimple.h"
43 #include "tree-dump.h"
44 #include "pointer-set.h"
60 #ifndef MAX_FIXED_MODE_SIZE
61 #define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
64 #ifndef MAX_BITS_PER_WORD
65 #define MAX_BITS_PER_WORD BITS_PER_WORD
68 /* If nonzero, pretend we are allocating at global level. */
71 /* Tree nodes for the various types and decls we create. */
72 tree gnat_std_decls[(int) ADT_LAST];
74 /* Functions to call for each of the possible raise reasons. */
75 tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
77 /* Forward declarations for handlers of attributes. */
78 static tree handle_const_attribute (tree *, tree, tree, int, bool *);
79 static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
81 /* Table of machine-independent internal attributes for Ada. We support
82 this minimal set of attributes to accommodate the Alpha back-end which
83 unconditionally puts them on its builtins. */
84 const struct attribute_spec gnat_internal_attribute_table[] =
86 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
87 { "const", 0, 0, true, false, false, handle_const_attribute },
88 { "nothrow", 0, 0, true, false, false, handle_nothrow_attribute },
89 { NULL, 0, 0, false, false, false, NULL }
92 /* Associates a GNAT tree node to a GCC tree node. It is used in
93 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
94 of `save_gnu_tree' for more info. */
95 static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
97 #define GET_GNU_TREE(GNAT_ENTITY) \
98 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
100 #define SET_GNU_TREE(GNAT_ENTITY,VAL) \
101 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
103 #define PRESENT_GNU_TREE(GNAT_ENTITY) \
104 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
106 /* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
107 static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;
109 #define GET_DUMMY_NODE(GNAT_ENTITY) \
110 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
112 #define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
113 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
115 #define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
116 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
118 /* This variable keeps a table for types for each precision so that we only
119 allocate each of them once. Signed and unsigned types are kept separate.
121 Note that these types are only used when fold-const requests something
122 special. Perhaps we should NOT share these types; we'll see how it
124 static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
126 /* Likewise for float types, but record these by mode. */
127 static GTY(()) tree float_types[NUM_MACHINE_MODES];
129 /* For each binding contour we allocate a binding_level structure to indicate
130 the binding depth. */
132 struct gnat_binding_level GTY((chain_next ("%h.chain")))
134 /* The binding level containing this one (the enclosing binding level). */
135 struct gnat_binding_level *chain;
136 /* The BLOCK node for this level. */
138 /* If nonzero, the setjmp buffer that needs to be updated for any
139 variable-sized definition within this context. */
143 /* The binding level currently in effect. */
144 static GTY(()) struct gnat_binding_level *current_binding_level;
146 /* A chain of gnat_binding_level structures awaiting reuse. */
147 static GTY((deletable)) struct gnat_binding_level *free_binding_level;
149 /* An array of global declarations. */
150 static GTY(()) VEC(tree,gc) *global_decls;
152 /* An array of builtin declarations. */
153 static GTY(()) VEC(tree,gc) *builtin_decls;
155 /* An array of global renaming pointers. */
156 static GTY(()) VEC(tree,gc) *global_renaming_pointers;
158 /* A chain of unused BLOCK nodes. */
159 static GTY((deletable)) tree free_block_chain;
161 static void gnat_install_builtins (void);
162 static tree merge_sizes (tree, tree, tree, bool, bool);
163 static tree compute_related_constant (tree, tree);
164 static tree split_plus (tree, tree *);
165 static void gnat_gimplify_function (tree);
166 static tree float_type_for_precision (int, enum machine_mode);
167 static tree convert_to_fat_pointer (tree, tree);
168 static tree convert_to_thin_pointer (tree, tree);
169 static tree make_descriptor_field (const char *,tree, tree, tree);
170 static bool potential_alignment_gap (tree, tree, tree);
172 /* Initialize the association of GNAT nodes to GCC trees. */
175 init_gnat_to_gnu (void)
177 associate_gnat_to_gnu
178 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
181 /* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
182 which is to be associated with GNAT_ENTITY. Such GCC tree node is always
183 a ..._DECL node. If NO_CHECK is nonzero, the latter check is suppressed.
185 If GNU_DECL is zero, a previous association is to be reset. */
188 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
190 /* Check that GNAT_ENTITY is not already defined and that it is being set
191 to something which is a decl. Raise gigi 401 if not. Usually, this
192 means GNAT_ENTITY is defined twice, but occasionally is due to some
194 gcc_assert (!(gnu_decl
195 && (PRESENT_GNU_TREE (gnat_entity)
196 || (!no_check && !DECL_P (gnu_decl)))));
198 SET_GNU_TREE (gnat_entity, gnu_decl);
201 /* GNAT_ENTITY is a GNAT tree node for a defining identifier.
202 Return the ..._DECL node that was associated with it. If there is no tree
203 node associated with GNAT_ENTITY, abort.
205 In some cases, such as delayed elaboration or expressions that need to
206 be elaborated only once, GNAT_ENTITY is really not an entity. */
209 get_gnu_tree (Entity_Id gnat_entity)
211 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
212 return GET_GNU_TREE (gnat_entity);
215 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
218 present_gnu_tree (Entity_Id gnat_entity)
220 return PRESENT_GNU_TREE (gnat_entity);
223 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
226 init_dummy_type (void)
229 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
232 /* Make a dummy type corresponding to GNAT_TYPE. */
235 make_dummy_type (Entity_Id gnat_type)
237 Entity_Id gnat_underlying = Gigi_Equivalent_Type (gnat_type);
240 /* If there is an equivalent type, get its underlying type. */
241 if (Present (gnat_underlying))
242 gnat_underlying = Underlying_Type (gnat_underlying);
244 /* If there was no equivalent type (can only happen when just annotating
245 types) or underlying type, go back to the original type. */
246 if (No (gnat_underlying))
247 gnat_underlying = gnat_type;
249 /* If it there already a dummy type, use that one. Else make one. */
250 if (PRESENT_DUMMY_NODE (gnat_underlying))
251 return GET_DUMMY_NODE (gnat_underlying);
253 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
255 gnu_type = make_node (Is_Record_Type (gnat_underlying)
256 ? tree_code_for_record_type (gnat_underlying)
258 TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
259 TYPE_DUMMY_P (gnu_type) = 1;
260 if (AGGREGATE_TYPE_P (gnu_type))
262 TYPE_STUB_DECL (gnu_type) = build_decl (TYPE_DECL, NULL_TREE, gnu_type);
263 TYPE_BY_REFERENCE_P (gnu_type) = Is_By_Reference_Type (gnat_type);
266 SET_DUMMY_NODE (gnat_underlying, gnu_type);
271 /* Return nonzero if we are currently in the global binding level. */
274 global_bindings_p (void)
276 return ((force_global || !current_function_decl) ? -1 : 0);
279 /* Enter a new binding level. */
284 struct gnat_binding_level *newlevel = NULL;
286 /* Reuse a struct for this binding level, if there is one. */
287 if (free_binding_level)
289 newlevel = free_binding_level;
290 free_binding_level = free_binding_level->chain;
294 = (struct gnat_binding_level *)
295 ggc_alloc (sizeof (struct gnat_binding_level));
297 /* Use a free BLOCK, if any; otherwise, allocate one. */
298 if (free_block_chain)
300 newlevel->block = free_block_chain;
301 free_block_chain = BLOCK_CHAIN (free_block_chain);
302 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
305 newlevel->block = make_node (BLOCK);
307 /* Point the BLOCK we just made to its parent. */
308 if (current_binding_level)
309 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
311 BLOCK_VARS (newlevel->block) = BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
312 TREE_USED (newlevel->block) = 1;
314 /* Add this level to the front of the chain (stack) of levels that are
316 newlevel->chain = current_binding_level;
317 newlevel->jmpbuf_decl = NULL_TREE;
318 current_binding_level = newlevel;
321 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
322 and point FNDECL to this BLOCK. */
325 set_current_block_context (tree fndecl)
327 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
328 DECL_INITIAL (fndecl) = current_binding_level->block;
331 /* Set the jmpbuf_decl for the current binding level to DECL. */
334 set_block_jmpbuf_decl (tree decl)
336 current_binding_level->jmpbuf_decl = decl;
339 /* Get the jmpbuf_decl, if any, for the current binding level. */
342 get_block_jmpbuf_decl ()
344 return current_binding_level->jmpbuf_decl;
347 /* Exit a binding level. Set any BLOCK into the current code group. */
352 struct gnat_binding_level *level = current_binding_level;
353 tree block = level->block;
355 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
356 BLOCK_SUBBLOCKS (block) = nreverse (BLOCK_SUBBLOCKS (block));
358 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
359 are no variables free the block and merge its subblocks into those of its
360 parent block. Otherwise, add it to the list of its parent. */
361 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
363 else if (BLOCK_VARS (block) == NULL_TREE)
365 BLOCK_SUBBLOCKS (level->chain->block)
366 = chainon (BLOCK_SUBBLOCKS (block),
367 BLOCK_SUBBLOCKS (level->chain->block));
368 BLOCK_CHAIN (block) = free_block_chain;
369 free_block_chain = block;
373 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
374 BLOCK_SUBBLOCKS (level->chain->block) = block;
375 TREE_USED (block) = 1;
376 set_block_for_group (block);
379 /* Free this binding structure. */
380 current_binding_level = level->chain;
381 level->chain = free_binding_level;
382 free_binding_level = level;
386 /* Records a ..._DECL node DECL as belonging to the current lexical scope
387 and uses GNAT_NODE for location information and propagating flags. */
390 gnat_pushdecl (tree decl, Node_Id gnat_node)
392 /* If at top level, there is no context. But PARM_DECLs always go in the
393 level of its function. */
394 if (global_bindings_p () && TREE_CODE (decl) != PARM_DECL)
395 DECL_CONTEXT (decl) = 0;
398 DECL_CONTEXT (decl) = current_function_decl;
400 /* Functions imported in another function are not really nested. */
401 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_PUBLIC (decl))
402 DECL_NO_STATIC_CHAIN (decl) = 1;
405 TREE_NO_WARNING (decl) = (gnat_node == Empty || Warnings_Off (gnat_node));
407 /* Set the location of DECL and emit a declaration for it. */
408 if (Present (gnat_node))
409 Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
410 add_decl_expr (decl, gnat_node);
412 /* Put the declaration on the list. The list of declarations is in reverse
413 order. The list will be reversed later. Put global variables in the
414 globals list and builtin functions in a dedicated list to speed up
415 further lookups. Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into
416 the list, as they will cause trouble with the debugger and aren't needed
418 if (TREE_CODE (decl) != TYPE_DECL
419 || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
421 if (global_bindings_p ())
423 VEC_safe_push (tree, gc, global_decls, decl);
425 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
426 VEC_safe_push (tree, gc, builtin_decls, decl);
430 TREE_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
431 BLOCK_VARS (current_binding_level->block) = decl;
435 /* For the declaration of a type, set its name if it either is not already
436 set, was set to an IDENTIFIER_NODE, indicating an internal name,
437 or if the previous type name was not derived from a source name.
438 We'd rather have the type named with a real name and all the pointer
439 types to the same object have the same POINTER_TYPE node. Code in the
440 equivalent function of c-decl.c makes a copy of the type node here, but
441 that may cause us trouble with incomplete types. We make an exception
442 for fat pointer types because the compiler automatically builds them
443 for unconstrained array types and the debugger uses them to represent
444 both these and pointers to these. */
445 if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
447 tree t = TREE_TYPE (decl);
449 if (!TYPE_NAME (t) || TREE_CODE (TYPE_NAME (t)) == IDENTIFIER_NODE)
451 else if (TYPE_FAT_POINTER_P (t))
453 tree tt = build_variant_type_copy (t);
454 TYPE_NAME (tt) = decl;
455 TREE_USED (tt) = TREE_USED (t);
456 TREE_TYPE (decl) = tt;
457 DECL_ORIGINAL_TYPE (decl) = t;
460 else if (DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
465 /* Propagate the name to all the variants. This is needed for
466 the type qualifiers machinery to work properly. */
468 for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
469 TYPE_NAME (t) = decl;
473 /* Do little here. Set up the standard declarations later after the
474 front end has been run. */
477 gnat_init_decl_processing (void)
479 /* Make the binding_level structure for global names. */
480 current_function_decl = 0;
481 current_binding_level = 0;
482 free_binding_level = 0;
485 build_common_tree_nodes (true, true);
487 /* In Ada, we use a signed type for SIZETYPE. Use the signed type
488 corresponding to the size of Pmode. In most cases when ptr_mode and
489 Pmode differ, C will use the width of ptr_mode as sizetype. But we get
490 far better code using the width of Pmode. Make this here since we need
491 this before we can expand the GNAT types. */
492 size_type_node = gnat_type_for_size (GET_MODE_BITSIZE (Pmode), 0);
493 set_sizetype (size_type_node);
494 build_common_tree_nodes_2 (0);
496 ptr_void_type_node = build_pointer_type (void_type_node);
498 gnat_install_builtins ();
501 /* Install the builtin functions we might need. */
504 gnat_install_builtins ()
506 /* Builtins used by generic middle-end optimizers. */
507 build_common_builtin_nodes ();
509 /* Target specific builtins, such as the AltiVec family on ppc. */
510 targetm.init_builtins ();
513 /* Create the predefined scalar types such as `integer_type_node' needed
514 in the gcc back-end and initialize the global binding level. */
517 init_gigi_decls (tree long_long_float_type, tree exception_type)
522 /* Set the types that GCC and Gigi use from the front end. We would like
523 to do this for char_type_node, but it needs to correspond to the C
525 if (TREE_CODE (TREE_TYPE (long_long_float_type)) == INTEGER_TYPE)
527 /* In this case, the builtin floating point types are VAX float,
528 so make up a type for use. */
529 longest_float_type_node = make_node (REAL_TYPE);
530 TYPE_PRECISION (longest_float_type_node) = LONG_DOUBLE_TYPE_SIZE;
531 layout_type (longest_float_type_node);
532 create_type_decl (get_identifier ("longest float type"),
533 longest_float_type_node, NULL, false, true, Empty);
536 longest_float_type_node = TREE_TYPE (long_long_float_type);
538 except_type_node = TREE_TYPE (exception_type);
540 unsigned_type_node = gnat_type_for_size (INT_TYPE_SIZE, 1);
541 create_type_decl (get_identifier ("unsigned int"), unsigned_type_node,
542 NULL, false, true, Empty);
544 void_type_decl_node = create_type_decl (get_identifier ("void"),
545 void_type_node, NULL, false, true,
548 void_ftype = build_function_type (void_type_node, NULL_TREE);
549 ptr_void_ftype = build_pointer_type (void_ftype);
551 /* Build the special descriptor type and its null node if needed. */
552 if (TARGET_VTABLE_USES_DESCRIPTORS)
554 tree field_list = NULL_TREE, null_list = NULL_TREE;
557 fdesc_type_node = make_node (RECORD_TYPE);
559 for (j = 0; j < TARGET_VTABLE_USES_DESCRIPTORS; j++)
561 tree field = create_field_decl (NULL_TREE, ptr_void_ftype,
562 fdesc_type_node, 0, 0, 0, 1);
563 TREE_CHAIN (field) = field_list;
565 null_list = tree_cons (field, null_pointer_node, null_list);
568 finish_record_type (fdesc_type_node, nreverse (field_list), 0, false);
569 null_fdesc_node = gnat_build_constructor (fdesc_type_node, null_list);
572 /* Now declare runtime functions. */
573 endlink = tree_cons (NULL_TREE, void_type_node, NULL_TREE);
575 /* malloc is a function declaration tree for a function to allocate
577 malloc_decl = create_subprog_decl (get_identifier ("__gnat_malloc"),
579 build_function_type (ptr_void_type_node,
580 tree_cons (NULL_TREE,
583 NULL_TREE, false, true, true, NULL,
585 DECL_IS_MALLOC (malloc_decl) = 1;
587 /* malloc32 is a function declaration tree for a function to allocate
588 32bit memory on a 64bit system. Needed only on 64bit VMS. */
589 malloc32_decl = create_subprog_decl (get_identifier ("__gnat_malloc32"),
591 build_function_type (ptr_void_type_node,
592 tree_cons (NULL_TREE,
595 NULL_TREE, false, true, true, NULL,
597 DECL_IS_MALLOC (malloc32_decl) = 1;
599 /* free is a function declaration tree for a function to free memory. */
601 = create_subprog_decl (get_identifier ("__gnat_free"), NULL_TREE,
602 build_function_type (void_type_node,
603 tree_cons (NULL_TREE,
606 NULL_TREE, false, true, true, NULL, Empty);
608 /* Make the types and functions used for exception processing. */
610 = build_array_type (gnat_type_for_mode (Pmode, 0),
611 build_index_type (build_int_cst (NULL_TREE, 5)));
612 create_type_decl (get_identifier ("JMPBUF_T"), jmpbuf_type, NULL,
614 jmpbuf_ptr_type = build_pointer_type (jmpbuf_type);
616 /* Functions to get and set the jumpbuf pointer for the current thread. */
618 = create_subprog_decl
619 (get_identifier ("system__soft_links__get_jmpbuf_address_soft"),
620 NULL_TREE, build_function_type (jmpbuf_ptr_type, NULL_TREE),
621 NULL_TREE, false, true, true, NULL, Empty);
622 /* Avoid creating superfluous edges to __builtin_setjmp receivers. */
623 DECL_IS_PURE (get_jmpbuf_decl) = 1;
626 = create_subprog_decl
627 (get_identifier ("system__soft_links__set_jmpbuf_address_soft"),
629 build_function_type (void_type_node,
630 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
631 NULL_TREE, false, true, true, NULL, Empty);
633 /* Function to get the current exception. */
635 = create_subprog_decl
636 (get_identifier ("system__soft_links__get_gnat_exception"),
638 build_function_type (build_pointer_type (except_type_node), NULL_TREE),
639 NULL_TREE, false, true, true, NULL, Empty);
640 /* Avoid creating superfluous edges to __builtin_setjmp receivers. */
641 DECL_IS_PURE (get_excptr_decl) = 1;
643 /* Functions that raise exceptions. */
645 = create_subprog_decl
646 (get_identifier ("__gnat_raise_nodefer_with_msg"), NULL_TREE,
647 build_function_type (void_type_node,
648 tree_cons (NULL_TREE,
649 build_pointer_type (except_type_node),
651 NULL_TREE, false, true, true, NULL, Empty);
653 /* Dummy objects to materialize "others" and "all others" in the exception
654 tables. These are exported by a-exexpr.adb, so see this unit for the
658 = create_var_decl (get_identifier ("OTHERS"),
659 get_identifier ("__gnat_others_value"),
660 integer_type_node, 0, 1, 0, 1, 1, 0, Empty);
663 = create_var_decl (get_identifier ("ALL_OTHERS"),
664 get_identifier ("__gnat_all_others_value"),
665 integer_type_node, 0, 1, 0, 1, 1, 0, Empty);
667 /* Hooks to call when entering/leaving an exception handler. */
669 = create_subprog_decl (get_identifier ("__gnat_begin_handler"), NULL_TREE,
670 build_function_type (void_type_node,
671 tree_cons (NULL_TREE,
674 NULL_TREE, false, true, true, NULL, Empty);
677 = create_subprog_decl (get_identifier ("__gnat_end_handler"), NULL_TREE,
678 build_function_type (void_type_node,
679 tree_cons (NULL_TREE,
682 NULL_TREE, false, true, true, NULL, Empty);
684 /* If in no exception handlers mode, all raise statements are redirected to
685 __gnat_last_chance_handler. No need to redefine raise_nodefer_decl, since
686 this procedure will never be called in this mode. */
687 if (No_Exception_Handlers_Set ())
690 = create_subprog_decl
691 (get_identifier ("__gnat_last_chance_handler"), NULL_TREE,
692 build_function_type (void_type_node,
693 tree_cons (NULL_TREE,
694 build_pointer_type (char_type_node),
695 tree_cons (NULL_TREE,
698 NULL_TREE, false, true, true, NULL, Empty);
700 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
701 gnat_raise_decls[i] = decl;
704 /* Otherwise, make one decl for each exception reason. */
705 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
709 sprintf (name, "__gnat_rcheck_%.2d", i);
711 = create_subprog_decl
712 (get_identifier (name), NULL_TREE,
713 build_function_type (void_type_node,
714 tree_cons (NULL_TREE,
717 tree_cons (NULL_TREE,
720 NULL_TREE, false, true, true, NULL, Empty);
723 /* Indicate that these never return. */
724 TREE_THIS_VOLATILE (raise_nodefer_decl) = 1;
725 TREE_SIDE_EFFECTS (raise_nodefer_decl) = 1;
726 TREE_TYPE (raise_nodefer_decl)
727 = build_qualified_type (TREE_TYPE (raise_nodefer_decl),
730 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
732 TREE_THIS_VOLATILE (gnat_raise_decls[i]) = 1;
733 TREE_SIDE_EFFECTS (gnat_raise_decls[i]) = 1;
734 TREE_TYPE (gnat_raise_decls[i])
735 = build_qualified_type (TREE_TYPE (gnat_raise_decls[i]),
739 /* setjmp returns an integer and has one operand, which is a pointer to
742 = create_subprog_decl
743 (get_identifier ("__builtin_setjmp"), NULL_TREE,
744 build_function_type (integer_type_node,
745 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
746 NULL_TREE, false, true, true, NULL, Empty);
748 DECL_BUILT_IN_CLASS (setjmp_decl) = BUILT_IN_NORMAL;
749 DECL_FUNCTION_CODE (setjmp_decl) = BUILT_IN_SETJMP;
751 /* update_setjmp_buf updates a setjmp buffer from the current stack pointer
753 update_setjmp_buf_decl
754 = create_subprog_decl
755 (get_identifier ("__builtin_update_setjmp_buf"), NULL_TREE,
756 build_function_type (void_type_node,
757 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
758 NULL_TREE, false, true, true, NULL, Empty);
760 DECL_BUILT_IN_CLASS (update_setjmp_buf_decl) = BUILT_IN_NORMAL;
761 DECL_FUNCTION_CODE (update_setjmp_buf_decl) = BUILT_IN_UPDATE_SETJMP_BUF;
763 main_identifier_node = get_identifier ("main");
766 /* Given a record type RECORD_TYPE and a chain of FIELD_DECL nodes FIELDLIST,
767 finish constructing the record or union type. If REP_LEVEL is zero, this
768 record has no representation clause and so will be entirely laid out here.
769 If REP_LEVEL is one, this record has a representation clause and has been
770 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
771 this record is derived from a parent record and thus inherits its layout;
772 only make a pass on the fields to finalize them. If DO_NOT_FINALIZE is
773 true, the record type is expected to be modified afterwards so it will
774 not be sent to the back-end for finalization. */
777 finish_record_type (tree record_type, tree fieldlist, int rep_level,
778 bool do_not_finalize)
780 enum tree_code code = TREE_CODE (record_type);
781 tree name = TYPE_NAME (record_type);
782 tree ada_size = bitsize_zero_node;
783 tree size = bitsize_zero_node;
784 bool had_size = TYPE_SIZE (record_type) != 0;
785 bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
786 bool had_align = TYPE_ALIGN (record_type) != 0;
789 if (name && TREE_CODE (name) == TYPE_DECL)
790 name = DECL_NAME (name);
792 TYPE_FIELDS (record_type) = fieldlist;
793 TYPE_STUB_DECL (record_type) = build_decl (TYPE_DECL, name, record_type);
795 /* We don't need both the typedef name and the record name output in
796 the debugging information, since they are the same. */
797 DECL_ARTIFICIAL (TYPE_STUB_DECL (record_type)) = 1;
799 /* Globally initialize the record first. If this is a rep'ed record,
800 that just means some initializations; otherwise, layout the record. */
803 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
804 TYPE_MODE (record_type) = BLKmode;
807 TYPE_SIZE_UNIT (record_type) = size_zero_node;
809 TYPE_SIZE (record_type) = bitsize_zero_node;
811 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
812 out just like a UNION_TYPE, since the size will be fixed. */
813 else if (code == QUAL_UNION_TYPE)
818 /* Ensure there isn't a size already set. There can be in an error
819 case where there is a rep clause but all fields have errors and
820 no longer have a position. */
821 TYPE_SIZE (record_type) = 0;
822 layout_type (record_type);
825 /* At this point, the position and size of each field is known. It was
826 either set before entry by a rep clause, or by laying out the type above.
828 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
829 to compute the Ada size; the GCC size and alignment (for rep'ed records
830 that are not padding types); and the mode (for rep'ed records). We also
831 clear the DECL_BIT_FIELD indication for the cases we know have not been
832 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
834 if (code == QUAL_UNION_TYPE)
835 fieldlist = nreverse (fieldlist);
837 for (field = fieldlist; field; field = TREE_CHAIN (field))
839 tree type = TREE_TYPE (field);
840 tree pos = bit_position (field);
841 tree this_size = DECL_SIZE (field);
844 if ((TREE_CODE (type) == RECORD_TYPE
845 || TREE_CODE (type) == UNION_TYPE
846 || TREE_CODE (type) == QUAL_UNION_TYPE)
847 && !TYPE_IS_FAT_POINTER_P (type)
848 && !TYPE_CONTAINS_TEMPLATE_P (type)
849 && TYPE_ADA_SIZE (type))
850 this_ada_size = TYPE_ADA_SIZE (type);
852 this_ada_size = this_size;
854 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
855 if (DECL_BIT_FIELD (field)
856 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
858 unsigned int align = TYPE_ALIGN (type);
860 /* In the general case, type alignment is required. */
861 if (value_factor_p (pos, align))
863 /* The enclosing record type must be sufficiently aligned.
864 Otherwise, if no alignment was specified for it and it
865 has been laid out already, bump its alignment to the
866 desired one if this is compatible with its size. */
867 if (TYPE_ALIGN (record_type) >= align)
869 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
870 DECL_BIT_FIELD (field) = 0;
874 && value_factor_p (TYPE_SIZE (record_type), align))
876 TYPE_ALIGN (record_type) = align;
877 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
878 DECL_BIT_FIELD (field) = 0;
882 /* In the non-strict alignment case, only byte alignment is. */
883 if (!STRICT_ALIGNMENT
884 && DECL_BIT_FIELD (field)
885 && value_factor_p (pos, BITS_PER_UNIT))
886 DECL_BIT_FIELD (field) = 0;
889 /* If we still have DECL_BIT_FIELD set at this point, we know the field
890 is technically not addressable. Except that it can actually be
891 addressed if the field is BLKmode and happens to be properly
893 DECL_NONADDRESSABLE_P (field)
894 |= DECL_BIT_FIELD (field) && DECL_MODE (field) != BLKmode;
896 /* A type must be as aligned as its most aligned field that is not
897 a bit-field. But this is already enforced by layout_type. */
898 if (rep_level > 0 && !DECL_BIT_FIELD (field))
899 TYPE_ALIGN (record_type)
900 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
905 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
906 size = size_binop (MAX_EXPR, size, this_size);
909 case QUAL_UNION_TYPE:
911 = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
912 this_ada_size, ada_size);
913 size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
918 /* Since we know here that all fields are sorted in order of
919 increasing bit position, the size of the record is one
920 higher than the ending bit of the last field processed
921 unless we have a rep clause, since in that case we might
922 have a field outside a QUAL_UNION_TYPE that has a higher ending
923 position. So use a MAX in that case. Also, if this field is a
924 QUAL_UNION_TYPE, we need to take into account the previous size in
925 the case of empty variants. */
927 = merge_sizes (ada_size, pos, this_ada_size,
928 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
930 = merge_sizes (size, pos, this_size,
931 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
939 if (code == QUAL_UNION_TYPE)
940 nreverse (fieldlist);
944 /* If this is a padding record, we never want to make the size smaller
945 than what was specified in it, if any. */
946 if (TREE_CODE (record_type) == RECORD_TYPE
947 && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
948 size = TYPE_SIZE (record_type);
950 /* Now set any of the values we've just computed that apply. */
951 if (!TYPE_IS_FAT_POINTER_P (record_type)
952 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
953 SET_TYPE_ADA_SIZE (record_type, ada_size);
957 tree size_unit = had_size_unit
958 ? TYPE_SIZE_UNIT (record_type)
960 size_binop (CEIL_DIV_EXPR, size,
962 unsigned int align = TYPE_ALIGN (record_type);
964 TYPE_SIZE (record_type) = variable_size (round_up (size, align));
965 TYPE_SIZE_UNIT (record_type)
966 = variable_size (round_up (size_unit, align / BITS_PER_UNIT));
968 compute_record_mode (record_type);
972 if (!do_not_finalize)
973 rest_of_record_type_compilation (record_type);
976 /* Wrap up compilation of RECORD_TYPE, i.e. most notably output all
977 the debug information associated with it. It need not be invoked
978 directly in most cases since finish_record_type takes care of doing
979 so, unless explicitly requested not to through DO_NOT_FINALIZE. */
982 rest_of_record_type_compilation (tree record_type)
984 tree fieldlist = TYPE_FIELDS (record_type);
986 enum tree_code code = TREE_CODE (record_type);
987 bool var_size = false;
989 for (field = fieldlist; field; field = TREE_CHAIN (field))
991 /* We need to make an XVE/XVU record if any field has variable size,
992 whether or not the record does. For example, if we have a union,
993 it may be that all fields, rounded up to the alignment, have the
994 same size, in which case we'll use that size. But the debug
995 output routines (except Dwarf2) won't be able to output the fields,
996 so we need to make the special record. */
997 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
998 /* If a field has a non-constant qualifier, the record will have
999 variable size too. */
1000 || (code == QUAL_UNION_TYPE
1001 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
1008 /* If this record is of variable size, rename it so that the
1009 debugger knows it is and make a new, parallel, record
1010 that tells the debugger how the record is laid out. See
1011 exp_dbug.ads. But don't do this for records that are padding
1012 since they confuse GDB. */
1014 && !(TREE_CODE (record_type) == RECORD_TYPE
1015 && TYPE_IS_PADDING_P (record_type)))
1017 tree new_record_type
1018 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
1019 ? UNION_TYPE : TREE_CODE (record_type));
1020 tree orig_name = TYPE_NAME (record_type);
1022 = (TREE_CODE (orig_name) == TYPE_DECL ? DECL_NAME (orig_name)
1025 = concat_id_with_name (orig_id,
1026 TREE_CODE (record_type) == QUAL_UNION_TYPE
1028 tree last_pos = bitsize_zero_node;
1030 tree prev_old_field = 0;
1032 TYPE_NAME (new_record_type) = new_id;
1033 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
1034 TYPE_STUB_DECL (new_record_type)
1035 = build_decl (TYPE_DECL, new_id, new_record_type);
1036 DECL_ARTIFICIAL (TYPE_STUB_DECL (new_record_type)) = 1;
1037 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
1038 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
1039 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
1040 TYPE_SIZE_UNIT (new_record_type)
1041 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
1043 /* Now scan all the fields, replacing each field with a new
1044 field corresponding to the new encoding. */
1045 for (old_field = TYPE_FIELDS (record_type); old_field;
1046 old_field = TREE_CHAIN (old_field))
1048 tree field_type = TREE_TYPE (old_field);
1049 tree field_name = DECL_NAME (old_field);
1051 tree curpos = bit_position (old_field);
1053 unsigned int align = 0;
1056 /* See how the position was modified from the last position.
1058 There are two basic cases we support: a value was added
1059 to the last position or the last position was rounded to
1060 a boundary and they something was added. Check for the
1061 first case first. If not, see if there is any evidence
1062 of rounding. If so, round the last position and try
1065 If this is a union, the position can be taken as zero. */
1067 if (TREE_CODE (new_record_type) == UNION_TYPE)
1068 pos = bitsize_zero_node, align = 0;
1070 pos = compute_related_constant (curpos, last_pos);
1072 if (!pos && TREE_CODE (curpos) == MULT_EXPR
1073 && host_integerp (TREE_OPERAND (curpos, 1), 1))
1075 tree offset = TREE_OPERAND (curpos, 0);
1076 align = tree_low_cst (TREE_OPERAND (curpos, 1), 1);
1078 /* Strip off any conversions. */
1079 while (TREE_CODE (offset) == NON_LVALUE_EXPR
1080 || TREE_CODE (offset) == NOP_EXPR
1081 || TREE_CODE (offset) == CONVERT_EXPR)
1082 offset = TREE_OPERAND (offset, 0);
1084 /* An offset which is a bitwise AND with a negative power of 2
1085 means an alignment corresponding to this power of 2. */
1086 if (TREE_CODE (offset) == BIT_AND_EXPR
1087 && host_integerp (TREE_OPERAND (offset, 1), 0)
1088 && tree_int_cst_sgn (TREE_OPERAND (offset, 1)) < 0)
1091 = - tree_low_cst (TREE_OPERAND (offset, 1), 0);
1092 if (exact_log2 (pow) > 0)
1096 pos = compute_related_constant (curpos,
1097 round_up (last_pos, align));
1099 else if (!pos && TREE_CODE (curpos) == PLUS_EXPR
1100 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
1101 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
1102 && host_integerp (TREE_OPERAND
1103 (TREE_OPERAND (curpos, 0), 1),
1108 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
1109 pos = compute_related_constant (curpos,
1110 round_up (last_pos, align));
1112 else if (potential_alignment_gap (prev_old_field, old_field,
1115 align = TYPE_ALIGN (field_type);
1116 pos = compute_related_constant (curpos,
1117 round_up (last_pos, align));
1120 /* If we can't compute a position, set it to zero.
1122 ??? We really should abort here, but it's too much work
1123 to get this correct for all cases. */
1126 pos = bitsize_zero_node;
1128 /* See if this type is variable-sized and make a pointer type
1129 and indicate the indirection if so. Beware that the debug
1130 back-end may adjust the position computed above according
1131 to the alignment of the field type, i.e. the pointer type
1132 in this case, if we don't preventively counter that. */
1133 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
1135 field_type = build_pointer_type (field_type);
1136 if (align != 0 && TYPE_ALIGN (field_type) > align)
1138 field_type = copy_node (field_type);
1139 TYPE_ALIGN (field_type) = align;
1144 /* Make a new field name, if necessary. */
1145 if (var || align != 0)
1150 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
1151 align / BITS_PER_UNIT);
1153 strcpy (suffix, "XVL");
1155 field_name = concat_id_with_name (field_name, suffix);
1158 new_field = create_field_decl (field_name, field_type,
1160 DECL_SIZE (old_field), pos, 0);
1161 TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
1162 TYPE_FIELDS (new_record_type) = new_field;
1164 /* If old_field is a QUAL_UNION_TYPE, take its size as being
1165 zero. The only time it's not the last field of the record
1166 is when there are other components at fixed positions after
1167 it (meaning there was a rep clause for every field) and we
1168 want to be able to encode them. */
1169 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
1170 (TREE_CODE (TREE_TYPE (old_field))
1173 : DECL_SIZE (old_field));
1174 prev_old_field = old_field;
1177 TYPE_FIELDS (new_record_type)
1178 = nreverse (TYPE_FIELDS (new_record_type));
1180 rest_of_type_decl_compilation (TYPE_STUB_DECL (new_record_type));
1183 rest_of_type_decl_compilation (TYPE_STUB_DECL (record_type));
1186 /* Utility function of above to merge LAST_SIZE, the previous size of a record
1187 with FIRST_BIT and SIZE that describe a field. SPECIAL is nonzero
1188 if this represents a QUAL_UNION_TYPE in which case we must look for
1189 COND_EXPRs and replace a value of zero with the old size. If HAS_REP
1190 is nonzero, we must take the MAX of the end position of this field
1191 with LAST_SIZE. In all other cases, we use FIRST_BIT plus SIZE.
1193 We return an expression for the size. */
1196 merge_sizes (tree last_size, tree first_bit, tree size, bool special,
1199 tree type = TREE_TYPE (last_size);
1202 if (!special || TREE_CODE (size) != COND_EXPR)
1204 new = size_binop (PLUS_EXPR, first_bit, size);
1206 new = size_binop (MAX_EXPR, last_size, new);
1210 new = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
1211 integer_zerop (TREE_OPERAND (size, 1))
1212 ? last_size : merge_sizes (last_size, first_bit,
1213 TREE_OPERAND (size, 1),
1215 integer_zerop (TREE_OPERAND (size, 2))
1216 ? last_size : merge_sizes (last_size, first_bit,
1217 TREE_OPERAND (size, 2),
1220 /* We don't need any NON_VALUE_EXPRs and they can confuse us (especially
1221 when fed through substitute_in_expr) into thinking that a constant
1222 size is not constant. */
1223 while (TREE_CODE (new) == NON_LVALUE_EXPR)
1224 new = TREE_OPERAND (new, 0);
1229 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1230 related by the addition of a constant. Return that constant if so. */
1233 compute_related_constant (tree op0, tree op1)
1235 tree op0_var, op1_var;
1236 tree op0_con = split_plus (op0, &op0_var);
1237 tree op1_con = split_plus (op1, &op1_var);
1238 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1240 if (operand_equal_p (op0_var, op1_var, 0))
1242 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1248 /* Utility function of above to split a tree OP which may be a sum, into a
1249 constant part, which is returned, and a variable part, which is stored
1250 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1254 split_plus (tree in, tree *pvar)
1256 /* Strip NOPS in order to ease the tree traversal and maximize the
1257 potential for constant or plus/minus discovery. We need to be careful
1258 to always return and set *pvar to bitsizetype trees, but it's worth
1262 *pvar = convert (bitsizetype, in);
1264 if (TREE_CODE (in) == INTEGER_CST)
1266 *pvar = bitsize_zero_node;
1267 return convert (bitsizetype, in);
1269 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1271 tree lhs_var, rhs_var;
1272 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1273 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
1275 if (lhs_var == TREE_OPERAND (in, 0)
1276 && rhs_var == TREE_OPERAND (in, 1))
1277 return bitsize_zero_node;
1279 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1280 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
1283 return bitsize_zero_node;
1286 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1287 subprogram. If it is void_type_node, then we are dealing with a procedure,
1288 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1289 PARM_DECL nodes that are the subprogram arguments. CICO_LIST is the
1290 copy-in/copy-out list to be stored into TYPE_CICO_LIST.
1291 RETURNS_UNCONSTRAINED is true if the function returns an unconstrained
1292 object. RETURNS_BY_REF is true if the function returns by reference.
1293 RETURNS_BY_TARGET_PTR is true if the function is to be passed (as its
1294 first parameter) the address of the place to copy its result. */
1297 create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
1298 bool returns_unconstrained, bool returns_by_ref,
1299 bool returns_by_target_ptr)
1301 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1302 the subprogram formal parameters. This list is generated by traversing the
1303 input list of PARM_DECL nodes. */
1304 tree param_type_list = NULL;
1308 for (param_decl = param_decl_list; param_decl;
1309 param_decl = TREE_CHAIN (param_decl))
1310 param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
1313 /* The list of the function parameter types has to be terminated by the void
1314 type to signal to the back-end that we are not dealing with a variable
1315 parameter subprogram, but that the subprogram has a fixed number of
1317 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1319 /* The list of argument types has been created in reverse
1321 param_type_list = nreverse (param_type_list);
1323 type = build_function_type (return_type, param_type_list);
1325 /* TYPE may have been shared since GCC hashes types. If it has a CICO_LIST
1326 or the new type should, make a copy of TYPE. Likewise for
1327 RETURNS_UNCONSTRAINED and RETURNS_BY_REF. */
1328 if (TYPE_CI_CO_LIST (type) || cico_list
1329 || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
1330 || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref
1331 || TYPE_RETURNS_BY_TARGET_PTR_P (type) != returns_by_target_ptr)
1332 type = copy_type (type);
1334 TYPE_CI_CO_LIST (type) = cico_list;
1335 TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
1336 TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
1337 TYPE_RETURNS_BY_TARGET_PTR_P (type) = returns_by_target_ptr;
1341 /* Return a copy of TYPE but safe to modify in any way. */
1344 copy_type (tree type)
1346 tree new = copy_node (type);
1348 /* copy_node clears this field instead of copying it, because it is
1349 aliased with TREE_CHAIN. */
1350 TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type);
1352 TYPE_POINTER_TO (new) = 0;
1353 TYPE_REFERENCE_TO (new) = 0;
1354 TYPE_MAIN_VARIANT (new) = new;
1355 TYPE_NEXT_VARIANT (new) = 0;
1360 /* Return an INTEGER_TYPE of SIZETYPE with range MIN to MAX and whose
1361 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position of
1365 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
1367 /* First build a type for the desired range. */
1368 tree type = build_index_2_type (min, max);
1370 /* If this type has the TYPE_INDEX_TYPE we want, return it. Otherwise, if it
1371 doesn't have TYPE_INDEX_TYPE set, set it to INDEX. If TYPE_INDEX_TYPE
1372 is set, but not to INDEX, make a copy of this type with the requested
1373 index type. Note that we have no way of sharing these types, but that's
1374 only a small hole. */
1375 if (TYPE_INDEX_TYPE (type) == index)
1377 else if (TYPE_INDEX_TYPE (type))
1378 type = copy_type (type);
1380 SET_TYPE_INDEX_TYPE (type, index);
1381 create_type_decl (NULL_TREE, type, NULL, true, false, gnat_node);
1385 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type (a character
1386 string) and TYPE is a ..._TYPE node giving its data type.
1387 ARTIFICIAL_P is true if this is a declaration that was generated
1388 by the compiler. DEBUG_INFO_P is true if we need to write debugging
1389 information about this type. GNAT_NODE is used for the position of
1393 create_type_decl (tree type_name, tree type, struct attrib *attr_list,
1394 bool artificial_p, bool debug_info_p, Node_Id gnat_node)
1396 tree type_decl = build_decl (TYPE_DECL, type_name, type);
1397 enum tree_code code = TREE_CODE (type);
1399 DECL_ARTIFICIAL (type_decl) = artificial_p;
1401 if (!TYPE_IS_DUMMY_P (type))
1402 gnat_pushdecl (type_decl, gnat_node);
1404 process_attributes (type_decl, attr_list);
1406 /* Pass type declaration information to the debugger unless this is an
1407 UNCONSTRAINED_ARRAY_TYPE, which the debugger does not support,
1408 and ENUMERAL_TYPE or RECORD_TYPE which is handled separately, or
1409 type for which debugging information was not requested. */
1410 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
1411 DECL_IGNORED_P (type_decl) = 1;
1412 else if (code != ENUMERAL_TYPE
1413 && (code != RECORD_TYPE || TYPE_IS_FAT_POINTER_P (type))
1414 && !((code == POINTER_TYPE || code == REFERENCE_TYPE)
1415 && TYPE_IS_DUMMY_P (TREE_TYPE (type))))
1416 rest_of_type_decl_compilation (type_decl);
1421 /* Helper for create_var_decl and create_true_var_decl. Returns a GCC VAR_DECL
1424 VAR_NAME gives the name of the variable. ASM_NAME is its assembler name
1425 (if provided). TYPE is its data type (a GCC ..._TYPE node). VAR_INIT is
1426 the GCC tree for an optional initial expression; NULL_TREE if none.
1428 CONST_FLAG is true if this variable is constant, in which case we might
1429 return a CONST_DECL node unless CONST_DECL_ALLOWED_FLAG is false.
1431 PUBLIC_FLAG is true if this definition is to be made visible outside of
1432 the current compilation unit. This flag should be set when processing the
1433 variable definitions in a package specification. EXTERN_FLAG is nonzero
1434 when processing an external variable declaration (as opposed to a
1435 definition: no storage is to be allocated for the variable here).
1437 STATIC_FLAG is only relevant when not at top level. In that case
1438 it indicates whether to always allocate storage to the variable.
1440 GNAT_NODE is used for the position of the decl. */
1443 create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
1444 bool const_flag, bool const_decl_allowed_flag,
1445 bool public_flag, bool extern_flag, bool static_flag,
1446 struct attrib *attr_list, Node_Id gnat_node)
1450 && TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
1451 && (global_bindings_p () || static_flag
1452 ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init)) != 0
1453 : TREE_CONSTANT (var_init)));
1455 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
1456 case the initializer may be used in-lieu of the DECL node (as done in
1457 Identifier_to_gnu). This is useful to prevent the need of elaboration
1458 code when an identifier for which such a decl is made is in turn used as
1459 an initializer. We used to rely on CONST vs VAR_DECL for this purpose,
1460 but extra constraints apply to this choice (see below) and are not
1461 relevant to the distinction we wish to make. */
1462 bool constant_p = const_flag && init_const;
1464 /* The actual DECL node. CONST_DECL was initially intended for enumerals
1465 and may be used for scalars in general but not for aggregates. */
1467 = build_decl ((constant_p && const_decl_allowed_flag
1468 && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
1471 /* If this is external, throw away any initializations (they will be done
1472 elsewhere) unless this is a constant for which we would like to remain
1473 able to get the initializer. If we are defining a global here, leave a
1474 constant initialization and save any variable elaborations for the
1475 elaboration routine. If we are just annotating types, throw away the
1476 initialization if it isn't a constant. */
1477 if ((extern_flag && !constant_p)
1478 || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
1479 var_init = NULL_TREE;
1481 /* At the global level, an initializer requiring code to be generated
1482 produces elaboration statements. Check that such statements are allowed,
1483 that is, not violating a No_Elaboration_Code restriction. */
1484 if (global_bindings_p () && var_init != 0 && ! init_const)
1485 Check_Elaboration_Code_Allowed (gnat_node);
1487 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
1488 try to fiddle with DECL_COMMON. However, on platforms that don't
1489 support global BSS sections, uninitialized global variables would
1490 go in DATA instead, thus increasing the size of the executable. */
1492 && TREE_CODE (var_decl) == VAR_DECL
1493 && !have_global_bss_p ())
1494 DECL_COMMON (var_decl) = 1;
1495 DECL_INITIAL (var_decl) = var_init;
1496 TREE_READONLY (var_decl) = const_flag;
1497 DECL_EXTERNAL (var_decl) = extern_flag;
1498 TREE_PUBLIC (var_decl) = public_flag || extern_flag;
1499 TREE_CONSTANT (var_decl) = constant_p;
1500 TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
1501 = TYPE_VOLATILE (type);
1503 /* If it's public and not external, always allocate storage for it.
1504 At the global binding level we need to allocate static storage for the
1505 variable if and only if it's not external. If we are not at the top level
1506 we allocate automatic storage unless requested not to. */
1507 TREE_STATIC (var_decl)
1508 = public_flag || (global_bindings_p () ? !extern_flag : static_flag);
1510 if (asm_name && VAR_OR_FUNCTION_DECL_P (var_decl))
1511 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
1513 process_attributes (var_decl, attr_list);
1515 /* Add this decl to the current binding level. */
1516 gnat_pushdecl (var_decl, gnat_node);
1518 if (TREE_SIDE_EFFECTS (var_decl))
1519 TREE_ADDRESSABLE (var_decl) = 1;
1521 if (TREE_CODE (var_decl) != CONST_DECL)
1523 if (global_bindings_p ())
1524 rest_of_decl_compilation (var_decl, true, 0);
1527 expand_decl (var_decl);
1532 /* Wrapper around create_var_decl_1 for cases where we don't care whether
1533 a VAR or a CONST decl node is created. */
1536 create_var_decl (tree var_name, tree asm_name, tree type, tree var_init,
1537 bool const_flag, bool public_flag, bool extern_flag,
1538 bool static_flag, struct attrib *attr_list,
1541 return create_var_decl_1 (var_name, asm_name, type, var_init,
1543 public_flag, extern_flag, static_flag,
1544 attr_list, gnat_node);
1547 /* Wrapper around create_var_decl_1 for cases where a VAR_DECL node is
1548 required. The primary intent is for DECL_CONST_CORRESPONDING_VARs, which
1549 must be VAR_DECLs and on which we want TREE_READONLY set to have them
1550 possibly assigned to a readonly data section. */
1553 create_true_var_decl (tree var_name, tree asm_name, tree type, tree var_init,
1554 bool const_flag, bool public_flag, bool extern_flag,
1555 bool static_flag, struct attrib *attr_list,
1558 return create_var_decl_1 (var_name, asm_name, type, var_init,
1560 public_flag, extern_flag, static_flag,
1561 attr_list, gnat_node);
1564 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
1567 aggregate_type_contains_array_p (tree type)
1569 switch (TREE_CODE (type))
1573 case QUAL_UNION_TYPE:
1576 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1577 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
1578 && aggregate_type_contains_array_p (TREE_TYPE (field)))
1591 /* Returns a FIELD_DECL node. FIELD_NAME the field name, FIELD_TYPE is its
1592 type, and RECORD_TYPE is the type of the parent. PACKED is nonzero if
1593 this field is in a record type with a "pragma pack". If SIZE is nonzero
1594 it is the specified size for this field. If POS is nonzero, it is the bit
1595 position. If ADDRESSABLE is nonzero, it means we are allowed to take
1596 the address of this field for aliasing purposes. If it is negative, we
1597 should not make a bitfield, which is used by make_aligning_type. */
1600 create_field_decl (tree field_name, tree field_type, tree record_type,
1601 int packed, tree size, tree pos, int addressable)
1603 tree field_decl = build_decl (FIELD_DECL, field_name, field_type);
1605 DECL_CONTEXT (field_decl) = record_type;
1606 TREE_READONLY (field_decl) = TYPE_READONLY (field_type);
1608 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
1609 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
1610 Likewise for an aggregate without specified position that contains an
1611 array, because in this case slices of variable length of this array
1612 must be handled by GCC and variable-sized objects need to be aligned
1613 to at least a byte boundary. */
1614 if (packed && (TYPE_MODE (field_type) == BLKmode
1616 && AGGREGATE_TYPE_P (field_type)
1617 && aggregate_type_contains_array_p (field_type))))
1618 DECL_ALIGN (field_decl) = BITS_PER_UNIT;
1620 /* If a size is specified, use it. Otherwise, if the record type is packed
1621 compute a size to use, which may differ from the object's natural size.
1622 We always set a size in this case to trigger the checks for bitfield
1623 creation below, which is typically required when no position has been
1626 size = convert (bitsizetype, size);
1627 else if (packed == 1)
1629 size = rm_size (field_type);
1631 /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
1633 if (TREE_CODE (size) == INTEGER_CST
1634 && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
1635 size = round_up (size, BITS_PER_UNIT);
1638 /* If we may, according to ADDRESSABLE, make a bitfield if a size is
1639 specified for two reasons: first if the size differs from the natural
1640 size. Second, if the alignment is insufficient. There are a number of
1641 ways the latter can be true.
1643 We never make a bitfield if the type of the field has a nonconstant size,
1644 because no such entity requiring bitfield operations should reach here.
1646 We do *preventively* make a bitfield when there might be the need for it
1647 but we don't have all the necessary information to decide, as is the case
1648 of a field with no specified position in a packed record.
1650 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
1651 in layout_decl or finish_record_type to clear the bit_field indication if
1652 it is in fact not needed. */
1653 if (addressable >= 0
1655 && TREE_CODE (size) == INTEGER_CST
1656 && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
1657 && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
1658 || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
1660 || (TYPE_ALIGN (record_type) != 0
1661 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
1663 DECL_BIT_FIELD (field_decl) = 1;
1664 DECL_SIZE (field_decl) = size;
1665 if (!packed && !pos)
1666 DECL_ALIGN (field_decl)
1667 = (TYPE_ALIGN (record_type) != 0
1668 ? MIN (TYPE_ALIGN (record_type), TYPE_ALIGN (field_type))
1669 : TYPE_ALIGN (field_type));
1672 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
1674 /* Bump the alignment if need be, either for bitfield/packing purposes or
1675 to satisfy the type requirements if no such consideration applies. When
1676 we get the alignment from the type, indicate if this is from an explicit
1677 user request, which prevents stor-layout from lowering it later on. */
1680 = (DECL_BIT_FIELD (field_decl) ? 1
1681 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT : 0);
1683 if (bit_align > DECL_ALIGN (field_decl))
1684 DECL_ALIGN (field_decl) = bit_align;
1685 else if (!bit_align && TYPE_ALIGN (field_type) > DECL_ALIGN (field_decl))
1687 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
1688 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (field_type);
1694 /* We need to pass in the alignment the DECL is known to have.
1695 This is the lowest-order bit set in POS, but no more than
1696 the alignment of the record, if one is specified. Note
1697 that an alignment of 0 is taken as infinite. */
1698 unsigned int known_align;
1700 if (host_integerp (pos, 1))
1701 known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
1703 known_align = BITS_PER_UNIT;
1705 if (TYPE_ALIGN (record_type)
1706 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
1707 known_align = TYPE_ALIGN (record_type);
1709 layout_decl (field_decl, known_align);
1710 SET_DECL_OFFSET_ALIGN (field_decl,
1711 host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
1713 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
1714 &DECL_FIELD_BIT_OFFSET (field_decl),
1715 DECL_OFFSET_ALIGN (field_decl), pos);
1717 DECL_HAS_REP_P (field_decl) = 1;
1720 /* In addition to what our caller says, claim the field is addressable if we
1721 know that its type is not suitable.
1723 The field may also be "technically" nonaddressable, meaning that even if
1724 we attempt to take the field's address we will actually get the address
1725 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
1726 value we have at this point is not accurate enough, so we don't account
1727 for this here and let finish_record_type decide. */
1728 if (!type_for_nonaliased_component_p (field_type))
1731 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
1736 /* Returns a PARM_DECL node. PARAM_NAME is the name of the parameter,
1737 PARAM_TYPE is its type. READONLY is true if the parameter is
1738 readonly (either an In parameter or an address of a pass-by-ref
1742 create_param_decl (tree param_name, tree param_type, bool readonly)
1744 tree param_decl = build_decl (PARM_DECL, param_name, param_type);
1746 /* Honor targetm.calls.promote_prototypes(), as not doing so can
1747 lead to various ABI violations. */
1748 if (targetm.calls.promote_prototypes (param_type)
1749 && (TREE_CODE (param_type) == INTEGER_TYPE
1750 || TREE_CODE (param_type) == ENUMERAL_TYPE)
1751 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
1753 /* We have to be careful about biased types here. Make a subtype
1754 of integer_type_node with the proper biasing. */
1755 if (TREE_CODE (param_type) == INTEGER_TYPE
1756 && TYPE_BIASED_REPRESENTATION_P (param_type))
1759 = copy_type (build_range_type (integer_type_node,
1760 TYPE_MIN_VALUE (param_type),
1761 TYPE_MAX_VALUE (param_type)));
1763 TYPE_BIASED_REPRESENTATION_P (param_type) = 1;
1766 param_type = integer_type_node;
1769 DECL_ARG_TYPE (param_decl) = param_type;
1770 TREE_READONLY (param_decl) = readonly;
1774 /* Given a DECL and ATTR_LIST, process the listed attributes. */
1777 process_attributes (tree decl, struct attrib *attr_list)
1779 for (; attr_list; attr_list = attr_list->next)
1780 switch (attr_list->type)
1782 case ATTR_MACHINE_ATTRIBUTE:
1783 decl_attributes (&decl, tree_cons (attr_list->name, attr_list->args,
1785 ATTR_FLAG_TYPE_IN_PLACE);
1788 case ATTR_LINK_ALIAS:
1789 if (! DECL_EXTERNAL (decl))
1791 TREE_STATIC (decl) = 1;
1792 assemble_alias (decl, attr_list->name);
1796 case ATTR_WEAK_EXTERNAL:
1798 declare_weak (decl);
1800 post_error ("?weak declarations not supported on this target",
1801 attr_list->error_point);
1804 case ATTR_LINK_SECTION:
1805 if (targetm.have_named_sections)
1807 DECL_SECTION_NAME (decl)
1808 = build_string (IDENTIFIER_LENGTH (attr_list->name),
1809 IDENTIFIER_POINTER (attr_list->name));
1810 DECL_COMMON (decl) = 0;
1813 post_error ("?section attributes are not supported for this target",
1814 attr_list->error_point);
1817 case ATTR_LINK_CONSTRUCTOR:
1818 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1819 TREE_USED (decl) = 1;
1822 case ATTR_LINK_DESTRUCTOR:
1823 DECL_STATIC_DESTRUCTOR (decl) = 1;
1824 TREE_USED (decl) = 1;
1829 /* Record a global renaming pointer. */
1832 record_global_renaming_pointer (tree decl)
1834 gcc_assert (DECL_RENAMED_OBJECT (decl));
1835 VEC_safe_push (tree, gc, global_renaming_pointers, decl);
1838 /* Invalidate the global renaming pointers. */
1841 invalidate_global_renaming_pointers (void)
1846 for (i = 0; VEC_iterate(tree, global_renaming_pointers, i, iter); i++)
1847 SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);
1849 VEC_free (tree, gc, global_renaming_pointers);
1852 /* Return true if VALUE is a known to be a multiple of FACTOR, which must be
1856 value_factor_p (tree value, HOST_WIDE_INT factor)
1858 if (host_integerp (value, 1))
1859 return tree_low_cst (value, 1) % factor == 0;
1861 if (TREE_CODE (value) == MULT_EXPR)
1862 return (value_factor_p (TREE_OPERAND (value, 0), factor)
1863 || value_factor_p (TREE_OPERAND (value, 1), factor));
1868 /* Given 2 consecutive field decls PREV_FIELD and CURR_FIELD, return true
1869 unless we can prove these 2 fields are laid out in such a way that no gap
1870 exist between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
1871 is the distance in bits between the end of PREV_FIELD and the starting
1872 position of CURR_FIELD. It is ignored if null. */
1875 potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
1877 /* If this is the first field of the record, there cannot be any gap */
1881 /* If the previous field is a union type, then return False: The only
1882 time when such a field is not the last field of the record is when
1883 there are other components at fixed positions after it (meaning there
1884 was a rep clause for every field), in which case we don't want the
1885 alignment constraint to override them. */
1886 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
1889 /* If the distance between the end of prev_field and the beginning of
1890 curr_field is constant, then there is a gap if the value of this
1891 constant is not null. */
1892 if (offset && host_integerp (offset, 1))
1893 return !integer_zerop (offset);
1895 /* If the size and position of the previous field are constant,
1896 then check the sum of this size and position. There will be a gap
1897 iff it is not multiple of the current field alignment. */
1898 if (host_integerp (DECL_SIZE (prev_field), 1)
1899 && host_integerp (bit_position (prev_field), 1))
1900 return ((tree_low_cst (bit_position (prev_field), 1)
1901 + tree_low_cst (DECL_SIZE (prev_field), 1))
1902 % DECL_ALIGN (curr_field) != 0);
1904 /* If both the position and size of the previous field are multiples
1905 of the current field alignment, there cannot be any gap. */
1906 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
1907 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
1910 /* Fallback, return that there may be a potential gap */
1914 /* Returns a LABEL_DECL node for LABEL_NAME. */
1917 create_label_decl (tree label_name)
1919 tree label_decl = build_decl (LABEL_DECL, label_name, void_type_node);
1921 DECL_CONTEXT (label_decl) = current_function_decl;
1922 DECL_MODE (label_decl) = VOIDmode;
1923 DECL_SOURCE_LOCATION (label_decl) = input_location;
1928 /* Returns a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
1929 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1930 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1931 PARM_DECL nodes chained through the TREE_CHAIN field).
1933 INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
1934 appropriate fields in the FUNCTION_DECL. GNAT_NODE gives the location. */
1937 create_subprog_decl (tree subprog_name, tree asm_name,
1938 tree subprog_type, tree param_decl_list, bool inline_flag,
1939 bool public_flag, bool extern_flag,
1940 struct attrib *attr_list, Node_Id gnat_node)
1942 tree return_type = TREE_TYPE (subprog_type);
1943 tree subprog_decl = build_decl (FUNCTION_DECL, subprog_name, subprog_type);
1945 /* If this is a function nested inside an inlined external function, it
1946 means we aren't going to compile the outer function unless it is
1947 actually inlined, so do the same for us. */
1948 if (current_function_decl && DECL_INLINE (current_function_decl)
1949 && DECL_EXTERNAL (current_function_decl))
1952 DECL_EXTERNAL (subprog_decl) = extern_flag;
1953 TREE_PUBLIC (subprog_decl) = public_flag;
1954 TREE_STATIC (subprog_decl) = 1;
1955 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
1956 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
1957 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
1958 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
1959 DECL_RESULT (subprog_decl) = build_decl (RESULT_DECL, 0, return_type);
1960 DECL_ARTIFICIAL (DECL_RESULT (subprog_decl)) = 1;
1961 DECL_IGNORED_P (DECL_RESULT (subprog_decl)) = 1;
1963 /* TREE_ADDRESSABLE is set on the result type to request the use of the
1964 target by-reference return mechanism. This is not supported all the
1965 way down to RTL expansion with GCC 4, which ICEs on temporary creation
1966 attempts with such a type and expects DECL_BY_REFERENCE to be set on
1967 the RESULT_DECL instead - see gnat_genericize for more details. */
1968 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (subprog_decl))))
1970 tree result_decl = DECL_RESULT (subprog_decl);
1972 TREE_ADDRESSABLE (TREE_TYPE (result_decl)) = 0;
1973 DECL_BY_REFERENCE (result_decl) = 1;
1977 DECL_DECLARED_INLINE_P (subprog_decl) = 1;
1980 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
1982 process_attributes (subprog_decl, attr_list);
1984 /* Add this decl to the current binding level. */
1985 gnat_pushdecl (subprog_decl, gnat_node);
1987 /* Output the assembler code and/or RTL for the declaration. */
1988 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
1990 return subprog_decl;
1993 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
1994 body. This routine needs to be invoked before processing the declarations
1995 appearing in the subprogram. */
1998 begin_subprog_body (tree subprog_decl)
2002 current_function_decl = subprog_decl;
2003 announce_function (subprog_decl);
2005 /* Enter a new binding level and show that all the parameters belong to
2008 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
2009 param_decl = TREE_CHAIN (param_decl))
2010 DECL_CONTEXT (param_decl) = subprog_decl;
2012 make_decl_rtl (subprog_decl);
2014 /* We handle pending sizes via the elaboration of types, so we don't need to
2015 save them. This causes them to be marked as part of the outer function
2016 and then discarded. */
2017 get_pending_sizes ();
2021 /* Helper for the genericization callback. Return a dereference of VAL
2022 if it is of a reference type. */
2025 convert_from_reference (tree val)
2027 tree value_type, ref;
2029 if (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE)
2032 value_type = TREE_TYPE (TREE_TYPE (val));
2033 ref = build1 (INDIRECT_REF, value_type, val);
2035 /* See if what we reference is CONST or VOLATILE, which requires
2036 looking into array types to get to the component type. */
2038 while (TREE_CODE (value_type) == ARRAY_TYPE)
2039 value_type = TREE_TYPE (value_type);
2042 = (TYPE_QUALS (value_type) & TYPE_QUAL_CONST);
2043 TREE_THIS_VOLATILE (ref)
2044 = (TYPE_QUALS (value_type) & TYPE_QUAL_VOLATILE);
2046 TREE_SIDE_EFFECTS (ref)
2047 = (TREE_THIS_VOLATILE (ref) || TREE_SIDE_EFFECTS (val));
2052 /* Helper for the genericization callback. Returns true if T denotes
2053 a RESULT_DECL with DECL_BY_REFERENCE set. */
2056 is_byref_result (tree t)
2058 return (TREE_CODE (t) == RESULT_DECL && DECL_BY_REFERENCE (t));
2062 /* Tree walking callback for gnat_genericize. Currently ...
2064 o Adjust references to the function's DECL_RESULT if it is marked
2065 DECL_BY_REFERENCE and so has had its type turned into a reference
2066 type at the end of the function compilation. */
2069 gnat_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
2071 /* This implementation is modeled after what the C++ front-end is
2072 doing, basis of the downstream passes behavior. */
2074 tree stmt = *stmt_p;
2075 struct pointer_set_t *p_set = (struct pointer_set_t*) data;
2077 /* If we have a direct mention of the result decl, dereference. */
2078 if (is_byref_result (stmt))
2080 *stmt_p = convert_from_reference (stmt);
2085 /* Otherwise, no need to walk the same tree twice. */
2086 if (pointer_set_contains (p_set, stmt))
2092 /* If we are taking the address of what now is a reference, just get the
2094 if (TREE_CODE (stmt) == ADDR_EXPR
2095 && is_byref_result (TREE_OPERAND (stmt, 0)))
2097 *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
2101 /* Don't dereference an by-reference RESULT_DECL inside a RETURN_EXPR. */
2102 else if (TREE_CODE (stmt) == RETURN_EXPR
2103 && TREE_OPERAND (stmt, 0)
2104 && is_byref_result (TREE_OPERAND (stmt, 0)))
2107 /* Don't look inside trees that cannot embed references of interest. */
2108 else if (IS_TYPE_OR_DECL_P (stmt))
2111 pointer_set_insert (p_set, *stmt_p);
2116 /* Perform lowering of Ada trees to GENERIC. In particular:
2118 o Turn a DECL_BY_REFERENCE RESULT_DECL into a real by-reference decl
2119 and adjust all the references to this decl accordingly. */
2122 gnat_genericize (tree fndecl)
2124 /* Prior to GCC 4, an explicit By_Reference result mechanism for a function
2125 was handled by simply setting TREE_ADDRESSABLE on the result type.
2126 Everything required to actually pass by invisible ref using the target
2127 mechanism (e.g. extra parameter) was handled at RTL expansion time.
2129 This doesn't work with GCC 4 any more for several reasons. First, the
2130 gimplification process might need the creation of temporaries of this
2131 type, and the gimplifier ICEs on such attempts. Second, the middle-end
2132 now relies on a different attribute for such cases (DECL_BY_REFERENCE on
2133 RESULT/PARM_DECLs), and expects the user invisible by-reference-ness to
2134 be explicitly accounted for by the front-end in the function body.
2136 We achieve the complete transformation in two steps:
2138 1/ create_subprog_decl performs early attribute tweaks: it clears
2139 TREE_ADDRESSABLE from the result type and sets DECL_BY_REFERENCE on
2140 the result decl. The former ensures that the bit isn't set in the GCC
2141 tree saved for the function, so prevents ICEs on temporary creation.
2142 The latter we use here to trigger the rest of the processing.
2144 2/ This function performs the type transformation on the result decl
2145 and adjusts all the references to this decl from the function body
2148 Clearing TREE_ADDRESSABLE from the type differs from the C++ front-end
2149 strategy, which escapes the gimplifier temporary creation issues by
2150 creating it's own temporaries using TARGET_EXPR nodes. Our way relies
2151 on simple specific support code in aggregate_value_p to look at the
2152 target function result decl explicitly. */
2154 struct pointer_set_t *p_set;
2155 tree decl_result = DECL_RESULT (fndecl);
2157 if (!DECL_BY_REFERENCE (decl_result))
2160 /* Make the DECL_RESULT explicitly by-reference and adjust all the
2161 occurrences in the function body using the common tree-walking facility.
2162 We want to see every occurrence of the result decl to adjust the
2163 referencing tree, so need to use our own pointer set to control which
2164 trees should be visited again or not. */
2166 p_set = pointer_set_create ();
2168 TREE_TYPE (decl_result) = build_reference_type (TREE_TYPE (decl_result));
2169 TREE_ADDRESSABLE (decl_result) = 0;
2170 relayout_decl (decl_result);
2172 walk_tree (&DECL_SAVED_TREE (fndecl), gnat_genericize_r, p_set, NULL);
2174 pointer_set_destroy (p_set);
2177 /* Finish the definition of the current subprogram and compile it all the way
2178 to assembler language output. BODY is the tree corresponding to
2182 end_subprog_body (tree body)
2184 tree fndecl = current_function_decl;
2186 /* Mark the BLOCK for this level as being for this function and pop the
2187 level. Since the vars in it are the parameters, clear them. */
2188 BLOCK_VARS (current_binding_level->block) = 0;
2189 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
2190 DECL_INITIAL (fndecl) = current_binding_level->block;
2193 /* Deal with inline. If declared inline or we should default to inline,
2194 set the flag in the decl. */
2195 DECL_INLINE (fndecl)
2196 = DECL_DECLARED_INLINE_P (fndecl) || flag_inline_trees == 2;
2198 /* We handle pending sizes via the elaboration of types, so we don't
2199 need to save them. */
2200 get_pending_sizes ();
2202 /* Mark the RESULT_DECL as being in this subprogram. */
2203 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
2205 DECL_SAVED_TREE (fndecl) = body;
2207 current_function_decl = DECL_CONTEXT (fndecl);
2210 /* We cannot track the location of errors past this point. */
2211 error_gnat_node = Empty;
2213 /* If we're only annotating types, don't actually compile this function. */
2214 if (type_annotate_only)
2217 /* Perform the required pre-gimplification transformations on the tree. */
2218 gnat_genericize (fndecl);
2220 /* We do different things for nested and non-nested functions.
2221 ??? This should be in cgraph. */
2222 if (!DECL_CONTEXT (fndecl))
2224 gnat_gimplify_function (fndecl);
2225 cgraph_finalize_function (fndecl, false);
2228 /* Register this function with cgraph just far enough to get it
2229 added to our parent's nested function list. */
2230 (void) cgraph_node (fndecl);
2233 /* Convert FNDECL's code to GIMPLE and handle any nested functions. */
2236 gnat_gimplify_function (tree fndecl)
2238 struct cgraph_node *cgn;
2240 dump_function (TDI_original, fndecl);
2241 gimplify_function_tree (fndecl);
2242 dump_function (TDI_generic, fndecl);
2244 /* Convert all nested functions to GIMPLE now. We do things in this order
2245 so that items like VLA sizes are expanded properly in the context of the
2246 correct function. */
2247 cgn = cgraph_node (fndecl);
2248 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
2249 gnat_gimplify_function (cgn->decl);
2254 gnat_builtin_function (tree decl)
2256 gnat_pushdecl (decl, Empty);
2260 /* Handle a "const" attribute; arguments as in
2261 struct attribute_spec.handler. */
2264 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
2265 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
2268 if (TREE_CODE (*node) == FUNCTION_DECL)
2269 TREE_READONLY (*node) = 1;
2271 *no_add_attrs = true;
2276 /* Handle a "nothrow" attribute; arguments as in
2277 struct attribute_spec.handler. */
2280 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
2281 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
2284 if (TREE_CODE (*node) == FUNCTION_DECL)
2285 TREE_NOTHROW (*node) = 1;
2287 *no_add_attrs = true;
2292 /* Return an integer type with the number of bits of precision given by
2293 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
2294 it is a signed type. */
2297 gnat_type_for_size (unsigned precision, int unsignedp)
2302 if (precision <= 2 * MAX_BITS_PER_WORD
2303 && signed_and_unsigned_types[precision][unsignedp])
2304 return signed_and_unsigned_types[precision][unsignedp];
2307 t = make_unsigned_type (precision);
2309 t = make_signed_type (precision);
2311 if (precision <= 2 * MAX_BITS_PER_WORD)
2312 signed_and_unsigned_types[precision][unsignedp] = t;
2316 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
2317 TYPE_NAME (t) = get_identifier (type_name);
2323 /* Likewise for floating-point types. */
2326 float_type_for_precision (int precision, enum machine_mode mode)
2331 if (float_types[(int) mode])
2332 return float_types[(int) mode];
2334 float_types[(int) mode] = t = make_node (REAL_TYPE);
2335 TYPE_PRECISION (t) = precision;
2338 gcc_assert (TYPE_MODE (t) == mode);
2341 sprintf (type_name, "FLOAT_%d", precision);
2342 TYPE_NAME (t) = get_identifier (type_name);
2348 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
2349 an unsigned type; otherwise a signed type is returned. */
2352 gnat_type_for_mode (enum machine_mode mode, int unsignedp)
2354 if (mode == BLKmode)
2356 else if (mode == VOIDmode)
2357 return void_type_node;
2358 else if (COMPLEX_MODE_P (mode))
2360 else if (SCALAR_FLOAT_MODE_P (mode))
2361 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
2362 else if (SCALAR_INT_MODE_P (mode))
2363 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2368 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
2371 gnat_unsigned_type (tree type_node)
2373 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
2375 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2377 type = copy_node (type);
2378 TREE_TYPE (type) = type_node;
2380 else if (TREE_TYPE (type_node)
2381 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2382 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2384 type = copy_node (type);
2385 TREE_TYPE (type) = TREE_TYPE (type_node);
2391 /* Return the signed version of a TYPE_NODE, a scalar type. */
2394 gnat_signed_type (tree type_node)
2396 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
2398 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2400 type = copy_node (type);
2401 TREE_TYPE (type) = type_node;
2403 else if (TREE_TYPE (type_node)
2404 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2405 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2407 type = copy_node (type);
2408 TREE_TYPE (type) = TREE_TYPE (type_node);
2414 /* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
2415 transparently converted to each other. */
2418 gnat_types_compatible_p (tree t1, tree t2)
2420 enum tree_code code;
2422 /* This is the default criterion. */
2423 if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
2426 /* We only check structural equivalence here. */
2427 if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
2430 /* Array types are also compatible if they are constrained and have
2431 the same component type and the same domain. */
2432 if (code == ARRAY_TYPE
2433 && TREE_TYPE (t1) == TREE_TYPE (t2)
2434 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
2435 TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
2436 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
2437 TYPE_MAX_VALUE (TYPE_DOMAIN (t2))))
2440 /* Padding record types are also compatible if they pad the same
2441 type and have the same constant size. */
2442 if (code == RECORD_TYPE
2443 && TYPE_IS_PADDING_P (t1) && TYPE_IS_PADDING_P (t2)
2444 && TREE_TYPE (TYPE_FIELDS (t1)) == TREE_TYPE (TYPE_FIELDS (t2))
2445 && tree_int_cst_equal (TYPE_SIZE (t1), TYPE_SIZE (t2)))
2451 /* EXP is an expression for the size of an object. If this size contains
2452 discriminant references, replace them with the maximum (if MAX_P) or
2453 minimum (if !MAX_P) possible value of the discriminant. */
2456 max_size (tree exp, bool max_p)
2458 enum tree_code code = TREE_CODE (exp);
2459 tree type = TREE_TYPE (exp);
2461 switch (TREE_CODE_CLASS (code))
2463 case tcc_declaration:
2468 if (code == CALL_EXPR)
2471 int i, n = call_expr_nargs (exp);
2474 argarray = (tree *) alloca (n * sizeof (tree));
2475 for (i = 0; i < n; i++)
2476 argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
2477 return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
2482 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
2483 modify. Otherwise, we treat it like a variable. */
2484 if (!CONTAINS_PLACEHOLDER_P (exp))
2487 type = TREE_TYPE (TREE_OPERAND (exp, 1));
2489 max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), true);
2491 case tcc_comparison:
2492 return max_p ? size_one_node : size_zero_node;
2496 case tcc_expression:
2497 switch (TREE_CODE_LENGTH (code))
2500 if (code == NON_LVALUE_EXPR)
2501 return max_size (TREE_OPERAND (exp, 0), max_p);
2504 fold_build1 (code, type,
2505 max_size (TREE_OPERAND (exp, 0),
2506 code == NEGATE_EXPR ? !max_p : max_p));
2509 if (code == COMPOUND_EXPR)
2510 return max_size (TREE_OPERAND (exp, 1), max_p);
2512 /* Calculate "(A ? B : C) - D" as "A ? B - D : C - D" which
2513 may provide a tighter bound on max_size. */
2514 if (code == MINUS_EXPR
2515 && TREE_CODE (TREE_OPERAND (exp, 0)) == COND_EXPR)
2517 tree lhs = fold_build2 (MINUS_EXPR, type,
2518 TREE_OPERAND (TREE_OPERAND (exp, 0), 1),
2519 TREE_OPERAND (exp, 1));
2520 tree rhs = fold_build2 (MINUS_EXPR, type,
2521 TREE_OPERAND (TREE_OPERAND (exp, 0), 2),
2522 TREE_OPERAND (exp, 1));
2523 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2524 max_size (lhs, max_p),
2525 max_size (rhs, max_p));
2529 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
2530 tree rhs = max_size (TREE_OPERAND (exp, 1),
2531 code == MINUS_EXPR ? !max_p : max_p);
2533 /* Special-case wanting the maximum value of a MIN_EXPR.
2534 In that case, if one side overflows, return the other.
2535 sizetype is signed, but we know sizes are non-negative.
2536 Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
2537 overflowing or the maximum possible value and the RHS
2541 && TREE_CODE (rhs) == INTEGER_CST
2542 && TREE_OVERFLOW (rhs))
2546 && TREE_CODE (lhs) == INTEGER_CST
2547 && TREE_OVERFLOW (lhs))
2549 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
2550 && ((TREE_CODE (lhs) == INTEGER_CST
2551 && TREE_OVERFLOW (lhs))
2552 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
2553 && !TREE_CONSTANT (rhs))
2556 return fold_build2 (code, type, lhs, rhs);
2560 if (code == SAVE_EXPR)
2562 else if (code == COND_EXPR)
2563 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2564 max_size (TREE_OPERAND (exp, 1), max_p),
2565 max_size (TREE_OPERAND (exp, 2), max_p));
2568 /* Other tree classes cannot happen. */
2576 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2577 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2578 Return a constructor for the template. */
2581 build_template (tree template_type, tree array_type, tree expr)
2583 tree template_elts = NULL_TREE;
2584 tree bound_list = NULL_TREE;
2587 while (TREE_CODE (array_type) == RECORD_TYPE
2588 && (TYPE_IS_PADDING_P (array_type)
2589 || TYPE_JUSTIFIED_MODULAR_P (array_type)))
2590 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
2592 if (TREE_CODE (array_type) == ARRAY_TYPE
2593 || (TREE_CODE (array_type) == INTEGER_TYPE
2594 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
2595 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
2597 /* First make the list for a CONSTRUCTOR for the template. Go down the
2598 field list of the template instead of the type chain because this
2599 array might be an Ada array of arrays and we can't tell where the
2600 nested arrays stop being the underlying object. */
2602 for (field = TYPE_FIELDS (template_type); field;
2604 ? (bound_list = TREE_CHAIN (bound_list))
2605 : (array_type = TREE_TYPE (array_type))),
2606 field = TREE_CHAIN (TREE_CHAIN (field)))
2608 tree bounds, min, max;
2610 /* If we have a bound list, get the bounds from there. Likewise
2611 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
2612 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
2613 This will give us a maximum range. */
2615 bounds = TREE_VALUE (bound_list);
2616 else if (TREE_CODE (array_type) == ARRAY_TYPE)
2617 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
2618 else if (expr && TREE_CODE (expr) == PARM_DECL
2619 && DECL_BY_COMPONENT_PTR_P (expr))
2620 bounds = TREE_TYPE (field);
2624 min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
2625 max = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MAX_VALUE (bounds));
2627 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
2628 substitute it from OBJECT. */
2629 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
2630 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
2632 template_elts = tree_cons (TREE_CHAIN (field), max,
2633 tree_cons (field, min, template_elts));
2636 return gnat_build_constructor (template_type, nreverse (template_elts));
2639 /* Build a VMS descriptor from a Mechanism_Type, which must specify
2640 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2641 in the type contains in its DECL_INITIAL the expression to use when
2642 a constructor is made for the type. GNAT_ENTITY is an entity used
2643 to print out an error message if the mechanism cannot be applied to
2644 an object of that type and also for the name. */
2647 build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2649 tree record_type = make_node (RECORD_TYPE);
2650 tree pointer32_type;
2651 tree field_list = 0;
2660 /* If TYPE is an unconstrained array, use the underlying array type. */
2661 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2662 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2664 /* If this is an array, compute the number of dimensions in the array,
2665 get the index types, and point to the inner type. */
2666 if (TREE_CODE (type) != ARRAY_TYPE)
2669 for (ndim = 1, inner_type = type;
2670 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2671 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2672 ndim++, inner_type = TREE_TYPE (inner_type))
2675 idx_arr = (tree *) alloca (ndim * sizeof (tree));
2677 if (mech != By_Descriptor_NCA
2678 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2679 for (i = ndim - 1, inner_type = type;
2681 i--, inner_type = TREE_TYPE (inner_type))
2682 idx_arr[i] = TYPE_DOMAIN (inner_type);
2684 for (i = 0, inner_type = type;
2686 i++, inner_type = TREE_TYPE (inner_type))
2687 idx_arr[i] = TYPE_DOMAIN (inner_type);
2689 /* Now get the DTYPE value. */
2690 switch (TREE_CODE (type))
2694 if (TYPE_VAX_FLOATING_POINT_P (type))
2695 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2708 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2711 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2714 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2717 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2720 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2723 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2729 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2733 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2734 && TYPE_VAX_FLOATING_POINT_P (type))
2735 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2747 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2758 /* Get the CLASS value. */
2761 case By_Descriptor_A:
2764 case By_Descriptor_NCA:
2767 case By_Descriptor_SB:
2771 case By_Descriptor_S:
2777 /* Make the type for a descriptor for VMS. The first four fields
2778 are the same for all types. */
2781 = chainon (field_list,
2782 make_descriptor_field
2783 ("LENGTH", gnat_type_for_size (16, 1), record_type,
2784 size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));
2786 field_list = chainon (field_list,
2787 make_descriptor_field ("DTYPE",
2788 gnat_type_for_size (8, 1),
2789 record_type, size_int (dtype)));
2790 field_list = chainon (field_list,
2791 make_descriptor_field ("CLASS",
2792 gnat_type_for_size (8, 1),
2793 record_type, size_int (class)));
2795 /* Of course this will crash at run-time if the address space is not
2796 within the low 32 bits, but there is nothing else we can do. */
2797 pointer32_type = build_pointer_type_for_mode (type, SImode, false);
2800 = chainon (field_list,
2801 make_descriptor_field
2802 ("POINTER", pointer32_type, record_type,
2803 build_unary_op (ADDR_EXPR,
2805 build0 (PLACEHOLDER_EXPR, type))));
2810 case By_Descriptor_S:
2813 case By_Descriptor_SB:
2815 = chainon (field_list,
2816 make_descriptor_field
2817 ("SB_L1", gnat_type_for_size (32, 1), record_type,
2818 TREE_CODE (type) == ARRAY_TYPE
2819 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2821 = chainon (field_list,
2822 make_descriptor_field
2823 ("SB_U1", gnat_type_for_size (32, 1), record_type,
2824 TREE_CODE (type) == ARRAY_TYPE
2825 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2828 case By_Descriptor_A:
2829 case By_Descriptor_NCA:
2830 field_list = chainon (field_list,
2831 make_descriptor_field ("SCALE",
2832 gnat_type_for_size (8, 1),
2836 field_list = chainon (field_list,
2837 make_descriptor_field ("DIGITS",
2838 gnat_type_for_size (8, 1),
2843 = chainon (field_list,
2844 make_descriptor_field
2845 ("AFLAGS", gnat_type_for_size (8, 1), record_type,
2846 size_int (mech == By_Descriptor_NCA
2848 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2849 : (TREE_CODE (type) == ARRAY_TYPE
2850 && TYPE_CONVENTION_FORTRAN_P (type)
2853 field_list = chainon (field_list,
2854 make_descriptor_field ("DIMCT",
2855 gnat_type_for_size (8, 1),
2859 field_list = chainon (field_list,
2860 make_descriptor_field ("ARSIZE",
2861 gnat_type_for_size (32, 1),
2863 size_in_bytes (type)));
2865 /* Now build a pointer to the 0,0,0... element. */
2866 tem = build0 (PLACEHOLDER_EXPR, type);
2867 for (i = 0, inner_type = type; i < ndim;
2868 i++, inner_type = TREE_TYPE (inner_type))
2869 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
2870 convert (TYPE_DOMAIN (inner_type), size_zero_node),
2871 NULL_TREE, NULL_TREE);
2874 = chainon (field_list,
2875 make_descriptor_field
2877 build_pointer_type_for_mode (inner_type, SImode, false),
2880 build_pointer_type_for_mode (inner_type, SImode,
2884 /* Next come the addressing coefficients. */
2885 tem = size_one_node;
2886 for (i = 0; i < ndim; i++)
2890 = size_binop (MULT_EXPR, tem,
2891 size_binop (PLUS_EXPR,
2892 size_binop (MINUS_EXPR,
2893 TYPE_MAX_VALUE (idx_arr[i]),
2894 TYPE_MIN_VALUE (idx_arr[i])),
2897 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
2898 fname[1] = '0' + i, fname[2] = 0;
2900 = chainon (field_list,
2901 make_descriptor_field (fname,
2902 gnat_type_for_size (32, 1),
2903 record_type, idx_length));
2905 if (mech == By_Descriptor_NCA)
2909 /* Finally here are the bounds. */
2910 for (i = 0; i < ndim; i++)
2914 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2916 = chainon (field_list,
2917 make_descriptor_field
2918 (fname, gnat_type_for_size (32, 1), record_type,
2919 TYPE_MIN_VALUE (idx_arr[i])));
2923 = chainon (field_list,
2924 make_descriptor_field
2925 (fname, gnat_type_for_size (32, 1), record_type,
2926 TYPE_MAX_VALUE (idx_arr[i])));
2931 post_error ("unsupported descriptor type for &", gnat_entity);
2934 finish_record_type (record_type, field_list, 0, true);
2935 create_type_decl (create_concat_name (gnat_entity, "DESC"), record_type,
2936 NULL, true, false, gnat_entity);
2941 /* Utility routine for above code to make a field. */
2944 make_descriptor_field (const char *name, tree type,
2945 tree rec_type, tree initial)
2948 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
2950 DECL_INITIAL (field) = initial;
2954 /* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
2955 pointer or fat pointer type. GNAT_SUBPROG is the subprogram to which
2956 the VMS descriptor is passed. */
2959 convert_vms_descriptor (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
2961 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
2962 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
2963 /* The CLASS field is the 3rd field in the descriptor. */
2964 tree class = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type)));
2965 /* The POINTER field is the 4th field in the descriptor. */
2966 tree pointer = TREE_CHAIN (class);
2968 /* Retrieve the value of the POINTER field. */
2970 = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);
2972 if (POINTER_TYPE_P (gnu_type))
2973 return convert (gnu_type, gnu_expr);
2975 else if (TYPE_FAT_POINTER_P (gnu_type))
2977 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
2978 tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
2979 tree template_type = TREE_TYPE (p_bounds_type);
2980 tree min_field = TYPE_FIELDS (template_type);
2981 tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
2982 tree template, template_addr, aflags, dimct, t, u;
2983 /* See the head comment of build_vms_descriptor. */
2984 int iclass = TREE_INT_CST_LOW (DECL_INITIAL (class));
2986 /* Convert POINTER to the type of the P_ARRAY field. */
2987 gnu_expr = convert (p_array_type, gnu_expr);
2991 case 1: /* Class S */
2992 case 15: /* Class SB */
2993 /* Build {1, LENGTH} template; LENGTH is the 1st field. */
2994 t = TYPE_FIELDS (desc_type);
2995 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
2996 t = tree_cons (min_field,
2997 convert (TREE_TYPE (min_field), integer_one_node),
2998 tree_cons (max_field,
2999 convert (TREE_TYPE (max_field), t),
3001 template = gnat_build_constructor (template_type, t);
3002 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);
3004 /* For class S, we are done. */
3008 /* Test that we really have a SB descriptor, like DEC Ada. */
3009 t = build3 (COMPONENT_REF, TREE_TYPE (class), desc, class, NULL);
3010 u = convert (TREE_TYPE (class), DECL_INITIAL (class));
3011 u = build_binary_op (EQ_EXPR, integer_type_node, t, u);
3012 /* If so, there is already a template in the descriptor and
3013 it is located right after the POINTER field. */
3014 t = TREE_CHAIN (pointer);
3015 template = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3016 /* Otherwise use the {1, LENGTH} template we build above. */
3017 template_addr = build3 (COND_EXPR, p_bounds_type, u,
3018 build_unary_op (ADDR_EXPR, p_bounds_type,
3023 case 4: /* Class A */
3024 /* The AFLAGS field is the 7th field in the descriptor. */
3025 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer)));
3026 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3027 /* The DIMCT field is the 8th field in the descriptor. */
3029 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3030 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
3031 or FL_COEFF or FL_BOUNDS not set. */
3032 u = build_int_cst (TREE_TYPE (aflags), 192);
3033 u = build_binary_op (TRUTH_OR_EXPR, integer_type_node,
3034 build_binary_op (NE_EXPR, integer_type_node,
3036 convert (TREE_TYPE (dimct),
3038 build_binary_op (NE_EXPR, integer_type_node,
3039 build2 (BIT_AND_EXPR,
3043 add_stmt (build3 (COND_EXPR, void_type_node, u,
3044 build_call_raise (CE_Length_Check_Failed, Empty,
3045 N_Raise_Constraint_Error),
3047 /* There is already a template in the descriptor and it is
3048 located at the start of block 3 (12th field). */
3049 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (t))));
3050 template = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3051 template_addr = build_unary_op (ADDR_EXPR, p_bounds_type, template);
3054 case 10: /* Class NCA */
3056 post_error ("unsupported descriptor type for &", gnat_subprog);
3057 template_addr = integer_zero_node;
3061 /* Build the fat pointer in the form of a constructor. */
3062 t = tree_cons (TYPE_FIELDS (gnu_type), gnu_expr,
3063 tree_cons (TREE_CHAIN (TYPE_FIELDS (gnu_type)),
3064 template_addr, NULL_TREE));
3065 return gnat_build_constructor (gnu_type, t);
3072 /* Build a stub for the subprogram specified by the GCC tree GNU_SUBPROG
3073 and the GNAT node GNAT_SUBPROG. */
3076 build_function_stub (tree gnu_subprog, Entity_Id gnat_subprog)
3078 tree gnu_subprog_type, gnu_subprog_addr, gnu_subprog_call;
3079 tree gnu_stub_param, gnu_param_list, gnu_arg_types, gnu_param;
3080 tree gnu_stub_decl = DECL_FUNCTION_STUB (gnu_subprog);
3083 gnu_subprog_type = TREE_TYPE (gnu_subprog);
3084 gnu_param_list = NULL_TREE;
3086 begin_subprog_body (gnu_stub_decl);
3089 start_stmt_group ();
3091 /* Loop over the parameters of the stub and translate any of them
3092 passed by descriptor into a by reference one. */
3093 for (gnu_stub_param = DECL_ARGUMENTS (gnu_stub_decl),
3094 gnu_arg_types = TYPE_ARG_TYPES (gnu_subprog_type);
3096 gnu_stub_param = TREE_CHAIN (gnu_stub_param),
3097 gnu_arg_types = TREE_CHAIN (gnu_arg_types))
3099 if (DECL_BY_DESCRIPTOR_P (gnu_stub_param))
3100 gnu_param = convert_vms_descriptor (TREE_VALUE (gnu_arg_types),
3101 gnu_stub_param, gnat_subprog);
3103 gnu_param = gnu_stub_param;
3105 gnu_param_list = tree_cons (NULL_TREE, gnu_param, gnu_param_list);
3108 gnu_body = end_stmt_group ();
3110 /* Invoke the internal subprogram. */
3111 gnu_subprog_addr = build1 (ADDR_EXPR, build_pointer_type (gnu_subprog_type),
3113 gnu_subprog_call = build_call_list (TREE_TYPE (gnu_subprog_type),
3115 nreverse (gnu_param_list));
3117 /* Propagate the return value, if any. */
3118 if (VOID_TYPE_P (TREE_TYPE (gnu_subprog_type)))
3119 append_to_statement_list (gnu_subprog_call, &gnu_body);
3121 append_to_statement_list (build_return_expr (DECL_RESULT (gnu_stub_decl),
3127 allocate_struct_function (gnu_stub_decl, false);
3128 end_subprog_body (gnu_body);
3131 /* Build a type to be used to represent an aliased object whose nominal
3132 type is an unconstrained array. This consists of a RECORD_TYPE containing
3133 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
3134 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
3135 is used to represent an arbitrary unconstrained object. Use NAME
3136 as the name of the record. */
3139 build_unc_object_type (tree template_type, tree object_type, tree name)
3141 tree type = make_node (RECORD_TYPE);
3142 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
3143 template_type, type, 0, 0, 0, 1);
3144 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
3147 TYPE_NAME (type) = name;
3148 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
3149 finish_record_type (type,
3150 chainon (chainon (NULL_TREE, template_field),
3157 /* Same, taking a thin or fat pointer type instead of a template type. */
3160 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
3165 gcc_assert (TYPE_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
3168 = (TYPE_FAT_POINTER_P (thin_fat_ptr_type)
3169 ? TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
3170 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
3171 return build_unc_object_type (template_type, object_type, name);
3174 /* Shift the component offsets within an unconstrained object TYPE to make it
3175 suitable for use as a designated type for thin pointers. */
3178 shift_unc_components_for_thin_pointers (tree type)
3180 /* Thin pointer values designate the ARRAY data of an unconstrained object,
3181 allocated past the BOUNDS template. The designated type is adjusted to
3182 have ARRAY at position zero and the template at a negative offset, so
3183 that COMPONENT_REFs on (*thin_ptr) designate the proper location. */
3185 tree bounds_field = TYPE_FIELDS (type);
3186 tree array_field = TREE_CHAIN (TYPE_FIELDS (type));
3188 DECL_FIELD_OFFSET (bounds_field)
3189 = size_binop (MINUS_EXPR, size_zero_node, byte_position (array_field));
3191 DECL_FIELD_OFFSET (array_field) = size_zero_node;
3192 DECL_FIELD_BIT_OFFSET (array_field) = bitsize_zero_node;
3195 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE. In
3196 the normal case this is just two adjustments, but we have more to do
3197 if NEW is an UNCONSTRAINED_ARRAY_TYPE. */
3200 update_pointer_to (tree old_type, tree new_type)
3202 tree ptr = TYPE_POINTER_TO (old_type);
3203 tree ref = TYPE_REFERENCE_TO (old_type);
3207 /* If this is the main variant, process all the other variants first. */
3208 if (TYPE_MAIN_VARIANT (old_type) == old_type)
3209 for (type = TYPE_NEXT_VARIANT (old_type); type;
3210 type = TYPE_NEXT_VARIANT (type))
3211 update_pointer_to (type, new_type);
3213 /* If no pointer or reference, we are done. */
3217 /* Merge the old type qualifiers in the new type.
3219 Each old variant has qualifiers for specific reasons, and the new
3220 designated type as well. Each set of qualifiers represents useful
3221 information grabbed at some point, and merging the two simply unifies
3222 these inputs into the final type description.
3224 Consider for instance a volatile type frozen after an access to constant
3225 type designating it. After the designated type freeze, we get here with a
3226 volatile new_type and a dummy old_type with a readonly variant, created
3227 when the access type was processed. We shall make a volatile and readonly
3228 designated type, because that's what it really is.
3230 We might also get here for a non-dummy old_type variant with different
3231 qualifiers than the new_type ones, for instance in some cases of pointers
3232 to private record type elaboration (see the comments around the call to
3233 this routine from gnat_to_gnu_entity/E_Access_Type). We have to merge the
3234 qualifiers in those cases too, to avoid accidentally discarding the
3235 initial set, and will often end up with old_type == new_type then. */
3236 new_type = build_qualified_type (new_type,
3237 TYPE_QUALS (old_type)
3238 | TYPE_QUALS (new_type));
3240 /* If the new type and the old one are identical, there is nothing to
3242 if (old_type == new_type)
3245 /* Otherwise, first handle the simple case. */
3246 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
3248 TYPE_POINTER_TO (new_type) = ptr;
3249 TYPE_REFERENCE_TO (new_type) = ref;
3251 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
3252 for (ptr1 = TYPE_MAIN_VARIANT (ptr); ptr1;
3253 ptr1 = TYPE_NEXT_VARIANT (ptr1))
3254 TREE_TYPE (ptr1) = new_type;
3256 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
3257 for (ref1 = TYPE_MAIN_VARIANT (ref); ref1;
3258 ref1 = TYPE_NEXT_VARIANT (ref1))
3259 TREE_TYPE (ref1) = new_type;
3262 /* Now deal with the unconstrained array case. In this case the "pointer"
3263 is actually a RECORD_TYPE where both fields are pointers to dummy nodes.
3264 Turn them into pointers to the correct types using update_pointer_to. */
3265 else if (TREE_CODE (ptr) != RECORD_TYPE || !TYPE_IS_FAT_POINTER_P (ptr))
3270 tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
3271 tree array_field = TYPE_FIELDS (ptr);
3272 tree bounds_field = TREE_CHAIN (TYPE_FIELDS (ptr));
3273 tree new_ptr = TYPE_POINTER_TO (new_type);
3277 /* Make pointers to the dummy template point to the real template. */
3279 (TREE_TYPE (TREE_TYPE (bounds_field)),
3280 TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_ptr)))));
3282 /* The references to the template bounds present in the array type
3283 are made through a PLACEHOLDER_EXPR of type new_ptr. Since we
3284 are updating ptr to make it a full replacement for new_ptr as
3285 pointer to new_type, we must rework the PLACEHOLDER_EXPR so as
3286 to make it of type ptr. */
3287 new_ref = build3 (COMPONENT_REF, TREE_TYPE (bounds_field),
3288 build0 (PLACEHOLDER_EXPR, ptr),
3289 bounds_field, NULL_TREE);
3291 /* Create the new array for the new PLACEHOLDER_EXPR and make
3292 pointers to the dummy array point to it.
3294 ??? This is now the only use of substitute_in_type,
3295 which is a very "heavy" routine to do this, so it
3296 should be replaced at some point. */
3298 (TREE_TYPE (TREE_TYPE (array_field)),
3299 substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))),
3300 TREE_CHAIN (TYPE_FIELDS (new_ptr)), new_ref));
3302 /* Make ptr the pointer to new_type. */
3303 TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
3304 = TREE_TYPE (new_type) = ptr;
3306 for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
3307 SET_TYPE_UNCONSTRAINED_ARRAY (var, new_type);
3309 /* Now handle updating the allocation record, what the thin pointer
3310 points to. Update all pointers from the old record into the new
3311 one, update the type of the array field, and recompute the size. */
3312 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);
3314 TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
3315 = TREE_TYPE (TREE_TYPE (array_field));
3317 /* The size recomputation needs to account for alignment constraints, so
3318 we let layout_type work it out. This will reset the field offsets to
3319 what they would be in a regular record, so we shift them back to what
3320 we want them to be for a thin pointer designated type afterwards. */
3321 DECL_SIZE (TYPE_FIELDS (new_obj_rec)) = 0;
3322 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))) = 0;
3323 TYPE_SIZE (new_obj_rec) = 0;
3324 layout_type (new_obj_rec);
3326 shift_unc_components_for_thin_pointers (new_obj_rec);
3328 /* We are done, at last. */
3329 rest_of_record_type_compilation (ptr);
3333 /* Convert a pointer to a constrained array into a pointer to a fat
3334 pointer. This involves making or finding a template. */
3337 convert_to_fat_pointer (tree type, tree expr)
3339 tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
3340 tree template, template_addr;
3341 tree etype = TREE_TYPE (expr);
3343 /* If EXPR is a constant of zero, we make a fat pointer that has a null
3344 pointer to the template and array. */
3345 if (integer_zerop (expr))
3347 gnat_build_constructor
3349 tree_cons (TYPE_FIELDS (type),
3350 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
3351 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3352 convert (build_pointer_type (template_type),
3356 /* If EXPR is a thin pointer, make the template and data from the record. */
3358 else if (TYPE_THIN_POINTER_P (etype))
3360 tree fields = TYPE_FIELDS (TREE_TYPE (etype));
3362 expr = save_expr (expr);
3363 if (TREE_CODE (expr) == ADDR_EXPR)
3364 expr = TREE_OPERAND (expr, 0);
3366 expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
3368 template = build_component_ref (expr, NULL_TREE, fields, false);
3369 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
3370 build_component_ref (expr, NULL_TREE,
3371 TREE_CHAIN (fields), false));
3374 /* Otherwise, build the constructor for the template. */
3375 template = build_template (template_type, TREE_TYPE (etype), expr);
3377 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);
3379 /* The result is a CONSTRUCTOR for the fat pointer.
3381 If expr is an argument of a foreign convention subprogram, the type it
3382 points to is directly the component type. In this case, the expression
3383 type may not match the corresponding FIELD_DECL type at this point, so we
3384 call "convert" here to fix that up if necessary. This type consistency is
3385 required, for instance because it ensures that possible later folding of
3386 component_refs against this constructor always yields something of the
3387 same type as the initial reference.
3389 Note that the call to "build_template" above is still fine, because it
3390 will only refer to the provided template_type in this case. */
3392 gnat_build_constructor
3393 (type, tree_cons (TYPE_FIELDS (type),
3394 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
3395 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3396 template_addr, NULL_TREE)));
3399 /* Convert to a thin pointer type, TYPE. The only thing we know how to convert
3400 is something that is a fat pointer, so convert to it first if it EXPR
3401 is not already a fat pointer. */
3404 convert_to_thin_pointer (tree type, tree expr)
3406 if (!TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
3408 = convert_to_fat_pointer
3409 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
3411 /* We get the pointer to the data and use a NOP_EXPR to make it the
3413 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)),
3415 expr = build1 (NOP_EXPR, type, expr);
3420 /* Create an expression whose value is that of EXPR,
3421 converted to type TYPE. The TREE_TYPE of the value
3422 is always TYPE. This function implements all reasonable
3423 conversions; callers should filter out those that are
3424 not permitted by the language being compiled. */
3427 convert (tree type, tree expr)
3429 enum tree_code code = TREE_CODE (type);
3430 tree etype = TREE_TYPE (expr);
3431 enum tree_code ecode = TREE_CODE (etype);
3433 /* If EXPR is already the right type, we are done. */
3437 /* If both input and output have padding and are of variable size, do this
3438 as an unchecked conversion. Likewise if one is a mere variant of the
3439 other, so we avoid a pointless unpad/repad sequence. */
3440 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
3441 && TYPE_IS_PADDING_P (type) && TYPE_IS_PADDING_P (etype)
3442 && (!TREE_CONSTANT (TYPE_SIZE (type))
3443 || !TREE_CONSTANT (TYPE_SIZE (etype))
3444 || gnat_types_compatible_p (type, etype)))
3447 /* If the output type has padding, convert to the inner type and
3448 make a constructor to build the record. */
3449 else if (code == RECORD_TYPE && TYPE_IS_PADDING_P (type))
3451 /* If we previously converted from another type and our type is
3452 of variable size, remove the conversion to avoid the need for
3453 variable-size temporaries. */
3454 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3455 && !TREE_CONSTANT (TYPE_SIZE (type)))
3456 expr = TREE_OPERAND (expr, 0);
3458 /* If we are just removing the padding from expr, convert the original
3459 object if we have variable size in order to avoid the need for some
3460 variable-size temporaries. Likewise if the padding is a mere variant
3461 of the other, so we avoid a pointless unpad/repad sequence. */
3462 if (TREE_CODE (expr) == COMPONENT_REF
3463 && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE
3464 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
3465 && (!TREE_CONSTANT (TYPE_SIZE (type))
3466 || gnat_types_compatible_p (type,
3467 TREE_TYPE (TREE_OPERAND (expr, 0)))))
3468 return convert (type, TREE_OPERAND (expr, 0));
3470 /* If the result type is a padded type with a self-referentially-sized
3471 field and the expression type is a record, do this as an
3472 unchecked conversion. */
3473 else if (TREE_CODE (etype) == RECORD_TYPE
3474 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
3475 return unchecked_convert (type, expr, false);
3479 gnat_build_constructor (type,
3480 tree_cons (TYPE_FIELDS (type),
3482 (TYPE_FIELDS (type)),
3487 /* If the input type has padding, remove it and convert to the output type.
3488 The conditions ordering is arranged to ensure that the output type is not
3489 a padding type here, as it is not clear whether the conversion would
3490 always be correct if this was to happen. */
3491 else if (ecode == RECORD_TYPE && TYPE_IS_PADDING_P (etype))
3495 /* If we have just converted to this padded type, just get the
3496 inner expression. */
3497 if (TREE_CODE (expr) == CONSTRUCTOR
3498 && !VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (expr))
3499 && VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->index
3500 == TYPE_FIELDS (etype))
3502 = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->value;
3504 /* Otherwise, build an explicit component reference. */
3507 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
3509 return convert (type, unpadded);
3512 /* If the input is a biased type, adjust first. */
3513 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
3514 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
3515 fold_convert (TREE_TYPE (etype),
3517 TYPE_MIN_VALUE (etype)));
3519 /* If the input is a justified modular type, we need to extract the actual
3520 object before converting it to any other type with the exceptions of an
3521 unconstrained array or of a mere type variant. It is useful to avoid the
3522 extraction and conversion in the type variant case because it could end
3523 up replacing a VAR_DECL expr by a constructor and we might be about the
3524 take the address of the result. */
3525 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
3526 && code != UNCONSTRAINED_ARRAY_TYPE
3527 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
3528 return convert (type, build_component_ref (expr, NULL_TREE,
3529 TYPE_FIELDS (etype), false));
3531 /* If converting to a type that contains a template, convert to the data
3532 type and then build the template. */
3533 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
3535 tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));
3537 /* If the source already has a template, get a reference to the
3538 associated array only, as we are going to rebuild a template
3539 for the target type anyway. */
3540 expr = maybe_unconstrained_array (expr);
3543 gnat_build_constructor
3545 tree_cons (TYPE_FIELDS (type),
3546 build_template (TREE_TYPE (TYPE_FIELDS (type)),
3547 obj_type, NULL_TREE),
3548 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3549 convert (obj_type, expr), NULL_TREE)));
3552 /* There are some special cases of expressions that we process
3554 switch (TREE_CODE (expr))
3560 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
3561 conversion in gnat_expand_expr. NULL_EXPR does not represent
3562 and actual value, so no conversion is needed. */
3563 expr = copy_node (expr);
3564 TREE_TYPE (expr) = type;
3568 /* If we are converting a STRING_CST to another constrained array type,
3569 just make a new one in the proper type. */
3570 if (code == ecode && AGGREGATE_TYPE_P (etype)
3571 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
3572 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
3574 expr = copy_node (expr);
3575 TREE_TYPE (expr) = type;
3581 /* If we are converting a CONSTRUCTOR to a mere variant type, just make
3582 a new one in the proper type. */
3583 if (gnat_types_compatible_p (type, etype))
3585 expr = copy_node (expr);
3586 TREE_TYPE (expr) = type;
3591 case UNCONSTRAINED_ARRAY_REF:
3592 /* Convert this to the type of the inner array by getting the address of
3593 the array from the template. */
3594 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
3595 build_component_ref (TREE_OPERAND (expr, 0),
3596 get_identifier ("P_ARRAY"),
3598 etype = TREE_TYPE (expr);
3599 ecode = TREE_CODE (etype);
3602 case VIEW_CONVERT_EXPR:
3604 /* GCC 4.x is very sensitive to type consistency overall, and view
3605 conversions thus are very frequent. Even though just "convert"ing
3606 the inner operand to the output type is fine in most cases, it
3607 might expose unexpected input/output type mismatches in special
3608 circumstances so we avoid such recursive calls when we can. */
3609 tree op0 = TREE_OPERAND (expr, 0);
3611 /* If we are converting back to the original type, we can just
3612 lift the input conversion. This is a common occurrence with
3613 switches back-and-forth amongst type variants. */
3614 if (type == TREE_TYPE (op0))
3617 /* Otherwise, if we're converting between two aggregate types, we
3618 might be allowed to substitute the VIEW_CONVERT_EXPR target type
3619 in place or to just convert the inner expression. */
3620 if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
3622 /* If we are converting between mere variants, we can just
3623 substitute the VIEW_CONVERT_EXPR in place. */
3624 if (gnat_types_compatible_p (type, etype))
3625 return build1 (VIEW_CONVERT_EXPR, type, op0);
3627 /* Otherwise, we may just bypass the input view conversion unless
3628 one of the types is a fat pointer, which is handled by
3629 specialized code below which relies on exact type matching. */
3630 else if (!TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
3631 return convert (type, op0);
3637 /* If both types are record types, just convert the pointer and
3638 make a new INDIRECT_REF.
3640 ??? Disable this for now since it causes problems with the
3641 code in build_binary_op for MODIFY_EXPR which wants to
3642 strip off conversions. But that code really is a mess and
3643 we need to do this a much better way some time. */
3645 && (TREE_CODE (type) == RECORD_TYPE
3646 || TREE_CODE (type) == UNION_TYPE)
3647 && (TREE_CODE (etype) == RECORD_TYPE
3648 || TREE_CODE (etype) == UNION_TYPE)
3649 && !TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
3650 return build_unary_op (INDIRECT_REF, NULL_TREE,
3651 convert (build_pointer_type (type),
3652 TREE_OPERAND (expr, 0)));
3659 /* Check for converting to a pointer to an unconstrained array. */
3660 if (TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
3661 return convert_to_fat_pointer (type, expr);
3663 /* If we're converting between two aggregate types that are mere
3664 variants, just make a VIEW_CONVERT_EXPR. */
3665 else if (AGGREGATE_TYPE_P (type)
3666 && gnat_types_compatible_p (type, etype))
3667 return build1 (VIEW_CONVERT_EXPR, type, expr);
3669 /* In all other cases of related types, make a NOP_EXPR. */
3670 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
3671 || (code == INTEGER_CST && ecode == INTEGER_CST
3672 && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
3673 return fold_convert (type, expr);
3678 return fold_build1 (CONVERT_EXPR, type, expr);
3681 return fold_convert (type, gnat_truthvalue_conversion (expr));
3684 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
3685 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
3686 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
3687 return unchecked_convert (type, expr, false);
3688 else if (TYPE_BIASED_REPRESENTATION_P (type))
3689 return fold_convert (type,
3690 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
3691 convert (TREE_TYPE (type), expr),
3692 TYPE_MIN_VALUE (type)));
3694 /* ... fall through ... */
3697 return fold (convert_to_integer (type, expr));
3700 case REFERENCE_TYPE:
3701 /* If converting between two pointers to records denoting
3702 both a template and type, adjust if needed to account
3703 for any differing offsets, since one might be negative. */
3704 if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
3707 = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
3708 bit_position (TYPE_FIELDS (TREE_TYPE (type))));
3709 tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
3710 sbitsize_int (BITS_PER_UNIT));
3712 expr = build1 (NOP_EXPR, type, expr);
3713 TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
3714 if (integer_zerop (byte_diff))
3717 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
3718 fold (convert (sizetype, byte_diff)));
3721 /* If converting to a thin pointer, handle specially. */
3722 if (TYPE_THIN_POINTER_P (type)
3723 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
3724 return convert_to_thin_pointer (type, expr);
3726 /* If converting fat pointer to normal pointer, get the pointer to the
3727 array and then convert it. */
3728 else if (TYPE_FAT_POINTER_P (etype))
3729 expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
3732 return fold (convert_to_pointer (type, expr));
3735 return fold (convert_to_real (type, expr));
3738 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
3740 gnat_build_constructor
3741 (type, tree_cons (TYPE_FIELDS (type),
3742 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
3745 /* ... fall through ... */
3748 /* In these cases, assume the front-end has validated the conversion.
3749 If the conversion is valid, it will be a bit-wise conversion, so
3750 it can be viewed as an unchecked conversion. */
3751 return unchecked_convert (type, expr, false);
3754 /* This is a either a conversion between a tagged type and some
3755 subtype, which we have to mark as a UNION_TYPE because of
3756 overlapping fields or a conversion of an Unchecked_Union. */
3757 return unchecked_convert (type, expr, false);
3759 case UNCONSTRAINED_ARRAY_TYPE:
3760 /* If EXPR is a constrained array, take its address, convert it to a
3761 fat pointer, and then dereference it. Likewise if EXPR is a
3762 record containing both a template and a constrained array.
3763 Note that a record representing a justified modular type
3764 always represents a packed constrained array. */
3765 if (ecode == ARRAY_TYPE
3766 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
3767 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
3768 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
3771 (INDIRECT_REF, NULL_TREE,
3772 convert_to_fat_pointer (TREE_TYPE (type),
3773 build_unary_op (ADDR_EXPR,
3776 /* Do something very similar for converting one unconstrained
3777 array to another. */
3778 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
3780 build_unary_op (INDIRECT_REF, NULL_TREE,
3781 convert (TREE_TYPE (type),
3782 build_unary_op (ADDR_EXPR,
3788 return fold (convert_to_complex (type, expr));
3795 /* Remove all conversions that are done in EXP. This includes converting
3796 from a padded type or to a justified modular type. If TRUE_ADDRESS
3797 is true, always return the address of the containing object even if
3798 the address is not bit-aligned. */
3801 remove_conversions (tree exp, bool true_address)
3803 switch (TREE_CODE (exp))
3807 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3808 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
3810 remove_conversions (VEC_index (constructor_elt,
3811 CONSTRUCTOR_ELTS (exp), 0)->value,
3816 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
3817 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
3818 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3821 case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
3822 case NOP_EXPR: case CONVERT_EXPR:
3823 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3832 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
3833 refers to the underlying array. If its type has TYPE_CONTAINS_TEMPLATE_P,
3834 likewise return an expression pointing to the underlying array. */
3837 maybe_unconstrained_array (tree exp)
3839 enum tree_code code = TREE_CODE (exp);
3842 switch (TREE_CODE (TREE_TYPE (exp)))
3844 case UNCONSTRAINED_ARRAY_TYPE:
3845 if (code == UNCONSTRAINED_ARRAY_REF)
3848 = build_unary_op (INDIRECT_REF, NULL_TREE,
3849 build_component_ref (TREE_OPERAND (exp, 0),
3850 get_identifier ("P_ARRAY"),
3852 TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp);
3856 else if (code == NULL_EXPR)
3857 return build1 (NULL_EXPR,
3858 TREE_TYPE (TREE_TYPE (TYPE_FIELDS
3859 (TREE_TYPE (TREE_TYPE (exp))))),
3860 TREE_OPERAND (exp, 0));
3863 /* If this is a padded type, convert to the unpadded type and see if
3864 it contains a template. */
3865 if (TYPE_IS_PADDING_P (TREE_TYPE (exp)))
3867 new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp))), exp);
3868 if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
3869 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (new)))
3871 build_component_ref (new, NULL_TREE,
3872 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (new))),
3875 else if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
3877 build_component_ref (exp, NULL_TREE,
3878 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))), 0);
3888 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
3889 If NOTRUNC_P is true, truncation operations should be suppressed. */
3892 unchecked_convert (tree type, tree expr, bool notrunc_p)
3894 tree etype = TREE_TYPE (expr);
3896 /* If the expression is already the right type, we are done. */
3900 /* If both types types are integral just do a normal conversion.
3901 Likewise for a conversion to an unconstrained array. */
3902 if ((((INTEGRAL_TYPE_P (type)
3903 && !(TREE_CODE (type) == INTEGER_TYPE
3904 && TYPE_VAX_FLOATING_POINT_P (type)))
3905 || (POINTER_TYPE_P (type) && ! TYPE_THIN_POINTER_P (type))
3906 || (TREE_CODE (type) == RECORD_TYPE
3907 && TYPE_JUSTIFIED_MODULAR_P (type)))
3908 && ((INTEGRAL_TYPE_P (etype)
3909 && !(TREE_CODE (etype) == INTEGER_TYPE
3910 && TYPE_VAX_FLOATING_POINT_P (etype)))
3911 || (POINTER_TYPE_P (etype) && !TYPE_THIN_POINTER_P (etype))
3912 || (TREE_CODE (etype) == RECORD_TYPE
3913 && TYPE_JUSTIFIED_MODULAR_P (etype))))
3914 || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3917 bool final_unchecked = false;
3919 if (TREE_CODE (etype) == INTEGER_TYPE
3920 && TYPE_BIASED_REPRESENTATION_P (etype))
3922 tree ntype = copy_type (etype);
3924 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
3925 TYPE_MAIN_VARIANT (ntype) = ntype;
3926 expr = build1 (NOP_EXPR, ntype, expr);
3929 if (TREE_CODE (type) == INTEGER_TYPE
3930 && TYPE_BIASED_REPRESENTATION_P (type))
3932 rtype = copy_type (type);
3933 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
3934 TYPE_MAIN_VARIANT (rtype) = rtype;
3937 /* We have another special case: if we are unchecked converting subtype
3938 into a base type, we need to ensure that VRP doesn't propagate range
3939 information since this conversion may be done precisely to validate
3940 that the object is within the range it is supposed to have. */
3941 else if (TREE_CODE (expr) != INTEGER_CST
3942 && TREE_CODE (type) == INTEGER_TYPE && !TREE_TYPE (type)
3943 && ((TREE_CODE (etype) == INTEGER_TYPE && TREE_TYPE (etype))
3944 || TREE_CODE (etype) == ENUMERAL_TYPE
3945 || TREE_CODE (etype) == BOOLEAN_TYPE))
3947 /* The optimization barrier is a VIEW_CONVERT_EXPR node; moreover,
3948 in order not to be deemed an useless type conversion, it must
3949 be from subtype to base type.
3951 ??? This may raise addressability and/or aliasing issues because
3952 VIEW_CONVERT_EXPR gets gimplified as an lvalue, thus causing the
3953 address of its operand to be taken if it is deemed addressable
3954 and not already in GIMPLE form. */
3955 rtype = gnat_type_for_mode (TYPE_MODE (type), TYPE_UNSIGNED (type));
3956 rtype = copy_type (rtype);
3957 TYPE_MAIN_VARIANT (rtype) = rtype;
3958 TREE_TYPE (rtype) = type;
3959 final_unchecked = true;
3962 expr = convert (rtype, expr);
3964 expr = fold_build1 (final_unchecked ? VIEW_CONVERT_EXPR : NOP_EXPR,
3968 /* If we are converting TO an integral type whose precision is not the
3969 same as its size, first unchecked convert to a record that contains
3970 an object of the output type. Then extract the field. */
3971 else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
3972 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
3973 GET_MODE_BITSIZE (TYPE_MODE (type))))
3975 tree rec_type = make_node (RECORD_TYPE);
3976 tree field = create_field_decl (get_identifier ("OBJ"), type,
3977 rec_type, 1, 0, 0, 0);
3979 TYPE_FIELDS (rec_type) = field;
3980 layout_type (rec_type);
3982 expr = unchecked_convert (rec_type, expr, notrunc_p);
3983 expr = build_component_ref (expr, NULL_TREE, field, 0);
3986 /* Similarly for integral input type whose precision is not equal to its
3988 else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype)
3989 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
3990 GET_MODE_BITSIZE (TYPE_MODE (etype))))
3992 tree rec_type = make_node (RECORD_TYPE);
3994 = create_field_decl (get_identifier ("OBJ"), etype, rec_type,
3997 TYPE_FIELDS (rec_type) = field;
3998 layout_type (rec_type);
4000 expr = gnat_build_constructor (rec_type, build_tree_list (field, expr));
4001 expr = unchecked_convert (type, expr, notrunc_p);
4004 /* We have a special case when we are converting between two
4005 unconstrained array types. In that case, take the address,
4006 convert the fat pointer types, and dereference. */
4007 else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
4008 && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
4009 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
4010 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
4011 build_unary_op (ADDR_EXPR, NULL_TREE,
4015 expr = maybe_unconstrained_array (expr);
4016 etype = TREE_TYPE (expr);
4017 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
4020 /* If the result is an integral type whose size is not equal to
4021 the size of the underlying machine type, sign- or zero-extend
4022 the result. We need not do this in the case where the input is
4023 an integral type of the same precision and signedness or if the output
4024 is a biased type or if both the input and output are unsigned. */
4026 && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
4027 && !(TREE_CODE (type) == INTEGER_TYPE
4028 && TYPE_BIASED_REPRESENTATION_P (type))
4029 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4030 GET_MODE_BITSIZE (TYPE_MODE (type)))
4031 && !(INTEGRAL_TYPE_P (etype)
4032 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
4033 && operand_equal_p (TYPE_RM_SIZE (type),
4034 (TYPE_RM_SIZE (etype) != 0
4035 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
4037 && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
4039 tree base_type = gnat_type_for_mode (TYPE_MODE (type),
4040 TYPE_UNSIGNED (type));
4042 = convert (base_type,
4043 size_binop (MINUS_EXPR,
4045 (GET_MODE_BITSIZE (TYPE_MODE (type))),
4046 TYPE_RM_SIZE (type)));
4049 build_binary_op (RSHIFT_EXPR, base_type,
4050 build_binary_op (LSHIFT_EXPR, base_type,
4051 convert (base_type, expr),
4056 /* An unchecked conversion should never raise Constraint_Error. The code
4057 below assumes that GCC's conversion routines overflow the same way that
4058 the underlying hardware does. This is probably true. In the rare case
4059 when it is false, we can rely on the fact that such conversions are
4060 erroneous anyway. */
4061 if (TREE_CODE (expr) == INTEGER_CST)
4062 TREE_OVERFLOW (expr) = 0;
4064 /* If the sizes of the types differ and this is an VIEW_CONVERT_EXPR,
4065 show no longer constant. */
4066 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4067 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
4069 TREE_CONSTANT (expr) = 0;
4074 /* Search the chain of currently available builtin declarations for a node
4075 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
4076 found, if any, or NULL_TREE otherwise. */
4078 builtin_decl_for (tree name)
4083 for (i = 0; VEC_iterate(tree, builtin_decls, i, decl); i++)
4084 if (DECL_NAME (decl) == name)
4090 /* Return the appropriate GCC tree code for the specified GNAT type,
4091 the latter being a record type as predicated by Is_Record_Type. */
4094 tree_code_for_record_type (Entity_Id gnat_type)
4096 Node_Id component_list
4097 = Component_List (Type_Definition
4099 (Implementation_Base_Type (gnat_type))));
4102 /* Make this a UNION_TYPE unless it's either not an Unchecked_Union or
4103 we have a non-discriminant field outside a variant. In either case,
4104 it's a RECORD_TYPE. */
4106 if (!Is_Unchecked_Union (gnat_type))
4109 for (component = First_Non_Pragma (Component_Items (component_list));
4110 Present (component);
4111 component = Next_Non_Pragma (component))
4112 if (Ekind (Defining_Entity (component)) == E_Component)
4118 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
4119 component of an aggregate type. */
4122 type_for_nonaliased_component_p (tree gnu_type)
4124 /* If the type is passed by reference, we may have pointers to the
4125 component so it cannot be made non-aliased. */
4126 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
4129 /* We used to say that any component of aggregate type is aliased
4130 because the front-end may take 'Reference of it. The front-end
4131 has been enhanced in the meantime so as to use a renaming instead
4132 in most cases, but the back-end can probably take the address of
4133 such a component too so we go for the conservative stance.
4135 For instance, we might need the address of any array type, even
4136 if normally passed by copy, to construct a fat pointer if the
4137 component is used as an actual for an unconstrained formal.
4139 Likewise for record types: even if a specific record subtype is
4140 passed by copy, the parent type might be passed by ref (e.g. if
4141 it's of variable size) and we might take the address of a child
4142 component to pass to a parent formal. We have no way to check
4143 for such conditions here. */
4144 if (AGGREGATE_TYPE_P (gnu_type))
4150 /* Perform final processing on global variables. */
4153 gnat_write_global_declarations (void)
4155 /* Proceed to optimize and emit assembly.
4156 FIXME: shouldn't be the front end's responsibility to call this. */
4159 /* Emit debug info for all global declarations. */
4160 emit_debug_global_declarations (VEC_address (tree, global_decls),
4161 VEC_length (tree, global_decls));
4164 #include "gt-ada-utils.h"
4165 #include "gtype-ada.h"