1 /****************************************************************************
3 * GNAT COMPILER COMPONENTS *
7 * C Implementation File *
9 * Copyright (C) 1992-2008, Free Software Foundation, Inc. *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
24 ****************************************************************************/
26 /* We have attribute handlers using C specific format specifiers in warning
27 messages. Make sure they are properly recognized. */
28 #define GCC_DIAG_STYLE __gcc_cdiag__
32 #include "coretypes.h"
45 #include "tree-inline.h"
46 #include "tree-iterator.h"
48 #include "tree-dump.h"
49 #include "pointer-set.h"
50 #include "langhooks.h"
/* Fallback definitions for targets that do not define these macros.
   NOTE(review): the matching #endif lines (original lines 68 and 72) appear
   to have been dropped from this listing -- confirm against pristine source.  */
66 #ifndef MAX_FIXED_MODE_SIZE
67 #define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
70 #ifndef MAX_BITS_PER_WORD
71 #define MAX_BITS_PER_WORD BITS_PER_WORD
74 /* If nonzero, pretend we are allocating at global level. */
/* NOTE(review): the declaration this comment describes (presumably
   `int force_global;`, original line ~75) is missing from this listing;
   `force_global` is nevertheless read in global_bindings_p below.  */
77 /* Tree nodes for the various types and decls we create. */
78 tree gnat_std_decls[(int) ADT_LAST];
80 /* Functions to call for each of the possible raise reasons. */
81 tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
83 /* Forward declarations for handlers of attributes. */
/* Each handler follows the GCC attribute_spec handler signature:
   (node, name, args, flags, no_add_attrs).  */
84 static tree handle_const_attribute (tree *, tree, tree, int, bool *);
85 static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
86 static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
87 static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
88 static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
89 static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
90 static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
91 static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
92 static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
94 /* Fake handler for attributes we don't properly support, typically because
95 they'd require dragging a lot of the common-c front-end circuitry. */
96 static tree fake_attribute_handler (tree *, tree, tree, int, bool *);
98 /* Table of machine-independent internal attributes for Ada. We support
99 this minimal set of attributes to accommodate the needs of builtins. */
100 const struct attribute_spec gnat_internal_attribute_table[] =
/* NOTE(review): the opening brace of the initializer (original line 101)
   is missing from this listing.  */
102 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
103 { "const", 0, 0, true, false, false, handle_const_attribute },
104 { "nothrow", 0, 0, true, false, false, handle_nothrow_attribute },
105 { "pure", 0, 0, true, false, false, handle_pure_attribute },
106 { "no vops", 0, 0, true, false, false, handle_novops_attribute },
/* max_len of -1 means "nonnull" accepts any number of arguments.  */
107 { "nonnull", 0, -1, false, true, true, handle_nonnull_attribute },
108 { "sentinel", 0, 1, false, true, true, handle_sentinel_attribute },
109 { "noreturn", 0, 0, true, false, false, handle_noreturn_attribute },
110 { "malloc", 0, 0, true, false, false, handle_malloc_attribute },
111 { "type generic", 0, 0, false, true, true, handle_type_generic_attribute },
113 /* ??? format and format_arg are heavy and not supported, which actually
114 prevents support for stdio builtins, which we however declare as part
115 of the common builtins.def contents. */
116 { "format", 3, 3, false, true, true, fake_attribute_handler },
117 { "format_arg", 1, 1, false, true, true, fake_attribute_handler },
/* Sentinel entry terminating the table.  NOTE(review): the closing "};"
   (original line 120) is missing from this listing.  */
119 { NULL, 0, 0, false, false, false, NULL }
122 /* Associates a GNAT tree node to a GCC tree node. It is used in
123 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
124 of `save_gnu_tree' for more info. */
/* GC-managed table indexed by GNAT node id, length tracked by
   max_gnat_nodes for the garbage collector.  */
125 static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
/* Accessor macros for the association table; entries are offset by
   First_Node_Id so the table starts at index 0.  */
127 #define GET_GNU_TREE(GNAT_ENTITY) \
128 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
130 #define SET_GNU_TREE(GNAT_ENTITY,VAL) \
131 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
133 #define PRESENT_GNU_TREE(GNAT_ENTITY) \
134 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
136 /* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
137 static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;
/* Same accessor pattern as GET/SET/PRESENT_GNU_TREE, for dummy types.  */
139 #define GET_DUMMY_NODE(GNAT_ENTITY) \
140 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
142 #define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
143 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
145 #define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
146 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
148 /* This variable keeps a table for types for each precision so that we only
149 allocate each of them once. Signed and unsigned types are kept separate.
151 Note that these types are only used when fold-const requests something
152 special. Perhaps we should NOT share these types; we'll see how it
154 static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
156 /* Likewise for float types, but record these by mode. */
157 static GTY(()) tree float_types[NUM_MACHINE_MODES];
159 /* For each binding contour we allocate a binding_level structure to indicate
160 the binding depth. */
/* NOTE(review): the struct body below is incomplete in this listing: the
   opening brace, the `tree block;` field (described by the comment at
   original line 166) and the `tree jmpbuf_decl;` field (described by the
   comment at original lines 168-169), plus the closing "};", are missing.
   Both fields are used by functions further down (e.g. newlevel->block,
   current_binding_level->jmpbuf_decl).  */
162 struct gnat_binding_level GTY((chain_next ("%h.chain")))
164 /* The binding level containing this one (the enclosing binding level). */
165 struct gnat_binding_level *chain;
166 /* The BLOCK node for this level. */
168 /* If nonzero, the setjmp buffer that needs to be updated for any
169 variable-sized definition within this context. */
173 /* The binding level currently in effect. */
174 static GTY(()) struct gnat_binding_level *current_binding_level;
176 /* A chain of gnat_binding_level structures awaiting reuse. */
177 static GTY((deletable)) struct gnat_binding_level *free_binding_level;
179 /* An array of global declarations. */
180 static GTY(()) VEC(tree,gc) *global_decls;
182 /* An array of builtin function declarations. */
183 static GTY(()) VEC(tree,gc) *builtin_decls;
185 /* An array of global renaming pointers. */
186 static GTY(()) VEC(tree,gc) *global_renaming_pointers;
188 /* A chain of unused BLOCK nodes. */
189 static GTY((deletable)) tree free_block_chain;
/* Forward declarations of local helpers defined later in this file.  */
191 static void gnat_install_builtins (void);
192 static tree merge_sizes (tree, tree, tree, bool, bool);
193 static tree compute_related_constant (tree, tree);
194 static tree split_plus (tree, tree *);
195 static void gnat_gimplify_function (tree);
196 static tree float_type_for_precision (int, enum machine_mode);
197 static tree convert_to_fat_pointer (tree, tree);
198 static tree convert_to_thin_pointer (tree, tree);
199 static tree make_descriptor_field (const char *,tree, tree, tree);
200 static bool potential_alignment_gap (tree, tree, tree);
202 /* Initialize the association of GNAT nodes to GCC trees. */
/* Allocates the GC-cleared GNAT-to-GNU association table, one slot per
   GNAT node.  NOTE(review): the return-type line and the function braces
   (original lines 204/206/209) are missing from this listing; presumably
   `void`.  */
205 init_gnat_to_gnu (void)
207 associate_gnat_to_gnu
208 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
211 /* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
212 which is to be associated with GNAT_ENTITY. Such GCC tree node is always
213 a ..._DECL node. If NO_CHECK is nonzero, the latter check is suppressed.
215 If GNU_DECL is zero, a previous association is to be reset. */
/* NOTE(review): the return-type line and function braces (original lines
   217/219/229) are missing from this listing; presumably `void`.  */
218 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
220 /* Check that GNAT_ENTITY is not already defined and that it is being set
221 to something which is a decl. Raise gigi 401 if not. Usually, this
222 means GNAT_ENTITY is defined twice, but occasionally is due to some
224 gcc_assert (!(gnu_decl
225 && (PRESENT_GNU_TREE (gnat_entity)
226 || (!no_check && !DECL_P (gnu_decl)))));
/* Record (or clear, if GNU_DECL is null) the association.  */
228 SET_GNU_TREE (gnat_entity, gnu_decl);
231 /* GNAT_ENTITY is a GNAT tree node for a defining identifier.
232 Return the ..._DECL node that was associated with it. If there is no tree
233 node associated with GNAT_ENTITY, abort.
235 In some cases, such as delayed elaboration or expressions that need to
236 be elaborated only once, GNAT_ENTITY is really not an entity. */
/* NOTE(review): the return-type line (`tree`) and braces are missing from
   this listing (original lines 238/240/243).  */
239 get_gnu_tree (Entity_Id gnat_entity)
/* Aborts via gcc_assert if no association was previously saved.  */
241 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
242 return GET_GNU_TREE (gnat_entity);
245 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
/* Thin predicate wrapper around the PRESENT_GNU_TREE accessor macro.
   NOTE(review): return-type line and braces missing from this listing.  */
248 present_gnu_tree (Entity_Id gnat_entity)
250 return PRESENT_GNU_TREE (gnat_entity);
253 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
/* NOTE(review): the line assigning the allocation to `dummy_node_table`
   (original line 258) and the surrounding braces/return type are missing
   from this listing -- the orphaned `=` below continues that assignment.  */
256 init_dummy_type (void)
259 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
262 /* Make a dummy type corresponding to GNAT_TYPE. */
/* Returns a placeholder GCC type for a GNAT type whose real translation is
   not yet available; the dummy is cached per underlying entity so repeated
   requests return the same node.  NOTE(review): the return-type line,
   braces, the `tree gnu_type;` declaration, and the final
   `return gnu_type;` are missing from this listing.  */
265 make_dummy_type (Entity_Id gnat_type)
267 Entity_Id gnat_underlying = Gigi_Equivalent_Type (gnat_type);
270 /* If there is an equivalent type, get its underlying type. */
271 if (Present (gnat_underlying))
272 gnat_underlying = Underlying_Type (gnat_underlying);
274 /* If there was no equivalent type (can only happen when just annotating
275 types) or underlying type, go back to the original type. */
276 if (No (gnat_underlying))
277 gnat_underlying = gnat_type;
279 /* If there already is a dummy type, use that one.  Else make one.  */
280 if (PRESENT_DUMMY_NODE (gnat_underlying))
281 return GET_DUMMY_NODE (gnat_underlying);
283 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
/* NOTE(review): the rest of this comment and the ENUMERAL_TYPE fallback
   argument to make_node (original lines 284/287) are missing here.  */
285 gnu_type = make_node (Is_Record_Type (gnat_underlying)
286 ? tree_code_for_record_type (gnat_underlying)
288 TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
289 TYPE_DUMMY_P (gnu_type) = 1;
/* Aggregates need a stub decl for debug output and a by-reference flag.  */
290 if (AGGREGATE_TYPE_P (gnu_type))
292 TYPE_STUB_DECL (gnu_type) = build_decl (TYPE_DECL, NULL_TREE, gnu_type);
293 TYPE_BY_REFERENCE_P (gnu_type) = Is_By_Reference_Type (gnat_type);
/* Cache the dummy under the underlying entity for future lookups.  */
296 SET_DUMMY_NODE (gnat_underlying, gnu_type);
301 /* Return nonzero if we are currently in the global binding level. */
/* True when forced global (force_global) or when no function is being
   compiled.  Returns -1/0 rather than 1/0; callers only test for nonzero.
   NOTE(review): return-type line (`int`) and braces missing from this
   listing.  */
304 global_bindings_p (void)
306 return ((force_global || !current_function_decl) ? -1 : 0);
309 /* Enter a new binding level. */
/* NOTE(review): the function header itself (presumably
   `void gnat_pushlevel (void)`, original lines ~311-313) is missing from
   this listing; only the body remains.  Recycles binding-level structs and
   BLOCK nodes from the free lists when possible.  */
314 struct gnat_binding_level *newlevel = NULL;
316 /* Reuse a struct for this binding level, if there is one. */
317 if (free_binding_level)
319 newlevel = free_binding_level;
320 free_binding_level = free_binding_level->chain;
/* NOTE(review): the `else` and the `newlevel` target of this allocation
   (original lines ~322-323) are missing here.  */
324 = (struct gnat_binding_level *)
325 ggc_alloc (sizeof (struct gnat_binding_level))
;
327 /* Use a free BLOCK, if any; otherwise, allocate one. */
328 if (free_block_chain)
330 newlevel->block = free_block_chain;
331 free_block_chain = BLOCK_CHAIN (free_block_chain);
332 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
/* NOTE(review): the `else` branch header (original line ~334) is missing.  */
335 newlevel->block = make_node (BLOCK);
337 /* Point the BLOCK we just made to its parent. */
338 if (current_binding_level)
339 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
341 BLOCK_VARS (newlevel->block) = BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
342 TREE_USED (newlevel->block) = 1;
344 /* Add this level to the front of the chain (stack) of levels that are
346 newlevel->chain = current_binding_level;
347 newlevel->jmpbuf_decl = NULL_TREE;
348 current_binding_level = newlevel;
351 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
352 and point FNDECL to this BLOCK. */
/* Links the function decl and its outermost scope BLOCK in both directions.
   NOTE(review): return-type line and braces missing from this listing.  */
355 set_current_block_context (tree fndecl)
357 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
358 DECL_INITIAL (fndecl) = current_binding_level->block;
361 /* Set the jmpbuf_decl for the current binding level to DECL. */
/* NOTE(review): return-type line and braces missing from this listing.  */
364 set_block_jmpbuf_decl (tree decl)
366 current_binding_level->jmpbuf_decl = decl;
369 /* Get the jmpbuf_decl, if any, for the current binding level. */
/* Returns NULL_TREE when no setjmp buffer was registered for this level.
   NOTE(review): return-type line and braces missing; the empty parameter
   list `()` is old-style -- `(void)` would be the modern prototype.  */
372 get_block_jmpbuf_decl ()
374 return current_binding_level->jmpbuf_decl;
377 /* Exit a binding level. Set any BLOCK into the current code group. */
/* NOTE(review): the function header (presumably `void gnat_poplevel (void)`,
   original lines ~379-381) is missing from this listing.  Pops the current
   binding level, recycling its BLOCK and level struct onto the free lists.  */
382 struct gnat_binding_level *level = current_binding_level;
383 tree block = level->block;
/* Decls and subblocks were pushed in reverse; restore source order.  */
385 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
386 BLOCK_SUBBLOCKS (block) = nreverse (BLOCK_SUBBLOCKS (block));
388 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
389 are no variables free the block and merge its subblocks into those of its
390 parent block. Otherwise, add it to the list of its parent. */
391 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
/* NOTE(review): the empty statement for this branch (original line ~392)
   is missing from this listing.  */
393 else if (BLOCK_VARS (block) == NULL_TREE)
395 BLOCK_SUBBLOCKS (level->chain->block)
396 = chainon (BLOCK_SUBBLOCKS (block),
397 BLOCK_SUBBLOCKS (level->chain->block));
/* Recycle the now-unused BLOCK node.  */
398 BLOCK_CHAIN (block) = free_block_chain;
399 free_block_chain = block;
/* NOTE(review): the `else` branch header (original lines ~401-402) is
   missing; the following lines attach BLOCK under its parent.  */
403 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
404 BLOCK_SUBBLOCKS (level->chain->block) = block;
405 TREE_USED (block) = 1;
406 set_block_for_group (block);
409 /* Free this binding structure. */
410 current_binding_level = level->chain;
411 level->chain = free_binding_level;
412 free_binding_level = level;
416 /* Records a ..._DECL node DECL as belonging to the current lexical scope
417 and uses GNAT_NODE for location information and propagating flags. */
/* NOTE(review): return-type line and braces (presumably `void`) are missing
   from this listing, as are several `else` lines inside the body.  */
420 gnat_pushdecl (tree decl, Node_Id gnat_node)
422 /* If this decl is public external or at toplevel, there is no context.
423 But PARM_DECLs always go in the level of its function. */
424 if (TREE_CODE (decl) != PARM_DECL
425 && ((DECL_EXTERNAL (decl) && TREE_PUBLIC (decl))
426 || global_bindings_p ()))
427 DECL_CONTEXT (decl) = 0;
/* NOTE(review): the `else` line (original lines ~428-429) is missing.  */
430 DECL_CONTEXT (decl) = current_function_decl;
432 /* Functions imported in another function are not really nested. */
433 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_PUBLIC (decl))
434 DECL_NO_STATIC_CHAIN (decl) = 1;
/* Suppress warnings when no GNAT node is supplied or warnings are off.  */
437 TREE_NO_WARNING (decl) = (gnat_node == Empty || Warnings_Off (gnat_node));
439 /* Set the location of DECL and emit a declaration for it. */
440 if (Present (gnat_node))
441 Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
442 add_decl_expr (decl, gnat_node);
444 /* Put the declaration on the list. The list of declarations is in reverse
445 order. The list will be reversed later. Put global variables in the
446 globals list and builtin functions in a dedicated list to speed up
447 further lookups. Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into
448 the list, as they will cause trouble with the debugger and aren't needed
450 if (TREE_CODE (decl) != TYPE_DECL
451 || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
453 if (global_bindings_p ())
455 VEC_safe_push (tree, gc, global_decls, decl);
457 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
458 VEC_safe_push (tree, gc, builtin_decls, decl);
/* NOTE(review): the `else` line (original lines ~460-461) is missing;
   the next two lines chain the decl into the current BLOCK.  */
462 TREE_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
463 BLOCK_VARS (current_binding_level->block) = decl;
467 /* For the declaration of a type, set its name if it either is not already
468 set, was set to an IDENTIFIER_NODE, indicating an internal name,
469 or if the previous type name was not derived from a source name.
470 We'd rather have the type named with a real name and all the pointer
471 types to the same object have the same POINTER_TYPE node. Code in the
472 equivalent function of c-decl.c makes a copy of the type node here, but
473 that may cause us trouble with incomplete types. We make an exception
474 for fat pointer types because the compiler automatically builds them
475 for unconstrained array types and the debugger uses them to represent
476 both these and pointers to these. */
477 if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
479 tree t = TREE_TYPE (decl);
481 if (!TYPE_NAME (t) || TREE_CODE (TYPE_NAME (t)) == IDENTIFIER_NODE)
/* NOTE(review): the assignment for this branch (original line ~482,
   presumably `TYPE_NAME (t) = decl;`) is missing -- confirm against
   pristine source.  */
483 else if (TYPE_FAT_POINTER_P (t))
/* Fat pointers get a named variant so the original stays anonymous.  */
485 tree tt = build_variant_type_copy (t);
486 TYPE_NAME (tt) = decl;
487 TREE_USED (tt) = TREE_USED (t);
488 TREE_TYPE (decl) = tt;
489 DECL_ORIGINAL_TYPE (decl) = t;
/* NOTE(review): lines between here and 497 (original ~490-496) missing.  */
492 else if (DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
497 /* Propagate the name to all the variants. This is needed for
498 the type qualifiers machinery to work properly. */
500 for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
501 TYPE_NAME (t) = decl;
505 /* Do little here. Set up the standard declarations later after the
506 front end has been run. */
/* NOTE(review): return-type line and braces (presumably `void`) missing
   from this listing.  */
509 gnat_init_decl_processing (void)
511 /* Make the binding_level structure for global names. */
512 current_function_decl = 0;
513 current_binding_level = 0;
514 free_binding_level = 0;
517 build_common_tree_nodes (true, true);
519 /* In Ada, we use a signed type for SIZETYPE. Use the signed type
520 corresponding to the size of Pmode. In most cases when ptr_mode and
521 Pmode differ, C will use the width of ptr_mode as sizetype. But we get
522 far better code using the width of Pmode. Make this here since we need
523 this before we can expand the GNAT types. */
524 size_type_node = gnat_type_for_size (GET_MODE_BITSIZE (Pmode), 0);
525 set_sizetype (size_type_node);
527 /* In Ada, we use an unsigned 8-bit type for the default boolean type. */
/* NOTE(review): the comment above says 8-bit but the code sets precision 1;
   intermediate lines may have been dropped from this listing -- confirm
   against pristine source.  */
528 boolean_type_node = make_node (BOOLEAN_TYPE);
529 TYPE_PRECISION (boolean_type_node) = 1;
530 fixup_unsigned_type (boolean_type_node);
/* The Ada RM size of boolean is 1 bit regardless of object size.  */
531 TYPE_RM_SIZE_NUM (boolean_type_node) = bitsize_int (1);
533 build_common_tree_nodes_2 (0);
535 ptr_void_type_node = build_pointer_type (void_type_node);
538 /* Create the predefined scalar types such as `integer_type_node' needed
539 in the gcc back-end and initialize the global binding level. */
/* Declares the Ada runtime support entities (allocation, exception and
   setjmp/longjmp machinery) used throughout gigi.  NOTE(review): this
   listing has dropped many interior lines (return type, braces, several
   local declarations such as `endlink', `jmpbuf_type', `decl', `name',
   `i'/`j', various `else' lines and continuation arguments) -- the added
   notes below flag the most important gaps.  */
542 init_gigi_decls (tree long_long_float_type, tree exception_type)
545 tree int64_type = gnat_type_for_size (64, 0);
548 /* Set the types that GCC and Gigi use from the front end. We would like
549 to do this for char_type_node, but it needs to correspond to the C
551 if (TREE_CODE (TREE_TYPE (long_long_float_type)) == INTEGER_TYPE)
553 /* In this case, the builtin floating point types are VAX float,
554 so make up a type for use. */
555 longest_float_type_node = make_node (REAL_TYPE);
556 TYPE_PRECISION (longest_float_type_node) = LONG_DOUBLE_TYPE_SIZE;
557 layout_type (longest_float_type_node);
558 create_type_decl (get_identifier ("longest float type"),
559 longest_float_type_node, NULL, false, true, Empty);
/* NOTE(review): the `else` line (original ~560-561) is missing.  */
562 longest_float_type_node = TREE_TYPE (long_long_float_type);
564 except_type_node = TREE_TYPE (exception_type);
566 unsigned_type_node = gnat_type_for_size (INT_TYPE_SIZE, 1);
567 create_type_decl (get_identifier ("unsigned int"), unsigned_type_node,
568 NULL, false, true, Empty);
570 void_type_decl_node = create_type_decl (get_identifier ("void"),
571 void_type_node, NULL, false, true,
574 void_ftype = build_function_type (void_type_node, NULL_TREE);
575 ptr_void_ftype = build_pointer_type (void_ftype);
577 /* Build the special descriptor type and its null node if needed. */
578 if (TARGET_VTABLE_USES_DESCRIPTORS)
580 tree field_list = NULL_TREE, null_list = NULL_TREE;
583 fdesc_type_node = make_node (RECORD_TYPE);
585 for (j = 0; j < TARGET_VTABLE_USES_DESCRIPTORS; j++)
587 tree field = create_field_decl (NULL_TREE, ptr_void_ftype,
588 fdesc_type_node, 0, 0, 0, 1);
589 TREE_CHAIN (field) = field_list;
/* NOTE(review): the `field_list = field;` line (original ~590) is
   missing -- confirm against pristine source.  */
591 null_list = tree_cons (field, null_pointer_node, null_list);
594 finish_record_type (fdesc_type_node, nreverse (field_list), 0, false);
595 null_fdesc_node = gnat_build_constructor (fdesc_type_node, null_list);
598 /* Now declare runtime functions. */
599 endlink = tree_cons (NULL_TREE, void_type_node, NULL_TREE);
601 /* malloc is a function declaration tree for a function to allocate
603 malloc_decl = create_subprog_decl (get_identifier ("__gnat_malloc"),
605 build_function_type (ptr_void_type_node,
606 tree_cons (NULL_TREE,
609 NULL_TREE, false, true, true, NULL,
611 DECL_IS_MALLOC (malloc_decl) = 1;
613 /* malloc32 is a function declaration tree for a function to allocate
614 32bit memory on a 64bit system. Needed only on 64bit VMS. */
615 malloc32_decl = create_subprog_decl (get_identifier ("__gnat_malloc32"),
617 build_function_type (ptr_void_type_node,
618 tree_cons (NULL_TREE,
621 NULL_TREE, false, true, true, NULL,
623 DECL_IS_MALLOC (malloc32_decl) = 1;
625 /* free is a function declaration tree for a function to free memory. */
/* NOTE(review): the `free_decl` assignment target (original ~626) is
   missing; this continuation declares __gnat_free.  */
627 = create_subprog_decl (get_identifier ("__gnat_free"), NULL_TREE,
628 build_function_type (void_type_node,
629 tree_cons (NULL_TREE,
632 NULL_TREE, false, true, true, NULL, Empty);
634 /* This is used for 64-bit multiplication with overflow checking. */
/* NOTE(review): the `mulv64_decl` assignment target (original ~635) is
   missing.  */
636 = create_subprog_decl (get_identifier ("__gnat_mulv64"), NULL_TREE,
637 build_function_type_list (int64_type, int64_type,
638 int64_type, NULL_TREE),
639 NULL_TREE, false, true, true, NULL, Empty);
641 /* Make the types and functions used for exception processing. */
/* NOTE(review): the `jmpbuf_type` assignment target (original ~642) is
   missing; the index 5 gives a 6-element array of Pmode words.  */
643 = build_array_type (gnat_type_for_mode (Pmode, 0),
644 build_index_type (build_int_cst (NULL_TREE, 5)));
645 create_type_decl (get_identifier ("JMPBUF_T"), jmpbuf_type, NULL,
647 jmpbuf_ptr_type = build_pointer_type (jmpbuf_type);
649 /* Functions to get and set the jumpbuf pointer for the current thread. */
/* NOTE(review): the `get_jmpbuf_decl` target line (original ~650) is
   missing.  */
651 = create_subprog_decl
652 (get_identifier ("system__soft_links__get_jmpbuf_address_soft"),
653 NULL_TREE, build_function_type (jmpbuf_ptr_type, NULL_TREE),
654 NULL_TREE, false, true, true, NULL, Empty);
655 /* Avoid creating superfluous edges to __builtin_setjmp receivers. */
656 DECL_PURE_P (get_jmpbuf_decl) = 1;
/* NOTE(review): the `set_jmpbuf_decl` target line (original ~658) is
   missing.  */
659 = create_subprog_decl
660 (get_identifier ("system__soft_links__set_jmpbuf_address_soft"),
662 build_function_type (void_type_node,
663 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
664 NULL_TREE, false, true, true, NULL, Empty);
666 /* Function to get the current exception. */
/* NOTE(review): the `get_excptr_decl` target line (original ~667) is
   missing.  */
668 = create_subprog_decl
669 (get_identifier ("system__soft_links__get_gnat_exception"),
671 build_function_type (build_pointer_type (except_type_node), NULL_TREE),
672 NULL_TREE, false, true, true, NULL, Empty);
673 /* Avoid creating superfluous edges to __builtin_setjmp receivers. */
674 DECL_PURE_P (get_excptr_decl) = 1;
676 /* Functions that raise exceptions. */
/* NOTE(review): the `raise_nodefer_decl` target line (original ~677) is
   missing.  */
678 = create_subprog_decl
679 (get_identifier ("__gnat_raise_nodefer_with_msg"), NULL_TREE,
680 build_function_type (void_type_node,
681 tree_cons (NULL_TREE,
682 build_pointer_type (except_type_node),
684 NULL_TREE, false, true, true, NULL, Empty);
686 /* Dummy objects to materialize "others" and "all others" in the exception
687 tables. These are exported by a-exexpr.adb, so see this unit for the
/* NOTE(review): the decl target lines (original ~690 and ~695, presumably
   `others_decl` / `all_others_decl`) are missing.  */
691 = create_var_decl (get_identifier ("OTHERS"),
692 get_identifier ("__gnat_others_value"),
693 integer_type_node, 0, 1, 0, 1, 1, 0, Empty);
696 = create_var_decl (get_identifier ("ALL_OTHERS"),
697 get_identifier ("__gnat_all_others_value"),
698 integer_type_node, 0, 1, 0, 1, 1, 0, Empty);
700 /* Hooks to call when entering/leaving an exception handler. */
/* NOTE(review): the `begin_handler_decl` / `end_handler_decl` target lines
   (original ~701 and ~709) are missing.  */
702 = create_subprog_decl (get_identifier ("__gnat_begin_handler"), NULL_TREE,
703 build_function_type (void_type_node,
704 tree_cons (NULL_TREE,
707 NULL_TREE, false, true, true, NULL, Empty);
710 = create_subprog_decl (get_identifier ("__gnat_end_handler"), NULL_TREE,
711 build_function_type (void_type_node,
712 tree_cons (NULL_TREE,
715 NULL_TREE, false, true, true, NULL, Empty);
717 /* If in no exception handlers mode, all raise statements are redirected to
718 __gnat_last_chance_handler. No need to redefine raise_nodefer_decl, since
719 this procedure will never be called in this mode. */
720 if (No_Exception_Handlers_Set ())
/* NOTE(review): the `tree decl` declaration line (original ~721-722) is
   missing.  */
723 = create_subprog_decl
724 (get_identifier ("__gnat_last_chance_handler"), NULL_TREE,
725 build_function_type (void_type_node,
726 tree_cons (NULL_TREE,
727 build_pointer_type (char_type_node),
728 tree_cons (NULL_TREE,
731 NULL_TREE, false, true, true, NULL, Empty);
/* Point every raise reason at the single last-chance handler.  */
733 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
734 gnat_raise_decls[i] = decl;
737 /* Otherwise, make one decl for each exception reason. */
738 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
/* NOTE(review): the `char name[17];` declaration and loop braces
   (original ~739-741) are missing.  */
742 sprintf (name, "__gnat_rcheck_%.2d", i);
744 = create_subprog_decl
745 (get_identifier (name), NULL_TREE,
746 build_function_type (void_type_node,
747 tree_cons (NULL_TREE,
750 tree_cons (NULL_TREE,
753 NULL_TREE, false, true, true, NULL, Empty);
756 /* Indicate that these never return. */
757 TREE_THIS_VOLATILE (raise_nodefer_decl) = 1;
758 TREE_SIDE_EFFECTS (raise_nodefer_decl) = 1;
759 TREE_TYPE (raise_nodefer_decl)
760 = build_qualified_type (TREE_TYPE (raise_nodefer_decl),
/* NOTE(review): the TYPE_QUAL_VOLATILE argument line (original ~761) is
   missing here and below at 769.  */
763 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
765 TREE_THIS_VOLATILE (gnat_raise_decls[i]) = 1;
766 TREE_SIDE_EFFECTS (gnat_raise_decls[i]) = 1;
767 TREE_TYPE (gnat_raise_decls[i])
768 = build_qualified_type (TREE_TYPE (gnat_raise_decls[i]),
772 /* setjmp returns an integer and has one operand, which is a pointer to
/* NOTE(review): the `setjmp_decl` target line (original ~774) is
   missing.  */
775 = create_subprog_decl
776 (get_identifier ("__builtin_setjmp"), NULL_TREE,
777 build_function_type (integer_type_node,
778 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
779 NULL_TREE, false, true, true, NULL, Empty);
781 DECL_BUILT_IN_CLASS (setjmp_decl) = BUILT_IN_NORMAL;
782 DECL_FUNCTION_CODE (setjmp_decl) = BUILT_IN_SETJMP;
784 /* update_setjmp_buf updates a setjmp buffer from the current stack pointer
786 update_setjmp_buf_decl
787 = create_subprog_decl
788 (get_identifier ("__builtin_update_setjmp_buf"), NULL_TREE,
789 build_function_type (void_type_node,
790 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
791 NULL_TREE, false, true, true, NULL, Empty);
793 DECL_BUILT_IN_CLASS (update_setjmp_buf_decl) = BUILT_IN_NORMAL;
794 DECL_FUNCTION_CODE (update_setjmp_buf_decl) = BUILT_IN_UPDATE_SETJMP_BUF;
796 main_identifier_node = get_identifier ("main");
798 /* Install the builtins we might need, either internally or as
799 user available facilities for Intrinsic imports. */
800 gnat_install_builtins ();
803 /* Given a record type RECORD_TYPE and a chain of FIELD_DECL nodes FIELDLIST,
804 finish constructing the record or union type. If REP_LEVEL is zero, this
805 record has no representation clause and so will be entirely laid out here.
806 If REP_LEVEL is one, this record has a representation clause and has been
807 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
808 this record is derived from a parent record and thus inherits its layout;
809 only make a pass on the fields to finalize them. If DO_NOT_FINALIZE is
810 true, the record type is expected to be modified afterwards so it will
811 not be sent to the back-end for finalization. */
/* NOTE(review): this listing has dropped the return type, braces, the
   `tree field;` local, several `if (rep_level > 0)` / `else` / `switch`
   skeleton lines and closing braces; notes below mark the larger gaps.  */
814 finish_record_type (tree record_type, tree fieldlist, int rep_level,
815 bool do_not_finalize)
817 enum tree_code code = TREE_CODE (record_type);
818 tree name = TYPE_NAME (record_type);
819 tree ada_size = bitsize_zero_node;
820 tree size = bitsize_zero_node;
/* Remember what was set before entry so rep'ed layouts are preserved.  */
821 bool had_size = TYPE_SIZE (record_type) != 0;
822 bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
823 bool had_align = TYPE_ALIGN (record_type) != 0;
826 if (name && TREE_CODE (name) == TYPE_DECL)
827 name = DECL_NAME (name);
829 TYPE_FIELDS (record_type) = fieldlist;
830 TYPE_STUB_DECL (record_type) = build_decl (TYPE_DECL, name, record_type);
832 /* We don't need both the typedef name and the record name output in
833 the debugging information, since they are the same. */
834 DECL_ARTIFICIAL (TYPE_STUB_DECL (record_type)) = 1;
836 /* Globally initialize the record first. If this is a rep'ed record,
837 that just means some initializations; otherwise, layout the record. */
/* NOTE(review): the `if (rep_level > 0)` header and brace (original
   ~838-839) are missing.  */
840 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
841 TYPE_MODE (record_type) = BLKmode;
/* NOTE(review): the `if (!had_size_unit)` / `if (!had_size)` guards
   (original ~842-845) are missing around the next two assignments.  */
844 TYPE_SIZE_UNIT (record_type) = size_zero_node;
846 TYPE_SIZE (record_type) = bitsize_zero_node;
848 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
849 out just like a UNION_TYPE, since the size will be fixed. */
850 else if (code == QUAL_UNION_TYPE)
/* NOTE(review): the SET_TYPE_UNION branch body and the `else` with the
   normal layout path (original ~851-854) are missing.  */
855 /* Ensure there isn't a size already set. There can be in an error
856 case where there is a rep clause but all fields have errors and
857 no longer have a position. */
858 TYPE_SIZE (record_type) = 0;
859 layout_type (record_type);
862 /* At this point, the position and size of each field is known. It was
863 either set before entry by a rep clause, or by laying out the type above.
865 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
866 to compute the Ada size; the GCC size and alignment (for rep'ed records
867 that are not padding types); and the mode (for rep'ed records). We also
868 clear the DECL_BIT_FIELD indication for the cases we know have not been
869 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
871 if (code == QUAL_UNION_TYPE)
872 fieldlist = nreverse (fieldlist);
874 for (field = fieldlist; field; field = TREE_CHAIN (field))
876 tree type = TREE_TYPE (field);
877 tree pos = bit_position (field);
878 tree this_size = DECL_SIZE (field);
/* NOTE(review): the `tree this_ada_size;` declaration (original ~879-880)
   is missing.  */
881 if ((TREE_CODE (type) == RECORD_TYPE
882 || TREE_CODE (type) == UNION_TYPE
883 || TREE_CODE (type) == QUAL_UNION_TYPE)
884 && !TYPE_IS_FAT_POINTER_P (type)
885 && !TYPE_CONTAINS_TEMPLATE_P (type)
886 && TYPE_ADA_SIZE (type))
887 this_ada_size = TYPE_ADA_SIZE (type);
/* NOTE(review): the `else` line (original ~888) is missing.  */
889 this_ada_size = this_size;
891 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
892 if (DECL_BIT_FIELD (field)
893 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
895 unsigned int align = TYPE_ALIGN (type);
897 /* In the general case, type alignment is required. */
898 if (value_factor_p (pos, align))
900 /* The enclosing record type must be sufficiently aligned.
901 Otherwise, if no alignment was specified for it and it
902 has been laid out already, bump its alignment to the
903 desired one if this is compatible with its size. */
904 if (TYPE_ALIGN (record_type) >= align)
906 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
907 DECL_BIT_FIELD (field) = 0;
/* NOTE(review): the `else if (!had_align ...` header (original ~908-910)
   for this alignment-bump branch is missing.  */
911 && value_factor_p (TYPE_SIZE (record_type), align))
913 TYPE_ALIGN (record_type) = align;
914 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
915 DECL_BIT_FIELD (field) = 0;
919 /* In the non-strict alignment case, only byte alignment is. */
920 if (!STRICT_ALIGNMENT
921 && DECL_BIT_FIELD (field)
922 && value_factor_p (pos, BITS_PER_UNIT))
923 DECL_BIT_FIELD (field) = 0;
926 /* If we still have DECL_BIT_FIELD set at this point, we know the field
927 is technically not addressable. Except that it can actually be
928 addressed if the field is BLKmode and happens to be properly
930 DECL_NONADDRESSABLE_P (field)
931 |= DECL_BIT_FIELD (field) && DECL_MODE (field) != BLKmode;
933 /* A type must be as aligned as its most aligned field that is not
934 a bit-field. But this is already enforced by layout_type. */
935 if (rep_level > 0 && !DECL_BIT_FIELD (field))
936 TYPE_ALIGN (record_type)
937 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
/* NOTE(review): the `switch (code)` header and `case UNION_TYPE:`
   (original ~939-941) are missing before the next two lines.  */
942 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
943 size = size_binop (MAX_EXPR, size, this_size);
946 case QUAL_UNION_TYPE:
/* NOTE(review): the `ada_size` assignment target (original ~947) and the
   second fold_build3 argument lines (~951-952) are missing.  */
948 = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
949 this_ada_size, ada_size);
950 size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
955 /* Since we know here that all fields are sorted in order of
956 increasing bit position, the size of the record is one
957 higher than the ending bit of the last field processed
958 unless we have a rep clause, since in that case we might
959 have a field outside a QUAL_UNION_TYPE that has a higher ending
960 position. So use a MAX in that case. Also, if this field is a
961 QUAL_UNION_TYPE, we need to take into account the previous size in
962 the case of empty variants. */
/* NOTE(review): the `ada_size` / `size` assignment targets (original
   ~963 and ~966) are missing before these merge_sizes continuations.  */
964 = merge_sizes (ada_size, pos, this_ada_size,
965 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
967 = merge_sizes (size, pos, this_size,
968 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
/* Restore the field chain to its original order.  */
976 if (code == QUAL_UNION_TYPE)
977 nreverse (fieldlist);
981 /* If this is a padding record, we never want to make the size smaller
982 than what was specified in it, if any. */
983 if (TREE_CODE (record_type) == RECORD_TYPE
984 && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
985 size = TYPE_SIZE (record_type);
987 /* Now set any of the values we've just computed that apply. */
988 if (!TYPE_IS_FAT_POINTER_P (record_type)
989 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
990 SET_TYPE_ADA_SIZE (record_type, ada_size);
/* NOTE(review): the `if (rep_level > 0)` guard and brace (original
   ~991-993) before the size/alignment block are missing.  */
994 tree size_unit = had_size_unit
995 ? TYPE_SIZE_UNIT (record_type)
/* NOTE(review): the `: variable_size (...` continuation and the
   bitsize_unit_node argument (original ~996/998) are missing.  */
997 size_binop (CEIL_DIV_EXPR, size,
999 unsigned int align = TYPE_ALIGN (record_type);
1001 TYPE_SIZE (record_type) = variable_size (round_up (size, align));
1002 TYPE_SIZE_UNIT (record_type)
1003 = variable_size (round_up (size_unit, align / BITS_PER_UNIT));
1005 compute_record_mode (record_type);
/* Emit debug info and finalize unless the caller will modify further.  */
1009 if (!do_not_finalize)
1010 rest_of_record_type_compilation (record_type);
1013 /* Wrap up compilation of RECORD_TYPE, i.e. most notably output all
1014 the debug information associated with it. It need not be invoked
1015 directly in most cases since finish_record_type takes care of doing
1016 so, unless explicitly requested not to through DO_NOT_FINALIZE. */
1019 rest_of_record_type_compilation (tree record_type)
/* NOTE(review): this excerpt is elided (the original line numbers jump),
   so some declarations, braces and else-arms are not visible here.  */
1021 tree fieldlist = TYPE_FIELDS (record_type);
1023 enum tree_code code = TREE_CODE (record_type);
1024 bool var_size = false;
1026 for (field = fieldlist; field; field = TREE_CHAIN (field))
1028 /* We need to make an XVE/XVU record if any field has variable size,
1029 whether or not the record does. For example, if we have a union,
1030 it may be that all fields, rounded up to the alignment, have the
1031 same size, in which case we'll use that size. But the debug
1032 output routines (except Dwarf2) won't be able to output the fields,
1033 so we need to make the special record. */
1034 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
1035 /* If a field has a non-constant qualifier, the record will have
1036 variable size too. */
1037 || (code == QUAL_UNION_TYPE
1038 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
1045 /* If this record is of variable size, rename it so that the
1046 debugger knows it is and make a new, parallel, record
1047 that tells the debugger how the record is laid out. See
1048 exp_dbug.ads. But don't do this for records that are padding
1049 since they confuse GDB. */
1051 && !(TREE_CODE (record_type) == RECORD_TYPE
1052 && TYPE_IS_PADDING_P (record_type)))
1054 tree new_record_type
1055 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
1056 ? UNION_TYPE : TREE_CODE (record_type))
1057 tree orig_name = TYPE_NAME (record_type);
1059 = (TREE_CODE (orig_name) == TYPE_DECL ? DECL_NAME (orig_name)
1062 = concat_id_with_name (orig_id,
1063 TREE_CODE (record_type) == QUAL_UNION_TYPE
1065 tree last_pos = bitsize_zero_node;
1067 tree prev_old_field = 0;
1069 TYPE_NAME (new_record_type) = new_id;
1070 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
1071 TYPE_STUB_DECL (new_record_type)
1072 = build_decl (TYPE_DECL, new_id, new_record_type);
1073 DECL_ARTIFICIAL (TYPE_STUB_DECL (new_record_type)) = 1;
1074 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
1075 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
1076 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
1077 TYPE_SIZE_UNIT (new_record_type)
1078 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
1080 add_parallel_type (TYPE_STUB_DECL (record_type), new_record_type);
1082 /* Now scan all the fields, replacing each field with a new
1083 field corresponding to the new encoding. */
1084 for (old_field = TYPE_FIELDS (record_type); old_field;
1085 old_field = TREE_CHAIN (old_field))
1087 tree field_type = TREE_TYPE (old_field);
1088 tree field_name = DECL_NAME (old_field);
1090 tree curpos = bit_position (old_field);
1092 unsigned int align = 0;
1095 /* See how the position was modified from the last position.
1097 There are two basic cases we support: a value was added
1098 to the last position or the last position was rounded to
1099 a boundary and they something was added. Check for the
1100 first case first. If not, see if there is any evidence
1101 of rounding. If so, round the last position and try
1104 If this is a union, the position can be taken as zero. */
1106 /* Some computations depend on the shape of the position expression,
1107 so strip conversions to make sure it's exposed. */
1108 curpos = remove_conversions (curpos, true);
1110 if (TREE_CODE (new_record_type) == UNION_TYPE)
1111 pos = bitsize_zero_node, align = 0;
1113 pos = compute_related_constant (curpos, last_pos);
1115 if (!pos && TREE_CODE (curpos) == MULT_EXPR
1116 && host_integerp (TREE_OPERAND (curpos, 1), 1))
1118 tree offset = TREE_OPERAND (curpos, 0);
1119 align = tree_low_cst (TREE_OPERAND (curpos, 1), 1);
1121 /* An offset which is a bitwise AND with a negative power of 2
1122 means an alignment corresponding to this power of 2. */
1123 offset = remove_conversions (offset, true);
1124 if (TREE_CODE (offset) == BIT_AND_EXPR
1125 && host_integerp (TREE_OPERAND (offset, 1), 0)
1126 && tree_int_cst_sgn (TREE_OPERAND (offset, 1)) < 0)
1129 = - tree_low_cst (TREE_OPERAND (offset, 1), 0);
1130 if (exact_log2 (pow) > 0)
1134 pos = compute_related_constant (curpos,
1135 round_up (last_pos, align));
1137 else if (!pos && TREE_CODE (curpos) == PLUS_EXPR
1138 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
1139 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
1140 && host_integerp (TREE_OPERAND
1141 (TREE_OPERAND (curpos, 0), 1),
1146 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
1147 pos = compute_related_constant (curpos,
1148 round_up (last_pos, align));
1150 else if (potential_alignment_gap (prev_old_field, old_field,
1153 align = TYPE_ALIGN (field_type);
1154 pos = compute_related_constant (curpos,
1155 round_up (last_pos, align));
1158 /* If we can't compute a position, set it to zero.
1160 ??? We really should abort here, but it's too much work
1161 to get this correct for all cases. */
1164 pos = bitsize_zero_node;
1166 /* See if this type is variable-sized and make a pointer type
1167 and indicate the indirection if so. Beware that the debug
1168 back-end may adjust the position computed above according
1169 to the alignment of the field type, i.e. the pointer type
1170 in this case, if we don't preventively counter that. */
1171 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
1173 field_type = build_pointer_type (field_type);
1174 if (align != 0 && TYPE_ALIGN (field_type) > align)
1176 field_type = copy_node (field_type);
1177 TYPE_ALIGN (field_type) = align;
1182 /* Make a new field name, if necessary. */
1183 if (var || align != 0)
/* Suffix encodes the XV... convention: 'L' for variable-sized (indirect)
   fields, 'A' plus the byte alignment otherwise.  See exp_dbug.ads.  */
1188 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
1189 align / BITS_PER_UNIT);
1191 strcpy (suffix, "XVL");
1193 field_name = concat_id_with_name (field_name, suffix);
1196 new_field = create_field_decl (field_name, field_type,
1198 DECL_SIZE (old_field), pos, 0);
/* Fields are prepended here and the whole list is nreverse'd after the
   loop, restoring source order.  */
1199 TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
1200 TYPE_FIELDS (new_record_type) = new_field;
1202 /* If old_field is a QUAL_UNION_TYPE, take its size as being
1203 zero. The only time it's not the last field of the record
1204 is when there are other components at fixed positions after
1205 it (meaning there was a rep clause for every field) and we
1206 want to be able to encode them. */
1207 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
1208 (TREE_CODE (TREE_TYPE (old_field))
1211 : DECL_SIZE (old_field));
1212 prev_old_field = old_field;
1215 TYPE_FIELDS (new_record_type)
1216 = nreverse (TYPE_FIELDS (new_record_type));
1218 rest_of_type_decl_compilation (TYPE_STUB_DECL (new_record_type));
1221 rest_of_type_decl_compilation (TYPE_STUB_DECL (record_type));
1224 /* Append PARALLEL_TYPE on the chain of parallel types for decl. */
1227 add_parallel_type (tree decl, tree parallel_type)
/* Walk to the end of the existing chain so PARALLEL_TYPE is appended
   rather than prepended.  */
1231 while (DECL_PARALLEL_TYPE (d))
1232 d = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (d));
1234 SET_DECL_PARALLEL_TYPE (d, parallel_type);
1237 /* Return the parallel type associated to a type, if any. */
1240 get_parallel_type (tree type)
1242 if (TYPE_STUB_DECL (type))
1243 return DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type));
/* NOTE(review): the fall-through return for a type with no stub decl is
   not visible in this excerpt.  */
1248 /* Utility function of above to merge LAST_SIZE, the previous size of a record
1249 with FIRST_BIT and SIZE that describe a field. SPECIAL is nonzero
1250 if this represents a QUAL_UNION_TYPE in which case we must look for
1251 COND_EXPRs and replace a value of zero with the old size. If HAS_REP
1252 is nonzero, we must take the MAX of the end position of this field
1253 with LAST_SIZE. In all other cases, we use FIRST_BIT plus SIZE.
1255 We return an expression for the size. */
1258 merge_sizes (tree last_size, tree first_bit, tree size, bool special,
1261 tree type = TREE_TYPE (last_size);
/* Simple case: no qualifier-driven COND_EXPR to recurse into; end of the
   field is FIRST_BIT + SIZE, optionally MAXed with the previous size.  */
1264 if (!special || TREE_CODE (size) != COND_EXPR)
1266 new = size_binop (PLUS_EXPR, first_bit, size);
1268 new = size_binop (MAX_EXPR, last_size, new);
/* QUAL_UNION_TYPE case: rebuild the COND_EXPR, merging each arm and
   keeping LAST_SIZE where an arm's size is zero (empty variant).
   NOTE: the identifier 'new' keeps this file C-only (keyword in C++).  */
1272 new = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
1273 integer_zerop (TREE_OPERAND (size, 1))
1274 ? last_size : merge_sizes (last_size, first_bit,
1275 TREE_OPERAND (size, 1),
1277 integer_zerop (TREE_OPERAND (size, 2))
1278 ? last_size : merge_sizes (last_size, first_bit,
1279 TREE_OPERAND (size, 2),
1282 /* We don't need any NON_VALUE_EXPRs and they can confuse us (especially
1283 when fed through substitute_in_expr) into thinking that a constant
1284 size is not constant. */
1285 while (TREE_CODE (new) == NON_LVALUE_EXPR)
1286 new = TREE_OPERAND (new, 0);
1291 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1292 related by the addition of a constant. Return that constant if so. */
1295 compute_related_constant (tree op0, tree op1)
1297 tree op0_var, op1_var;
1298 tree op0_con = split_plus (op0, &op0_var);
1299 tree op1_con = split_plus (op1, &op1_var);
1300 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
/* If the variable parts agree, the difference of the constant parts is
   the constant relating OP0 and OP1.  */
1302 if (operand_equal_p (op0_var, op1_var, 0))
1304 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
/* NOTE(review): the return statements for these branches and the
   final "not related" return are elided from this excerpt.  */
1310 /* Utility function of above to split a tree OP which may be a sum, into a
1311 constant part, which is returned, and a variable part, which is stored
1312 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1316 split_plus (tree in, tree *pvar)
1318 /* Strip NOPS in order to ease the tree traversal and maximize the
1319 potential for constant or plus/minus discovery. We need to be careful
1320 to always return and set *pvar to bitsizetype trees, but it's worth
/* Default: treat the whole expression as the variable part with a zero
   constant part.  Refined below for INTEGER_CST and PLUS/MINUS.  */
1324 *pvar = convert (bitsizetype, in);
1326 if (TREE_CODE (in) == INTEGER_CST)
1328 *pvar = bitsize_zero_node;
1329 return convert (bitsizetype, in);
1331 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1333 tree lhs_var, rhs_var;
1334 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1335 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
/* If neither operand contributed a constant, keep *PVAR as set above
   and report a zero constant part.  */
1337 if (lhs_var == TREE_OPERAND (in, 0)
1338 && rhs_var == TREE_OPERAND (in, 1))
1339 return bitsize_zero_node;
1341 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1342 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
1345 return bitsize_zero_node;
1348 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1349 subprogram. If it is void_type_node, then we are dealing with a procedure,
1350 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1351 PARM_DECL nodes that are the subprogram arguments. CICO_LIST is the
1352 copy-in/copy-out list to be stored into TYPE_CICO_LIST.
1353 RETURNS_UNCONSTRAINED is true if the function returns an unconstrained
1354 object. RETURNS_BY_REF is true if the function returns by reference.
1355 RETURNS_BY_TARGET_PTR is true if the function is to be passed (as its
1356 first parameter) the address of the place to copy its result. */
1359 create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
1360 bool returns_unconstrained, bool returns_by_ref,
1361 bool returns_by_target_ptr)
1363 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1364 the subprogram formal parameters. This list is generated by traversing the
1365 input list of PARM_DECL nodes. */
1366 tree param_type_list = NULL;
1370 for (param_decl = param_decl_list; param_decl;
1371 param_decl = TREE_CHAIN (param_decl))
1372 param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
1375 /* The list of the function parameter types has to be terminated by the void
1376 type to signal to the back-end that we are not dealing with a variable
1377 parameter subprogram, but that the subprogram has a fixed number of
1379 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1381 /* The list of argument types has been created in reverse
1383 param_type_list = nreverse (param_type_list);
1385 type = build_function_type (return_type, param_type_list);
1387 /* TYPE may have been shared since GCC hashes types. If it has a CICO_LIST
1388 or the new type should, make a copy of TYPE. Likewise for
1389 RETURNS_UNCONSTRAINED and RETURNS_BY_REF. */
1390 if (TYPE_CI_CO_LIST (type) || cico_list
1391 || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
1392 || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref
1393 || TYPE_RETURNS_BY_TARGET_PTR_P (type) != returns_by_target_ptr)
1394 type = copy_type (type);
1396 TYPE_CI_CO_LIST (type) = cico_list;
1397 TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
1398 TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
1399 TYPE_RETURNS_BY_TARGET_PTR_P (type) = returns_by_target_ptr;
1403 /* Return a copy of TYPE but safe to modify in any way. */
1406 copy_type (tree type)
1408 tree new = copy_node (type);
1410 /* copy_node clears this field instead of copying it, because it is
1411 aliased with TREE_CHAIN. */
1412 TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type);
/* Detach the copy from the original's pointer/reference caches and make
   it its own main variant, so later modifications cannot leak back.  */
1414 TYPE_POINTER_TO (new) = 0;
1415 TYPE_REFERENCE_TO (new) = 0;
1416 TYPE_MAIN_VARIANT (new) = new;
1417 TYPE_NEXT_VARIANT (new) = 0;
1422 /* Return an INTEGER_TYPE of SIZETYPE with range MIN to MAX and whose
1423 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position of
1427 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
1429 /* First build a type for the desired range. */
1430 tree type = build_index_2_type (min, max);
1432 /* If this type has the TYPE_INDEX_TYPE we want, return it. Otherwise, if it
1433 doesn't have TYPE_INDEX_TYPE set, set it to INDEX. If TYPE_INDEX_TYPE
1434 is set, but not to INDEX, make a copy of this type with the requested
1435 index type. Note that we have no way of sharing these types, but that's
1436 only a small hole. */
1437 if (TYPE_INDEX_TYPE (type) == index)
1439 else if (TYPE_INDEX_TYPE (type))
1440 type = copy_type (type);
1442 SET_TYPE_INDEX_TYPE (type, index);
/* Emit an (unnamed, artificial) TYPE_DECL so debug info is generated.  */
1443 create_type_decl (NULL_TREE, type, NULL, true, false, gnat_node);
1447 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type (a character
1448 string) and TYPE is a ..._TYPE node giving its data type.
1449 ARTIFICIAL_P is true if this is a declaration that was generated
1450 by the compiler. DEBUG_INFO_P is true if we need to write debugging
1451 information about this type. GNAT_NODE is used for the position of
1455 create_type_decl (tree type_name, tree type, struct attrib *attr_list,
1456 bool artificial_p, bool debug_info_p, Node_Id gnat_node)
1458 tree type_decl = build_decl (TYPE_DECL, type_name, type);
1459 enum tree_code code = TREE_CODE (type);
1461 DECL_ARTIFICIAL (type_decl) = artificial_p;
/* Dummy types are placeholders; don't enter their decls in a scope.  */
1463 if (!TYPE_IS_DUMMY_P (type))
1464 gnat_pushdecl (type_decl, gnat_node);
1466 process_attributes (type_decl, attr_list);
1468 /* Pass type declaration information to the debugger unless this is an
1469 UNCONSTRAINED_ARRAY_TYPE, which the debugger does not support,
1470 and ENUMERAL_TYPE or RECORD_TYPE which is handled separately, or
1471 type for which debugging information was not requested. */
1472 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
1473 DECL_IGNORED_P (type_decl) = 1;
1474 else if (code != ENUMERAL_TYPE
1475 && (code != RECORD_TYPE || TYPE_IS_FAT_POINTER_P (type))
1476 && !((code == POINTER_TYPE || code == REFERENCE_TYPE)
1477 && TYPE_IS_DUMMY_P (TREE_TYPE (type))))
1478 rest_of_type_decl_compilation (type_decl);
1483 /* Return a VAR_DECL or CONST_DECL node.
1485 VAR_NAME gives the name of the variable. ASM_NAME is its assembler name
1486 (if provided). TYPE is its data type (a GCC ..._TYPE node). VAR_INIT is
1487 the GCC tree for an optional initial expression; NULL_TREE if none.
1489 CONST_FLAG is true if this variable is constant, in which case we might
1490 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
1492 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
1493 definition to be made visible outside of the current compilation unit, for
1494 instance variable definitions in a package specification.
1496 EXTERN_FLAG is nonzero when processing an external variable declaration (as
1497 opposed to a definition: no storage is to be allocated for the variable).
1499 STATIC_FLAG is only relevant when not at top level. In that case
1500 it indicates whether to always allocate storage to the variable.
1502 GNAT_NODE is used for the position of the decl. */
1505 create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
1506 bool const_flag, bool public_flag, bool extern_flag,
1507 bool static_flag, bool const_decl_allowed_p,
1508 struct attrib *attr_list, Node_Id gnat_node)
/* init_const: whether VAR_INIT is a compatible, link-time (for static
   storage) or compile-time (otherwise) constant initializer.  */
1512 && gnat_types_compatible_p (type, TREE_TYPE (var_init))
1513 && (global_bindings_p () || static_flag
1514 ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init)) != 0
1515 : TREE_CONSTANT (var_init)));
1517 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
1518 case the initializer may be used in-lieu of the DECL node (as done in
1519 Identifier_to_gnu). This is useful to prevent the need of elaboration
1520 code when an identifier for which such a decl is made is in turn used as
1521 an initializer. We used to rely on CONST vs VAR_DECL for this purpose,
1522 but extra constraints apply to this choice (see below) and are not
1523 relevant to the distinction we wish to make. */
1524 bool constant_p = const_flag && init_const;
1526 /* The actual DECL node. CONST_DECL was initially intended for enumerals
1527 and may be used for scalars in general but not for aggregates. */
1529 = build_decl ((constant_p && const_decl_allowed_p
1530 && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
1533 /* If this is external, throw away any initializations (they will be done
1534 elsewhere) unless this is a constant for which we would like to remain
1535 able to get the initializer. If we are defining a global here, leave a
1536 constant initialization and save any variable elaborations for the
1537 elaboration routine. If we are just annotating types, throw away the
1538 initialization if it isn't a constant. */
1539 if ((extern_flag && !constant_p)
1540 || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
1541 var_init = NULL_TREE;
1543 /* At the global level, an initializer requiring code to be generated
1544 produces elaboration statements. Check that such statements are allowed,
1545 that is, not violating a No_Elaboration_Code restriction. */
1546 if (global_bindings_p () && var_init != 0 && ! init_const)
1547 Check_Elaboration_Code_Allowed (gnat_node);
1549 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
1550 try to fiddle with DECL_COMMON. However, on platforms that don't
1551 support global BSS sections, uninitialized global variables would
1552 go in DATA instead, thus increasing the size of the executable. */
1554 && TREE_CODE (var_decl) == VAR_DECL
1555 && !have_global_bss_p ())
1556 DECL_COMMON (var_decl) = 1;
1557 DECL_INITIAL (var_decl) = var_init;
1558 TREE_READONLY (var_decl) = const_flag;
1559 DECL_EXTERNAL (var_decl) = extern_flag;
1560 TREE_PUBLIC (var_decl) = public_flag || extern_flag;
1561 TREE_CONSTANT (var_decl) = constant_p;
1562 TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
1563 = TYPE_VOLATILE (type);
1565 /* If it's public and not external, always allocate storage for it.
1566 At the global binding level we need to allocate static storage for the
1567 variable if and only if it's not external. If we are not at the top level
1568 we allocate automatic storage unless requested not to. */
1569 TREE_STATIC (var_decl)
1570 = !extern_flag && (public_flag || static_flag || global_bindings_p ());
1572 if (asm_name && VAR_OR_FUNCTION_DECL_P (var_decl))
1573 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
1575 process_attributes (var_decl, attr_list);
1577 /* Add this decl to the current binding level. */
1578 gnat_pushdecl (var_decl, gnat_node);
1580 if (TREE_SIDE_EFFECTS (var_decl))
1581 TREE_ADDRESSABLE (var_decl) = 1;
/* CONST_DECLs never need storage; for the rest, either finalize at file
   scope or expand as a local.  */
1583 if (TREE_CODE (var_decl) != CONST_DECL)
1585 if (global_bindings_p ())
1586 rest_of_decl_compilation (var_decl, true, 0);
1589 expand_decl (var_decl);
1594 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
1597 aggregate_type_contains_array_p (tree type)
1599 switch (TREE_CODE (type))
1603 case QUAL_UNION_TYPE:
/* Recurse into aggregate fields looking for a nested array.  */
1606 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1607 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
1608 && aggregate_type_contains_array_p (TREE_TYPE (field)))
1621 /* Returns a FIELD_DECL node. FIELD_NAME the field name, FIELD_TYPE is its
1622 type, and RECORD_TYPE is the type of the parent. PACKED is nonzero if
1623 this field is in a record type with a "pragma pack". If SIZE is nonzero
1624 it is the specified size for this field. If POS is nonzero, it is the bit
1625 position. If ADDRESSABLE is nonzero, it means we are allowed to take
1626 the address of this field for aliasing purposes. If it is negative, we
1627 should not make a bitfield, which is used by make_aligning_type. */
1630 create_field_decl (tree field_name, tree field_type, tree record_type,
1631 int packed, tree size, tree pos, int addressable)
1633 tree field_decl = build_decl (FIELD_DECL, field_name, field_type);
1635 DECL_CONTEXT (field_decl) = record_type;
1636 TREE_READONLY (field_decl) = TYPE_READONLY (field_type);
1638 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
1639 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
1640 Likewise for an aggregate without specified position that contains an
1641 array, because in this case slices of variable length of this array
1642 must be handled by GCC and variable-sized objects need to be aligned
1643 to at least a byte boundary. */
1644 if (packed && (TYPE_MODE (field_type) == BLKmode
1646 && AGGREGATE_TYPE_P (field_type)
1647 && aggregate_type_contains_array_p (field_type))))
1648 DECL_ALIGN (field_decl) = BITS_PER_UNIT;
1650 /* If a size is specified, use it. Otherwise, if the record type is packed
1651 compute a size to use, which may differ from the object's natural size.
1652 We always set a size in this case to trigger the checks for bitfield
1653 creation below, which is typically required when no position has been
1656 size = convert (bitsizetype, size);
1657 else if (packed == 1)
1659 size = rm_size (field_type);
1661 /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
1663 if (TREE_CODE (size) == INTEGER_CST
1664 && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
1665 size = round_up (size, BITS_PER_UNIT);
1668 /* If we may, according to ADDRESSABLE, make a bitfield if a size is
1669 specified for two reasons: first if the size differs from the natural
1670 size. Second, if the alignment is insufficient. There are a number of
1671 ways the latter can be true.
1673 We never make a bitfield if the type of the field has a nonconstant size,
1674 because no such entity requiring bitfield operations should reach here.
1676 We do *preventively* make a bitfield when there might be the need for it
1677 but we don't have all the necessary information to decide, as is the case
1678 of a field with no specified position in a packed record.
1680 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
1681 in layout_decl or finish_record_type to clear the bit_field indication if
1682 it is in fact not needed. */
1683 if (addressable >= 0
1685 && TREE_CODE (size) == INTEGER_CST
1686 && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
1687 && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
1688 || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
1690 || (TYPE_ALIGN (record_type) != 0
1691 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
1693 DECL_BIT_FIELD (field_decl) = 1;
1694 DECL_SIZE (field_decl) = size;
1695 if (!packed && !pos)
1696 DECL_ALIGN (field_decl)
1697 = (TYPE_ALIGN (record_type) != 0
1698 ? MIN (TYPE_ALIGN (record_type), TYPE_ALIGN (field_type))
1699 : TYPE_ALIGN (field_type));
1702 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
1704 /* Bump the alignment if need be, either for bitfield/packing purposes or
1705 to satisfy the type requirements if no such consideration applies. When
1706 we get the alignment from the type, indicate if this is from an explicit
1707 user request, which prevents stor-layout from lowering it later on. */
1710 = (DECL_BIT_FIELD (field_decl) ? 1
1711 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT : 0);
1713 if (bit_align > DECL_ALIGN (field_decl))
1714 DECL_ALIGN (field_decl) = bit_align;
1715 else if (!bit_align && TYPE_ALIGN (field_type) > DECL_ALIGN (field_decl))
1717 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
1718 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (field_type);
1724 /* We need to pass in the alignment the DECL is known to have.
1725 This is the lowest-order bit set in POS, but no more than
1726 the alignment of the record, if one is specified. Note
1727 that an alignment of 0 is taken as infinite. */
1728 unsigned int known_align;
/* x & -x isolates the lowest set bit, i.e. the largest power of two
   dividing the constant position.  */
1730 if (host_integerp (pos, 1))
1731 known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
1733 known_align = BITS_PER_UNIT;
1735 if (TYPE_ALIGN (record_type)
1736 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
1737 known_align = TYPE_ALIGN (record_type);
1739 layout_decl (field_decl, known_align);
1740 SET_DECL_OFFSET_ALIGN (field_decl,
1741 host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
1743 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
1744 &DECL_FIELD_BIT_OFFSET (field_decl),
1745 DECL_OFFSET_ALIGN (field_decl), pos);
1747 DECL_HAS_REP_P (field_decl) = 1;
1750 /* In addition to what our caller says, claim the field is addressable if we
1751 know that its type is not suitable.
1753 The field may also be "technically" nonaddressable, meaning that even if
1754 we attempt to take the field's address we will actually get the address
1755 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
1756 value we have at this point is not accurate enough, so we don't account
1757 for this here and let finish_record_type decide. */
1758 if (!addressable && !type_for_nonaliased_component_p (field_type))
1761 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
1766 /* Returns a PARM_DECL node. PARAM_NAME is the name of the parameter,
1767 PARAM_TYPE is its type. READONLY is true if the parameter is
1768 readonly (either an In parameter or an address of a pass-by-ref
1772 create_param_decl (tree param_name, tree param_type, bool readonly)
1774 tree param_decl = build_decl (PARM_DECL, param_name, param_type);
1776 /* Honor targetm.calls.promote_prototypes(), as not doing so can
1777 lead to various ABI violations. */
1778 if (targetm.calls.promote_prototypes (param_type)
1779 && (TREE_CODE (param_type) == INTEGER_TYPE
1780 || TREE_CODE (param_type) == ENUMERAL_TYPE
1781 || TREE_CODE (param_type) == BOOLEAN_TYPE)
1782 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
1784 /* We have to be careful about biased types here. Make a subtype
1785 of integer_type_node with the proper biasing. */
1786 if (TREE_CODE (param_type) == INTEGER_TYPE
1787 && TYPE_BIASED_REPRESENTATION_P (param_type))
1790 = copy_type (build_range_type (integer_type_node,
1791 TYPE_MIN_VALUE (param_type),
1792 TYPE_MAX_VALUE (param_type)));
1794 TYPE_BIASED_REPRESENTATION_P (param_type) = 1;
/* Non-biased narrow scalar: promote straight to int.  */
1797 param_type = integer_type_node;
/* DECL_ARG_TYPE records the type used when actually passing the
   argument, which may be the promoted one from above.  */
1800 DECL_ARG_TYPE (param_decl) = param_type;
1801 TREE_READONLY (param_decl) = readonly;
1805 /* Given a DECL and ATTR_LIST, process the listed attributes. */
1808 process_attributes (tree decl, struct attrib *attr_list)
1810 for (; attr_list; attr_list = attr_list->next)
1811 switch (attr_list->type)
1813 case ATTR_MACHINE_ATTRIBUTE:
/* Forward generic machine attributes to the common handler.  */
1814 decl_attributes (&decl, tree_cons (attr_list->name, attr_list->args,
1816 ATTR_FLAG_TYPE_IN_PLACE);
1819 case ATTR_LINK_ALIAS:
1820 if (! DECL_EXTERNAL (decl))
1822 TREE_STATIC (decl) = 1;
1823 assemble_alias (decl, attr_list->name);
1827 case ATTR_WEAK_EXTERNAL:
1829 declare_weak (decl);
/* Diagnose when the target has no weak-symbol support.  */
1831 post_error ("?weak declarations not supported on this target",
1832 attr_list->error_point);
1835 case ATTR_LINK_SECTION:
1836 if (targetm.have_named_sections)
1838 DECL_SECTION_NAME (decl)
1839 = build_string (IDENTIFIER_LENGTH (attr_list->name),
1840 IDENTIFIER_POINTER (attr_list->name));
1841 DECL_COMMON (decl) = 0;
1844 post_error ("?section attributes are not supported for this target",
1845 attr_list->error_point);
1848 case ATTR_LINK_CONSTRUCTOR:
1849 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1850 TREE_USED (decl) = 1;
1853 case ATTR_LINK_DESTRUCTOR:
1854 DECL_STATIC_DESTRUCTOR (decl) = 1;
1855 TREE_USED (decl) = 1;
1860 /* Record a global renaming pointer. */
1863 record_global_renaming_pointer (tree decl)
/* Only decls that actually rename an object belong in this table.  */
1865 gcc_assert (DECL_RENAMED_OBJECT (decl));
1866 VEC_safe_push (tree, gc, global_renaming_pointers, decl);
1869 /* Invalidate the global renaming pointers. */
1872 invalidate_global_renaming_pointers (void)
/* Clear each recorded renaming, then release the vector itself.  */
1877 for (i = 0; VEC_iterate(tree, global_renaming_pointers, i, iter); i++)
1878 SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);
1880 VEC_free (tree, gc, global_renaming_pointers);
1883 /* Return true if VALUE is a known to be a multiple of FACTOR, which must be
1887 value_factor_p (tree value, HOST_WIDE_INT factor)
/* NOTE(review): FACTOR is used as a modulus below, so it must be
   nonzero; callers pass alignments/BITS_PER_UNIT here.  */
1889 if (host_integerp (value, 1))
1890 return tree_low_cst (value, 1) % factor == 0;
/* A product is a multiple of FACTOR if either operand is.  */
1892 if (TREE_CODE (value) == MULT_EXPR)
1893 return (value_factor_p (TREE_OPERAND (value, 0), factor)
1894 || value_factor_p (TREE_OPERAND (value, 1), factor));
1899 /* Given 2 consecutive field decls PREV_FIELD and CURR_FIELD, return true
1900 unless we can prove these 2 fields are laid out in such a way that no gap
1901 exist between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
1902 is the distance in bits between the end of PREV_FIELD and the starting
1903 position of CURR_FIELD. It is ignored if null. */
1906 potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
1908 /* If this is the first field of the record, there cannot be any gap */
1912 /* If the previous field is a union type, then return False: The only
1913 time when such a field is not the last field of the record is when
1914 there are other components at fixed positions after it (meaning there
1915 was a rep clause for every field), in which case we don't want the
1916 alignment constraint to override them. */
1917 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
1920 /* If the distance between the end of prev_field and the beginning of
1921 curr_field is constant, then there is a gap if the value of this
1922 constant is not null. */
1923 if (offset && host_integerp (offset, 1))
1924 return !integer_zerop (offset);
1926 /* If the size and position of the previous field are constant,
1927 then check the sum of this size and position. There will be a gap
1928 iff it is not multiple of the current field alignment. */
1929 if (host_integerp (DECL_SIZE (prev_field), 1)
1930 && host_integerp (bit_position (prev_field), 1))
1931 return ((tree_low_cst (bit_position (prev_field), 1)
1932 + tree_low_cst (DECL_SIZE (prev_field), 1))
1933 % DECL_ALIGN (curr_field) != 0);
1935 /* If both the position and size of the previous field are multiples
1936 of the current field alignment, there cannot be any gap. */
1937 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
1938 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
1941 /* Fallback, return that there may be a potential gap */
1945 /* Returns a LABEL_DECL node for LABEL_NAME. */
1948 create_label_decl (tree label_name)
/* Labels carry no value, hence the void type and VOIDmode below.  */
1950 tree label_decl = build_decl (LABEL_DECL, label_name, void_type_node);
/* The label belongs to the function currently being compiled and is
   tagged with the current source position.  */
1952 DECL_CONTEXT (label_decl) = current_function_decl;
1953 DECL_MODE (label_decl) = VOIDmode;
1954 DECL_SOURCE_LOCATION (label_decl) = input_location;
1959 /* Returns a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
1960 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1961 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1962 PARM_DECL nodes chained through the TREE_CHAIN field).
1964 INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
1965 appropriate fields in the FUNCTION_DECL. GNAT_NODE gives the location. */
1968 create_subprog_decl (tree subprog_name, tree asm_name,
1969 tree subprog_type, tree param_decl_list, bool inline_flag,
1970 bool public_flag, bool extern_flag,
1971 struct attrib *attr_list, Node_Id gnat_node)
1973 tree return_type = TREE_TYPE (subprog_type);
1974 tree subprog_decl = build_decl (FUNCTION_DECL, subprog_name, subprog_type);
1976 /* If this is a function nested inside an inlined external function, it
1977 means we aren't going to compile the outer function unless it is
1978 actually inlined, so do the same for us. */
1979 if (current_function_decl && DECL_INLINE (current_function_decl)
1980 && DECL_EXTERNAL (current_function_decl))
/* Mirror the function-type qualifiers onto the decl: a volatile
   (No_Return/volatile) subprogram type makes calls have side effects.  */
1983 DECL_EXTERNAL (subprog_decl) = extern_flag;
1984 TREE_PUBLIC (subprog_decl) = public_flag;
1985 TREE_STATIC (subprog_decl) = 1;
1986 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
1987 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
1988 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
1989 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
/* Build the anonymous RESULT_DECL holding the return value; it is
   compiler-generated, so artificial and ignored for debug output.  */
1990 DECL_RESULT (subprog_decl) = build_decl (RESULT_DECL, 0, return_type);
1991 DECL_ARTIFICIAL (DECL_RESULT (subprog_decl)) = 1;
1992 DECL_IGNORED_P (DECL_RESULT (subprog_decl)) = 1;
1994 /* TREE_ADDRESSABLE is set on the result type to request the use of the
1995 target by-reference return mechanism. This is not supported all the
1996 way down to RTL expansion with GCC 4, which ICEs on temporary creation
1997 attempts with such a type and expects DECL_BY_REFERENCE to be set on
1998 the RESULT_DECL instead - see gnat_genericize for more details. */
1999 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (subprog_decl))))
2001 tree result_decl = DECL_RESULT (subprog_decl);
2003 TREE_ADDRESSABLE (TREE_TYPE (result_decl)) = 0;
2004 DECL_BY_REFERENCE (result_decl) = 1;
/* Honor an explicit pragma Inline (INLINE_FLAG) by marking the decl
   declared-inline.  */
2008 DECL_DECLARED_INLINE_P (subprog_decl) = 1;
2012 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
2014 /* The expand_main_function circuitry expects "main_identifier_node" to
2015 designate the DECL_NAME of the 'main' entry point, in turn expected
2016 to be declared as the "main" function literally by default. Ada
2017 program entry points are typically declared with a different name
2018 within the binder generated file, exported as 'main' to satisfy the
2019 system expectations. Redirect main_identifier_node in this case. */
2020 if (asm_name == main_identifier_node)
2021 main_identifier_node = DECL_NAME (subprog_decl);
/* Apply any machine attributes, then make the decl visible.  */
2024 process_attributes (subprog_decl, attr_list);
2026 /* Add this decl to the current binding level. */
2027 gnat_pushdecl (subprog_decl, gnat_node);
2029 /* Output the assembler code and/or RTL for the declaration. */
2030 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
2032 return subprog_decl;
2035 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
2036 body. This routine needs to be invoked before processing the declarations
2037 appearing in the subprogram. */
2040 begin_subprog_body (tree subprog_decl)
/* Make SUBPROG_DECL the function being compiled and announce it (for
   -quiet/progress output).  */
2044 current_function_decl = subprog_decl;
2045 announce_function (subprog_decl);
2047 /* Enter a new binding level and show that all the parameters belong to
/* Reparent each PARM_DECL so its DECL_CONTEXT is this subprogram.  */
2050 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
2051 param_decl = TREE_CHAIN (param_decl))
2052 DECL_CONTEXT (param_decl) = subprog_decl;
2054 make_decl_rtl (subprog_decl);
2056 /* We handle pending sizes via the elaboration of types, so we don't need to
2057 save them. This causes them to be marked as part of the outer function
2058 and then discarded. */
2059 get_pending_sizes ();
2063 /* Helper for the genericization callback. Return a dereference of VAL
2064 if it is of a reference type. */
2067 convert_from_reference (tree val)
2069 tree value_type, ref;
/* Non-reference values are returned unchanged (the early-out path).  */
2071 if (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE)
2074 value_type = TREE_TYPE (TREE_TYPE (val));
2075 ref = build1 (INDIRECT_REF, value_type, val);
2077 /* See if what we reference is CONST or VOLATILE, which requires
2078 looking into array types to get to the component type. */
2080 while (TREE_CODE (value_type) == ARRAY_TYPE)
2081 value_type = TREE_TYPE (value_type);
/* Propagate the qualifiers of the pointed-to object onto the
   INDIRECT_REF so later passes treat it correctly.  */
2084 = (TYPE_QUALS (value_type) & TYPE_QUAL_CONST);
2085 TREE_THIS_VOLATILE (ref)
2086 = (TYPE_QUALS (value_type) & TYPE_QUAL_VOLATILE);
/* A volatile dereference, or a dereference of a side-effecting
   pointer expression, itself has side effects.  */
2088 TREE_SIDE_EFFECTS (ref)
2089 = (TREE_THIS_VOLATILE (ref) || TREE_SIDE_EFFECTS (val));
2094 /* Helper for the genericization callback. Returns true if T denotes
2095 a RESULT_DECL with DECL_BY_REFERENCE set. */
2098 is_byref_result (tree t)
2100 return (TREE_CODE (t) == RESULT_DECL && DECL_BY_REFERENCE (t));
2104 /* Tree walking callback for gnat_genericize. Currently ...
2106 o Adjust references to the function's DECL_RESULT if it is marked
2107 DECL_BY_REFERENCE and so has had its type turned into a reference
2108 type at the end of the function compilation. */
2111 gnat_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
2113 /* This implementation is modeled after what the C++ front-end is
2114 doing, basis of the downstream passes behavior. */
2116 tree stmt = *stmt_p;
/* DATA is the pointer-set of already-visited trees, installed by
   gnat_genericize.  */
2117 struct pointer_set_t *p_set = (struct pointer_set_t*) data;
2119 /* If we have a direct mention of the result decl, dereference. */
2120 if (is_byref_result (stmt))
2122 *stmt_p = convert_from_reference (stmt);
2127 /* Otherwise, no need to walk the same tree twice. */
2128 if (pointer_set_contains (p_set, stmt))
2134 /* If we are taking the address of what now is a reference, just get the
2136 if (TREE_CODE (stmt) == ADDR_EXPR
2137 && is_byref_result (TREE_OPERAND (stmt, 0)))
/* &result collapses to the reference value itself, converted to the
   expected pointer type.  */
2139 *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
2143 /* Don't dereference an by-reference RESULT_DECL inside a RETURN_EXPR. */
2144 else if (TREE_CODE (stmt) == RETURN_EXPR
2145 && TREE_OPERAND (stmt, 0)
2146 && is_byref_result (TREE_OPERAND (stmt, 0)))
2149 /* Don't look inside trees that cannot embed references of interest. */
2150 else if (IS_TYPE_OR_DECL_P (stmt))
/* Remember this tree so subsequent walks skip it.  */
2153 pointer_set_insert (p_set, *stmt_p);
2158 /* Perform lowering of Ada trees to GENERIC. In particular:
2160 o Turn a DECL_BY_REFERENCE RESULT_DECL into a real by-reference decl
2161 and adjust all the references to this decl accordingly. */
2164 gnat_genericize (tree fndecl)
2166 /* Prior to GCC 4, an explicit By_Reference result mechanism for a function
2167 was handled by simply setting TREE_ADDRESSABLE on the result type.
2168 Everything required to actually pass by invisible ref using the target
2169 mechanism (e.g. extra parameter) was handled at RTL expansion time.
2171 This doesn't work with GCC 4 any more for several reasons. First, the
2172 gimplification process might need the creation of temporaries of this
2173 type, and the gimplifier ICEs on such attempts. Second, the middle-end
2174 now relies on a different attribute for such cases (DECL_BY_REFERENCE on
2175 RESULT/PARM_DECLs), and expects the user invisible by-reference-ness to
2176 be explicitly accounted for by the front-end in the function body.
2178 We achieve the complete transformation in two steps:
2180 1/ create_subprog_decl performs early attribute tweaks: it clears
2181 TREE_ADDRESSABLE from the result type and sets DECL_BY_REFERENCE on
2182 the result decl. The former ensures that the bit isn't set in the GCC
2183 tree saved for the function, so prevents ICEs on temporary creation.
2184 The latter we use here to trigger the rest of the processing.
2186 2/ This function performs the type transformation on the result decl
2187 and adjusts all the references to this decl from the function body
2190 Clearing TREE_ADDRESSABLE from the type differs from the C++ front-end
2191 strategy, which escapes the gimplifier temporary creation issues by
2192 creating it's own temporaries using TARGET_EXPR nodes. Our way relies
2193 on simple specific support code in aggregate_value_p to look at the
2194 target function result decl explicitly. */
2196 struct pointer_set_t *p_set;
2197 tree decl_result = DECL_RESULT (fndecl);
/* Nothing to do unless create_subprog_decl flagged the result decl.  */
2199 if (!DECL_BY_REFERENCE (decl_result))
2202 /* Make the DECL_RESULT explicitly by-reference and adjust all the
2203 occurrences in the function body using the common tree-walking facility.
2204 We want to see every occurrence of the result decl to adjust the
2205 referencing tree, so need to use our own pointer set to control which
2206 trees should be visited again or not. */
2208 p_set = pointer_set_create ();
/* Turn the result's type into a reference type and recompute its
   layout accordingly.  */
2210 TREE_TYPE (decl_result) = build_reference_type (TREE_TYPE (decl_result));
2211 TREE_ADDRESSABLE (decl_result) = 0;
2212 relayout_decl (decl_result);
2214 walk_tree (&DECL_SAVED_TREE (fndecl), gnat_genericize_r, p_set, NULL);
2216 pointer_set_destroy (p_set);
2219 /* Finish the definition of the current subprogram BODY and compile it all the
2220 way to assembler language output. ELAB_P tells if this is called for an
2221 elaboration routine, to be entirely discarded if empty. */
2224 end_subprog_body (tree body, bool elab_p)
2226 tree fndecl = current_function_decl;
2228 /* Mark the BLOCK for this level as being for this function and pop the
2229 level. Since the vars in it are the parameters, clear them. */
2230 BLOCK_VARS (current_binding_level->block) = 0;
2231 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
2232 DECL_INITIAL (fndecl) = current_binding_level->block;
2235 /* Deal with inline. If declared inline or we should default to inline,
2236 set the flag in the decl. */
2237 DECL_INLINE (fndecl) = 1;
2239 /* We handle pending sizes via the elaboration of types, so we don't
2240 need to save them. */
2241 get_pending_sizes ();
2243 /* Mark the RESULT_DECL as being in this subprogram. */
2244 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
2246 DECL_SAVED_TREE (fndecl) = body;
/* Pop back to the enclosing function, if any (NULL at top level).  */
2248 current_function_decl = DECL_CONTEXT (fndecl);
2251 /* We cannot track the location of errors past this point. */
2252 error_gnat_node = Empty;
2254 /* If we're only annotating types, don't actually compile this function. */
2255 if (type_annotate_only)
2258 /* Perform the required pre-gimplification transformations on the tree. */
2259 gnat_genericize (fndecl);
2261 /* We do different things for nested and non-nested functions.
2262 ??? This should be in cgraph. */
2263 if (!DECL_CONTEXT (fndecl))
/* Non-nested: gimplify now and hand the function to cgraph.  */
2265 gnat_gimplify_function (fndecl);
2267 /* If this is an empty elaboration proc, just discard the node.
2268 Otherwise, compile further. */
2269 if (elab_p && empty_body_p (gimple_body (fndecl)))
2270 cgraph_remove_node (cgraph_node (fndecl));
2272 cgraph_finalize_function (fndecl, false);
2275 /* Register this function with cgraph just far enough to get it
2276 added to our parent's nested function list. */
2277 (void) cgraph_node (fndecl);
2280 /* Convert FNDECL's code to GIMPLE and handle any nested functions. */
2283 gnat_gimplify_function (tree fndecl)
2285 struct cgraph_node *cgn;
/* Dump the GENERIC form, gimplify, then dump the gimplified form so
   -fdump-tree-original/-generic show before/after.  */
2287 dump_function (TDI_original, fndecl);
2288 gimplify_function_tree (fndecl);
2289 dump_function (TDI_generic, fndecl);
2291 /* Convert all nested functions to GIMPLE now. We do things in this order
2292 so that items like VLA sizes are expanded properly in the context of the
2293 correct function. */
2294 cgn = cgraph_node (fndecl);
2295 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
2296 gnat_gimplify_function (cgn->decl);
/* Langhook: record DECL, a builtin function decl, by adding it to the
   current binding level (no GNAT node is associated, hence Empty).  */
2301 gnat_builtin_function (tree decl)
2303 gnat_pushdecl (decl, Empty);
2307 /* Return an integer type with the number of bits of precision given by
2308 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
2309 it is a signed type. */
2312 gnat_type_for_size (unsigned precision, int unsignedp)
/* First consult the cache, indexed by precision and signedness; only
   precisions up to twice the word size are cached.  */
2317 if (precision <= 2 * MAX_BITS_PER_WORD
2318 && signed_and_unsigned_types[precision][unsignedp])
2319 return signed_and_unsigned_types[precision][unsignedp];
/* Not cached: build the type and memoize it when it fits the table.  */
2322 t = make_unsigned_type (precision);
2324 t = make_signed_type (precision);
2326 if (precision <= 2 * MAX_BITS_PER_WORD)
2327 signed_and_unsigned_types[precision][unsignedp] = t;
/* Give the type a debug-friendly name, e.g. "UNSIGNED_32"/"SIGNED_8".  */
2331 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
2332 TYPE_NAME (t) = get_identifier (type_name);
2338 /* Likewise for floating-point types: return a REAL_TYPE with PRECISION
        bits whose machine mode is MODE, caching one type per mode. */
2341 float_type_for_precision (int precision, enum machine_mode mode)
/* Return the cached type for this mode if we already built one.  */
2346 if (float_types[(int) mode])
2347 return float_types[(int) mode];
2349 float_types[(int) mode] = t = make_node (REAL_TYPE);
2350 TYPE_PRECISION (t) = precision;
/* Laying out PRECISION bits must yield exactly MODE, or the caller
   passed an inconsistent pair.  */
2353 gcc_assert (TYPE_MODE (t) == mode);
/* Name the type for debug output, e.g. "FLOAT_64".  */
2356 sprintf (type_name, "FLOAT_%d", precision);
2357 TYPE_NAME (t) = get_identifier (type_name);
2363 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
2364 an unsigned type; otherwise a signed type is returned. */
2367 gnat_type_for_mode (enum machine_mode mode, int unsignedp)
/* BLKmode and complex modes have no suitable scalar type here;
   VOIDmode maps to void, floats and integers to the matching
   precision-based type.  */
2369 if (mode == BLKmode)
2371 else if (mode == VOIDmode)
2372 return void_type_node;
2373 else if (COMPLEX_MODE_P (mode))
2375 else if (SCALAR_FLOAT_MODE_P (mode))
2376 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
2377 else if (SCALAR_INT_MODE_P (mode))
2378 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2383 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
2386 gnat_unsigned_type (tree type_node)
2388 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
/* For an Ada modular type (or a subtype thereof, checked via
   TREE_TYPE), work on a copy and record the modular base type in
   TREE_TYPE so the modular property is not lost.  */
2390 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2392 type = copy_node (type);
2393 TREE_TYPE (type) = type_node;
2395 else if (TREE_TYPE (type_node)
2396 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2397 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2399 type = copy_node (type);
2400 TREE_TYPE (type) = TREE_TYPE (type_node);
2406 /* Return the signed version of a TYPE_NODE, a scalar type. */
2409 gnat_signed_type (tree type_node)
2411 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
/* Same modular-type handling as gnat_unsigned_type: preserve the
   modular base type in TREE_TYPE of a copy.  */
2413 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2415 type = copy_node (type);
2416 TREE_TYPE (type) = type_node;
2418 else if (TREE_TYPE (type_node)
2419 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2420 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2422 type = copy_node (type);
2423 TREE_TYPE (type) = TREE_TYPE (type_node);
2429 /* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
2430 transparently converted to each other. */
2433 gnat_types_compatible_p (tree t1, tree t2)
2435 enum tree_code code;
2437 /* This is the default criterion. */
2438 if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
2441 /* We only check structural equivalence here. */
2442 if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
2445 /* Array types are also compatible if they are constrained and have
2446 the same component type and the same domain. */
2447 if (code == ARRAY_TYPE
2448 && TREE_TYPE (t1) == TREE_TYPE (t2)
2449 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
2450 TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
2451 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
2452 TYPE_MAX_VALUE (TYPE_DOMAIN (t2))))
2455 /* Padding record types are also compatible if they pad the same
2456 type and have the same constant size. */
2457 if (code == RECORD_TYPE
2458 && TYPE_IS_PADDING_P (t1) && TYPE_IS_PADDING_P (t2)
2459 && TREE_TYPE (TYPE_FIELDS (t1)) == TREE_TYPE (TYPE_FIELDS (t2))
2460 && tree_int_cst_equal (TYPE_SIZE (t1), TYPE_SIZE (t2)))
2466 /* EXP is an expression for the size of an object. If this size contains
2467 discriminant references, replace them with the maximum (if MAX_P) or
2468 minimum (if !MAX_P) possible value of the discriminant. */
2471 max_size (tree exp, bool max_p)
2473 enum tree_code code = TREE_CODE (exp);
2474 tree type = TREE_TYPE (exp);
/* Dispatch on the class of tree node; each class rebuilds EXP with the
   recursively-bounded operands.  */
2476 switch (TREE_CODE_CLASS (code))
2478 case tcc_declaration:
/* A call is rebuilt with each argument independently bounded.  */
2483 if (code == CALL_EXPR)
2486 int i, n = call_expr_nargs (exp);
2489 argarray = (tree *) alloca (n * sizeof (tree));
2490 for (i = 0; i < n; i++)
2491 argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
2492 return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
2497 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
2498 modify. Otherwise, we treat it like a variable. */
2499 if (!CONTAINS_PLACEHOLDER_P (exp))
/* A discriminant reference: substitute the bound of its type.  */
2502 type = TREE_TYPE (TREE_OPERAND (exp, 1));
2504 max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), true);
/* A comparison used in a size is either 0 or 1; return the requested
   extreme.  */
2506 case tcc_comparison:
2507 return max_p ? size_one_node : size_zero_node;
2511 case tcc_expression:
2512 switch (TREE_CODE_LENGTH (code))
2515 if (code == NON_LVALUE_EXPR)
2516 return max_size (TREE_OPERAND (exp, 0), max_p);
/* Negation flips which bound we are after.  */
2519 fold_build1 (code, type,
2520 max_size (TREE_OPERAND (exp, 0),
2521 code == NEGATE_EXPR ? !max_p : max_p));
2524 if (code == COMPOUND_EXPR)
2525 return max_size (TREE_OPERAND (exp, 1), max_p);
2527 /* Calculate "(A ? B : C) - D" as "A ? B - D : C - D" which
2528 may provide a tighter bound on max_size. */
2529 if (code == MINUS_EXPR
2530 && TREE_CODE (TREE_OPERAND (exp, 0)) == COND_EXPR)
2532 tree lhs = fold_build2 (MINUS_EXPR, type,
2533 TREE_OPERAND (TREE_OPERAND (exp, 0), 1),
2534 TREE_OPERAND (exp, 1));
2535 tree rhs = fold_build2 (MINUS_EXPR, type,
2536 TREE_OPERAND (TREE_OPERAND (exp, 0), 2),
2537 TREE_OPERAND (exp, 1));
2538 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2539 max_size (lhs, max_p),
2540 max_size (rhs, max_p));
/* Generic binary case: bound both operands; subtraction flips the
   requested bound for the right operand.  */
2544 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
2545 tree rhs = max_size (TREE_OPERAND (exp, 1),
2546 code == MINUS_EXPR ? !max_p : max_p);
2548 /* Special-case wanting the maximum value of a MIN_EXPR.
2549 In that case, if one side overflows, return the other.
2550 sizetype is signed, but we know sizes are non-negative.
2551 Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
2552 overflowing or the maximum possible value and the RHS
2556 && TREE_CODE (rhs) == INTEGER_CST
2557 && TREE_OVERFLOW (rhs))
2561 && TREE_CODE (lhs) == INTEGER_CST
2562 && TREE_OVERFLOW (lhs))
2564 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
2565 && ((TREE_CODE (lhs) == INTEGER_CST
2566 && TREE_OVERFLOW (lhs))
2567 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
2568 && !TREE_CONSTANT (rhs))
2571 return fold_build2 (code, type, lhs, rhs);
2575 if (code == SAVE_EXPR)
/* COND_EXPR: take the max (or min) over both arms.  */
2577 else if (code == COND_EXPR)
2578 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2579 max_size (TREE_OPERAND (exp, 1), max_p),
2580 max_size (TREE_OPERAND (exp, 2), max_p));
2583 /* Other tree classes cannot happen. */
2591 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2592 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2593 Return a constructor for the template. */
2596 build_template (tree template_type, tree array_type, tree expr)
2598 tree template_elts = NULL_TREE;
2599 tree bound_list = NULL_TREE;
/* Strip any padding or justified-modular wrapper records to reach the
   underlying array type.  */
2602 while (TREE_CODE (array_type) == RECORD_TYPE
2603 && (TYPE_IS_PADDING_P (array_type)
2604 || TYPE_JUSTIFIED_MODULAR_P (array_type)))
2605 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
/* Packed arrays may be implemented as integers carrying the actual
   bounds; fetch those bounds when present.  */
2607 if (TREE_CODE (array_type) == ARRAY_TYPE
2608 || (TREE_CODE (array_type) == INTEGER_TYPE
2609 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
2610 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
2612 /* First make the list for a CONSTRUCTOR for the template. Go down the
2613 field list of the template instead of the type chain because this
2614 array might be an Ada array of arrays and we can't tell where the
2615 nested arrays stop being the underlying object. */
/* Template fields come in (low bound, high bound) pairs, hence the
   double TREE_CHAIN step per iteration.  */
2617 for (field = TYPE_FIELDS (template_type); field;
2619 ? (bound_list = TREE_CHAIN (bound_list))
2620 : (array_type = TREE_TYPE (array_type))),
2621 field = TREE_CHAIN (TREE_CHAIN (field)))
2623 tree bounds, min, max;
2625 /* If we have a bound list, get the bounds from there. Likewise
2626 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
2627 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
2628 This will give us a maximum range. */
2630 bounds = TREE_VALUE (bound_list);
2631 else if (TREE_CODE (array_type) == ARRAY_TYPE)
2632 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
2633 else if (expr && TREE_CODE (expr) == PARM_DECL
2634 && DECL_BY_COMPONENT_PTR_P (expr))
2635 bounds = TREE_TYPE (field);
/* Convert each bound to the type of its template field.  */
2639 min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
2640 max = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MAX_VALUE (bounds));
2642 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
2643 substitute it from OBJECT. */
2644 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
2645 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
/* Prepend (high, low); the list is reversed before building the
   constructor below, restoring field order.  */
2647 template_elts = tree_cons (TREE_CHAIN (field), max,
2648 tree_cons (field, min, template_elts));
2651 return gnat_build_constructor (template_type, nreverse (template_elts));
2654 /* Build a 32bit VMS descriptor from a Mechanism_Type, which must specify
2655 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2656 in the type contains in its DECL_INITIAL the expression to use when
2657 a constructor is made for the type. GNAT_ENTITY is an entity used
2658 to print out an error message if the mechanism cannot be applied to
2659 an object of that type and also for the name. */
2662 build_vms_descriptor32 (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2664 tree record_type = make_node (RECORD_TYPE);
2665 tree pointer32_type;
2666 tree field_list = 0;
2675 /* If TYPE is an unconstrained array, use the underlying array type. */
2676 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2677 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2679 /* If this is an array, compute the number of dimensions in the array,
2680 get the index types, and point to the inner type. */
2681 if (TREE_CODE (type) != ARRAY_TYPE)
2684 for (ndim = 1, inner_type = type;
2685 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2686 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2687 ndim++, inner_type = TREE_TYPE (inner_type))
2690 idx_arr = (tree *) alloca (ndim * sizeof (tree));
/* Fortran convention stores dimensions in reverse order, except for
   NCA descriptors; fill IDX_ARR accordingly.  */
2692 if (mech != By_Descriptor_NCA && mech != By_Short_Descriptor_NCA
2693 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2694 for (i = ndim - 1, inner_type = type;
2696 i--, inner_type = TREE_TYPE (inner_type))
2697 idx_arr[i] = TYPE_DOMAIN (inner_type);
2699 for (i = 0, inner_type = type;
2701 i++, inner_type = TREE_TYPE (inner_type))
2702 idx_arr[i] = TYPE_DOMAIN (inner_type);
2704 /* Now get the DTYPE value. */
/* NOTE(review): the numeric DTYPE/CLASS codes below follow the OpenVMS
   calling standard descriptor encodings — confirm against that spec
   before changing any of them.  */
2705 switch (TREE_CODE (type))
2710 if (TYPE_VAX_FLOATING_POINT_P (type))
2711 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2724 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2727 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2730 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2733 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2736 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2739 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2745 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2749 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2750 && TYPE_VAX_FLOATING_POINT_P (type))
2751 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2763 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2774 /* Get the CLASS value. */
2777 case By_Descriptor_A:
2778 case By_Short_Descriptor_A:
2781 case By_Descriptor_NCA:
2782 case By_Short_Descriptor_NCA:
2785 case By_Descriptor_SB:
2786 case By_Short_Descriptor_SB:
2790 case By_Short_Descriptor:
2791 case By_Descriptor_S:
2792 case By_Short_Descriptor_S:
2798 /* Make the type for a descriptor for VMS. The first four fields
2799 are the same for all types. */
2802 = chainon (field_list,
2803 make_descriptor_field
2804 ("LENGTH", gnat_type_for_size (16, 1), record_type,
2805 size_in_bytes ((mech == By_Descriptor_A ||
2806 mech == By_Short_Descriptor_A)
2807 ? inner_type : type)));
2809 field_list = chainon (field_list,
2810 make_descriptor_field ("DTYPE",
2811 gnat_type_for_size (8, 1),
2812 record_type, size_int (dtype)));
2813 field_list = chainon (field_list,
2814 make_descriptor_field ("CLASS",
2815 gnat_type_for_size (8, 1),
2816 record_type, size_int (class)));
2818 /* Of course this will crash at run-time if the address space is not
2819 within the low 32 bits, but there is nothing else we can do. */
2820 pointer32_type = build_pointer_type_for_mode (type, SImode, false);
2823 = chainon (field_list,
2824 make_descriptor_field
2825 ("POINTER", pointer32_type, record_type,
2826 build_unary_op (ADDR_EXPR,
2828 build0 (PLACEHOLDER_EXPR, type))));
/* The remaining fields depend on the descriptor class.  */
2833 case By_Short_Descriptor:
2834 case By_Descriptor_S:
2835 case By_Short_Descriptor_S:
2838 case By_Descriptor_SB:
2839 case By_Short_Descriptor_SB:
/* SB descriptors carry the single pair of array bounds inline.  */
2841 = chainon (field_list,
2842 make_descriptor_field
2843 ("SB_L1", gnat_type_for_size (32, 1), record_type,
2844 TREE_CODE (type) == ARRAY_TYPE
2845 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2847 = chainon (field_list,
2848 make_descriptor_field
2849 ("SB_U1", gnat_type_for_size (32, 1), record_type,
2850 TREE_CODE (type) == ARRAY_TYPE
2851 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2854 case By_Descriptor_A:
2855 case By_Short_Descriptor_A:
2856 case By_Descriptor_NCA:
2857 case By_Short_Descriptor_NCA:
2858 field_list = chainon (field_list,
2859 make_descriptor_field ("SCALE",
2860 gnat_type_for_size (8, 1),
2864 field_list = chainon (field_list,
2865 make_descriptor_field ("DIGITS",
2866 gnat_type_for_size (8, 1),
2871 = chainon (field_list,
2872 make_descriptor_field
2873 ("AFLAGS", gnat_type_for_size (8, 1), record_type,
2874 size_int ((mech == By_Descriptor_NCA ||
2875 mech == By_Short_Descriptor_NCA)
2877 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2878 : (TREE_CODE (type) == ARRAY_TYPE
2879 && TYPE_CONVENTION_FORTRAN_P (type)
2882 field_list = chainon (field_list,
2883 make_descriptor_field ("DIMCT",
2884 gnat_type_for_size (8, 1),
2888 field_list = chainon (field_list,
2889 make_descriptor_field ("ARSIZE",
2890 gnat_type_for_size (32, 1),
2892 size_in_bytes (type)));
2894 /* Now build a pointer to the 0,0,0... element. */
2895 tem = build0 (PLACEHOLDER_EXPR, type);
2896 for (i = 0, inner_type = type; i < ndim;
2897 i++, inner_type = TREE_TYPE (inner_type))
2898 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
2899 convert (TYPE_DOMAIN (inner_type), size_zero_node),
2900 NULL_TREE, NULL_TREE);
2903 = chainon (field_list,
2904 make_descriptor_field
2906 build_pointer_type_for_mode (inner_type, SImode, false),
2909 build_pointer_type_for_mode (inner_type, SImode,
2913 /* Next come the addressing coefficients. */
/* TEM accumulates the product of the extents of the dimensions seen
   so far; each per-dimension field gets the running value.  */
2914 tem = size_one_node;
2915 for (i = 0; i < ndim; i++)
2919 = size_binop (MULT_EXPR, tem,
2920 size_binop (PLUS_EXPR,
2921 size_binop (MINUS_EXPR,
2922 TYPE_MAX_VALUE (idx_arr[i]),
2923 TYPE_MIN_VALUE (idx_arr[i])),
/* Coefficient fields are named S0..Sn for NCA, M0..Mn otherwise.  */
2926 fname[0] = ((mech == By_Descriptor_NCA ||
2927 mech == By_Short_Descriptor_NCA) ? 'S' : 'M');
2928 fname[1] = '0' + i, fname[2] = 0;
2930 = chainon (field_list,
2931 make_descriptor_field (fname,
2932 gnat_type_for_size (32, 1),
2933 record_type, idx_length));
2935 if (mech == By_Descriptor_NCA || mech == By_Short_Descriptor_NCA)
2939 /* Finally here are the bounds. */
2940 for (i = 0; i < ndim; i++)
/* Per-dimension lower bound (L<i>) then upper bound fields.  */
2944 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2946 = chainon (field_list,
2947 make_descriptor_field
2948 (fname, gnat_type_for_size (32, 1), record_type,
2949 TYPE_MIN_VALUE (idx_arr[i])));
2953 = chainon (field_list,
2954 make_descriptor_field
2955 (fname, gnat_type_for_size (32, 1), record_type,
2956 TYPE_MAX_VALUE (idx_arr[i])));
2961 post_error ("unsupported descriptor type for &", gnat_entity);
/* Lay out the completed record and declare it with a "DESC"-suffixed
   name derived from GNAT_ENTITY.  */
2964 finish_record_type (record_type, field_list, 0, true);
2965 create_type_decl (create_concat_name (gnat_entity, "DESC"), record_type,
2966 NULL, true, false, gnat_entity);
2971 /* Build a 64bit VMS descriptor from a Mechanism_Type, which must specify
2972 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2973 in the type contains in its DECL_INITIAL the expression to use when
2974 a constructor is made for the type. GNAT_ENTITY is an entity used
2975 to print out an error message if the mechanism cannot be applied to
2976 an object of that type and also for the name. */
2979 build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2981 tree record64_type = make_node (RECORD_TYPE);
2982 tree pointer64_type;
2983 tree field_list64 = 0;
2992 /* If TYPE is an unconstrained array, use the underlying array type. */
2993 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2994 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2996 /* If this is an array, compute the number of dimensions in the array,
2997 get the index types, and point to the inner type. */
2998 if (TREE_CODE (type) != ARRAY_TYPE)
3001 for (ndim = 1, inner_type = type;
3002 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
3003 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
3004 ndim++, inner_type = TREE_TYPE (inner_type))
3007 idx_arr = (tree *) alloca (ndim * sizeof (tree));
3009 if (mech != By_Descriptor_NCA
3010 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
3011 for (i = ndim - 1, inner_type = type;
3013 i--, inner_type = TREE_TYPE (inner_type))
3014 idx_arr[i] = TYPE_DOMAIN (inner_type);
3016 for (i = 0, inner_type = type;
3018 i++, inner_type = TREE_TYPE (inner_type))
3019 idx_arr[i] = TYPE_DOMAIN (inner_type);
3021 /* Now get the DTYPE value. */
3022 switch (TREE_CODE (type))
3027 if (TYPE_VAX_FLOATING_POINT_P (type))
3028 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
3041 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
3044 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
3047 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
3050 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
3053 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
3056 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
3062 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
3066 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
3067 && TYPE_VAX_FLOATING_POINT_P (type))
3068 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
3080 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
3091 /* Get the CLASS value. */
3094 case By_Descriptor_A:
3097 case By_Descriptor_NCA:
3100 case By_Descriptor_SB:
3104 case By_Descriptor_S:
3110 /* Make the type for a 64bit descriptor for VMS. The first six fields
3111 are the same for all types. */
3113 field_list64 = chainon (field_list64,
3114 make_descriptor_field ("MBO",
3115 gnat_type_for_size (16, 1),
3116 record64_type, size_int (1)));
3118 field_list64 = chainon (field_list64,
3119 make_descriptor_field ("DTYPE",
3120 gnat_type_for_size (8, 1),
3121 record64_type, size_int (dtype)));
3122 field_list64 = chainon (field_list64,
3123 make_descriptor_field ("CLASS",
3124 gnat_type_for_size (8, 1),
3125 record64_type, size_int (class)));
3127 field_list64 = chainon (field_list64,
3128 make_descriptor_field ("MBMO",
3129 gnat_type_for_size (32, 1),
3130 record64_type, ssize_int (-1)));
3133 = chainon (field_list64,
3134 make_descriptor_field
3135 ("LENGTH", gnat_type_for_size (64, 1), record64_type,
3136 size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));
3138 pointer64_type = build_pointer_type_for_mode (type, DImode, false);
3141 = chainon (field_list64,
3142 make_descriptor_field
3143 ("POINTER", pointer64_type, record64_type,
3144 build_unary_op (ADDR_EXPR,
3146 build0 (PLACEHOLDER_EXPR, type))));
3151 case By_Descriptor_S:
3154 case By_Descriptor_SB:
3156 = chainon (field_list64,
3157 make_descriptor_field
3158 ("SB_L1", gnat_type_for_size (64, 1), record64_type,
3159 TREE_CODE (type) == ARRAY_TYPE
3160 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
3162 = chainon (field_list64,
3163 make_descriptor_field
3164 ("SB_U1", gnat_type_for_size (64, 1), record64_type,
3165 TREE_CODE (type) == ARRAY_TYPE
3166 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
3169 case By_Descriptor_A:
3170 case By_Descriptor_NCA:
3171 field_list64 = chainon (field_list64,
3172 make_descriptor_field ("SCALE",
3173 gnat_type_for_size (8, 1),
3177 field_list64 = chainon (field_list64,
3178 make_descriptor_field ("DIGITS",
3179 gnat_type_for_size (8, 1),
3184 = chainon (field_list64,
3185 make_descriptor_field
3186 ("AFLAGS", gnat_type_for_size (8, 1), record64_type,
3187 size_int (mech == By_Descriptor_NCA
3189 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
3190 : (TREE_CODE (type) == ARRAY_TYPE
3191 && TYPE_CONVENTION_FORTRAN_P (type)
3194 field_list64 = chainon (field_list64,
3195 make_descriptor_field ("DIMCT",
3196 gnat_type_for_size (8, 1),
3200 field_list64 = chainon (field_list64,
3201 make_descriptor_field ("MBZ",
3202 gnat_type_for_size (32, 1),
3205 field_list64 = chainon (field_list64,
3206 make_descriptor_field ("ARSIZE",
3207 gnat_type_for_size (64, 1),
3209 size_in_bytes (type)));
3211 /* Now build a pointer to the 0,0,0... element. */
3212 tem = build0 (PLACEHOLDER_EXPR, type);
3213 for (i = 0, inner_type = type; i < ndim;
3214 i++, inner_type = TREE_TYPE (inner_type))
3215 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
3216 convert (TYPE_DOMAIN (inner_type), size_zero_node),
3217 NULL_TREE, NULL_TREE);
3220 = chainon (field_list64,
3221 make_descriptor_field
3223 build_pointer_type_for_mode (inner_type, DImode, false),
3226 build_pointer_type_for_mode (inner_type, DImode,
3230 /* Next come the addressing coefficients. */
3231 tem = size_one_node;
3232 for (i = 0; i < ndim; i++)
3236 = size_binop (MULT_EXPR, tem,
3237 size_binop (PLUS_EXPR,
3238 size_binop (MINUS_EXPR,
3239 TYPE_MAX_VALUE (idx_arr[i]),
3240 TYPE_MIN_VALUE (idx_arr[i])),
3243 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
3244 fname[1] = '0' + i, fname[2] = 0;
3246 = chainon (field_list64,
3247 make_descriptor_field (fname,
3248 gnat_type_for_size (64, 1),
3249 record64_type, idx_length));
3251 if (mech == By_Descriptor_NCA)
3255 /* Finally here are the bounds. */
3256 for (i = 0; i < ndim; i++)
3260 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
3262 = chainon (field_list64,
3263 make_descriptor_field
3264 (fname, gnat_type_for_size (64, 1), record64_type,
3265 TYPE_MIN_VALUE (idx_arr[i])));
3269 = chainon (field_list64,
3270 make_descriptor_field
3271 (fname, gnat_type_for_size (64, 1), record64_type,
3272 TYPE_MAX_VALUE (idx_arr[i])));
3277 post_error ("unsupported descriptor type for &", gnat_entity);
3280 finish_record_type (record64_type, field_list64, 0, true);
3281 create_type_decl (create_concat_name (gnat_entity, "DESC64"), record64_type,
3282 NULL, true, false, gnat_entity);
3284 return record64_type;
3287 /* Utility routine for above code to make a field.  */
/* Build a FIELD_DECL named NAME of type TYPE belonging to REC_TYPE and
   record INITIAL in its DECL_INITIAL slot; that constant is read back
   later by the convert_vms_descriptor* routines (e.g. the CLASS code).
   NOTE(review): the return type, braces and the final return statement
   are not visible in this chunk — presumably the field is returned;
   confirm against the full source.  */
3290 make_descriptor_field (const char *name, tree type,
3291 tree rec_type, tree initial)
3294 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
/* Stash the descriptor's constant value for this field.  */
3296 DECL_INITIAL (field) = initial;
3300 /* Convert GNU_EXPR, a pointer to a 64bit VMS descriptor, to GNU_TYPE, a
3301 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3302 which the VMS descriptor is passed. */
/* NOTE(review): several interior lines of this function (return type,
   braces, parts of nested expressions) are missing from this chunk; the
   comments below describe only what the visible code shows.  */
3305 convert_vms_descriptor64 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
/* Dereference the descriptor pointer so its fields can be read with
   COMPONENT_REFs.  */
3307 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3308 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3309 /* The CLASS field is the 3rd field in the descriptor. */
3310 tree class = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type)));
3311 /* The POINTER field is the 6th field in the descriptor. */
3312 tree pointer64 = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (class)));
3314 /* Retrieve the value of the POINTER field. */
3316 = build3 (COMPONENT_REF, TREE_TYPE (pointer64), desc, pointer64, NULL_TREE);
/* A plain access type needs only the data pointer.  */
3318 if (POINTER_TYPE_P (gnu_type))
3319 return convert (gnu_type, gnu_expr64);
/* For a fat pointer we must also materialize a bounds template.  */
3321 else if (TYPE_FAT_POINTER_P (gnu_type))
3323 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
3324 tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
3325 tree template_type = TREE_TYPE (p_bounds_type);
3326 tree min_field = TYPE_FIELDS (template_type);
3327 tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
3328 tree template, template_addr, aflags, dimct, t, u;
3329 /* See the head comment of build_vms_descriptor. */
/* The descriptor CLASS code was stored in DECL_INITIAL when the
   descriptor type was built (see make_descriptor_field).  */
3330 int iclass = TREE_INT_CST_LOW (DECL_INITIAL (class));
3331 tree lfield, ufield;
3333 /* Convert POINTER to the type of the P_ARRAY field. */
3334 gnu_expr64 = convert (p_array_type, gnu_expr64);
/* Dispatch on the descriptor class; presumably a switch (iclass) —
   the switch head itself is not visible in this chunk.  */
3338 case 1: /* Class S */
3339 case 15: /* Class SB */
3340 /* Build {1, LENGTH} template; LENGTH64 is the 5th field. */
3341 t = TREE_CHAIN (TREE_CHAIN (class));
3342 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3343 t = tree_cons (min_field,
3344 convert (TREE_TYPE (min_field), integer_one_node),
3345 tree_cons (max_field,
3346 convert (TREE_TYPE (max_field), t),
3348 template = gnat_build_constructor (template_type, t);
3349 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);
3351 /* For class S, we are done. */
3355 /* Test that we really have a SB descriptor, like DEC Ada. */
3356 t = build3 (COMPONENT_REF, TREE_TYPE (class), desc, class, NULL);
3357 u = convert (TREE_TYPE (class), DECL_INITIAL (class));
3358 u = build_binary_op (EQ_EXPR, integer_type_node, t, u);
3359 /* If so, there is already a template in the descriptor and
3360 it is located right after the POINTER field. The fields are
3361 64bits so they must be repacked. */
3362 t = TREE_CHAIN (pointer64);
3363 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3364 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3367 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3369 (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (template_type))), ufield);
3371 /* Build the template in the form of a constructor. */
3372 t = tree_cons (TYPE_FIELDS (template_type), lfield,
3373 tree_cons (TREE_CHAIN (TYPE_FIELDS (template_type)),
3374 ufield, NULL_TREE));
3375 template = gnat_build_constructor (template_type, t);
3377 /* Otherwise use the {1, LENGTH} template we build above. */
/* Select at run time between the descriptor's own template and the
   {1, LENGTH} fallback, keyed on the class comparison U above.  */
3378 template_addr = build3 (COND_EXPR, p_bounds_type, u,
3379 build_unary_op (ADDR_EXPR, p_bounds_type,
3384 case 4: /* Class A */
3385 /* The AFLAGS field is the 3rd field after the pointer in the
3387 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer64)));
3388 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3389 /* The DIMCT field is the next field in the descriptor after
3392 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3393 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
3394 or FL_COEFF or FL_BOUNDS not set. */
/* 192 = 0xC0: mask of the FL_COEFF and FL_BOUNDS bits of AFLAGS,
   per the comment above.  */
3395 u = build_int_cst (TREE_TYPE (aflags), 192);
3396 u = build_binary_op (TRUTH_OR_EXPR, integer_type_node,
3397 build_binary_op (NE_EXPR, integer_type_node,
3399 convert (TREE_TYPE (dimct),
3401 build_binary_op (NE_EXPR, integer_type_node,
3402 build2 (BIT_AND_EXPR,
3406 /* There is already a template in the descriptor and it is located
3407 in block 3. The fields are 64bits so they must be repacked. */
3408 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN
3410 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3411 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3414 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3416 (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (template_type))), ufield);
3418 /* Build the template in the form of a constructor. */
3419 t = tree_cons (TYPE_FIELDS (template_type), lfield,
3420 tree_cons (TREE_CHAIN (TYPE_FIELDS (template_type)),
3421 ufield, NULL_TREE));
3422 template = gnat_build_constructor (template_type, t);
/* If the dimension/flags check U fails, emit a run-time raise of
   Constraint_Error instead of using the template.  */
3423 template = build3 (COND_EXPR, p_bounds_type, u,
3424 build_call_raise (CE_Length_Check_Failed, Empty,
3425 N_Raise_Constraint_Error),
3427 template_addr = build_unary_op (ADDR_EXPR, p_bounds_type, template);
3430 case 10: /* Class NCA */
/* Unhandled descriptor classes are diagnosed at compile time against
   the subprogram entity.  */
3432 post_error ("unsupported descriptor type for &", gnat_subprog);
3433 template_addr = integer_zero_node;
3437 /* Build the fat pointer in the form of a constructor. */
3438 t = tree_cons (TYPE_FIELDS (gnu_type), gnu_expr64,
3439 tree_cons (TREE_CHAIN (TYPE_FIELDS (gnu_type)),
3440 template_addr, NULL_TREE));
3441 return gnat_build_constructor (gnu_type, t);
3448 /* Convert GNU_EXPR, a pointer to a 32bit VMS descriptor, to GNU_TYPE, a
3449 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3450 which the VMS descriptor is passed. */
/* NOTE(review): interior lines are missing from this chunk (return type,
   braces, parts of expressions); comments describe the visible code only.
   Mirrors convert_vms_descriptor64 but with 32-bit field layout.  */
3453 convert_vms_descriptor32 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
/* Dereference the descriptor pointer for COMPONENT_REF access.  */
3455 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3456 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3457 /* The CLASS field is the 3rd field in the descriptor. */
3458 tree class = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type)));
3459 /* The POINTER field is the 4th field in the descriptor. */
3460 tree pointer = TREE_CHAIN (class);
3462 /* Retrieve the value of the POINTER field. */
3464 = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);
/* A plain access type needs only the data pointer.  */
3466 if (POINTER_TYPE_P (gnu_type))
3467 return convert (gnu_type, gnu_expr32);
/* For a fat pointer we must also materialize a bounds template.  */
3469 else if (TYPE_FAT_POINTER_P (gnu_type))
3471 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
3472 tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
3473 tree template_type = TREE_TYPE (p_bounds_type);
3474 tree min_field = TYPE_FIELDS (template_type);
3475 tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
3476 tree template, template_addr, aflags, dimct, t, u;
3477 /* See the head comment of build_vms_descriptor. */
/* CLASS code stored as DECL_INITIAL when the descriptor type was
   built (see make_descriptor_field).  */
3478 int iclass = TREE_INT_CST_LOW (DECL_INITIAL (class));
3480 /* Convert POINTER to the type of the P_ARRAY field. */
3481 gnu_expr32 = convert (p_array_type, gnu_expr32);
/* Dispatch on the descriptor class; presumably a switch (iclass) —
   the switch head is not visible in this chunk.  */
3485 case 1: /* Class S */
3486 case 15: /* Class SB */
3487 /* Build {1, LENGTH} template; LENGTH is the 1st field. */
3488 t = TYPE_FIELDS (desc_type);
3489 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3490 t = tree_cons (min_field,
3491 convert (TREE_TYPE (min_field), integer_one_node),
3492 tree_cons (max_field,
3493 convert (TREE_TYPE (max_field), t),
3495 template = gnat_build_constructor (template_type, t);
3496 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);
3498 /* For class S, we are done. */
3502 /* Test that we really have a SB descriptor, like DEC Ada. */
3503 t = build3 (COMPONENT_REF, TREE_TYPE (class), desc, class, NULL);
3504 u = convert (TREE_TYPE (class), DECL_INITIAL (class));
3505 u = build_binary_op (EQ_EXPR, integer_type_node, t, u);
3506 /* If so, there is already a template in the descriptor and
3507 it is located right after the POINTER field. */
3508 t = TREE_CHAIN (pointer);
3509 template = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3510 /* Otherwise use the {1, LENGTH} template we build above. */
/* Run-time choice between the in-descriptor template and the
   {1, LENGTH} fallback, keyed on the class comparison U.  */
3511 template_addr = build3 (COND_EXPR, p_bounds_type, u,
3512 build_unary_op (ADDR_EXPR, p_bounds_type,
3517 case 4: /* Class A */
3518 /* The AFLAGS field is the 7th field in the descriptor. */
3519 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer)));
3520 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3521 /* The DIMCT field is the 8th field in the descriptor. */
3523 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3524 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
3525 or FL_COEFF or FL_BOUNDS not set. */
/* 192 = 0xC0: mask of the FL_COEFF and FL_BOUNDS bits of AFLAGS.  */
3526 u = build_int_cst (TREE_TYPE (aflags), 192);
3527 u = build_binary_op (TRUTH_OR_EXPR, integer_type_node,
3528 build_binary_op (NE_EXPR, integer_type_node,
3530 convert (TREE_TYPE (dimct),
3532 build_binary_op (NE_EXPR, integer_type_node,
3533 build2 (BIT_AND_EXPR,
3537 /* There is already a template in the descriptor and it is
3538 located at the start of block 3 (12th field). */
3539 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (t))));
3540 template = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
/* If the dimension/flags check U fails, raise Constraint_Error at
   run time instead of using the template.  */
3541 template = build3 (COND_EXPR, p_bounds_type, u,
3542 build_call_raise (CE_Length_Check_Failed, Empty,
3543 N_Raise_Constraint_Error),
3545 template_addr = build_unary_op (ADDR_EXPR, p_bounds_type, template);
3548 case 10: /* Class NCA */
/* Unhandled descriptor classes are diagnosed at compile time.  */
3550 post_error ("unsupported descriptor type for &", gnat_subprog);
3551 template_addr = integer_zero_node;
3555 /* Build the fat pointer in the form of a constructor. */
3556 t = tree_cons (TYPE_FIELDS (gnu_type), gnu_expr32,
3557 tree_cons (TREE_CHAIN (TYPE_FIELDS (gnu_type)),
3558 template_addr, NULL_TREE));
3560 return gnat_build_constructor (gnu_type, t);
3567 /* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a
3568 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3569 which the VMS descriptor is passed. */
/* Dispatcher: decides at run time whether the incoming descriptor is the
   64-bit primary form or the 32-bit alternate, and delegates to
   convert_vms_descriptor64 / convert_vms_descriptor32 accordingly.
   NOTE(review): some interior lines (return type, braces, a few
   declarations such as is64bit) are missing from this chunk.  */
3572 convert_vms_descriptor (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
3574 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3575 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
/* First field of a 64-bit descriptor is named "MBO"; use the name to
   distinguish the two layouts statically.  */
3576 tree mbo = TYPE_FIELDS (desc_type);
3577 const char *mbostr = IDENTIFIER_POINTER (DECL_NAME (mbo));
3578 tree mbmo = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (mbo)));
3580 tree save_type = TREE_TYPE (gnu_expr);
3581 tree gnu_expr32, gnu_expr64;
3583 if (strcmp (mbostr, "MBO") != 0)
3584 /* If the field name is not MBO, it must be 32bit and no alternate */
3585 return convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);
3587 /* Otherwise primary must be 64bit and alternate 32bit */
3589 /* Test for 64bit descriptor */
/* A 64-bit descriptor has MBO == 1 and MBMO == -1 (see the MBO/MBMO
   fields built in build_vms_descriptor64 above).  */
3590 mbo = build3 (COMPONENT_REF, TREE_TYPE (mbo), desc, mbo, NULL_TREE);
3591 mbmo = build3 (COMPONENT_REF, TREE_TYPE (mbmo), desc, mbmo, NULL_TREE);
3592 is64bit = build_binary_op (TRUTH_ANDIF_EXPR, integer_type_node,
3593 build_binary_op (EQ_EXPR, integer_type_node,
3594 convert (integer_type_node, mbo),
3596 build_binary_op (EQ_EXPR, integer_type_node,
3597 convert (integer_type_node, mbmo),
3598 integer_minus_one_node));
3600 gnu_expr64 = convert_vms_descriptor64 (gnu_type, gnu_expr,
3602 /* Convert 32bit alternate. Hack alert ??? */
/* Temporarily retype the parameter to its 32-bit alternate type so the
   32-bit conversion sees the right descriptor layout; restored below.  */
3603 TREE_TYPE (gnu_expr) = DECL_PARM_ALT (gnu_expr);
3604 gnu_expr32 = convert_vms_descriptor32 (gnu_type, gnu_expr,
3606 TREE_TYPE (gnu_expr) = save_type;
/* Select between the two conversions with a run-time COND_EXPR.  */
3608 if (POINTER_TYPE_P (gnu_type))
3609 return build3 (COND_EXPR, gnu_type, is64bit, gnu_expr64, gnu_expr32);
3611 else if (TYPE_FAT_POINTER_P (gnu_type))
3612 return build3 (COND_EXPR, gnu_type, is64bit, gnu_expr64, gnu_expr32);
3617 /* Build a stub for the subprogram specified by the GCC tree GNU_SUBPROG
3618 and the GNAT node GNAT_SUBPROG. */
/* The stub unwraps any by-descriptor parameters (via
   convert_vms_descriptor) and then tail-calls the real subprogram,
   propagating its return value.  NOTE(review): some interior lines
   (braces, loop condition, parts of calls) are missing from this chunk.  */
3621 build_function_stub (tree gnu_subprog, Entity_Id gnat_subprog)
3623 tree gnu_subprog_type, gnu_subprog_addr, gnu_subprog_call;
3624 tree gnu_stub_param, gnu_param_list, gnu_arg_types, gnu_param;
3625 tree gnu_stub_decl = DECL_FUNCTION_STUB (gnu_subprog);
3628 gnu_subprog_type = TREE_TYPE (gnu_subprog);
3629 gnu_param_list = NULL_TREE;
3631 begin_subprog_body (gnu_stub_decl);
3634 start_stmt_group ();
3636 /* Loop over the parameters of the stub and translate any of them
3637 passed by descriptor into a by reference one. */
/* Walk the stub's PARM_DECL chain and the target type's argument-type
   list in lockstep.  */
3638 for (gnu_stub_param = DECL_ARGUMENTS (gnu_stub_decl),
3639 gnu_arg_types = TYPE_ARG_TYPES (gnu_subprog_type);
3641 gnu_stub_param = TREE_CHAIN (gnu_stub_param),
3642 gnu_arg_types = TREE_CHAIN (gnu_arg_types))
3644 if (DECL_BY_DESCRIPTOR_P (gnu_stub_param))
3645 gnu_param = convert_vms_descriptor (TREE_VALUE (gnu_arg_types),
3646 gnu_stub_param, gnat_subprog)ue;
3648 gnu_param = gnu_stub_param;
/* Accumulate in reverse; nreverse'd before the call below.  */
3650 gnu_param_list = tree_cons (NULL_TREE, gnu_param, gnu_param_list);
3653 gnu_body = end_stmt_group ();
3655 /* Invoke the internal subprogram. */
3656 gnu_subprog_addr = build1 (ADDR_EXPR, build_pointer_type (gnu_subprog_type),
3658 gnu_subprog_call = build_call_list (TREE_TYPE (gnu_subprog_type),
3660 nreverse (gnu_param_list));
3662 /* Propagate the return value, if any. */
3663 if (VOID_TYPE_P (TREE_TYPE (gnu_subprog_type)))
3664 append_to_statement_list (gnu_subprog_call, &gnu_body);
3666 append_to_statement_list (build_return_expr (DECL_RESULT (gnu_stub_decl),
3672 allocate_struct_function (gnu_stub_decl, false);
3673 end_subprog_body (gnu_body, false);
3676 /* Build a type to be used to represent an aliased object whose nominal
3677 type is an unconstrained array. This consists of a RECORD_TYPE containing
3678 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
3679 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
3680 is used to represent an arbitrary unconstrained object. Use NAME
3681 as the name of the record. */
/* Layout: field "BOUNDS" (the template) followed by field "ARRAY" (the
   data).  NOTE(review): the return statement and closing lines are not
   visible in this chunk.  */
3684 build_unc_object_type (tree template_type, tree object_type, tree name)
3686 tree type = make_node (RECORD_TYPE);
3687 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
3688 template_type, type, 0, 0, 0, 1);
3689 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
3692 TYPE_NAME (type) = name;
/* Mark the record so other code knows it embeds a bounds template.  */
3693 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
3694 finish_record_type (type,
3695 chainon (chainon (NULL_TREE, template_field),
3702 /* Same, taking a thin or fat pointer type instead of a template type. */
/* Extract the template type from the given pointer representation, then
   delegate to build_unc_object_type.  For a fat pointer the template type
   hangs off the second field (pointer to bounds); for a thin pointer it is
   the first field of the designated record.  */
3705 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
3710 gcc_assert (TYPE_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
3713 = (TYPE_FAT_POINTER_P (thin_fat_ptr_type)
3714 ? TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
3715 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
3716 return build_unc_object_type (template_type, object_type, name);
3719 /* Shift the component offsets within an unconstrained object TYPE to make it
3720 suitable for use as a designated type for thin pointers. */
3723 shift_unc_components_for_thin_pointers (tree type)
3725 /* Thin pointer values designate the ARRAY data of an unconstrained object,
3726 allocated past the BOUNDS template. The designated type is adjusted to
3727 have ARRAY at position zero and the template at a negative offset, so
3728 that COMPONENT_REFs on (*thin_ptr) designate the proper location. */
3730 tree bounds_field = TYPE_FIELDS (type);
3731 tree array_field = TREE_CHAIN (TYPE_FIELDS (type));
/* BOUNDS moves to -byte_position(ARRAY), i.e. just before the data.  */
3733 DECL_FIELD_OFFSET (bounds_field)
3734 = size_binop (MINUS_EXPR, size_zero_node, byte_position (array_field));
/* ARRAY becomes the origin of the designated type.  */
3736 DECL_FIELD_OFFSET (array_field) = size_zero_node;
3737 DECL_FIELD_BIT_OFFSET (array_field) = bitsize_zero_node;
3740 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE. In
3741 the normal case this is just two adjustments, but we have more to do
3742 if NEW is an UNCONSTRAINED_ARRAY_TYPE. */
/* NOTE(review): several interior lines (braces, some declarations such as
   ptr1/ref1/var, early returns) are missing from this chunk; comments
   describe the visible code only.  */
3745 update_pointer_to (tree old_type, tree new_type)
3747 tree ptr = TYPE_POINTER_TO (old_type);
3748 tree ref = TYPE_REFERENCE_TO (old_type);
3752 /* If this is the main variant, process all the other variants first. */
3753 if (TYPE_MAIN_VARIANT (old_type) == old_type)
3754 for (type = TYPE_NEXT_VARIANT (old_type); type;
3755 type = TYPE_NEXT_VARIANT (type))
3756 update_pointer_to (type, new_type);
3758 /* If no pointer or reference, we are done. */
3762 /* Merge the old type qualifiers in the new type.
3764 Each old variant has qualifiers for specific reasons, and the new
3765 designated type as well. Each set of qualifiers represents useful
3766 information grabbed at some point, and merging the two simply unifies
3767 these inputs into the final type description.
3769 Consider for instance a volatile type frozen after an access to constant
3770 type designating it. After the designated type freeze, we get here with a
3771 volatile new_type and a dummy old_type with a readonly variant, created
3772 when the access type was processed. We shall make a volatile and readonly
3773 designated type, because that's what it really is.
3775 We might also get here for a non-dummy old_type variant with different
3776 qualifiers than the new_type ones, for instance in some cases of pointers
3777 to private record type elaboration (see the comments around the call to
3778 this routine from gnat_to_gnu_entity/E_Access_Type). We have to merge the
3779 qualifiers in those cases too, to avoid accidentally discarding the
3780 initial set, and will often end up with old_type == new_type then. */
3781 new_type = build_qualified_type (new_type,
3782 TYPE_QUALS (old_type)
3783 | TYPE_QUALS (new_type));
3785 /* If the new type and the old one are identical, there is nothing to
3787 if (old_type == new_type)
3790 /* Otherwise, first handle the simple case. */
3791 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
/* Transfer the pointer/reference chains, then retarget every pointer
   and reference (and all their variants) at NEW_TYPE.  */
3793 TYPE_POINTER_TO (new_type) = ptr;
3794 TYPE_REFERENCE_TO (new_type) = ref;
3796 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
3797 for (ptr1 = TYPE_MAIN_VARIANT (ptr); ptr1;
3798 ptr1 = TYPE_NEXT_VARIANT (ptr1))
3799 TREE_TYPE (ptr1) = new_type;
3801 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
3802 for (ref1 = TYPE_MAIN_VARIANT (ref); ref1;
3803 ref1 = TYPE_NEXT_VARIANT (ref1))
3804 TREE_TYPE (ref1) = new_type;
3807 /* Now deal with the unconstrained array case. In this case the "pointer"
3808 is actually a RECORD_TYPE where both fields are pointers to dummy nodes.
3809 Turn them into pointers to the correct types using update_pointer_to. */
3810 else if (TREE_CODE (ptr) != RECORD_TYPE || !TYPE_IS_FAT_POINTER_P (ptr))
3815 tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
3816 tree array_field = TYPE_FIELDS (ptr);
3817 tree bounds_field = TREE_CHAIN (TYPE_FIELDS (ptr));
3818 tree new_ptr = TYPE_POINTER_TO (new_type);
3822 /* Make pointers to the dummy template point to the real template. */
3824 (TREE_TYPE (TREE_TYPE (bounds_field)),
3825 TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_ptr)))));
3827 /* The references to the template bounds present in the array type
3828 are made through a PLACEHOLDER_EXPR of type new_ptr. Since we
3829 are updating ptr to make it a full replacement for new_ptr as
3830 pointer to new_type, we must rework the PLACEHOLDER_EXPR so as
3831 to make it of type ptr. */
3832 new_ref = build3 (COMPONENT_REF, TREE_TYPE (bounds_field),
3833 build0 (PLACEHOLDER_EXPR, ptr),
3834 bounds_field, NULL_TREE);
3836 /* Create the new array for the new PLACEHOLDER_EXPR and make
3837 pointers to the dummy array point to it.
3839 ??? This is now the only use of substitute_in_type,
3840 which is a very "heavy" routine to do this, so it
3841 should be replaced at some point. */
3843 (TREE_TYPE (TREE_TYPE (array_field)),
3844 substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))),
3845 TREE_CHAIN (TYPE_FIELDS (new_ptr)), new_ref));
3847 /* Make ptr the pointer to new_type. */
3848 TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
3849 = TREE_TYPE (new_type) = ptr;
/* All variants of the fat pointer now designate NEW_TYPE.  */
3851 for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
3852 SET_TYPE_UNCONSTRAINED_ARRAY (var, new_type);
3854 /* Now handle updating the allocation record, what the thin pointer
3855 points to. Update all pointers from the old record into the new
3856 one, update the type of the array field, and recompute the size. */
3857 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);
3859 TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
3860 = TREE_TYPE (TREE_TYPE (array_field));
3862 /* The size recomputation needs to account for alignment constraints, so
3863 we let layout_type work it out. This will reset the field offsets to
3864 what they would be in a regular record, so we shift them back to what
3865 we want them to be for a thin pointer designated type afterwards. */
/* Clearing the sizes forces layout_type to recompute them.  */
3866 DECL_SIZE (TYPE_FIELDS (new_obj_rec)) = 0;
3867 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))) = 0;
3868 TYPE_SIZE (new_obj_rec) = 0;
3869 layout_type (new_obj_rec);
3871 shift_unc_components_for_thin_pointers (new_obj_rec);
3873 /* We are done, at last. */
3874 rest_of_record_type_compilation (ptr);
3878 /* Convert EXPR, a pointer to a constrained array, into a pointer to an
3879 unconstrained one. This involves making or finding a template. */
/* NOTE(review): some interior lines (braces, the null-pointer template
   field of the first constructor, the return keywords) are missing from
   this chunk.  */
3882 convert_to_fat_pointer (tree type, tree expr)
3884 tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
3885 tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
3886 tree etype = TREE_TYPE (expr);
3889 /* If EXPR is null, make a fat pointer that contains null pointers to the
3890 template and array. */
3891 if (integer_zerop (expr))
3893 gnat_build_constructor
3895 tree_cons (TYPE_FIELDS (type),
3896 convert (p_array_type, expr),
3897 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3898 convert (build_pointer_type (template_type),
3902 /* If EXPR is a thin pointer, make template and data from the record.. */
3903 else if (TYPE_THIN_POINTER_P (etype))
3905 tree fields = TYPE_FIELDS (TREE_TYPE (etype));
3907 expr = save_expr (expr);
/* If EXPR points at a known object, work on the object itself rather
   than going through an indirection.  */
3908 if (TREE_CODE (expr) == ADDR_EXPR)
3909 expr = TREE_OPERAND (expr, 0);
3911 expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
/* The template is the record's first field, the data is its second.  */
3913 template = build_component_ref (expr, NULL_TREE, fields, false);
3914 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
3915 build_component_ref (expr, NULL_TREE,
3916 TREE_CHAIN (fields), false));
3919 /* Otherwise, build the constructor for the template. */
3921 template = build_template (template_type, TREE_TYPE (etype), expr);
3923 /* The final result is a constructor for the fat pointer.
3925 If EXPR is an argument of a foreign convention subprogram, the type it
3926 points to is directly the component type. In this case, the expression
3927 type may not match the corresponding FIELD_DECL type at this point, so we
3928 call "convert" here to fix that up if necessary. This type consistency is
3929 required, for instance because it ensures that possible later folding of
3930 COMPONENT_REFs against this constructor always yields something of the
3931 same type as the initial reference.
3933 Note that the call to "build_template" above is still fine because it
3934 will only refer to the provided TEMPLATE_TYPE in this case. */
3936 gnat_build_constructor
3938 tree_cons (TYPE_FIELDS (type),
3939 convert (p_array_type, expr),
3940 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3941 build_unary_op (ADDR_EXPR, NULL_TREE, template),
3945 /* Convert to a thin pointer type, TYPE. The only thing we know how to convert
3946 is something that is a fat pointer, so convert to it first if EXPR
3947 is not already a fat pointer. */
/* NOTE(review): the assignment target of the fat-pointer conversion and
   the final return are not visible in this chunk.  */
3950 convert_to_thin_pointer (tree type, tree expr)
3952 if (!TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
3954 = convert_to_fat_pointer
3955 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
3957 /* We get the pointer to the data and use a NOP_EXPR to make it the
/* Extract the P_ARRAY field (first field of the fat pointer) and retype
   it to the thin pointer type.  */
3959 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)),
3961 expr = build1 (NOP_EXPR, type, expr);
3966 /* Create an expression whose value is that of EXPR,
3967 converted to type TYPE. The TREE_TYPE of the value
3968 is always TYPE. This function implements all reasonable
3969 conversions; callers should filter out those that are
3970 not permitted by the language being compiled. */
3973 convert (tree type, tree expr)
3975 enum tree_code code = TREE_CODE (type);
3976 tree etype = TREE_TYPE (expr);
3977 enum tree_code ecode = TREE_CODE (etype);
3979 /* If EXPR is already the right type, we are done. */
3983 /* If both input and output have padding and are of variable size, do this
3984 as an unchecked conversion. Likewise if one is a mere variant of the
3985 other, so we avoid a pointless unpad/repad sequence. */
3986 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
3987 && TYPE_IS_PADDING_P (type) && TYPE_IS_PADDING_P (etype)
3988 && (!TREE_CONSTANT (TYPE_SIZE (type))
3989 || !TREE_CONSTANT (TYPE_SIZE (etype))
3990 || gnat_types_compatible_p (type, etype)
3991 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
3992 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
3995 /* If the output type has padding, convert to the inner type and
3996 make a constructor to build the record. */
3997 else if (code == RECORD_TYPE && TYPE_IS_PADDING_P (type))
3999 /* If we previously converted from another type and our type is
4000 of variable size, remove the conversion to avoid the need for
4001 variable-size temporaries. Likewise for a conversion between
4002 original and packable version. */
4003 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4004 && (!TREE_CONSTANT (TYPE_SIZE (type))
4005 || (ecode == RECORD_TYPE
4006 && TYPE_NAME (etype)
4007 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
4008 expr = TREE_OPERAND (expr, 0);
4010 /* If we are just removing the padding from expr, convert the original
4011 object if we have variable size in order to avoid the need for some
4012 variable-size temporaries. Likewise if the padding is a mere variant
4013 of the other, so we avoid a pointless unpad/repad sequence. */
4014 if (TREE_CODE (expr) == COMPONENT_REF
4015 && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE
4016 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
4017 && (!TREE_CONSTANT (TYPE_SIZE (type))
4018 || gnat_types_compatible_p (type,
4019 TREE_TYPE (TREE_OPERAND (expr, 0)))
4020 || (ecode == RECORD_TYPE
4021 && TYPE_NAME (etype)
4022 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
4023 return convert (type, TREE_OPERAND (expr, 0));
4025 /* If the result type is a padded type with a self-referentially-sized
4026 field and the expression type is a record, do this as an
4027 unchecked conversion. */
4028 else if (TREE_CODE (etype) == RECORD_TYPE
4029 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
4030 return unchecked_convert (type, expr, false);
4034 gnat_build_constructor (type,
4035 tree_cons (TYPE_FIELDS (type),
4037 (TYPE_FIELDS (type)),
4042 /* If the input type has padding, remove it and convert to the output type.
4043 The conditions ordering is arranged to ensure that the output type is not
4044 a padding type here, as it is not clear whether the conversion would
4045 always be correct if this was to happen. */
4046 else if (ecode == RECORD_TYPE && TYPE_IS_PADDING_P (etype))
4050 /* If we have just converted to this padded type, just get the
4051 inner expression. */
4052 if (TREE_CODE (expr) == CONSTRUCTOR
4053 && !VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (expr))
4054 && VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->index
4055 == TYPE_FIELDS (etype))
4057 = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->value;
4059 /* Otherwise, build an explicit component reference. */
4062 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
4064 return convert (type, unpadded);
4067 /* If the input is a biased type, adjust first. */
4068 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
4069 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
4070 fold_convert (TREE_TYPE (etype),
4072 TYPE_MIN_VALUE (etype)));
4074 /* If the input is a justified modular type, we need to extract the actual
4075 object before converting it to any other type with the exceptions of an
4076 unconstrained array or of a mere type variant. It is useful to avoid the
4077 extraction and conversion in the type variant case because it could end
4078 up replacing a VAR_DECL expr by a constructor and we might be about the
4079 take the address of the result. */
4080 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
4081 && code != UNCONSTRAINED_ARRAY_TYPE
4082 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
4083 return convert (type, build_component_ref (expr, NULL_TREE,
4084 TYPE_FIELDS (etype), false));
4086 /* If converting to a type that contains a template, convert to the data
4087 type and then build the template. */
4088 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
4090 tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));
4092 /* If the source already has a template, get a reference to the
4093 associated array only, as we are going to rebuild a template
4094 for the target type anyway. */
4095 expr = maybe_unconstrained_array (expr);
4098 gnat_build_constructor
4100 tree_cons (TYPE_FIELDS (type),
4101 build_template (TREE_TYPE (TYPE_FIELDS (type)),
4102 obj_type, NULL_TREE),
4103 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
4104 convert (obj_type, expr), NULL_TREE)));
4107 /* There are some special cases of expressions that we process
4109 switch (TREE_CODE (expr))
4115 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
4116 conversion in gnat_expand_expr. NULL_EXPR does not represent
4117 an actual value, so no conversion is needed. */
4118 expr = copy_node (expr);
4119 TREE_TYPE (expr) = type;
4123 /* If we are converting a STRING_CST to another constrained array type,
4124 just make a new one in the proper type. */
4125 if (code == ecode && AGGREGATE_TYPE_P (etype)
4126 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
4127 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
4129 expr = copy_node (expr);
4130 TREE_TYPE (expr) = type;
4136 /* If we are converting a CONSTRUCTOR to a mere variant type, just make
4137 a new one in the proper type. */
4138 if (code == ecode && gnat_types_compatible_p (type, etype))
4140 expr = copy_node (expr);
4141 TREE_TYPE (expr) = type;
4145 /* Likewise for a conversion between original and packable version, but
4146 we have to work harder in order to preserve type consistency. */
4148 && code == RECORD_TYPE
4149 && TYPE_NAME (type) == TYPE_NAME (etype))
4151 VEC(constructor_elt,gc) *e = CONSTRUCTOR_ELTS (expr);
4152 unsigned HOST_WIDE_INT len = VEC_length (constructor_elt, e);
4153 VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, len);
4154 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
4155 unsigned HOST_WIDE_INT idx;
4158 FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
4160 constructor_elt *elt = VEC_quick_push (constructor_elt, v, NULL);
4161 /* We expect only simple constructors. Otherwise, punt. */
4162 if (!(index == efield || index == DECL_ORIGINAL_FIELD (efield)))
4165 elt->value = convert (TREE_TYPE (field), value);
4166 efield = TREE_CHAIN (efield);
4167 field = TREE_CHAIN (field);
4172 expr = copy_node (expr);
4173 TREE_TYPE (expr) = type;
4174 CONSTRUCTOR_ELTS (expr) = v;
4180 case UNCONSTRAINED_ARRAY_REF:
4181 /* Convert this to the type of the inner array by getting the address of
4182 the array from the template. */
4183 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
4184 build_component_ref (TREE_OPERAND (expr, 0),
4185 get_identifier ("P_ARRAY"),
4187 etype = TREE_TYPE (expr);
4188 ecode = TREE_CODE (etype);
4191 case VIEW_CONVERT_EXPR:
4193 /* GCC 4.x is very sensitive to type consistency overall, and view
4194 conversions thus are very frequent. Even though just "convert"ing
4195 the inner operand to the output type is fine in most cases, it
4196 might expose unexpected input/output type mismatches in special
4197 circumstances so we avoid such recursive calls when we can. */
4198 tree op0 = TREE_OPERAND (expr, 0);
4200 /* If we are converting back to the original type, we can just
4201 lift the input conversion. This is a common occurrence with
4202 switches back-and-forth amongst type variants. */
4203 if (type == TREE_TYPE (op0))
4206 /* Otherwise, if we're converting between two aggregate types, we
4207 might be allowed to substitute the VIEW_CONVERT_EXPR target type
4208 in place or to just convert the inner expression. */
4209 if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
4211 /* If we are converting between mere variants, we can just
4212 substitute the VIEW_CONVERT_EXPR in place. */
4213 if (gnat_types_compatible_p (type, etype))
4214 return build1 (VIEW_CONVERT_EXPR, type, op0);
4216 /* Otherwise, we may just bypass the input view conversion unless
4217 one of the types is a fat pointer, which is handled by
4218 specialized code below which relies on exact type matching. */
4219 else if (!TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
4220 return convert (type, op0);
4226 /* If both types are record types, just convert the pointer and
4227 make a new INDIRECT_REF.
4229 ??? Disable this for now since it causes problems with the
4230 code in build_binary_op for MODIFY_EXPR which wants to
4231 strip off conversions. But that code really is a mess and
4232 we need to do this a much better way some time. */
4234 && (TREE_CODE (type) == RECORD_TYPE
4235 || TREE_CODE (type) == UNION_TYPE)
4236 && (TREE_CODE (etype) == RECORD_TYPE
4237 || TREE_CODE (etype) == UNION_TYPE)
4238 && !TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
4239 return build_unary_op (INDIRECT_REF, NULL_TREE,
4240 convert (build_pointer_type (type),
4241 TREE_OPERAND (expr, 0)));
4248 /* Check for converting to a pointer to an unconstrained array. */
4249 if (TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
4250 return convert_to_fat_pointer (type, expr);
4252 /* If we are converting between two aggregate types that are mere
4253 variants, just make a VIEW_CONVERT_EXPR. */
4254 else if (code == ecode
4255 && AGGREGATE_TYPE_P (type)
4256 && gnat_types_compatible_p (type, etype))
4257 return build1 (VIEW_CONVERT_EXPR, type, expr);
4259 /* In all other cases of related types, make a NOP_EXPR. */
4260 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
4261 || (code == INTEGER_CST && ecode == INTEGER_CST
4262 && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
4263 return fold_convert (type, expr);
4268 return fold_build1 (CONVERT_EXPR, type, expr);
4271 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4272 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4273 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4274 return unchecked_convert (type, expr, false);
4275 else if (TYPE_BIASED_REPRESENTATION_P (type))
4276 return fold_convert (type,
4277 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4278 convert (TREE_TYPE (type), expr),
4279 TYPE_MIN_VALUE (type)));
4281 /* ... fall through ... */
4285 /* If we are converting an additive expression to an integer type
4286 with lower precision, be wary of the optimization that can be
4287 applied by convert_to_integer. There are 2 problematic cases:
4288 - if the first operand was originally of a biased type,
4289 because we could be recursively called to convert it
4290 to an intermediate type and thus rematerialize the
4291 additive operator endlessly,
4292 - if the expression contains a placeholder, because an
4293 intermediate conversion that changes the sign could
4294 be inserted and thus introduce an artificial overflow
4295 at compile time when the placeholder is substituted. */
4296 if (code == INTEGER_TYPE
4297 && ecode == INTEGER_TYPE
4298 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4299 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4301 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4303 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4304 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4305 || CONTAINS_PLACEHOLDER_P (expr))
4306 return build1 (NOP_EXPR, type, expr);
4309 return fold (convert_to_integer (type, expr));
4312 case REFERENCE_TYPE:
4313 /* If converting between two pointers to records denoting
4314 both a template and type, adjust if needed to account
4315 for any differing offsets, since one might be negative. */
4316 if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
4319 = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
4320 bit_position (TYPE_FIELDS (TREE_TYPE (type))));
4321 tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
4322 sbitsize_int (BITS_PER_UNIT));
4324 expr = build1 (NOP_EXPR, type, expr);
4325 TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
4326 if (integer_zerop (byte_diff))
4329 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
4330 fold (convert (sizetype, byte_diff)));
4333 /* If converting to a thin pointer, handle specially. */
4334 if (TYPE_THIN_POINTER_P (type)
4335 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
4336 return convert_to_thin_pointer (type, expr);
4338 /* If converting fat pointer to normal pointer, get the pointer to the
4339 array and then convert it. */
4340 else if (TYPE_FAT_POINTER_P (etype))
4341 expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
4344 return fold (convert_to_pointer (type, expr));
4347 return fold (convert_to_real (type, expr));
4350 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
4352 gnat_build_constructor
4353 (type, tree_cons (TYPE_FIELDS (type),
4354 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
4357 /* ... fall through ... */
4360 /* In these cases, assume the front-end has validated the conversion.
4361 If the conversion is valid, it will be a bit-wise conversion, so
4362 it can be viewed as an unchecked conversion. */
4363 return unchecked_convert (type, expr, false);
4366 /* This is either a conversion between a tagged type and some
4367 subtype, which we have to mark as a UNION_TYPE because of
4368 overlapping fields or a conversion of an Unchecked_Union. */
4369 return unchecked_convert (type, expr, false);
4371 case UNCONSTRAINED_ARRAY_TYPE:
4372 /* If EXPR is a constrained array, take its address, convert it to a
4373 fat pointer, and then dereference it. Likewise if EXPR is a
4374 record containing both a template and a constrained array.
4375 Note that a record representing a justified modular type
4376 always represents a packed constrained array. */
4377 if (ecode == ARRAY_TYPE
4378 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
4379 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
4380 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
4383 (INDIRECT_REF, NULL_TREE,
4384 convert_to_fat_pointer (TREE_TYPE (type),
4385 build_unary_op (ADDR_EXPR,
4388 /* Do something very similar for converting one unconstrained
4389 array to another. */
4390 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
4392 build_unary_op (INDIRECT_REF, NULL_TREE,
4393 convert (TREE_TYPE (type),
4394 build_unary_op (ADDR_EXPR,
4400 return fold (convert_to_complex (type, expr));
4407 /* Remove all conversions that are done in EXP. This includes converting
4408 from a padded type or to a justified modular type. If TRUE_ADDRESS
4409 is true, always return the address of the containing object even if
4410 the address is not bit-aligned. */
4413 remove_conversions (tree exp, bool true_address)
4415 switch (TREE_CODE (exp))
4419 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4420 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
4422 remove_conversions (VEC_index (constructor_elt,
4423 CONSTRUCTOR_ELTS (exp), 0)->value,
4428 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
4429 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
4430 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4433 case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
4435 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4444 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
4445 refers to the underlying array. If its type has TYPE_CONTAINS_TEMPLATE_P,
4446 likewise return an expression pointing to the underlying array. */
4449 maybe_unconstrained_array (tree exp)
4451 enum tree_code code = TREE_CODE (exp);
4454 switch (TREE_CODE (TREE_TYPE (exp)))
4456 case UNCONSTRAINED_ARRAY_TYPE:
4457 if (code == UNCONSTRAINED_ARRAY_REF)
4460 = build_unary_op (INDIRECT_REF, NULL_TREE,
4461 build_component_ref (TREE_OPERAND (exp, 0),
4462 get_identifier ("P_ARRAY"),
4464 TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp);
4468 else if (code == NULL_EXPR)
4469 return build1 (NULL_EXPR,
4470 TREE_TYPE (TREE_TYPE (TYPE_FIELDS
4471 (TREE_TYPE (TREE_TYPE (exp))))),
4472 TREE_OPERAND (exp, 0));
4475 /* If this is a padded type, convert to the unpadded type and see if
4476 it contains a template. */
4477 if (TYPE_IS_PADDING_P (TREE_TYPE (exp)))
4479 new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp))), exp);
4480 if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
4481 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (new)))
4483 build_component_ref (new, NULL_TREE,
4484 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (new))),
4487 else if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
4489 build_component_ref (exp, NULL_TREE,
4490 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))), 0);
4500 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
4501 If NOTRUNC_P is true, truncation operations should be suppressed. */
4504 unchecked_convert (tree type, tree expr, bool notrunc_p)
4506 tree etype = TREE_TYPE (expr);
4508 /* If the expression is already the right type, we are done. */
4512 /* If both types types are integral just do a normal conversion.
4513 Likewise for a conversion to an unconstrained array. */
4514 if ((((INTEGRAL_TYPE_P (type)
4515 && !(TREE_CODE (type) == INTEGER_TYPE
4516 && TYPE_VAX_FLOATING_POINT_P (type)))
4517 || (POINTER_TYPE_P (type) && ! TYPE_THIN_POINTER_P (type))
4518 || (TREE_CODE (type) == RECORD_TYPE
4519 && TYPE_JUSTIFIED_MODULAR_P (type)))
4520 && ((INTEGRAL_TYPE_P (etype)
4521 && !(TREE_CODE (etype) == INTEGER_TYPE
4522 && TYPE_VAX_FLOATING_POINT_P (etype)))
4523 || (POINTER_TYPE_P (etype) && !TYPE_THIN_POINTER_P (etype))
4524 || (TREE_CODE (etype) == RECORD_TYPE
4525 && TYPE_JUSTIFIED_MODULAR_P (etype))))
4526 || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
4529 bool final_unchecked = false;
4531 if (TREE_CODE (etype) == INTEGER_TYPE
4532 && TYPE_BIASED_REPRESENTATION_P (etype))
4534 tree ntype = copy_type (etype);
4536 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
4537 TYPE_MAIN_VARIANT (ntype) = ntype;
4538 expr = build1 (NOP_EXPR, ntype, expr);
4541 if (TREE_CODE (type) == INTEGER_TYPE
4542 && TYPE_BIASED_REPRESENTATION_P (type))
4544 rtype = copy_type (type);
4545 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
4546 TYPE_MAIN_VARIANT (rtype) = rtype;
4549 /* We have another special case: if we are unchecked converting subtype
4550 into a base type, we need to ensure that VRP doesn't propagate range
4551 information since this conversion may be done precisely to validate
4552 that the object is within the range it is supposed to have. */
4553 else if (TREE_CODE (expr) != INTEGER_CST
4554 && TREE_CODE (type) == INTEGER_TYPE && !TREE_TYPE (type)
4555 && ((TREE_CODE (etype) == INTEGER_TYPE && TREE_TYPE (etype))
4556 || TREE_CODE (etype) == ENUMERAL_TYPE
4557 || TREE_CODE (etype) == BOOLEAN_TYPE))
4559 /* The optimization barrier is a VIEW_CONVERT_EXPR node; moreover,
4560 in order not to be deemed an useless type conversion, it must
4561 be from subtype to base type.
4563 ??? This may raise addressability and/or aliasing issues because
4564 VIEW_CONVERT_EXPR gets gimplified as an lvalue, thus causing the
4565 address of its operand to be taken if it is deemed addressable
4566 and not already in GIMPLE form. */
4567 rtype = gnat_type_for_mode (TYPE_MODE (type), TYPE_UNSIGNED (type));
4568 rtype = copy_type (rtype);
4569 TYPE_MAIN_VARIANT (rtype) = rtype;
4570 TREE_TYPE (rtype) = type;
4571 final_unchecked = true;
4574 expr = convert (rtype, expr);
4576 expr = fold_build1 (final_unchecked ? VIEW_CONVERT_EXPR : NOP_EXPR,
4580 /* If we are converting TO an integral type whose precision is not the
4581 same as its size, first unchecked convert to a record that contains
4582 an object of the output type. Then extract the field. */
4583 else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
4584 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4585 GET_MODE_BITSIZE (TYPE_MODE (type))))
4587 tree rec_type = make_node (RECORD_TYPE);
4588 tree field = create_field_decl (get_identifier ("OBJ"), type,
4589 rec_type, 1, 0, 0, 0);
4591 TYPE_FIELDS (rec_type) = field;
4592 layout_type (rec_type);
4594 expr = unchecked_convert (rec_type, expr, notrunc_p);
4595 expr = build_component_ref (expr, NULL_TREE, field, 0);
4598 /* Similarly for integral input type whose precision is not equal to its
4600 else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype)
4601 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
4602 GET_MODE_BITSIZE (TYPE_MODE (etype))))
4604 tree rec_type = make_node (RECORD_TYPE);
4606 = create_field_decl (get_identifier ("OBJ"), etype, rec_type,
4609 TYPE_FIELDS (rec_type) = field;
4610 layout_type (rec_type);
4612 expr = gnat_build_constructor (rec_type, build_tree_list (field, expr));
4613 expr = unchecked_convert (type, expr, notrunc_p);
4616 /* We have a special case when we are converting between two
4617 unconstrained array types. In that case, take the address,
4618 convert the fat pointer types, and dereference. */
4619 else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
4620 && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
4621 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
4622 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
4623 build_unary_op (ADDR_EXPR, NULL_TREE,
4627 expr = maybe_unconstrained_array (expr);
4628 etype = TREE_TYPE (expr);
4629 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
4632 /* If the result is an integral type whose size is not equal to
4633 the size of the underlying machine type, sign- or zero-extend
4634 the result. We need not do this in the case where the input is
4635 an integral type of the same precision and signedness or if the output
4636 is a biased type or if both the input and output are unsigned. */
4638 && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
4639 && !(TREE_CODE (type) == INTEGER_TYPE
4640 && TYPE_BIASED_REPRESENTATION_P (type))
4641 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4642 GET_MODE_BITSIZE (TYPE_MODE (type)))
4643 && !(INTEGRAL_TYPE_P (etype)
4644 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
4645 && operand_equal_p (TYPE_RM_SIZE (type),
4646 (TYPE_RM_SIZE (etype) != 0
4647 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
4649 && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
4651 tree base_type = gnat_type_for_mode (TYPE_MODE (type),
4652 TYPE_UNSIGNED (type));
4654 = convert (base_type,
4655 size_binop (MINUS_EXPR,
4657 (GET_MODE_BITSIZE (TYPE_MODE (type))),
4658 TYPE_RM_SIZE (type)));
4661 build_binary_op (RSHIFT_EXPR, base_type,
4662 build_binary_op (LSHIFT_EXPR, base_type,
4663 convert (base_type, expr),
4668 /* An unchecked conversion should never raise Constraint_Error. The code
4669 below assumes that GCC's conversion routines overflow the same way that
4670 the underlying hardware does. This is probably true. In the rare case
4671 when it is false, we can rely on the fact that such conversions are
4672 erroneous anyway. */
4673 if (TREE_CODE (expr) == INTEGER_CST)
4674 TREE_OVERFLOW (expr) = 0;
4676 /* If the sizes of the types differ and this is an VIEW_CONVERT_EXPR,
4677 show no longer constant. */
4678 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4679 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
4681 TREE_CONSTANT (expr) = 0;
4686 /* Return the appropriate GCC tree code for the specified GNAT type,
4687 the latter being a record type as predicated by Is_Record_Type. */
4690 tree_code_for_record_type (Entity_Id gnat_type)
4692 Node_Id component_list
4693 = Component_List (Type_Definition
4695 (Implementation_Base_Type (gnat_type))));
4698 /* Make this a UNION_TYPE unless it's either not an Unchecked_Union or
4699 we have a non-discriminant field outside a variant. In either case,
4700 it's a RECORD_TYPE. */
4702 if (!Is_Unchecked_Union (gnat_type))
4705 for (component = First_Non_Pragma (Component_Items (component_list));
4706 Present (component);
4707 component = Next_Non_Pragma (component))
4708 if (Ekind (Defining_Entity (component)) == E_Component)
4714 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
4715 component of an aggregate type. */
4718 type_for_nonaliased_component_p (tree gnu_type)
4720 /* If the type is passed by reference, we may have pointers to the
4721 component so it cannot be made non-aliased. */
4722 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
4725 /* We used to say that any component of aggregate type is aliased
4726 because the front-end may take 'Reference of it. The front-end
4727 has been enhanced in the meantime so as to use a renaming instead
4728 in most cases, but the back-end can probably take the address of
4729 such a component too so we go for the conservative stance.
4731 For instance, we might need the address of any array type, even
4732 if normally passed by copy, to construct a fat pointer if the
4733 component is used as an actual for an unconstrained formal.
4735 Likewise for record types: even if a specific record subtype is
4736 passed by copy, the parent type might be passed by ref (e.g. if
4737 it's of variable size) and we might take the address of a child
4738 component to pass to a parent formal. We have no way to check
4739 for such conditions here. */
4740 if (AGGREGATE_TYPE_P (gnu_type))
4746 /* Perform final processing on global variables. */
4749 gnat_write_global_declarations (void)
4751 /* Proceed to optimize and emit assembly.
4752 FIXME: shouldn't be the front end's responsibility to call this. */
4755 /* Emit debug info for all global declarations. */
4756 emit_debug_global_declarations (VEC_address (tree, global_decls),
4757 VEC_length (tree, global_decls));
4760 /* ************************************************************************
4761 * * GCC builtins support *
4762 * ************************************************************************ */
4764 /* The general scheme is fairly simple:
4766 For each builtin function/type to be declared, gnat_install_builtins calls
4767 internal facilities which eventually get to gnat_push_decl, which in turn
4768 tracks the so declared builtin function decls in the 'builtin_decls' global
4769 datastructure. When an Intrinsic subprogram declaration is processed, we
4770 search this global datastructure to retrieve the associated BUILT_IN DECL
4773 /* Search the chain of currently available builtin declarations for a node
4774 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
4775 found, if any, or NULL_TREE otherwise. */
4777 builtin_decl_for (tree name)
4782 for (i = 0; VEC_iterate(tree, builtin_decls, i, decl); i++)
4783 if (DECL_NAME (decl) == name)
4789 /* The code below eventually exposes gnat_install_builtins, which declares
4790 the builtin types and functions we might need, either internally or as
4791 user accessible facilities.
4793 ??? This is a first implementation shot, still in rough shape. It is
4794 heavily inspired from the "C" family implementation, with chunks copied
4795 verbatim from there.
4797 Two obvious TODO candidates are
4798 o Use a more efficient name/decl mapping scheme
4799 o Devise a middle-end infrastructure to avoid having to copy
4800 pieces between front-ends. */
4802 /* ----------------------------------------------------------------------- *
4803 * BUILTIN ELEMENTARY TYPES *
4804 * ----------------------------------------------------------------------- */
4806 /* Standard data types to be used in builtin argument declarations. */
4810 CTI_SIGNED_SIZE_TYPE, /* For format checking only. */
4812 CTI_CONST_STRING_TYPE,
4817 static tree c_global_trees[CTI_MAX];
4819 #define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
4820 #define string_type_node c_global_trees[CTI_STRING_TYPE]
4821 #define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
4823 /* ??? In addition to some attribute handlers, we currently don't support a
4824 (small) number of builtin-types, which in turns inhibits support for a
4825 number of builtin functions. */
4826 #define wint_type_node void_type_node
4827 #define intmax_type_node void_type_node
4828 #define uintmax_type_node void_type_node
4830 /* Build the void_list_node (void_type_node having been created). */
4833 build_void_list_node (void)
4835 tree t = build_tree_list (NULL_TREE, void_type_node);
4839 /* Used to help initialize the builtin-types.def table. When a type of
4840 the correct size doesn't exist, use error_mark_node instead of NULL.
4841 The later results in segfaults even when a decl using the type doesn't
4845 builtin_type_for_size (int size, bool unsignedp)
4847 tree type = lang_hooks.types.type_for_size (size, unsignedp);
4848 return type ? type : error_mark_node;
4851 /* Build/push the elementary type decls that builtin functions/types
4855 install_builtin_elementary_types (void)
4857 signed_size_type_node = size_type_node;
4858 pid_type_node = integer_type_node;
4859 void_list_node = build_void_list_node ();
4861 string_type_node = build_pointer_type (char_type_node);
4862 const_string_type_node
4863 = build_pointer_type (build_qualified_type
4864 (char_type_node, TYPE_QUAL_CONST));
4867 /* ----------------------------------------------------------------------- *
4868 * BUILTIN FUNCTION TYPES *
4869 * ----------------------------------------------------------------------- */
4871 /* Now, builtin function types per se. */
4875 #define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
4876 #define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
4877 #define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
4878 #define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
4879 #define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
4880 #define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
4881 #define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
4882 #define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6) NAME,
4883 #define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7) NAME,
4884 #define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
4885 #define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
4886 #define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
4887 #define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
4888 #define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
4889 #define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG6) \
4891 #define DEF_POINTER_TYPE(NAME, TYPE) NAME,
4892 #include "builtin-types.def"
4893 #undef DEF_PRIMITIVE_TYPE
4894 #undef DEF_FUNCTION_TYPE_0
4895 #undef DEF_FUNCTION_TYPE_1
4896 #undef DEF_FUNCTION_TYPE_2
4897 #undef DEF_FUNCTION_TYPE_3
4898 #undef DEF_FUNCTION_TYPE_4
4899 #undef DEF_FUNCTION_TYPE_5
4900 #undef DEF_FUNCTION_TYPE_6
4901 #undef DEF_FUNCTION_TYPE_7
4902 #undef DEF_FUNCTION_TYPE_VAR_0
4903 #undef DEF_FUNCTION_TYPE_VAR_1
4904 #undef DEF_FUNCTION_TYPE_VAR_2
4905 #undef DEF_FUNCTION_TYPE_VAR_3
4906 #undef DEF_FUNCTION_TYPE_VAR_4
4907 #undef DEF_FUNCTION_TYPE_VAR_5
4908 #undef DEF_POINTER_TYPE
4912 typedef enum c_builtin_type builtin_type;
4914 /* A temporary array used in communication with def_fn_type. */
4915 static GTY(()) tree builtin_types[(int) BT_LAST + 1];
4917 /* A helper function for install_builtin_types. Build function type
4918 for DEF with return type RET and N arguments. If VAR is true, then the
4919 function should be variadic after those N arguments.
4921 Takes special care not to ICE if any of the types involved are
4922 error_mark_node, which indicates that said type is not in fact available
4923 (see builtin_type_for_size). In which case the function type as a whole
4924 should be error_mark_node. */
4927 def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
4929 tree args = NULL, t;
4934 for (i = 0; i < n; ++i)
4936 builtin_type a = va_arg (list, builtin_type);
4937 t = builtin_types[a];
4938 if (t == error_mark_node)
4940 args = tree_cons (NULL_TREE, t, args);
4944 args = nreverse (args);
4946 args = chainon (args, void_list_node);
4948 t = builtin_types[ret];
4949 if (t == error_mark_node)
4951 t = build_function_type (t, args);
4954 builtin_types[def] = t;
4957 /* Build the builtin function types and install them in the builtin_types
4958 array for later use in builtin function decls. */
4961 install_builtin_function_types (void)
4963 tree va_list_ref_type_node;
4964 tree va_list_arg_type_node;
4966 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4968 va_list_arg_type_node = va_list_ref_type_node =
4969 build_pointer_type (TREE_TYPE (va_list_type_node));
4973 va_list_arg_type_node = va_list_type_node;
4974 va_list_ref_type_node = build_reference_type (va_list_type_node);
4977 #define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
4978 builtin_types[ENUM] = VALUE;
4979 #define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
4980 def_fn_type (ENUM, RETURN, 0, 0);
4981 #define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
4982 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
4983 #define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
4984 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
4985 #define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
4986 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
4987 #define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
4988 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
4989 #define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
4990 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
4991 #define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
4993 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
4994 #define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
4996 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
4997 #define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
4998 def_fn_type (ENUM, RETURN, 1, 0);
4999 #define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
5000 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
5001 #define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
5002 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
5003 #define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5004 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
5005 #define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5006 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
5007 #define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5008 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5009 #define DEF_POINTER_TYPE(ENUM, TYPE) \
5010 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
5012 #include "builtin-types.def"
5014 #undef DEF_PRIMITIVE_TYPE
5015 #undef DEF_FUNCTION_TYPE_1
5016 #undef DEF_FUNCTION_TYPE_2
5017 #undef DEF_FUNCTION_TYPE_3
5018 #undef DEF_FUNCTION_TYPE_4
5019 #undef DEF_FUNCTION_TYPE_5
5020 #undef DEF_FUNCTION_TYPE_6
5021 #undef DEF_FUNCTION_TYPE_VAR_0
5022 #undef DEF_FUNCTION_TYPE_VAR_1
5023 #undef DEF_FUNCTION_TYPE_VAR_2
5024 #undef DEF_FUNCTION_TYPE_VAR_3
5025 #undef DEF_FUNCTION_TYPE_VAR_4
5026 #undef DEF_FUNCTION_TYPE_VAR_5
5027 #undef DEF_POINTER_TYPE
5028 builtin_types[(int) BT_LAST] = NULL_TREE;
5031 /* ----------------------------------------------------------------------- *
5032 * BUILTIN ATTRIBUTES *
5033 * ----------------------------------------------------------------------- */
5035 enum built_in_attribute
5037 #define DEF_ATTR_NULL_TREE(ENUM) ENUM,
5038 #define DEF_ATTR_INT(ENUM, VALUE) ENUM,
5039 #define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
5040 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
5041 #include "builtin-attrs.def"
5042 #undef DEF_ATTR_NULL_TREE
5044 #undef DEF_ATTR_IDENT
5045 #undef DEF_ATTR_TREE_LIST
5049 static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
/* Populate built_in_attributes by re-expanding builtin-attrs.def, this
   time mapping each DEF_ATTR_* entry to the tree that represents it.  */
5052 install_builtin_attributes (void)
5054 /* Fill in the built_in_attributes array. */
5055 #define DEF_ATTR_NULL_TREE(ENUM) \
5056 built_in_attributes[(int) ENUM] = NULL_TREE;
5057 #define DEF_ATTR_INT(ENUM, VALUE) \
5058 built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
5059 #define DEF_ATTR_IDENT(ENUM, STRING) \
5060 built_in_attributes[(int) ENUM] = get_identifier (STRING);
/* Tree lists are built from entries already filled in; presumably
   builtin-attrs.def orders PURPOSE/VALUE/CHAIN before their uses --
   TODO confirm against that .def file.  */
5061 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \
5062 built_in_attributes[(int) ENUM] \
5063 = tree_cons (built_in_attributes[(int) PURPOSE], \
5064 built_in_attributes[(int) VALUE], \
5065 built_in_attributes[(int) CHAIN]);
5066 #include "builtin-attrs.def"
5067 #undef DEF_ATTR_NULL_TREE
/* NOTE(review): no #undef DEF_ATTR_INT in this listing; extraction
   appears elided -- confirm against the full file.  */
5069 #undef DEF_ATTR_IDENT
5070 #undef DEF_ATTR_TREE_LIST
5073 /* Handle a "const" attribute; arguments as in
5074 struct attribute_spec.handler. */
5077 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
5078 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
/* For a FUNCTION_DECL, "const" is recorded by setting TREE_READONLY on
   the decl, which is how GCC trees mark const functions.  */
5081 if (TREE_CODE (*node) == FUNCTION_DECL)
5082 TREE_READONLY (*node) = 1;
/* NOTE(review): lines are elided between the flag setting and this
   statement; presumably a diagnostic branch handles non-function
   nodes before the attribute is dropped -- confirm in full source.  */
5084 *no_add_attrs = true;
5089 /* Handle a "nothrow" attribute; arguments as in
5090 struct attribute_spec.handler. */
5093 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
5094 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
/* TREE_NOTHROW on a FUNCTION_DECL marks the function as not throwing
   exceptions, enabling EH-related optimizations.  */
5097 if (TREE_CODE (*node) == FUNCTION_DECL)
5098 TREE_NOTHROW (*node) = 1;
/* NOTE(review): elided lines between here and the assignment below;
   likely a diagnostic for non-function nodes -- confirm.  */
5100 *no_add_attrs = true;
5105 /* Handle a "pure" attribute; arguments as in
5106 struct attribute_spec.handler. */
5109 handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5110 int ARG_UNUSED (flags), bool *no_add_attrs)
/* DECL_PURE_P marks a function as "pure": no effects except the return
   value, which may depend on memory reads.  */
5112 if (TREE_CODE (*node) == FUNCTION_DECL)
5113 DECL_PURE_P (*node) = 1;
5114 /* ??? TODO: Support types. */
/* NOTE(review): the warning below presumably sits in an (elided) else
   branch taken for non-FUNCTION_DECL nodes -- confirm in full source.  */
5117 warning (OPT_Wattributes, "%qE attribute ignored", name);
5118 *no_add_attrs = true;
5124 /* Handle a "no vops" attribute; arguments as in
5125 struct attribute_spec.handler. */
5128 handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
5129 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5130 bool *ARG_UNUSED (no_add_attrs))
/* This attribute is only ever applied internally to FUNCTION_DECLs,
   hence the hard assert rather than a user diagnostic.  */
5132 gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
5133 DECL_IS_NOVOPS (*node) = 1;
5137 /* Helper for nonnull attribute handling; fetch the operand number
5138 from the attribute argument list. */
5141 get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
5143 /* Verify the arg number is a constant. */
/* Reject non-constants and constants whose high word is set, i.e.
   values that do not fit in a single HOST_WIDE_INT.  */
5144 if (TREE_CODE (arg_num_expr) != INTEGER_CST
5145 || TREE_INT_CST_HIGH (arg_num_expr) != 0)
/* On success, *valp receives the low word of the constant.
   NOTE(review): the failure return between these lines is elided;
   presumably the function returns false there and true below.  */
5148 *valp = TREE_INT_CST_LOW (arg_num_expr);
5152 /* Handle the "nonnull" attribute. */
5154 handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
5155 tree args, int ARG_UNUSED (flags),
5159 unsigned HOST_WIDE_INT attr_arg_num;
5161 /* If no arguments are specified, all pointer arguments should be
5162 non-null. Verify a full prototype is given so that the arguments
5163 will have the correct types when we actually check them later. */
/* An unprototyped function has no TYPE_ARG_TYPES, so a bare
   "nonnull" cannot be checked -- reject it.  */
5166 if (!TYPE_ARG_TYPES (type))
5168 error ("nonnull attribute without arguments on a non-prototype");
5169 *no_add_attrs = true;
5174 /* Argument list specified. Verify that each argument number references
5175 a pointer argument. */
/* attr_arg_num counts the attribute operands (for diagnostics);
   arg_num is the 1-based function-parameter number each names.  */
5176 for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
5179 unsigned HOST_WIDE_INT arg_num = 0, ck_num;
5181 if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
5183 error ("nonnull argument has invalid operand number (argument %lu)",
5184 (unsigned long) attr_arg_num);
5185 *no_add_attrs = true;
/* Walk the parameter-type list to the arg_num-th entry.  */
5189 argument = TYPE_ARG_TYPES (type);
5192 for (ck_num = 1; ; ck_num++)
5194 if (!argument || ck_num == arg_num)
5196 argument = TREE_CHAIN (argument);
/* Ran off the end of the list, or hit the trailing void marker of a
   non-variadic prototype: operand number is out of range.  */
5200 || TREE_CODE (TREE_VALUE (argument)) == VOID_TYPE)
5202 error ("nonnull argument with out-of-range operand number (argument %lu, operand %lu)",
5203 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5204 *no_add_attrs = true;
/* "nonnull" only makes sense on pointer parameters.  */
5208 if (TREE_CODE (TREE_VALUE (argument)) != POINTER_TYPE)
5210 error ("nonnull argument references non-pointer operand (argument %lu, operand %lu)",
5211 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5212 *no_add_attrs = true;
5221 /* Handle a "sentinel" attribute. */
5224 handle_sentinel_attribute (tree *node, tree name, tree args,
5225 int ARG_UNUSED (flags), bool *no_add_attrs)
5227 tree params = TYPE_ARG_TYPES (*node);
/* NOTE(review): the check guarding this warning is elided; presumably
   it fires when params is NULL (unprototyped function) -- confirm.  */
5231 warning (OPT_Wattributes,
5232 "%qE attribute requires prototypes with named arguments", name);
5233 *no_add_attrs = true;
/* Advance to the last entry of the parameter-type list; for a
   non-variadic prototype that entry is the void marker.  */
5237 while (TREE_CHAIN (params))
5238 params = TREE_CHAIN (params);
5240 if (VOID_TYPE_P (TREE_VALUE (params)))
5242 warning (OPT_Wattributes,
5243 "%qE attribute only applies to variadic functions", name);
5244 *no_add_attrs = true;
/* With an explicit attribute operand, validate the sentinel position:
   it must be a non-negative integer constant.  */
5250 tree position = TREE_VALUE (args);
5252 if (TREE_CODE (position) != INTEGER_CST)
5254 warning (0, "requested position is not an integer constant");
5255 *no_add_attrs = true;
5259 if (tree_int_cst_lt (position, integer_zero_node))
5261 warning (0, "requested position is less than zero");
5262 *no_add_attrs = true;
5270 /* Handle a "noreturn" attribute; arguments as in
5271 struct attribute_spec.handler. */
5274 handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5275 int ARG_UNUSED (flags), bool *no_add_attrs)
5277 tree type = TREE_TYPE (*node);
5279 /* See FIXME comment in c_common_attribute_table. */
/* TREE_THIS_VOLATILE on a FUNCTION_DECL is GCC's encoding of
   "noreturn".  */
5280 if (TREE_CODE (*node) == FUNCTION_DECL)
5281 TREE_THIS_VOLATILE (*node) = 1;
/* For a pointer-to-function, rebuild the pointer type with a volatile
   (noreturn) variant of the pointed-to function type, preserving any
   existing const qualification.  */
5282 else if (TREE_CODE (type) == POINTER_TYPE
5283 && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
5285 = build_pointer_type
5286 (build_type_variant (TREE_TYPE (type),
5287 TYPE_READONLY (TREE_TYPE (type)), 1));
/* NOTE(review): the else introducing this fallback branch is elided;
   presumably it warns and drops the attribute for other nodes.  */
5290 warning (OPT_Wattributes, "%qE attribute ignored", name);
5291 *no_add_attrs = true;
5297 /* Handle a "malloc" attribute; arguments as in
5298 struct attribute_spec.handler. */
5301 handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5302 int ARG_UNUSED (flags), bool *no_add_attrs)
/* "malloc" only applies to functions returning a pointer; the double
   TREE_TYPE fetches the function's return type from the decl.  */
5304 if (TREE_CODE (*node) == FUNCTION_DECL
5305 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
5306 DECL_IS_MALLOC (*node) = 1;
/* NOTE(review): the else for this diagnostic branch is elided;
   presumably it covers all nodes failing the check above.  */
5309 warning (OPT_Wattributes, "%qE attribute ignored", name);
5310 *no_add_attrs = true;
5316 /* Fake handler for attributes we don't properly support. */
/* Accepts anything and changes nothing; every parameter is unused.
   NOTE(review): the return statement is elided from this listing.  */
5319 fake_attribute_handler (tree * ARG_UNUSED (node),
5320 tree ARG_UNUSED (name),
5321 tree ARG_UNUSED (args),
5322 int ARG_UNUSED (flags),
5323 bool * ARG_UNUSED (no_add_attrs))
5328 /* Handle a "type_generic" attribute. */
5331 handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
5332 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5333 bool * ARG_UNUSED (no_add_attrs))
5337 /* Ensure we have a function type. */
5338 gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);
/* Skip to the end of the parameter-type list; a variadic function's
   list ends without a trailing void marker, i.e. params becomes
   NULL.  */
5340 params = TYPE_ARG_TYPES (*node);
5341 while (params && ! VOID_TYPE_P (TREE_VALUE (params)))
5342 params = TREE_CHAIN (params);
5344 /* Ensure we have a variadic function. */
5345 gcc_assert (!params);
5350 /* ----------------------------------------------------------------------- *
5351 * BUILTIN FUNCTIONS *
5352 * ----------------------------------------------------------------------- */
5354 /* Worker for DEF_BUILTIN. Possibly define a builtin function with one or two
5355 names. Does not declare a non-__builtin_ function if flag_no_builtin, or
5356 if nonansi_p and flag_no_nonansi_builtin. */
5359 def_builtin_1 (enum built_in_function fncode,
5361 enum built_in_class fnclass,
5362 tree fntype, tree libtype,
5363 bool both_p, bool fallback_p,
5364 bool nonansi_p ATTRIBUTE_UNUSED,
5365 tree fnattrs, bool implicit_p)
5368 const char *libname;
5370 /* Preserve an already installed decl. It most likely was setup in advance
5371 (e.g. as part of the internal builtins) for specific reasons. */
5372 if (built_in_decls[(int) fncode] != NULL_TREE)
/* Any builtin declared with both_p or fallback_p must be named
   "__builtin_..." so the library name can be derived below.  */
5375 gcc_assert ((!both_p && !fallback_p)
5376 || !strncmp (name, "__builtin_",
5377 strlen ("__builtin_")));
/* libname is the builtin's name with the "__builtin_" prefix
   stripped, e.g. "memcpy" for "__builtin_memcpy".  */
5379 libname = name + strlen ("__builtin_");
5380 decl = add_builtin_function (name, fntype, fncode, fnclass,
5381 (fallback_p ? libname : NULL),
5384 /* ??? This is normally further controlled by command-line options
5385 like -fno-builtin, but we don't have them for Ada. */
/* Also declare the bare library name when requested (both_p,
   presumably -- the guard line is elided from this listing).  */
5386 add_builtin_function (libname, libtype, fncode, fnclass,
/* Record the decl in the global tables so the middle-end can find
   it; the implicit table is guarded by implicit_p (guard elided).  */
5389 built_in_decls[(int) fncode] = decl;
5391 implicit_built_in_decls[(int) fncode] = decl;
/* C-dialect flags referenced by the COND expressions in builtins.def.
   Ada has no notion of C89/C94/C99 dialects, so both stay 0.  */
5394 static int flag_isoc94 = 0;
5395 static int flag_isoc99 = 0;
5397 /* Install what the common builtins.def offers. */
5400 install_builtin_functions (void)
/* Expand each DEF_BUILTIN entry into a def_builtin_1 call, mapping the
   TYPE/LIBTYPE/ATTRS enum operands through the builtin_types and
   built_in_attributes tables filled in earlier.  The COND operand is
   presumably consulted by an elided part of this expansion (it can
   reference flag_isoc94/flag_isoc99 above) -- confirm in full file.  */
5402 #define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
5403 NONANSI_P, ATTRS, IMPLICIT, COND) \
5405 def_builtin_1 (ENUM, NAME, CLASS, \
5406 builtin_types[(int) TYPE], \
5407 builtin_types[(int) LIBTYPE], \
5408 BOTH_P, FALLBACK_P, NONANSI_P, \
5409 built_in_attributes[(int) ATTRS], IMPLICIT);
5410 #include "builtins.def"
5414 /* ----------------------------------------------------------------------- *
5415 * BUILTIN FUNCTIONS - INSTALLATION *
5416 * ----------------------------------------------------------------------- */
5418 /* Install the builtin functions we might need. */
5421 gnat_install_builtins (void)
/* Order matters: the type and attribute tables must be populated
   before any builtin function decl references them.  */
5423 install_builtin_elementary_types ();
5424 install_builtin_function_types ();
5425 install_builtin_attributes ();
5427 /* Install builtins used by generic middle-end pieces first. Some of these
5428 know about internal specificities and control attributes accordingly, for
5429 instance __builtin_alloca vs no-throw and -fstack-check. We will ignore
5430 the generic definition from builtins.def. */
5431 build_common_builtin_nodes ();
5433 /* Now, install the target specific builtins, such as the AltiVec family on
5434 ppc, and the common set as exposed by builtins.def. */
5435 targetm.init_builtins ();
5436 install_builtin_functions ();
5439 #include "gt-ada-utils.h"
5440 #include "gtype-ada.h"