1 /****************************************************************************
3 * GNAT COMPILER COMPONENTS *
7 * C Implementation File *
9 * Copyright (C) 1992-2008, Free Software Foundation, Inc. *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
24 ****************************************************************************/
26 /* We have attribute handlers using C specific format specifiers in warning
27 messages. Make sure they are properly recognized. */
28 #define GCC_DIAG_STYLE __gcc_cdiag__
32 #include "coretypes.h"
45 #include "tree-inline.h"
46 #include "tree-gimple.h"
47 #include "tree-dump.h"
48 #include "pointer-set.h"
49 #include "langhooks.h"
/* Fallback defaults for targets that do not define these themselves.  */
65 #ifndef MAX_FIXED_MODE_SIZE
66 #define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
69 #ifndef MAX_BITS_PER_WORD
70 #define MAX_BITS_PER_WORD BITS_PER_WORD
73 /* If nonzero, pretend we are allocating at global level. */
76 /* Tree nodes for the various types and decls we create. */
77 tree gnat_std_decls[(int) ADT_LAST];
79 /* Functions to call for each of the possible raise reasons. */
80 tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
82 /* Forward declarations for handlers of attributes. */
/* All handlers share the standard attribute_spec handler signature:
   (node, name, args, flags, no_add_attrs).  */
83 static tree handle_const_attribute (tree *, tree, tree, int, bool *);
84 static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
85 static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
86 static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
87 static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
88 static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
89 static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
90 static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
91 static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
93 /* Fake handler for attributes we don't properly support, typically because
94 they'd require dragging a lot of the common-c front-end circuitry. */
95 static tree fake_attribute_handler (tree *, tree, tree, int, bool *);
97 /* Table of machine-independent internal attributes for Ada. We support
98 this minimal set of attributes to accommodate the needs of builtins. */
99 const struct attribute_spec gnat_internal_attribute_table[] =
101 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
102 { "const", 0, 0, true, false, false, handle_const_attribute },
103 { "nothrow", 0, 0, true, false, false, handle_nothrow_attribute },
104 { "pure", 0, 0, true, false, false, handle_pure_attribute },
105 { "no vops", 0, 0, true, false, false, handle_novops_attribute },
/* A max_len of -1 means "nonnull" accepts any number of arguments.  */
106 { "nonnull", 0, -1, false, true, true, handle_nonnull_attribute },
107 { "sentinel", 0, 1, false, true, true, handle_sentinel_attribute },
108 { "noreturn", 0, 0, true, false, false, handle_noreturn_attribute },
109 { "malloc", 0, 0, true, false, false, handle_malloc_attribute },
110 { "type generic", 0, 0, false, true, true, handle_type_generic_attribute },
112 /* ??? format and format_arg are heavy and not supported, which actually
113 prevents support for stdio builtins, which we however declare as part
114 of the common builtins.def contents. */
115 { "format", 3, 3, false, true, true, fake_attribute_handler },
116 { "format_arg", 1, 1, false, true, true, fake_attribute_handler },
/* The NULL-named entry terminates the table.  */
118 { NULL, 0, 0, false, false, false, NULL }
121 /* Associates a GNAT tree node to a GCC tree node. It is used in
122 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
123 of `save_gnu_tree' for more info. */
124 static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
/* Accessors for the association table; entries are indexed relative to
   First_Node_Id.  */
126 #define GET_GNU_TREE(GNAT_ENTITY) \
127 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
129 #define SET_GNU_TREE(GNAT_ENTITY,VAL) \
130 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
132 #define PRESENT_GNU_TREE(GNAT_ENTITY) \
133 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
135 /* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
136 static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;
138 #define GET_DUMMY_NODE(GNAT_ENTITY) \
139 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
141 #define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
142 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
144 #define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
145 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
147 /* This variable keeps a table for types for each precision so that we only
148 allocate each of them once. Signed and unsigned types are kept separate.
150 Note that these types are only used when fold-const requests something
151 special. Perhaps we should NOT share these types; we'll see how it
   goes.  */
153 static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
155 /* Likewise for float types, but record these by mode. */
156 static GTY(()) tree float_types[NUM_MACHINE_MODES];
158 /* For each binding contour we allocate a binding_level structure to indicate
159 the binding depth. */
161 struct gnat_binding_level GTY((chain_next ("%h.chain")))
163 /* The binding level containing this one (the enclosing binding level). */
164 struct gnat_binding_level *chain;
165 /* The BLOCK node for this level. */
167 /* If nonzero, the setjmp buffer that needs to be updated for any
168 variable-sized definition within this context. */
172 /* The binding level currently in effect. */
173 static GTY(()) struct gnat_binding_level *current_binding_level;
175 /* A chain of gnat_binding_level structures awaiting reuse. */
176 static GTY((deletable)) struct gnat_binding_level *free_binding_level;
178 /* An array of global declarations. */
179 static GTY(()) VEC(tree,gc) *global_decls;
181 /* An array of builtin function declarations. */
182 static GTY(()) VEC(tree,gc) *builtin_decls;
184 /* An array of global renaming pointers. */
185 static GTY(()) VEC(tree,gc) *global_renaming_pointers;
187 /* A chain of unused BLOCK nodes. */
188 static GTY((deletable)) tree free_block_chain;
/* Forward declarations for local helpers defined later in this file.  */
190 static void gnat_install_builtins (void);
191 static tree merge_sizes (tree, tree, tree, bool, bool);
192 static tree compute_related_constant (tree, tree);
193 static tree split_plus (tree, tree *);
194 static void gnat_gimplify_function (tree);
195 static tree float_type_for_precision (int, enum machine_mode);
196 static tree convert_to_fat_pointer (tree, tree);
197 static tree convert_to_thin_pointer (tree, tree);
198 static tree make_descriptor_field (const char *,tree, tree, tree);
199 static bool potential_alignment_gap (tree, tree, tree);
201 /* Initialize the association of GNAT nodes to GCC trees. */
204 init_gnat_to_gnu (void)
/* Allocate a zeroed, garbage-collected table with one slot per GNAT node.  */
206 associate_gnat_to_gnu
207 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
210 /* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
211 which is to be associated with GNAT_ENTITY. Such GCC tree node is always
212 a ..._DECL node. If NO_CHECK is nonzero, the latter check is suppressed.
214 If GNU_DECL is zero, a previous association is to be reset. */
217 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
219 /* Check that GNAT_ENTITY is not already defined and that it is being set
220 to something which is a decl. Raise gigi 401 if not. Usually, this
221 means GNAT_ENTITY is defined twice, but occasionally is due to some
   Gigi problem.  */
223 gcc_assert (!(gnu_decl
224 && (PRESENT_GNU_TREE (gnat_entity)
225 || (!no_check && !DECL_P (gnu_decl)))));
227 SET_GNU_TREE (gnat_entity, gnu_decl);
230 /* GNAT_ENTITY is a GNAT tree node for a defining identifier.
231 Return the ..._DECL node that was associated with it. If there is no tree
232 node associated with GNAT_ENTITY, abort.
234 In some cases, such as delayed elaboration or expressions that need to
235 be elaborated only once, GNAT_ENTITY is really not an entity. */
238 get_gnu_tree (Entity_Id gnat_entity)
/* The abort mentioned above is implemented by this assertion.  */
240 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
241 return GET_GNU_TREE (gnat_entity);
244 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
247 present_gnu_tree (Entity_Id gnat_entity)
/* Thin wrapper over the PRESENT_GNU_TREE table-lookup macro.  */
249 return PRESENT_GNU_TREE (gnat_entity);
252 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
255 init_dummy_type (void)
/* Allocate a zeroed, garbage-collected table with one slot per GNAT node.  */
258 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
261 /* Make a dummy type corresponding to GNAT_TYPE. */
264 make_dummy_type (Entity_Id gnat_type)
266 Entity_Id gnat_underlying = Gigi_Equivalent_Type (gnat_type);
269 /* If there is an equivalent type, get its underlying type. */
270 if (Present (gnat_underlying))
271 gnat_underlying = Underlying_Type (gnat_underlying);
273 /* If there was no equivalent type (can only happen when just annotating
274 types) or underlying type, go back to the original type. */
275 if (No (gnat_underlying))
276 gnat_underlying = gnat_type;
278 /* If there is already a dummy type, use that one. Else make one. */
279 if (PRESENT_DUMMY_NODE (gnat_underlying))
280 return GET_DUMMY_NODE (gnat_underlying);
282 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
   an ENUMERAL_TYPE.  */
284 gnu_type = make_node (Is_Record_Type (gnat_underlying)
285 ? tree_code_for_record_type (gnat_underlying)
287 TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
288 TYPE_DUMMY_P (gnu_type) = 1;
289 if (AGGREGATE_TYPE_P (gnu_type))
291 TYPE_STUB_DECL (gnu_type) = build_decl (TYPE_DECL, NULL_TREE, gnu_type);
292 TYPE_BY_REFERENCE_P (gnu_type) = Is_By_Reference_Type (gnat_type);
/* Cache the dummy so later requests for the same entity reuse it.  */
295 SET_DUMMY_NODE (gnat_underlying, gnu_type);
300 /* Return nonzero if we are currently in the global binding level. */
303 global_bindings_p (void)
/* force_global lets callers pretend to be at global level even inside a
   function.  */
305 return ((force_global || !current_function_decl) ? -1 : 0);
308 /* Enter a new binding level. */
313 struct gnat_binding_level *newlevel = NULL;
315 /* Reuse a struct for this binding level, if there is one. */
316 if (free_binding_level)
318 newlevel = free_binding_level;
319 free_binding_level = free_binding_level->chain;
/* Otherwise allocate a fresh structure from GC storage.  */
323 = (struct gnat_binding_level *)
324 ggc_alloc (sizeof (struct gnat_binding_level));
326 /* Use a free BLOCK, if any; otherwise, allocate one. */
327 if (free_block_chain)
329 newlevel->block = free_block_chain;
330 free_block_chain = BLOCK_CHAIN (free_block_chain);
331 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
334 newlevel->block = make_node (BLOCK);
336 /* Point the BLOCK we just made to its parent. */
337 if (current_binding_level)
338 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
340 BLOCK_VARS (newlevel->block) = BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
341 TREE_USED (newlevel->block) = 1;
343 /* Add this level to the front of the chain (stack) of levels that are
   currently active.  */
345 newlevel->chain = current_binding_level;
346 newlevel->jmpbuf_decl = NULL_TREE;
347 current_binding_level = newlevel;
350 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
351 and point FNDECL to this BLOCK. */
354 set_current_block_context (tree fndecl)
/* Link the current level's BLOCK and the function both ways.  */
356 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
357 DECL_INITIAL (fndecl) = current_binding_level->block;
360 /* Set the jmpbuf_decl for the current binding level to DECL. */
363 set_block_jmpbuf_decl (tree decl)
/* Simple setter on the current level's jmpbuf_decl field.  */
365 current_binding_level->jmpbuf_decl = decl;
368 /* Get the jmpbuf_decl, if any, for the current binding level. */
371 get_block_jmpbuf_decl ()
/* Simple getter on the current level's jmpbuf_decl field.  */
373 return current_binding_level->jmpbuf_decl;
376 /* Exit a binding level. Set any BLOCK into the current code group. */
381 struct gnat_binding_level *level = current_binding_level;
382 tree block = level->block;
/* Variables and subblocks were chained in reverse; restore source order
   before handing the BLOCK over.  */
384 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
385 BLOCK_SUBBLOCKS (block) = nreverse (BLOCK_SUBBLOCKS (block));
387 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
388 are no variables free the block and merge its subblocks into those of its
389 parent block. Otherwise, add it to the list of its parent. */
390 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
392 else if (BLOCK_VARS (block) == NULL_TREE)
394 BLOCK_SUBBLOCKS (level->chain->block)
395 = chainon (BLOCK_SUBBLOCKS (block),
396 BLOCK_SUBBLOCKS (level->chain->block))
397 BLOCK_CHAIN (block) = free_block_chain;
398 free_block_chain = block;
402 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
403 BLOCK_SUBBLOCKS (level->chain->block) = block;
404 TREE_USED (block) = 1;
405 set_block_for_group (block);
408 /* Free this binding structure. */
/* Pop the level and push the structure onto the reuse chain.  */
409 current_binding_level = level->chain;
410 level->chain = free_binding_level;
411 free_binding_level = level;
415 /* Records a ..._DECL node DECL as belonging to the current lexical scope
416 and uses GNAT_NODE for location information and propagating flags. */
419 gnat_pushdecl (tree decl, Node_Id gnat_node)
421 /* If this decl is public external or at toplevel, there is no context.
422 But PARM_DECLs always go in the level of its function. */
423 if (TREE_CODE (decl) != PARM_DECL
424 && ((DECL_EXTERNAL (decl) && TREE_PUBLIC (decl))
425 || global_bindings_p ()))
426 DECL_CONTEXT (decl) = 0;
429 DECL_CONTEXT (decl) = current_function_decl;
431 /* Functions imported in another function are not really nested. */
432 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_PUBLIC (decl))
433 DECL_NO_STATIC_CHAIN (decl) = 1;
/* Suppress warnings when no GNAT node is given or it has warnings off.  */
436 TREE_NO_WARNING (decl) = (gnat_node == Empty || Warnings_Off (gnat_node));
438 /* Set the location of DECL and emit a declaration for it. */
439 if (Present (gnat_node))
440 Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
441 add_decl_expr (decl, gnat_node);
443 /* Put the declaration on the list. The list of declarations is in reverse
444 order. The list will be reversed later. Put global variables in the
445 globals list and builtin functions in a dedicated list to speed up
446 further lookups. Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into
447 the list, as they will cause trouble with the debugger and aren't needed
   anyway.  */
449 if (TREE_CODE (decl) != TYPE_DECL
450 || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
452 if (global_bindings_p ())
454 VEC_safe_push (tree, gc, global_decls, decl);
456 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
457 VEC_safe_push (tree, gc, builtin_decls, decl);
461 TREE_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
462 BLOCK_VARS (current_binding_level->block) = decl;
466 /* For the declaration of a type, set its name if it either is not already
467 set, was set to an IDENTIFIER_NODE, indicating an internal name,
468 or if the previous type name was not derived from a source name.
469 We'd rather have the type named with a real name and all the pointer
470 types to the same object have the same POINTER_TYPE node. Code in the
471 equivalent function of c-decl.c makes a copy of the type node here, but
472 that may cause us trouble with incomplete types. We make an exception
473 for fat pointer types because the compiler automatically builds them
474 for unconstrained array types and the debugger uses them to represent
475 both these and pointers to these. */
476 if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
478 tree t = TREE_TYPE (decl);
480 if (!TYPE_NAME (t) || TREE_CODE (TYPE_NAME (t)) == IDENTIFIER_NODE)
482 else if (TYPE_FAT_POINTER_P (t))
/* Make a variant copy so the fat pointer carries the name while the
   original type remains reachable as DECL_ORIGINAL_TYPE.  */
484 tree tt = build_variant_type_copy (t);
485 TYPE_NAME (tt) = decl;
486 TREE_USED (tt) = TREE_USED (t);
487 TREE_TYPE (decl) = tt;
488 DECL_ORIGINAL_TYPE (decl) = t;
491 else if (DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
496 /* Propagate the name to all the variants. This is needed for
497 the type qualifiers machinery to work properly. */
499 for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
500 TYPE_NAME (t) = decl;
504 /* Do little here. Set up the standard declarations later after the
505 front end has been run. */
508 gnat_init_decl_processing (void)
510 /* Make the binding_level structure for global names. */
511 current_function_decl = 0;
512 current_binding_level = 0;
513 free_binding_level = 0;
516 build_common_tree_nodes (true, true);
518 /* In Ada, we use a signed type for SIZETYPE. Use the signed type
519 corresponding to the size of Pmode. In most cases when ptr_mode and
520 Pmode differ, C will use the width of ptr_mode as sizetype. But we get
521 far better code using the width of Pmode. Make this here since we need
522 this before we can expand the GNAT types. */
523 size_type_node = gnat_type_for_size (GET_MODE_BITSIZE (Pmode), 0);
524 set_sizetype (size_type_node);
525 build_common_tree_nodes_2 (0);
/* Convenience "void *" type used throughout gigi.  */
527 ptr_void_type_node = build_pointer_type (void_type_node);
530 /* Create the predefined scalar types such as `integer_type_node' needed
531 in the gcc back-end and initialize the global binding level. */
534 init_gigi_decls (tree long_long_float_type, tree exception_type)
539 /* Set the types that GCC and Gigi use from the front end. We would like
540 to do this for char_type_node, but it needs to correspond to the C
   char_type_node.  */
542 if (TREE_CODE (TREE_TYPE (long_long_float_type)) == INTEGER_TYPE)
544 /* In this case, the builtin floating point types are VAX float,
545 so make up a type for use. */
546 longest_float_type_node = make_node (REAL_TYPE);
547 TYPE_PRECISION (longest_float_type_node) = LONG_DOUBLE_TYPE_SIZE;
548 layout_type (longest_float_type_node);
549 create_type_decl (get_identifier ("longest float type"),
550 longest_float_type_node, NULL, false, true, Empty);
553 longest_float_type_node = TREE_TYPE (long_long_float_type);
555 except_type_node = TREE_TYPE (exception_type);
557 unsigned_type_node = gnat_type_for_size (INT_TYPE_SIZE, 1);
558 create_type_decl (get_identifier ("unsigned int"), unsigned_type_node,
559 NULL, false, true, Empty);
561 void_type_decl_node = create_type_decl (get_identifier ("void"),
562 void_type_node, NULL, false, true,
/* Types for parameterless procedures and pointers to them.  */
565 void_ftype = build_function_type (void_type_node, NULL_TREE);
566 ptr_void_ftype = build_pointer_type (void_ftype);
568 /* Build the special descriptor type and its null node if needed. */
569 if (TARGET_VTABLE_USES_DESCRIPTORS)
571 tree field_list = NULL_TREE, null_list = NULL_TREE;
574 fdesc_type_node = make_node (RECORD_TYPE);
576 for (j = 0; j < TARGET_VTABLE_USES_DESCRIPTORS; j++)
578 tree field = create_field_decl (NULL_TREE, ptr_void_ftype,
579 fdesc_type_node, 0, 0, 0, 1);
580 TREE_CHAIN (field) = field_list;
582 null_list = tree_cons (field, null_pointer_node, null_list);
585 finish_record_type (fdesc_type_node, nreverse (field_list), 0, false);
586 null_fdesc_node = gnat_build_constructor (fdesc_type_node, null_list);
589 /* Now declare runtime functions. */
590 endlink = tree_cons (NULL_TREE, void_type_node, NULL_TREE);
592 /* malloc is a function declaration tree for a function to allocate
   memory.  */
594 malloc_decl = create_subprog_decl (get_identifier ("__gnat_malloc"),
596 build_function_type (ptr_void_type_node,
597 tree_cons (NULL_TREE,
600 NULL_TREE, false, true, true, NULL,
602 DECL_IS_MALLOC (malloc_decl) = 1;
604 /* malloc32 is a function declaration tree for a function to allocate
605 32bit memory on a 64bit system. Needed only on 64bit VMS. */
606 malloc32_decl = create_subprog_decl (get_identifier ("__gnat_malloc32"),
608 build_function_type (ptr_void_type_node,
609 tree_cons (NULL_TREE,
612 NULL_TREE, false, true, true, NULL,
614 DECL_IS_MALLOC (malloc32_decl) = 1;
616 /* free is a function declaration tree for a function to free memory. */
618 = create_subprog_decl (get_identifier ("__gnat_free"), NULL_TREE,
619 build_function_type (void_type_node,
620 tree_cons (NULL_TREE,
623 NULL_TREE, false, true, true, NULL, Empty);
625 /* Make the types and functions used for exception processing. */
627 = build_array_type (gnat_type_for_mode (Pmode, 0),
628 build_index_type (build_int_cst (NULL_TREE, 5)));
629 create_type_decl (get_identifier ("JMPBUF_T"), jmpbuf_type, NULL,
631 jmpbuf_ptr_type = build_pointer_type (jmpbuf_type);
633 /* Functions to get and set the jumpbuf pointer for the current thread. */
635 = create_subprog_decl
636 (get_identifier ("system__soft_links__get_jmpbuf_address_soft"),
637 NULL_TREE, build_function_type (jmpbuf_ptr_type, NULL_TREE),
638 NULL_TREE, false, true, true, NULL, Empty);
639 /* Avoid creating superfluous edges to __builtin_setjmp receivers. */
640 DECL_PURE_P (get_jmpbuf_decl) = 1;
643 = create_subprog_decl
644 (get_identifier ("system__soft_links__set_jmpbuf_address_soft"),
646 build_function_type (void_type_node,
647 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
648 NULL_TREE, false, true, true, NULL, Empty);
650 /* Function to get the current exception. */
652 = create_subprog_decl
653 (get_identifier ("system__soft_links__get_gnat_exception"),
655 build_function_type (build_pointer_type (except_type_node), NULL_TREE),
656 NULL_TREE, false, true, true, NULL, Empty);
657 /* Avoid creating superfluous edges to __builtin_setjmp receivers. */
658 DECL_PURE_P (get_excptr_decl) = 1;
660 /* Functions that raise exceptions. */
662 = create_subprog_decl
663 (get_identifier ("__gnat_raise_nodefer_with_msg"), NULL_TREE,
664 build_function_type (void_type_node,
665 tree_cons (NULL_TREE,
666 build_pointer_type (except_type_node),
668 NULL_TREE, false, true, true, NULL, Empty);
670 /* Dummy objects to materialize "others" and "all others" in the exception
671 tables. These are exported by a-exexpr.adb, so see this unit for the
   name to use.  */
675 = create_var_decl (get_identifier ("OTHERS"),
676 get_identifier ("__gnat_others_value"),
677 integer_type_node, 0, 1, 0, 1, 1, 0, Empty);
680 = create_var_decl (get_identifier ("ALL_OTHERS"),
681 get_identifier ("__gnat_all_others_value"),
682 integer_type_node, 0, 1, 0, 1, 1, 0, Empty);
684 /* Hooks to call when entering/leaving an exception handler. */
686 = create_subprog_decl (get_identifier ("__gnat_begin_handler"), NULL_TREE,
687 build_function_type (void_type_node,
688 tree_cons (NULL_TREE,
691 NULL_TREE, false, true, true, NULL, Empty);
694 = create_subprog_decl (get_identifier ("__gnat_end_handler"), NULL_TREE,
695 build_function_type (void_type_node,
696 tree_cons (NULL_TREE,
699 NULL_TREE, false, true, true, NULL, Empty);
701 /* If in no exception handlers mode, all raise statements are redirected to
702 __gnat_last_chance_handler. No need to redefine raise_nodefer_decl, since
703 this procedure will never be called in this mode. */
704 if (No_Exception_Handlers_Set ())
707 = create_subprog_decl
708 (get_identifier ("__gnat_last_chance_handler"), NULL_TREE,
709 build_function_type (void_type_node,
710 tree_cons (NULL_TREE,
711 build_pointer_type (char_type_node),
712 tree_cons (NULL_TREE,
715 NULL_TREE, false, true, true, NULL, Empty);
717 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
718 gnat_raise_decls[i] = decl;
721 /* Otherwise, make one decl for each exception reason. */
722 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
726 sprintf (name, "__gnat_rcheck_%.2d", i);
728 = create_subprog_decl
729 (get_identifier (name), NULL_TREE,
730 build_function_type (void_type_node,
731 tree_cons (NULL_TREE,
734 tree_cons (NULL_TREE,
737 NULL_TREE, false, true, true, NULL, Empty);
740 /* Indicate that these never return. */
741 TREE_THIS_VOLATILE (raise_nodefer_decl) = 1;
742 TREE_SIDE_EFFECTS (raise_nodefer_decl) = 1;
743 TREE_TYPE (raise_nodefer_decl)
744 = build_qualified_type (TREE_TYPE (raise_nodefer_decl),
/* Likewise mark every raise routine volatile (i.e. noreturn).  */
747 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
749 TREE_THIS_VOLATILE (gnat_raise_decls[i]) = 1;
750 TREE_SIDE_EFFECTS (gnat_raise_decls[i]) = 1;
751 TREE_TYPE (gnat_raise_decls[i])
752 = build_qualified_type (TREE_TYPE (gnat_raise_decls[i]),
756 /* setjmp returns an integer and has one operand, which is a pointer to
   a jmpbuf.  */
759 = create_subprog_decl
760 (get_identifier ("__builtin_setjmp"), NULL_TREE,
761 build_function_type (integer_type_node,
762 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
763 NULL_TREE, false, true, true, NULL, Empty);
765 DECL_BUILT_IN_CLASS (setjmp_decl) = BUILT_IN_NORMAL;
766 DECL_FUNCTION_CODE (setjmp_decl) = BUILT_IN_SETJMP;
768 /* update_setjmp_buf updates a setjmp buffer from the current stack pointer
   address.  */
770 update_setjmp_buf_decl
771 = create_subprog_decl
772 (get_identifier ("__builtin_update_setjmp_buf"), NULL_TREE,
773 build_function_type (void_type_node,
774 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
775 NULL_TREE, false, true, true, NULL, Empty);
777 DECL_BUILT_IN_CLASS (update_setjmp_buf_decl) = BUILT_IN_NORMAL;
778 DECL_FUNCTION_CODE (update_setjmp_buf_decl) = BUILT_IN_UPDATE_SETJMP_BUF;
780 main_identifier_node = get_identifier ("main");
782 /* Install the builtins we might need, either internally or as
783 user available facilities for Intrinsic imports. */
784 gnat_install_builtins ();
787 /* Given a record type RECORD_TYPE and a chain of FIELD_DECL nodes FIELDLIST,
788 finish constructing the record or union type. If REP_LEVEL is zero, this
789 record has no representation clause and so will be entirely laid out here.
790 If REP_LEVEL is one, this record has a representation clause and has been
791 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
792 this record is derived from a parent record and thus inherits its layout;
793 only make a pass on the fields to finalize them. If DO_NOT_FINALIZE is
794 true, the record type is expected to be modified afterwards so it will
795 not be sent to the back-end for finalization. */
798 finish_record_type (tree record_type, tree fieldlist, int rep_level,
799 bool do_not_finalize)
801 enum tree_code code = TREE_CODE (record_type);
802 tree name = TYPE_NAME (record_type);
803 tree ada_size = bitsize_zero_node;
804 tree size = bitsize_zero_node;
/* Remember which layout values were already set on entry so we do not
   overwrite them below.  */
805 bool had_size = TYPE_SIZE (record_type) != 0;
806 bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
807 bool had_align = TYPE_ALIGN (record_type) != 0;
810 if (name && TREE_CODE (name) == TYPE_DECL)
811 name = DECL_NAME (name);
813 TYPE_FIELDS (record_type) = fieldlist;
814 TYPE_STUB_DECL (record_type) = build_decl (TYPE_DECL, name, record_type);
816 /* We don't need both the typedef name and the record name output in
817 the debugging information, since they are the same. */
818 DECL_ARTIFICIAL (TYPE_STUB_DECL (record_type)) = 1;
820 /* Globally initialize the record first. If this is a rep'ed record,
821 that just means some initializations; otherwise, layout the record. */
824 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
825 TYPE_MODE (record_type) = BLKmode;
828 TYPE_SIZE_UNIT (record_type) = size_zero_node;
830 TYPE_SIZE (record_type) = bitsize_zero_node;
832 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
833 out just like a UNION_TYPE, since the size will be fixed. */
834 else if (code == QUAL_UNION_TYPE)
839 /* Ensure there isn't a size already set. There can be in an error
840 case where there is a rep clause but all fields have errors and
841 no longer have a position. */
842 TYPE_SIZE (record_type) = 0;
843 layout_type (record_type);
846 /* At this point, the position and size of each field is known. It was
847 either set before entry by a rep clause, or by laying out the type above.
849 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
850 to compute the Ada size; the GCC size and alignment (for rep'ed records
851 that are not padding types); and the mode (for rep'ed records). We also
852 clear the DECL_BIT_FIELD indication for the cases we know have not been
853 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
855 if (code == QUAL_UNION_TYPE)
856 fieldlist = nreverse (fieldlist);
858 for (field = fieldlist; field; field = TREE_CHAIN (field))
860 tree type = TREE_TYPE (field);
861 tree pos = bit_position (field);
862 tree this_size = DECL_SIZE (field);
/* For a record-like field type, use its Ada size if one was recorded;
   otherwise fall back to the GCC size.  */
865 if ((TREE_CODE (type) == RECORD_TYPE
866 || TREE_CODE (type) == UNION_TYPE
867 || TREE_CODE (type) == QUAL_UNION_TYPE)
868 && !TYPE_IS_FAT_POINTER_P (type)
869 && !TYPE_CONTAINS_TEMPLATE_P (type)
870 && TYPE_ADA_SIZE (type))
871 this_ada_size = TYPE_ADA_SIZE (type);
873 this_ada_size = this_size;
875 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
876 if (DECL_BIT_FIELD (field)
877 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
879 unsigned int align = TYPE_ALIGN (type);
881 /* In the general case, type alignment is required. */
882 if (value_factor_p (pos, align))
884 /* The enclosing record type must be sufficiently aligned.
885 Otherwise, if no alignment was specified for it and it
886 has been laid out already, bump its alignment to the
887 desired one if this is compatible with its size. */
888 if (TYPE_ALIGN (record_type) >= align)
890 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
891 DECL_BIT_FIELD (field) = 0;
895 && value_factor_p (TYPE_SIZE (record_type), align))
897 TYPE_ALIGN (record_type) = align;
898 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
899 DECL_BIT_FIELD (field) = 0;
903 /* In the non-strict alignment case, only byte alignment is required. */
904 if (!STRICT_ALIGNMENT
905 && DECL_BIT_FIELD (field)
906 && value_factor_p (pos, BITS_PER_UNIT))
907 DECL_BIT_FIELD (field) = 0;
910 /* If we still have DECL_BIT_FIELD set at this point, we know the field
911 is technically not addressable. Except that it can actually be
912 addressed if the field is BLKmode and happens to be properly
   aligned.  */
914 DECL_NONADDRESSABLE_P (field)
915 |= DECL_BIT_FIELD (field) && DECL_MODE (field) != BLKmode;
917 /* A type must be as aligned as its most aligned field that is not
918 a bit-field. But this is already enforced by layout_type. */
919 if (rep_level > 0 && !DECL_BIT_FIELD (field))
920 TYPE_ALIGN (record_type)
921 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
/* Accumulate the type's sizes according to its tree code: plain unions
   take the maximum over fields, qualified unions select by qualifier,
   and records merge by field position.  */
926 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
927 size = size_binop (MAX_EXPR, size, this_size);
930 case QUAL_UNION_TYPE:
932 = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
933 this_ada_size, ada_size);
934 size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
939 /* Since we know here that all fields are sorted in order of
940 increasing bit position, the size of the record is one
941 higher than the ending bit of the last field processed
942 unless we have a rep clause, since in that case we might
943 have a field outside a QUAL_UNION_TYPE that has a higher ending
944 position. So use a MAX in that case. Also, if this field is a
945 QUAL_UNION_TYPE, we need to take into account the previous size in
946 the case of empty variants. */
948 = merge_sizes (ada_size, pos, this_ada_size,
949 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
951 = merge_sizes (size, pos, this_size,
952 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
960 if (code == QUAL_UNION_TYPE)
961 nreverse (fieldlist);
965 /* If this is a padding record, we never want to make the size smaller
966 than what was specified in it, if any. */
967 if (TREE_CODE (record_type) == RECORD_TYPE
968 && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
969 size = TYPE_SIZE (record_type);
971 /* Now set any of the values we've just computed that apply. */
972 if (!TYPE_IS_FAT_POINTER_P (record_type)
973 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
974 SET_TYPE_ADA_SIZE (record_type, ada_size);
978 tree size_unit = had_size_unit
979 ? TYPE_SIZE_UNIT (record_type)
981 size_binop (CEIL_DIV_EXPR, size,
983 unsigned int align = TYPE_ALIGN (record_type);
/* Round both sizes up to the alignment before committing them.  */
985 TYPE_SIZE (record_type) = variable_size (round_up (size, align));
986 TYPE_SIZE_UNIT (record_type)
987 = variable_size (round_up (size_unit, align / BITS_PER_UNIT));
989 compute_record_mode (record_type);
993 if (!do_not_finalize)
994 rest_of_record_type_compilation (record_type);
997 /* Wrap up compilation of RECORD_TYPE, i.e. most notably output all
998 the debug information associated with it. It need not be invoked
999 directly in most cases since finish_record_type takes care of doing
1000 so, unless explicitly requested not to through DO_NOT_FINALIZE. */
1003 rest_of_record_type_compilation (tree record_type)
1005 tree fieldlist = TYPE_FIELDS (record_type);
1007 enum tree_code code = TREE_CODE (record_type);
1008 bool var_size = false;
/* First pass: detect whether any field (or, for a QUAL_UNION_TYPE, any
   variant qualifier) is non-constant, which forces the XVE/XVU encoding
   record built below.  */
1010 for (field = fieldlist; field; field = TREE_CHAIN (field))
1012 /* We need to make an XVE/XVU record if any field has variable size,
1013 whether or not the record does. For example, if we have a union,
1014 it may be that all fields, rounded up to the alignment, have the
1015 same size, in which case we'll use that size. But the debug
1016 output routines (except Dwarf2) won't be able to output the fields,
1017 so we need to make the special record. */
1018 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
1019 /* If a field has a non-constant qualifier, the record will have
1020 variable size too. */
1021 || (code == QUAL_UNION_TYPE
1022 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
1029 /* If this record is of variable size, rename it so that the
1030 debugger knows it is and make a new, parallel, record
1031 that tells the debugger how the record is laid out. See
1032 exp_dbug.ads. But don't do this for records that are padding
1033 since they confuse GDB. */
1035 && !(TREE_CODE (record_type) == RECORD_TYPE
1036 && TYPE_IS_PADDING_P (record_type)))
1038 tree new_record_type
1039 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
1040 ? UNION_TYPE : TREE_CODE (record_type))
1041 tree orig_name = TYPE_NAME (record_type);
1043 = (TREE_CODE (orig_name) == TYPE_DECL ? DECL_NAME (orig_name)
1046 = concat_id_with_name (orig_id,
1047 TREE_CODE (record_type) == QUAL_UNION_TYPE
1049 tree last_pos = bitsize_zero_node;
1051 tree prev_old_field = 0;
1053 TYPE_NAME (new_record_type) = new_id;
1054 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
1055 TYPE_STUB_DECL (new_record_type)
1056 = build_decl (TYPE_DECL, new_id, new_record_type);
1057 DECL_ARTIFICIAL (TYPE_STUB_DECL (new_record_type)) = 1;
1058 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
1059 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
1060 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
1061 TYPE_SIZE_UNIT (new_record_type)
1062 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
/* Chain the encoding record to the original type so the debug output
   machinery can find it.  */
1064 add_parallel_type (TYPE_STUB_DECL (record_type), new_record_type);
1066 /* Now scan all the fields, replacing each field with a new
1067 field corresponding to the new encoding. */
1068 for (old_field = TYPE_FIELDS (record_type); old_field;
1069 old_field = TREE_CHAIN (old_field))
1071 tree field_type = TREE_TYPE (old_field);
1072 tree field_name = DECL_NAME (old_field);
1074 tree curpos = bit_position (old_field);
1076 unsigned int align = 0;
1079 /* See how the position was modified from the last position.
1081 There are two basic cases we support: a value was added
1082 to the last position or the last position was rounded to
1083 a boundary and then something was added. Check for the
1084 first case first. If not, see if there is any evidence
1085 of rounding. If so, round the last position and try
1088 If this is a union, the position can be taken as zero. */
1090 /* Some computations depend on the shape of the position expression,
1091 so strip conversions to make sure it's exposed. */
1092 curpos = remove_conversions (curpos, true);
1094 if (TREE_CODE (new_record_type) == UNION_TYPE)
1095 pos = bitsize_zero_node, align = 0;
1097 pos = compute_related_constant (curpos, last_pos);
1099 if (!pos && TREE_CODE (curpos) == MULT_EXPR
1100 && host_integerp (TREE_OPERAND (curpos, 1), 1))
1102 tree offset = TREE_OPERAND (curpos, 0);
1103 align = tree_low_cst (TREE_OPERAND (curpos, 1), 1);
1105 /* An offset which is a bitwise AND with a negative power of 2
1106 means an alignment corresponding to this power of 2. */
1107 offset = remove_conversions (offset, true);
1108 if (TREE_CODE (offset) == BIT_AND_EXPR
1109 && host_integerp (TREE_OPERAND (offset, 1), 0)
1110 && tree_int_cst_sgn (TREE_OPERAND (offset, 1)) < 0)
1113 = - tree_low_cst (TREE_OPERAND (offset, 1), 0);
1114 if (exact_log2 (pow) > 0)
1118 pos = compute_related_constant (curpos,
1119 round_up (last_pos, align));
1121 else if (!pos && TREE_CODE (curpos) == PLUS_EXPR
1122 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
1123 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
1124 && host_integerp (TREE_OPERAND
1125 (TREE_OPERAND (curpos, 0), 1),
1130 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
1131 pos = compute_related_constant (curpos,
1132 round_up (last_pos, align));
1134 else if (potential_alignment_gap (prev_old_field, old_field,
1137 align = TYPE_ALIGN (field_type);
1138 pos = compute_related_constant (curpos,
1139 round_up (last_pos, align));
1142 /* If we can't compute a position, set it to zero.
1144 ??? We really should abort here, but it's too much work
1145 to get this correct for all cases. */
1148 pos = bitsize_zero_node;
1150 /* See if this type is variable-sized and make a pointer type
1151 and indicate the indirection if so. Beware that the debug
1152 back-end may adjust the position computed above according
1153 to the alignment of the field type, i.e. the pointer type
1154 in this case, if we don't preventively counter that. */
1155 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
1157 field_type = build_pointer_type (field_type);
1158 if (align != 0 && TYPE_ALIGN (field_type) > align)
1160 field_type = copy_node (field_type);
1161 TYPE_ALIGN (field_type) = align;
1166 /* Make a new field name, if necessary. */
1167 if (var || align != 0)
1172 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
1173 align / BITS_PER_UNIT);
1175 strcpy (suffix, "XVL");
1177 field_name = concat_id_with_name (field_name, suffix);
1180 new_field = create_field_decl (field_name, field_type,
1182 DECL_SIZE (old_field), pos, 0);
1183 TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
1184 TYPE_FIELDS (new_record_type) = new_field;
1186 /* If old_field is a QUAL_UNION_TYPE, take its size as being
1187 zero. The only time it's not the last field of the record
1188 is when there are other components at fixed positions after
1189 it (meaning there was a rep clause for every field) and we
1190 want to be able to encode them. */
1191 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
1192 (TREE_CODE (TREE_TYPE (old_field))
1195 : DECL_SIZE (old_field));
1196 prev_old_field = old_field;
/* The fields were pushed onto the chain in reverse; restore source order.  */
1199 TYPE_FIELDS (new_record_type)
1200 = nreverse (TYPE_FIELDS (new_record_type));
1202 rest_of_type_decl_compilation (TYPE_STUB_DECL (new_record_type));
1205 rest_of_type_decl_compilation (TYPE_STUB_DECL (record_type));
1208 /* Append PARALLEL_TYPE on the chain of parallel types for decl. */
1211 add_parallel_type (tree decl, tree parallel_type)
1215 while (DECL_PARALLEL_TYPE (d))
1216 d = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (d));
1218 SET_DECL_PARALLEL_TYPE (d, parallel_type);
1221 /* Return the parallel type associated to a type, if any. */
1224 get_parallel_type (tree type)
1226 if (TYPE_STUB_DECL (type))
1227 return DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type));
1232 /* Utility function of above to merge LAST_SIZE, the previous size of a record
1233 with FIRST_BIT and SIZE that describe a field. SPECIAL is nonzero
1234 if this represents a QUAL_UNION_TYPE in which case we must look for
1235 COND_EXPRs and replace a value of zero with the old size. If HAS_REP
1236 is nonzero, we must take the MAX of the end position of this field
1237 with LAST_SIZE. In all other cases, we use FIRST_BIT plus SIZE.
1239 We return an expression for the size. */
/* Merge the accumulated record size LAST_SIZE with the field described by
   FIRST_BIT and SIZE; see the comment preceding this function for the
   meaning of SPECIAL and HAS_REP.  */
1242 merge_sizes (tree last_size, tree first_bit, tree size, bool special,
1245 tree type = TREE_TYPE (last_size);
/* Ordinary case: the field ends at FIRST_BIT + SIZE; under a rep clause the
   new record size is the MAX of that end position and the previous size.  */
1248 if (!special || TREE_CODE (size) != COND_EXPR)
1250 new = size_binop (PLUS_EXPR, first_bit, size);
1252 new = size_binop (MAX_EXPR, last_size, new);
/* QUAL_UNION_TYPE variant: distribute the merge over both arms of the
   COND_EXPR, keeping the previous size for zero-sized (empty) variants.  */
1256 new = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
1257 integer_zerop (TREE_OPERAND (size, 1))
1258 ? last_size : merge_sizes (last_size, first_bit,
1259 TREE_OPERAND (size, 1),
1261 integer_zerop (TREE_OPERAND (size, 2))
1262 ? last_size : merge_sizes (last_size, first_bit,
1263 TREE_OPERAND (size, 2),
1266 /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
1267 when fed through substitute_in_expr) into thinking that a constant
1268 size is not constant. */
1269 while (TREE_CODE (new) == NON_LVALUE_EXPR)
1270 new = TREE_OPERAND (new, 0);
1275 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1276 related by the addition of a constant. Return that constant if so. */
1279 compute_related_constant (tree op0, tree op1)
1281 tree op0_var, op1_var;
1282 tree op0_con = split_plus (op0, &op0_var);
1283 tree op1_con = split_plus (op1, &op1_var);
1284 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1286 if (operand_equal_p (op0_var, op1_var, 0))
1288 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1294 /* Utility function of above to split a tree OP which may be a sum, into a
1295 constant part, which is returned, and a variable part, which is stored
1296 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1300 split_plus (tree in, tree *pvar)
1302 /* Strip NOPS in order to ease the tree traversal and maximize the
1303 potential for constant or plus/minus discovery. We need to be careful
1304 to always return and set *pvar to bitsizetype trees, but it's worth
1308 *pvar = convert (bitsizetype, in);
1310 if (TREE_CODE (in) == INTEGER_CST)
1312 *pvar = bitsize_zero_node;
1313 return convert (bitsizetype, in);
1315 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1317 tree lhs_var, rhs_var;
1318 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1319 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
1321 if (lhs_var == TREE_OPERAND (in, 0)
1322 && rhs_var == TREE_OPERAND (in, 1))
1323 return bitsize_zero_node;
1325 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1326 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
1329 return bitsize_zero_node;
1332 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1333 subprogram. If it is void_type_node, then we are dealing with a procedure,
1334 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1335 PARM_DECL nodes that are the subprogram arguments. CICO_LIST is the
1336 copy-in/copy-out list to be stored into TYPE_CICO_LIST.
1337 RETURNS_UNCONSTRAINED is true if the function returns an unconstrained
1338 object. RETURNS_BY_REF is true if the function returns by reference.
1339 RETURNS_BY_TARGET_PTR is true if the function is to be passed (as its
1340 first parameter) the address of the place to copy its result. */
1343 create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
1344 bool returns_unconstrained, bool returns_by_ref,
1345 bool returns_by_target_ptr)
1347 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1348 the subprogram formal parameters. This list is generated by traversing the
1349 input list of PARM_DECL nodes. */
1350 tree param_type_list = NULL;
1354 for (param_decl = param_decl_list; param_decl;
1355 param_decl = TREE_CHAIN (param_decl))
1356 param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
1359 /* The list of the function parameter types has to be terminated by the void
1360 type to signal to the back-end that we are not dealing with a variable
1361 parameter subprogram, but that the subprogram has a fixed number of
1363 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1365 /* The list of argument types has been created in reverse
1367 param_type_list = nreverse (param_type_list);
1369 type = build_function_type (return_type, param_type_list);
1371 /* TYPE may have been shared since GCC hashes types. If it has a CICO_LIST
1372 or the new type should, make a copy of TYPE. Likewise for
1373 RETURNS_UNCONSTRAINED and RETURNS_BY_REF. */
1374 if (TYPE_CI_CO_LIST (type) || cico_list
1375 || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
1376 || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref
1377 || TYPE_RETURNS_BY_TARGET_PTR_P (type) != returns_by_target_ptr)
1378 type = copy_type (type);
1380 TYPE_CI_CO_LIST (type) = cico_list;
1381 TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
1382 TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
1383 TYPE_RETURNS_BY_TARGET_PTR_P (type) = returns_by_target_ptr;
1387 /* Return a copy of TYPE but safe to modify in any way. */
1390 copy_type (tree type)
1392 tree new = copy_node (type);
1394 /* copy_node clears this field instead of copying it, because it is
1395 aliased with TREE_CHAIN. */
1396 TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type);
1398 TYPE_POINTER_TO (new) = 0;
1399 TYPE_REFERENCE_TO (new) = 0;
1400 TYPE_MAIN_VARIANT (new) = new;
1401 TYPE_NEXT_VARIANT (new) = 0;
1406 /* Return an INTEGER_TYPE of SIZETYPE with range MIN to MAX and whose
1407 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position of
1411 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
1413 /* First build a type for the desired range. */
1414 tree type = build_index_2_type (min, max);
1416 /* If this type has the TYPE_INDEX_TYPE we want, return it. Otherwise, if it
1417 doesn't have TYPE_INDEX_TYPE set, set it to INDEX. If TYPE_INDEX_TYPE
1418 is set, but not to INDEX, make a copy of this type with the requested
1419 index type. Note that we have no way of sharing these types, but that's
1420 only a small hole. */
1421 if (TYPE_INDEX_TYPE (type) == index)
1423 else if (TYPE_INDEX_TYPE (type))
1424 type = copy_type (type);
1426 SET_TYPE_INDEX_TYPE (type, index);
1427 create_type_decl (NULL_TREE, type, NULL, true, false, gnat_node);
1431 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type (a character
1432 string) and TYPE is a ..._TYPE node giving its data type.
1433 ARTIFICIAL_P is true if this is a declaration that was generated
1434 by the compiler. DEBUG_INFO_P is true if we need to write debugging
1435 information about this type. GNAT_NODE is used for the position of
1439 create_type_decl (tree type_name, tree type, struct attrib *attr_list,
1440 bool artificial_p, bool debug_info_p, Node_Id gnat_node)
1442 tree type_decl = build_decl (TYPE_DECL, type_name, type);
1443 enum tree_code code = TREE_CODE (type);
1445 DECL_ARTIFICIAL (type_decl) = artificial_p;
1447 if (!TYPE_IS_DUMMY_P (type))
1448 gnat_pushdecl (type_decl, gnat_node);
1450 process_attributes (type_decl, attr_list);
1452 /* Pass type declaration information to the debugger unless this is an
1453 UNCONSTRAINED_ARRAY_TYPE, which the debugger does not support,
1454 and ENUMERAL_TYPE or RECORD_TYPE which is handled separately, or
1455 type for which debugging information was not requested. */
1456 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
1457 DECL_IGNORED_P (type_decl) = 1;
1458 else if (code != ENUMERAL_TYPE
1459 && (code != RECORD_TYPE || TYPE_IS_FAT_POINTER_P (type))
1460 && !((code == POINTER_TYPE || code == REFERENCE_TYPE)
1461 && TYPE_IS_DUMMY_P (TREE_TYPE (type))))
1462 rest_of_type_decl_compilation (type_decl);
1467 /* Return a VAR_DECL or CONST_DECL node.
1469 VAR_NAME gives the name of the variable. ASM_NAME is its assembler name
1470 (if provided). TYPE is its data type (a GCC ..._TYPE node). VAR_INIT is
1471 the GCC tree for an optional initial expression; NULL_TREE if none.
1473 CONST_FLAG is true if this variable is constant, in which case we might
1474 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
1476 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
1477 definition to be made visible outside of the current compilation unit, for
1478 instance variable definitions in a package specification.
1480 EXTERN_FLAG is nonzero when processing an external variable declaration (as
1481 opposed to a definition: no storage is to be allocated for the variable).
1483 STATIC_FLAG is only relevant when not at top level. In that case
1484 it indicates whether to always allocate storage to the variable.
1486 GNAT_NODE is used for the position of the decl. */
1489 create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
1490 bool const_flag, bool public_flag, bool extern_flag,
1491 bool static_flag, bool const_decl_allowed_p,
1492 struct attrib *attr_list, Node_Id gnat_node)
/* NOTE(review): the declaration of init_const begins on a line missing from
   this excerpt; the conjuncts below are its continuation and require a
   type-compatible, constant-enough initializer.  */
1496 && gnat_types_compatible_p (type, TREE_TYPE (var_init))
1497 && (global_bindings_p () || static_flag
1498 ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init)) != 0
1499 : TREE_CONSTANT (var_init)));
1501 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
1502 case the initializer may be used in-lieu of the DECL node (as done in
1503 Identifier_to_gnu). This is useful to prevent the need of elaboration
1504 code when an identifier for which such a decl is made is in turn used as
1505 an initializer. We used to rely on CONST vs VAR_DECL for this purpose,
1506 but extra constraints apply to this choice (see below) and are not
1507 relevant to the distinction we wish to make. */
1508 bool constant_p = const_flag && init_const;
1510 /* The actual DECL node. CONST_DECL was initially intended for enumerals
1511 and may be used for scalars in general but not for aggregates. */
1513 = build_decl ((constant_p && const_decl_allowed_p
1514 && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
1517 /* If this is external, throw away any initializations (they will be done
1518 elsewhere) unless this is a constant for which we would like to remain
1519 able to get the initializer. If we are defining a global here, leave a
1520 constant initialization and save any variable elaborations for the
1521 elaboration routine. If we are just annotating types, throw away the
1522 initialization if it isn't a constant. */
1523 if ((extern_flag && !constant_p)
1524 || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
1525 var_init = NULL_TREE;
1527 /* At the global level, an initializer requiring code to be generated
1528 produces elaboration statements. Check that such statements are allowed,
1529 that is, not violating a No_Elaboration_Code restriction. */
1530 if (global_bindings_p () && var_init != 0 && ! init_const)
1531 Check_Elaboration_Code_Allowed (gnat_node);
1533 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
1534 try to fiddle with DECL_COMMON. However, on platforms that don't
1535 support global BSS sections, uninitialized global variables would
1536 go in DATA instead, thus increasing the size of the executable. */
1538 && TREE_CODE (var_decl) == VAR_DECL
1539 && !have_global_bss_p ())
1540 DECL_COMMON (var_decl) = 1;
/* Transfer the computed properties onto the DECL node.  */
1541 DECL_INITIAL (var_decl) = var_init;
1542 TREE_READONLY (var_decl) = const_flag;
1543 DECL_EXTERNAL (var_decl) = extern_flag;
1544 TREE_PUBLIC (var_decl) = public_flag || extern_flag;
1545 TREE_CONSTANT (var_decl) = constant_p;
1546 TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
1547 = TYPE_VOLATILE (type);
1549 /* If it's public and not external, always allocate storage for it.
1550 At the global binding level we need to allocate static storage for the
1551 variable if and only if it's not external. If we are not at the top level
1552 we allocate automatic storage unless requested not to. */
1553 TREE_STATIC (var_decl)
1554 = !extern_flag && (public_flag || static_flag || global_bindings_p ());
/* Record the explicit assembler name, if one was provided.  */
1556 if (asm_name && VAR_OR_FUNCTION_DECL_P (var_decl))
1557 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
1559 process_attributes (var_decl, attr_list);
1561 /* Add this decl to the current binding level. */
1562 gnat_pushdecl (var_decl, gnat_node);
/* An object with side effects (e.g. a volatile one, per the setting of
   TREE_SIDE_EFFECTS above) must be considered addressable.  */
1564 if (TREE_SIDE_EFFECTS (var_decl))
1565 TREE_ADDRESSABLE (var_decl) = 1;
/* CONST_DECLs need no storage; emit the others now, either through the
   global mechanism or by expanding a local declaration.  */
1567 if (TREE_CODE (var_decl) != CONST_DECL)
1569 if (global_bindings_p ())
1570 rest_of_decl_compilation (var_decl, true, 0);
1573 expand_decl (var_decl);
1578 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
1581 aggregate_type_contains_array_p (tree type)
1583 switch (TREE_CODE (type))
1587 case QUAL_UNION_TYPE:
1590 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1591 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
1592 && aggregate_type_contains_array_p (TREE_TYPE (field)))
1605 /* Returns a FIELD_DECL node. FIELD_NAME the field name, FIELD_TYPE is its
1606 type, and RECORD_TYPE is the type of the parent. PACKED is nonzero if
1607 this field is in a record type with a "pragma pack". If SIZE is nonzero
1608 it is the specified size for this field. If POS is nonzero, it is the bit
1609 position. If ADDRESSABLE is nonzero, it means we are allowed to take
1610 the address of this field for aliasing purposes. If it is negative, we
1611 should not make a bitfield, which is used by make_aligning_type. */
1614 create_field_decl (tree field_name, tree field_type, tree record_type,
1615 int packed, tree size, tree pos, int addressable)
/* Build the FIELD_DECL node proper and attach it to RECORD_TYPE.  */
1617 tree field_decl = build_decl (FIELD_DECL, field_name, field_type);
1619 DECL_CONTEXT (field_decl) = record_type;
1620 TREE_READONLY (field_decl) = TYPE_READONLY (field_type);
1622 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
1623 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
1624 Likewise for an aggregate without specified position that contains an
1625 array, because in this case slices of variable length of this array
1626 must be handled by GCC and variable-sized objects need to be aligned
1627 to at least a byte boundary. */
1628 if (packed && (TYPE_MODE (field_type) == BLKmode
1630 && AGGREGATE_TYPE_P (field_type)
1631 && aggregate_type_contains_array_p (field_type))))
1632 DECL_ALIGN (field_decl) = BITS_PER_UNIT;
1634 /* If a size is specified, use it. Otherwise, if the record type is packed
1635 compute a size to use, which may differ from the object's natural size.
1636 We always set a size in this case to trigger the checks for bitfield
1637 creation below, which is typically required when no position has been
1640 size = convert (bitsizetype, size);
1641 else if (packed == 1)
1643 size = rm_size (field_type);
1645 /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
1647 if (TREE_CODE (size) == INTEGER_CST
1648 && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
1649 size = round_up (size, BITS_PER_UNIT);
1652 /* If we may, according to ADDRESSABLE, make a bitfield if a size is
1653 specified for two reasons: first if the size differs from the natural
1654 size. Second, if the alignment is insufficient. There are a number of
1655 ways the latter can be true.
1657 We never make a bitfield if the type of the field has a nonconstant size,
1658 because no such entity requiring bitfield operations should reach here.
1660 We do *preventively* make a bitfield when there might be the need for it
1661 but we don't have all the necessary information to decide, as is the case
1662 of a field with no specified position in a packed record.
1664 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
1665 in layout_decl or finish_record_type to clear the bit_field indication if
1666 it is in fact not needed. */
1667 if (addressable >= 0
1669 && TREE_CODE (size) == INTEGER_CST
1670 && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
1671 && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
1672 || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
1674 || (TYPE_ALIGN (record_type) != 0
1675 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
1677 DECL_BIT_FIELD (field_decl) = 1;
1678 DECL_SIZE (field_decl) = size;
1679 if (!packed && !pos)
1680 DECL_ALIGN (field_decl)
1681 = (TYPE_ALIGN (record_type) != 0
1682 ? MIN (TYPE_ALIGN (record_type), TYPE_ALIGN (field_type))
1683 : TYPE_ALIGN (field_type));
1686 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
1688 /* Bump the alignment if need be, either for bitfield/packing purposes or
1689 to satisfy the type requirements if no such consideration applies. When
1690 we get the alignment from the type, indicate if this is from an explicit
1691 user request, which prevents stor-layout from lowering it later on. */
1694 = (DECL_BIT_FIELD (field_decl) ? 1
1695 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT : 0);
1697 if (bit_align > DECL_ALIGN (field_decl))
1698 DECL_ALIGN (field_decl) = bit_align;
1699 else if (!bit_align && TYPE_ALIGN (field_type) > DECL_ALIGN (field_decl))
1701 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
1702 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (field_type);
1708 /* We need to pass in the alignment the DECL is known to have.
1709 This is the lowest-order bit set in POS, but no more than
1710 the alignment of the record, if one is specified. Note
1711 that an alignment of 0 is taken as infinite. */
1712 unsigned int known_align;
1714 if (host_integerp (pos, 1))
1715 known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
1717 known_align = BITS_PER_UNIT;
1719 if (TYPE_ALIGN (record_type)
1720 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
1721 known_align = TYPE_ALIGN (record_type);
/* Lay the field out with the alignment derived above, then record the
   requested bit position.  */
1723 layout_decl (field_decl, known_align);
1724 SET_DECL_OFFSET_ALIGN (field_decl,
1725 host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
1727 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
1728 &DECL_FIELD_BIT_OFFSET (field_decl),
1729 DECL_OFFSET_ALIGN (field_decl), pos);
1731 DECL_HAS_REP_P (field_decl) = 1;
1734 /* In addition to what our caller says, claim the field is addressable if we
1735 know that its type is not suitable.
1737 The field may also be "technically" nonaddressable, meaning that even if
1738 we attempt to take the field's address we will actually get the address
1739 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
1740 value we have at this point is not accurate enough, so we don't account
1741 for this here and let finish_record_type decide. */
1742 if (!type_for_nonaliased_component_p (field_type))
1745 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
1750 /* Returns a PARM_DECL node. PARAM_NAME is the name of the parameter,
1751 PARAM_TYPE is its type. READONLY is true if the parameter is
1752 readonly (either an In parameter or an address of a pass-by-ref
1756 create_param_decl (tree param_name, tree param_type, bool readonly)
1758 tree param_decl = build_decl (PARM_DECL, param_name, param_type);
1760 /* Honor targetm.calls.promote_prototypes(), as not doing so can
1761 lead to various ABI violations. */
1762 if (targetm.calls.promote_prototypes (param_type)
1763 && (TREE_CODE (param_type) == INTEGER_TYPE
1764 || TREE_CODE (param_type) == ENUMERAL_TYPE)
1765 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
1767 /* We have to be careful about biased types here. Make a subtype
1768 of integer_type_node with the proper biasing. */
1769 if (TREE_CODE (param_type) == INTEGER_TYPE
1770 && TYPE_BIASED_REPRESENTATION_P (param_type))
1773 = copy_type (build_range_type (integer_type_node,
1774 TYPE_MIN_VALUE (param_type),
1775 TYPE_MAX_VALUE (param_type)));
1777 TYPE_BIASED_REPRESENTATION_P (param_type) = 1;
1780 param_type = integer_type_node;
1783 DECL_ARG_TYPE (param_decl) = param_type;
1784 TREE_READONLY (param_decl) = readonly;
1788 /* Given a DECL and ATTR_LIST, process the listed attributes. */
/* Walk ATTR_LIST and apply each attribute to DECL in turn.  */
1791 process_attributes (tree decl, struct attrib *attr_list)
1793 for (; attr_list; attr_list = attr_list->next)
1794 switch (attr_list->type)
/* Generic machine attribute: hand it to the common attribute machinery.  */
1796 case ATTR_MACHINE_ATTRIBUTE:
1797 decl_attributes (&decl, tree_cons (attr_list->name, attr_list->args,
1799 ATTR_FLAG_TYPE_IN_PLACE);
/* Emit an assembler alias for DECL unless it is merely external.  */
1802 case ATTR_LINK_ALIAS:
1803 if (! DECL_EXTERNAL (decl))
1805 TREE_STATIC (decl) = 1;
1806 assemble_alias (decl, attr_list->name);
/* Weak symbol support, when the target provides it.  */
1810 case ATTR_WEAK_EXTERNAL:
1812 declare_weak (decl);
1814 post_error ("?weak declarations not supported on this target",
1815 attr_list->error_point);
/* Place DECL in a named section, when the target supports them.  */
1818 case ATTR_LINK_SECTION:
1819 if (targetm.have_named_sections)
1821 DECL_SECTION_NAME (decl)
1822 = build_string (IDENTIFIER_LENGTH (attr_list->name),
1823 IDENTIFIER_POINTER (attr_list->name));
1824 DECL_COMMON (decl) = 0;
1827 post_error ("?section attributes are not supported for this target",
1828 attr_list->error_point);
/* Mark DECL to run at program start-up/shut-down, and as used so it is
   not discarded.  */
1831 case ATTR_LINK_CONSTRUCTOR:
1832 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1833 TREE_USED (decl) = 1;
1836 case ATTR_LINK_DESTRUCTOR:
1837 DECL_STATIC_DESTRUCTOR (decl) = 1;
1838 TREE_USED (decl) = 1;
1843 /* Record a global renaming pointer. */
1846 record_global_renaming_pointer (tree decl)
1848 gcc_assert (DECL_RENAMED_OBJECT (decl));
1849 VEC_safe_push (tree, gc, global_renaming_pointers, decl);
1852 /* Invalidate the global renaming pointers. */
1855 invalidate_global_renaming_pointers (void)
1860 for (i = 0; VEC_iterate(tree, global_renaming_pointers, i, iter); i++)
1861 SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);
1863 VEC_free (tree, gc, global_renaming_pointers);
1866 /* Return true if VALUE is known to be a multiple of FACTOR, which must be
1870 value_factor_p (tree value, HOST_WIDE_INT factor)
1872 if (host_integerp (value, 1))
1873 return tree_low_cst (value, 1) % factor == 0;
1875 if (TREE_CODE (value) == MULT_EXPR)
1876 return (value_factor_p (TREE_OPERAND (value, 0), factor)
1877 || value_factor_p (TREE_OPERAND (value, 1), factor));
1882 /* Given 2 consecutive field decls PREV_FIELD and CURR_FIELD, return true
1883 unless we can prove these 2 fields are laid out in such a way that no gap
1884 exists between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
1885 is the distance in bits between the end of PREV_FIELD and the starting
1886 position of CURR_FIELD. It is ignored if null. */
1889 potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
1891 /* If this is the first field of the record, there cannot be any gap */
1895 /* If the previous field is a union type, then return False: The only
1896 time when such a field is not the last field of the record is when
1897 there are other components at fixed positions after it (meaning there
1898 was a rep clause for every field), in which case we don't want the
1899 alignment constraint to override them. */
1900 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
1903 /* If the distance between the end of prev_field and the beginning of
1904 curr_field is constant, then there is a gap if the value of this
1905 constant is not null. */
1906 if (offset && host_integerp (offset, 1))
1907 return !integer_zerop (offset);
1909 /* If the size and position of the previous field are constant,
1910 then check the sum of this size and position. There will be a gap
1911 iff it is not multiple of the current field alignment. */
1912 if (host_integerp (DECL_SIZE (prev_field), 1)
1913 && host_integerp (bit_position (prev_field), 1))
1914 return ((tree_low_cst (bit_position (prev_field), 1)
1915 + tree_low_cst (DECL_SIZE (prev_field), 1))
1916 % DECL_ALIGN (curr_field) != 0);
1918 /* If both the position and size of the previous field are multiples
1919 of the current field alignment, there cannot be any gap. */
1920 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
1921 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
1924 /* Fallback, return that there may be a potential gap */
1928 /* Returns a LABEL_DECL node for LABEL_NAME. */
/* NOTE(review): the trailing `return label_decl;` and braces are missing
   from this stored text (lossy extraction).  */
1931 create_label_decl (tree label_name)
1933 tree label_decl = build_decl (LABEL_DECL, label_name, void_type_node);
/* The label belongs to the function currently being compiled; it carries
   no value, hence VOIDmode, and is tagged with the current location.  */
1935 DECL_CONTEXT (label_decl) = current_function_decl;
1936 DECL_MODE (label_decl) = VOIDmode;
1937 DECL_SOURCE_LOCATION (label_decl) = input_location;
1942 /* Returns a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
1943 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1944 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1945 PARM_DECL nodes chained through the TREE_CHAIN field).
1947 INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
1948 appropriate fields in the FUNCTION_DECL. GNAT_NODE gives the location. */
/* NOTE(review): lossy extraction -- braces and some conditional bodies
   (e.g. what is done when nested in an inlined external function, and the
   guard around DECL_DECLARED_INLINE_P) are missing from this text.  */
1951 create_subprog_decl (tree subprog_name, tree asm_name,
1952 tree subprog_type, tree param_decl_list, bool inline_flag,
1953 bool public_flag, bool extern_flag,
1954 struct attrib *attr_list, Node_Id gnat_node)
1956 tree return_type = TREE_TYPE (subprog_type);
1957 tree subprog_decl = build_decl (FUNCTION_DECL, subprog_name, subprog_type);
1959 /* If this is a function nested inside an inlined external function, it
1960 means we aren't going to compile the outer function unless it is
1961 actually inlined, so do the same for us. */
1962 if (current_function_decl && DECL_INLINE (current_function_decl)
1963 && DECL_EXTERNAL (current_function_decl))
/* Mirror the function type's qualifiers onto the decl: a volatile
   (Ada "not pure") type forces TREE_SIDE_EFFECTS as well.  */
1966 DECL_EXTERNAL (subprog_decl) = extern_flag;
1967 TREE_PUBLIC (subprog_decl) = public_flag;
1968 TREE_STATIC (subprog_decl) = 1;
1969 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
1970 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
1971 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
1972 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
1973 DECL_RESULT (subprog_decl) = build_decl (RESULT_DECL, 0, return_type);
1974 DECL_ARTIFICIAL (DECL_RESULT (subprog_decl)) = 1;
1975 DECL_IGNORED_P (DECL_RESULT (subprog_decl)) = 1;
1977 /* TREE_ADDRESSABLE is set on the result type to request the use of the
1978 target by-reference return mechanism. This is not supported all the
1979 way down to RTL expansion with GCC 4, which ICEs on temporary creation
1980 attempts with such a type and expects DECL_BY_REFERENCE to be set on
1981 the RESULT_DECL instead - see gnat_genericize for more details. */
1982 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (subprog_decl))))
1984 tree result_decl = DECL_RESULT (subprog_decl);
1986 TREE_ADDRESSABLE (TREE_TYPE (result_decl)) = 0;
1987 DECL_BY_REFERENCE (result_decl) = 1;
/* Presumably guarded by inline_flag in the full source -- TODO confirm.  */
1991 DECL_DECLARED_INLINE_P (subprog_decl) = 1;
1995 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
1997 /* The expand_main_function circuitry expects "main_identifier_node" to
1998 designate the DECL_NAME of the 'main' entry point, in turn expected
1999 to be declared as the "main" function literally by default. Ada
2000 program entry points are typically declared with a different name
2001 within the binder generated file, exported as 'main' to satisfy the
2002 system expectations. Redirect main_identifier_node in this case. */
2003 if (asm_name == main_identifier_node)
2004 main_identifier_node = DECL_NAME (subprog_decl);
2007 process_attributes (subprog_decl, attr_list);
2009 /* Add this decl to the current binding level. */
2010 gnat_pushdecl (subprog_decl, gnat_node);
2012 /* Output the assembler code and/or RTL for the declaration. */
2013 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
2015 return subprog_decl;
2018 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
2019 body. This routine needs to be invoked before processing the declarations
2020 appearing in the subprogram. */
/* NOTE(review): lossy extraction -- the declaration of param_decl and the
   gnat_pushlevel (or similar) call implied by the comment at 2030 are
   missing from this stored text.  */
2023 begin_subprog_body (tree subprog_decl)
2027 current_function_decl = subprog_decl;
2028 announce_function (subprog_decl);
2030 /* Enter a new binding level and show that all the parameters belong to
/* Re-parent every PARM_DECL so its DECL_CONTEXT is the subprogram.  */
2033 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
2034 param_decl = TREE_CHAIN (param_decl))
2035 DECL_CONTEXT (param_decl) = subprog_decl;
2037 make_decl_rtl (subprog_decl);
2039 /* We handle pending sizes via the elaboration of types, so we don't need to
2040 save them. This causes them to be marked as part of the outer function
2041 and then discarded. */
2042 get_pending_sizes ();
2046 /* Helper for the genericization callback. Return a dereference of VAL
2047 if it is of a reference type. */
/* NOTE(review): lossy extraction -- the early `return val;` for the
   non-reference case and the final `return ref;` are missing here.  */
2050 convert_from_reference (tree val)
2052 tree value_type, ref;
2054 if (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE)
/* Build *VAL with the pointed-to type.  */
2057 value_type = TREE_TYPE (TREE_TYPE (val));
2058 ref = build1 (INDIRECT_REF, value_type, val);
2060 /* See if what we reference is CONST or VOLATILE, which requires
2061 looking into array types to get to the component type. */
2063 while (TREE_CODE (value_type) == ARRAY_TYPE)
2064 value_type = TREE_TYPE (value_type);
/* Propagate const/volatile of the referenced object onto the INDIRECT_REF;
   a volatile reference (or a side-effecting VAL) makes the whole
   dereference side-effecting.  */
2067 = (TYPE_QUALS (value_type) & TYPE_QUAL_CONST);
2068 TREE_THIS_VOLATILE (ref)
2069 = (TYPE_QUALS (value_type) & TYPE_QUAL_VOLATILE);
2071 TREE_SIDE_EFFECTS (ref)
2072 = (TREE_THIS_VOLATILE (ref) || TREE_SIDE_EFFECTS (val));
2077 /* Helper for the genericization callback. Returns true if T denotes
2078 a RESULT_DECL with DECL_BY_REFERENCE set. */
2081 is_byref_result (tree t)
2083 return (TREE_CODE (t) == RESULT_DECL && DECL_BY_REFERENCE (t));
2087 /* Tree walking callback for gnat_genericize. Currently ...
2089 o Adjust references to the function's DECL_RESULT if it is marked
2090 DECL_BY_REFERENCE and so has had its type turned into a reference
2091 type at the end of the function compilation. */
/* NOTE(review): lossy extraction -- the *walk_subtrees assignments and the
   final return statement of this callback are missing from this text.  */
2094 gnat_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
2096 /* This implementation is modeled after what the C++ front-end is
2097 doing, basis of the downstream passes behavior. */
2099 tree stmt = *stmt_p;
2100 struct pointer_set_t *p_set = (struct pointer_set_t*) data;
2102 /* If we have a direct mention of the result decl, dereference. */
2103 if (is_byref_result (stmt))
2105 *stmt_p = convert_from_reference (stmt);
2110 /* Otherwise, no need to walk the same tree twice. */
2111 if (pointer_set_contains (p_set, stmt))
2117 /* If we are taking the address of what now is a reference, just get the
/* &RESULT_DECL where the decl is now a reference: drop the ADDR_EXPR and
   convert the reference value to the expected pointer type instead.  */
2119 if (TREE_CODE (stmt) == ADDR_EXPR
2120 && is_byref_result (TREE_OPERAND (stmt, 0)))
2122 *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
2126 /* Don't dereference an by-reference RESULT_DECL inside a RETURN_EXPR. */
2127 else if (TREE_CODE (stmt) == RETURN_EXPR
2128 && TREE_OPERAND (stmt, 0)
2129 && is_byref_result (TREE_OPERAND (stmt, 0)))
2132 /* Don't look inside trees that cannot embed references of interest. */
2133 else if (IS_TYPE_OR_DECL_P (stmt))
/* Remember this node so a later visit can take the early-out above.  */
2136 pointer_set_insert (p_set, *stmt_p);
2141 /* Perform lowering of Ada trees to GENERIC. In particular:
2143 o Turn a DECL_BY_REFERENCE RESULT_DECL into a real by-reference decl
2144 and adjust all the references to this decl accordingly. */
2147 gnat_genericize (tree fndecl)
2149 /* Prior to GCC 4, an explicit By_Reference result mechanism for a function
2150 was handled by simply setting TREE_ADDRESSABLE on the result type.
2151 Everything required to actually pass by invisible ref using the target
2152 mechanism (e.g. extra parameter) was handled at RTL expansion time.
2154 This doesn't work with GCC 4 any more for several reasons. First, the
2155 gimplification process might need the creation of temporaries of this
2156 type, and the gimplifier ICEs on such attempts. Second, the middle-end
2157 now relies on a different attribute for such cases (DECL_BY_REFERENCE on
2158 RESULT/PARM_DECLs), and expects the user invisible by-reference-ness to
2159 be explicitly accounted for by the front-end in the function body.
2161 We achieve the complete transformation in two steps:
2163 1/ create_subprog_decl performs early attribute tweaks: it clears
2164 TREE_ADDRESSABLE from the result type and sets DECL_BY_REFERENCE on
2165 the result decl. The former ensures that the bit isn't set in the GCC
2166 tree saved for the function, so prevents ICEs on temporary creation.
2167 The latter we use here to trigger the rest of the processing.
2169 2/ This function performs the type transformation on the result decl
2170 and adjusts all the references to this decl from the function body
2173 Clearing TREE_ADDRESSABLE from the type differs from the C++ front-end
2174 strategy, which escapes the gimplifier temporary creation issues by
2175 creating it's own temporaries using TARGET_EXPR nodes. Our way relies
2176 on simple specific support code in aggregate_value_p to look at the
2177 target function result decl explicitly. */
2179 struct pointer_set_t *p_set;
2180 tree decl_result = DECL_RESULT (fndecl);
/* Nothing to do unless create_subprog_decl flagged the result.  */
2182 if (!DECL_BY_REFERENCE (decl_result))
2185 /* Make the DECL_RESULT explicitly by-reference and adjust all the
2186 occurrences in the function body using the common tree-walking facility.
2187 We want to see every occurrence of the result decl to adjust the
2188 referencing tree, so need to use our own pointer set to control which
2189 trees should be visited again or not. */
2191 p_set = pointer_set_create ();
/* Retype the result as a reference and re-lay it out, then rewrite every
   use in the saved body via gnat_genericize_r.  */
2193 TREE_TYPE (decl_result) = build_reference_type (TREE_TYPE (decl_result));
2194 TREE_ADDRESSABLE (decl_result) = 0;
2195 relayout_decl (decl_result);
2197 walk_tree (&DECL_SAVED_TREE (fndecl), gnat_genericize_r, p_set, NULL);
2199 pointer_set_destroy (p_set);
2202 /* Finish the definition of the current subprogram and compile it all the way
2203 to assembler language output. BODY is the tree corresponding to
/* NOTE(review): lossy extraction -- the binding-level pop, the early
   return for type_annotate_only, and several braces are missing here.  */
2207 end_subprog_body (tree body)
2209 tree fndecl = current_function_decl;
2211 /* Mark the BLOCK for this level as being for this function and pop the
2212 level. Since the vars in it are the parameters, clear them. */
2213 BLOCK_VARS (current_binding_level->block) = 0;
2214 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
2215 DECL_INITIAL (fndecl) = current_binding_level->block;
2218 /* Deal with inline. If declared inline or we should default to inline,
2219 set the flag in the decl. */
2220 DECL_INLINE (fndecl) = 1;
2222 /* We handle pending sizes via the elaboration of types, so we don't
2223 need to save them. */
2224 get_pending_sizes ();
2226 /* Mark the RESULT_DECL as being in this subprogram. */
2227 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
2229 DECL_SAVED_TREE (fndecl) = body;
/* Restore the enclosing function (if any) as "current".  */
2231 current_function_decl = DECL_CONTEXT (fndecl);
2234 /* We cannot track the location of errors past this point. */
2235 error_gnat_node = Empty;
2237 /* If we're only annotating types, don't actually compile this function. */
2238 if (type_annotate_only)
2241 /* Perform the required pre-gimplification transformations on the tree. */
2242 gnat_genericize (fndecl)
2244 /* We do different things for nested and non-nested functions.
2245 ??? This should be in cgraph. */
2246 if (!DECL_CONTEXT (fndecl))
2248 gnat_gimplify_function (fndecl);
2249 cgraph_finalize_function (fndecl, false);
2252 /* Register this function with cgraph just far enough to get it
2253 added to our parent's nested function list. */
2254 (void) cgraph_node (fndecl);
2257 /* Convert FNDECL's code to GIMPLE and handle any nested functions. */
2260 gnat_gimplify_function (tree fndecl)
2262 struct cgraph_node *cgn;
/* Dump the pre-gimplification tree, gimplify, then dump the result.  */
2264 dump_function (TDI_original, fndecl);
2265 gimplify_function_tree (fndecl);
2266 dump_function (TDI_generic, fndecl);
2268 /* Convert all nested functions to GIMPLE now. We do things in this order
2269 so that items like VLA sizes are expanded properly in the context of the
2270 correct function. */
/* Recurse over the cgraph's list of functions nested in FNDECL.  */
2271 cgn = cgraph_node (fndecl);
2272 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
2273 gnat_gimplify_function (cgn->decl);
/* Register DECL, a builtin function, in the compilation unit's global
   binding level.  NOTE(review): the trailing `return decl;` is missing
   from this stored text (lossy extraction).  */
2278 gnat_builtin_function (tree decl)
2280 gnat_pushdecl (decl, Empty);
2284 /* Return an integer type with the number of bits of precision given by
2285 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
2286 it is a signed type. */
/* NOTE(review): lossy extraction -- the declarations of `t` and
   `type_name`, the if/else around the make_*_type calls, the guard on
   the sprintf naming, and the final return are missing from this text.  */
2289 gnat_type_for_size (unsigned precision, int unsignedp)
/* Results are memoized per (precision, signedness) in the file-level
   signed_and_unsigned_types cache, bounded at 2 * MAX_BITS_PER_WORD.  */
2294 if (precision <= 2 * MAX_BITS_PER_WORD
2295 && signed_and_unsigned_types[precision][unsignedp])
2296 return signed_and_unsigned_types[precision][unsignedp];
2299 t = make_unsigned_type (precision);
2301 t = make_signed_type (precision);
2303 if (precision <= 2 * MAX_BITS_PER_WORD)
2304 signed_and_unsigned_types[precision][unsignedp] = t;
/* Give freshly built types a synthetic name: SIGNED_n / UNSIGNED_n.  */
2308 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
2309 TYPE_NAME (t) = get_identifier (type_name);
2315 /* Likewise for floating-point types. */
/* NOTE(review): lossy extraction -- declarations of `t`/`type_name`, the
   layout_type call implied by the mode assertion, and the final return
   are missing from this stored text.  */
2318 float_type_for_precision (int precision, enum machine_mode mode)
/* Memoized per machine mode in the file-level float_types cache.  */
2323 if (float_types[(int) mode])
2324 return float_types[(int) mode];
2326 float_types[(int) mode] = t = make_node (REAL_TYPE);
2327 TYPE_PRECISION (t) = precision;
/* The requested precision must lay out to exactly MODE.  */
2330 gcc_assert (TYPE_MODE (t) == mode);
2333 sprintf (type_name, "FLOAT_%d", precision);
2334 TYPE_NAME (t) = get_identifier (type_name);
2340 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
2341 an unsigned type; otherwise a signed type is returned. */
/* NOTE(review): lossy extraction -- the return expressions for the
   BLKmode and COMPLEX_MODE_P branches and the fallback return are
   missing from this stored text.  */
2344 gnat_type_for_mode (enum machine_mode mode, int unsignedp)
2346 if (mode == BLKmode)
2348 else if (mode == VOIDmode)
2349 return void_type_node;
2350 else if (COMPLEX_MODE_P (mode))
2352 else if (SCALAR_FLOAT_MODE_P (mode))
2353 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
2354 else if (SCALAR_INT_MODE_P (mode))
2355 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2360 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
/* NOTE(review): lossy extraction -- the TYPE_MODULAR_P copy on the copied
   node and the final `return type;` are missing from this stored text.  */
2363 gnat_unsigned_type (tree type_node)
2365 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
/* For an Ada modular type (or a subtype thereof), keep a link back to
   the original via TREE_TYPE on a private copy of the cached type.  */
2367 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2369 type = copy_node (type);
2370 TREE_TYPE (type) = type_node;
2372 else if (TREE_TYPE (type_node)
2373 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2374 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2376 type = copy_node (type);
2377 TREE_TYPE (type) = TREE_TYPE (type_node);
2383 /* Return the signed version of a TYPE_NODE, a scalar type. */
/* NOTE(review): mirror image of gnat_unsigned_type above (unsignedp = 0);
   the same trailing lines are missing from this stored text.  */
2386 gnat_signed_type (tree type_node)
2388 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
/* Same modular-type bookkeeping as in gnat_unsigned_type.  */
2390 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2392 type = copy_node (type);
2393 TREE_TYPE (type) = type_node;
2395 else if (TREE_TYPE (type_node)
2396 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2397 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2399 type = copy_node (type);
2400 TREE_TYPE (type) = TREE_TYPE (type_node);
2406 /* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
2407 transparently converted to each other. */
/* NOTE(review): lossy extraction -- the `return 1;`/`return 0;` lines
   after each test are missing from this stored text.  */
2410 gnat_types_compatible_p (tree t1, tree t2)
2412 enum tree_code code;
2414 /* This is the default criterion. */
2415 if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
2418 /* We only check structural equivalence here. */
2419 if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
2422 /* Array types are also compatible if they are constrained and have
2423 the same component type and the same domain. */
2424 if (code == ARRAY_TYPE
2425 && TREE_TYPE (t1) == TREE_TYPE (t2)
2426 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
2427 TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
2428 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
2429 TYPE_MAX_VALUE (TYPE_DOMAIN (t2))))
2432 /* Padding record types are also compatible if they pad the same
2433 type and have the same constant size. */
2434 if (code == RECORD_TYPE
2435 && TYPE_IS_PADDING_P (t1) && TYPE_IS_PADDING_P (t2)
2436 && TREE_TYPE (TYPE_FIELDS (t1)) == TREE_TYPE (TYPE_FIELDS (t2))
2437 && tree_int_cst_equal (TYPE_SIZE (t1), TYPE_SIZE (t2)))
2443 /* EXP is an expression for the size of an object. If this size contains
2444 discriminant references, replace them with the maximum (if MAX_P) or
2445 minimum (if !MAX_P) possible value of the discriminant. */
/* NOTE(review): lossy extraction -- several case labels, braces, returns
   and the final gcc_unreachable of this large switch are missing from
   this stored text; comments describe only the visible lines.  */
2448 max_size (tree exp, bool max_p)
2450 enum tree_code code = TREE_CODE (exp);
2451 tree type = TREE_TYPE (exp);
2453 switch (TREE_CODE_CLASS (code))
2455 case tcc_declaration:
/* Rebuild a CALL_EXPR with each argument recursively bounded.  */
2460 if (code == CALL_EXPR)
2463 int i, n = call_expr_nargs (exp);
2466 argarray = (tree *) alloca (n * sizeof (tree));
2467 for (i = 0; i < n; i++)
2468 argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
2469 return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
2474 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
2475 modify. Otherwise, we treat it like a variable. */
2476 if (!CONTAINS_PLACEHOLDER_P (exp))
/* A placeholder-containing reference: substitute the extreme value of
   the field's type for the discriminant reference.  */
2479 type = TREE_TYPE (TREE_OPERAND (exp, 1));
2481 max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), true);
2483 case tcc_comparison:
2484 return max_p ? size_one_node : size_zero_node;
2488 case tcc_expression:
2489 switch (TREE_CODE_LENGTH (code))
2492 if (code == NON_LVALUE_EXPR)
2493 return max_size (TREE_OPERAND (exp, 0), max_p);
/* Unary case: negation flips which extreme we are chasing.  */
2496 fold_build1 (code, type,
2497 max_size (TREE_OPERAND (exp, 0),
2498 code == NEGATE_EXPR ? !max_p : max_p));
2501 if (code == COMPOUND_EXPR)
2502 return max_size (TREE_OPERAND (exp, 1), max_p);
2504 /* Calculate "(A ? B : C) - D" as "A ? B - D : C - D" which
2505 may provide a tighter bound on max_size. */
2506 if (code == MINUS_EXPR
2507 && TREE_CODE (TREE_OPERAND (exp, 0)) == COND_EXPR)
2509 tree lhs = fold_build2 (MINUS_EXPR, type,
2510 TREE_OPERAND (TREE_OPERAND (exp, 0), 1),
2511 TREE_OPERAND (exp, 1));
2512 tree rhs = fold_build2 (MINUS_EXPR, type,
2513 TREE_OPERAND (TREE_OPERAND (exp, 0), 2),
2514 TREE_OPERAND (exp, 1));
2515 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2516 max_size (lhs, max_p),
2517 max_size (rhs, max_p));
/* General binary case: a subtrahend is bounded in the opposite sense.  */
2521 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
2522 tree rhs = max_size (TREE_OPERAND (exp, 1),
2523 code == MINUS_EXPR ? !max_p : max_p);
2525 /* Special-case wanting the maximum value of a MIN_EXPR.
2526 In that case, if one side overflows, return the other.
2527 sizetype is signed, but we know sizes are non-negative.
2528 Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
2529 overflowing or the maximum possible value and the RHS
2533 && TREE_CODE (rhs) == INTEGER_CST
2534 && TREE_OVERFLOW (rhs))
2538 && TREE_CODE (lhs) == INTEGER_CST
2539 && TREE_OVERFLOW (lhs))
2541 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
2542 && ((TREE_CODE (lhs) == INTEGER_CST
2543 && TREE_OVERFLOW (lhs))
2544 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
2545 && !TREE_CONSTANT (rhs))
2548 return fold_build2 (code, type, lhs, rhs);
2552 if (code == SAVE_EXPR)
/* A conditional's bound is the extreme of the bounds of its arms.  */
2554 else if (code == COND_EXPR)
2555 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2556 max_size (TREE_OPERAND (exp, 1), max_p),
2557 max_size (TREE_OPERAND (exp, 2), max_p));
2560 /* Other tree classes cannot happen. */
2568 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2569 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2570 Return a constructor for the template. */
/* NOTE(review): lossy extraction -- the declaration of `field`, parts of
   the for-header, the default `else` for bounds, and some braces are
   missing from this stored text.  */
2573 build_template (tree template_type, tree array_type, tree expr)
2575 tree template_elts = NULL_TREE;
2576 tree bound_list = NULL_TREE;
/* Strip padding / justified-modular wrappers to reach the real array.  */
2579 while (TREE_CODE (array_type) == RECORD_TYPE
2580 && (TYPE_IS_PADDING_P (array_type)
2581 || TYPE_JUSTIFIED_MODULAR_P (array_type)))
2582 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
2584 if (TREE_CODE (array_type) == ARRAY_TYPE
2585 || (TREE_CODE (array_type) == INTEGER_TYPE
2586 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
2587 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
2589 /* First make the list for a CONSTRUCTOR for the template. Go down the
2590 field list of the template instead of the type chain because this
2591 array might be an Ada array of arrays and we can't tell where the
2592 nested arrays stop being the underlying object. */
/* Template fields come in (min, max) pairs, hence the double
   TREE_CHAIN step per iteration.  */
2594 for (field = TYPE_FIELDS (template_type); field;
2596 ? (bound_list = TREE_CHAIN (bound_list))
2597 : (array_type = TREE_TYPE (array_type))),
2598 field = TREE_CHAIN (TREE_CHAIN (field)))
2600 tree bounds, min, max;
2602 /* If we have a bound list, get the bounds from there. Likewise
2603 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
2604 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
2605 This will give us a maximum range. */
2607 bounds = TREE_VALUE (bound_list);
2608 else if (TREE_CODE (array_type) == ARRAY_TYPE)
2609 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
2610 else if (expr && TREE_CODE (expr) == PARM_DECL
2611 && DECL_BY_COMPONENT_PTR_P (expr))
2612 bounds = TREE_TYPE (field);
2616 min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
2617 max = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MAX_VALUE (bounds));
2619 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
2620 substitute it from OBJECT. */
2621 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
2622 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
/* Accumulate (max, min) in reverse; nreverse below restores order.  */
2624 template_elts = tree_cons (TREE_CHAIN (field), max,
2625 tree_cons (field, min, template_elts));
2628 return gnat_build_constructor (template_type, nreverse (template_elts));
2631 /* Build a VMS descriptor from a Mechanism_Type, which must specify
2632 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2633 in the type contains in its DECL_INITIAL the expression to use when
2634 a constructor is made for the type. GNAT_ENTITY is an entity used
2635 to print out an error message if the mechanism cannot be applied to
2636 an object of that type and also for the name. */
/* NOTE(review): lossy extraction -- declarations of ndim, inner_type,
   idx_arr, dtype, class, tem, fname, i, many case labels/breaks of the
   DTYPE and CLASS switches, and the final return are missing from this
   stored text.  Comments describe only the visible lines.  */
2639 build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2641 tree record_type = make_node (RECORD_TYPE);
2642 tree pointer32_type;
2643 tree field_list = 0;
2652 /* If TYPE is an unconstrained array, use the underlying array type. */
2653 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2654 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2656 /* If this is an array, compute the number of dimensions in the array,
2657 get the index types, and point to the inner type. */
2658 if (TREE_CODE (type) != ARRAY_TYPE)
2661 for (ndim = 1, inner_type = type;
2662 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2663 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2664 ndim++, inner_type = TREE_TYPE (inner_type))
2667 idx_arr = (tree *) alloca (ndim * sizeof (tree));
/* For Fortran-convention arrays (except NCA), index types are collected
   in reverse dimension order.  */
2669 if (mech != By_Descriptor_NCA
2670 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2671 for (i = ndim - 1, inner_type = type;
2673 i--, inner_type = TREE_TYPE (inner_type))
2674 idx_arr[i] = TYPE_DOMAIN (inner_type);
2676 for (i = 0, inner_type = type;
2678 i++, inner_type = TREE_TYPE (inner_type))
2679 idx_arr[i] = TYPE_DOMAIN (inner_type);
2681 /* Now get the DTYPE value. */
/* The numeric codes below are the VMS DSC$B_DTYPE encodings; the VAX
   floating-point cases key off the type's digits value.  */
2682 switch (TREE_CODE (type))
2686 if (TYPE_VAX_FLOATING_POINT_P (type))
2687 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2700 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2703 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2706 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2709 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2712 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2715 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2721 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2725 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2726 && TYPE_VAX_FLOATING_POINT_P (type))
2727 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2739 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2750 /* Get the CLASS value. */
2753 case By_Descriptor_A:
2756 case By_Descriptor_NCA:
2759 case By_Descriptor_SB:
2763 case By_Descriptor_S:
2769 /* Make the type for a descriptor for VMS. The first four fields
2770 are the same for all types. */
2773 = chainon (field_list,
2774 make_descriptor_field
2775 ("LENGTH", gnat_type_for_size (16, 1), record_type,
2776 size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));
2778 field_list = chainon (field_list,
2779 make_descriptor_field ("DTYPE",
2780 gnat_type_for_size (8, 1),
2781 record_type, size_int (dtype)));
2782 field_list = chainon (field_list,
2783 make_descriptor_field ("CLASS",
2784 gnat_type_for_size (8, 1),
2785 record_type, size_int (class)));
2787 /* Of course this will crash at run-time if the address space is not
2788 within the low 32 bits, but there is nothing else we can do. */
2789 pointer32_type = build_pointer_type_for_mode (type, SImode, false);
2792 = chainon (field_list,
2793 make_descriptor_field
2794 ("POINTER", pointer32_type, record_type,
2795 build_unary_op (ADDR_EXPR,
2797 build0 (PLACEHOLDER_EXPR, type))));
/* Remaining fields depend on the descriptor class.  */
2802 case By_Descriptor_S:
2805 case By_Descriptor_SB:
2807 = chainon (field_list,
2808 make_descriptor_field
2809 ("SB_L1", gnat_type_for_size (32, 1), record_type,
2810 TREE_CODE (type) == ARRAY_TYPE
2811 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2813 = chainon (field_list,
2814 make_descriptor_field
2815 ("SB_U1", gnat_type_for_size (32, 1), record_type,
2816 TREE_CODE (type) == ARRAY_TYPE
2817 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2820 case By_Descriptor_A:
2821 case By_Descriptor_NCA:
2822 field_list = chainon (field_list,
2823 make_descriptor_field ("SCALE",
2824 gnat_type_for_size (8, 1),
2828 field_list = chainon (field_list,
2829 make_descriptor_field ("DIGITS",
2830 gnat_type_for_size (8, 1),
2835 = chainon (field_list,
2836 make_descriptor_field
2837 ("AFLAGS", gnat_type_for_size (8, 1), record_type,
2838 size_int (mech == By_Descriptor_NCA
2840 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2841 : (TREE_CODE (type) == ARRAY_TYPE
2842 && TYPE_CONVENTION_FORTRAN_P (type)
2845 field_list = chainon (field_list,
2846 make_descriptor_field ("DIMCT",
2847 gnat_type_for_size (8, 1),
2851 field_list = chainon (field_list,
2852 make_descriptor_field ("ARSIZE",
2853 gnat_type_for_size (32, 1),
2855 size_in_bytes (type)));
2857 /* Now build a pointer to the 0,0,0... element. */
2858 tem = build0 (PLACEHOLDER_EXPR, type);
2859 for (i = 0, inner_type = type; i < ndim;
2860 i++, inner_type = TREE_TYPE (inner_type))
2861 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
2862 convert (TYPE_DOMAIN (inner_type), size_zero_node),
2863 NULL_TREE, NULL_TREE);
2866 = chainon (field_list,
2867 make_descriptor_field
2869 build_pointer_type_for_mode (inner_type, SImode, false),
2872 build_pointer_type_for_mode (inner_type, SImode,
2876 /* Next come the addressing coefficients. */
2877 tem = size_one_node;
2878 for (i = 0; i < ndim; i++)
/* Multiply the running stride by the extent of dimension I.  */
2882 = size_binop (MULT_EXPR, tem,
2883 size_binop (PLUS_EXPR,
2884 size_binop (MINUS_EXPR,
2885 TYPE_MAX_VALUE (idx_arr[i]),
2886 TYPE_MIN_VALUE (idx_arr[i])),
/* Field names: S0..Sn for NCA descriptors, M0..Mn otherwise.  */
2889 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
2890 fname[1] = '0' + i, fname[2] = 0;
2892 = chainon (field_list,
2893 make_descriptor_field (fname,
2894 gnat_type_for_size (32, 1),
2895 record_type, idx_length));
2897 if (mech == By_Descriptor_NCA)
2901 /* Finally here are the bounds. */
2902 for (i = 0; i < ndim; i++)
2906 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2908 = chainon (field_list,
2909 make_descriptor_field
2910 (fname, gnat_type_for_size (32, 1), record_type,
2911 TYPE_MIN_VALUE (idx_arr[i])));
2915 = chainon (field_list,
2916 make_descriptor_field
2917 (fname, gnat_type_for_size (32, 1), record_type,
2918 TYPE_MAX_VALUE (idx_arr[i])));
2923 post_error ("unsupported descriptor type for &", gnat_entity);
/* Lay out the descriptor record and give it a <entity>___DESC name.  */
2926 finish_record_type (record_type, field_list, 0, true);
2927 create_type_decl (create_concat_name (gnat_entity, "DESC"), record_type,
2928 NULL, true, false, gnat_entity);
2933 /* Utility routine for above code to make a field. */
/* Creates a FIELD_DECL named NAME of TYPE in REC_TYPE whose DECL_INITIAL
   holds the expression used when a descriptor constructor is built.
   NOTE(review): the trailing `return field;` is missing from this
   stored text (lossy extraction).  */
2936 make_descriptor_field (const char *name, tree type,
2937 tree rec_type, tree initial)
2940 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
2942 DECL_INITIAL (field) = initial;
2946 /* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
2947 pointer or fat pointer type. GNAT_SUBPROG is the subprogram to which
2948 the VMS descriptor is passed. */
/* NOTE(review): lossy extraction -- switch braces, `break;`s, some COND
   arms, the gcc_unreachable for other GNU_TYPEs, and a few intermediate
   statements are missing from this stored text.  */
2951 convert_vms_descriptor (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
2953 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
2954 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
2955 /* The CLASS field is the 3rd field in the descriptor. */
2956 tree class = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type)));
2957 /* The POINTER field is the 4th field in the descriptor. */
2958 tree pointer = TREE_CHAIN (class);
2960 /* Retrieve the value of the POINTER field. */
2962 = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);
/* For a thin pointer target a plain conversion suffices; fat pointers
   need a (data pointer, bounds pointer) pair built below.  */
2964 if (POINTER_TYPE_P (gnu_type))
2965 return convert (gnu_type, gnu_expr);
2967 else if (TYPE_FAT_POINTER_P (gnu_type))
2969 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
2970 tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
2971 tree template_type = TREE_TYPE (p_bounds_type);
2972 tree min_field = TYPE_FIELDS (template_type);
2973 tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
2974 tree template, template_addr, aflags, dimct, t, u;
2975 /* See the head comment of build_vms_descriptor. */
2976 int iclass = TREE_INT_CST_LOW (DECL_INITIAL (class));
2978 /* Convert POINTER to the type of the P_ARRAY field. */
2979 gnu_expr = convert (p_array_type, gnu_expr);
/* Dispatch on the descriptor class recorded at build time.  */
2983 case 1: /* Class S */
2984 case 15: /* Class SB */
2985 /* Build {1, LENGTH} template; LENGTH is the 1st field. */
2986 t = TYPE_FIELDS (desc_type);
2987 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
2988 t = tree_cons (min_field,
2989 convert (TREE_TYPE (min_field), integer_one_node),
2990 tree_cons (max_field,
2991 convert (TREE_TYPE (max_field), t),
2993 template = gnat_build_constructor (template_type, t);
2994 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);
2996 /* For class S, we are done. */
3000 /* Test that we really have a SB descriptor, like DEC Ada. */
3001 t = build3 (COMPONENT_REF, TREE_TYPE (class), desc, class, NULL);
3002 u = convert (TREE_TYPE (class), DECL_INITIAL (class));
3003 u = build_binary_op (EQ_EXPR, integer_type_node, t, u);
3004 /* If so, there is already a template in the descriptor and
3005 it is located right after the POINTER field. */
3006 t = TREE_CHAIN (pointer);
3007 template = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3008 /* Otherwise use the {1, LENGTH} template we build above. */
3009 template_addr = build3 (COND_EXPR, p_bounds_type, u,
3010 build_unary_op (ADDR_EXPR, p_bounds_type,
3015 case 4: /* Class A */
3016 /* The AFLAGS field is the 7th field in the descriptor. */
3017 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer)));
3018 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3019 /* The DIMCT field is the 8th field in the descriptor. */
3021 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3022 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
3023 or FL_COEFF or FL_BOUNDS not set. */
/* 192 == 0xC0, presumably the FL_COEFF | FL_BOUNDS mask -- confirm
   against the VMS descriptor definitions.  */
3024 u = build_int_cst (TREE_TYPE (aflags), 192);
3025 u = build_binary_op (TRUTH_OR_EXPR, integer_type_node,
3026 build_binary_op (NE_EXPR, integer_type_node,
3028 convert (TREE_TYPE (dimct),
3030 build_binary_op (NE_EXPR, integer_type_node,
3031 build2 (BIT_AND_EXPR,
3035 add_stmt (build3 (COND_EXPR, void_type_node, u,
3036 build_call_raise (CE_Length_Check_Failed, Empty,
3037 N_Raise_Constraint_Error),
3039 /* There is already a template in the descriptor and it is
3040 located at the start of block 3 (12th field). */
3041 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (t))));
3042 template = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3043 template_addr = build_unary_op (ADDR_EXPR, p_bounds_type, template);
3046 case 10: /* Class NCA */
3048 post_error ("unsupported descriptor type for &", gnat_subprog);
3049 template_addr = integer_zero_node;
3053 /* Build the fat pointer in the form of a constructor. */
3054 t = tree_cons (TYPE_FIELDS (gnu_type), gnu_expr,
3055 tree_cons (TREE_CHAIN (TYPE_FIELDS (gnu_type)),
3056 template_addr, NULL_TREE));
3057 return gnat_build_constructor (gnu_type, t);
3064 /* Build a stub for the subprogram specified by the GCC tree GNU_SUBPROG
3065 and the GNAT node GNAT_SUBPROG. */
/* NOTE(review): lossy extraction -- the declaration of gnu_body, the
   loop-termination condition of the parameter for-loop, and some braces
   are missing from this stored text.  */
3068 build_function_stub (tree gnu_subprog, Entity_Id gnat_subprog)
3070 tree gnu_subprog_type, gnu_subprog_addr, gnu_subprog_call;
3071 tree gnu_stub_param, gnu_param_list, gnu_arg_types, gnu_param;
3072 tree gnu_stub_decl = DECL_FUNCTION_STUB (gnu_subprog);
3075 gnu_subprog_type = TREE_TYPE (gnu_subprog);
3076 gnu_param_list = NULL_TREE;
3078 begin_subprog_body (gnu_stub_decl);
3081 start_stmt_group ();
3083 /* Loop over the parameters of the stub and translate any of them
3084 passed by descriptor into a by reference one. */
3085 for (gnu_stub_param = DECL_ARGUMENTS (gnu_stub_decl),
3086 gnu_arg_types = TYPE_ARG_TYPES (gnu_subprog_type);
3088 gnu_stub_param = TREE_CHAIN (gnu_stub_param),
3089 gnu_arg_types = TREE_CHAIN (gnu_arg_types))
3091 if (DECL_BY_DESCRIPTOR_P (gnu_stub_param))
3092 gnu_param = convert_vms_descriptor (TREE_VALUE (gnu_arg_types),
3093 gnu_stub_param, gnat_subprog)
3095 gnu_param = gnu_stub_param;
3097 gnu_param_list = tree_cons (NULL_TREE, gnu_param, gnu_param_list);
3100 gnu_body = end_stmt_group ();
3102 /* Invoke the internal subprogram. */
3103 gnu_subprog_addr = build1 (ADDR_EXPR, build_pointer_type (gnu_subprog_type),
3105 gnu_subprog_call = build_call_list (TREE_TYPE (gnu_subprog_type),
3107 nreverse (gnu_param_list));
3109 /* Propagate the return value, if any. */
3110 if (VOID_TYPE_P (TREE_TYPE (gnu_subprog_type)))
3111 append_to_statement_list (gnu_subprog_call, &gnu_body);
3113 append_to_statement_list (build_return_expr (DECL_RESULT (gnu_stub_decl),
/* Compile the stub: give it a struct function, then finish the body.  */
3119 allocate_struct_function (gnu_stub_decl, false);
3120 end_subprog_body (gnu_body);
3123 /* Build a type to be used to represent an aliased object whose nominal
3124 type is an unconstrained array. This consists of a RECORD_TYPE containing
3125 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
3126 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
3127 is used to represent an arbitrary unconstrained object. Use NAME
3128 as the name of the record. */
3131 build_unc_object_type (tree template_type, tree object_type, tree name)
/* The BOUNDS field carries the template (bounds) and the ARRAY field the
   data.  NOTE(review): lines are elided here, e.g. the tail of the ARRAY
   create_field_decl call and the end of the finish_record_type call.  */
3133 tree type = make_node (RECORD_TYPE);
3134 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
3135 template_type, type, 0, 0, 0, 1);
3136 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
/* Mark the record so consumers can recognize that its first field is a
   bounds template (see TYPE_CONTAINS_TEMPLATE_P users elsewhere).  */
3139 TYPE_NAME (type) = name;
3140 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
3141 finish_record_type (type,
3142 chainon (chainon (NULL_TREE, template_field),
3149 /* Same, taking a thin or fat pointer type instead of a template type. */
3152 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
/* Only thin or fat pointers are legal here.  */
3157 gcc_assert (TYPE_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
/* Recover the template type from the pointer: for a fat pointer it is the
   designated type of the second field; for a thin pointer it is the type
   of the first field of the designated record.  NOTE(review): the
   "tree template_type" declaration line appears elided just above.  */
3160 = (TYPE_FAT_POINTER_P (thin_fat_ptr_type)
3161 ? TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
3162 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
3163 return build_unc_object_type (template_type, object_type, name);
3166 /* Shift the component offsets within an unconstrained object TYPE to make it
3167 suitable for use as a designated type for thin pointers. */
3170 shift_unc_components_for_thin_pointers (tree type)
3172 /* Thin pointer values designate the ARRAY data of an unconstrained object,
3173 allocated past the BOUNDS template. The designated type is adjusted to
3174 have ARRAY at position zero and the template at a negative offset, so
3175 that COMPONENT_REFs on (*thin_ptr) designate the proper location. */
3177 tree bounds_field = TYPE_FIELDS (type);
3178 tree array_field = TREE_CHAIN (TYPE_FIELDS (type));
/* BOUNDS moves to minus the byte position ARRAY had after layout ...  */
3180 DECL_FIELD_OFFSET (bounds_field)
3181 = size_binop (MINUS_EXPR, size_zero_node, byte_position (array_field));
/* ... and ARRAY lands at offset zero, in both bytes and bits.  */
3183 DECL_FIELD_OFFSET (array_field) = size_zero_node;
3184 DECL_FIELD_BIT_OFFSET (array_field) = bitsize_zero_node;
3187 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE. In
3188 the normal case this is just two adjustments, but we have more to do
3189 if NEW is an UNCONSTRAINED_ARRAY_TYPE. */
/* NOTE(review): this listing is elided (declarations such as "type",
   "ptr1"/"ref1", "var", "new_ref" and several braces/early returns are
   missing).  The comments added here describe only the visible logic.  */
3192 update_pointer_to (tree old_type, tree new_type)
3194 tree ptr = TYPE_POINTER_TO (old_type);
3195 tree ref = TYPE_REFERENCE_TO (old_type);
3199 /* If this is the main variant, process all the other variants first. */
3200 if (TYPE_MAIN_VARIANT (old_type) == old_type)
3201 for (type = TYPE_NEXT_VARIANT (old_type); type;
3202 type = TYPE_NEXT_VARIANT (type))
3203 update_pointer_to (type, new_type);
3205 /* If no pointer or reference, we are done. */
3209 /* Merge the old type qualifiers in the new type.
3211 Each old variant has qualifiers for specific reasons, and the new
3212 designated type as well. Each set of qualifiers represents useful
3213 information grabbed at some point, and merging the two simply unifies
3214 these inputs into the final type description.
3216 Consider for instance a volatile type frozen after an access to constant
3217 type designating it. After the designated type freeze, we get here with a
3218 volatile new_type and a dummy old_type with a readonly variant, created
3219 when the access type was processed. We shall make a volatile and readonly
3220 designated type, because that's what it really is.
3222 We might also get here for a non-dummy old_type variant with different
3223 qualifiers than the new_type ones, for instance in some cases of pointers
3224 to private record type elaboration (see the comments around the call to
3225 this routine from gnat_to_gnu_entity/E_Access_Type). We have to merge the
3226 qualifiers in those cases too, to avoid accidentally discarding the
3227 initial set, and will often end up with old_type == new_type then. */
3228 new_type = build_qualified_type (new_type,
3229 TYPE_QUALS (old_type)
3230 | TYPE_QUALS (new_type));
3232 /* If the new type and the old one are identical, there is nothing to
3234 if (old_type == new_type)
3237 /* Otherwise, first handle the simple case. */
3238 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
/* Transfer the pointer/reference chains wholesale, then retype every
   pointer and reference (and all their variants) to NEW_TYPE.  */
3240 TYPE_POINTER_TO (new_type) = ptr;
3241 TYPE_REFERENCE_TO (new_type) = ref;
3243 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
3244 for (ptr1 = TYPE_MAIN_VARIANT (ptr); ptr1;
3245 ptr1 = TYPE_NEXT_VARIANT (ptr1))
3246 TREE_TYPE (ptr1) = new_type;
3248 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
3249 for (ref1 = TYPE_MAIN_VARIANT (ref); ref1;
3250 ref1 = TYPE_NEXT_VARIANT (ref1))
3251 TREE_TYPE (ref1) = new_type;
3254 /* Now deal with the unconstrained array case. In this case the "pointer"
3255 is actually a RECORD_TYPE where both fields are pointers to dummy nodes.
3256 Turn them into pointers to the correct types using update_pointer_to. */
/* NOTE(review): the body of this branch (presumably an assertion that this
   situation cannot occur) is elided; the real work is in the else arm.  */
3257 else if (TREE_CODE (ptr) != RECORD_TYPE || !TYPE_IS_FAT_POINTER_P (ptr))
3262 tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
3263 tree array_field = TYPE_FIELDS (ptr);
3264 tree bounds_field = TREE_CHAIN (TYPE_FIELDS (ptr));
3265 tree new_ptr = TYPE_POINTER_TO (new_type);
3269 /* Make pointers to the dummy template point to the real template. */
3271 (TREE_TYPE (TREE_TYPE (bounds_field)),
3272 TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_ptr)))));
3274 /* The references to the template bounds present in the array type
3275 are made through a PLACEHOLDER_EXPR of type new_ptr. Since we
3276 are updating ptr to make it a full replacement for new_ptr as
3277 pointer to new_type, we must rework the PLACEHOLDER_EXPR so as
3278 to make it of type ptr. */
3279 new_ref = build3 (COMPONENT_REF, TREE_TYPE (bounds_field),
3280 build0 (PLACEHOLDER_EXPR, ptr),
3281 bounds_field, NULL_TREE);
3283 /* Create the new array for the new PLACEHOLDER_EXPR and make
3284 pointers to the dummy array point to it.
3286 ??? This is now the only use of substitute_in_type,
3287 which is a very "heavy" routine to do this, so it
3288 should be replaced at some point. */
3290 (TREE_TYPE (TREE_TYPE (array_field)),
3291 substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))),
3292 TREE_CHAIN (TYPE_FIELDS (new_ptr)), new_ref));
3294 /* Make ptr the pointer to new_type. */
3295 TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
3296 = TREE_TYPE (new_type) = ptr;
/* Every variant of the fat pointer must also record NEW_TYPE as its
   unconstrained array type.  */
3298 for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
3299 SET_TYPE_UNCONSTRAINED_ARRAY (var, new_type);
3301 /* Now handle updating the allocation record, what the thin pointer
3302 points to. Update all pointers from the old record into the new
3303 one, update the type of the array field, and recompute the size. */
3304 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);
3306 TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
3307 = TREE_TYPE (TREE_TYPE (array_field));
3309 /* The size recomputation needs to account for alignment constraints, so
3310 we let layout_type work it out. This will reset the field offsets to
3311 what they would be in a regular record, so we shift them back to what
3312 we want them to be for a thin pointer designated type afterwards. */
3313 DECL_SIZE (TYPE_FIELDS (new_obj_rec)) = 0;
3314 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))) = 0;
3315 TYPE_SIZE (new_obj_rec) = 0;
3316 layout_type (new_obj_rec);
3318 shift_unc_components_for_thin_pointers (new_obj_rec);
3320 /* We are done, at last. */
3321 rest_of_record_type_compilation (ptr);
3325 /* Convert a pointer to a constrained array into a pointer to a fat
3326 pointer. This involves making or finding a template. */
/* NOTE(review): elided listing -- braces, "return" keywords before the
   gnat_build_constructor calls, and some call tails are missing.  */
3329 convert_to_fat_pointer (tree type, tree expr)
/* TEMPLATE_TYPE is the designated type of the fat pointer's second field
   (the bounds pointer).  */
3331 tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
3332 tree template, template_addr;
3333 tree etype = TREE_TYPE (expr);
3335 /* If EXPR is a constant of zero, we make a fat pointer that has a null
3336 pointer to the template and array. */
3337 if (integer_zerop (expr))
3339 gnat_build_constructor
3341 tree_cons (TYPE_FIELDS (type),
3342 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
3343 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3344 convert (build_pointer_type (template_type),
3348 /* If EXPR is a thin pointer, make the template and data from the record. */
3350 else if (TYPE_THIN_POINTER_P (etype))
3352 tree fields = TYPE_FIELDS (TREE_TYPE (etype));
/* If the thin pointer is the address of some object, access the record
   directly through that object rather than through a dereference.  */
3354 expr = save_expr (expr);
3355 if (TREE_CODE (expr) == ADDR_EXPR)
3356 expr = TREE_OPERAND (expr, 0);
3358 expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
/* First field of the designated record is the template; second is the
   array data, whose address becomes the fat pointer's first word.  */
3360 template = build_component_ref (expr, NULL_TREE, fields, false);
3361 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
3362 build_component_ref (expr, NULL_TREE,
3363 TREE_CHAIN (fields), false));
3366 /* Otherwise, build the constructor for the template. */
3367 template = build_template (template_type, TREE_TYPE (etype), expr);
3369 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);
3371 /* The result is a CONSTRUCTOR for the fat pointer.
3373 If expr is an argument of a foreign convention subprogram, the type it
3374 points to is directly the component type. In this case, the expression
3375 type may not match the corresponding FIELD_DECL type at this point, so we
3376 call "convert" here to fix that up if necessary. This type consistency is
3377 required, for instance because it ensures that possible later folding of
3378 component_refs against this constructor always yields something of the
3379 same type as the initial reference.
3381 Note that the call to "build_template" above is still fine, because it
3382 will only refer to the provided template_type in this case. */
3384 gnat_build_constructor
3385 (type, tree_cons (TYPE_FIELDS (type),
3386 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
3387 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3388 template_addr, NULL_TREE)));
3391 /* Convert to a thin pointer type, TYPE. The only thing we know how to convert
3392 is something that is a fat pointer, so convert to it first if EXPR
3393 is not already one. */
3396 convert_to_thin_pointer (tree type, tree expr)
3398 if (!TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
/* NOTE(review): the "expr" lvalue of this assignment is on an elided line.  */
3400 = convert_to_fat_pointer
3401 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
3403 /* We get the pointer to the data and use a NOP_EXPR to make it the
3405 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)),
3407 expr = build1 (NOP_EXPR, type, expr);
3412 /* Create an expression whose value is that of EXPR,
3413 converted to type TYPE. The TREE_TYPE of the value
3414 is always TYPE. This function implements all reasonable
3415 conversions; callers should filter out those that are
3416 not permitted by the language being compiled. */
/* NOTE(review): this listing is elided throughout (braces, returns, case
   labels such as the ones before the integer/pointer/real arms, and several
   statement tails are missing).  The added comments describe only visible
   logic; confirm structure against the pristine source.  */
3419 convert (tree type, tree expr)
3421 enum tree_code code = TREE_CODE (type);
3422 tree etype = TREE_TYPE (expr);
3423 enum tree_code ecode = TREE_CODE (etype);
3425 /* If EXPR is already the right type, we are done. */
3429 /* If both input and output have padding and are of variable size, do this
3430 as an unchecked conversion. Likewise if one is a mere variant of the
3431 other, so we avoid a pointless unpad/repad sequence. */
3432 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
3433 && TYPE_IS_PADDING_P (type) && TYPE_IS_PADDING_P (etype)
3434 && (!TREE_CONSTANT (TYPE_SIZE (type))
3435 || !TREE_CONSTANT (TYPE_SIZE (etype))
3436 || gnat_types_compatible_p (type, etype)
3437 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
3438 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
3441 /* If the output type has padding, convert to the inner type and
3442 make a constructor to build the record. */
3443 else if (code == RECORD_TYPE && TYPE_IS_PADDING_P (type))
3445 /* If we previously converted from another type and our type is
3446 of variable size, remove the conversion to avoid the need for
3447 variable-size temporaries. Likewise for a conversion between
3448 original and packable version. */
3449 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3450 && (!TREE_CONSTANT (TYPE_SIZE (type))
3451 || (ecode == RECORD_TYPE
3452 && TYPE_NAME (etype)
3453 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
3454 expr = TREE_OPERAND (expr, 0);
3456 /* If we are just removing the padding from expr, convert the original
3457 object if we have variable size in order to avoid the need for some
3458 variable-size temporaries. Likewise if the padding is a mere variant
3459 of the other, so we avoid a pointless unpad/repad sequence. */
3460 if (TREE_CODE (expr) == COMPONENT_REF
3461 && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE
3462 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
3463 && (!TREE_CONSTANT (TYPE_SIZE (type))
3464 || gnat_types_compatible_p (type,
3465 TREE_TYPE (TREE_OPERAND (expr, 0)))
3466 || (ecode == RECORD_TYPE
3467 && TYPE_NAME (etype)
3468 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
3469 return convert (type, TREE_OPERAND (expr, 0));
3471 /* If the result type is a padded type with a self-referentially-sized
3472 field and the expression type is a record, do this as an
3473 unchecked conversion. */
3474 else if (TREE_CODE (etype) == RECORD_TYPE
3475 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
3476 return unchecked_convert (type, expr, false);
/* General case: wrap the converted inner value in a one-field
   CONSTRUCTOR of the padded record type.  */
3480 gnat_build_constructor (type,
3481 tree_cons (TYPE_FIELDS (type),
3483 (TYPE_FIELDS (type)),
3488 /* If the input type has padding, remove it and convert to the output type.
3489 The conditions ordering is arranged to ensure that the output type is not
3490 a padding type here, as it is not clear whether the conversion would
3491 always be correct if this was to happen. */
3492 else if (ecode == RECORD_TYPE && TYPE_IS_PADDING_P (etype))
3496 /* If we have just converted to this padded type, just get the
3497 inner expression. */
3498 if (TREE_CODE (expr) == CONSTRUCTOR
3499 && !VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (expr))
3500 && VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->index
3501 == TYPE_FIELDS (etype))
3503 = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->value;
3505 /* Otherwise, build an explicit component reference. */
3508 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
3510 return convert (type, unpadded);
3513 /* If the input is a biased type, adjust first. */
3514 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
3515 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
3516 fold_convert (TREE_TYPE (etype),
3518 TYPE_MIN_VALUE (etype)));
3520 /* If the input is a justified modular type, we need to extract the actual
3521 object before converting it to any other type with the exceptions of an
3522 unconstrained array or of a mere type variant. It is useful to avoid the
3523 extraction and conversion in the type variant case because it could end
3524 up replacing a VAR_DECL expr by a constructor and we might be about the
3525 take the address of the result. */
3526 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
3527 && code != UNCONSTRAINED_ARRAY_TYPE
3528 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
3529 return convert (type, build_component_ref (expr, NULL_TREE,
3530 TYPE_FIELDS (etype), false));
3532 /* If converting to a type that contains a template, convert to the data
3533 type and then build the template. */
3534 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
3536 tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));
3538 /* If the source already has a template, get a reference to the
3539 associated array only, as we are going to rebuild a template
3540 for the target type anyway. */
3541 expr = maybe_unconstrained_array (expr);
3544 gnat_build_constructor
3546 tree_cons (TYPE_FIELDS (type),
3547 build_template (TREE_TYPE (TYPE_FIELDS (type)),
3548 obj_type, NULL_TREE),
3549 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3550 convert (obj_type, expr), NULL_TREE)));
3553 /* There are some special cases of expressions that we process
3555 switch (TREE_CODE (expr))
3561 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
3562 conversion in gnat_expand_expr. NULL_EXPR does not represent
3563 an actual value, so no conversion is needed. */
3564 expr = copy_node (expr);
3565 TREE_TYPE (expr) = type;
3569 /* If we are converting a STRING_CST to another constrained array type,
3570 just make a new one in the proper type. */
3571 if (code == ecode && AGGREGATE_TYPE_P (etype)
3572 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
3573 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
3575 expr = copy_node (expr);
3576 TREE_TYPE (expr) = type;
3582 /* If we are converting a CONSTRUCTOR to a mere variant type, just make
3583 a new one in the proper type. */
3584 if (code == ecode && gnat_types_compatible_p (type, etype))
3586 expr = copy_node (expr);
3587 TREE_TYPE (expr) = type;
3591 /* Likewise for a conversion between original and packable version, but
3592 we have to work harder in order to preserve type consistency. */
3594 && code == RECORD_TYPE
3595 && TYPE_NAME (type) == TYPE_NAME (etype))
3597 VEC(constructor_elt,gc) *e = CONSTRUCTOR_ELTS (expr);
3598 unsigned HOST_WIDE_INT len = VEC_length (constructor_elt, e);
3599 VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, len);
3600 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
3601 unsigned HOST_WIDE_INT idx;
/* Walk the source constructor's elements in lockstep with both field
   chains, converting each value to the target field's type.  */
3604 FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
3606 constructor_elt *elt = VEC_quick_push (constructor_elt, v, NULL);
3607 /* We expect only simple constructors. Otherwise, punt. */
3608 if (!(index == efield || index == DECL_ORIGINAL_FIELD (efield)))
3611 elt->value = convert (TREE_TYPE (field), value);
3612 efield = TREE_CHAIN (efield);
3613 field = TREE_CHAIN (field);
3618 expr = copy_node (expr);
3619 TREE_TYPE (expr) = type;
3620 CONSTRUCTOR_ELTS (expr) = v;
3626 case UNCONSTRAINED_ARRAY_REF:
3627 /* Convert this to the type of the inner array by getting the address of
3628 the array from the template. */
3629 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
3630 build_component_ref (TREE_OPERAND (expr, 0),
3631 get_identifier ("P_ARRAY"),
/* Refresh the cached type/code, since EXPR changed.  */
3633 etype = TREE_TYPE (expr);
3634 ecode = TREE_CODE (etype);
3637 case VIEW_CONVERT_EXPR:
3639 /* GCC 4.x is very sensitive to type consistency overall, and view
3640 conversions thus are very frequent. Even though just "convert"ing
3641 the inner operand to the output type is fine in most cases, it
3642 might expose unexpected input/output type mismatches in special
3643 circumstances so we avoid such recursive calls when we can. */
3644 tree op0 = TREE_OPERAND (expr, 0);
3646 /* If we are converting back to the original type, we can just
3647 lift the input conversion. This is a common occurrence with
3648 switches back-and-forth amongst type variants. */
3649 if (type == TREE_TYPE (op0))
3652 /* Otherwise, if we're converting between two aggregate types, we
3653 might be allowed to substitute the VIEW_CONVERT_EXPR target type
3654 in place or to just convert the inner expression. */
3655 if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
3657 /* If we are converting between mere variants, we can just
3658 substitute the VIEW_CONVERT_EXPR in place. */
3659 if (gnat_types_compatible_p (type, etype))
3660 return build1 (VIEW_CONVERT_EXPR, type, op0);
3662 /* Otherwise, we may just bypass the input view conversion unless
3663 one of the types is a fat pointer, which is handled by
3664 specialized code below which relies on exact type matching. */
3665 else if (!TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
3666 return convert (type, op0);
3672 /* If both types are record types, just convert the pointer and
3673 make a new INDIRECT_REF.
3675 ??? Disable this for now since it causes problems with the
3676 code in build_binary_op for MODIFY_EXPR which wants to
3677 strip off conversions. But that code really is a mess and
3678 we need to do this a much better way some time. */
3680 && (TREE_CODE (type) == RECORD_TYPE
3681 || TREE_CODE (type) == UNION_TYPE)
3682 && (TREE_CODE (etype) == RECORD_TYPE
3683 || TREE_CODE (etype) == UNION_TYPE)
3684 && !TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
3685 return build_unary_op (INDIRECT_REF, NULL_TREE,
3686 convert (build_pointer_type (type),
3687 TREE_OPERAND (expr, 0)));
3694 /* Check for converting to a pointer to an unconstrained array. */
3695 if (TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
3696 return convert_to_fat_pointer (type, expr);
3698 /* If we are converting between two aggregate types that are mere
3699 variants, just make a VIEW_CONVERT_EXPR. */
3700 else if (code == ecode
3701 && AGGREGATE_TYPE_P (type)
3702 && gnat_types_compatible_p (type, etype))
3703 return build1 (VIEW_CONVERT_EXPR, type, expr);
3705 /* In all other cases of related types, make a NOP_EXPR. */
3706 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
3707 || (code == INTEGER_CST && ecode == INTEGER_CST
3708 && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
3709 return fold_convert (type, expr);
/* NOTE(review): the switch over CODE begins around here; its opening
   lines and the first case labels are elided.  */
3714 return fold_build1 (CONVERT_EXPR, type, expr);
3717 return fold_convert (type, gnat_truthvalue_conversion (expr));
3720 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
3721 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
3722 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
3723 return unchecked_convert (type, expr, false);
3724 else if (TYPE_BIASED_REPRESENTATION_P (type))
3725 return fold_convert (type,
3726 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
3727 convert (TREE_TYPE (type), expr),
3728 TYPE_MIN_VALUE (type)));
3730 /* ... fall through ... */
3733 /* If we are converting an additive expression to an integer type
3734 with lower precision, be wary of the optimization that can be
3735 applied by convert_to_integer. There are 2 problematic cases:
3736 - if the first operand was originally of a biased type,
3737 because we could be recursively called to convert it
3738 to an intermediate type and thus rematerialize the
3739 additive operator endlessly,
3740 - if the expression contains a placeholder, because an
3741 intermediate conversion that changes the sign could
3742 be inserted and thus introduce an artificial overflow
3743 at compile time when the placeholder is substituted. */
3744 if (code == INTEGER_TYPE
3745 && ecode == INTEGER_TYPE
3746 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
3747 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
3749 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
3751 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
3752 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
3753 || CONTAINS_PLACEHOLDER_P (expr))
3754 return build1 (NOP_EXPR, type, expr);
3757 return fold (convert_to_integer (type, expr));
3760 case REFERENCE_TYPE:
3761 /* If converting between two pointers to records denoting
3762 both a template and type, adjust if needed to account
3763 for any differing offsets, since one might be negative. */
3764 if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
3767 = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
3768 bit_position (TYPE_FIELDS (TREE_TYPE (type))));
3769 tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
3770 sbitsize_int (BITS_PER_UNIT));
3772 expr = build1 (NOP_EXPR, type, expr);
3773 TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
3774 if (integer_zerop (byte_diff))
3777 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
3778 fold (convert (sizetype, byte_diff)));
3781 /* If converting to a thin pointer, handle specially. */
3782 if (TYPE_THIN_POINTER_P (type)
3783 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
3784 return convert_to_thin_pointer (type, expr);
3786 /* If converting fat pointer to normal pointer, get the pointer to the
3787 array and then convert it. */
3788 else if (TYPE_FAT_POINTER_P (etype))
3789 expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
3792 return fold (convert_to_pointer (type, expr));
3795 return fold (convert_to_real (type, expr));
/* RECORD_TYPE arm: a justified modular target gets its scalar value
   wrapped in a one-field constructor.  */
3798 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
3800 gnat_build_constructor
3801 (type, tree_cons (TYPE_FIELDS (type),
3802 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
3805 /* ... fall through ... */
3808 /* In these cases, assume the front-end has validated the conversion.
3809 If the conversion is valid, it will be a bit-wise conversion, so
3810 it can be viewed as an unchecked conversion. */
3811 return unchecked_convert (type, expr, false);
3814 /* This is a either a conversion between a tagged type and some
3815 subtype, which we have to mark as a UNION_TYPE because of
3816 overlapping fields or a conversion of an Unchecked_Union. */
3817 return unchecked_convert (type, expr, false);
3819 case UNCONSTRAINED_ARRAY_TYPE:
3820 /* If EXPR is a constrained array, take its address, convert it to a
3821 fat pointer, and then dereference it. Likewise if EXPR is a
3822 record containing both a template and a constrained array.
3823 Note that a record representing a justified modular type
3824 always represents a packed constrained array. */
3825 if (ecode == ARRAY_TYPE
3826 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
3827 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
3828 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
3831 (INDIRECT_REF, NULL_TREE,
3832 convert_to_fat_pointer (TREE_TYPE (type),
3833 build_unary_op (ADDR_EXPR,
3836 /* Do something very similar for converting one unconstrained
3837 array to another. */
3838 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
3840 build_unary_op (INDIRECT_REF, NULL_TREE,
3841 convert (TREE_TYPE (type),
3842 build_unary_op (ADDR_EXPR,
3848 return fold (convert_to_complex (type, expr));
3855 /* Remove all conversions that are done in EXP. This includes converting
3856 from a padded type or to a justified modular type. If TRUE_ADDRESS
3857 is true, always return the address of the containing object even if
3858 the address is not bit-aligned. */
/* NOTE(review): elided listing -- the CONSTRUCTOR/COMPONENT_REF case labels,
   braces and the final "return exp" are missing from this view.  */
3861 remove_conversions (tree exp, bool true_address)
3863 switch (TREE_CODE (exp))
/* A one-element CONSTRUCTOR of a justified modular record wraps the
   real value; recurse into it.  */
3867 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3868 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
3870 remove_conversions (VEC_index (constructor_elt,
3871 CONSTRUCTOR_ELTS (exp), 0)->value,
/* A COMPONENT_REF into a padding record is an unpadding conversion.  */
3876 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
3877 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
3878 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3881 case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
3883 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3892 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
3893 refers to the underlying array. If its type has TYPE_CONTAINS_TEMPLATE_P,
3894 likewise return an expression pointing to the underlying array. */
/* NOTE(review): elided listing -- the "tree new" declaration, braces,
   several returns and the default case are missing.  Note also that "new"
   is used as an identifier: this file must be compiled as C, not C++.  */
3897 maybe_unconstrained_array (tree exp)
3899 enum tree_code code = TREE_CODE (exp);
3902 switch (TREE_CODE (TREE_TYPE (exp)))
3904 case UNCONSTRAINED_ARRAY_TYPE:
3905 if (code == UNCONSTRAINED_ARRAY_REF)
/* Dereference the P_ARRAY pointer found in the fat pointer.  */
3908 = build_unary_op (INDIRECT_REF, NULL_TREE,
3909 build_component_ref (TREE_OPERAND (exp, 0),
3910 get_identifier ("P_ARRAY"),
3912 TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp);
3916 else if (code == NULL_EXPR)
/* Propagate the NULL_EXPR, retyped to the underlying array type.  */
3917 return build1 (NULL_EXPR,
3918 TREE_TYPE (TREE_TYPE (TYPE_FIELDS
3919 (TREE_TYPE (TREE_TYPE (exp))))),
3920 TREE_OPERAND (exp, 0));
3923 /* If this is a padded type, convert to the unpadded type and see if
3924 it contains a template. */
3925 if (TYPE_IS_PADDING_P (TREE_TYPE (exp)))
3927 new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp))), exp);
3928 if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
3929 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (new)))
/* The array is the second field, after the bounds template.  */
3931 build_component_ref (new, NULL_TREE,
3932 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (new))),
3935 else if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
3937 build_component_ref (exp, NULL_TREE,
3938 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))), 0);
3948 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
3949 If NOTRUNC_P is true, truncation operations should be suppressed. */
3952 unchecked_convert (tree type, tree expr, bool notrunc_p)
3954 tree etype = TREE_TYPE (expr);
3956 /* If the expression is already the right type, we are done. */
3960 /* If both types types are integral just do a normal conversion.
3961 Likewise for a conversion to an unconstrained array. */
3962 if ((((INTEGRAL_TYPE_P (type)
3963 && !(TREE_CODE (type) == INTEGER_TYPE
3964 && TYPE_VAX_FLOATING_POINT_P (type)))
3965 || (POINTER_TYPE_P (type) && ! TYPE_THIN_POINTER_P (type))
3966 || (TREE_CODE (type) == RECORD_TYPE
3967 && TYPE_JUSTIFIED_MODULAR_P (type)))
3968 && ((INTEGRAL_TYPE_P (etype)
3969 && !(TREE_CODE (etype) == INTEGER_TYPE
3970 && TYPE_VAX_FLOATING_POINT_P (etype)))
3971 || (POINTER_TYPE_P (etype) && !TYPE_THIN_POINTER_P (etype))
3972 || (TREE_CODE (etype) == RECORD_TYPE
3973 && TYPE_JUSTIFIED_MODULAR_P (etype))))
3974 || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3977 bool final_unchecked = false;
3979 if (TREE_CODE (etype) == INTEGER_TYPE
3980 && TYPE_BIASED_REPRESENTATION_P (etype))
3982 tree ntype = copy_type (etype);
3984 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
3985 TYPE_MAIN_VARIANT (ntype) = ntype;
3986 expr = build1 (NOP_EXPR, ntype, expr);
3989 if (TREE_CODE (type) == INTEGER_TYPE
3990 && TYPE_BIASED_REPRESENTATION_P (type))
3992 rtype = copy_type (type);
3993 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
3994 TYPE_MAIN_VARIANT (rtype) = rtype;
3997 /* We have another special case: if we are unchecked converting subtype
3998 into a base type, we need to ensure that VRP doesn't propagate range
3999 information since this conversion may be done precisely to validate
4000 that the object is within the range it is supposed to have. */
4001 else if (TREE_CODE (expr) != INTEGER_CST
4002 && TREE_CODE (type) == INTEGER_TYPE && !TREE_TYPE (type)
4003 && ((TREE_CODE (etype) == INTEGER_TYPE && TREE_TYPE (etype))
4004 || TREE_CODE (etype) == ENUMERAL_TYPE
4005 || TREE_CODE (etype) == BOOLEAN_TYPE))
4007 /* The optimization barrier is a VIEW_CONVERT_EXPR node; moreover,
4008 in order not to be deemed an useless type conversion, it must
4009 be from subtype to base type.
4011 ??? This may raise addressability and/or aliasing issues because
4012 VIEW_CONVERT_EXPR gets gimplified as an lvalue, thus causing the
4013 address of its operand to be taken if it is deemed addressable
4014 and not already in GIMPLE form. */
4015 rtype = gnat_type_for_mode (TYPE_MODE (type), TYPE_UNSIGNED (type));
4016 rtype = copy_type (rtype);
4017 TYPE_MAIN_VARIANT (rtype) = rtype;
4018 TREE_TYPE (rtype) = type;
4019 final_unchecked = true;
4022 expr = convert (rtype, expr);
4024 expr = fold_build1 (final_unchecked ? VIEW_CONVERT_EXPR : NOP_EXPR,
4028 /* If we are converting TO an integral type whose precision is not the
4029 same as its size, first unchecked convert to a record that contains
4030 an object of the output type. Then extract the field. */
4031 else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
4032 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4033 GET_MODE_BITSIZE (TYPE_MODE (type))))
4035 tree rec_type = make_node (RECORD_TYPE);
4036 tree field = create_field_decl (get_identifier ("OBJ"), type,
4037 rec_type, 1, 0, 0, 0);
4039 TYPE_FIELDS (rec_type) = field;
4040 layout_type (rec_type);
4042 expr = unchecked_convert (rec_type, expr, notrunc_p);
4043 expr = build_component_ref (expr, NULL_TREE, field, 0);
4046 /* Similarly for integral input type whose precision is not equal to its
4048 else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype)
4049 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
4050 GET_MODE_BITSIZE (TYPE_MODE (etype))))
4052 tree rec_type = make_node (RECORD_TYPE);
4054 = create_field_decl (get_identifier ("OBJ"), etype, rec_type,
4057 TYPE_FIELDS (rec_type) = field;
4058 layout_type (rec_type);
4060 expr = gnat_build_constructor (rec_type, build_tree_list (field, expr));
4061 expr = unchecked_convert (type, expr, notrunc_p);
4064 /* We have a special case when we are converting between two
4065 unconstrained array types. In that case, take the address,
4066 convert the fat pointer types, and dereference. */
4067 else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
4068 && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
4069 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
4070 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
4071 build_unary_op (ADDR_EXPR, NULL_TREE,
4075 expr = maybe_unconstrained_array (expr);
4076 etype = TREE_TYPE (expr);
4077 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
4080 /* If the result is an integral type whose size is not equal to
4081 the size of the underlying machine type, sign- or zero-extend
4082 the result. We need not do this in the case where the input is
4083 an integral type of the same precision and signedness or if the output
4084 is a biased type or if both the input and output are unsigned. */
4086 && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
4087 && !(TREE_CODE (type) == INTEGER_TYPE
4088 && TYPE_BIASED_REPRESENTATION_P (type))
4089 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4090 GET_MODE_BITSIZE (TYPE_MODE (type)))
4091 && !(INTEGRAL_TYPE_P (etype)
4092 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
4093 && operand_equal_p (TYPE_RM_SIZE (type),
4094 (TYPE_RM_SIZE (etype) != 0
4095 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
4097 && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
4099 tree base_type = gnat_type_for_mode (TYPE_MODE (type),
4100 TYPE_UNSIGNED (type));
4102 = convert (base_type,
4103 size_binop (MINUS_EXPR,
4105 (GET_MODE_BITSIZE (TYPE_MODE (type))),
4106 TYPE_RM_SIZE (type)));
4109 build_binary_op (RSHIFT_EXPR, base_type,
4110 build_binary_op (LSHIFT_EXPR, base_type,
4111 convert (base_type, expr),
4116 /* An unchecked conversion should never raise Constraint_Error. The code
4117 below assumes that GCC's conversion routines overflow the same way that
4118 the underlying hardware does. This is probably true. In the rare case
4119 when it is false, we can rely on the fact that such conversions are
4120 erroneous anyway. */
4121 if (TREE_CODE (expr) == INTEGER_CST)
4122 TREE_OVERFLOW (expr) = 0;
/* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
   show no longer constant.  */
4126 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4127 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
4129 TREE_CONSTANT (expr) = 0;
4134 /* Return the appropriate GCC tree code for the specified GNAT type,
4135 the latter being a record type as predicated by Is_Record_Type. */
4138 tree_code_for_record_type (Entity_Id gnat_type)
4140 Node_Id component_list
4141 = Component_List (Type_Definition
4143 (Implementation_Base_Type (gnat_type))));
4146 /* Make this a UNION_TYPE unless it's either not an Unchecked_Union or
4147 we have a non-discriminant field outside a variant. In either case,
4148 it's a RECORD_TYPE. */
4150 if (!Is_Unchecked_Union (gnat_type))
4153 for (component = First_Non_Pragma (Component_Items (component_list));
4154 Present (component);
4155 component = Next_Non_Pragma (component))
4156 if (Ekind (Defining_Entity (component)) == E_Component)
4162 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
4163 component of an aggregate type. */
4166 type_for_nonaliased_component_p (tree gnu_type)
4168 /* If the type is passed by reference, we may have pointers to the
4169 component so it cannot be made non-aliased. */
4170 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
4173 /* We used to say that any component of aggregate type is aliased
4174 because the front-end may take 'Reference of it. The front-end
4175 has been enhanced in the meantime so as to use a renaming instead
4176 in most cases, but the back-end can probably take the address of
4177 such a component too so we go for the conservative stance.
4179 For instance, we might need the address of any array type, even
4180 if normally passed by copy, to construct a fat pointer if the
4181 component is used as an actual for an unconstrained formal.
4183 Likewise for record types: even if a specific record subtype is
4184 passed by copy, the parent type might be passed by ref (e.g. if
4185 it's of variable size) and we might take the address of a child
4186 component to pass to a parent formal. We have no way to check
4187 for such conditions here. */
4188 if (AGGREGATE_TYPE_P (gnu_type))
/* Perform final processing on global variables.  Called once at the end of
   compilation, from the language hook.  */

void
gnat_write_global_declarations (void)
{
  /* Proceed to optimize and emit assembly.
     FIXME: shouldn't be the front end's responsibility to call this.  */
  cgraph_optimize ();

  /* Emit debug info for all global declarations accumulated so far in
     the GC-rooted global_decls vector.  */
  emit_debug_global_declarations (VEC_address (tree, global_decls),
				  VEC_length (tree, global_decls));
}
4208 /* ************************************************************************
4209 * * GCC builtins support *
4210 * ************************************************************************ */
4212 /* The general scheme is fairly simple:
4214 For each builtin function/type to be declared, gnat_install_builtins calls
4215 internal facilities which eventually get to gnat_push_decl, which in turn
4216 tracks the so declared builtin function decls in the 'builtin_decls' global
4217 datastructure. When an Intrinsic subprogram declaration is processed, we
4218 search this global datastructure to retrieve the associated BUILT_IN DECL
4221 /* Search the chain of currently available builtin declarations for a node
4222 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
4223 found, if any, or NULL_TREE otherwise. */
4225 builtin_decl_for (tree name)
4230 for (i = 0; VEC_iterate(tree, builtin_decls, i, decl); i++)
4231 if (DECL_NAME (decl) == name)
4237 /* The code below eventually exposes gnat_install_builtins, which declares
4238 the builtin types and functions we might need, either internally or as
4239 user accessible facilities.
4241 ??? This is a first implementation shot, still in rough shape. It is
4242 heavily inspired from the "C" family implementation, with chunks copied
4243 verbatim from there.
4245 Two obvious TODO candidates are
4246 o Use a more efficient name/decl mapping scheme
4247 o Devise a middle-end infrastructure to avoid having to copy
4248 pieces between front-ends. */
4250 /* ----------------------------------------------------------------------- *
4251 * BUILTIN ELEMENTARY TYPES *
4252 * ----------------------------------------------------------------------- */
/* Standard data types to be used in builtin argument declarations.  Mirrors
   the small subset of the C front-end's c_tree_index that builtins need.  */

enum c_tree_index
{
    CTI_SIGNED_SIZE_TYPE, /* For format checking only.  */
    CTI_STRING_TYPE,
    CTI_CONST_STRING_TYPE,

    CTI_MAX
};

/* Storage for the type nodes above; the macros below give them the names
   the C family code uses.  */
static tree c_global_trees[CTI_MAX];

#define signed_size_type_node	c_global_trees[CTI_SIGNED_SIZE_TYPE]
#define string_type_node	c_global_trees[CTI_STRING_TYPE]
#define const_string_type_node	c_global_trees[CTI_CONST_STRING_TYPE]

/* ??? In addition to some attribute handlers, we currently don't support a
   (small) number of builtin-types, which in turn inhibits support for a
   number of builtin functions.  Mapping them to void_type_node makes any
   builtin that uses them unavailable rather than miscompiled.  */
#define wint_type_node    void_type_node
#define intmax_type_node  void_type_node
#define uintmax_type_node void_type_node
4278 /* Build the void_list_node (void_type_node having been created). */
4281 build_void_list_node (void)
4283 tree t = build_tree_list (NULL_TREE, void_type_node);
4287 /* Used to help initialize the builtin-types.def table. When a type of
4288 the correct size doesn't exist, use error_mark_node instead of NULL.
4289 The later results in segfaults even when a decl using the type doesn't
4293 builtin_type_for_size (int size, bool unsignedp)
4295 tree type = lang_hooks.types.type_for_size (size, unsignedp);
4296 return type ? type : error_mark_node;
4299 /* Build/push the elementary type decls that builtin functions/types
4303 install_builtin_elementary_types (void)
4305 signed_size_type_node = size_type_node;
4306 pid_type_node = integer_type_node;
4307 void_list_node = build_void_list_node ();
4309 string_type_node = build_pointer_type (char_type_node);
4310 const_string_type_node
4311 = build_pointer_type (build_qualified_type
4312 (char_type_node, TYPE_QUAL_CONST));
4315 /* ----------------------------------------------------------------------- *
4316 * BUILTIN FUNCTION TYPES *
4317 * ----------------------------------------------------------------------- */
4319 /* Now, builtin function types per se. */
4323 #define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
4324 #define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
4325 #define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
4326 #define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
4327 #define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
4328 #define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
4329 #define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
4330 #define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6) NAME,
4331 #define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7) NAME,
4332 #define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
4333 #define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
4334 #define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
4335 #define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
4336 #define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
4337 #define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG6) \
4339 #define DEF_POINTER_TYPE(NAME, TYPE) NAME,
4340 #include "builtin-types.def"
4341 #undef DEF_PRIMITIVE_TYPE
4342 #undef DEF_FUNCTION_TYPE_0
4343 #undef DEF_FUNCTION_TYPE_1
4344 #undef DEF_FUNCTION_TYPE_2
4345 #undef DEF_FUNCTION_TYPE_3
4346 #undef DEF_FUNCTION_TYPE_4
4347 #undef DEF_FUNCTION_TYPE_5
4348 #undef DEF_FUNCTION_TYPE_6
4349 #undef DEF_FUNCTION_TYPE_7
4350 #undef DEF_FUNCTION_TYPE_VAR_0
4351 #undef DEF_FUNCTION_TYPE_VAR_1
4352 #undef DEF_FUNCTION_TYPE_VAR_2
4353 #undef DEF_FUNCTION_TYPE_VAR_3
4354 #undef DEF_FUNCTION_TYPE_VAR_4
4355 #undef DEF_FUNCTION_TYPE_VAR_5
4356 #undef DEF_POINTER_TYPE
4360 typedef enum c_builtin_type builtin_type;
/* A temporary array used in communication with def_fn_type.  Indexed by
   c_builtin_type; the extra BT_LAST slot is set to NULL_TREE at the end
   of install_builtin_function_types.  */
static GTY(()) tree builtin_types[(int) BT_LAST + 1];
4365 /* A helper function for install_builtin_types. Build function type
4366 for DEF with return type RET and N arguments. If VAR is true, then the
4367 function should be variadic after those N arguments.
4369 Takes special care not to ICE if any of the types involved are
4370 error_mark_node, which indicates that said type is not in fact available
4371 (see builtin_type_for_size). In which case the function type as a whole
4372 should be error_mark_node. */
4375 def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
4377 tree args = NULL, t;
4382 for (i = 0; i < n; ++i)
4384 builtin_type a = va_arg (list, builtin_type);
4385 t = builtin_types[a];
4386 if (t == error_mark_node)
4388 args = tree_cons (NULL_TREE, t, args);
4392 args = nreverse (args);
4394 args = chainon (args, void_list_node);
4396 t = builtin_types[ret];
4397 if (t == error_mark_node)
4399 t = build_function_type (t, args);
4402 builtin_types[def] = t;
4405 /* Build the builtin function types and install them in the builtin_types
4406 array for later use in builtin function decls. */
4409 install_builtin_function_types (void)
4411 tree va_list_ref_type_node;
4412 tree va_list_arg_type_node;
4414 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4416 va_list_arg_type_node = va_list_ref_type_node =
4417 build_pointer_type (TREE_TYPE (va_list_type_node));
4421 va_list_arg_type_node = va_list_type_node;
4422 va_list_ref_type_node = build_reference_type (va_list_type_node);
4425 #define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
4426 builtin_types[ENUM] = VALUE;
4427 #define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
4428 def_fn_type (ENUM, RETURN, 0, 0);
4429 #define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
4430 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
4431 #define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
4432 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
4433 #define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
4434 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
4435 #define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
4436 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
4437 #define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
4438 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
4439 #define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
4441 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
4442 #define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
4444 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
4445 #define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
4446 def_fn_type (ENUM, RETURN, 1, 0);
4447 #define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
4448 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
4449 #define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
4450 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
4451 #define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
4452 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
4453 #define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
4454 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
4455 #define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
4456 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
4457 #define DEF_POINTER_TYPE(ENUM, TYPE) \
4458 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
4460 #include "builtin-types.def"
4462 #undef DEF_PRIMITIVE_TYPE
4463 #undef DEF_FUNCTION_TYPE_1
4464 #undef DEF_FUNCTION_TYPE_2
4465 #undef DEF_FUNCTION_TYPE_3
4466 #undef DEF_FUNCTION_TYPE_4
4467 #undef DEF_FUNCTION_TYPE_5
4468 #undef DEF_FUNCTION_TYPE_6
4469 #undef DEF_FUNCTION_TYPE_VAR_0
4470 #undef DEF_FUNCTION_TYPE_VAR_1
4471 #undef DEF_FUNCTION_TYPE_VAR_2
4472 #undef DEF_FUNCTION_TYPE_VAR_3
4473 #undef DEF_FUNCTION_TYPE_VAR_4
4474 #undef DEF_FUNCTION_TYPE_VAR_5
4475 #undef DEF_POINTER_TYPE
4476 builtin_types[(int) BT_LAST] = NULL_TREE;
/* ----------------------------------------------------------------------- *
 *                            BUILTIN ATTRIBUTES                           *
 * ----------------------------------------------------------------------- */

/* Enumerate the attribute nodes declared in builtin-attrs.def; each DEF_*
   expands to a bare enumerator here.  */
enum built_in_attribute
{
#define DEF_ATTR_NULL_TREE(ENUM) ENUM,
#define DEF_ATTR_INT(ENUM, VALUE) ENUM,
#define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
#include "builtin-attrs.def"
#undef DEF_ATTR_NULL_TREE
#undef DEF_ATTR_INT
#undef DEF_ATTR_IDENT
#undef DEF_ATTR_TREE_LIST
  ATTR_LAST
};

/* Trees for the attributes above, filled in by install_builtin_attributes
   and consumed when builtin function decls are created.  */
static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
/* Build the attribute trees declared in builtin-attrs.def and store them
   in the built_in_attributes array.  */

static void
install_builtin_attributes (void)
{
  /* Fill in the built_in_attributes array.  The TREE_LIST entries may
     reference earlier entries, so evaluation order follows the .def file.  */
#define DEF_ATTR_NULL_TREE(ENUM)				\
  built_in_attributes[(int) ENUM] = NULL_TREE;
#define DEF_ATTR_INT(ENUM, VALUE)				\
  built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
#define DEF_ATTR_IDENT(ENUM, STRING)				\
  built_in_attributes[(int) ENUM] = get_identifier (STRING);
#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN)	\
  built_in_attributes[(int) ENUM]			\
    = tree_cons (built_in_attributes[(int) PURPOSE],	\
		 built_in_attributes[(int) VALUE],	\
		 built_in_attributes[(int) CHAIN]);
#include "builtin-attrs.def"
#undef DEF_ATTR_NULL_TREE
#undef DEF_ATTR_INT
#undef DEF_ATTR_IDENT
#undef DEF_ATTR_TREE_LIST
}
4521 /* Handle a "const" attribute; arguments as in
4522 struct attribute_spec.handler. */
4525 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
4526 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
4529 if (TREE_CODE (*node) == FUNCTION_DECL)
4530 TREE_READONLY (*node) = 1;
4532 *no_add_attrs = true;
4537 /* Handle a "nothrow" attribute; arguments as in
4538 struct attribute_spec.handler. */
4541 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
4542 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
4545 if (TREE_CODE (*node) == FUNCTION_DECL)
4546 TREE_NOTHROW (*node) = 1;
4548 *no_add_attrs = true;
4553 /* Handle a "pure" attribute; arguments as in
4554 struct attribute_spec.handler. */
4557 handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
4558 int ARG_UNUSED (flags), bool *no_add_attrs)
4560 if (TREE_CODE (*node) == FUNCTION_DECL)
4561 DECL_PURE_P (*node) = 1;
4562 /* ??? TODO: Support types. */
4565 warning (OPT_Wattributes, "%qE attribute ignored", name);
4566 *no_add_attrs = true;
4572 /* Handle a "no vops" attribute; arguments as in
4573 struct attribute_spec.handler. */
4576 handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
4577 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
4578 bool *ARG_UNUSED (no_add_attrs))
4580 gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
4581 DECL_IS_NOVOPS (*node) = 1;
4585 /* Helper for nonnull attribute handling; fetch the operand number
4586 from the attribute argument list. */
4589 get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
4591 /* Verify the arg number is a constant. */
4592 if (TREE_CODE (arg_num_expr) != INTEGER_CST
4593 || TREE_INT_CST_HIGH (arg_num_expr) != 0)
4596 *valp = TREE_INT_CST_LOW (arg_num_expr);
4600 /* Handle the "nonnull" attribute. */
4602 handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
4603 tree args, int ARG_UNUSED (flags),
4607 unsigned HOST_WIDE_INT attr_arg_num;
4609 /* If no arguments are specified, all pointer arguments should be
4610 non-null. Verify a full prototype is given so that the arguments
4611 will have the correct types when we actually check them later. */
4614 if (!TYPE_ARG_TYPES (type))
4616 error ("nonnull attribute without arguments on a non-prototype");
4617 *no_add_attrs = true;
4622 /* Argument list specified. Verify that each argument number references
4623 a pointer argument. */
4624 for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
4627 unsigned HOST_WIDE_INT arg_num = 0, ck_num;
4629 if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
4631 error ("nonnull argument has invalid operand number (argument %lu)",
4632 (unsigned long) attr_arg_num);
4633 *no_add_attrs = true;
4637 argument = TYPE_ARG_TYPES (type);
4640 for (ck_num = 1; ; ck_num++)
4642 if (!argument || ck_num == arg_num)
4644 argument = TREE_CHAIN (argument);
4648 || TREE_CODE (TREE_VALUE (argument)) == VOID_TYPE)
4650 error ("nonnull argument with out-of-range operand number (argument %lu, operand %lu)",
4651 (unsigned long) attr_arg_num, (unsigned long) arg_num);
4652 *no_add_attrs = true;
4656 if (TREE_CODE (TREE_VALUE (argument)) != POINTER_TYPE)
4658 error ("nonnull argument references non-pointer operand (argument %lu, operand %lu)",
4659 (unsigned long) attr_arg_num, (unsigned long) arg_num);
4660 *no_add_attrs = true;
/* Handle a "sentinel" attribute; arguments as in
   struct attribute_spec.handler.  The attribute only makes sense on a
   variadic prototype; ARGS may carry an optional position operand.  */

static tree
handle_sentinel_attribute (tree *node, tree name, tree args,
			   int ARG_UNUSED (flags), bool *no_add_attrs)
{
  tree params = TYPE_ARG_TYPES (*node);

  /* No parameter list at all: we cannot tell where the sentinel goes.  */
  if (!params)
    {
      warning (OPT_Wattributes,
	       "%qE attribute requires prototypes with named arguments", name);
      *no_add_attrs = true;
    }
  else
    {
      /* Walk to the last parameter entry; a non-variadic prototype ends
	 in a void entry, which is how we detect the error case below.  */
      while (TREE_CHAIN (params))
	params = TREE_CHAIN (params);

      if (VOID_TYPE_P (TREE_VALUE (params)))
	{
	  warning (OPT_Wattributes,
		   "%qE attribute only applies to variadic functions", name);
	  *no_add_attrs = true;
	}
    }

  /* Validate the optional position operand: it must be a non-negative
     integer constant.  */
  if (args)
    {
      tree position = TREE_VALUE (args);

      if (TREE_CODE (position) != INTEGER_CST)
	{
	  warning (0, "requested position is not an integer constant");
	  *no_add_attrs = true;
	}
      else if (tree_int_cst_lt (position, integer_zero_node))
	{
	  warning (0, "requested position is less than zero");
	  *no_add_attrs = true;
	}
    }

  return NULL_TREE;
}
/* Handle a "noreturn" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
			   int ARG_UNUSED (flags), bool *no_add_attrs)
{
  tree type = TREE_TYPE (*node);

  /* See FIXME comment in c_common_attribute_table.  */
  if (TREE_CODE (*node) == FUNCTION_DECL)
    /* TREE_THIS_VOLATILE on a FUNCTION_DECL is GCC's encoding of
       "this function does not return".  */
    TREE_THIS_VOLATILE (*node) = 1;
  else if (TREE_CODE (type) == POINTER_TYPE
	   && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
    /* For a pointer-to-function, rebuild the pointer with a "volatile"
       (i.e. noreturn) variant of the pointed-to function type.  */
    TREE_TYPE (*node)
      = build_pointer_type
	(build_type_variant (TREE_TYPE (type),
			     TYPE_READONLY (TREE_TYPE (type)), 1));
  else
    {
      warning (OPT_Wattributes, "%qE attribute ignored", name);
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
4745 /* Handle a "malloc" attribute; arguments as in
4746 struct attribute_spec.handler. */
4749 handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
4750 int ARG_UNUSED (flags), bool *no_add_attrs)
4752 if (TREE_CODE (*node) == FUNCTION_DECL
4753 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
4754 DECL_IS_MALLOC (*node) = 1;
4757 warning (OPT_Wattributes, "%qE attribute ignored", name);
4758 *no_add_attrs = true;
4764 /* Fake handler for attributes we don't properly support. */
4767 fake_attribute_handler (tree * ARG_UNUSED (node),
4768 tree ARG_UNUSED (name),
4769 tree ARG_UNUSED (args),
4770 int ARG_UNUSED (flags),
4771 bool * ARG_UNUSED (no_add_attrs))
4776 /* Handle a "type_generic" attribute. */
4779 handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
4780 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
4781 bool * ARG_UNUSED (no_add_attrs))
4785 /* Ensure we have a function type. */
4786 gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);
4788 params = TYPE_ARG_TYPES (*node);
4789 while (params && ! VOID_TYPE_P (TREE_VALUE (params)))
4790 params = TREE_CHAIN (params);
4792 /* Ensure we have a variadic function. */
4793 gcc_assert (!params);
4798 /* ----------------------------------------------------------------------- *
4799 * BUILTIN FUNCTIONS *
4800 * ----------------------------------------------------------------------- */
/* Worker for DEF_BUILTIN.  Possibly define a builtin function with one or two
   names.  Does not declare a non-__builtin_ function if flag_no_builtin, or
   if nonansi_p and flag_no_nonansi_builtin.

   FNCODE/NAME/FNCLASS identify the builtin; FNTYPE and LIBTYPE are the
   types for the __builtin_ and plain-named variants respectively.  If
   BOTH_P, the plain (library) name is declared too; if FALLBACK_P, the
   __builtin_ decl falls back to the library function at expansion time.
   FNATTRS are attached to the decl(s); IMPLICIT_P records the decl as
   implicitly usable by the middle-end.  */

static void
def_builtin_1 (enum built_in_function fncode,
	       const char *name,
	       enum built_in_class fnclass,
	       tree fntype, tree libtype,
	       bool both_p, bool fallback_p,
	       bool nonansi_p ATTRIBUTE_UNUSED,
	       tree fnattrs, bool implicit_p)
{
  tree decl;
  const char *libname;

  /* Preserve an already installed decl.  It most likely was setup in advance
     (e.g. as part of the internal builtins) for specific reasons.  */
  if (built_in_decls[(int) fncode] != NULL_TREE)
    return;

  /* Only a "__builtin_"-prefixed NAME may ask for a second name or a
     library fallback, since both are derived by stripping that prefix.  */
  gcc_assert ((!both_p && !fallback_p)
	      || !strncmp (name, "__builtin_",
			   strlen ("__builtin_")));

  libname = name + strlen ("__builtin_");
  decl = add_builtin_function (name, fntype, fncode, fnclass,
			       (fallback_p ? libname : NULL),
			       fnattrs);
  if (both_p)
    /* ??? This is normally further controlled by command-line options
       like -fno-builtin, but we don't have them for Ada.  */
    add_builtin_function (libname, libtype, fncode, fnclass,
			  NULL, fnattrs);

  built_in_decls[(int) fncode] = decl;
  if (implicit_p)
    implicit_built_in_decls[(int) fncode] = decl;
}
/* Dialect flags mimicking the C front-end's; presumably referenced by the
   COND field of some builtins.def entries (TODO confirm).  Always zero for
   Ada, so those entries stay disabled.  */
static int flag_isoc94 = 0;
static int flag_isoc99 = 0;
/* Install what the common builtins.def offers.  Each DEF_BUILTIN entry
   expands to a guarded call to def_builtin_1, pulling the function type
   and attributes from the builtin_types and built_in_attributes arrays
   filled in earlier.  */

static void
install_builtin_functions (void)
{
#define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
		    NONANSI_P, ATTRS, IMPLICIT, COND)			\
  if (NAME && COND)							\
    def_builtin_1 (ENUM, NAME, CLASS,					\
		   builtin_types[(int) TYPE],				\
		   builtin_types[(int) LIBTYPE],			\
		   BOTH_P, FALLBACK_P, NONANSI_P,			\
		   built_in_attributes[(int) ATTRS], IMPLICIT);
#include "builtins.def"
#undef DEF_BUILTIN
}
/* ----------------------------------------------------------------------- *
 *                      BUILTIN FUNCTIONS INSTALLATION                     *
 * ----------------------------------------------------------------------- */
/* Install the builtin functions we might need.  This is the entry point
   into the machinery above.  Order matters: the elementary types, function
   types and attribute trees must exist before any function decl that
   refers to them is created.  */

void
gnat_install_builtins (void)
{
  install_builtin_elementary_types ();
  install_builtin_function_types ();
  install_builtin_attributes ();

  /* Install builtins used by generic middle-end pieces first.  Some of these
     know about internal specificities and control attributes accordingly, for
     instance __builtin_alloca vs no-throw and -fstack-check.  We will ignore
     the generic definition from builtins.def.  */
  build_common_builtin_nodes ();

  /* Now, install the target specific builtins, such as the AltiVec family on
     ppc, and the common set as exposed by builtins.def.  */
  targetm.init_builtins ();
  install_builtin_functions ();
}
4887 #include "gt-ada-utils.h"
4888 #include "gtype-ada.h"