1 /****************************************************************************
3 * GNAT COMPILER COMPONENTS *
7 * C Implementation File *
9 * Copyright (C) 1992-2008, Free Software Foundation, Inc. *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
24 ****************************************************************************/
28 #include "coretypes.h"
41 #include "tree-inline.h"
42 #include "tree-gimple.h"
43 #include "tree-dump.h"
44 #include "pointer-set.h"
60 #ifndef MAX_FIXED_MODE_SIZE
/* Fall back to the bit width of DImode when the target does not define
   a maximum fixed mode size.  */
61 #define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
64 #ifndef MAX_BITS_PER_WORD
/* Fall back to the target word width when not explicitly overridden.
   NOTE(review): the matching #endif lines are not visible in this
   listing -- confirm against the full file.  */
65 #define MAX_BITS_PER_WORD BITS_PER_WORD
68 /* If nonzero, pretend we are allocating at global level. */
/* NOTE(review): the variable declaration that the comment above
   describes is on a line not visible in this listing.  */
71 /* Tree nodes for the various types and decls we create. */
72 tree gnat_std_decls[(int) ADT_LAST];
74 /* Functions to call for each of the possible raise reasons. */
75 tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
77 /* Forward declarations for handlers of attributes. */
78 static tree handle_const_attribute (tree *, tree, tree, int, bool *);
79 static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
81 /* Table of machine-independent internal attributes for Ada. We support
82 this minimal set of attributes to accommodate the Alpha back-end which
83 unconditionally puts them on its builtins. */
84 const struct attribute_spec gnat_internal_attribute_table[] =
86 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
87 { "const", 0, 0, true, false, false, handle_const_attribute },
88 { "nothrow", 0, 0, true, false, false, handle_nothrow_attribute },
/* All-NULL entry terminating the table.  */
89 { NULL, 0, 0, false, false, false, NULL }
92 /* Associates a GNAT tree node to a GCC tree node. It is used in
93 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
94 of `save_gnu_tree' for more info. */
95 static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
/* Accessors for the table above; entries are indexed by the GNAT node
   id offset from First_Node_Id.  */
97 #define GET_GNU_TREE(GNAT_ENTITY) \
98 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
100 #define SET_GNU_TREE(GNAT_ENTITY,VAL) \
101 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
103 #define PRESENT_GNU_TREE(GNAT_ENTITY) \
104 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
106 /* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
107 static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;
/* Accessors for the dummy table, indexed like associate_gnat_to_gnu.  */
109 #define GET_DUMMY_NODE(GNAT_ENTITY) \
110 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
112 #define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
113 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
115 #define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
116 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
/* Caches for scalar types, so each (precision, signedness) pair and each
   float mode is built only once.  */
118 /* This variable keeps a table for types for each precision so that we only
119 allocate each of them once. Signed and unsigned types are kept separate.
121 Note that these types are only used when fold-const requests something
122 special. Perhaps we should NOT share these types; we'll see how it
124 static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
126 /* Likewise for float types, but record these by mode. */
127 static GTY(()) tree float_types[NUM_MACHINE_MODES];
129 /* For each binding contour we allocate a binding_level structure to indicate
130 the binding depth. */
132 struct gnat_binding_level GTY((chain_next ("%h.chain")))
134 /* The binding level containing this one (the enclosing binding level). */
135 struct gnat_binding_level *chain;
136 /* The BLOCK node for this level. */
/* NOTE(review): the declarations of the `block' and `jmpbuf_decl' fields
   referenced elsewhere in this file are on lines not visible in this
   listing -- confirm against the full file.  */
138 /* If nonzero, the setjmp buffer that needs to be updated for any
139 variable-sized definition within this context. */
143 /* The binding level currently in effect. */
144 static GTY(()) struct gnat_binding_level *current_binding_level;
146 /* A chain of gnat_binding_level structures awaiting reuse. */
147 static GTY((deletable)) struct gnat_binding_level *free_binding_level;
149 /* An array of global declarations. */
150 static GTY(()) VEC(tree,gc) *global_decls;
152 /* An array of builtin declarations. */
153 static GTY(()) VEC(tree,gc) *builtin_decls;
155 /* An array of global renaming pointers. */
156 static GTY(()) VEC(tree,gc) *global_renaming_pointers;
158 /* A chain of unused BLOCK nodes. */
/* GTY((deletable)): this is only a cache, so the garbage collector may
   drop it at collection time.  */
159 static GTY((deletable)) tree free_block_chain;
/* Forward declarations of local subprograms defined later in this file.  */
161 static void gnat_install_builtins (void);
162 static tree merge_sizes (tree, tree, tree, bool, bool);
163 static tree compute_related_constant (tree, tree);
164 static tree split_plus (tree, tree *);
165 static void gnat_gimplify_function (tree);
166 static tree float_type_for_precision (int, enum machine_mode);
167 static tree convert_to_fat_pointer (tree, tree);
168 static tree convert_to_thin_pointer (tree, tree);
169 static tree make_descriptor_field (const char *,tree, tree, tree);
170 static bool potential_alignment_gap (tree, tree, tree);
172 /* Initialize the association of GNAT nodes to GCC trees. */
175 init_gnat_to_gnu (void)
/* Allocate one zero-initialized, GC-managed slot per GNAT node.  */
177 associate_gnat_to_gnu
178 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
181 /* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
182 which is to be associated with GNAT_ENTITY. Such GCC tree node is always
183 a ..._DECL node. If NO_CHECK is nonzero, the latter check is suppressed.
185 If GNU_DECL is zero, a previous association is to be reset. */
188 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
190 /* Check that GNAT_ENTITY is not already defined and that it is being set
191 to something which is a decl. Raise gigi 401 if not. Usually, this
192 means GNAT_ENTITY is defined twice, but occasionally is due to some
194 gcc_assert (!(gnu_decl
195 && (PRESENT_GNU_TREE (gnat_entity)
196 || (!no_check && !DECL_P (gnu_decl)))));
/* Record (or clear, when GNU_DECL is zero) the association.  */
198 SET_GNU_TREE (gnat_entity, gnu_decl);
201 /* GNAT_ENTITY is a GNAT tree node for a defining identifier.
202 Return the ..._DECL node that was associated with it. If there is no tree
203 node associated with GNAT_ENTITY, abort.
205 In some cases, such as delayed elaboration or expressions that need to
206 be elaborated only once, GNAT_ENTITY is really not an entity. */
209 get_gnu_tree (Entity_Id gnat_entity)
/* Aborts (via gcc_assert) if no association was recorded.  */
211 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
212 return GET_GNU_TREE (gnat_entity);
215 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
218 present_gnu_tree (Entity_Id gnat_entity)
/* Pure table lookup; never allocates or aborts.  */
220 return PRESENT_GNU_TREE (gnat_entity);
223 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
226 init_dummy_type (void)
/* Allocate one zero-initialized, GC-managed slot per GNAT node.
   NOTE(review): the assignment target (presumably dummy_node_table) is
   on a line not visible in this listing -- confirm.  */
229 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
232 /* Make a dummy type corresponding to GNAT_TYPE. */
235 make_dummy_type (Entity_Id gnat_type)
237 Entity_Id gnat_underlying = Gigi_Equivalent_Type (gnat_type);
240 /* If there is an equivalent type, get its underlying type. */
241 if (Present (gnat_underlying))
242 gnat_underlying = Underlying_Type (gnat_underlying);
244 /* If there was no equivalent type (can only happen when just annotating
245 types) or underlying type, go back to the original type. */
246 if (No (gnat_underlying))
247 gnat_underlying = gnat_type;
249 /* If it there already a dummy type, use that one. Else make one. */
250 if (PRESENT_DUMMY_NODE (gnat_underlying))
251 return GET_DUMMY_NODE (gnat_underlying);
253 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
255 gnu_type = make_node (Is_Record_Type (gnat_underlying)
256 ? tree_code_for_record_type (gnat_underlying)
258 TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
259 TYPE_DUMMY_P (gnu_type) = 1;
260 if (AGGREGATE_TYPE_P (gnu_type))
262 TYPE_STUB_DECL (gnu_type) = build_decl (TYPE_DECL, NULL_TREE, gnu_type);
263 TYPE_BY_REFERENCE_P (gnu_type) = Is_By_Reference_Type (gnat_type);
/* Cache the new dummy so later calls for the same underlying entity
   return it.  */
266 SET_DUMMY_NODE (gnat_underlying, gnu_type);
271 /* Return nonzero if we are currently in the global binding level. */
274 global_bindings_p (void)
/* Nonzero (-1) when globals are forced or we are outside any function.  */
276 return ((force_global || !current_function_decl) ? -1 : 0);
279 /* Enter a new binding level. */
/* NOTE(review): the function signature line (presumably gnat_pushlevel)
   is not visible in this listing -- confirm against the full file.  */
284 struct gnat_binding_level *newlevel = NULL;
286 /* Reuse a struct for this binding level, if there is one. */
287 if (free_binding_level)
289 newlevel = free_binding_level;
290 free_binding_level = free_binding_level->chain;
294 = (struct gnat_binding_level *)
295 ggc_alloc (sizeof (struct gnat_binding_level));
297 /* Use a free BLOCK, if any; otherwise, allocate one. */
298 if (free_block_chain)
300 newlevel->block = free_block_chain;
301 free_block_chain = BLOCK_CHAIN (free_block_chain);
302 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
305 newlevel->block = make_node (BLOCK);
307 /* Point the BLOCK we just made to its parent. */
308 if (current_binding_level)
309 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
/* Reset the BLOCK's contents and mark it used.  */
311 BLOCK_VARS (newlevel->block) = BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
312 TREE_USED (newlevel->block) = 1;
314 /* Add this level to the front of the chain (stack) of levels that are
316 newlevel->chain = current_binding_level;
317 newlevel->jmpbuf_decl = NULL_TREE;
318 current_binding_level = newlevel;
321 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
322 and point FNDECL to this BLOCK. */
325 set_current_block_context (tree fndecl)
/* Bidirectional link: the BLOCK's context is the function, and the
   function's DECL_INITIAL is the BLOCK.  */
327 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
328 DECL_INITIAL (fndecl) = current_binding_level->block;
331 /* Set the jmpbuf_decl for the current binding level to DECL. */
334 set_block_jmpbuf_decl (tree decl)
336 current_binding_level->jmpbuf_decl = decl;
339 /* Get the jmpbuf_decl, if any, for the current binding level. */
342 get_block_jmpbuf_decl ()
/* NULL_TREE when no setjmp buffer was recorded for this level.  */
344 return current_binding_level->jmpbuf_decl;
347 /* Exit a binding level. Set any BLOCK into the current code group. */
/* NOTE(review): the function signature line (presumably gnat_poplevel)
   is not visible in this listing -- confirm against the full file.  */
352 struct gnat_binding_level *level = current_binding_level;
353 tree block = level->block;
/* Lists were built in reverse; restore declaration order.  */
355 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
356 BLOCK_SUBBLOCKS (block) = nreverse (BLOCK_SUBBLOCKS (block));
358 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
359 are no variables free the block and merge its subblocks into those of its
360 parent block. Otherwise, add it to the list of its parent. */
361 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
363 else if (BLOCK_VARS (block) == NULL_TREE)
365 BLOCK_SUBBLOCKS (level->chain->block)
366 = chainon (BLOCK_SUBBLOCKS (block),
367 BLOCK_SUBBLOCKS (level->chain->block))
368 BLOCK_CHAIN (block) = free_block_chain;
369 free_block_chain = block;
373 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
374 BLOCK_SUBBLOCKS (level->chain->block) = block;
375 TREE_USED (block) = 1;
376 set_block_for_group (block);
379 /* Free this binding structure. */
/* Pop the level and push its struct onto the free list for reuse.  */
380 current_binding_level = level->chain;
381 level->chain = free_binding_level;
382 free_binding_level = level;
386 /* Records a ..._DECL node DECL as belonging to the current lexical scope
387 and uses GNAT_NODE for location information and propagating flags. */
390 gnat_pushdecl (tree decl, Node_Id gnat_node)
392 /* If at top level, there is no context. But PARM_DECLs always go in the
393 level of its function. */
394 if (global_bindings_p () && TREE_CODE (decl) != PARM_DECL)
395 DECL_CONTEXT (decl) = 0;
398 DECL_CONTEXT (decl) = current_function_decl;
400 /* Functions imported in another function are not really nested. */
401 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_PUBLIC (decl))
402 DECL_NO_STATIC_CHAIN (decl) = 1;
/* Suppress warnings when no GNAT node is given or warnings are off.  */
405 TREE_NO_WARNING (decl) = (gnat_node == Empty || Warnings_Off (gnat_node));
407 /* Set the location of DECL and emit a declaration for it. */
408 if (Present (gnat_node))
409 Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
410 add_decl_expr (decl, gnat_node);
412 /* Put the declaration on the list. The list of declarations is in reverse
413 order. The list will be reversed later. Put global variables in the
414 globals list and builtin functions in a dedicated list to speed up
415 further lookups. Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into
416 the list, as they will cause trouble with the debugger and aren't needed
418 if (TREE_CODE (decl) != TYPE_DECL
419 || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
421 if (global_bindings_p ())
423 VEC_safe_push (tree, gc, global_decls, decl);
425 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
426 VEC_safe_push (tree, gc, builtin_decls, decl);
/* Otherwise chain the decl onto the current binding level's BLOCK.  */
430 TREE_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
431 BLOCK_VARS (current_binding_level->block) = decl;
435 /* For the declaration of a type, set its name if it either is not already
436 set, was set to an IDENTIFIER_NODE, indicating an internal name,
437 or if the previous type name was not derived from a source name.
438 We'd rather have the type named with a real name and all the pointer
439 types to the same object have the same POINTER_TYPE node. Code in the
440 equivalent function of c-decl.c makes a copy of the type node here, but
441 that may cause us trouble with incomplete types. We make an exception
442 for fat pointer types because the compiler automatically builds them
443 for unconstrained array types and the debugger uses them to represent
444 both these and pointers to these. */
445 if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
447 tree t = TREE_TYPE (decl);
449 if (!TYPE_NAME (t) || TREE_CODE (TYPE_NAME (t)) == IDENTIFIER_NODE)
451 else if (TYPE_FAT_POINTER_P (t))
/* Name a variant copy so the original fat pointer keeps its identity.  */
453 tree tt = build_variant_type_copy (t);
454 TYPE_NAME (tt) = decl;
455 TREE_USED (tt) = TREE_USED (t);
456 TREE_TYPE (decl) = tt;
457 DECL_ORIGINAL_TYPE (decl) = t;
460 else if (DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
465 /* Propagate the name to all the variants. This is needed for
466 the type qualifiers machinery to work properly. */
468 for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
469 TYPE_NAME (t) = decl;
473 /* Do little here. Set up the standard declarations later after the
474 front end has been run. */
477 gnat_init_decl_processing (void)
479 /* Make the binding_level structure for global names. */
480 current_function_decl = 0;
481 current_binding_level = 0;
482 free_binding_level = 0;
485 build_common_tree_nodes (true, true);
487 /* In Ada, we use a signed type for SIZETYPE. Use the signed type
488 corresponding to the size of Pmode. In most cases when ptr_mode and
489 Pmode differ, C will use the width of ptr_mode as sizetype. But we get
490 far better code using the width of Pmode. Make this here since we need
491 this before we can expand the GNAT types. */
492 size_type_node = gnat_type_for_size (GET_MODE_BITSIZE (Pmode), 0);
493 set_sizetype (size_type_node);
494 build_common_tree_nodes_2 (0);
/* `void *' type used throughout gigi for generic pointers.  */
496 ptr_void_type_node = build_pointer_type (void_type_node);
498 gnat_install_builtins ();
501 /* Install the builtin functions we might need. */
504 gnat_install_builtins ()
506 /* Builtins used by generic middle-end optimizers. */
507 build_common_builtin_nodes ();
509 /* Target specific builtins, such as the AltiVec family on ppc. */
510 targetm.init_builtins ();
513 /* Create the predefined scalar types such as `integer_type_node' needed
514 in the gcc back-end and initialize the global binding level. */
/* NOTE(review): this listing omits several lines of the function,
   including the targets of some of the `= create_subprog_decl' and
   `= create_var_decl' assignments below -- confirm against the full
   file before editing.  */
517 init_gigi_decls (tree long_long_float_type, tree exception_type)
522 /* Set the types that GCC and Gigi use from the front end. We would like
523 to do this for char_type_node, but it needs to correspond to the C
525 if (TREE_CODE (TREE_TYPE (long_long_float_type)) == INTEGER_TYPE)
527 /* In this case, the builtin floating point types are VAX float,
528 so make up a type for use. */
529 longest_float_type_node = make_node (REAL_TYPE);
530 TYPE_PRECISION (longest_float_type_node) = LONG_DOUBLE_TYPE_SIZE;
531 layout_type (longest_float_type_node);
532 create_type_decl (get_identifier ("longest float type"),
533 longest_float_type_node, NULL, false, true, Empty);
536 longest_float_type_node = TREE_TYPE (long_long_float_type);
538 except_type_node = TREE_TYPE (exception_type);
540 unsigned_type_node = gnat_type_for_size (INT_TYPE_SIZE, 1);
541 create_type_decl (get_identifier ("unsigned int"), unsigned_type_node,
542 NULL, false, true, Empty);
544 void_type_decl_node = create_type_decl (get_identifier ("void"),
545 void_type_node, NULL, false, true,
548 void_ftype = build_function_type (void_type_node, NULL_TREE);
549 ptr_void_ftype = build_pointer_type (void_ftype);
551 /* Now declare runtime functions. */
552 endlink = tree_cons (NULL_TREE, void_type_node, NULL_TREE);
554 /* malloc is a function declaration tree for a function to allocate
556 malloc_decl = create_subprog_decl (get_identifier ("__gnat_malloc"),
558 build_function_type (ptr_void_type_node,
559 tree_cons (NULL_TREE,
562 NULL_TREE, false, true, true, NULL,
/* Lets the optimizers know the result points to fresh storage.  */
564 DECL_IS_MALLOC (malloc_decl) = 1;
566 /* free is a function declaration tree for a function to free memory. */
568 = create_subprog_decl (get_identifier ("__gnat_free"), NULL_TREE,
569 build_function_type (void_type_node,
570 tree_cons (NULL_TREE,
573 NULL_TREE, false, true, true, NULL, Empty);
575 /* Make the types and functions used for exception processing. */
577 = build_array_type (gnat_type_for_mode (Pmode, 0),
578 build_index_type (build_int_cst (NULL_TREE, 5)));
579 create_type_decl (get_identifier ("JMPBUF_T"), jmpbuf_type, NULL,
581 jmpbuf_ptr_type = build_pointer_type (jmpbuf_type);
583 /* Functions to get and set the jumpbuf pointer for the current thread. */
585 = create_subprog_decl
586 (get_identifier ("system__soft_links__get_jmpbuf_address_soft"),
587 NULL_TREE, build_function_type (jmpbuf_ptr_type, NULL_TREE),
588 NULL_TREE, false, true, true, NULL, Empty);
589 /* Avoid creating superfluous edges to __builtin_setjmp receivers. */
590 DECL_IS_PURE (get_jmpbuf_decl) = 1;
593 = create_subprog_decl
594 (get_identifier ("system__soft_links__set_jmpbuf_address_soft"),
596 build_function_type (void_type_node,
597 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
598 NULL_TREE, false, true, true, NULL, Empty);
600 /* Function to get the current exception. */
602 = create_subprog_decl
603 (get_identifier ("system__soft_links__get_gnat_exception"),
605 build_function_type (build_pointer_type (except_type_node), NULL_TREE),
606 NULL_TREE, false, true, true, NULL, Empty);
607 /* Avoid creating superfluous edges to __builtin_setjmp receivers. */
608 DECL_IS_PURE (get_excptr_decl) = 1;
610 /* Functions that raise exceptions. */
612 = create_subprog_decl
613 (get_identifier ("__gnat_raise_nodefer_with_msg"), NULL_TREE,
614 build_function_type (void_type_node,
615 tree_cons (NULL_TREE,
616 build_pointer_type (except_type_node),
618 NULL_TREE, false, true, true, NULL, Empty);
620 /* Dummy objects to materialize "others" and "all others" in the exception
621 tables. These are exported by a-exexpr.adb, so see this unit for the
625 = create_var_decl (get_identifier ("OTHERS"),
626 get_identifier ("__gnat_others_value"),
627 integer_type_node, 0, 1, 0, 1, 1, 0, Empty);
630 = create_var_decl (get_identifier ("ALL_OTHERS"),
631 get_identifier ("__gnat_all_others_value"),
632 integer_type_node, 0, 1, 0, 1, 1, 0, Empty);
634 /* Hooks to call when entering/leaving an exception handler. */
636 = create_subprog_decl (get_identifier ("__gnat_begin_handler"), NULL_TREE,
637 build_function_type (void_type_node,
638 tree_cons (NULL_TREE,
641 NULL_TREE, false, true, true, NULL, Empty);
644 = create_subprog_decl (get_identifier ("__gnat_end_handler"), NULL_TREE,
645 build_function_type (void_type_node,
646 tree_cons (NULL_TREE,
649 NULL_TREE, false, true, true, NULL, Empty);
651 /* If in no exception handlers mode, all raise statements are redirected to
652 __gnat_last_chance_handler. No need to redefine raise_nodefer_decl, since
653 this procedure will never be called in this mode. */
654 if (No_Exception_Handlers_Set ())
657 = create_subprog_decl
658 (get_identifier ("__gnat_last_chance_handler"), NULL_TREE,
659 build_function_type (void_type_node,
660 tree_cons (NULL_TREE,
661 build_pointer_type (char_type_node),
662 tree_cons (NULL_TREE,
665 NULL_TREE, false, true, true, NULL, Empty);
/* Every raise reason shares the single last-chance handler decl.  */
667 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
668 gnat_raise_decls[i] = decl;
671 /* Otherwise, make one decl for each exception reason. */
672 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
676 sprintf (name, "__gnat_rcheck_%.2d", i);
678 = create_subprog_decl
679 (get_identifier (name), NULL_TREE,
680 build_function_type (void_type_node,
681 tree_cons (NULL_TREE,
684 tree_cons (NULL_TREE,
687 NULL_TREE, false, true, true, NULL, Empty);
690 /* Indicate that these never return. */
691 TREE_THIS_VOLATILE (raise_nodefer_decl) = 1;
692 TREE_SIDE_EFFECTS (raise_nodefer_decl) = 1;
693 TREE_TYPE (raise_nodefer_decl)
694 = build_qualified_type (TREE_TYPE (raise_nodefer_decl),
697 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
699 TREE_THIS_VOLATILE (gnat_raise_decls[i]) = 1;
700 TREE_SIDE_EFFECTS (gnat_raise_decls[i]) = 1;
701 TREE_TYPE (gnat_raise_decls[i])
702 = build_qualified_type (TREE_TYPE (gnat_raise_decls[i]),
706 /* setjmp returns an integer and has one operand, which is a pointer to
709 = create_subprog_decl
710 (get_identifier ("__builtin_setjmp"), NULL_TREE,
711 build_function_type (integer_type_node,
712 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
713 NULL_TREE, false, true, true, NULL, Empty);
/* Mark the decl as the real __builtin_setjmp builtin.  */
715 DECL_BUILT_IN_CLASS (setjmp_decl) = BUILT_IN_NORMAL;
716 DECL_FUNCTION_CODE (setjmp_decl) = BUILT_IN_SETJMP;
718 /* update_setjmp_buf updates a setjmp buffer from the current stack pointer
720 update_setjmp_buf_decl
721 = create_subprog_decl
722 (get_identifier ("__builtin_update_setjmp_buf"), NULL_TREE,
723 build_function_type (void_type_node,
724 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
725 NULL_TREE, false, true, true, NULL, Empty);
727 DECL_BUILT_IN_CLASS (update_setjmp_buf_decl) = BUILT_IN_NORMAL;
728 DECL_FUNCTION_CODE (update_setjmp_buf_decl) = BUILT_IN_UPDATE_SETJMP_BUF;
730 main_identifier_node = get_identifier ("main");
733 /* Given a record type RECORD_TYPE and a chain of FIELD_DECL nodes FIELDLIST,
734 finish constructing the record or union type. If REP_LEVEL is zero, this
735 record has no representation clause and so will be entirely laid out here.
736 If REP_LEVEL is one, this record has a representation clause and has been
737 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
738 this record is derived from a parent record and thus inherits its layout;
739 only make a pass on the fields to finalize them. If DO_NOT_FINALIZE is
740 true, the record type is expected to be modified afterwards so it will
741 not be sent to the back-end for finalization. */
/* NOTE(review): this listing omits several lines of the function
   (braces, some assignment targets, switch labels) -- confirm against
   the full file before editing.  */
744 finish_record_type (tree record_type, tree fieldlist, int rep_level,
745 bool do_not_finalize)
747 enum tree_code code = TREE_CODE (record_type);
748 tree name = TYPE_NAME (record_type);
749 tree ada_size = bitsize_zero_node;
750 tree size = bitsize_zero_node;
751 bool had_size = TYPE_SIZE (record_type) != 0;
752 bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
753 bool had_align = TYPE_ALIGN (record_type) != 0;
756 if (name && TREE_CODE (name) == TYPE_DECL)
757 name = DECL_NAME (name);
759 TYPE_FIELDS (record_type) = fieldlist;
760 TYPE_STUB_DECL (record_type) = build_decl (TYPE_DECL, name, record_type);
762 /* We don't need both the typedef name and the record name output in
763 the debugging information, since they are the same. */
764 DECL_ARTIFICIAL (TYPE_STUB_DECL (record_type)) = 1;
766 /* Globally initialize the record first. If this is a rep'ed record,
767 that just means some initializations; otherwise, layout the record. */
770 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
771 TYPE_MODE (record_type) = BLKmode;
774 TYPE_SIZE_UNIT (record_type) = size_zero_node;
776 TYPE_SIZE (record_type) = bitsize_zero_node;
778 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
779 out just like a UNION_TYPE, since the size will be fixed. */
780 else if (code == QUAL_UNION_TYPE)
785 /* Ensure there isn't a size already set. There can be in an error
786 case where there is a rep clause but all fields have errors and
787 no longer have a position. */
788 TYPE_SIZE (record_type) = 0;
789 layout_type (record_type);
792 /* At this point, the position and size of each field is known. It was
793 either set before entry by a rep clause, or by laying out the type above.
795 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
796 to compute the Ada size; the GCC size and alignment (for rep'ed records
797 that are not padding types); and the mode (for rep'ed records). We also
798 clear the DECL_BIT_FIELD indication for the cases we know have not been
799 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
801 if (code == QUAL_UNION_TYPE)
802 fieldlist = nreverse (fieldlist);
/* Per-field pass computing sizes and fixing up bit-field flags.  */
804 for (field = fieldlist; field; field = TREE_CHAIN (field))
806 tree type = TREE_TYPE (field);
807 tree pos = bit_position (field);
808 tree this_size = DECL_SIZE (field);
811 if ((TREE_CODE (type) == RECORD_TYPE
812 || TREE_CODE (type) == UNION_TYPE
813 || TREE_CODE (type) == QUAL_UNION_TYPE)
814 && !TYPE_IS_FAT_POINTER_P (type)
815 && !TYPE_CONTAINS_TEMPLATE_P (type)
816 && TYPE_ADA_SIZE (type))
817 this_ada_size = TYPE_ADA_SIZE (type);
819 this_ada_size = this_size;
821 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
822 if (DECL_BIT_FIELD (field)
823 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
825 unsigned int align = TYPE_ALIGN (type);
827 /* In the general case, type alignment is required. */
828 if (value_factor_p (pos, align))
830 /* The enclosing record type must be sufficiently aligned.
831 Otherwise, if no alignment was specified for it and it
832 has been laid out already, bump its alignment to the
833 desired one if this is compatible with its size. */
834 if (TYPE_ALIGN (record_type) >= align)
836 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
837 DECL_BIT_FIELD (field) = 0;
841 && value_factor_p (TYPE_SIZE (record_type), align))
843 TYPE_ALIGN (record_type) = align;
844 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
845 DECL_BIT_FIELD (field) = 0;
849 /* In the non-strict alignment case, only byte alignment is. */
850 if (!STRICT_ALIGNMENT
851 && DECL_BIT_FIELD (field)
852 && value_factor_p (pos, BITS_PER_UNIT))
853 DECL_BIT_FIELD (field) = 0;
856 /* If we still have DECL_BIT_FIELD set at this point, we know the field
857 is technically not addressable. Except that it can actually be
858 addressed if the field is BLKmode and happens to be properly
860 DECL_NONADDRESSABLE_P (field)
861 |= DECL_BIT_FIELD (field) && DECL_MODE (field) != BLKmode;
863 /* A type must be as aligned as its most aligned field that is not
864 a bit-field. But this is already enforced by layout_type. */
865 if (rep_level > 0 && !DECL_BIT_FIELD (field))
866 TYPE_ALIGN (record_type)
867 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
872 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
873 size = size_binop (MAX_EXPR, size, this_size);
876 case QUAL_UNION_TYPE:
878 = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
879 this_ada_size, ada_size);
880 size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
885 /* Since we know here that all fields are sorted in order of
886 increasing bit position, the size of the record is one
887 higher than the ending bit of the last field processed
888 unless we have a rep clause, since in that case we might
889 have a field outside a QUAL_UNION_TYPE that has a higher ending
890 position. So use a MAX in that case. Also, if this field is a
891 QUAL_UNION_TYPE, we need to take into account the previous size in
892 the case of empty variants. */
894 = merge_sizes (ada_size, pos, this_ada_size,
895 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
897 = merge_sizes (size, pos, this_size,
898 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
906 if (code == QUAL_UNION_TYPE)
907 nreverse (fieldlist);
911 /* If this is a padding record, we never want to make the size smaller
912 than what was specified in it, if any. */
913 if (TREE_CODE (record_type) == RECORD_TYPE
914 && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
915 size = TYPE_SIZE (record_type);
917 /* Now set any of the values we've just computed that apply. */
918 if (!TYPE_IS_FAT_POINTER_P (record_type)
919 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
920 SET_TYPE_ADA_SIZE (record_type, ada_size);
924 tree size_unit = had_size_unit
925 ? TYPE_SIZE_UNIT (record_type)
927 size_binop (CEIL_DIV_EXPR, size,
929 unsigned int align = TYPE_ALIGN (record_type);
/* Round both sizes up to the alignment before committing them.  */
931 TYPE_SIZE (record_type) = variable_size (round_up (size, align));
932 TYPE_SIZE_UNIT (record_type)
933 = variable_size (round_up (size_unit, align / BITS_PER_UNIT));
935 compute_record_mode (record_type);
939 if (!do_not_finalize)
940 rest_of_record_type_compilation (record_type);
943 /* Wrap up compilation of RECORD_TYPE, i.e. most notably output all
944 the debug information associated with it. It need not be invoked
945 directly in most cases since finish_record_type takes care of doing
946 so, unless explicitly requested not to through DO_NOT_FINALIZE. */
949 rest_of_record_type_compilation (tree record_type)
951 tree fieldlist = TYPE_FIELDS (record_type);
953 enum tree_code code = TREE_CODE (record_type);
954 bool var_size = false;
956 for (field = fieldlist; field; field = TREE_CHAIN (field))
958 /* We need to make an XVE/XVU record if any field has variable size,
959 whether or not the record does. For example, if we have a union,
960 it may be that all fields, rounded up to the alignment, have the
961 same size, in which case we'll use that size. But the debug
962 output routines (except Dwarf2) won't be able to output the fields,
963 so we need to make the special record. */
964 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
965 /* If a field has a non-constant qualifier, the record will have
966 variable size too. */
967 || (code == QUAL_UNION_TYPE
968 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
975 /* If this record is of variable size, rename it so that the
976 debugger knows it is and make a new, parallel, record
977 that tells the debugger how the record is laid out. See
978 exp_dbug.ads. But don't do this for records that are padding
979 since they confuse GDB. */
981 && !(TREE_CODE (record_type) == RECORD_TYPE
982 && TYPE_IS_PADDING_P (record_type)))
985 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
986 ? UNION_TYPE : TREE_CODE (record_type));
987 tree orig_name = TYPE_NAME (record_type);
989 = (TREE_CODE (orig_name) == TYPE_DECL ? DECL_NAME (orig_name)
992 = concat_id_with_name (orig_id,
993 TREE_CODE (record_type) == QUAL_UNION_TYPE
995 tree last_pos = bitsize_zero_node;
997 tree prev_old_field = 0;
999 TYPE_NAME (new_record_type) = new_id;
1000 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
1001 TYPE_STUB_DECL (new_record_type)
1002 = build_decl (TYPE_DECL, new_id, new_record_type);
1003 DECL_ARTIFICIAL (TYPE_STUB_DECL (new_record_type)) = 1;
1004 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
1005 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
1006 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
1007 TYPE_SIZE_UNIT (new_record_type)
1008 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
1010 /* Now scan all the fields, replacing each field with a new
1011 field corresponding to the new encoding. */
1012 for (old_field = TYPE_FIELDS (record_type); old_field;
1013 old_field = TREE_CHAIN (old_field))
1015 tree field_type = TREE_TYPE (old_field);
1016 tree field_name = DECL_NAME (old_field);
1018 tree curpos = bit_position (old_field);
1020 unsigned int align = 0;
1023 /* See how the position was modified from the last position.
1025 There are two basic cases we support: a value was added
1026 to the last position or the last position was rounded to
1027 a boundary and they something was added. Check for the
1028 first case first. If not, see if there is any evidence
1029 of rounding. If so, round the last position and try
1032 If this is a union, the position can be taken as zero. */
1034 if (TREE_CODE (new_record_type) == UNION_TYPE)
1035 pos = bitsize_zero_node, align = 0;
1037 pos = compute_related_constant (curpos, last_pos);
1039 if (!pos && TREE_CODE (curpos) == MULT_EXPR
1040 && host_integerp (TREE_OPERAND (curpos, 1), 1))
1042 tree offset = TREE_OPERAND (curpos, 0);
1043 align = tree_low_cst (TREE_OPERAND (curpos, 1), 1);
1045 /* Strip off any conversions. */
1046 while (TREE_CODE (offset) == NON_LVALUE_EXPR
1047 || TREE_CODE (offset) == NOP_EXPR
1048 || TREE_CODE (offset) == CONVERT_EXPR)
1049 offset = TREE_OPERAND (offset, 0);
1051 /* An offset which is a bitwise AND with a negative power of 2
1052 means an alignment corresponding to this power of 2. */
1053 if (TREE_CODE (offset) == BIT_AND_EXPR
1054 && host_integerp (TREE_OPERAND (offset, 1), 0)
1055 && tree_int_cst_sgn (TREE_OPERAND (offset, 1)) < 0)
1058 = - tree_low_cst (TREE_OPERAND (offset, 1), 0);
1059 if (exact_log2 (pow) > 0)
1063 pos = compute_related_constant (curpos,
1064 round_up (last_pos, align));
1066 else if (!pos && TREE_CODE (curpos) == PLUS_EXPR
1067 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
1068 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
1069 && host_integerp (TREE_OPERAND
1070 (TREE_OPERAND (curpos, 0), 1),
1075 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
1076 pos = compute_related_constant (curpos,
1077 round_up (last_pos, align));
1079 else if (potential_alignment_gap (prev_old_field, old_field,
1082 align = TYPE_ALIGN (field_type);
1083 pos = compute_related_constant (curpos,
1084 round_up (last_pos, align));
1087 /* If we can't compute a position, set it to zero.
1089 ??? We really should abort here, but it's too much work
1090 to get this correct for all cases. */
1093 pos = bitsize_zero_node;
1095 /* See if this type is variable-sized and make a pointer type
1096 and indicate the indirection if so. Beware that the debug
1097 back-end may adjust the position computed above according
1098 to the alignment of the field type, i.e. the pointer type
1099 in this case, if we don't preventively counter that. */
1100 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
1102 field_type = build_pointer_type (field_type);
1103 if (align != 0 && TYPE_ALIGN (field_type) > align)
1105 field_type = copy_node (field_type);
1106 TYPE_ALIGN (field_type) = align;
1111 /* Make a new field name, if necessary. */
1112 if (var || align != 0)
1117 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
1118 align / BITS_PER_UNIT);
1120 strcpy (suffix, "XVL");
1122 field_name = concat_id_with_name (field_name, suffix);
1125 new_field = create_field_decl (field_name, field_type,
1127 DECL_SIZE (old_field), pos, 0);
1128 TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
1129 TYPE_FIELDS (new_record_type) = new_field;
1131 /* If old_field is a QUAL_UNION_TYPE, take its size as being
1132 zero. The only time it's not the last field of the record
1133 is when there are other components at fixed positions after
1134 it (meaning there was a rep clause for every field) and we
1135 want to be able to encode them. */
1136 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
1137 (TREE_CODE (TREE_TYPE (old_field))
1140 : DECL_SIZE (old_field));
1141 prev_old_field = old_field;
1144 TYPE_FIELDS (new_record_type)
1145 = nreverse (TYPE_FIELDS (new_record_type));
1147 rest_of_type_decl_compilation (TYPE_STUB_DECL (new_record_type));
1150 rest_of_type_decl_compilation (TYPE_STUB_DECL (record_type));
1153 /* Utility function of above to merge LAST_SIZE, the previous size of a record
1154 with FIRST_BIT and SIZE that describe a field. SPECIAL is nonzero
1155 if this represents a QUAL_UNION_TYPE in which case we must look for
1156 COND_EXPRs and replace a value of zero with the old size. If HAS_REP
1157 is nonzero, we must take the MAX of the end position of this field
1158 with LAST_SIZE. In all other cases, we use FIRST_BIT plus SIZE.
1160 We return an expression for the size. */
1163 merge_sizes (tree last_size, tree first_bit, tree size, bool special,
/* The result is built in the type of LAST_SIZE (a bitsize type).  */
1166 tree type = TREE_TYPE (last_size);
/* Ordinary case: no QUAL_UNION COND_EXPR to look through.  Compute
   FIRST_BIT + SIZE; the MAX with LAST_SIZE is taken for the rep-clause
   case (NOTE(review): the HAS_REP test itself is elided here — confirm
   against the full source).  */
1169 if (!special || TREE_CODE (size) != COND_EXPR)
1171 new = size_binop (PLUS_EXPR, first_bit, size);
1173 new = size_binop (MAX_EXPR, last_size, new);
/* QUAL_UNION case: rebuild the COND_EXPR, merging each arm recursively.
   An arm whose size is literal zero keeps the previous size, per the
   function comment above.  */
1177 new = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
1178 integer_zerop (TREE_OPERAND (size, 1))
1179 ? last_size : merge_sizes (last_size, first_bit,
1180 TREE_OPERAND (size, 1),
1182 integer_zerop (TREE_OPERAND (size, 2))
1183 ? last_size : merge_sizes (last_size, first_bit,
1184 TREE_OPERAND (size, 2),
1187 /* We don't need any NON_VALUE_EXPRs and they can confuse us (especially
1188 when fed through substitute_in_expr) into thinking that a constant
1189 size is not constant. */
1190 while (TREE_CODE (new) == NON_LVALUE_EXPR)
1191 new = TREE_OPERAND (new, 0);
1196 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1197 related by the addition of a constant. Return that constant if so. */
1200 compute_related_constant (tree op0, tree op1)
1202 tree op0_var, op1_var;
/* Split each operand into constant + variable parts via split_plus.  */
1203 tree op0_con = split_plus (op0, &op0_var);
1204 tree op1_con = split_plus (op1, &op1_var);
/* Candidate constant: difference of the two constant parts.  */
1205 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
/* If the variable parts are identical, OP0 == OP1 + RESULT.  */
1207 if (operand_equal_p (op0_var, op1_var, 0))
/* Otherwise, verify the relation directly by rebuilding OP0 from
   OP1's variable part plus the candidate constant.  */
1209 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1215 /* Utility function of above to split a tree OP which may be a sum, into a
1216 constant part, which is returned, and a variable part, which is stored
1217 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1221 split_plus (tree in, tree *pvar)
1223 /* Strip NOPS in order to ease the tree traversal and maximize the
1224 potential for constant or plus/minus discovery. We need to be careful
1225 to always return and set *pvar to bitsizetype trees, but it's worth
/* Default: treat the whole expression as variable, constant part zero.  */
1229 *pvar = convert (bitsizetype, in);
/* A literal integer has no variable part at all.  */
1231 if (TREE_CODE (in) == INTEGER_CST)
1233 *pvar = bitsize_zero_node;
1234 return convert (bitsizetype, in);
/* For a sum or difference, recurse on both operands and recombine the
   constant and variable parts with the same PLUS/MINUS operation.  */
1236 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1238 tree lhs_var, rhs_var;
1239 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1240 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
/* Neither side yielded a constant: IN is entirely variable, so keep
   the default *PVAR set above and report a zero constant part.  */
1242 if (lhs_var == TREE_OPERAND (in, 0)
1243 && rhs_var == TREE_OPERAND (in, 1))
1244 return bitsize_zero_node;
1246 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1247 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
/* Anything else: no constant part can be extracted.  */
1250 return bitsize_zero_node;
1253 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1254 subprogram. If it is void_type_node, then we are dealing with a procedure,
1255 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1256 PARM_DECL nodes that are the subprogram arguments. CICO_LIST is the
1257 copy-in/copy-out list to be stored into TYPE_CICO_LIST.
1258 RETURNS_UNCONSTRAINED is true if the function returns an unconstrained
1259 object. RETURNS_BY_REF is true if the function returns by reference.
1260 RETURNS_BY_TARGET_PTR is true if the function is to be passed (as its
1261 first parameter) the address of the place to copy its result. */
1264 create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
1265 bool returns_unconstrained, bool returns_by_ref,
1266 bool returns_by_target_ptr)
1268 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1269 the subprogram formal parameters. This list is generated by traversing the
1270 input list of PARM_DECL nodes. */
1271 tree param_type_list = NULL;
/* Collect the type of each PARM_DECL; the list is built in reverse and
   flipped below once terminated.  */
1275 for (param_decl = param_decl_list; param_decl;
1276 param_decl = TREE_CHAIN (param_decl))
1277 param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
1280 /* The list of the function parameter types has to be terminated by the void
1281 type to signal to the back-end that we are not dealing with a variable
1282 parameter subprogram, but that the subprogram has a fixed number of
1284 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1286 /* The list of argument types has been created in reverse
1288 param_type_list = nreverse (param_type_list);
1290 type = build_function_type (return_type, param_type_list);
1292 /* TYPE may have been shared since GCC hashes types. If it has a CICO_LIST
1293 or the new type should, make a copy of TYPE. Likewise for
1294 RETURNS_UNCONSTRAINED and RETURNS_BY_REF. */
1295 if (TYPE_CI_CO_LIST (type) || cico_list
1296 || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
1297 || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref
1298 || TYPE_RETURNS_BY_TARGET_PTR_P (type) != returns_by_target_ptr)
1299 type = copy_type (type);
/* Record the Ada-specific flags on the (possibly fresh) type node.  */
1301 TYPE_CI_CO_LIST (type) = cico_list;
1302 TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
1303 TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
1304 TYPE_RETURNS_BY_TARGET_PTR_P (type) = returns_by_target_ptr;
1308 /* Return a copy of TYPE but safe to modify in any way. */
1311 copy_type (tree type)
1313 tree new = copy_node (type);
1315 /* copy_node clears this field instead of copying it, because it is
1316 aliased with TREE_CHAIN. */
1317 TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type);
/* Detach the copy from the original's pointer/reference caches and make
   it its own main variant, so later modifications cannot leak back into
   TYPE or its variant chain.  */
1319 TYPE_POINTER_TO (new) = 0;
1320 TYPE_REFERENCE_TO (new) = 0;
1321 TYPE_MAIN_VARIANT (new) = new;
1322 TYPE_NEXT_VARIANT (new) = 0;
1327 /* Return an INTEGER_TYPE of SIZETYPE with range MIN to MAX and whose
1328 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position of
1332 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
1334 /* First build a type for the desired range. */
1335 tree type = build_index_2_type (min, max);
1337 /* If this type has the TYPE_INDEX_TYPE we want, return it. Otherwise, if it
1338 doesn't have TYPE_INDEX_TYPE set, set it to INDEX. If TYPE_INDEX_TYPE
1339 is set, but not to INDEX, make a copy of this type with the requested
1340 index type. Note that we have no way of sharing these types, but that's
1341 only a small hole. */
1342 if (TYPE_INDEX_TYPE (type) == index)
1344 else if (TYPE_INDEX_TYPE (type))
1345 type = copy_type (type);
/* Record INDEX and emit a declaration so debug info is generated for
   the new index type at GNAT_NODE's source position.  */
1347 SET_TYPE_INDEX_TYPE (type, index);
1348 create_type_decl (NULL_TREE, type, NULL, true, false, gnat_node);
1352 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type (a character
1353 string) and TYPE is a ..._TYPE node giving its data type.
1354 ARTIFICIAL_P is true if this is a declaration that was generated
1355 by the compiler. DEBUG_INFO_P is true if we need to write debugging
1356 information about this type. GNAT_NODE is used for the position of
1360 create_type_decl (tree type_name, tree type, struct attrib *attr_list,
1361 bool artificial_p, bool debug_info_p, Node_Id gnat_node)
1363 tree type_decl = build_decl (TYPE_DECL, type_name, type);
1364 enum tree_code code = TREE_CODE (type);
1366 DECL_ARTIFICIAL (type_decl) = artificial_p;
/* Dummy (placeholder) types are not pushed onto the binding level;
   they will be replaced by the real type later.  */
1368 if (!TYPE_IS_DUMMY_P (type))
1369 gnat_pushdecl (type_decl, gnat_node);
1371 process_attributes (type_decl, attr_list);
1373 /* Pass type declaration information to the debugger unless this is an
1374 UNCONSTRAINED_ARRAY_TYPE, which the debugger does not support,
1375 and ENUMERAL_TYPE or RECORD_TYPE which is handled separately, or
1376 type for which debugging information was not requested. */
1377 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
1378 DECL_IGNORED_P (type_decl) = 1;
1379 else if (code != ENUMERAL_TYPE
1380 && (code != RECORD_TYPE || TYPE_IS_FAT_POINTER_P (type))
1381 && !((code == POINTER_TYPE || code == REFERENCE_TYPE)
1382 && TYPE_IS_DUMMY_P (TREE_TYPE (type))))
1383 rest_of_type_decl_compilation (type_decl);
1388 /* Helper for create_var_decl and create_true_var_decl. Returns a GCC VAR_DECL
1391 VAR_NAME gives the name of the variable. ASM_NAME is its assembler name
1392 (if provided). TYPE is its data type (a GCC ..._TYPE node). VAR_INIT is
1393 the GCC tree for an optional initial expression; NULL_TREE if none.
1395 CONST_FLAG is true if this variable is constant, in which case we might
1396 return a CONST_DECL node unless CONST_DECL_ALLOWED_FLAG is false.
1398 PUBLIC_FLAG is true if this definition is to be made visible outside of
1399 the current compilation unit. This flag should be set when processing the
1400 variable definitions in a package specification. EXTERN_FLAG is nonzero
1401 when processing an external variable declaration (as opposed to a
1402 definition: no storage is to be allocated for the variable here).
1404 STATIC_FLAG is only relevant when not at top level. In that case
1405 it indicates whether to always allocate storage to the variable.
1407 GNAT_NODE is used for the position of the decl. */
1410 create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
1411 bool const_flag, bool const_decl_allowed_flag,
1412 bool public_flag, bool extern_flag, bool static_flag,
1413 struct attrib *attr_list, Node_Id gnat_node)
/* Whether the initializer is a compile-time constant: statically
   (initializer_constant_valid_p) for global/static variables, or merely
   TREE_CONSTANT for automatic ones.  NOTE(review): the lines declaring
   this predicate variable are elided from this excerpt.  */
1417 && TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
1418 && (global_bindings_p () || static_flag
1419 ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init)) != 0
1420 : TREE_CONSTANT (var_init)));
1422 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
1423 case the initializer may be used in-lieu of the DECL node (as done in
1424 Identifier_to_gnu). This is useful to prevent the need of elaboration
1425 code when an identifier for which such a decl is made is in turn used as
1426 an initializer. We used to rely on CONST vs VAR_DECL for this purpose,
1427 but extra constraints apply to this choice (see below) and are not
1428 relevant to the distinction we wish to make. */
1429 bool constant_p = const_flag && init_const;
1431 /* The actual DECL node. CONST_DECL was initially intended for enumerals
1432 and may be used for scalars in general but not for aggregates. */
1434 = build_decl ((constant_p && const_decl_allowed_flag
1435 && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
1438 /* If this is external, throw away any initializations (they will be done
1439 elsewhere) unless this is a a constant for which we would like to remain
1440 able to get the initializer. If we are defining a global here, leave a
1441 constant initialization and save any variable elaborations for the
1442 elaboration routine. If we are just annotating types, throw away the
1443 initialization if it isn't a constant. */
1444 if ((extern_flag && !constant_p)
1445 || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
1446 var_init = NULL_TREE;
1448 /* At the global level, an initializer requiring code to be generated
1449 produces elaboration statements. Check that such statements are allowed,
1450 that is, not violating a No_Elaboration_Code restriction. */
1451 if (global_bindings_p () && var_init != 0 && ! init_const)
1452 Check_Elaboration_Code_Allowed (gnat_node);
1454 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
1455 try to fiddle with DECL_COMMON. However, on platforms that don't
1456 support global BSS sections, uninitialized global variables would
1457 go in DATA instead, thus increasing the size of the executable. */
1459 && TREE_CODE (var_decl) == VAR_DECL
1460 && !have_global_bss_p ())
1461 DECL_COMMON (var_decl) = 1;
/* Set the standard GCC flags derived from the arguments and the type.  */
1462 DECL_INITIAL (var_decl) = var_init;
1463 TREE_READONLY (var_decl) = const_flag;
1464 DECL_EXTERNAL (var_decl) = extern_flag;
1465 TREE_PUBLIC (var_decl) = public_flag || extern_flag;
1466 TREE_CONSTANT (var_decl) = constant_p;
1467 TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
1468 = TYPE_VOLATILE (type);
1470 /* If it's public and not external, always allocate storage for it.
1471 At the global binding level we need to allocate static storage for the
1472 variable if and only if it's not external. If we are not at the top level
1473 we allocate automatic storage unless requested not to. */
1474 TREE_STATIC (var_decl)
1475 = public_flag || (global_bindings_p () ? !extern_flag : static_flag);
1477 if (asm_name && VAR_OR_FUNCTION_DECL_P (var_decl))
1478 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
1480 process_attributes (var_decl, attr_list);
1482 /* Add this decl to the current binding level. */
1483 gnat_pushdecl (var_decl, gnat_node);
/* A volatile object must be considered addressable by the back-end.  */
1485 if (TREE_SIDE_EFFECTS (var_decl))
1486 TREE_ADDRESSABLE (var_decl) = 1;
/* CONST_DECLs need no storage, so only VAR_DECLs are handed to the
   back-end: globally via rest_of_decl_compilation, locally by expansion.  */
1488 if (TREE_CODE (var_decl) != CONST_DECL)
1490 if (global_bindings_p ())
1491 rest_of_decl_compilation (var_decl, true, 0);
1494 expand_decl (var_decl);
1499 /* Wrapper around create_var_decl_1 for cases where we don't care whether
1500 a VAR or a CONST decl node is created. */
1503 create_var_decl (tree var_name, tree asm_name, tree type, tree var_init,
1504 bool const_flag, bool public_flag, bool extern_flag,
1505 bool static_flag, struct attrib *attr_list,
/* Forward everything, allowing a CONST_DECL to be produced
   (const_decl_allowed_flag argument is elided from this excerpt).  */
1508 return create_var_decl_1 (var_name, asm_name, type, var_init,
1510 public_flag, extern_flag, static_flag,
1511 attr_list, gnat_node);
1514 /* Wrapper around create_var_decl_1 for cases where a VAR_DECL node is
1515 required. The primary intent is for DECL_CONST_CORRESPONDING_VARs, which
1516 must be VAR_DECLs and on which we want TREE_READONLY set to have them
1517 possibly assigned to a readonly data section. */
1520 create_true_var_decl (tree var_name, tree asm_name, tree type, tree var_init,
1521 bool const_flag, bool public_flag, bool extern_flag,
1522 bool static_flag, struct attrib *attr_list,
/* Forward everything, forbidding a CONST_DECL so a true VAR_DECL results
   (const_decl_allowed_flag argument is elided from this excerpt).  */
1525 return create_var_decl_1 (var_name, asm_name, type, var_init,
1527 public_flag, extern_flag, static_flag,
1528 attr_list, gnat_node);
1531 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
1534 aggregate_type_contains_array_p (tree type)
1536 switch (TREE_CODE (type))
1540 case QUAL_UNION_TYPE:
/* Record/union case: recurse into every aggregate field looking for an
   array anywhere in the component tree.  */
1543 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1544 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
1545 && aggregate_type_contains_array_p (TREE_TYPE (field)))
1558 /* Returns a FIELD_DECL node. FIELD_NAME the field name, FIELD_TYPE is its
1559 type, and RECORD_TYPE is the type of the parent. PACKED is nonzero if
1560 this field is in a record type with a "pragma pack". If SIZE is nonzero
1561 it is the specified size for this field. If POS is nonzero, it is the bit
1562 position. If ADDRESSABLE is nonzero, it means we are allowed to take
1563 the address of this field for aliasing purposes. If it is negative, we
1564 should not make a bitfield, which is used by make_aligning_type. */
1567 create_field_decl (tree field_name, tree field_type, tree record_type,
1568 int packed, tree size, tree pos, int addressable)
1570 tree field_decl = build_decl (FIELD_DECL, field_name, field_type);
1572 DECL_CONTEXT (field_decl) = record_type;
1573 TREE_READONLY (field_decl) = TYPE_READONLY (field_type);
1575 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
1576 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
1577 Likewise for an aggregate without specified position that contains an
1578 array, because in this case slices of variable length of this array
1579 must be handled by GCC and variable-sized objects need to be aligned
1580 to at least a byte boundary. */
1581 if (packed && (TYPE_MODE (field_type) == BLKmode
1583 && AGGREGATE_TYPE_P (field_type)
1584 && aggregate_type_contains_array_p (field_type))))
1585 DECL_ALIGN (field_decl) = BITS_PER_UNIT;
1587 /* If a size is specified, use it. Otherwise, if the record type is packed
1588 compute a size to use, which may differ from the object's natural size.
1589 We always set a size in this case to trigger the checks for bitfield
1590 creation below, which is typically required when no position has been
1593 size = convert (bitsizetype, size);
1594 else if (packed == 1)
/* rm_size is the Ada RM-defined size, possibly smaller than the
   natural GCC size.  */
1596 size = rm_size (field_type);
1598 /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
1600 if (TREE_CODE (size) == INTEGER_CST
1601 && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
1602 size = round_up (size, BITS_PER_UNIT);
1605 /* If we may, according to ADDRESSABLE, make a bitfield if a size is
1606 specified for two reasons: first if the size differs from the natural
1607 size. Second, if the alignment is insufficient. There are a number of
1608 ways the latter can be true.
1610 We never make a bitfield if the type of the field has a nonconstant size,
1611 because no such entity requiring bitfield operations should reach here.
1613 We do *preventively* make a bitfield when there might be the need for it
1614 but we don't have all the necessary information to decide, as is the case
1615 of a field with no specified position in a packed record.
1617 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
1618 in layout_decl or finish_record_type to clear the bit_field indication if
1619 it is in fact not needed. */
1620 if (addressable >= 0
1622 && TREE_CODE (size) == INTEGER_CST
1623 && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
1624 && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
1625 || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
1627 || (TYPE_ALIGN (record_type) != 0
1628 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
1630 DECL_BIT_FIELD (field_decl) = 1;
1631 DECL_SIZE (field_decl) = size;
/* Without packing or an explicit position, cap the field alignment by
   the record's own alignment when the latter is known.  */
1632 if (!packed && !pos)
1633 DECL_ALIGN (field_decl)
1634 = (TYPE_ALIGN (record_type) != 0
1635 ? MIN (TYPE_ALIGN (record_type), TYPE_ALIGN (field_type))
1636 : TYPE_ALIGN (field_type));
1639 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
1641 /* Bump the alignment if need be, either for bitfield/packing purposes or
1642 to satisfy the type requirements if no such consideration applies. When
1643 we get the alignment from the type, indicate if this is from an explicit
1644 user request, which prevents stor-layout from lowering it later on. */
1647 = (DECL_BIT_FIELD (field_decl) ? 1
1648 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT : 0);
1650 if (bit_align > DECL_ALIGN (field_decl))
1651 DECL_ALIGN (field_decl) = bit_align;
1652 else if (!bit_align && TYPE_ALIGN (field_type) > DECL_ALIGN (field_decl))
1654 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
1655 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (field_type);
1661 /* We need to pass in the alignment the DECL is known to have.
1662 This is the lowest-order bit set in POS, but no more than
1663 the alignment of the record, if one is specified. Note
1664 that an alignment of 0 is taken as infinite. */
1665 unsigned int known_align;
1667 if (host_integerp (pos, 1))
1668 known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
1670 known_align = BITS_PER_UNIT;
1672 if (TYPE_ALIGN (record_type)
1673 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
1674 known_align = TYPE_ALIGN (record_type);
1676 layout_decl (field_decl, known_align);
1677 SET_DECL_OFFSET_ALIGN (field_decl,
1678 host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
/* Split POS into a byte offset plus a bit offset as GCC expects.  */
1680 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
1681 &DECL_FIELD_BIT_OFFSET (field_decl),
1682 DECL_OFFSET_ALIGN (field_decl), pos);
/* Remember that the position came from a representation clause.  */
1684 DECL_HAS_REP_P (field_decl) = 1;
1687 /* In addition to what our caller says, claim the field is addressable if we
1688 know that its type is not suitable.
1690 The field may also be "technically" nonaddressable, meaning that even if
1691 we attempt to take the field's address we will actually get the address
1692 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
1693 value we have at this point is not accurate enough, so we don't account
1694 for this here and let finish_record_type decide. */
1695 if (!type_for_nonaliased_component_p (field_type))
1698 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
1703 /* Returns a PARM_DECL node. PARAM_NAME is the name of the parameter,
1704 PARAM_TYPE is its type. READONLY is true if the parameter is
1705 readonly (either an In parameter or an address of a pass-by-ref
1709 create_param_decl (tree param_name, tree param_type, bool readonly)
1711 tree param_decl = build_decl (PARM_DECL, param_name, param_type);
1713 /* Honor targetm.calls.promote_prototypes(), as not doing so can
1714 lead to various ABI violations. */
1715 if (targetm.calls.promote_prototypes (param_type)
1716 && (TREE_CODE (param_type) == INTEGER_TYPE
1717 || TREE_CODE (param_type) == ENUMERAL_TYPE)
1718 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
1720 /* We have to be careful about biased types here. Make a subtype
1721 of integer_type_node with the proper biasing. */
1722 if (TREE_CODE (param_type) == INTEGER_TYPE
1723 && TYPE_BIASED_REPRESENTATION_P (param_type))
/* copy_type because build_range_type may return a shared node and we
   are about to set a flag on it.  */
1726 = copy_type (build_range_type (integer_type_node,
1727 TYPE_MIN_VALUE (param_type),
1728 TYPE_MAX_VALUE (param_type)));
1730 TYPE_BIASED_REPRESENTATION_P (param_type) = 1;
/* Unbiased small integer/enum: simply promote to int.  */
1733 param_type = integer_type_node;
/* DECL_ARG_TYPE is the type used when the value is passed.  */
1736 DECL_ARG_TYPE (param_decl) = param_type;
1737 TREE_READONLY (param_decl) = readonly;
1741 /* Given a DECL and ATTR_LIST, process the listed attributes. */
1744 process_attributes (tree decl, struct attrib *attr_list)
1746 for (; attr_list; attr_list = attr_list->next)
1747 switch (attr_list->type)
/* Generic machine attribute: defer to the common decl_attributes
   machinery.  */
1749 case ATTR_MACHINE_ATTRIBUTE:
1750 decl_attributes (&decl, tree_cons (attr_list->name, attr_list->args,
1752 ATTR_FLAG_TYPE_IN_PLACE);
/* Linker alias: only meaningful on a definition, hence the
   DECL_EXTERNAL guard.  */
1755 case ATTR_LINK_ALIAS:
1756 if (! DECL_EXTERNAL (decl))
1758 TREE_STATIC (decl) = 1;
1759 assemble_alias (decl, attr_list->name);
/* Weak symbol: warn when the target has no support for it.  */
1763 case ATTR_WEAK_EXTERNAL:
1765 declare_weak (decl);
1767 post_error ("?weak declarations not supported on this target",
1768 attr_list->error_point);
/* Placement in a named section; clearing DECL_COMMON since a common
   symbol cannot live in a specific section.  */
1771 case ATTR_LINK_SECTION:
1772 if (targetm.have_named_sections)
1774 DECL_SECTION_NAME (decl)
1775 = build_string (IDENTIFIER_LENGTH (attr_list->name),
1776 IDENTIFIER_POINTER (attr_list->name));
1777 DECL_COMMON (decl) = 0;
1780 post_error ("?section attributes are not supported for this target",
1781 attr_list->error_point);
/* Static constructor/destructor: mark TREE_USED so the decl is not
   discarded as unreferenced.  */
1784 case ATTR_LINK_CONSTRUCTOR:
1785 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1786 TREE_USED (decl) = 1;
1789 case ATTR_LINK_DESTRUCTOR:
1790 DECL_STATIC_DESTRUCTOR (decl) = 1;
1791 TREE_USED (decl) = 1;
1796 /* Record a global renaming pointer. */
1799 record_global_renaming_pointer (tree decl)
/* Only decls actually carrying a renamed object may be recorded.  */
1801 gcc_assert (DECL_RENAMED_OBJECT (decl));
1802 VEC_safe_push (tree, gc, global_renaming_pointers, decl);
1805 /* Invalidate the global renaming pointers. */
1808 invalidate_global_renaming_pointers (void)
/* Clear the renamed object of every recorded decl, then release the
   vector itself.  */
1813 for (i = 0; VEC_iterate(tree, global_renaming_pointers, i, iter); i++)
1814 SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);
1816 VEC_free (tree, gc, global_renaming_pointers);
1819 /* Return true if VALUE is a known to be a multiple of FACTOR, which must be
/* A constant is checked directly; a product is a multiple if either
   operand provably is.  Anything else conservatively answers false.  */
1823 value_factor_p (tree value, HOST_WIDE_INT factor)
1825 if (host_integerp (value, 1))
1826 return tree_low_cst (value, 1) % factor == 0;
1828 if (TREE_CODE (value) == MULT_EXPR)
1829 return (value_factor_p (TREE_OPERAND (value, 0), factor)
1830 || value_factor_p (TREE_OPERAND (value, 1), factor));
1835 /* Given 2 consecutive field decls PREV_FIELD and CURR_FIELD, return true
1836 unless we can prove these 2 fields are laid out in such a way that no gap
1837 exist between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
1838 is the distance in bits between the end of PREV_FIELD and the starting
1839 position of CURR_FIELD. It is ignored if null. */
1842 potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
1844 /* If this is the first field of the record, there cannot be any gap */
1848 /* If the previous field is a union type, then return False: The only
1849 time when such a field is not the last field of the record is when
1850 there are other components at fixed positions after it (meaning there
1851 was a rep clause for every field), in which case we don't want the
1852 alignment constraint to override them. */
1853 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
1856 /* If the distance between the end of prev_field and the beginning of
1857 curr_field is constant, then there is a gap if the value of this
1858 constant is not null. */
1859 if (offset && host_integerp (offset, 1))
1860 return !integer_zerop (offset);
1862 /* If the size and position of the previous field are constant,
1863 then check the sum of this size and position. There will be a gap
1864 iff it is not multiple of the current field alignment. */
1865 if (host_integerp (DECL_SIZE (prev_field), 1)
1866 && host_integerp (bit_position (prev_field), 1))
1867 return ((tree_low_cst (bit_position (prev_field), 1)
1868 + tree_low_cst (DECL_SIZE (prev_field), 1))
1869 % DECL_ALIGN (curr_field) != 0);
1871 /* If both the position and size of the previous field are multiples
1872 of the current field alignment, there cannot be any gap. */
1873 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
1874 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
1877 /* Fallback, return that there may be a potential gap */
1881 /* Returns a LABEL_DECL node for LABEL_NAME. */
1884 create_label_decl (tree label_name)
1886 tree label_decl = build_decl (LABEL_DECL, label_name, void_type_node);
/* A label belongs to the current function and carries no value,
   hence VOIDmode; position it at the current source location.  */
1888 DECL_CONTEXT (label_decl) = current_function_decl;
1889 DECL_MODE (label_decl) = VOIDmode;
1890 DECL_SOURCE_LOCATION (label_decl) = input_location;
1895 /* Returns a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
1896 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1897 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1898 PARM_DECL nodes chained through the TREE_CHAIN field).
1900 INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
1901 appropriate fields in the FUNCTION_DECL. GNAT_NODE gives the location. */
1904 create_subprog_decl (tree subprog_name, tree asm_name,
1905 tree subprog_type, tree param_decl_list, bool inline_flag,
1906 bool public_flag, bool extern_flag,
1907 struct attrib *attr_list, Node_Id gnat_node)
1909 tree return_type = TREE_TYPE (subprog_type);
1910 tree subprog_decl = build_decl (FUNCTION_DECL, subprog_name, subprog_type);
1912 /* If this is a function nested inside an inlined external function, it
1913 means we aren't going to compile the outer function unless it is
1914 actually inlined, so do the same for us. */
1915 if (current_function_decl && DECL_INLINE (current_function_decl)
1916 && DECL_EXTERNAL (current_function_decl))
/* Copy linkage and qualifier information from the arguments and from
   the function type onto the new decl.  */
1919 DECL_EXTERNAL (subprog_decl) = extern_flag;
1920 TREE_PUBLIC (subprog_decl) = public_flag;
1921 TREE_STATIC (subprog_decl) = 1;
1922 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
1923 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
1924 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
1925 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
/* Build the artificial RESULT_DECL holding the return value.  */
1926 DECL_RESULT (subprog_decl) = build_decl (RESULT_DECL, 0, return_type);
1927 DECL_ARTIFICIAL (DECL_RESULT (subprog_decl)) = 1;
1928 DECL_IGNORED_P (DECL_RESULT (subprog_decl)) = 1;
1930 /* TREE_ADDRESSABLE is set on the result type to request the use of the
1931 target by-reference return mechanism. This is not supported all the
1932 way down to RTL expansion with GCC 4, which ICEs on temporary creation
1933 attempts with such a type and expects DECL_BY_REFERENCE to be set on
1934 the RESULT_DECL instead - see gnat_genericize for more details. */
1935 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (subprog_decl))))
1937 tree result_decl = DECL_RESULT (subprog_decl);
1939 TREE_ADDRESSABLE (TREE_TYPE (result_decl)) = 0;
1940 DECL_BY_REFERENCE (result_decl) = 1;
1944 DECL_DECLARED_INLINE_P (subprog_decl) = 1;
1947 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
1949 process_attributes (subprog_decl, attr_list);
1951 /* Add this decl to the current binding level. */
1952 gnat_pushdecl (subprog_decl, gnat_node);
1954 /* Output the assembler code and/or RTL for the declaration. */
1955 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
1957 return subprog_decl;
1960 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
1961 body. This routine needs to be invoked before processing the declarations
1962 appearing in the subprogram. */
1965 begin_subprog_body (tree subprog_decl)
      /* Make SUBPROG_DECL the function being compiled and announce it.  */
1969 current_function_decl = subprog_decl;
1970 announce_function (subprog_decl);
1972 /* Enter a new binding level and show that all the parameters belong to
      this function. */
1975 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
1976 param_decl = TREE_CHAIN (param_decl))
1977 DECL_CONTEXT (param_decl) = subprog_decl;
1979 make_decl_rtl (subprog_decl);
1981 /* We handle pending sizes via the elaboration of types, so we don't need to
1982 save them. This causes them to be marked as part of the outer function
1983 and then discarded. */
1984 get_pending_sizes ();
1988 /* Helper for the genericization callback. Return a dereference of VAL
1989 if it is of a reference type. */
1992 convert_from_reference (tree val)
1994 tree value_type, ref;
      /* Non-reference values are returned untouched (return elided here).  */
1996 if (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE)
1999 value_type = TREE_TYPE (TREE_TYPE (val));
2000 ref = build1 (INDIRECT_REF, value_type, val);
2002 /* See if what we reference is CONST or VOLATILE, which requires
2003 looking into array types to get to the component type. */
2005 while (TREE_CODE (value_type) == ARRAY_TYPE)
2006 value_type = TREE_TYPE (value_type);
2009 = (TYPE_QUALS (value_type) & TYPE_QUAL_CONST);
2010 TREE_THIS_VOLATILE (ref)
2011 = (TYPE_QUALS (value_type) & TYPE_QUAL_VOLATILE);
2013 /* The dereference has side effects if it is volatile or if VAL does.  */
     TREE_SIDE_EFFECTS (ref)
2014 = (TREE_THIS_VOLATILE (ref) || TREE_SIDE_EFFECTS (val));
2019 /* Helper for the genericization callback. Returns true if T denotes
2020 a RESULT_DECL with DECL_BY_REFERENCE set. Used by gnat_genericize_r
      to spot occurrences of a by-reference function result. */
2023 is_byref_result (tree t)
2025 return (TREE_CODE (t) == RESULT_DECL && DECL_BY_REFERENCE (t));
2029 /* Tree walking callback for gnat_genericize. Currently ...
2031 o Adjust references to the function's DECL_RESULT if it is marked
2032 DECL_BY_REFERENCE and so has had its type turned into a reference
2033 type at the end of the function compilation. */
2036 gnat_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
2038 /* This implementation is modeled after what the C++ front-end is
2039 doing, basis of the downstream passes behavior. */
2041 tree stmt = *stmt_p;
2042 struct pointer_set_t *p_set = (struct pointer_set_t*) data;
2044 /* If we have a direct mention of the result decl, dereference. */
2045 if (is_byref_result (stmt))
2047 *stmt_p = convert_from_reference (stmt);
2052 /* Otherwise, no need to walk the same tree twice. */
2053 if (pointer_set_contains (p_set, stmt))
2059 /* If we are taking the address of what now is a reference, just get the
      reference value back. */
2061 if (TREE_CODE (stmt) == ADDR_EXPR
2062 && is_byref_result (TREE_OPERAND (stmt, 0)))
2064 *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
2068 /* Don't dereference a by-reference RESULT_DECL inside a RETURN_EXPR. */
2069 else if (TREE_CODE (stmt) == RETURN_EXPR
2070 && TREE_OPERAND (stmt, 0)
2071 && is_byref_result (TREE_OPERAND (stmt, 0)))
2074 /* Don't look inside trees that cannot embed references of interest. */
2075 else if (IS_TYPE_OR_DECL_P (stmt))
2078 /* Record this node so it is not processed again on a later visit. */
     pointer_set_insert (p_set, *stmt_p);
2083 /* Perform lowering of Ada trees to GENERIC. In particular:
2085 o Turn a DECL_BY_REFERENCE RESULT_DECL into a real by-reference decl
2086 and adjust all the references to this decl accordingly. */
2089 gnat_genericize (tree fndecl)
2091 /* Prior to GCC 4, an explicit By_Reference result mechanism for a function
2092 was handled by simply setting TREE_ADDRESSABLE on the result type.
2093 Everything required to actually pass by invisible ref using the target
2094 mechanism (e.g. extra parameter) was handled at RTL expansion time.
2096 This doesn't work with GCC 4 any more for several reasons. First, the
2097 gimplification process might need the creation of temporaries of this
2098 type, and the gimplifier ICEs on such attempts. Second, the middle-end
2099 now relies on a different attribute for such cases (DECL_BY_REFERENCE on
2100 RESULT/PARM_DECLs), and expects the user invisible by-reference-ness to
2101 be explicitly accounted for by the front-end in the function body.
2103 We achieve the complete transformation in two steps:
2105 1/ create_subprog_decl performs early attribute tweaks: it clears
2106 TREE_ADDRESSABLE from the result type and sets DECL_BY_REFERENCE on
2107 the result decl. The former ensures that the bit isn't set in the GCC
2108 tree saved for the function, so prevents ICEs on temporary creation.
2109 The latter we use here to trigger the rest of the processing.
2111 2/ This function performs the type transformation on the result decl
2112 and adjusts all the references to this decl from the function body
      accordingly.
2115 Clearing TREE_ADDRESSABLE from the type differs from the C++ front-end
2116 strategy, which escapes the gimplifier temporary creation issues by
2117 creating its own temporaries using TARGET_EXPR nodes. Our way relies
2118 on simple specific support code in aggregate_value_p to look at the
2119 target function result decl explicitly. */
2121 struct pointer_set_t *p_set;
2122 tree decl_result = DECL_RESULT (fndecl);
      /* Nothing to do unless step 1/ above marked the result decl.  */
2124 if (!DECL_BY_REFERENCE (decl_result))
2127 /* Make the DECL_RESULT explicitly by-reference and adjust all the
2128 occurrences in the function body using the common tree-walking facility.
2129 We want to see every occurrence of the result decl to adjust the
2130 referencing tree, so need to use our own pointer set to control which
2131 trees should be visited again or not. */
2133 p_set = pointer_set_create ();
2135 TREE_TYPE (decl_result) = build_reference_type (TREE_TYPE (decl_result));
2136 TREE_ADDRESSABLE (decl_result) = 0;
2137 relayout_decl (decl_result);
2139 walk_tree (&DECL_SAVED_TREE (fndecl), gnat_genericize_r, p_set, NULL);
2141 pointer_set_destroy (p_set);
2144 /* Finish the definition of the current subprogram and compile it all the way
2145 to assembler language output. BODY is the tree corresponding to
      the subprogram. */
2149 end_subprog_body (tree body)
2151 tree fndecl = current_function_decl;
2153 /* Mark the BLOCK for this level as being for this function and pop the
2154 level. Since the vars in it are the parameters, clear them. */
2155 BLOCK_VARS (current_binding_level->block) = 0;
2156 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
2157 DECL_INITIAL (fndecl) = current_binding_level->block;
2160 /* Deal with inline. If declared inline or we should default to inline,
2161 set the flag in the decl. */
2162 DECL_INLINE (fndecl)
2163 = DECL_DECLARED_INLINE_P (fndecl) || flag_inline_trees == 2;
2165 /* We handle pending sizes via the elaboration of types, so we don't
2166 need to save them. */
2167 get_pending_sizes ();
2169 /* Mark the RESULT_DECL as being in this subprogram. */
2170 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
2172 DECL_SAVED_TREE (fndecl) = body;
      /* Restore the enclosing function, if any, as the current one.  */
2174 current_function_decl = DECL_CONTEXT (fndecl);
2177 /* We cannot track the location of errors past this point. */
2178 error_gnat_node = Empty;
2180 /* If we're only annotating types, don't actually compile this function. */
2181 if (type_annotate_only)
2184 /* Perform the required pre-gimplification transformations on the tree. */
2185 gnat_genericize (fndecl);
2187 /* We do different things for nested and non-nested functions.
2188 ??? This should be in cgraph. */
2189 if (!DECL_CONTEXT (fndecl))
2191 gnat_gimplify_function (fndecl);
2192 cgraph_finalize_function (fndecl, false);
2195 /* Register this function with cgraph just far enough to get it
2196 added to our parent's nested function list. */
2197 (void) cgraph_node (fndecl);
2200 /* Convert FNDECL's code to GIMPLE and handle any nested functions. */
2203 gnat_gimplify_function (tree fndecl)
2205 struct cgraph_node *cgn;
      /* Dump the tree before and after gimplification (only has an effect
         when the corresponding -fdump-tree dumps are enabled).  */
2207 dump_function (TDI_original, fndecl);
2208 gimplify_function_tree (fndecl);
2209 dump_function (TDI_generic, fndecl);
2211 /* Convert all nested functions to GIMPLE now. We do things in this order
2212 so that items like VLA sizes are expanded properly in the context of the
2213 correct function. */
2214 cgn = cgraph_node (fndecl);
2215 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
2216 gnat_gimplify_function (cgn->decl);
/* Record DECL, a builtin function declaration, by entering it in the
   current binding level with no associated GNAT node.  */
2221 gnat_builtin_function (tree decl)
2223 gnat_pushdecl (decl, Empty);
2227 /* Handle a "const" attribute; arguments as in
2228 struct attribute_spec.handler. */
2231 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
2232 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
      /* A "const" function is modeled with TREE_READONLY in GCC trees.  */
2235 if (TREE_CODE (*node) == FUNCTION_DECL)
2236 TREE_READONLY (*node) = 1;
      /* NOTE(review): presumably in an else branch for non-functions, where
         the attribute is dropped -- confirm against the full source.  */
2238 *no_add_attrs = true;
2243 /* Handle a "nothrow" attribute; arguments as in
2244 struct attribute_spec.handler. */
2247 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
2248 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
      /* A "nothrow" function is modeled with TREE_NOTHROW in GCC trees.  */
2251 if (TREE_CODE (*node) == FUNCTION_DECL)
2252 TREE_NOTHROW (*node) = 1;
      /* NOTE(review): presumably in an else branch for non-functions, where
         the attribute is dropped -- confirm against the full source.  */
2254 *no_add_attrs = true;
2259 /* Return an integer type with the number of bits of precision given by
2260 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
2261 it is a signed type. */
2264 gnat_type_for_size (unsigned precision, int unsignedp)
      /* Reuse a previously created type for this precision/signedness if we
         cached one (the cache covers up to 2 * MAX_BITS_PER_WORD bits).  */
2269 if (precision <= 2 * MAX_BITS_PER_WORD
2270 && signed_and_unsigned_types[precision][unsignedp])
2271 return signed_and_unsigned_types[precision][unsignedp];
2274 t = make_unsigned_type (precision);
2276 t = make_signed_type (precision);
2278 if (precision <= 2 * MAX_BITS_PER_WORD)
2279 signed_and_unsigned_types[precision][unsignedp] = t;
      /* Name the new type SIGNED_<n> or UNSIGNED_<n>.  NOTE(review): this is
         presumably guarded by a TYPE_NAME check elided here -- confirm.  */
2283 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
2284 TYPE_NAME (t) = get_identifier (type_name);
2290 /* Likewise for floating-point types. */
2293 float_type_for_precision (int precision, enum machine_mode mode)
      /* One cached type per machine mode is enough for floating points.  */
2298 if (float_types[(int) mode])
2299 return float_types[(int) mode];
2301 float_types[(int) mode] = t = make_node (REAL_TYPE);
2302 TYPE_PRECISION (t) = precision;
      /* Setting the precision must have selected exactly MODE.  */
2305 gcc_assert (TYPE_MODE (t) == mode);
2308 sprintf (type_name, "FLOAT_%d", precision);
2309 TYPE_NAME (t) = get_identifier (type_name);
2315 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
2316 an unsigned type; otherwise a signed type is returned. */
2319 gnat_type_for_mode (enum machine_mode mode, int unsignedp)
      /* NOTE(review): the BLKmode and COMPLEX_MODE_P branch bodies are not
         visible here -- presumably they return NULL_TREE; confirm.  */
2321 if (mode == BLKmode)
2323 else if (mode == VOIDmode)
2324 return void_type_node;
2325 else if (COMPLEX_MODE_P (mode))
2327 else if (SCALAR_FLOAT_MODE_P (mode))
2328 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
2329 else if (SCALAR_INT_MODE_P (mode))
2330 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2335 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
2338 gnat_unsigned_type (tree type_node)
2340 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
      /* For a modular type, or a subtype of one, keep a link back to the
         original type in TREE_TYPE so the modular semantics are preserved;
         copy the node first so the cached type is not modified.  */
2342 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2344 type = copy_node (type);
2345 TREE_TYPE (type) = type_node;
2347 else if (TREE_TYPE (type_node)
2348 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2349 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2351 type = copy_node (type);
2352 TREE_TYPE (type) = TREE_TYPE (type_node);
2358 /* Return the signed version of a TYPE_NODE, a scalar type. */
2361 gnat_signed_type (tree type_node)
2363 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
      /* Same modular-type handling as in gnat_unsigned_type: preserve the
         link to the modular (sub)type on a private copy of the node.  */
2365 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2367 type = copy_node (type);
2368 TREE_TYPE (type) = type_node;
2370 else if (TREE_TYPE (type_node)
2371 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2372 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2374 type = copy_node (type);
2375 TREE_TYPE (type) = TREE_TYPE (type_node);
2382 /* EXP is an expression for the size of an object. If this size contains
2383 discriminant references, replace them with the maximum (if MAX_P) or
2384 minimum (if !MAX_P) possible value of the discriminant. */
2387 max_size (tree exp, bool max_p)
2389 enum tree_code code = TREE_CODE (exp);
2390 tree type = TREE_TYPE (exp);
2392 switch (TREE_CODE_CLASS (code))
2394 case tcc_declaration:
      /* For a call, recurse into each argument and rebuild the call.  */
2399 if (code == CALL_EXPR)
2402 int i, n = call_expr_nargs (exp);
2405 argarray = (tree *) alloca (n * sizeof (tree));
2406 for (i = 0; i < n; i++)
2407 argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
2408 return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
2413 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
2414 modify. Otherwise, we treat it like a variable. */
2415 if (!CONTAINS_PLACEHOLDER_P (exp))
2418 type = TREE_TYPE (TREE_OPERAND (exp, 1));
2420 max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), true);
2422 case tcc_comparison:
      /* A comparison yields 0 or 1; bound it accordingly.  */
2423 return max_p ? size_one_node : size_zero_node;
2427 case tcc_expression:
2428 switch (TREE_CODE_LENGTH (code))
2431 if (code == NON_LVALUE_EXPR)
2432 return max_size (TREE_OPERAND (exp, 0), max_p);
      /* For a unary operator, recurse; NEGATE flips the max/min sense.  */
2435 fold_build1 (code, type,
2436 max_size (TREE_OPERAND (exp, 0),
2437 code == NEGATE_EXPR ? !max_p : max_p));
2440 if (code == COMPOUND_EXPR)
2441 return max_size (TREE_OPERAND (exp, 1), max_p);
2443 /* Calculate "(A ? B : C) - D" as "A ? B - D : C - D" which
2444 may provide a tighter bound on max_size. */
2445 if (code == MINUS_EXPR
2446 && TREE_CODE (TREE_OPERAND (exp, 0)) == COND_EXPR)
2448 tree lhs = fold_build2 (MINUS_EXPR, type,
2449 TREE_OPERAND (TREE_OPERAND (exp, 0), 1),
2450 TREE_OPERAND (exp, 1));
2451 tree rhs = fold_build2 (MINUS_EXPR, type,
2452 TREE_OPERAND (TREE_OPERAND (exp, 0), 2),
2453 TREE_OPERAND (exp, 1));
2454 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2455 max_size (lhs, max_p),
2456 max_size (rhs, max_p));
2460 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
2461 tree rhs = max_size (TREE_OPERAND (exp, 1),
2462 code == MINUS_EXPR ? !max_p : max_p);
2464 /* Special-case wanting the maximum value of a MIN_EXPR.
2465 In that case, if one side overflows, return the other.
2466 sizetype is signed, but we know sizes are non-negative.
2467 Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
2468 overflowing or the maximum possible value and the RHS
      a variable. */
2472 && TREE_CODE (rhs) == INTEGER_CST
2473 && TREE_OVERFLOW (rhs))
2477 && TREE_CODE (lhs) == INTEGER_CST
2478 && TREE_OVERFLOW (lhs))
2480 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
2481 && ((TREE_CODE (lhs) == INTEGER_CST
2482 && TREE_OVERFLOW (lhs))
2483 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
2484 && !TREE_CONSTANT (rhs))
2487 return fold_build2 (code, type, lhs, rhs);
2491 if (code == SAVE_EXPR)
2493 else if (code == COND_EXPR)
2494 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2495 max_size (TREE_OPERAND (exp, 1), max_p),
2496 max_size (TREE_OPERAND (exp, 2), max_p));
2499 /* Other tree classes cannot happen. */
2507 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2508 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2509 Return a constructor for the template. */
2512 build_template (tree template_type, tree array_type, tree expr)
2514 tree template_elts = NULL_TREE;
2515 tree bound_list = NULL_TREE;
      /* Peel off any padding or justified-modular wrappers to reach the
         underlying array type.  */
2518 while (TREE_CODE (array_type) == RECORD_TYPE
2519 && (TYPE_IS_PADDING_P (array_type)
2520 || TYPE_JUSTIFIED_MODULAR_P (array_type)))
2521 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
2523 if (TREE_CODE (array_type) == ARRAY_TYPE
2524 || (TREE_CODE (array_type) == INTEGER_TYPE
2525 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
2526 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
2528 /* First make the list for a CONSTRUCTOR for the template. Go down the
2529 field list of the template instead of the type chain because this
2530 array might be an Ada array of arrays and we can't tell where the
2531 nested arrays stop being the underlying object. */
2533 for (field = TYPE_FIELDS (template_type); field;
2535 ? (bound_list = TREE_CHAIN (bound_list))
2536 : (array_type = TREE_TYPE (array_type))),
      /* Fields come in (min, max) pairs, hence the double chain step.  */
2537 field = TREE_CHAIN (TREE_CHAIN (field)))
2539 tree bounds, min, max;
2541 /* If we have a bound list, get the bounds from there. Likewise
2542 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
2543 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
2544 This will give us a maximum range. */
2546 bounds = TREE_VALUE (bound_list);
2547 else if (TREE_CODE (array_type) == ARRAY_TYPE)
2548 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
2549 else if (expr && TREE_CODE (expr) == PARM_DECL
2550 && DECL_BY_COMPONENT_PTR_P (expr))
2551 bounds = TREE_TYPE (field);
2555 min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
2556 max = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MAX_VALUE (bounds));
2558 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
2559 substitute it from OBJECT. */
2560 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
2561 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
2563 template_elts = tree_cons (TREE_CHAIN (field), max,
2564 tree_cons (field, min, template_elts));
2567 return gnat_build_constructor (template_type, nreverse (template_elts));
2570 /* Build a VMS descriptor from a Mechanism_Type, which must specify
2571 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2572 in the type contains in its DECL_INITIAL the expression to use when
2573 a constructor is made for the type. GNAT_ENTITY is an entity used
2574 to print out an error message if the mechanism cannot be applied to
2575 an object of that type and also for the name. */
2578 build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2580 tree record_type = make_node (RECORD_TYPE);
2581 tree pointer32_type;
2582 tree field_list = 0;
2591 /* If TYPE is an unconstrained array, use the underlying array type. */
2592 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2593 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2595 /* If this is an array, compute the number of dimensions in the array,
2596 get the index types, and point to the inner type. */
2597 if (TREE_CODE (type) != ARRAY_TYPE)
2600 for (ndim = 1, inner_type = type;
2601 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2602 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2603 ndim++, inner_type = TREE_TYPE (inner_type))
2606 idx_arr = (tree *) alloca (ndim * sizeof (tree));
      /* Fortran convention arrays store their indices in reverse order,
         except for NCA descriptors.  */
2608 if (mech != By_Descriptor_NCA
2609 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2610 for (i = ndim - 1, inner_type = type;
2612 i--, inner_type = TREE_TYPE (inner_type))
2613 idx_arr[i] = TYPE_DOMAIN (inner_type);
2615 for (i = 0, inner_type = type;
2617 i++, inner_type = TREE_TYPE (inner_type))
2618 idx_arr[i] = TYPE_DOMAIN (inner_type);
2620 /* Now get the DTYPE value.  NOTE(review): these numeric codes appear to
      be the VMS DSC$K_DTYPE_* values -- confirm against the OpenVMS calling
      standard. */
2621 switch (TREE_CODE (type))
2625 if (TYPE_VAX_FLOATING_POINT_P (type))
2626 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2639 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2642 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2645 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2648 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2651 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2654 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2660 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2664 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2665 && TYPE_VAX_FLOATING_POINT_P (type))
2666 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2678 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2689 /* Get the CLASS value. */
2692 case By_Descriptor_A:
2695 case By_Descriptor_NCA:
2698 case By_Descriptor_SB:
2702 case By_Descriptor_S:
2708 /* Make the type for a descriptor for VMS. The first four fields
2709 are the same for all types. */
2712 = chainon (field_list,
2713 make_descriptor_field
2714 ("LENGTH", gnat_type_for_size (16, 1), record_type,
2715 size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));
2717 field_list = chainon (field_list,
2718 make_descriptor_field ("DTYPE",
2719 gnat_type_for_size (8, 1),
2720 record_type, size_int (dtype)));
2721 field_list = chainon (field_list,
2722 make_descriptor_field ("CLASS",
2723 gnat_type_for_size (8, 1),
2724 record_type, size_int (class)));
2726 /* Of course this will crash at run-time if the address space is not
2727 within the low 32 bits, but there is nothing else we can do. */
2728 pointer32_type = build_pointer_type_for_mode (type, SImode, false);
2731 = chainon (field_list,
2732 make_descriptor_field
2733 ("POINTER", pointer32_type, record_type,
2734 build_unary_op (ADDR_EXPR,
2736 build0 (PLACEHOLDER_EXPR, type))));
      /* The remaining fields depend on the descriptor class.  */
2741 case By_Descriptor_S:
2744 case By_Descriptor_SB:
2746 = chainon (field_list,
2747 make_descriptor_field
2748 ("SB_L1", gnat_type_for_size (32, 1), record_type,
2749 TREE_CODE (type) == ARRAY_TYPE
2750 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2752 = chainon (field_list,
2753 make_descriptor_field
2754 ("SB_U1", gnat_type_for_size (32, 1), record_type,
2755 TREE_CODE (type) == ARRAY_TYPE
2756 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2759 case By_Descriptor_A:
2760 case By_Descriptor_NCA:
2761 field_list = chainon (field_list,
2762 make_descriptor_field ("SCALE",
2763 gnat_type_for_size (8, 1),
2767 field_list = chainon (field_list,
2768 make_descriptor_field ("DIGITS",
2769 gnat_type_for_size (8, 1),
2774 = chainon (field_list,
2775 make_descriptor_field
2776 ("AFLAGS", gnat_type_for_size (8, 1), record_type,
2777 size_int (mech == By_Descriptor_NCA
2779 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2780 : (TREE_CODE (type) == ARRAY_TYPE
2781 && TYPE_CONVENTION_FORTRAN_P (type)
2784 field_list = chainon (field_list,
2785 make_descriptor_field ("DIMCT",
2786 gnat_type_for_size (8, 1),
2790 field_list = chainon (field_list,
2791 make_descriptor_field ("ARSIZE",
2792 gnat_type_for_size (32, 1),
2794 size_in_bytes (type)));
2796 /* Now build a pointer to the 0,0,0... element. */
2797 tem = build0 (PLACEHOLDER_EXPR, type);
2798 for (i = 0, inner_type = type; i < ndim;
2799 i++, inner_type = TREE_TYPE (inner_type))
2800 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
2801 convert (TYPE_DOMAIN (inner_type), size_zero_node),
2802 NULL_TREE, NULL_TREE);
2805 = chainon (field_list,
2806 make_descriptor_field
2808 build_pointer_type_for_mode (inner_type, SImode, false),
2811 build_pointer_type_for_mode (inner_type, SImode,
2815 /* Next come the addressing coefficients. */
2816 tem = size_one_node;
2817 for (i = 0; i < ndim; i++)
      /* Each coefficient is the number of elements spanned by the
         corresponding dimension: product of the extents so far.  */
2821 = size_binop (MULT_EXPR, tem,
2822 size_binop (PLUS_EXPR,
2823 size_binop (MINUS_EXPR,
2824 TYPE_MAX_VALUE (idx_arr[i]),
2825 TYPE_MIN_VALUE (idx_arr[i])),
2828 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
2829 fname[1] = '0' + i, fname[2] = 0;
2831 = chainon (field_list,
2832 make_descriptor_field (fname,
2833 gnat_type_for_size (32, 1),
2834 record_type, idx_length));
2836 if (mech == By_Descriptor_NCA)
2840 /* Finally here are the bounds. */
2841 for (i = 0; i < ndim; i++)
2845 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2847 = chainon (field_list,
2848 make_descriptor_field
2849 (fname, gnat_type_for_size (32, 1), record_type,
2850 TYPE_MIN_VALUE (idx_arr[i])));
2854 = chainon (field_list,
2855 make_descriptor_field
2856 (fname, gnat_type_for_size (32, 1), record_type,
2857 TYPE_MAX_VALUE (idx_arr[i])));
2862 post_error ("unsupported descriptor type for &", gnat_entity);
2865 finish_record_type (record_type, field_list, 0, true);
2866 create_type_decl (create_concat_name (gnat_entity, "DESC"), record_type,
2867 NULL, true, false, gnat_entity);
2872 /* Utility routine for above code to make a field. The DECL_INITIAL of
      the field holds the expression to use when a constructor is made for
      the descriptor type (see build_vms_descriptor). */
2875 make_descriptor_field (const char *name, tree type,
2876 tree rec_type, tree initial)
2879 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
2881 DECL_INITIAL (field) = initial;
2885 /* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
2886 pointer or fat pointer type. GNAT_SUBPROG is the subprogram to which
2887 the VMS descriptor is passed. */
2890 convert_vms_descriptor (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
2892 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
2893 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
2894 /* The CLASS field is the 3rd field in the descriptor. */
2895 tree class = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type)));
2896 /* The POINTER field is the 4th field in the descriptor. */
2897 tree pointer = TREE_CHAIN (class);
2899 /* Retrieve the value of the POINTER field. */
2901 = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);
      /* For a thin pointer target, the POINTER field value is all we need.  */
2903 if (POINTER_TYPE_P (gnu_type))
2904 return convert (gnu_type, gnu_expr);
2906 else if (TYPE_FAT_POINTER_P (gnu_type))
2908 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
2909 tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
2910 tree template_type = TREE_TYPE (p_bounds_type);
2911 tree min_field = TYPE_FIELDS (template_type);
2912 tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
2913 tree template, template_addr, aflags, dimct, t, u;
2914 /* See the head comment of build_vms_descriptor. */
2915 int iclass = TREE_INT_CST_LOW (DECL_INITIAL (class));
2917 /* Convert POINTER to the type of the P_ARRAY field. */
2918 gnu_expr = convert (p_array_type, gnu_expr);
2922 case 1: /* Class S */
2923 case 15: /* Class SB */
2924 /* Build {1, LENGTH} template; LENGTH is the 1st field. */
2925 t = TYPE_FIELDS (desc_type);
2926 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
2927 t = tree_cons (min_field,
2928 convert (TREE_TYPE (min_field), integer_one_node),
2929 tree_cons (max_field,
2930 convert (TREE_TYPE (max_field), t),
2932 template = gnat_build_constructor (template_type, t);
2933 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);
2935 /* For class S, we are done. */
2939 /* Test that we really have a SB descriptor, like DEC Ada. */
2940 t = build3 (COMPONENT_REF, TREE_TYPE (class), desc, class, NULL);
2941 u = convert (TREE_TYPE (class), DECL_INITIAL (class));
2942 u = build_binary_op (EQ_EXPR, integer_type_node, t, u);
2943 /* If so, there is already a template in the descriptor and
2944 it is located right after the POINTER field. */
2945 t = TREE_CHAIN (pointer);
2946 template = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
2947 /* Otherwise use the {1, LENGTH} template we built above. */
2948 template_addr = build3 (COND_EXPR, p_bounds_type, u,
2949 build_unary_op (ADDR_EXPR, p_bounds_type,
2954 case 4: /* Class A */
2955 /* The AFLAGS field is the 7th field in the descriptor. */
2956 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer)));
2957 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
2958 /* The DIMCT field is the 8th field in the descriptor. */
2960 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
2961 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
2962 or FL_COEFF or FL_BOUNDS not set. */
2963 u = build_int_cst (TREE_TYPE (aflags), 192);
2964 u = build_binary_op (TRUTH_OR_EXPR, integer_type_node,
2965 build_binary_op (NE_EXPR, integer_type_node,
2967 convert (TREE_TYPE (dimct),
2969 build_binary_op (NE_EXPR, integer_type_node,
2970 build2 (BIT_AND_EXPR,
2974 add_stmt (build3 (COND_EXPR, void_type_node, u,
2975 build_call_raise (CE_Length_Check_Failed, Empty,
2976 N_Raise_Constraint_Error),
2978 /* There is already a template in the descriptor and it is
2979 located at the start of block 3 (12th field). */
2980 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (t))));
2981 template = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
2982 template_addr = build_unary_op (ADDR_EXPR, p_bounds_type, template);
2985 case 10: /* Class NCA */
2987 post_error ("unsupported descriptor type for &", gnat_subprog);
2988 template_addr = integer_zero_node;
2992 /* Build the fat pointer in the form of a constructor. */
2993 t = tree_cons (TYPE_FIELDS (gnu_type), gnu_expr,
2994 tree_cons (TREE_CHAIN (TYPE_FIELDS (gnu_type)),
2995 template_addr, NULL_TREE));
2996 return gnat_build_constructor (gnu_type, t);
3003 /* Build a stub for the subprogram specified by the GCC tree GNU_SUBPROG
3004 and the GNAT node GNAT_SUBPROG. The stub converts any parameter passed
      by VMS descriptor into a by-reference one and forwards the call to the
      internal subprogram. */
3007 build_function_stub (tree gnu_subprog, Entity_Id gnat_subprog)
3009 tree gnu_subprog_type, gnu_subprog_addr, gnu_subprog_call;
3010 tree gnu_stub_param, gnu_param_list, gnu_arg_types, gnu_param;
3011 tree gnu_stub_decl = DECL_FUNCTION_STUB (gnu_subprog);
3014 gnu_subprog_type = TREE_TYPE (gnu_subprog);
3015 gnu_param_list = NULL_TREE;
3017 begin_subprog_body (gnu_stub_decl);
3020 start_stmt_group ();
3022 /* Loop over the parameters of the stub and translate any of them
3023 passed by descriptor into a by reference one. */
3024 for (gnu_stub_param = DECL_ARGUMENTS (gnu_stub_decl),
3025 gnu_arg_types = TYPE_ARG_TYPES (gnu_subprog_type);
3027 gnu_stub_param = TREE_CHAIN (gnu_stub_param),
3028 gnu_arg_types = TREE_CHAIN (gnu_arg_types))
3030 if (DECL_BY_DESCRIPTOR_P (gnu_stub_param))
3031 gnu_param = convert_vms_descriptor (TREE_VALUE (gnu_arg_types),
3032 gnu_stub_param, gnat_subprog)
3034 gnu_param = gnu_stub_param;
3036 gnu_param_list = tree_cons (NULL_TREE, gnu_param, gnu_param_list);
3039 gnu_body = end_stmt_group ();
3041 /* Invoke the internal subprogram. */
3042 gnu_subprog_addr = build1 (ADDR_EXPR, build_pointer_type (gnu_subprog_type),
3044 gnu_subprog_call = build_call_list (TREE_TYPE (gnu_subprog_type),
3046 nreverse (gnu_param_list));
3048 /* Propagate the return value, if any. */
3049 if (VOID_TYPE_P (TREE_TYPE (gnu_subprog_type)))
3050 append_to_statement_list (gnu_subprog_call, &gnu_body);
3052 append_to_statement_list (build_return_expr (DECL_RESULT (gnu_stub_decl),
3058 allocate_struct_function (gnu_stub_decl, false);
3059 end_subprog_body (gnu_body);
3062 /* Build a type to be used to represent an aliased object whose nominal
3063 type is an unconstrained array. This consists of a RECORD_TYPE containing
3064 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
3065 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
3066 is used to represent an arbitrary unconstrained object. Use NAME
3067 as the name of the record. */
3070 build_unc_object_type (tree template_type, tree object_type, tree name)
3072 tree type = make_node (RECORD_TYPE);
      /* The BOUNDS field comes first and carries the template (bounds);
         the ARRAY field holds the actual data.  */
3073 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
3074 template_type, type, 0, 0, 0, 1);
3075 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
3078 TYPE_NAME (type) = name;
3079 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
3080 finish_record_type (type,
3081 chainon (chainon (NULL_TREE, template_field),
3088 /* Same, taking a thin or fat pointer type instead of a template type. */
3091 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
3096 gcc_assert (TYPE_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
      /* For a fat pointer, the template type hangs off the second field (the
         bounds pointer); for a thin pointer, off the first field of the
         designated record.  */
3099 = (TYPE_FAT_POINTER_P (thin_fat_ptr_type)
3100 ? TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
3101 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
3102 return build_unc_object_type (template_type, object_type, name);
3105 /* Shift the component offsets within an unconstrained object TYPE to make it
3106 suitable for use as a designated type for thin pointers. */
3109 shift_unc_components_for_thin_pointers (tree type)
3111 /* Thin pointer values designate the ARRAY data of an unconstrained object,
3112 allocated past the BOUNDS template. The designated type is adjusted to
3113 have ARRAY at position zero and the template at a negative offset, so
3114 that COMPONENT_REFs on (*thin_ptr) designate the proper location. */
3116 tree bounds_field = TYPE_FIELDS (type);
3117 tree array_field = TREE_CHAIN (TYPE_FIELDS (type));
3119 /* Move BOUNDS to minus its own size, i.e. just before ARRAY's old spot.  */
     DECL_FIELD_OFFSET (bounds_field)
3120 = size_binop (MINUS_EXPR, size_zero_node, byte_position (array_field));
3122 DECL_FIELD_OFFSET (array_field) = size_zero_node;
3123 DECL_FIELD_BIT_OFFSET (array_field) = bitsize_zero_node;
3126 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE. In
3127 the normal case this is just two adjustments, but we have more to do
3128 if NEW is an UNCONSTRAINED_ARRAY_TYPE. */
3131 update_pointer_to (tree old_type, tree new_type)
3133 tree ptr = TYPE_POINTER_TO (old_type);
3134 tree ref = TYPE_REFERENCE_TO (old_type);
3138 /* If this is the main variant, process all the other variants first. */
3139 if (TYPE_MAIN_VARIANT (old_type) == old_type)
3140 for (type = TYPE_NEXT_VARIANT (old_type); type;
3141 type = TYPE_NEXT_VARIANT (type))
3142 update_pointer_to (type, new_type);
3144 /* If no pointer or reference, we are done. */
3148 /* Merge the old type qualifiers in the new type.
3150 Each old variant has qualifiers for specific reasons, and the new
3151 designated type as well. Each set of qualifiers represents useful
3152 information grabbed at some point, and merging the two simply unifies
3153 these inputs into the final type description.
3155 Consider for instance a volatile type frozen after an access to constant
3156 type designating it. After the designated type freeze, we get here with a
3157 volatile new_type and a dummy old_type with a readonly variant, created
3158 when the access type was processed. We shall make a volatile and readonly
3159 designated type, because that's what it really is.
3161 We might also get here for a non-dummy old_type variant with different
3162 qualifiers than the new_type ones, for instance in some cases of pointers
3163 to private record type elaboration (see the comments around the call to
3164 this routine from gnat_to_gnu_entity/E_Access_Type). We have to merge the
3165 qualifiers in those cases too, to avoid accidentally discarding the
3166 initial set, and will often end up with old_type == new_type then. */
3167 new_type = build_qualified_type (new_type,
3168 TYPE_QUALS (old_type)
3169 | TYPE_QUALS (new_type));
3171 /* If the new type and the old one are identical, there is nothing to
3173 if (old_type == new_type)
3176 /* Otherwise, first handle the simple case. */
3177 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
3179 TYPE_POINTER_TO (new_type) = ptr;
3180 TYPE_REFERENCE_TO (new_type) = ref;
/* Redirect every pointer (and each of its variants) in the old chain
   to the new designated type.  */
3182 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
3183 for (ptr1 = TYPE_MAIN_VARIANT (ptr); ptr1;
3184 ptr1 = TYPE_NEXT_VARIANT (ptr1))
3185 TREE_TYPE (ptr1) = new_type;
/* Likewise for every reference in the old chain.  */
3187 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
3188 for (ref1 = TYPE_MAIN_VARIANT (ref); ref1;
3189 ref1 = TYPE_NEXT_VARIANT (ref1))
3190 TREE_TYPE (ref1) = new_type;
3193 /* Now deal with the unconstrained array case. In this case the "pointer"
3194 is actually a RECORD_TYPE where both fields are pointers to dummy nodes.
3195 Turn them into pointers to the correct types using update_pointer_to. */
3196 else if (TREE_CODE (ptr) != RECORD_TYPE || !TYPE_IS_FAT_POINTER_P (ptr))
3201 tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
3202 tree array_field = TYPE_FIELDS (ptr);
3203 tree bounds_field = TREE_CHAIN (TYPE_FIELDS (ptr));
3204 tree new_ptr = TYPE_POINTER_TO (new_type);
3208 /* Make pointers to the dummy template point to the real template. */
3210 (TREE_TYPE (TREE_TYPE (bounds_field)),
3211 TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_ptr)))));
3213 /* The references to the template bounds present in the array type
3214 are made through a PLACEHOLDER_EXPR of type new_ptr. Since we
3215 are updating ptr to make it a full replacement for new_ptr as
3216 pointer to new_type, we must rework the PLACEHOLDER_EXPR so as
3217 to make it of type ptr. */
3218 new_ref = build3 (COMPONENT_REF, TREE_TYPE (bounds_field),
3219 build0 (PLACEHOLDER_EXPR, ptr),
3220 bounds_field, NULL_TREE);
3222 /* Create the new array for the new PLACEHOLDER_EXPR and make
3223 pointers to the dummy array point to it.
3225 ??? This is now the only use of substitute_in_type,
3226 which is a very "heavy" routine to do this, so it
3227 should be replaced at some point. */
3229 (TREE_TYPE (TREE_TYPE (array_field)),
3230 substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))),
3231 TREE_CHAIN (TYPE_FIELDS (new_ptr)), new_ref));
3233 /* Make ptr the pointer to new_type. */
3234 TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
3235 = TREE_TYPE (new_type) = ptr;
3237 for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
3238 SET_TYPE_UNCONSTRAINED_ARRAY (var, new_type);
3240 /* Now handle updating the allocation record, what the thin pointer
3241 points to. Update all pointers from the old record into the new
3242 one, update the type of the array field, and recompute the size. */
3243 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);
3245 TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
3246 = TREE_TYPE (TREE_TYPE (array_field));
3248 /* The size recomputation needs to account for alignment constraints, so
3249 we let layout_type work it out. This will reset the field offsets to
3250 what they would be in a regular record, so we shift them back to what
3251 we want them to be for a thin pointer designated type afterwards. */
3252 DECL_SIZE (TYPE_FIELDS (new_obj_rec)) = 0;
3253 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))) = 0;
3254 TYPE_SIZE (new_obj_rec) = 0;
3255 layout_type (new_obj_rec);
3257 shift_unc_components_for_thin_pointers (new_obj_rec);
3259 /* We are done, at last. */
3260 rest_of_record_type_compilation (ptr);
3264 /* Convert a pointer to a constrained array into a pointer to a fat
3265 pointer. This involves making or finding a template. */
3268 convert_to_fat_pointer (tree type, tree expr)
3270 tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
3271 tree template, template_addr;
3272 tree etype = TREE_TYPE (expr);
3274 /* If EXPR is a constant of zero, we make a fat pointer that has a null
3275 pointer to the template and array. */
3276 if (integer_zerop (expr))
3278 gnat_build_constructor
3280 tree_cons (TYPE_FIELDS (type),
3281 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
3282 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3283 convert (build_pointer_type (template_type),
3287 /* If EXPR is a thin pointer, make the template and data from the record. */
3289 else if (TYPE_THIN_POINTER_P (etype))
3291 tree fields = TYPE_FIELDS (TREE_TYPE (etype));
3293 expr = save_expr (expr);
/* If EXPR names an object directly through an ADDR_EXPR, strip the
   address operation; otherwise dereference the thin pointer.  */
3294 if (TREE_CODE (expr) == ADDR_EXPR)
3295 expr = TREE_OPERAND (expr, 0);
3297 expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
/* The first field of the designated record is the bounds template,
   the second is the array data; take the address of the latter.  */
3299 template = build_component_ref (expr, NULL_TREE, fields, false);
3300 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
3301 build_component_ref (expr, NULL_TREE,
3302 TREE_CHAIN (fields), false));
3305 /* Otherwise, build the constructor for the template. */
3306 template = build_template (template_type, TREE_TYPE (etype), expr);
3308 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);
3310 /* The result is a CONSTRUCTOR for the fat pointer.
3312 If expr is an argument of a foreign convention subprogram, the type it
3313 points to is directly the component type. In this case, the expression
3314 type may not match the corresponding FIELD_DECL type at this point, so we
3315 call "convert" here to fix that up if necessary. This type consistency is
3316 required, for instance because it ensures that possible later folding of
3317 component_refs against this constructor always yields something of the
3318 same type as the initial reference.
3320 Note that the call to "build_template" above is still fine, because it
3321 will only refer to the provided template_type in this case. */
3323 gnat_build_constructor
3324 (type, tree_cons (TYPE_FIELDS (type),
3325 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
3326 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3327 template_addr, NULL_TREE)));
3330 /* Convert to a thin pointer type, TYPE. The only thing we know how to convert
3331 is something that is a fat pointer, so convert EXPR to a fat pointer
3332 first if it is not already one. */
3335 convert_to_thin_pointer (tree type, tree expr)
3337 if (!TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
3339 = convert_to_fat_pointer
3340 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
3342 /* We get the pointer to the data and use a NOP_EXPR to make it the
3344 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)),
3346 expr = build1 (NOP_EXPR, type, expr);
3351 /* Create an expression whose value is that of EXPR,
3352 converted to type TYPE. The TREE_TYPE of the value
3353 is always TYPE. This function implements all reasonable
3354 conversions; callers should filter out those that are
3355 not permitted by the language being compiled. */
3358 convert (tree type, tree expr)
3360 enum tree_code code = TREE_CODE (type);
3361 tree etype = TREE_TYPE (expr);
3362 enum tree_code ecode = TREE_CODE (etype);
3364 /* If EXPR is already the right type, we are done. */
3368 /* If both input and output have padding and are of variable size, do this
3369 as an unchecked conversion. Likewise if one is a mere variant of the
3370 other, so we avoid a pointless unpad/repad sequence. */
3371 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
3372 && TYPE_IS_PADDING_P (type) && TYPE_IS_PADDING_P (etype)
3373 && (!TREE_CONSTANT (TYPE_SIZE (type))
3374 || !TREE_CONSTANT (TYPE_SIZE (etype))
3375 || TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)))
3378 /* If the output type has padding, make a constructor to build the
3380 else if (code == RECORD_TYPE && TYPE_IS_PADDING_P (type))
3382 /* If we previously converted from another type and our type is
3383 of variable size, remove the conversion to avoid the need for
3384 variable-size temporaries. */
3385 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3386 && !TREE_CONSTANT (TYPE_SIZE (type)))
3387 expr = TREE_OPERAND (expr, 0);
3389 /* If we are just removing the padding from expr, convert the original
3390 object if we have variable size. That will avoid the need
3391 for some variable-size temporaries. */
3392 if (TREE_CODE (expr) == COMPONENT_REF
3393 && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE
3394 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
3395 && !TREE_CONSTANT (TYPE_SIZE (type)))
3396 return convert (type, TREE_OPERAND (expr, 0));
3398 /* If the result type is a padded type with a self-referentially-sized
3399 field and the expression type is a record, do this as an
3400 unchecked conversion. */
3401 else if (TREE_CODE (etype) == RECORD_TYPE
3402 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
3403 return unchecked_convert (type, expr, false);
3407 gnat_build_constructor (type,
3408 tree_cons (TYPE_FIELDS (type),
3410 (TYPE_FIELDS (type)),
3415 /* If the input type has padding, remove it and convert to the output type.
3416 The conditions ordering is arranged to ensure that the output type is not
3417 a padding type here, as it is not clear whether the conversion would
3418 always be correct if this was to happen. */
3419 else if (ecode == RECORD_TYPE && TYPE_IS_PADDING_P (etype))
3423 /* If we have just converted to this padded type, just get the
3424 inner expression. */
3425 if (TREE_CODE (expr) == CONSTRUCTOR
3426 && !VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (expr))
3427 && VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->index
3428 == TYPE_FIELDS (etype))
3430 = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->value;
3432 /* Otherwise, build an explicit component reference. */
3435 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
3437 return convert (type, unpadded);
3440 /* If the input is a biased type, adjust first. */
3441 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
3442 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
3443 fold_convert (TREE_TYPE (etype),
3445 TYPE_MIN_VALUE (etype)));
3447 /* If the input is a justified modular type, we need to extract the actual
3448 object before converting it to any other type with the exceptions of an
3449 unconstrained array or of a mere type variant. It is useful to avoid the
3450 extraction and conversion in the type variant case because it could end
3451 up replacing a VAR_DECL expr by a constructor and we might be about to
3452 take the address of the result. */
3453 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
3454 && code != UNCONSTRAINED_ARRAY_TYPE
3455 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
3456 return convert (type, build_component_ref (expr, NULL_TREE,
3457 TYPE_FIELDS (etype), false));
3459 /* If converting to a type that contains a template, convert to the data
3460 type and then build the template. */
3461 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
3463 tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));
3465 /* If the source already has a template, get a reference to the
3466 associated array only, as we are going to rebuild a template
3467 for the target type anyway. */
3468 expr = maybe_unconstrained_array (expr);
3471 gnat_build_constructor
3473 tree_cons (TYPE_FIELDS (type),
3474 build_template (TREE_TYPE (TYPE_FIELDS (type)),
3475 obj_type, NULL_TREE),
3476 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3477 convert (obj_type, expr), NULL_TREE)));
3480 /* There are some special cases of expressions that we process
3482 switch (TREE_CODE (expr))
3488 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
3489 conversion in gnat_expand_expr. NULL_EXPR does not represent
3490 an actual value, so no conversion is needed. */
3491 expr = copy_node (expr);
3492 TREE_TYPE (expr) = type;
3496 /* If we are converting a STRING_CST to another constrained array type,
3497 just make a new one in the proper type. */
3498 if (code == ecode && AGGREGATE_TYPE_P (etype)
3499 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
3500 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
3502 expr = copy_node (expr);
3503 TREE_TYPE (expr) = type;
3509 /* If we are converting a CONSTRUCTOR to another constrained array type
3510 with the same domain, just make a new one in the proper type. */
3511 if (code == ecode && code == ARRAY_TYPE
3512 && TREE_TYPE (type) == TREE_TYPE (etype)
3513 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (type)),
3514 TYPE_MIN_VALUE (TYPE_DOMAIN (etype)))
3515 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (type)),
3516 TYPE_MAX_VALUE (TYPE_DOMAIN (etype))))
3518 expr = copy_node (expr);
3519 TREE_TYPE (expr) = type;
3524 case UNCONSTRAINED_ARRAY_REF:
3525 /* Convert this to the type of the inner array by getting the address of
3526 the array from the template. */
3527 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
3528 build_component_ref (TREE_OPERAND (expr, 0),
3529 get_identifier ("P_ARRAY"),
3531 etype = TREE_TYPE (expr);
3532 ecode = TREE_CODE (etype);
3535 case VIEW_CONVERT_EXPR:
3537 /* GCC 4.x is very sensitive to type consistency overall, and view
3538 conversions thus are very frequent. Even though just "convert"ing
3539 the inner operand to the output type is fine in most cases, it
3540 might expose unexpected input/output type mismatches in special
3541 circumstances so we avoid such recursive calls when we can. */
3543 tree op0 = TREE_OPERAND (expr, 0);
3545 /* If we are converting back to the original type, we can just
3546 lift the input conversion. This is a common occurrence with
3547 switches back-and-forth amongst type variants. */
3548 if (type == TREE_TYPE (op0))
3551 /* Otherwise, if we're converting between two aggregate types, we
3552 might be allowed to substitute the VIEW_CONVERT target type in
3553 place or to just convert the inner expression. */
3554 if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
3556 /* If we are converting between type variants, we can just
3557 substitute the VIEW_CONVERT in place. */
3558 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
3559 return build1 (VIEW_CONVERT_EXPR, type, op0);
3561 /* Otherwise, we may just bypass the input view conversion unless
3562 one of the types is a fat pointer, which is handled by
3563 specialized code below which relies on exact type matching. */
3564 else if (!TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
3565 return convert (type, op0);
3571 /* If both types are record types, just convert the pointer and
3572 make a new INDIRECT_REF.
3574 ??? Disable this for now since it causes problems with the
3575 code in build_binary_op for MODIFY_EXPR which wants to
3576 strip off conversions. But that code really is a mess and
3577 we need to do this a much better way some time. */
3579 && (TREE_CODE (type) == RECORD_TYPE
3580 || TREE_CODE (type) == UNION_TYPE)
3581 && (TREE_CODE (etype) == RECORD_TYPE
3582 || TREE_CODE (etype) == UNION_TYPE)
3583 && !TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
3584 return build_unary_op (INDIRECT_REF, NULL_TREE,
3585 convert (build_pointer_type (type),
3586 TREE_OPERAND (expr, 0)));
3593 /* Check for converting to a pointer to an unconstrained array. */
3594 if (TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
3595 return convert_to_fat_pointer (type, expr);
3597 /* If we're converting between two aggregate types that have the same main
3598 variant, just make a VIEW_CONVERT_EXPR. */
3599 else if (AGGREGATE_TYPE_P (type)
3600 && TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
3601 return build1 (VIEW_CONVERT_EXPR, type, expr);
3603 /* In all other cases of related types, make a NOP_EXPR. */
3604 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
3605 || (code == INTEGER_CST && ecode == INTEGER_CST
3606 && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
3607 return fold_convert (type, expr);
3612 return fold_build1 (CONVERT_EXPR, type, expr);
3615 return fold_convert (type, gnat_truthvalue_conversion (expr));
3618 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
3619 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
3620 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
3621 return unchecked_convert (type, expr, false);
3622 else if (TYPE_BIASED_REPRESENTATION_P (type))
3623 return fold_convert (type,
3624 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
3625 convert (TREE_TYPE (type), expr),
3626 TYPE_MIN_VALUE (type)));
3628 /* ... fall through ... */
3631 return fold (convert_to_integer (type, expr));
3634 case REFERENCE_TYPE:
3635 /* If converting between two pointers to records denoting
3636 both a template and type, adjust if needed to account
3637 for any differing offsets, since one might be negative. */
3638 if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
3641 = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
3642 bit_position (TYPE_FIELDS (TREE_TYPE (type))));
3643 tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
3644 sbitsize_int (BITS_PER_UNIT));
3646 expr = build1 (NOP_EXPR, type, expr);
3647 TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
3648 if (integer_zerop (byte_diff))
3651 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
3652 fold (convert (sizetype, byte_diff)));
3655 /* If converting to a thin pointer, handle specially. */
3656 if (TYPE_THIN_POINTER_P (type)
3657 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
3658 return convert_to_thin_pointer (type, expr);
3660 /* If converting fat pointer to normal pointer, get the pointer to the
3661 array and then convert it. */
3662 else if (TYPE_FAT_POINTER_P (etype))
3663 expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
3666 return fold (convert_to_pointer (type, expr));
3669 return fold (convert_to_real (type, expr));
3672 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
3674 gnat_build_constructor
3675 (type, tree_cons (TYPE_FIELDS (type),
3676 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
3679 /* ... fall through ... */
3682 /* In these cases, assume the front-end has validated the conversion.
3683 If the conversion is valid, it will be a bit-wise conversion, so
3684 it can be viewed as an unchecked conversion. */
3685 return unchecked_convert (type, expr, false);
3688 /* This is either a conversion between a tagged type and some
3689 subtype, which we have to mark as a UNION_TYPE because of
3690 overlapping fields or a conversion of an Unchecked_Union. */
3691 return unchecked_convert (type, expr, false);
3693 case UNCONSTRAINED_ARRAY_TYPE:
3694 /* If EXPR is a constrained array, take its address, convert it to a
3695 fat pointer, and then dereference it. Likewise if EXPR is a
3696 record containing both a template and a constrained array.
3697 Note that a record representing a justified modular type
3698 always represents a packed constrained array. */
3699 if (ecode == ARRAY_TYPE
3700 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
3701 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
3702 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
3705 (INDIRECT_REF, NULL_TREE,
3706 convert_to_fat_pointer (TREE_TYPE (type),
3707 build_unary_op (ADDR_EXPR,
3710 /* Do something very similar for converting one unconstrained
3711 array to another. */
3712 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
3714 build_unary_op (INDIRECT_REF, NULL_TREE,
3715 convert (TREE_TYPE (type),
3716 build_unary_op (ADDR_EXPR,
3722 return fold (convert_to_complex (type, expr));
3729 /* Remove all conversions that are done in EXP. This includes converting
3730 from a padded type or to a justified modular type. If TRUE_ADDRESS
3731 is true, always return the address of the containing object even if
3732 the address is not bit-aligned. */
3735 remove_conversions (tree exp, bool true_address)
3737 switch (TREE_CODE (exp))
/* A constructor for a justified modular record is itself a conversion:
   recurse on the single element's value.  */
3741 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3742 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp))
3744 remove_conversions (VEC_index (constructor_elt,
3745 CONSTRUCTOR_ELTS (exp), 0)->value,
/* A COMPONENT_REF into a padding record is the unpadding conversion.  */
3750 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
3751 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
3752 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3755 case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
3756 case NOP_EXPR: case CONVERT_EXPR:
3757 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3766 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
3767 refers to the underlying array. If its type has TYPE_CONTAINS_TEMPLATE_P,
3768 likewise return an expression pointing to the underlying array. */
3771 maybe_unconstrained_array (tree exp)
3773 enum tree_code code = TREE_CODE (exp);
3776 switch (TREE_CODE (TREE_TYPE (exp)))
3778 case UNCONSTRAINED_ARRAY_TYPE:
3779 if (code == UNCONSTRAINED_ARRAY_REF)
/* Dereference the P_ARRAY pointer found in the fat pointer.  */
3782 = build_unary_op (INDIRECT_REF, NULL_TREE,
3783 build_component_ref (TREE_OPERAND (exp, 0),
3784 get_identifier ("P_ARRAY"),
/* Propagate read-only-ness from the original reference.  */
3786 TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp);
3790 else if (code == NULL_EXPR)
3791 return build1 (NULL_EXPR,
3792 TREE_TYPE (TREE_TYPE (TYPE_FIELDS
3793 (TREE_TYPE (TREE_TYPE (exp))))),
3794 TREE_OPERAND (exp, 0));
3797 /* If this is a padded type, convert to the unpadded type and see if
3798 it contains a template. */
3799 if (TYPE_IS_PADDING_P (TREE_TYPE (exp)))
3801 new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp))), exp);
3802 if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
3803 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (new)))
3805 build_component_ref (new, NULL_TREE,
3806 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (new))),
3809 else if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
3811 build_component_ref (exp, NULL_TREE,
3812 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))), 0);
3822 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
3823 If NOTRUNC_P is true, truncation operations should be suppressed. */
3826 unchecked_convert (tree type, tree expr, bool notrunc_p)
3828 tree etype = TREE_TYPE (expr);
3830 /* If the expression is already the right type, we are done. */
3834 /* If both types are integral just do a normal conversion.
3835 Likewise for a conversion to an unconstrained array. */
3836 if ((((INTEGRAL_TYPE_P (type)
3837 && !(TREE_CODE (type) == INTEGER_TYPE
3838 && TYPE_VAX_FLOATING_POINT_P (type)))
3839 || (POINTER_TYPE_P (type) && ! TYPE_THIN_POINTER_P (type))
3840 || (TREE_CODE (type) == RECORD_TYPE
3841 && TYPE_JUSTIFIED_MODULAR_P (type)))
3842 && ((INTEGRAL_TYPE_P (etype)
3843 && !(TREE_CODE (etype) == INTEGER_TYPE
3844 && TYPE_VAX_FLOATING_POINT_P (etype)))
3845 || (POINTER_TYPE_P (etype) && !TYPE_THIN_POINTER_P (etype))
3846 || (TREE_CODE (etype) == RECORD_TYPE
3847 && TYPE_JUSTIFIED_MODULAR_P (etype))))
3848 || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3851 bool final_unchecked = false;
/* Strip a biased representation from the input by converting through
   an unbiased copy of the input type.  */
3853 if (TREE_CODE (etype) == INTEGER_TYPE
3854 && TYPE_BIASED_REPRESENTATION_P (etype))
3856 tree ntype = copy_type (etype);
3858 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
3859 TYPE_MAIN_VARIANT (ntype) = ntype;
3860 expr = build1 (NOP_EXPR, ntype, expr);
/* Likewise target a non-biased copy of the output type first.  */
3863 if (TREE_CODE (type) == INTEGER_TYPE
3864 && TYPE_BIASED_REPRESENTATION_P (type))
3866 rtype = copy_type (type);
3867 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
3868 TYPE_MAIN_VARIANT (rtype) = rtype;
3871 /* We have another special case: if we are unchecked converting subtype
3872 into a base type, we need to ensure that VRP doesn't propagate range
3873 information since this conversion may be done precisely to validate
3874 that the object is within the range it is supposed to have. */
3875 else if (TREE_CODE (expr) != INTEGER_CST
3876 && TREE_CODE (type) == INTEGER_TYPE && !TREE_TYPE (type)
3877 && ((TREE_CODE (etype) == INTEGER_TYPE && TREE_TYPE (etype))
3878 || TREE_CODE (etype) == ENUMERAL_TYPE
3879 || TREE_CODE (etype) == BOOLEAN_TYPE))
3881 /* The optimization barrier is a VIEW_CONVERT_EXPR node; moreover,
3882 in order not to be deemed a useless type conversion, it must
3883 be from subtype to base type.
3885 ??? This may raise addressability and/or aliasing issues because
3886 VIEW_CONVERT_EXPR gets gimplified as an lvalue, thus causing the
3887 address of its operand to be taken if it is deemed addressable
3888 and not already in GIMPLE form. */
3889 rtype = gnat_type_for_mode (TYPE_MODE (type), TYPE_UNSIGNED (type));
3890 rtype = copy_type (rtype);
3891 TYPE_MAIN_VARIANT (rtype) = rtype;
3892 TREE_TYPE (rtype) = type;
3893 final_unchecked = true;
3896 expr = convert (rtype, expr);
3898 expr = fold_build1 (final_unchecked ? VIEW_CONVERT_EXPR : NOP_EXPR,
3902 /* If we are converting TO an integral type whose precision is not the
3903 same as its size, first unchecked convert to a record that contains
3904 an object of the output type. Then extract the field. */
3905 else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
3906 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
3907 GET_MODE_BITSIZE (TYPE_MODE (type))))
3909 tree rec_type = make_node (RECORD_TYPE);
3910 tree field = create_field_decl (get_identifier ("OBJ"), type,
3911 rec_type, 1, 0, 0, 0);
3913 TYPE_FIELDS (rec_type) = field;
3914 layout_type (rec_type);
3916 expr = unchecked_convert (rec_type, expr, notrunc_p);
3917 expr = build_component_ref (expr, NULL_TREE, field, 0);
3920 /* Similarly for integral input type whose precision is not equal to its
3922 else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype)
3923 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
3924 GET_MODE_BITSIZE (TYPE_MODE (etype))))
3926 tree rec_type = make_node (RECORD_TYPE);
3928 = create_field_decl (get_identifier ("OBJ"), etype, rec_type,
3931 TYPE_FIELDS (rec_type) = field;
3932 layout_type (rec_type);
3934 expr = gnat_build_constructor (rec_type, build_tree_list (field, expr));
3935 expr = unchecked_convert (type, expr, notrunc_p);
3938 /* We have a special case when we are converting between two
3939 unconstrained array types. In that case, take the address,
3940 convert the fat pointer types, and dereference. */
3941 else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
3942 && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3943 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
3944 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
3945 build_unary_op (ADDR_EXPR, NULL_TREE,
3949 expr = maybe_unconstrained_array (expr);
3950 etype = TREE_TYPE (expr);
3951 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
3954 /* If the result is an integral type whose size is not equal to
3955 the size of the underlying machine type, sign- or zero-extend
3956 the result. We need not do this in the case where the input is
3957 an integral type of the same precision and signedness or if the output
3958 is a biased type or if both the input and output are unsigned. */
3960 && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
3961 && !(TREE_CODE (type) == INTEGER_TYPE
3962 && TYPE_BIASED_REPRESENTATION_P (type))
3963 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
3964 GET_MODE_BITSIZE (TYPE_MODE (type)))
3965 && !(INTEGRAL_TYPE_P (etype)
3966 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
3967 && operand_equal_p (TYPE_RM_SIZE (type),
3968 (TYPE_RM_SIZE (etype) != 0
3969 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
3971 && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
3973 tree base_type = gnat_type_for_mode (TYPE_MODE (type),
3974 TYPE_UNSIGNED (type));
/* Shift count to move the RM-sized value to the top of the machine
   word and back, which performs the sign/zero extension.  */
3976 = convert (base_type,
3977 size_binop (MINUS_EXPR,
3979 (GET_MODE_BITSIZE (TYPE_MODE (type))),
3980 TYPE_RM_SIZE (type)));
3983 build_binary_op (RSHIFT_EXPR, base_type,
3984 build_binary_op (LSHIFT_EXPR, base_type,
3985 convert (base_type, expr),
3990 /* An unchecked conversion should never raise Constraint_Error. The code
3991 below assumes that GCC's conversion routines overflow the same way that
3992 the underlying hardware does. This is probably true. In the rare case
3993 when it is false, we can rely on the fact that such conversions are
3994 erroneous anyway. */
3995 if (TREE_CODE (expr) == INTEGER_CST)
3996 TREE_OVERFLOW (expr) = 0;
3998 /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
3999 show no longer constant. */
4000 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4001 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
4003 TREE_CONSTANT (expr) = 0;
4008 /* Search the chain of currently available builtin declarations for a node
4009 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
4010 found, if any, or NULL_TREE otherwise. */
4012 builtin_decl_for (tree name)
/* Linear scan of the registered builtin decls; first match wins.  */
4017 for (i = 0; VEC_iterate(tree, builtin_decls, i, decl); i++)
4018 if (DECL_NAME (decl) == name)
4024 /* Return the appropriate GCC tree code for the specified GNAT type,
4025 the latter being a record type as predicated by Is_Record_Type. */
4028 tree_code_for_record_type (Entity_Id gnat_type)
4030 Node_Id component_list
4031 = Component_List (Type_Definition
4033 (Implementation_Base_Type (gnat_type))));
4036 /* Make this a UNION_TYPE unless it's either not an Unchecked_Union or
4037 we have a non-discriminant field outside a variant. In either case,
4038 it's a RECORD_TYPE. */
4040 if (!Is_Unchecked_Union (gnat_type))
/* Scan the component items; any E_Component outside the variant part
   forces a RECORD_TYPE.  */
4043 for (component = First_Non_Pragma (Component_Items (component_list));
4044 Present (component);
4045 component = Next_Non_Pragma (component))
4046 if (Ekind (Defining_Entity (component)) == E_Component)
4052 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
4053 component of an aggregate type. */
4056 type_for_nonaliased_component_p (tree gnu_type)
4058 /* If the type is passed by reference, we may have pointers to the
4059 component so it cannot be made non-aliased. */
4060 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
4063 /* We used to say that any component of aggregate type is aliased
4064 because the front-end may take 'Reference of it. The front-end
4065 has been enhanced in the meantime so as to use a renaming instead
4066 in most cases, but the back-end can probably take the address of
4067 such a component too so we go for the conservative stance.
4069 For instance, we might need the address of any array type, even
4070 if normally passed by copy, to construct a fat pointer if the
4071 component is used as an actual for an unconstrained formal.
4073 Likewise for record types: even if a specific record subtype is
4074 passed by copy, the parent type might be passed by ref (e.g. if
4075 it's of variable size) and we might take the address of a child
4076 component to pass to a parent formal. We have no way to check
4077 for such conditions here. */
4078 if (AGGREGATE_TYPE_P (gnu_type))
4084 /* Perform final processing on global variables. */
4087 gnat_write_global_declarations (void)
4089 /* Proceed to optimize and emit assembly.
4090 FIXME: shouldn't be the front end's responsibility to call this. */
4093 /* Emit debug info for all global declarations. */
4094 emit_debug_global_declarations (VEC_address (tree, global_decls),
4095 VEC_length (tree, global_decls));
4098 #include "gt-ada-utils.h"
4099 #include "gtype-ada.h"