1 /****************************************************************************
3 * GNAT COMPILER COMPONENTS *
7 * C Implementation File *
9 * Copyright (C) 1992-2008, Free Software Foundation, Inc. *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
24 ****************************************************************************/
28 #include "coretypes.h"
41 #include "tree-inline.h"
42 #include "tree-gimple.h"
43 #include "tree-dump.h"
44 #include "pointer-set.h"
60 #ifndef MAX_FIXED_MODE_SIZE
61 #define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
64 #ifndef MAX_BITS_PER_WORD
65 #define MAX_BITS_PER_WORD BITS_PER_WORD
68 /* If nonzero, pretend we are allocating at global level. */
71 /* Tree nodes for the various types and decls we create. */
72 tree gnat_std_decls[(int) ADT_LAST];
74 /* Functions to call for each of the possible raise reasons. */
75 tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
77 /* Forward declarations for handlers of attributes. */
78 static tree handle_const_attribute (tree *, tree, tree, int, bool *);
79 static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
81 /* Table of machine-independent internal attributes for Ada. We support
82 this minimal set of attributes to accommodate the Alpha back-end which
83 unconditionally puts them on its builtins. */
84 const struct attribute_spec gnat_internal_attribute_table[] =
86 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
87 { "const", 0, 0, true, false, false, handle_const_attribute },
88 { "nothrow", 0, 0, true, false, false, handle_nothrow_attribute },
89 { NULL, 0, 0, false, false, false, NULL }
92 /* Associates a GNAT tree node to a GCC tree node. It is used in
93 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
94 of `save_gnu_tree' for more info. */
95 static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
97 #define GET_GNU_TREE(GNAT_ENTITY) \
98 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
100 #define SET_GNU_TREE(GNAT_ENTITY,VAL) \
101 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
103 #define PRESENT_GNU_TREE(GNAT_ENTITY) \
104 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
106 /* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
107 static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;
109 #define GET_DUMMY_NODE(GNAT_ENTITY) \
110 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
112 #define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
113 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
115 #define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
116 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
118 /* This variable keeps a table for types for each precision so that we only
119 allocate each of them once. Signed and unsigned types are kept separate.
121 Note that these types are only used when fold-const requests something
122 special. Perhaps we should NOT share these types; we'll see how it
124 static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
126 /* Likewise for float types, but record these by mode. */
127 static GTY(()) tree float_types[NUM_MACHINE_MODES];
129 /* For each binding contour we allocate a binding_level structure to indicate
130 the binding depth. */
132 struct gnat_binding_level GTY((chain_next ("%h.chain")))
134 /* The binding level containing this one (the enclosing binding level). */
135 struct gnat_binding_level *chain;
136 /* The BLOCK node for this level. */
138 /* If nonzero, the setjmp buffer that needs to be updated for any
139 variable-sized definition within this context. */
143 /* The binding level currently in effect. */
144 static GTY(()) struct gnat_binding_level *current_binding_level;
146 /* A chain of gnat_binding_level structures awaiting reuse. */
147 static GTY((deletable)) struct gnat_binding_level *free_binding_level;
149 /* An array of global declarations. */
150 static GTY(()) VEC(tree,gc) *global_decls;
152 /* An array of builtin declarations. */
153 static GTY(()) VEC(tree,gc) *builtin_decls;
155 /* An array of global renaming pointers. */
156 static GTY(()) VEC(tree,gc) *global_renaming_pointers;
158 /* A chain of unused BLOCK nodes. */
159 static GTY((deletable)) tree free_block_chain;
161 static void gnat_install_builtins (void);
162 static tree merge_sizes (tree, tree, tree, bool, bool);
163 static tree compute_related_constant (tree, tree);
164 static tree split_plus (tree, tree *);
165 static void gnat_gimplify_function (tree);
166 static tree float_type_for_precision (int, enum machine_mode);
167 static tree convert_to_fat_pointer (tree, tree);
168 static tree convert_to_thin_pointer (tree, tree);
169 static tree make_descriptor_field (const char *,tree, tree, tree);
170 static bool potential_alignment_gap (tree, tree, tree);
172 /* Initialize the association of GNAT nodes to GCC trees. */
175 init_gnat_to_gnu (void)
177 associate_gnat_to_gnu
178 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
181 /* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
182 which is to be associated with GNAT_ENTITY. Such GCC tree node is always
183 a ..._DECL node. If NO_CHECK is nonzero, the latter check is suppressed.
185 If GNU_DECL is zero, a previous association is to be reset. */
188 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
190 /* Check that GNAT_ENTITY is not already defined and that it is being set
191 to something which is a decl. Raise gigi 401 if not. Usually, this
192 means GNAT_ENTITY is defined twice, but occasionally is due to some
194 gcc_assert (!(gnu_decl
195 && (PRESENT_GNU_TREE (gnat_entity)
196 || (!no_check && !DECL_P (gnu_decl)))));
198 SET_GNU_TREE (gnat_entity, gnu_decl);
201 /* GNAT_ENTITY is a GNAT tree node for a defining identifier.
202 Return the ..._DECL node that was associated with it. If there is no tree
203 node associated with GNAT_ENTITY, abort.
205 In some cases, such as delayed elaboration or expressions that need to
206 be elaborated only once, GNAT_ENTITY is really not an entity. */
209 get_gnu_tree (Entity_Id gnat_entity)
211 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
212 return GET_GNU_TREE (gnat_entity);
215 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
218 present_gnu_tree (Entity_Id gnat_entity)
220 return PRESENT_GNU_TREE (gnat_entity);
223 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
226 init_dummy_type (void)
229 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
232 /* Make a dummy type corresponding to GNAT_TYPE. */
235 make_dummy_type (Entity_Id gnat_type)
237 Entity_Id gnat_underlying = Gigi_Equivalent_Type (gnat_type);
240 /* If there is an equivalent type, get its underlying type. */
241 if (Present (gnat_underlying))
242 gnat_underlying = Underlying_Type (gnat_underlying);
244 /* If there was no equivalent type (can only happen when just annotating
245 types) or underlying type, go back to the original type. */
246 if (No (gnat_underlying))
247 gnat_underlying = gnat_type;
249 /* If it there already a dummy type, use that one. Else make one. */
250 if (PRESENT_DUMMY_NODE (gnat_underlying))
251 return GET_DUMMY_NODE (gnat_underlying);
253 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
255 gnu_type = make_node (Is_Record_Type (gnat_underlying)
256 ? tree_code_for_record_type (gnat_underlying)
258 TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
259 TYPE_DUMMY_P (gnu_type) = 1;
260 if (AGGREGATE_TYPE_P (gnu_type))
262 TYPE_STUB_DECL (gnu_type) = build_decl (TYPE_DECL, NULL_TREE, gnu_type);
263 TYPE_BY_REFERENCE_P (gnu_type) = Is_By_Reference_Type (gnat_type);
266 SET_DUMMY_NODE (gnat_underlying, gnu_type);
271 /* Return nonzero if we are currently in the global binding level. */
274 global_bindings_p (void)
276 return ((force_global || !current_function_decl) ? -1 : 0);
279 /* Enter a new binding level. */
284 struct gnat_binding_level *newlevel = NULL;
286 /* Reuse a struct for this binding level, if there is one. */
287 if (free_binding_level)
289 newlevel = free_binding_level;
290 free_binding_level = free_binding_level->chain;
294 = (struct gnat_binding_level *)
295 ggc_alloc (sizeof (struct gnat_binding_level));
297 /* Use a free BLOCK, if any; otherwise, allocate one. */
298 if (free_block_chain)
300 newlevel->block = free_block_chain;
301 free_block_chain = BLOCK_CHAIN (free_block_chain);
302 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
305 newlevel->block = make_node (BLOCK);
307 /* Point the BLOCK we just made to its parent. */
308 if (current_binding_level)
309 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
311 BLOCK_VARS (newlevel->block) = BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
312 TREE_USED (newlevel->block) = 1;
314 /* Add this level to the front of the chain (stack) of levels that are
316 newlevel->chain = current_binding_level;
317 newlevel->jmpbuf_decl = NULL_TREE;
318 current_binding_level = newlevel;
321 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
322 and point FNDECL to this BLOCK. */
325 set_current_block_context (tree fndecl)
327 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
328 DECL_INITIAL (fndecl) = current_binding_level->block;
331 /* Set the jmpbuf_decl for the current binding level to DECL. */
334 set_block_jmpbuf_decl (tree decl)
336 current_binding_level->jmpbuf_decl = decl;
339 /* Get the jmpbuf_decl, if any, for the current binding level. */
342 get_block_jmpbuf_decl ()
344 return current_binding_level->jmpbuf_decl;
347 /* Exit a binding level. Set any BLOCK into the current code group. */
352 struct gnat_binding_level *level = current_binding_level;
353 tree block = level->block;
355 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
356 BLOCK_SUBBLOCKS (block) = nreverse (BLOCK_SUBBLOCKS (block));
358 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
359 are no variables free the block and merge its subblocks into those of its
360 parent block. Otherwise, add it to the list of its parent. */
361 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
363 else if (BLOCK_VARS (block) == NULL_TREE)
365 BLOCK_SUBBLOCKS (level->chain->block)
366 = chainon (BLOCK_SUBBLOCKS (block),
367 BLOCK_SUBBLOCKS (level->chain->block));
368 BLOCK_CHAIN (block) = free_block_chain;
369 free_block_chain = block;
373 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
374 BLOCK_SUBBLOCKS (level->chain->block) = block;
375 TREE_USED (block) = 1;
376 set_block_for_group (block);
379 /* Free this binding structure. */
380 current_binding_level = level->chain;
381 level->chain = free_binding_level;
382 free_binding_level = level;
/* NOTE (review): this numbered listing is elided -- interior lines (braces,
   `else' keywords, the function's return type) are missing between the
   embedded line numbers, so the text below is kept strictly verbatim.
   The control-flow structure must be confirmed against the full source.  */
386 /* Records a ..._DECL node DECL as belonging to the current lexical scope
387 and uses GNAT_NODE for location information and propagating flags. */
390 gnat_pushdecl (tree decl, Node_Id gnat_node)
392 /* If at top level, there is no context. But PARM_DECLs always go in the
393 level of its function. */
394 if (global_bindings_p () && TREE_CODE (decl) != PARM_DECL)
395 DECL_CONTEXT (decl) = 0;
398 DECL_CONTEXT (decl) = current_function_decl;
400 /* Functions imported in another function are not really nested. */
401 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_PUBLIC (decl))
402 DECL_NO_STATIC_CHAIN (decl) = 1;
405 TREE_NO_WARNING (decl) = (gnat_node == Empty || Warnings_Off (gnat_node));
407 /* Set the location of DECL and emit a declaration for it. */
408 if (Present (gnat_node))
409 Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
410 add_decl_expr (decl, gnat_node);
412 /* Put the declaration on the list. The list of declarations is in reverse
413 order. The list will be reversed later. Put global variables in the
414 globals list and builtin functions in a dedicated list to speed up
415 further lookups. Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into
416 the list, as they will cause trouble with the debugger and aren't needed
418 if (TREE_CODE (decl) != TYPE_DECL
419 || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
421 if (global_bindings_p ())
423 VEC_safe_push (tree, gc, global_decls, decl);
425 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
426 VEC_safe_push (tree, gc, builtin_decls, decl);
/* NOTE (review): the two lines below are presumably the `else' branch for
   local declarations (chained onto the current binding level's BLOCK) --
   the `else' keyword itself is elided; confirm against the full source.  */
430 TREE_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
431 BLOCK_VARS (current_binding_level->block) = decl;
435 /* For the declaration of a type, set its name if it either is not already
436 set, was set to an IDENTIFIER_NODE, indicating an internal name,
437 or if the previous type name was not derived from a source name.
438 We'd rather have the type named with a real name and all the pointer
439 types to the same object have the same POINTER_TYPE node. Code in the
440 equivalent function of c-decl.c makes a copy of the type node here, but
441 that may cause us trouble with incomplete types. We make an exception
442 for fat pointer types because the compiler automatically builds them
443 for unconstrained array types and the debugger uses them to represent
444 both these and pointers to these. */
445 if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
447 tree t = TREE_TYPE (decl);
449 if (!TYPE_NAME (t) || TREE_CODE (TYPE_NAME (t)) == IDENTIFIER_NODE)
450 TYPE_NAME (t) = decl;
451 else if (TYPE_FAT_POINTER_P (t))
453 tree tt = build_variant_type_copy (t);
454 TYPE_NAME (tt) = decl;
455 TREE_USED (tt) = TREE_USED (t);
456 TREE_TYPE (decl) = tt;
457 DECL_ORIGINAL_TYPE (decl) = t;
459 else if (DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
460 TYPE_NAME (t) = decl;
/* NOTE (review): elided numbered listing -- the function header's return
   type, braces and at least one statement (between lines 473 and 476) are
   missing from this view; text kept strictly verbatim.  The statements are
   strictly order-dependent (sizetype must be set before common tree nodes
   pass 2), so do not reorder.  */
464 /* Do little here. Set up the standard declarations later after the
465 front end has been run. */
468 gnat_init_decl_processing (void)
470 /* Make the binding_level structure for global names. */
471 current_function_decl = 0;
472 current_binding_level = 0;
473 free_binding_level = 0;
476 build_common_tree_nodes (true, true);
478 /* In Ada, we use a signed type for SIZETYPE. Use the signed type
479 corresponding to the size of Pmode. In most cases when ptr_mode and
480 Pmode differ, C will use the width of ptr_mode as sizetype. But we get
481 far better code using the width of Pmode. Make this here since we need
482 this before we can expand the GNAT types. */
483 size_type_node = gnat_type_for_size (GET_MODE_BITSIZE (Pmode), 0);
484 set_sizetype (size_type_node);
485 build_common_tree_nodes_2 (0);
487 ptr_void_type_node = build_pointer_type (void_type_node);
489 gnat_install_builtins ();
492 /* Install the builtin functions we might need. */
495 gnat_install_builtins ()
497 /* Builtins used by generic middle-end optimizers. */
498 build_common_builtin_nodes ();
500 /* Target specific builtins, such as the AltiVec family on ppc. */
501 targetm.init_builtins ();
/* NOTE (review): elided numbered listing -- many interior lines (the
   function header's return type, braces, local declarations such as the
   loop index `i', several decl names on the left of the `=' continuation
   lines, and closing parentheses) are missing between the embedded line
   numbers.  The text below is kept strictly verbatim; confirm structure
   against the full source before editing.  */
504 /* Create the predefined scalar types such as `integer_type_node' needed
505 in the gcc back-end and initialize the global binding level. */
508 init_gigi_decls (tree long_long_float_type, tree exception_type)
513 /* Set the types that GCC and Gigi use from the front end. We would like
514 to do this for char_type_node, but it needs to correspond to the C
516 if (TREE_CODE (TREE_TYPE (long_long_float_type)) == INTEGER_TYPE)
518 /* In this case, the builtin floating point types are VAX float,
519 so make up a type for use. */
520 longest_float_type_node = make_node (REAL_TYPE);
521 TYPE_PRECISION (longest_float_type_node) = LONG_DOUBLE_TYPE_SIZE;
522 layout_type (longest_float_type_node);
523 create_type_decl (get_identifier ("longest float type"),
524 longest_float_type_node, NULL, false, true, Empty);
527 longest_float_type_node = TREE_TYPE (long_long_float_type);
529 except_type_node = TREE_TYPE (exception_type);
531 unsigned_type_node = gnat_type_for_size (INT_TYPE_SIZE, 1);
532 create_type_decl (get_identifier ("unsigned int"), unsigned_type_node,
533 NULL, false, true, Empty);
535 void_type_decl_node = create_type_decl (get_identifier ("void"),
536 void_type_node, NULL, false, true,
539 void_ftype = build_function_type (void_type_node, NULL_TREE);
540 ptr_void_ftype = build_pointer_type (void_ftype);
542 /* Now declare runtime functions. */
543 endlink = tree_cons (NULL_TREE, void_type_node, NULL_TREE);
545 /* malloc is a function declaration tree for a function to allocate
547 malloc_decl = create_subprog_decl (get_identifier ("__gnat_malloc"),
549 build_function_type (ptr_void_type_node,
550 tree_cons (NULL_TREE,
553 NULL_TREE, false, true, true, NULL,
555 DECL_IS_MALLOC (malloc_decl) = 1;
557 /* free is a function declaration tree for a function to free memory. */
559 = create_subprog_decl (get_identifier ("__gnat_free"), NULL_TREE,
560 build_function_type (void_type_node,
561 tree_cons (NULL_TREE,
564 NULL_TREE, false, true, true, NULL, Empty);
566 /* Make the types and functions used for exception processing. */
568 = build_array_type (gnat_type_for_mode (Pmode, 0),
569 build_index_type (build_int_cst (NULL_TREE, 5)));
570 create_type_decl (get_identifier ("JMPBUF_T"), jmpbuf_type, NULL,
572 jmpbuf_ptr_type = build_pointer_type (jmpbuf_type);
574 /* Functions to get and set the jumpbuf pointer for the current thread. */
576 = create_subprog_decl
577 (get_identifier ("system__soft_links__get_jmpbuf_address_soft"),
578 NULL_TREE, build_function_type (jmpbuf_ptr_type, NULL_TREE),
579 NULL_TREE, false, true, true, NULL, Empty);
580 /* Avoid creating superfluous edges to __builtin_setjmp receivers. */
581 DECL_IS_PURE (get_jmpbuf_decl) = 1;
584 = create_subprog_decl
585 (get_identifier ("system__soft_links__set_jmpbuf_address_soft"),
587 build_function_type (void_type_node,
588 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
589 NULL_TREE, false, true, true, NULL, Empty);
591 /* Function to get the current exception. */
593 = create_subprog_decl
594 (get_identifier ("system__soft_links__get_gnat_exception"),
596 build_function_type (build_pointer_type (except_type_node), NULL_TREE),
597 NULL_TREE, false, true, true, NULL, Empty);
598 /* Avoid creating superfluous edges to __builtin_setjmp receivers. */
599 DECL_IS_PURE (get_excptr_decl) = 1;
601 /* Functions that raise exceptions. */
603 = create_subprog_decl
604 (get_identifier ("__gnat_raise_nodefer_with_msg"), NULL_TREE,
605 build_function_type (void_type_node,
606 tree_cons (NULL_TREE,
607 build_pointer_type (except_type_node),
609 NULL_TREE, false, true, true, NULL, Empty);
611 /* Dummy objects to materialize "others" and "all others" in the exception
612 tables. These are exported by a-exexpr.adb, so see this unit for the
616 = create_var_decl (get_identifier ("OTHERS"),
617 get_identifier ("__gnat_others_value"),
618 integer_type_node, 0, 1, 0, 1, 1, 0, Empty);
621 = create_var_decl (get_identifier ("ALL_OTHERS"),
622 get_identifier ("__gnat_all_others_value"),
623 integer_type_node, 0, 1, 0, 1, 1, 0, Empty);
625 /* Hooks to call when entering/leaving an exception handler. */
627 = create_subprog_decl (get_identifier ("__gnat_begin_handler"), NULL_TREE,
628 build_function_type (void_type_node,
629 tree_cons (NULL_TREE,
632 NULL_TREE, false, true, true, NULL, Empty);
635 = create_subprog_decl (get_identifier ("__gnat_end_handler"), NULL_TREE,
636 build_function_type (void_type_node,
637 tree_cons (NULL_TREE,
640 NULL_TREE, false, true, true, NULL, Empty);
642 /* If in no exception handlers mode, all raise statements are redirected to
643 __gnat_last_chance_handler. No need to redefine raise_nodefer_decl, since
644 this procedure will never be called in this mode. */
645 if (No_Exception_Handlers_Set ())
648 = create_subprog_decl
649 (get_identifier ("__gnat_last_chance_handler"), NULL_TREE,
650 build_function_type (void_type_node,
651 tree_cons (NULL_TREE,
652 build_pointer_type (char_type_node),
653 tree_cons (NULL_TREE,
656 NULL_TREE, false, true, true, NULL, Empty);
658 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
659 gnat_raise_decls[i] = decl;
662 /* Otherwise, make one decl for each exception reason. */
663 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
667 sprintf (name, "__gnat_rcheck_%.2d", i);
669 = create_subprog_decl
670 (get_identifier (name), NULL_TREE,
671 build_function_type (void_type_node,
672 tree_cons (NULL_TREE,
675 tree_cons (NULL_TREE,
678 NULL_TREE, false, true, true, NULL, Empty);
681 /* Indicate that these never return. */
682 TREE_THIS_VOLATILE (raise_nodefer_decl) = 1;
683 TREE_SIDE_EFFECTS (raise_nodefer_decl) = 1;
684 TREE_TYPE (raise_nodefer_decl)
685 = build_qualified_type (TREE_TYPE (raise_nodefer_decl),
688 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
690 TREE_THIS_VOLATILE (gnat_raise_decls[i]) = 1;
691 TREE_SIDE_EFFECTS (gnat_raise_decls[i]) = 1;
692 TREE_TYPE (gnat_raise_decls[i])
693 = build_qualified_type (TREE_TYPE (gnat_raise_decls[i]),
697 /* setjmp returns an integer and has one operand, which is a pointer to
700 = create_subprog_decl
701 (get_identifier ("__builtin_setjmp"), NULL_TREE,
702 build_function_type (integer_type_node,
703 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
704 NULL_TREE, false, true, true, NULL, Empty);
706 DECL_BUILT_IN_CLASS (setjmp_decl) = BUILT_IN_NORMAL;
707 DECL_FUNCTION_CODE (setjmp_decl) = BUILT_IN_SETJMP;
709 /* update_setjmp_buf updates a setjmp buffer from the current stack pointer
711 update_setjmp_buf_decl
712 = create_subprog_decl
713 (get_identifier ("__builtin_update_setjmp_buf"), NULL_TREE,
714 build_function_type (void_type_node,
715 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
716 NULL_TREE, false, true, true, NULL, Empty);
718 DECL_BUILT_IN_CLASS (update_setjmp_buf_decl) = BUILT_IN_NORMAL;
719 DECL_FUNCTION_CODE (update_setjmp_buf_decl) = BUILT_IN_UPDATE_SETJMP_BUF;
721 main_identifier_node = get_identifier ("main");
/* NOTE (review): elided numbered listing -- interior lines (braces, `else'
   keywords, switch/case scaffolding around lines 860-895, local variable
   declarations such as `field' and `this_ada_size', and condition
   continuations, e.g. after line 828) are missing between the embedded
   line numbers.  The text below is kept strictly verbatim; the layout
   logic is highly order-dependent, so confirm against the full source
   before modifying.  */
724 /* Given a record type RECORD_TYPE and a chain of FIELD_DECL nodes FIELDLIST,
725 finish constructing the record or union type. If REP_LEVEL is zero, this
726 record has no representation clause and so will be entirely laid out here.
727 If REP_LEVEL is one, this record has a representation clause and has been
728 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
729 this record is derived from a parent record and thus inherits its layout;
730 only make a pass on the fields to finalize them. If DO_NOT_FINALIZE is
731 true, the record type is expected to be modified afterwards so it will
732 not be sent to the back-end for finalization. */
735 finish_record_type (tree record_type, tree fieldlist, int rep_level,
736 bool do_not_finalize)
738 enum tree_code code = TREE_CODE (record_type);
739 tree name = TYPE_NAME (record_type);
740 tree ada_size = bitsize_zero_node;
741 tree size = bitsize_zero_node;
742 bool had_size = TYPE_SIZE (record_type) != 0;
743 bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
744 bool had_align = TYPE_ALIGN (record_type) != 0;
747 if (name && TREE_CODE (name) == TYPE_DECL)
748 name = DECL_NAME (name);
750 TYPE_FIELDS (record_type) = fieldlist;
751 TYPE_STUB_DECL (record_type) = build_decl (TYPE_DECL, name, record_type);
753 /* We don't need both the typedef name and the record name output in
754 the debugging information, since they are the same. */
755 DECL_ARTIFICIAL (TYPE_STUB_DECL (record_type)) = 1;
757 /* Globally initialize the record first. If this is a rep'ed record,
758 that just means some initializations; otherwise, layout the record. */
761 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
762 TYPE_MODE (record_type) = BLKmode;
765 TYPE_SIZE_UNIT (record_type) = size_zero_node;
767 TYPE_SIZE (record_type) = bitsize_zero_node;
769 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
770 out just like a UNION_TYPE, since the size will be fixed. */
771 else if (code == QUAL_UNION_TYPE)
776 /* Ensure there isn't a size already set. There can be in an error
777 case where there is a rep clause but all fields have errors and
778 no longer have a position. */
779 TYPE_SIZE (record_type) = 0;
780 layout_type (record_type);
783 /* At this point, the position and size of each field is known. It was
784 either set before entry by a rep clause, or by laying out the type above.
786 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
787 to compute the Ada size; the GCC size and alignment (for rep'ed records
788 that are not padding types); and the mode (for rep'ed records). We also
789 clear the DECL_BIT_FIELD indication for the cases we know have not been
790 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
792 if (code == QUAL_UNION_TYPE)
793 fieldlist = nreverse (fieldlist);
795 for (field = fieldlist; field; field = TREE_CHAIN (field))
797 tree type = TREE_TYPE (field);
798 tree pos = bit_position (field);
799 tree this_size = DECL_SIZE (field);
802 if ((TREE_CODE (type) == RECORD_TYPE
803 || TREE_CODE (type) == UNION_TYPE
804 || TREE_CODE (type) == QUAL_UNION_TYPE)
805 && !TYPE_IS_FAT_POINTER_P (type)
806 && !TYPE_CONTAINS_TEMPLATE_P (type)
807 && TYPE_ADA_SIZE (type))
808 this_ada_size = TYPE_ADA_SIZE (type);
810 this_ada_size = this_size;
812 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
813 if (DECL_BIT_FIELD (field)
814 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
816 unsigned int align = TYPE_ALIGN (type);
818 /* In the general case, type alignment is required. */
819 if (value_factor_p (pos, align))
821 /* The enclosing record type must be sufficiently aligned.
822 Otherwise, if no alignment was specified for it and it
823 has been laid out already, bump its alignment to the
824 desired one if this is compatible with its size. */
825 if (TYPE_ALIGN (record_type) >= align)
827 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
828 DECL_BIT_FIELD (field) = 0;
832 && value_factor_p (TYPE_SIZE (record_type), align))
834 TYPE_ALIGN (record_type) = align;
835 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
836 DECL_BIT_FIELD (field) = 0;
840 /* In the non-strict alignment case, only byte alignment is. */
841 if (!STRICT_ALIGNMENT
842 && DECL_BIT_FIELD (field)
843 && value_factor_p (pos, BITS_PER_UNIT))
844 DECL_BIT_FIELD (field) = 0;
847 /* If we still have DECL_BIT_FIELD set at this point, we know the field
848 is technically not addressable. Except that it can actually be
849 addressed if the field is BLKmode and happens to be properly
851 DECL_NONADDRESSABLE_P (field)
852 |= DECL_BIT_FIELD (field) && DECL_MODE (field) != BLKmode;
854 /* A type must be as aligned as its most aligned field that is not
855 a bit-field. But this is already enforced by layout_type. */
856 if (rep_level > 0 && !DECL_BIT_FIELD (field))
857 TYPE_ALIGN (record_type)
858 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
863 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
864 size = size_binop (MAX_EXPR, size, this_size);
867 case QUAL_UNION_TYPE:
869 = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
870 this_ada_size, ada_size);
871 size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
876 /* Since we know here that all fields are sorted in order of
877 increasing bit position, the size of the record is one
878 higher than the ending bit of the last field processed
879 unless we have a rep clause, since in that case we might
880 have a field outside a QUAL_UNION_TYPE that has a higher ending
881 position. So use a MAX in that case. Also, if this field is a
882 QUAL_UNION_TYPE, we need to take into account the previous size in
883 the case of empty variants. */
885 = merge_sizes (ada_size, pos, this_ada_size,
886 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
888 = merge_sizes (size, pos, this_size,
889 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
897 if (code == QUAL_UNION_TYPE)
898 nreverse (fieldlist);
902 /* If this is a padding record, we never want to make the size smaller
903 than what was specified in it, if any. */
904 if (TREE_CODE (record_type) == RECORD_TYPE
905 && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
906 size = TYPE_SIZE (record_type);
908 /* Now set any of the values we've just computed that apply. */
909 if (!TYPE_IS_FAT_POINTER_P (record_type)
910 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
911 SET_TYPE_ADA_SIZE (record_type, ada_size);
915 tree size_unit = had_size_unit
916 ? TYPE_SIZE_UNIT (record_type)
918 size_binop (CEIL_DIV_EXPR, size,
920 unsigned int align = TYPE_ALIGN (record_type);
922 TYPE_SIZE (record_type) = variable_size (round_up (size, align));
923 TYPE_SIZE_UNIT (record_type)
924 = variable_size (round_up (size_unit, align / BITS_PER_UNIT));
926 compute_record_mode (record_type);
930 if (!do_not_finalize)
931 rest_of_record_type_compilation (record_type);
934 /* Wrap up compilation of RECORD_TYPE, i.e. most notably output all
935 the debug information associated with it. It need not be invoked
936 directly in most cases since finish_record_type takes care of doing
937 so, unless explicitly requested not to through DO_NOT_FINALIZE. */
940 rest_of_record_type_compilation (tree record_type)
942 tree fieldlist = TYPE_FIELDS (record_type);
944 enum tree_code code = TREE_CODE (record_type);
945 bool var_size = false;
947 for (field = fieldlist; field; field = TREE_CHAIN (field))
949 /* We need to make an XVE/XVU record if any field has variable size,
950 whether or not the record does. For example, if we have a union,
951 it may be that all fields, rounded up to the alignment, have the
952 same size, in which case we'll use that size. But the debug
953 output routines (except Dwarf2) won't be able to output the fields,
954 so we need to make the special record. */
955 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
956 /* If a field has a non-constant qualifier, the record will have
957 variable size too. */
958 || (code == QUAL_UNION_TYPE
959 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
966 /* If this record is of variable size, rename it so that the
967 debugger knows it is and make a new, parallel, record
968 that tells the debugger how the record is laid out. See
969 exp_dbug.ads. But don't do this for records that are padding
970 since they confuse GDB. */
972 && !(TREE_CODE (record_type) == RECORD_TYPE
973 && TYPE_IS_PADDING_P (record_type)))
976 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
977 ? UNION_TYPE : TREE_CODE (record_type));
978 tree orig_name = TYPE_NAME (record_type);
980 = (TREE_CODE (orig_name) == TYPE_DECL ? DECL_NAME (orig_name)
983 = concat_id_with_name (orig_id,
984 TREE_CODE (record_type) == QUAL_UNION_TYPE
986 tree last_pos = bitsize_zero_node;
988 tree prev_old_field = 0;
990 TYPE_NAME (new_record_type) = new_id;
991 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
992 TYPE_STUB_DECL (new_record_type)
993 = build_decl (TYPE_DECL, new_id, new_record_type);
994 DECL_ARTIFICIAL (TYPE_STUB_DECL (new_record_type)) = 1;
995 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
996 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
997 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
998 TYPE_SIZE_UNIT (new_record_type)
999 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
1001 /* Now scan all the fields, replacing each field with a new
1002 field corresponding to the new encoding. */
1003 for (old_field = TYPE_FIELDS (record_type); old_field;
1004 old_field = TREE_CHAIN (old_field))
1006 tree field_type = TREE_TYPE (old_field);
1007 tree field_name = DECL_NAME (old_field);
1009 tree curpos = bit_position (old_field);
1011 unsigned int align = 0;
1014 /* See how the position was modified from the last position.
1016 There are two basic cases we support: a value was added
1017 to the last position or the last position was rounded to
1018 a boundary and they something was added. Check for the
1019 first case first. If not, see if there is any evidence
1020 of rounding. If so, round the last position and try
1023 If this is a union, the position can be taken as zero. */
1025 if (TREE_CODE (new_record_type) == UNION_TYPE)
1026 pos = bitsize_zero_node, align = 0;
1028 pos = compute_related_constant (curpos, last_pos);
1030 if (!pos && TREE_CODE (curpos) == MULT_EXPR
1031 && host_integerp (TREE_OPERAND (curpos, 1), 1))
1033 tree offset = TREE_OPERAND (curpos, 0);
1034 align = tree_low_cst (TREE_OPERAND (curpos, 1), 1);
1036 /* Strip off any conversions. */
1037 while (TREE_CODE (offset) == NON_LVALUE_EXPR
1038 || TREE_CODE (offset) == NOP_EXPR
1039 || TREE_CODE (offset) == CONVERT_EXPR)
1040 offset = TREE_OPERAND (offset, 0);
1042 /* An offset which is a bitwise AND with a negative power of 2
1043 means an alignment corresponding to this power of 2. */
1044 if (TREE_CODE (offset) == BIT_AND_EXPR
1045 && host_integerp (TREE_OPERAND (offset, 1), 0)
1046 && tree_int_cst_sgn (TREE_OPERAND (offset, 1)) < 0)
1049 = - tree_low_cst (TREE_OPERAND (offset, 1), 0);
1050 if (exact_log2 (pow) > 0)
1054 pos = compute_related_constant (curpos,
1055 round_up (last_pos, align));
1057 else if (!pos && TREE_CODE (curpos) == PLUS_EXPR
1058 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
1059 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
1060 && host_integerp (TREE_OPERAND
1061 (TREE_OPERAND (curpos, 0), 1),
1066 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
1067 pos = compute_related_constant (curpos,
1068 round_up (last_pos, align));
1070 else if (potential_alignment_gap (prev_old_field, old_field,
1073 align = TYPE_ALIGN (field_type);
1074 pos = compute_related_constant (curpos,
1075 round_up (last_pos, align));
1078 /* If we can't compute a position, set it to zero.
1080 ??? We really should abort here, but it's too much work
1081 to get this correct for all cases. */
1084 pos = bitsize_zero_node;
1086 /* See if this type is variable-sized and make a pointer type
1087 and indicate the indirection if so. Beware that the debug
1088 back-end may adjust the position computed above according
1089 to the alignment of the field type, i.e. the pointer type
1090 in this case, if we don't preventively counter that. */
1091 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
1093 field_type = build_pointer_type (field_type);
1094 if (align != 0 && TYPE_ALIGN (field_type) > align)
1096 field_type = copy_node (field_type);
1097 TYPE_ALIGN (field_type) = align;
1102 /* Make a new field name, if necessary. */
1103 if (var || align != 0)
1108 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
1109 align / BITS_PER_UNIT);
1111 strcpy (suffix, "XVL");
1113 field_name = concat_id_with_name (field_name, suffix);
1116 new_field = create_field_decl (field_name, field_type,
1118 DECL_SIZE (old_field), pos, 0);
1119 TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
1120 TYPE_FIELDS (new_record_type) = new_field;
1122 /* If old_field is a QUAL_UNION_TYPE, take its size as being
1123 zero. The only time it's not the last field of the record
1124 is when there are other components at fixed positions after
1125 it (meaning there was a rep clause for every field) and we
1126 want to be able to encode them. */
1127 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
1128 (TREE_CODE (TREE_TYPE (old_field))
1131 : DECL_SIZE (old_field));
1132 prev_old_field = old_field;
1135 TYPE_FIELDS (new_record_type)
1136 = nreverse (TYPE_FIELDS (new_record_type));
1138 rest_of_type_decl_compilation (TYPE_STUB_DECL (new_record_type));
1141 rest_of_type_decl_compilation (TYPE_STUB_DECL (record_type));
1144 /* Utility function of above to merge LAST_SIZE, the previous size of a record
1145 with FIRST_BIT and SIZE that describe a field. SPECIAL is nonzero
1146 if this represents a QUAL_UNION_TYPE in which case we must look for
1147 COND_EXPRs and replace a value of zero with the old size. If HAS_REP
1148 is nonzero, we must take the MAX of the end position of this field
1149 with LAST_SIZE. In all other cases, we use FIRST_BIT plus SIZE.
1151 We return an expression for the size. */
1154 merge_sizes (tree last_size, tree first_bit, tree size, bool special,
1157 tree type = TREE_TYPE (last_size);
1160 if (!special || TREE_CODE (size) != COND_EXPR)
1162 new = size_binop (PLUS_EXPR, first_bit, size);
1164 new = size_binop (MAX_EXPR, last_size, new);
1168 new = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
1169 integer_zerop (TREE_OPERAND (size, 1))
1170 ? last_size : merge_sizes (last_size, first_bit,
1171 TREE_OPERAND (size, 1),
1173 integer_zerop (TREE_OPERAND (size, 2))
1174 ? last_size : merge_sizes (last_size, first_bit,
1175 TREE_OPERAND (size, 2),
1178 /* We don't need any NON_VALUE_EXPRs and they can confuse us (especially
1179 when fed through substitute_in_expr) into thinking that a constant
1180 size is not constant. */
1181 while (TREE_CODE (new) == NON_LVALUE_EXPR)
1182 new = TREE_OPERAND (new, 0);
1187 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1188 related by the addition of a constant. Return that constant if so. */
1191 compute_related_constant (tree op0, tree op1)
1193 tree op0_var, op1_var;
1194 tree op0_con = split_plus (op0, &op0_var);
1195 tree op1_con = split_plus (op1, &op1_var);
1196 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1198 if (operand_equal_p (op0_var, op1_var, 0))
1200 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1206 /* Utility function of above to split a tree OP which may be a sum, into a
1207 constant part, which is returned, and a variable part, which is stored
1208 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1212 split_plus (tree in, tree *pvar)
1214 /* Strip NOPS in order to ease the tree traversal and maximize the
1215 potential for constant or plus/minus discovery. We need to be careful
1216 to always return and set *pvar to bitsizetype trees, but it's worth
1220 *pvar = convert (bitsizetype, in);
1222 if (TREE_CODE (in) == INTEGER_CST)
1224 *pvar = bitsize_zero_node;
1225 return convert (bitsizetype, in);
1227 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1229 tree lhs_var, rhs_var;
1230 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1231 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
1233 if (lhs_var == TREE_OPERAND (in, 0)
1234 && rhs_var == TREE_OPERAND (in, 1))
1235 return bitsize_zero_node;
1237 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1238 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
1241 return bitsize_zero_node;
1244 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1245 subprogram. If it is void_type_node, then we are dealing with a procedure,
1246 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1247 PARM_DECL nodes that are the subprogram arguments. CICO_LIST is the
1248 copy-in/copy-out list to be stored into TYPE_CICO_LIST.
1249 RETURNS_UNCONSTRAINED is nonzero if the function returns an unconstrained
1250 object. RETURNS_BY_REF is nonzero if the function returns by reference.
1251 RETURNS_WITH_DSP is nonzero if the function is to return with a
1252 depressed stack pointer. RETURNS_BY_TARGET_PTR is true if the function
1253 is to be passed (as its first parameter) the address of the place to copy
1257 create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
1258 bool returns_unconstrained, bool returns_by_ref,
1259 bool returns_with_dsp, bool returns_by_target_ptr)
1261 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1262 the subprogram formal parameters. This list is generated by traversing the
1263 input list of PARM_DECL nodes. */
1264 tree param_type_list = NULL;
1268 for (param_decl = param_decl_list; param_decl;
1269 param_decl = TREE_CHAIN (param_decl))
1270 param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
1273 /* The list of the function parameter types has to be terminated by the void
1274 type to signal to the back-end that we are not dealing with a variable
1275 parameter subprogram, but that the subprogram has a fixed number of
1277 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1279 /* The list of argument types has been created in reverse
1281 param_type_list = nreverse (param_type_list);
1283 type = build_function_type (return_type, param_type_list);
1285 /* TYPE may have been shared since GCC hashes types. If it has a CICO_LIST
1286 or the new type should, make a copy of TYPE. Likewise for
1287 RETURNS_UNCONSTRAINED and RETURNS_BY_REF. */
1288 if (TYPE_CI_CO_LIST (type) || cico_list
1289 || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
1290 || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref
1291 || TYPE_RETURNS_BY_TARGET_PTR_P (type) != returns_by_target_ptr)
1292 type = copy_type (type);
1294 TYPE_CI_CO_LIST (type) = cico_list;
1295 TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
1296 TYPE_RETURNS_STACK_DEPRESSED (type) = returns_with_dsp;
1297 TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
1298 TYPE_RETURNS_BY_TARGET_PTR_P (type) = returns_by_target_ptr;
1302 /* Return a copy of TYPE but safe to modify in any way. */
1305 copy_type (tree type)
1307 tree new = copy_node (type);
1309 /* copy_node clears this field instead of copying it, because it is
1310 aliased with TREE_CHAIN. */
1311 TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type);
1313 TYPE_POINTER_TO (new) = 0;
1314 TYPE_REFERENCE_TO (new) = 0;
1315 TYPE_MAIN_VARIANT (new) = new;
1316 TYPE_NEXT_VARIANT (new) = 0;
1321 /* Return an INTEGER_TYPE of SIZETYPE with range MIN to MAX and whose
1322 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position of
1326 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
1328 /* First build a type for the desired range. */
1329 tree type = build_index_2_type (min, max);
1331 /* If this type has the TYPE_INDEX_TYPE we want, return it. Otherwise, if it
1332 doesn't have TYPE_INDEX_TYPE set, set it to INDEX. If TYPE_INDEX_TYPE
1333 is set, but not to INDEX, make a copy of this type with the requested
1334 index type. Note that we have no way of sharing these types, but that's
1335 only a small hole. */
1336 if (TYPE_INDEX_TYPE (type) == index)
1338 else if (TYPE_INDEX_TYPE (type))
1339 type = copy_type (type);
1341 SET_TYPE_INDEX_TYPE (type, index);
1342 create_type_decl (NULL_TREE, type, NULL, true, false, gnat_node);
1346 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type (a character
1347 string) and TYPE is a ..._TYPE node giving its data type.
1348 ARTIFICIAL_P is true if this is a declaration that was generated
1349 by the compiler. DEBUG_INFO_P is true if we need to write debugging
1350 information about this type. GNAT_NODE is used for the position of
1354 create_type_decl (tree type_name, tree type, struct attrib *attr_list,
1355 bool artificial_p, bool debug_info_p, Node_Id gnat_node)
1357 tree type_decl = build_decl (TYPE_DECL, type_name, type);
1358 enum tree_code code = TREE_CODE (type);
1360 DECL_ARTIFICIAL (type_decl) = artificial_p;
1362 if (!TYPE_IS_DUMMY_P (type))
1363 gnat_pushdecl (type_decl, gnat_node);
1365 process_attributes (type_decl, attr_list);
1367 /* Pass type declaration information to the debugger unless this is an
1368 UNCONSTRAINED_ARRAY_TYPE, which the debugger does not support,
1369 and ENUMERAL_TYPE or RECORD_TYPE which is handled separately, or
1370 type for which debugging information was not requested. */
1371 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
1372 DECL_IGNORED_P (type_decl) = 1;
1373 else if (code != ENUMERAL_TYPE
1374 && (code != RECORD_TYPE || TYPE_IS_FAT_POINTER_P (type))
1375 && !((code == POINTER_TYPE || code == REFERENCE_TYPE)
1376 && TYPE_IS_DUMMY_P (TREE_TYPE (type))))
1377 rest_of_type_decl_compilation (type_decl);
1382 /* Helper for create_var_decl and create_true_var_decl. Returns a GCC VAR_DECL
1385 VAR_NAME gives the name of the variable. ASM_NAME is its assembler name
1386 (if provided). TYPE is its data type (a GCC ..._TYPE node). VAR_INIT is
1387 the GCC tree for an optional initial expression; NULL_TREE if none.
1389 CONST_FLAG is true if this variable is constant, in which case we might
1390 return a CONST_DECL node unless CONST_DECL_ALLOWED_FLAG is false.
1392 PUBLIC_FLAG is true if this definition is to be made visible outside of
1393 the current compilation unit. This flag should be set when processing the
1394 variable definitions in a package specification. EXTERN_FLAG is nonzero
1395 when processing an external variable declaration (as opposed to a
1396 definition: no storage is to be allocated for the variable here).
1398 STATIC_FLAG is only relevant when not at top level. In that case
1399 it indicates whether to always allocate storage to the variable.
1401 GNAT_NODE is used for the position of the decl. */
1404 create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
1405 bool const_flag, bool const_decl_allowed_flag,
1406 bool public_flag, bool extern_flag, bool static_flag,
1407 struct attrib *attr_list, Node_Id gnat_node)
1411 && TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
1412 && (global_bindings_p () || static_flag
1413 ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init)) != 0
1414 : TREE_CONSTANT (var_init)));
1416 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
1417 case the initializer may be used in-lieu of the DECL node (as done in
1418 Identifier_to_gnu). This is useful to prevent the need of elaboration
1419 code when an identifier for which such a decl is made is in turn used as
1420 an initializer. We used to rely on CONST vs VAR_DECL for this purpose,
1421 but extra constraints apply to this choice (see below) and are not
1422 relevant to the distinction we wish to make. */
1423 bool constant_p = const_flag && init_const;
1425 /* The actual DECL node. CONST_DECL was initially intended for enumerals
1426 and may be used for scalars in general but not for aggregates. */
1428 = build_decl ((constant_p && const_decl_allowed_flag
1429 && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
1432 /* If this is external, throw away any initializations (they will be done
1433 elsewhere) unless this is a a constant for which we would like to remain
1434 able to get the initializer. If we are defining a global here, leave a
1435 constant initialization and save any variable elaborations for the
1436 elaboration routine. If we are just annotating types, throw away the
1437 initialization if it isn't a constant. */
1438 if ((extern_flag && !constant_p)
1439 || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
1440 var_init = NULL_TREE;
1442 /* At the global level, an initializer requiring code to be generated
1443 produces elaboration statements. Check that such statements are allowed,
1444 that is, not violating a No_Elaboration_Code restriction. */
1445 if (global_bindings_p () && var_init != 0 && ! init_const)
1446 Check_Elaboration_Code_Allowed (gnat_node);
1448 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
1449 try to fiddle with DECL_COMMON. However, on platforms that don't
1450 support global BSS sections, uninitialized global variables would
1451 go in DATA instead, thus increasing the size of the executable. */
1453 && TREE_CODE (var_decl) == VAR_DECL
1454 && !have_global_bss_p ())
1455 DECL_COMMON (var_decl) = 1;
1456 DECL_INITIAL (var_decl) = var_init;
1457 TREE_READONLY (var_decl) = const_flag;
1458 DECL_EXTERNAL (var_decl) = extern_flag;
1459 TREE_PUBLIC (var_decl) = public_flag || extern_flag;
1460 TREE_CONSTANT (var_decl) = constant_p;
1461 TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
1462 = TYPE_VOLATILE (type);
1464 /* If it's public and not external, always allocate storage for it.
1465 At the global binding level we need to allocate static storage for the
1466 variable if and only if it's not external. If we are not at the top level
1467 we allocate automatic storage unless requested not to. */
1468 TREE_STATIC (var_decl)
1469 = public_flag || (global_bindings_p () ? !extern_flag : static_flag);
1471 if (asm_name && VAR_OR_FUNCTION_DECL_P (var_decl))
1472 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
1474 process_attributes (var_decl, attr_list);
1476 /* Add this decl to the current binding level. */
1477 gnat_pushdecl (var_decl, gnat_node);
1479 if (TREE_SIDE_EFFECTS (var_decl))
1480 TREE_ADDRESSABLE (var_decl) = 1;
1482 if (TREE_CODE (var_decl) != CONST_DECL)
1484 if (global_bindings_p ())
1485 rest_of_decl_compilation (var_decl, true, 0);
1488 expand_decl (var_decl);
1493 /* Wrapper around create_var_decl_1 for cases where we don't care whether
1494 a VAR or a CONST decl node is created. */
1497 create_var_decl (tree var_name, tree asm_name, tree type, tree var_init,
1498 bool const_flag, bool public_flag, bool extern_flag,
1499 bool static_flag, struct attrib *attr_list,
1502 return create_var_decl_1 (var_name, asm_name, type, var_init,
1504 public_flag, extern_flag, static_flag,
1505 attr_list, gnat_node);
1508 /* Wrapper around create_var_decl_1 for cases where a VAR_DECL node is
1509 required. The primary intent is for DECL_CONST_CORRESPONDING_VARs, which
1510 must be VAR_DECLs and on which we want TREE_READONLY set to have them
1511 possibly assigned to a readonly data section. */
1514 create_true_var_decl (tree var_name, tree asm_name, tree type, tree var_init,
1515 bool const_flag, bool public_flag, bool extern_flag,
1516 bool static_flag, struct attrib *attr_list,
1519 return create_var_decl_1 (var_name, asm_name, type, var_init,
1521 public_flag, extern_flag, static_flag,
1522 attr_list, gnat_node);
1525 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
1528 aggregate_type_contains_array_p (tree type)
1530 switch (TREE_CODE (type))
1534 case QUAL_UNION_TYPE:
1537 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1538 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
1539 && aggregate_type_contains_array_p (TREE_TYPE (field)))
1552 /* Returns a FIELD_DECL node. FIELD_NAME the field name, FIELD_TYPE is its
1553 type, and RECORD_TYPE is the type of the parent. PACKED is nonzero if
1554 this field is in a record type with a "pragma pack". If SIZE is nonzero
1555 it is the specified size for this field. If POS is nonzero, it is the bit
1556 position. If ADDRESSABLE is nonzero, it means we are allowed to take
1557 the address of this field for aliasing purposes. If it is negative, we
1558 should not make a bitfield, which is used by make_aligning_type. */
1561 create_field_decl (tree field_name, tree field_type, tree record_type,
1562 int packed, tree size, tree pos, int addressable)
1564 tree field_decl = build_decl (FIELD_DECL, field_name, field_type);
1566 DECL_CONTEXT (field_decl) = record_type;
1567 TREE_READONLY (field_decl) = TYPE_READONLY (field_type);
1569 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
1570 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
1571 Likewise for an aggregate without specified position that contains an
1572 array, because in this case slices of variable length of this array
1573 must be handled by GCC and variable-sized objects need to be aligned
1574 to at least a byte boundary. */
1575 if (packed && (TYPE_MODE (field_type) == BLKmode
1577 && AGGREGATE_TYPE_P (field_type)
1578 && aggregate_type_contains_array_p (field_type))))
1579 DECL_ALIGN (field_decl) = BITS_PER_UNIT;
1581 /* If a size is specified, use it. Otherwise, if the record type is packed
1582 compute a size to use, which may differ from the object's natural size.
1583 We always set a size in this case to trigger the checks for bitfield
1584 creation below, which is typically required when no position has been
1587 size = convert (bitsizetype, size);
1588 else if (packed == 1)
1590 size = rm_size (field_type);
1592 /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
1594 if (TREE_CODE (size) == INTEGER_CST
1595 && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
1596 size = round_up (size, BITS_PER_UNIT);
1599 /* If we may, according to ADDRESSABLE, make a bitfield if a size is
1600 specified for two reasons: first if the size differs from the natural
1601 size. Second, if the alignment is insufficient. There are a number of
1602 ways the latter can be true.
1604 We never make a bitfield if the type of the field has a nonconstant size,
1605 because no such entity requiring bitfield operations should reach here.
1607 We do *preventively* make a bitfield when there might be the need for it
1608 but we don't have all the necessary information to decide, as is the case
1609 of a field with no specified position in a packed record.
1611 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
1612 in layout_decl or finish_record_type to clear the bit_field indication if
1613 it is in fact not needed. */
1614 if (addressable >= 0
1616 && TREE_CODE (size) == INTEGER_CST
1617 && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
1618 && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
1619 || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
1621 || (TYPE_ALIGN (record_type) != 0
1622 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
1624 DECL_BIT_FIELD (field_decl) = 1;
1625 DECL_SIZE (field_decl) = size;
1626 if (!packed && !pos)
1627 DECL_ALIGN (field_decl)
1628 = (TYPE_ALIGN (record_type) != 0
1629 ? MIN (TYPE_ALIGN (record_type), TYPE_ALIGN (field_type))
1630 : TYPE_ALIGN (field_type));
1633 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
1635 /* Bump the alignment if need be, either for bitfield/packing purposes or
1636 to satisfy the type requirements if no such consideration applies. When
1637 we get the alignment from the type, indicate if this is from an explicit
1638 user request, which prevents stor-layout from lowering it later on. */
1641 = (DECL_BIT_FIELD (field_decl) ? 1
1642 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT : 0);
1644 if (bit_align > DECL_ALIGN (field_decl))
1645 DECL_ALIGN (field_decl) = bit_align;
1646 else if (!bit_align && TYPE_ALIGN (field_type) > DECL_ALIGN (field_decl))
1648 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
1649 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (field_type);
1655 /* We need to pass in the alignment the DECL is known to have.
1656 This is the lowest-order bit set in POS, but no more than
1657 the alignment of the record, if one is specified. Note
1658 that an alignment of 0 is taken as infinite. */
1659 unsigned int known_align;
1661 if (host_integerp (pos, 1))
1662 known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
1664 known_align = BITS_PER_UNIT;
1666 if (TYPE_ALIGN (record_type)
1667 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
1668 known_align = TYPE_ALIGN (record_type);
1670 layout_decl (field_decl, known_align);
1671 SET_DECL_OFFSET_ALIGN (field_decl,
1672 host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
1674 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
1675 &DECL_FIELD_BIT_OFFSET (field_decl),
1676 DECL_OFFSET_ALIGN (field_decl), pos);
1678 DECL_HAS_REP_P (field_decl) = 1;
1681 /* In addition to what our caller says, claim the field is addressable if we
1682 know that its type is not suitable.
1684 The field may also be "technically" nonaddressable, meaning that even if
1685 we attempt to take the field's address we will actually get the address
1686 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
1687 value we have at this point is not accurate enough, so we don't account
1688 for this here and let finish_record_type decide. */
1689 if (!type_for_nonaliased_component_p (field_type))
1692 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
1697 /* Returns a PARM_DECL node. PARAM_NAME is the name of the parameter,
1698 PARAM_TYPE is its type. READONLY is true if the parameter is
1699 readonly (either an In parameter or an address of a pass-by-ref
1703 create_param_decl (tree param_name, tree param_type, bool readonly)
1705 tree param_decl = build_decl (PARM_DECL, param_name, param_type);
1707 /* Honor targetm.calls.promote_prototypes(), as not doing so can
1708 lead to various ABI violations. */
1709 if (targetm.calls.promote_prototypes (param_type)
1710 && (TREE_CODE (param_type) == INTEGER_TYPE
1711 || TREE_CODE (param_type) == ENUMERAL_TYPE)
1712 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
1714 /* We have to be careful about biased types here. Make a subtype
1715 of integer_type_node with the proper biasing. */
1716 if (TREE_CODE (param_type) == INTEGER_TYPE
1717 && TYPE_BIASED_REPRESENTATION_P (param_type))
1720 = copy_type (build_range_type (integer_type_node,
1721 TYPE_MIN_VALUE (param_type),
1722 TYPE_MAX_VALUE (param_type)));
1724 TYPE_BIASED_REPRESENTATION_P (param_type) = 1;
1727 param_type = integer_type_node;
1730 DECL_ARG_TYPE (param_decl) = param_type;
1731 TREE_READONLY (param_decl) = readonly;
1735 /* Given a DECL and ATTR_LIST, process the listed attributes. */
1738 process_attributes (tree decl, struct attrib *attr_list)
1740 for (; attr_list; attr_list = attr_list->next)
1741 switch (attr_list->type)
1743 case ATTR_MACHINE_ATTRIBUTE:
1744 decl_attributes (&decl, tree_cons (attr_list->name, attr_list->args,
1746 ATTR_FLAG_TYPE_IN_PLACE);
1749 case ATTR_LINK_ALIAS:
1750 if (! DECL_EXTERNAL (decl))
1752 TREE_STATIC (decl) = 1;
1753 assemble_alias (decl, attr_list->name);
1757 case ATTR_WEAK_EXTERNAL:
1759 declare_weak (decl);
1761 post_error ("?weak declarations not supported on this target",
1762 attr_list->error_point);
1765 case ATTR_LINK_SECTION:
1766 if (targetm.have_named_sections)
1768 DECL_SECTION_NAME (decl)
1769 = build_string (IDENTIFIER_LENGTH (attr_list->name),
1770 IDENTIFIER_POINTER (attr_list->name));
1771 DECL_COMMON (decl) = 0;
1774 post_error ("?section attributes are not supported for this target",
1775 attr_list->error_point);
1778 case ATTR_LINK_CONSTRUCTOR:
1779 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1780 TREE_USED (decl) = 1;
1783 case ATTR_LINK_DESTRUCTOR:
1784 DECL_STATIC_DESTRUCTOR (decl) = 1;
1785 TREE_USED (decl) = 1;
1790 /* Record a global renaming pointer. */
1793 record_global_renaming_pointer (tree decl)
1795 gcc_assert (DECL_RENAMED_OBJECT (decl));
1796 VEC_safe_push (tree, gc, global_renaming_pointers, decl);
1799 /* Invalidate the global renaming pointers. */
1802 invalidate_global_renaming_pointers (void)
1807 for (i = 0; VEC_iterate(tree, global_renaming_pointers, i, iter); i++)
1808 SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);
1810 VEC_free (tree, gc, global_renaming_pointers);
1813 /* Return true if VALUE is a known to be a multiple of FACTOR, which must be
1817 value_factor_p (tree value, HOST_WIDE_INT factor)
1819 if (host_integerp (value, 1))
1820 return tree_low_cst (value, 1) % factor == 0;
1822 if (TREE_CODE (value) == MULT_EXPR)
1823 return (value_factor_p (TREE_OPERAND (value, 0), factor)
1824 || value_factor_p (TREE_OPERAND (value, 1), factor));
1829 /* Given 2 consecutive field decls PREV_FIELD and CURR_FIELD, return true
1830 unless we can prove these 2 fields are laid out in such a way that no gap
1831 exist between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
1832 is the distance in bits between the end of PREV_FIELD and the starting
1833 position of CURR_FIELD. It is ignored if null. */
1836 potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
1838 /* If this is the first field of the record, there cannot be any gap */
1842 /* If the previous field is a union type, then return False: The only
1843 time when such a field is not the last field of the record is when
1844 there are other components at fixed positions after it (meaning there
1845 was a rep clause for every field), in which case we don't want the
1846 alignment constraint to override them. */
1847 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
1850 /* If the distance between the end of prev_field and the beginning of
1851 curr_field is constant, then there is a gap if the value of this
1852 constant is not null. */
1853 if (offset && host_integerp (offset, 1))
1854 return !integer_zerop (offset);
1856 /* If the size and position of the previous field are constant,
1857 then check the sum of this size and position. There will be a gap
1858 iff it is not multiple of the current field alignment. */
1859 if (host_integerp (DECL_SIZE (prev_field), 1)
1860 && host_integerp (bit_position (prev_field), 1))
1861 return ((tree_low_cst (bit_position (prev_field), 1)
1862 + tree_low_cst (DECL_SIZE (prev_field), 1))
1863 % DECL_ALIGN (curr_field) != 0);
1865 /* If both the position and size of the previous field are multiples
1866 of the current field alignment, there cannot be any gap. */
1867 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
1868 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
1871 /* Fallback, return that there may be a potential gap */
1875 /* Returns a LABEL_DECL node for LABEL_NAME. */
1878 create_label_decl (tree label_name)
1880 tree label_decl = build_decl (LABEL_DECL, label_name, void_type_node);
1882 DECL_CONTEXT (label_decl) = current_function_decl;
1883 DECL_MODE (label_decl) = VOIDmode;
1884 DECL_SOURCE_LOCATION (label_decl) = input_location;
1889 /* Returns a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
1890 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1891 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1892 PARM_DECL nodes chained through the TREE_CHAIN field).
1894 INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
1895 appropriate fields in the FUNCTION_DECL. GNAT_NODE gives the location. */
1898 create_subprog_decl (tree subprog_name, tree asm_name,
1899 tree subprog_type, tree param_decl_list, bool inline_flag,
1900 bool public_flag, bool extern_flag,
1901 struct attrib *attr_list, Node_Id gnat_node)
1903 tree return_type = TREE_TYPE (subprog_type);
1904 tree subprog_decl = build_decl (FUNCTION_DECL, subprog_name, subprog_type);
1906 /* If this is a function nested inside an inlined external function, it
1907 means we aren't going to compile the outer function unless it is
1908 actually inlined, so do the same for us. */
1909 if (current_function_decl && DECL_INLINE (current_function_decl)
1910 && DECL_EXTERNAL (current_function_decl))
/* Copy the caller-supplied flags and the readonly/volatile qualifiers of
   SUBPROG_TYPE onto the new decl. */
1913 DECL_EXTERNAL (subprog_decl) = extern_flag;
1914 TREE_PUBLIC (subprog_decl) = public_flag;
1915 TREE_STATIC (subprog_decl) = 1;
1916 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
1917 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
1918 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
1919 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
1920 DECL_RESULT (subprog_decl) = build_decl (RESULT_DECL, 0, return_type);
1921 DECL_ARTIFICIAL (DECL_RESULT (subprog_decl)) = 1;
1922 DECL_IGNORED_P (DECL_RESULT (subprog_decl)) = 1;
1924 /* TREE_ADDRESSABLE is set on the result type to request the use of the
1925 target by-reference return mechanism. This is not supported all the
1926 way down to RTL expansion with GCC 4, which ICEs on temporary creation
1927 attempts with such a type and expects DECL_BY_REFERENCE to be set on
1928 the RESULT_DECL instead - see gnat_genericize for more details. */
1929 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (subprog_decl))))
1931 tree result_decl = DECL_RESULT (subprog_decl);
1933 TREE_ADDRESSABLE (TREE_TYPE (result_decl)) = 0;
1934 DECL_BY_REFERENCE (result_decl) = 1;
/* NOTE(review): the guard for the next assignment (presumably testing
   inline_flag) is not visible in this excerpt -- confirm against the full
   source before relying on it. */
1938 DECL_DECLARED_INLINE_P (subprog_decl) = 1;
1941 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
1943 process_attributes (subprog_decl, attr_list);
1945 /* Add this decl to the current binding level. */
1946 gnat_pushdecl (subprog_decl, gnat_node);
1948 /* Output the assembler code and/or RTL for the declaration. */
1949 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
1951 return subprog_decl;
1954 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
1955 body. This routine needs to be invoked before processing the declarations
1956 appearing in the subprogram. */
1959 begin_subprog_body (tree subprog_decl)
/* Make SUBPROG_DECL the function currently being compiled. */
1963 current_function_decl = subprog_decl;
1964 announce_function (subprog_decl);
1966 /* Enter a new binding level and show that all the parameters belong to this function. */
1969 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
1970 param_decl = TREE_CHAIN (param_decl))
1971 DECL_CONTEXT (param_decl) = subprog_decl;
1973 make_decl_rtl (subprog_decl);
1975 /* We handle pending sizes via the elaboration of types, so we don't need to
1976 save them. This causes them to be marked as part of the outer function
1977 and then discarded. */
1978 get_pending_sizes ();
1982 /* Helper for the genericization callback. Return a dereference of VAL
1983 if it is of a reference type. */
1986 convert_from_reference (tree val)
1988 tree value_type, ref;
/* Nothing to do unless VAL really has reference type. */
1990 if (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE)
1993 value_type = TREE_TYPE (TREE_TYPE (val));
1994 ref = build1 (INDIRECT_REF, value_type, val);
1996 /* See if what we reference is CONST or VOLATILE, which requires
1997 looking into array types to get to the component type. */
1999 while (TREE_CODE (value_type) == ARRAY_TYPE)
2000 value_type = TREE_TYPE (value_type);
/* Propagate the component qualifiers onto the INDIRECT_REF node. */
2003 = (TYPE_QUALS (value_type) & TYPE_QUAL_CONST);
2004 TREE_THIS_VOLATILE (ref)
2005 = (TYPE_QUALS (value_type) & TYPE_QUAL_VOLATILE);
2007 TREE_SIDE_EFFECTS (ref)
2008 = (TREE_THIS_VOLATILE (ref) || TREE_SIDE_EFFECTS (val));
2013 /* Helper for the genericization callback. Returns true if T denotes
2014 a RESULT_DECL with DECL_BY_REFERENCE set, i.e. a function result that
2015 create_subprog_decl marked for the by-reference return mechanism. */
2017 is_byref_result (tree t)
2019 return (TREE_CODE (t) == RESULT_DECL && DECL_BY_REFERENCE (t));
2023 /* Tree walking callback for gnat_genericize. Currently ...
2025 o Adjust references to the function's DECL_RESULT if it is marked
2026 DECL_BY_REFERENCE and so has had its type turned into a reference
2027 type at the end of the function compilation. */
2030 gnat_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
2032 /* This implementation is modeled after what the C++ front-end is
2033 doing, basis of the downstream passes behavior. */
2035 tree stmt = *stmt_p;
2036 struct pointer_set_t *p_set = (struct pointer_set_t*) data;
2038 /* If we have a direct mention of the result decl, dereference. */
2039 if (is_byref_result (stmt))
2041 *stmt_p = convert_from_reference (stmt);
2046 /* Otherwise, no need to walk the same tree twice. */
2047 if (pointer_set_contains (p_set, stmt))
2053 /* If we are taking the address of what now is a reference, just get the
   reference value. */
2055 if (TREE_CODE (stmt) == ADDR_EXPR
2056 && is_byref_result (TREE_OPERAND (stmt, 0)))
2058 *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
2062 /* Don't dereference a by-reference RESULT_DECL inside a RETURN_EXPR. */
2063 else if (TREE_CODE (stmt) == RETURN_EXPR
2064 && TREE_OPERAND (stmt, 0)
2065 && is_byref_result (TREE_OPERAND (stmt, 0)))
2068 /* Don't look inside trees that cannot embed references of interest. */
2069 else if (IS_TYPE_OR_DECL_P (stmt))
/* Remember we have processed this node so we skip it on revisit. */
2072 pointer_set_insert (p_set, *stmt_p);
2077 /* Perform lowering of Ada trees to GENERIC. In particular:
2079 o Turn a DECL_BY_REFERENCE RESULT_DECL into a real by-reference decl
2080 and adjust all the references to this decl accordingly. */
2083 gnat_genericize (tree fndecl)
2085 /* Prior to GCC 4, an explicit By_Reference result mechanism for a function
2086 was handled by simply setting TREE_ADDRESSABLE on the result type.
2087 Everything required to actually pass by invisible ref using the target
2088 mechanism (e.g. extra parameter) was handled at RTL expansion time.
2090 This doesn't work with GCC 4 any more for several reasons. First, the
2091 gimplification process might need the creation of temporaries of this
2092 type, and the gimplifier ICEs on such attempts. Second, the middle-end
2093 now relies on a different attribute for such cases (DECL_BY_REFERENCE on
2094 RESULT/PARM_DECLs), and expects the user invisible by-reference-ness to
2095 be explicitly accounted for by the front-end in the function body.
2097 We achieve the complete transformation in two steps:
2099 1/ create_subprog_decl performs early attribute tweaks: it clears
2100 TREE_ADDRESSABLE from the result type and sets DECL_BY_REFERENCE on
2101 the result decl. The former ensures that the bit isn't set in the GCC
2102 tree saved for the function, so prevents ICEs on temporary creation.
2103 The latter we use here to trigger the rest of the processing.
2105 2/ This function performs the type transformation on the result decl
2106 and adjusts all the references to this decl from the function body
2109 Clearing TREE_ADDRESSABLE from the type differs from the C++ front-end
2110 strategy, which escapes the gimplifier temporary creation issues by
2111 creating its own temporaries using TARGET_EXPR nodes. Our way relies
2112 on simple specific support code in aggregate_value_p to look at the
2113 target function result decl explicitly. */
2115 struct pointer_set_t *p_set;
2116 tree decl_result = DECL_RESULT (fndecl);
/* Nothing to do unless step 1/ above marked the result by-reference. */
2118 if (!DECL_BY_REFERENCE (decl_result))
2121 /* Make the DECL_RESULT explicitly by-reference and adjust all the
2122 occurrences in the function body using the common tree-walking facility.
2123 We want to see every occurrence of the result decl to adjust the
2124 referencing tree, so need to use our own pointer set to control which
2125 trees should be visited again or not. */
2127 p_set = pointer_set_create ();
2129 TREE_TYPE (decl_result) = build_reference_type (TREE_TYPE (decl_result));
2130 TREE_ADDRESSABLE (decl_result) = 0;
2131 relayout_decl (decl_result);
2133 walk_tree (&DECL_SAVED_TREE (fndecl), gnat_genericize_r, p_set, NULL);
2135 pointer_set_destroy (p_set);
2138 /* Finish the definition of the current subprogram and compile it all the way
2139 to assembler language output. BODY is the tree corresponding to the subprogram. */
2143 end_subprog_body (tree body)
2145 tree fndecl = current_function_decl;
2147 /* Mark the BLOCK for this level as being for this function and pop the
2148 level. Since the vars in it are the parameters, clear them. */
2149 BLOCK_VARS (current_binding_level->block) = 0;
2150 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
2151 DECL_INITIAL (fndecl) = current_binding_level->block;
2154 /* Deal with inline. If declared inline or we should default to inline,
2155 set the flag in the decl. */
2156 DECL_INLINE (fndecl)
2157 = DECL_DECLARED_INLINE_P (fndecl) || flag_inline_trees == 2;
2159 /* We handle pending sizes via the elaboration of types, so we don't
2160 need to save them. */
2161 get_pending_sizes ();
2163 /* Mark the RESULT_DECL as being in this subprogram. */
2164 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
2166 DECL_SAVED_TREE (fndecl) = body;
/* Pop back to the enclosing function, if any. */
2168 current_function_decl = DECL_CONTEXT (fndecl);
2171 /* We cannot track the location of errors past this point. */
2172 error_gnat_node = Empty;
2174 /* If we're only annotating types, don't actually compile this function. */
2175 if (type_annotate_only)
2178 /* Perform the required pre-gimplification transformations on the tree. */
2179 gnat_genericize (fndecl);
2181 /* We do different things for nested and non-nested functions.
2182 ??? This should be in cgraph. */
2183 if (!DECL_CONTEXT (fndecl))
2185 gnat_gimplify_function (fndecl);
2186 cgraph_finalize_function (fndecl, false);
2189 /* Register this function with cgraph just far enough to get it
2190 added to our parent's nested function list. */
2191 (void) cgraph_node (fndecl);
2194 /* Convert FNDECL's code to GIMPLE and handle any nested functions. */
2197 gnat_gimplify_function (tree fndecl)
2199 struct cgraph_node *cgn;
/* Dump the tree before and after gimplification when dumps are enabled. */
2201 dump_function (TDI_original, fndecl);
2202 gimplify_function_tree (fndecl);
2203 dump_function (TDI_generic, fndecl);
2205 /* Convert all nested functions to GIMPLE now. We do things in this order
2206 so that items like VLA sizes are expanded properly in the context of the
2207 correct function. */
2208 cgn = cgraph_node (fndecl);
2209 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
2210 gnat_gimplify_function (cgn->decl);
/* Install DECL at the current binding level; used by the middle-end to
   register builtin function declarations with this front-end. */
2215 gnat_builtin_function (tree decl)
2217 gnat_pushdecl (decl, Empty);
2221 /* Handle a "const" attribute; arguments as in
2222 struct attribute_spec.handler. */
2225 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
2226 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
/* On a FUNCTION_DECL, "const" maps to TREE_READONLY in GCC trees. */
2229 if (TREE_CODE (*node) == FUNCTION_DECL)
2230 TREE_READONLY (*node) = 1;
2232 *no_add_attrs = true;
2237 /* Handle a "nothrow" attribute; arguments as in
2238 struct attribute_spec.handler. */
2241 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
2242 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
/* On a FUNCTION_DECL, "nothrow" maps to TREE_NOTHROW in GCC trees. */
2245 if (TREE_CODE (*node) == FUNCTION_DECL)
2246 TREE_NOTHROW (*node) = 1;
2248 *no_add_attrs = true;
2253 /* Return an integer type with the number of bits of precision given by
2254 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
2255 it is a signed type. Results are cached in signed_and_unsigned_types
2256 for precisions up to 2 * MAX_BITS_PER_WORD. */
2258 gnat_type_for_size (unsigned precision, int unsignedp)
/* Check the cache first. */
2263 if (precision <= 2 * MAX_BITS_PER_WORD
2264 && signed_and_unsigned_types[precision][unsignedp])
2265 return signed_and_unsigned_types[precision][unsignedp]; 
2268 t = make_unsigned_type (precision);
2270 t = make_signed_type (precision);
2272 if (precision <= 2 * MAX_BITS_PER_WORD)
2273 signed_and_unsigned_types[precision][unsignedp] = t;
/* Give the new type a name of the form [UN]SIGNED_<precision>. */
2277 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
2278 TYPE_NAME (t) = get_identifier (type_name);
2284 /* Likewise for floating-point types. Results are cached per machine
2285 mode in float_types. */
2287 float_type_for_precision (int precision, enum machine_mode mode)
2292 if (float_types[(int) mode])
2293 return float_types[(int) mode];
2295 float_types[(int) mode] = t = make_node (REAL_TYPE);
2296 TYPE_PRECISION (t) = precision;
/* PRECISION must uniquely determine MODE once the type is laid out. */
2299 gcc_assert (TYPE_MODE (t) == mode);
2302 sprintf (type_name, "FLOAT_%d", precision);
2303 TYPE_NAME (t) = get_identifier (type_name);
2309 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
2310 an unsigned type; otherwise a signed type is returned. Non-scalar
2311 modes other than VOIDmode are not supported here. */
2313 gnat_type_for_mode (enum machine_mode mode, int unsignedp)
2315 if (mode == BLKmode)
2317 else if (mode == VOIDmode)
2318 return void_type_node;
2319 else if (COMPLEX_MODE_P (mode))
2321 else if (SCALAR_FLOAT_MODE_P (mode))
2322 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
2323 else if (SCALAR_INT_MODE_P (mode))
2324 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2329 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
2332 gnat_unsigned_type (tree type_node)
2334 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
/* For modular types (or subtypes thereof), preserve the link to the
   original type in TREE_TYPE of a copy so modularity is not lost. */
2336 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2338 type = copy_node (type);
2339 TREE_TYPE (type) = type_node;
2341 else if (TREE_TYPE (type_node)
2342 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2343 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2345 type = copy_node (type);
2346 TREE_TYPE (type) = TREE_TYPE (type_node);
2352 /* Return the signed version of a TYPE_NODE, a scalar type. */
2355 gnat_signed_type (tree type_node)
2357 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
/* Same modular-type handling as in gnat_unsigned_type above. */
2359 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2361 type = copy_node (type);
2362 TREE_TYPE (type) = type_node;
2364 else if (TREE_TYPE (type_node)
2365 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2366 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2368 type = copy_node (type);
2369 TREE_TYPE (type) = TREE_TYPE (type_node);
2376 /* EXP is an expression for the size of an object. If this size contains
2377 discriminant references, replace them with the maximum (if MAX_P) or
2378 minimum (if !MAX_P) possible value of the discriminant. */
2381 max_size (tree exp, bool max_p)
2383 enum tree_code code = TREE_CODE (exp);
2384 tree type = TREE_TYPE (exp);
2386 switch (TREE_CODE_CLASS (code))
2388 case tcc_declaration:
/* For calls, recurse on every argument so discriminant references
   nested in argument expressions get replaced too. */
2393 if (code == CALL_EXPR)
2396 int i, n = call_expr_nargs (exp);
2399 argarray = (tree *) alloca (n * sizeof (tree));
2400 for (i = 0; i < n; i++)
2401 argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
2402 return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
2407 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
2408 modify. Otherwise, we treat it like a variable. */
2409 if (!CONTAINS_PLACEHOLDER_P (exp))
2412 type = TREE_TYPE (TREE_OPERAND (exp, 1));
2414 max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), true);
2416 case tcc_comparison:
2417 return max_p ? size_one_node : size_zero_node;
2421 case tcc_expression:
2422 switch (TREE_CODE_LENGTH (code))
2425 if (code == NON_LVALUE_EXPR)
2426 return max_size (TREE_OPERAND (exp, 0), max_p);
/* Negation flips which bound we are after. */
2429 fold_build1 (code, type,
2430 max_size (TREE_OPERAND (exp, 0),
2431 code == NEGATE_EXPR ? !max_p : max_p));
2434 if (code == COMPOUND_EXPR)
2435 return max_size (TREE_OPERAND (exp, 1), max_p);
2437 /* Calculate "(A ? B : C) - D" as "A ? B - D : C - D" which
2438 may provide a tighter bound on max_size. */
2439 if (code == MINUS_EXPR
2440 && TREE_CODE (TREE_OPERAND (exp, 0)) == COND_EXPR)
2442 tree lhs = fold_build2 (MINUS_EXPR, type,
2443 TREE_OPERAND (TREE_OPERAND (exp, 0), 1),
2444 TREE_OPERAND (exp, 1));
2445 tree rhs = fold_build2 (MINUS_EXPR, type,
2446 TREE_OPERAND (TREE_OPERAND (exp, 0), 2),
2447 TREE_OPERAND (exp, 1));
2448 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2449 max_size (lhs, max_p),
2450 max_size (rhs, max_p));
2454 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
2455 tree rhs = max_size (TREE_OPERAND (exp, 1),
2456 code == MINUS_EXPR ? !max_p : max_p);
2458 /* Special-case wanting the maximum value of a MIN_EXPR.
2459 In that case, if one side overflows, return the other.
2460 sizetype is signed, but we know sizes are non-negative.
2461 Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
2462 overflowing or the maximum possible value and the RHS
   a variable. */
2466 && TREE_CODE (rhs) == INTEGER_CST
2467 && TREE_OVERFLOW (rhs))
2471 && TREE_CODE (lhs) == INTEGER_CST
2472 && TREE_OVERFLOW (lhs))
2474 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
2475 && ((TREE_CODE (lhs) == INTEGER_CST
2476 && TREE_OVERFLOW (lhs))
2477 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
2478 && !TREE_CONSTANT (rhs))
2481 return fold_build2 (code, type, lhs, rhs);
2485 if (code == SAVE_EXPR)
2487 else if (code == COND_EXPR)
2488 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2489 max_size (TREE_OPERAND (exp, 1), max_p),
2490 max_size (TREE_OPERAND (exp, 2), max_p));
2493 /* Other tree classes cannot happen. */
2501 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2502 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2503 Return a constructor for the template. */
2506 build_template (tree template_type, tree array_type, tree expr)
2508 tree template_elts = NULL_TREE;
2509 tree bound_list = NULL_TREE;
/* Strip padding and justified-modular wrappers to get at the array. */
2512 while (TREE_CODE (array_type) == RECORD_TYPE
2513 && (TYPE_IS_PADDING_P (array_type)
2514 || TYPE_JUSTIFIED_MODULAR_P (array_type)))
2515 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
2517 if (TREE_CODE (array_type) == ARRAY_TYPE
2518 || (TREE_CODE (array_type) == INTEGER_TYPE
2519 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
2520 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
2522 /* First make the list for a CONSTRUCTOR for the template. Go down the
2523 field list of the template instead of the type chain because this
2524 array might be an Ada array of arrays and we can't tell where the
2525 nested arrays stop being the underlying object. */
2527 for (field = TYPE_FIELDS (template_type); field;
2529 ? (bound_list = TREE_CHAIN (bound_list))
2530 : (array_type = TREE_TYPE (array_type))),
2531 field = TREE_CHAIN (TREE_CHAIN (field)))
2533 tree bounds, min, max;
2535 /* If we have a bound list, get the bounds from there. Likewise
2536 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
2537 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
2538 This will give us a maximum range. */
2540 bounds = TREE_VALUE (bound_list);
2541 else if (TREE_CODE (array_type) == ARRAY_TYPE)
2542 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
2543 else if (expr && TREE_CODE (expr) == PARM_DECL
2544 && DECL_BY_COMPONENT_PTR_P (expr))
2545 bounds = TREE_TYPE (field);
/* The template has (min, max) field pairs; convert the bounds to
   the field types. */
2549 min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
2550 max = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MAX_VALUE (bounds));
2552 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
2553 substitute it from OBJECT. */
2554 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
2555 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
2557 template_elts = tree_cons (TREE_CHAIN (field), max,
2558 tree_cons (field, min, template_elts));
2561 return gnat_build_constructor (template_type, nreverse (template_elts));
2564 /* Build a VMS descriptor from a Mechanism_Type, which must specify
2565 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2566 in the type contains in its DECL_INITIAL the expression to use when
2567 a constructor is made for the type. GNAT_ENTITY is an entity used
2568 to print out an error message if the mechanism cannot be applied to
2569 an object of that type and also for the name. */
2572 build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2574 tree record_type = make_node (RECORD_TYPE);
2575 tree pointer32_type;
2576 tree field_list = 0;
2585 /* If TYPE is an unconstrained array, use the underlying array type. */
2586 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2587 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2589 /* If this is an array, compute the number of dimensions in the array,
2590 get the index types, and point to the inner type. */
2591 if (TREE_CODE (type) != ARRAY_TYPE)
2594 for (ndim = 1, inner_type = type;
2595 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2596 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2597 ndim++, inner_type = TREE_TYPE (inner_type))
2600 idx_arr = (tree *) alloca (ndim * sizeof (tree));
/* Fortran convention stores indices in reverse order, except for NCA
   descriptors. */
2602 if (mech != By_Descriptor_NCA
2603 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2604 for (i = ndim - 1, inner_type = type;
2606 i--, inner_type = TREE_TYPE (inner_type))
2607 idx_arr[i] = TYPE_DOMAIN (inner_type);
2609 for (i = 0, inner_type = type;
2611 i++, inner_type = TREE_TYPE (inner_type))
2612 idx_arr[i] = TYPE_DOMAIN (inner_type);
2614 /* Now get the DTYPE value. */
2615 switch (TREE_CODE (type))
2619 if (TYPE_VAX_FLOATING_POINT_P (type))
2620 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2633 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2636 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2639 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2642 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2645 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2648 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2654 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2658 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2659 && TYPE_VAX_FLOATING_POINT_P (type))
2660 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2672 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2683 /* Get the CLASS value. */
2686 case By_Descriptor_A:
2689 case By_Descriptor_NCA:
2692 case By_Descriptor_SB:
2696 case By_Descriptor_S:
2702 /* Make the type for a descriptor for VMS. The first four fields
2703 are the same for all types. */
2706 = chainon (field_list,
2707 make_descriptor_field
2708 ("LENGTH", gnat_type_for_size (16, 1), record_type,
2709 size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));
2711 field_list = chainon (field_list,
2712 make_descriptor_field ("DTYPE",
2713 gnat_type_for_size (8, 1),
2714 record_type, size_int (dtype)));
2715 field_list = chainon (field_list,
2716 make_descriptor_field ("CLASS",
2717 gnat_type_for_size (8, 1),
2718 record_type, size_int (class)));
2720 /* Of course this will crash at run-time if the address space is not
2721 within the low 32 bits, but there is nothing else we can do. */
2722 pointer32_type = build_pointer_type_for_mode (type, SImode, false);
2725 = chainon (field_list,
2726 make_descriptor_field
2727 ("POINTER", pointer32_type, record_type,
2728 build_unary_op (ADDR_EXPR,
2730 build0 (PLACEHOLDER_EXPR, type))));
/* The remaining fields depend on the descriptor class. */
2735 case By_Descriptor_S:
2738 case By_Descriptor_SB:
2740 = chainon (field_list,
2741 make_descriptor_field
2742 ("SB_L1", gnat_type_for_size (32, 1), record_type,
2743 TREE_CODE (type) == ARRAY_TYPE
2744 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2746 = chainon (field_list,
2747 make_descriptor_field
2748 ("SB_U1", gnat_type_for_size (32, 1), record_type,
2749 TREE_CODE (type) == ARRAY_TYPE
2750 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2753 case By_Descriptor_A:
2754 case By_Descriptor_NCA:
2755 field_list = chainon (field_list,
2756 make_descriptor_field ("SCALE",
2757 gnat_type_for_size (8, 1),
2761 field_list = chainon (field_list,
2762 make_descriptor_field ("DIGITS",
2763 gnat_type_for_size (8, 1),
2768 = chainon (field_list,
2769 make_descriptor_field
2770 ("AFLAGS", gnat_type_for_size (8, 1), record_type,
2771 size_int (mech == By_Descriptor_NCA
2773 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2774 : (TREE_CODE (type) == ARRAY_TYPE
2775 && TYPE_CONVENTION_FORTRAN_P (type)
2778 field_list = chainon (field_list,
2779 make_descriptor_field ("DIMCT",
2780 gnat_type_for_size (8, 1),
2784 field_list = chainon (field_list,
2785 make_descriptor_field ("ARSIZE",
2786 gnat_type_for_size (32, 1),
2788 size_in_bytes (type)));
2790 /* Now build a pointer to the 0,0,0... element. */
2791 tem = build0 (PLACEHOLDER_EXPR, type);
2792 for (i = 0, inner_type = type; i < ndim;
2793 i++, inner_type = TREE_TYPE (inner_type))
2794 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
2795 convert (TYPE_DOMAIN (inner_type), size_zero_node),
2796 NULL_TREE, NULL_TREE);
2799 = chainon (field_list,
2800 make_descriptor_field
2802 build_pointer_type_for_mode (inner_type, SImode, false),
2805 build_pointer_type_for_mode (inner_type, SImode,
2809 /* Next come the addressing coefficients. */
2810 tem = size_one_node;
2811 for (i = 0; i < ndim; i++)
/* Each coefficient is the product of the extents of the inner
   dimensions: (max - min + 1) accumulated into TEM. */
2815 = size_binop (MULT_EXPR, tem,
2816 size_binop (PLUS_EXPR,
2817 size_binop (MINUS_EXPR,
2818 TYPE_MAX_VALUE (idx_arr[i]),
2819 TYPE_MIN_VALUE (idx_arr[i])),
2822 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
2823 fname[1] = '0' + i, fname[2] = 0;
2825 = chainon (field_list,
2826 make_descriptor_field (fname,
2827 gnat_type_for_size (32, 1),
2828 record_type, idx_length));
2830 if (mech == By_Descriptor_NCA)
2834 /* Finally here are the bounds. */
2835 for (i = 0; i < ndim; i++)
2839 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2841 = chainon (field_list,
2842 make_descriptor_field
2843 (fname, gnat_type_for_size (32, 1), record_type,
2844 TYPE_MIN_VALUE (idx_arr[i])));
2848 = chainon (field_list,
2849 make_descriptor_field
2850 (fname, gnat_type_for_size (32, 1), record_type,
2851 TYPE_MAX_VALUE (idx_arr[i])));
2856 post_error ("unsupported descriptor type for &", gnat_entity);
2859 finish_record_type (record_type, field_list, 0, true);
2860 create_type_decl (create_concat_name (gnat_entity, "DESC"), record_type,
2861 NULL, true, false, gnat_entity);
2866 /* Utility routine for above code to make a field. NAME is the field name,
2867 TYPE its type, REC_TYPE the enclosing record, and INITIAL the expression
2868 stored in DECL_INITIAL for later use by descriptor constructors. */
2869 make_descriptor_field (const char *name, tree type,
2870 tree rec_type, tree initial)
2873 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
2875 DECL_INITIAL (field) = initial;
2879 /* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
2880 pointer or fat pointer type. GNAT_SUBPROG is the subprogram to which
2881 the VMS descriptor is passed. */
2884 convert_vms_descriptor (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
2886 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
2887 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
2888 /* The CLASS field is the 3rd field in the descriptor. */
2889 tree class = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type)));
2890 /* The POINTER field is the 4th field in the descriptor. */
2891 tree pointer = TREE_CHAIN (class);
2893 /* Retrieve the value of the POINTER field. */
2895 = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);
2897 if (POINTER_TYPE_P (gnu_type))
2898 return convert (gnu_type, gnu_expr);
/* Otherwise build a fat pointer: data pointer plus bounds template. */
2900 else if (TYPE_FAT_POINTER_P (gnu_type))
2902 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
2903 tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
2904 tree template_type = TREE_TYPE (p_bounds_type);
2905 tree min_field = TYPE_FIELDS (template_type);
2906 tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
2907 tree template, template_addr, aflags, dimct, t, u;
2908 /* See the head comment of build_vms_descriptor. */
2909 int iclass = TREE_INT_CST_LOW (DECL_INITIAL (class));
2911 /* Convert POINTER to the type of the P_ARRAY field. */
2912 gnu_expr = convert (p_array_type, gnu_expr);
2916 case 1: /* Class S */
2917 case 15: /* Class SB */
2918 /* Build {1, LENGTH} template; LENGTH is the 1st field. */
2919 t = TYPE_FIELDS (desc_type);
2920 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
2921 t = tree_cons (min_field,
2922 convert (TREE_TYPE (min_field), integer_one_node),
2923 tree_cons (max_field,
2924 convert (TREE_TYPE (max_field), t),
2926 template = gnat_build_constructor (template_type, t);
2927 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);
2929 /* For class S, we are done. */
2933 /* Test that we really have a SB descriptor, like DEC Ada. */
2934 t = build3 (COMPONENT_REF, TREE_TYPE (class), desc, class, NULL);
2935 u = convert (TREE_TYPE (class), DECL_INITIAL (class));
2936 u = build_binary_op (EQ_EXPR, integer_type_node, t, u);
2937 /* If so, there is already a template in the descriptor and
2938 it is located right after the POINTER field. */
2939 t = TREE_CHAIN (pointer);
2940 template = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
2941 /* Otherwise use the {1, LENGTH} template we built above. */
2942 template_addr = build3 (COND_EXPR, p_bounds_type, u,
2943 build_unary_op (ADDR_EXPR, p_bounds_type,
2948 case 4: /* Class A */
2949 /* The AFLAGS field is the 7th field in the descriptor. */
2950 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer)));
2951 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
2952 /* The DIMCT field is the 8th field in the descriptor. */
2954 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
2955 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
2956 or FL_COEFF or FL_BOUNDS not set. */
2957 u = build_int_cst (TREE_TYPE (aflags), 192);
2958 u = build_binary_op (TRUTH_OR_EXPR, integer_type_node,
2959 build_binary_op (NE_EXPR, integer_type_node,
2961 convert (TREE_TYPE (dimct),
2963 build_binary_op (NE_EXPR, integer_type_node,
2964 build2 (BIT_AND_EXPR,
2968 add_stmt (build3 (COND_EXPR, void_type_node, u,
2969 build_call_raise (CE_Length_Check_Failed, Empty,
2970 N_Raise_Constraint_Error),
2972 /* There is already a template in the descriptor and it is
2973 located at the start of block 3 (12th field). */
2974 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (t))));
2975 template = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
2976 template_addr = build_unary_op (ADDR_EXPR, p_bounds_type, template);
2979 case 10: /* Class NCA */
2981 post_error ("unsupported descriptor type for &", gnat_subprog);
2982 template_addr = integer_zero_node;
2986 /* Build the fat pointer in the form of a constructor. */
2987 t = tree_cons (TYPE_FIELDS (gnu_type), gnu_expr,
2988 tree_cons (TREE_CHAIN (TYPE_FIELDS (gnu_type)),
2989 template_addr, NULL_TREE));
2990 return gnat_build_constructor (gnu_type, t);
2997 /* Build a stub for the subprogram specified by the GCC tree GNU_SUBPROG
2998 and the GNAT node GNAT_SUBPROG. The stub converts by-descriptor
2999 parameters to by-reference ones and then calls GNU_SUBPROG. */
3001 build_function_stub (tree gnu_subprog, Entity_Id gnat_subprog)
3003 tree gnu_subprog_type, gnu_subprog_addr, gnu_subprog_call;
3004 tree gnu_stub_param, gnu_param_list, gnu_arg_types, gnu_param;
3005 tree gnu_stub_decl = DECL_FUNCTION_STUB (gnu_subprog);
3008 gnu_subprog_type = TREE_TYPE (gnu_subprog);
3009 gnu_param_list = NULL_TREE;
3011 begin_subprog_body (gnu_stub_decl);
3014 start_stmt_group ();
3016 /* Loop over the parameters of the stub and translate any of them
3017 passed by descriptor into a by reference one. */
3018 for (gnu_stub_param = DECL_ARGUMENTS (gnu_stub_decl),
3019 gnu_arg_types = TYPE_ARG_TYPES (gnu_subprog_type);
3021 gnu_stub_param = TREE_CHAIN (gnu_stub_param),
3022 gnu_arg_types = TREE_CHAIN (gnu_arg_types))
3024 if (DECL_BY_DESCRIPTOR_P (gnu_stub_param))
3025 gnu_param = convert_vms_descriptor (TREE_VALUE (gnu_arg_types),
3026 gnu_stub_param, gnat_subprog)
3028 gnu_param = gnu_stub_param;
3030 gnu_param_list = tree_cons (NULL_TREE, gnu_param, gnu_param_list);
3033 gnu_body = end_stmt_group ();
3035 /* Invoke the internal subprogram. */
3036 gnu_subprog_addr = build1 (ADDR_EXPR, build_pointer_type (gnu_subprog_type),
3038 gnu_subprog_call = build_call_list (TREE_TYPE (gnu_subprog_type),
3040 nreverse (gnu_param_list));
3042 /* Propagate the return value, if any. */
3043 if (VOID_TYPE_P (TREE_TYPE (gnu_subprog_type)))
3044 append_to_statement_list (gnu_subprog_call, &gnu_body);
3046 append_to_statement_list (build_return_expr (DECL_RESULT (gnu_stub_decl),
3052 allocate_struct_function (gnu_stub_decl, false);
3053 end_subprog_body (gnu_body);
3056 /* Build a type to be used to represent an aliased object whose nominal
3057 type is an unconstrained array. This consists of a RECORD_TYPE containing
3058 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
3059 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
3060 is used to represent an arbitrary unconstrained object. Use NAME
3061 as the name of the record. */
3064 build_unc_object_type (tree template_type, tree object_type, tree name)
3066 tree type = make_node (RECORD_TYPE);
/* The BOUNDS (template) field comes first, followed by the ARRAY data. */
3067 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
3068 template_type, type, 0, 0, 0, 1);
3069 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
3072 TYPE_NAME (type) = name;
3073 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
3074 finish_record_type (type,
3075 chainon (chainon (NULL_TREE, template_field),
3082 /* Same, taking a thin or fat pointer type instead of a template type;
3083 the template type is extracted from the pointer's bounds field. */
3085 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
3090 gcc_assert (TYPE_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
/* A fat pointer's second field points to the bounds; a thin pointer
   designates the record whose first field is the bounds template. */
3093 = (TYPE_FAT_POINTER_P (thin_fat_ptr_type)
3094 ? TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
3095 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
3096 return build_unc_object_type (template_type, object_type, name);
3099 /* Shift the component offsets within an unconstrained object TYPE to make it
3100 suitable for use as a designated type for thin pointers. */
3103 shift_unc_components_for_thin_pointers (tree type)
3105 /* Thin pointer values designate the ARRAY data of an unconstrained object,
3106 allocated past the BOUNDS template. The designated type is adjusted to
3107 have ARRAY at position zero and the template at a negative offset, so
3108 that COMPONENT_REFs on (*thin_ptr) designate the proper location. */
3110 tree bounds_field = TYPE_FIELDS (type);
3111 tree array_field = TREE_CHAIN (TYPE_FIELDS (type));
/* Move BOUNDS to -sizeof (ARRAY offset) and ARRAY to offset zero. */
3113 DECL_FIELD_OFFSET (bounds_field)
3114 = size_binop (MINUS_EXPR, size_zero_node, byte_position (array_field));
3116 DECL_FIELD_OFFSET (array_field) = size_zero_node;
3117 DECL_FIELD_BIT_OFFSET (array_field) = bitsize_zero_node;
3120 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE. In
3121 the normal case this is just two adjustments, but we have more to do
3122 if NEW is an UNCONSTRAINED_ARRAY_TYPE. */
3125 update_pointer_to (tree old_type, tree new_type)
/* Capture the pointer/reference chains hanging off OLD_TYPE up front;
   these are what must be redirected to NEW_TYPE.  */
3127 tree ptr = TYPE_POINTER_TO (old_type);
3128 tree ref = TYPE_REFERENCE_TO (old_type);
3132 /* If this is the main variant, process all the other variants first. */
3133 if (TYPE_MAIN_VARIANT (old_type) == old_type)
3134 for (type = TYPE_NEXT_VARIANT (old_type); type;
3135 type = TYPE_NEXT_VARIANT (type))
3136 update_pointer_to (type, new_type);
3138 /* If no pointer or reference, we are done. */
3142 /* Merge the old type qualifiers in the new type.
3144 Each old variant has qualifiers for specific reasons, and the new
3145 designated type as well. Each set of qualifiers represents useful
3146 information grabbed at some point, and merging the two simply unifies
3147 these inputs into the final type description.
3149 Consider for instance a volatile type frozen after an access to constant
3150 type designating it. After the designated type freeze, we get here with a
3151 volatile new_type and a dummy old_type with a readonly variant, created
3152 when the access type was processed. We shall make a volatile and readonly
3153 designated type, because that's what it really is.
3155 We might also get here for a non-dummy old_type variant with different
3156 qualifiers than the new_type ones, for instance in some cases of pointers
3157 to private record type elaboration (see the comments around the call to
3158 this routine from gnat_to_gnu_entity/E_Access_Type). We have to merge the
3159 qualifiers in those cases too, to avoid accidentally discarding the
3160 initial set, and will often end up with old_type == new_type then. */
3161 new_type = build_qualified_type (new_type,
3162 TYPE_QUALS (old_type)
3163 | TYPE_QUALS (new_type));
3165 /* If the new type and the old one are identical, there is nothing to
3167 if (old_type == new_type)
3170 /* Otherwise, first handle the simple case. */
3171 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
/* Transfer the pointer/reference chains to NEW_TYPE and retype every
   pointer/reference variant on those chains.  */
3173 TYPE_POINTER_TO (new_type) = ptr;
3174 TYPE_REFERENCE_TO (new_type) = ref;
3176 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
3177 for (ptr1 = TYPE_MAIN_VARIANT (ptr); ptr1;
3178 ptr1 = TYPE_NEXT_VARIANT (ptr1))
3179 TREE_TYPE (ptr1) = new_type;
3181 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
3182 for (ref1 = TYPE_MAIN_VARIANT (ref); ref1;
3183 ref1 = TYPE_NEXT_VARIANT (ref1))
3184 TREE_TYPE (ref1) = new_type;
3187 /* Now deal with the unconstrained array case. In this case the "pointer"
3188 is actually a RECORD_TYPE where both fields are pointers to dummy nodes.
3189 Turn them into pointers to the correct types using update_pointer_to. */
3190 else if (TREE_CODE (ptr) != RECORD_TYPE || !TYPE_IS_FAT_POINTER_P (ptr))
3195 tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
3196 tree array_field = TYPE_FIELDS (ptr);
3197 tree bounds_field = TREE_CHAIN (TYPE_FIELDS (ptr));
3198 tree new_ptr = TYPE_POINTER_TO (new_type);
3202 /* Make pointers to the dummy template point to the real template. */
3204 (TREE_TYPE (TREE_TYPE (bounds_field)),
3205 TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_ptr)))));
3207 /* The references to the template bounds present in the array type
3208 are made through a PLACEHOLDER_EXPR of type new_ptr. Since we
3209 are updating ptr to make it a full replacement for new_ptr as
3210 pointer to new_type, we must rework the PLACEHOLDER_EXPR so as
3211 to make it of type ptr. */
3212 new_ref = build3 (COMPONENT_REF, TREE_TYPE (bounds_field),
3213 build0 (PLACEHOLDER_EXPR, ptr),
3214 bounds_field, NULL_TREE);
3216 /* Create the new array for the new PLACEHOLDER_EXPR and make
3217 pointers to the dummy array point to it.
3219 ??? This is now the only use of substitute_in_type,
3220 which is a very "heavy" routine to do this, so it
3221 should be replaced at some point. */
3223 (TREE_TYPE (TREE_TYPE (array_field)),
3224 substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))),
3225 TREE_CHAIN (TYPE_FIELDS (new_ptr)), new_ref));
3227 /* Make ptr the pointer to new_type. */
3228 TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
3229 = TREE_TYPE (new_type) = ptr;
3231 for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
3232 SET_TYPE_UNCONSTRAINED_ARRAY (var, new_type);
3234 /* Now handle updating the allocation record, what the thin pointer
3235 points to. Update all pointers from the old record into the new
3236 one, update the type of the array field, and recompute the size. */
3237 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);
3239 TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
3240 = TREE_TYPE (TREE_TYPE (array_field));
3242 /* The size recomputation needs to account for alignment constraints, so
3243 we let layout_type work it out. This will reset the field offsets to
3244 what they would be in a regular record, so we shift them back to what
3245 we want them to be for a thin pointer designated type afterwards. */
3246 DECL_SIZE (TYPE_FIELDS (new_obj_rec)) = 0;
3247 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))) = 0;
3248 TYPE_SIZE (new_obj_rec) = 0;
3249 layout_type (new_obj_rec);
3251 shift_unc_components_for_thin_pointers (new_obj_rec);
3253 /* We are done, at last. */
3254 rest_of_record_type_compilation (ptr);
3258 /* Convert a pointer to a constrained array into a pointer to a fat
3259 pointer. This involves making or finding a template. */
3262 convert_to_fat_pointer (tree type, tree expr)
3264 tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
3265 tree template, template_addr;
3266 tree etype = TREE_TYPE (expr);
3268 /* If EXPR is a constant of zero, we make a fat pointer that has a null
3269 pointer to the template and array. */
3270 if (integer_zerop (expr))
3272 gnat_build_constructor
3274 tree_cons (TYPE_FIELDS (type),
3275 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
3276 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3277 convert (build_pointer_type (template_type),
3281 /* If EXPR is a thin pointer, make the template and data from the record. */
3283 else if (TYPE_THIN_POINTER_P (etype))
3285 tree fields = TYPE_FIELDS (TREE_TYPE (etype));
/* EXPR is referenced twice below (for the template and for the data),
   so make sure it is only evaluated once.  */
3287 expr = save_expr (expr);
3288 if (TREE_CODE (expr) == ADDR_EXPR)
3289 expr = TREE_OPERAND (expr, 0);
3291 expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
/* The first field of the designated record is the template; the second
   is the array data, whose address becomes the fat pointer's P_ARRAY.  */
3293 template = build_component_ref (expr, NULL_TREE, fields, false);
3294 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
3295 build_component_ref (expr, NULL_TREE,
3296 TREE_CHAIN (fields), false));
3299 /* Otherwise, build the constructor for the template. */
3300 template = build_template (template_type, TREE_TYPE (etype), expr);
3302 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);
3304 /* The result is a CONSTRUCTOR for the fat pointer.
3306 If expr is an argument of a foreign convention subprogram, the type it
3307 points to is directly the component type. In this case, the expression
3308 type may not match the corresponding FIELD_DECL type at this point, so we
3309 call "convert" here to fix that up if necessary. This type consistency is
3310 required, for instance because it ensures that possible later folding of
3311 component_refs against this constructor always yields something of the
3312 same type as the initial reference.
3314 Note that the call to "build_template" above is still fine, because it
3315 will only refer to the provided template_type in this case. */
3317 gnat_build_constructor
3318 (type, tree_cons (TYPE_FIELDS (type),
3319 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
3320 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3321 template_addr, NULL_TREE)));
3324 /* Convert to a thin pointer type, TYPE. The only thing we know how to convert
3325 is something that is a fat pointer, so convert to a fat pointer first if
3326 EXPR is not already one. */
3329 convert_to_thin_pointer (tree type, tree expr)
3331 if (!TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
3333 = convert_to_fat_pointer
3334 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
3336 /* We get the pointer to the data and use a NOP_EXPR to make it the
3338 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)),
3340 expr = build1 (NOP_EXPR, type, expr);
3345 /* Create an expression whose value is that of EXPR,
3346 converted to type TYPE. The TREE_TYPE of the value
3347 is always TYPE. This function implements all reasonable
3348 conversions; callers should filter out those that are
3349 not permitted by the language being compiled. */
3352 convert (tree type, tree expr)
3354 enum tree_code code = TREE_CODE (type);
3355 tree etype = TREE_TYPE (expr);
3356 enum tree_code ecode = TREE_CODE (etype);
3358 /* If EXPR is already the right type, we are done. */
3362 /* If both input and output have padding and are of variable size, do this
3363 as an unchecked conversion. Likewise if one is a mere variant of the
3364 other, so we avoid a pointless unpad/repad sequence. */
3365 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
3366 && TYPE_IS_PADDING_P (type) && TYPE_IS_PADDING_P (etype)
3367 && (!TREE_CONSTANT (TYPE_SIZE (type))
3368 || !TREE_CONSTANT (TYPE_SIZE (etype))
3369 || TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)))
3372 /* If the output type has padding, make a constructor to build the
3374 else if (code == RECORD_TYPE && TYPE_IS_PADDING_P (type))
3376 /* If we previously converted from another type and our type is
3377 of variable size, remove the conversion to avoid the need for
3378 variable-size temporaries. */
3379 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3380 && !TREE_CONSTANT (TYPE_SIZE (type)))
3381 expr = TREE_OPERAND (expr, 0);
3383 /* If we are just removing the padding from expr, convert the original
3384 object if we have variable size. That will avoid the need
3385 for some variable-size temporaries. */
3386 if (TREE_CODE (expr) == COMPONENT_REF
3387 && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE
3388 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
3389 && !TREE_CONSTANT (TYPE_SIZE (type)))
3390 return convert (type, TREE_OPERAND (expr, 0));
3392 /* If the result type is a padded type with a self-referentially-sized
3393 field and the expression type is a record, do this as an
3394 unchecked conversion. */
3395 else if (TREE_CODE (etype) == RECORD_TYPE
3396 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
3397 return unchecked_convert (type, expr, false);
3401 gnat_build_constructor (type,
3402 tree_cons (TYPE_FIELDS (type),
3404 (TYPE_FIELDS (type)),
3409 /* If the input type has padding, remove it and convert to the output type.
3410 The conditions ordering is arranged to ensure that the output type is not
3411 a padding type here, as it is not clear whether the conversion would
3412 always be correct if this was to happen. */
3413 else if (ecode == RECORD_TYPE && TYPE_IS_PADDING_P (etype))
3417 /* If we have just converted to this padded type, just get the
3418 inner expression. */
3419 if (TREE_CODE (expr) == CONSTRUCTOR
3420 && !VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (expr))
3421 && VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->index
3422 == TYPE_FIELDS (etype))
3424 = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->value;
3426 /* Otherwise, build an explicit component reference. */
3429 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
3431 return convert (type, unpadded);
3434 /* If the input is a biased type, adjust first. */
3435 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
3436 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
3437 fold_convert (TREE_TYPE (etype),
3439 TYPE_MIN_VALUE (etype)));
3441 /* If the input is a justified modular type, we need to extract the actual
3442 object before converting it to any other type with the exceptions of an
3443 unconstrained array or of a mere type variant. It is useful to avoid the
3444 extraction and conversion in the type variant case because it could end
3445 up replacing a VAR_DECL expr by a constructor and we might be about to
3446 take the address of the result. */
3447 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
3448 && code != UNCONSTRAINED_ARRAY_TYPE
3449 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
3450 return convert (type, build_component_ref (expr, NULL_TREE,
3451 TYPE_FIELDS (etype), false));
3453 /* If converting to a type that contains a template, convert to the data
3454 type and then build the template. */
3455 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
3457 tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));
3459 /* If the source already has a template, get a reference to the
3460 associated array only, as we are going to rebuild a template
3461 for the target type anyway. */
3462 expr = maybe_unconstrained_array (expr);
3465 gnat_build_constructor
3467 tree_cons (TYPE_FIELDS (type),
3468 build_template (TREE_TYPE (TYPE_FIELDS (type)),
3469 obj_type, NULL_TREE),
3470 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3471 convert (obj_type, expr), NULL_TREE)));
3474 /* There are some special cases of expressions that we process
3476 switch (TREE_CODE (expr))
3482 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
3483 conversion in gnat_expand_expr. NULL_EXPR does not represent
3484 an actual value, so no conversion is needed. */
3485 expr = copy_node (expr);
3486 TREE_TYPE (expr) = type;
3490 /* If we are converting a STRING_CST to another constrained array type,
3491 just make a new one in the proper type. */
3492 if (code == ecode && AGGREGATE_TYPE_P (etype)
3493 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
3494 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
3496 expr = copy_node (expr);
3497 TREE_TYPE (expr) = type;
3503 /* If we are converting a CONSTRUCTOR to another constrained array type
3504 with the same domain, just make a new one in the proper type. */
3505 if (code == ecode && code == ARRAY_TYPE
3506 && TREE_TYPE (type) == TREE_TYPE (etype)
3507 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (type)),
3508 TYPE_MIN_VALUE (TYPE_DOMAIN (etype)))
3509 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (type)),
3510 TYPE_MAX_VALUE (TYPE_DOMAIN (etype))))
3512 expr = copy_node (expr);
3513 TREE_TYPE (expr) = type;
3518 case UNCONSTRAINED_ARRAY_REF:
3519 /* Convert this to the type of the inner array by getting the address of
3520 the array from the template. */
3521 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
3522 build_component_ref (TREE_OPERAND (expr, 0),
3523 get_identifier ("P_ARRAY"),
/* EXPR changed, so recompute the cached type/code before falling
   through to the generic handling below.  */
3525 etype = TREE_TYPE (expr);
3526 ecode = TREE_CODE (etype);
3529 case VIEW_CONVERT_EXPR:
3531 /* GCC 4.x is very sensitive to type consistency overall, and view
3532 conversions thus are very frequent. Even though just "convert"ing
3533 the inner operand to the output type is fine in most cases, it
3534 might expose unexpected input/output type mismatches in special
3535 circumstances so we avoid such recursive calls when we can. */
3537 tree op0 = TREE_OPERAND (expr, 0);
3539 /* If we are converting back to the original type, we can just
3540 lift the input conversion. This is a common occurrence with
3541 switches back-and-forth amongst type variants. */
3542 if (type == TREE_TYPE (op0))
3545 /* Otherwise, if we're converting between two aggregate types, we
3546 might be allowed to substitute the VIEW_CONVERT target type in
3547 place or to just convert the inner expression. */
3548 if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
3550 /* If we are converting between type variants, we can just
3551 substitute the VIEW_CONVERT in place. */
3552 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
3553 return build1 (VIEW_CONVERT_EXPR, type, op0);
3555 /* Otherwise, we may just bypass the input view conversion unless
3556 one of the types is a fat pointer, which is handled by
3557 specialized code below which relies on exact type matching. */
3558 else if (!TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
3559 return convert (type, op0);
3565 /* If both types are record types, just convert the pointer and
3566 make a new INDIRECT_REF.
3568 ??? Disable this for now since it causes problems with the
3569 code in build_binary_op for MODIFY_EXPR which wants to
3570 strip off conversions. But that code really is a mess and
3571 we need to do this a much better way some time. */
3573 && (TREE_CODE (type) == RECORD_TYPE
3574 || TREE_CODE (type) == UNION_TYPE)
3575 && (TREE_CODE (etype) == RECORD_TYPE
3576 || TREE_CODE (etype) == UNION_TYPE)
3577 && !TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
3578 return build_unary_op (INDIRECT_REF, NULL_TREE,
3579 convert (build_pointer_type (type),
3580 TREE_OPERAND (expr, 0)));
3587 /* Check for converting to a pointer to an unconstrained array. */
3588 if (TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
3589 return convert_to_fat_pointer (type, expr);
3591 /* If we're converting between two aggregate types that have the same main
3592 variant, just make a VIEW_CONVERT_EXPR. */
3593 else if (AGGREGATE_TYPE_P (type)
3594 && TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
3595 return build1 (VIEW_CONVERT_EXPR, type, expr);
3597 /* In all other cases of related types, make a NOP_EXPR. */
3598 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
3599 || (code == INTEGER_CST && ecode == INTEGER_CST
3600 && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
3601 return fold_convert (type, expr);
3606 return fold_build1 (CONVERT_EXPR, type, expr);
3609 return fold_convert (type, gnat_truthvalue_conversion (expr));
3612 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
3613 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
3614 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
3615 return unchecked_convert (type, expr, false);
3616 else if (TYPE_BIASED_REPRESENTATION_P (type))
3617 return fold_convert (type,
3618 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
3619 convert (TREE_TYPE (type), expr),
3620 TYPE_MIN_VALUE (type)));
3622 /* ... fall through ... */
3625 return fold (convert_to_integer (type, expr));
3628 case REFERENCE_TYPE:
3629 /* If converting between two pointers to records denoting
3630 both a template and type, adjust if needed to account
3631 for any differing offsets, since one might be negative. */
3632 if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
3635 = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
3636 bit_position (TYPE_FIELDS (TREE_TYPE (type))));
3637 tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
3638 sbitsize_int (BITS_PER_UNIT));
3640 expr = build1 (NOP_EXPR, type, expr);
3641 TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
3642 if (integer_zerop (byte_diff))
3645 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
3646 fold (convert (sizetype, byte_diff)));
3649 /* If converting to a thin pointer, handle specially. */
3650 if (TYPE_THIN_POINTER_P (type)
3651 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
3652 return convert_to_thin_pointer (type, expr);
3654 /* If converting fat pointer to normal pointer, get the pointer to the
3655 array and then convert it. */
3656 else if (TYPE_FAT_POINTER_P (etype))
3657 expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
3660 return fold (convert_to_pointer (type, expr));
3663 return fold (convert_to_real (type, expr));
3666 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
3668 gnat_build_constructor
3669 (type, tree_cons (TYPE_FIELDS (type),
3670 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
3673 /* ... fall through ... */
3676 /* In these cases, assume the front-end has validated the conversion.
3677 If the conversion is valid, it will be a bit-wise conversion, so
3678 it can be viewed as an unchecked conversion. */
3679 return unchecked_convert (type, expr, false);
3682 /* This is either a conversion between a tagged type and some
3683 subtype, which we have to mark as a UNION_TYPE because of
3684 overlapping fields or a conversion of an Unchecked_Union. */
3685 return unchecked_convert (type, expr, false);
3687 case UNCONSTRAINED_ARRAY_TYPE:
3688 /* If EXPR is a constrained array, take its address, convert it to a
3689 fat pointer, and then dereference it. Likewise if EXPR is a
3690 record containing both a template and a constrained array.
3691 Note that a record representing a justified modular type
3692 always represents a packed constrained array. */
3693 if (ecode == ARRAY_TYPE
3694 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
3695 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
3696 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
3699 (INDIRECT_REF, NULL_TREE,
3700 convert_to_fat_pointer (TREE_TYPE (type),
3701 build_unary_op (ADDR_EXPR,
3704 /* Do something very similar for converting one unconstrained
3705 array to another. */
3706 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
3708 build_unary_op (INDIRECT_REF, NULL_TREE,
3709 convert (TREE_TYPE (type),
3710 build_unary_op (ADDR_EXPR,
3716 return fold (convert_to_complex (type, expr));
3723 /* Remove all conversions that are done in EXP. This includes converting
3724 from a padded type or to a justified modular type. If TRUE_ADDRESS
3725 is true, always return the address of the containing object even if
3726 the address is not bit-aligned. */
3729 remove_conversions (tree exp, bool true_address)
3731 switch (TREE_CODE (exp))
/* A constructor for a justified modular type wraps the real value in its
   single field: look through it.  */
3735 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3736 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
3738 remove_conversions (VEC_index (constructor_elt,
3739 CONSTRUCTOR_ELTS (exp), 0)->value,
/* A component reference into a padded record is just the unpadding
   conversion: strip it as well.  */
3744 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
3745 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
3746 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3749 case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
3750 case NOP_EXPR: case CONVERT_EXPR:
3751 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3760 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
3761 refers to the underlying array. If its type has TYPE_CONTAINS_TEMPLATE_P,
3762 likewise return an expression pointing to the underlying array. */
3765 maybe_unconstrained_array (tree exp)
3767 enum tree_code code = TREE_CODE (exp);
3770 switch (TREE_CODE (TREE_TYPE (exp)))
3772 case UNCONSTRAINED_ARRAY_TYPE:
3773 if (code == UNCONSTRAINED_ARRAY_REF)
/* Dereference the P_ARRAY field of the fat pointer to get at the
   underlying array, preserving the read-only flag.  */
3776 = build_unary_op (INDIRECT_REF, NULL_TREE,
3777 build_component_ref (TREE_OPERAND (exp, 0),
3778 get_identifier ("P_ARRAY"),
3780 TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp);
3784 else if (code == NULL_EXPR)
3785 return build1 (NULL_EXPR,
3786 TREE_TYPE (TREE_TYPE (TYPE_FIELDS
3787 (TREE_TYPE (TREE_TYPE (exp))))),
3788 TREE_OPERAND (exp, 0));
3791 /* If this is a padded type, convert to the unpadded type and see if
3792 it contains a template. */
3793 if (TYPE_IS_PADDING_P (TREE_TYPE (exp)))
3795 new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp))), exp);
3796 if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
3797 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (new)))
/* The array data is the second field, after the template.  */
3799 build_component_ref (new, NULL_TREE,
3800 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (new))),
3803 else if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
3805 build_component_ref (exp, NULL_TREE,
3806 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))), 0);
3816 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
3817 If NOTRUNC_P is true, truncation operations should be suppressed. */
3820 unchecked_convert (tree type, tree expr, bool notrunc_p)
3822 tree etype = TREE_TYPE (expr);
3824 /* If the expression is already the right type, we are done. */
3828 /* If both types are integral just do a normal conversion.
3829 Likewise for a conversion to an unconstrained array. */
3830 if ((((INTEGRAL_TYPE_P (type)
3831 && !(TREE_CODE (type) == INTEGER_TYPE
3832 && TYPE_VAX_FLOATING_POINT_P (type)))
3833 || (POINTER_TYPE_P (type) && ! TYPE_THIN_POINTER_P (type))
3834 || (TREE_CODE (type) == RECORD_TYPE
3835 && TYPE_JUSTIFIED_MODULAR_P (type)))
3836 && ((INTEGRAL_TYPE_P (etype)
3837 && !(TREE_CODE (etype) == INTEGER_TYPE
3838 && TYPE_VAX_FLOATING_POINT_P (etype)))
3839 || (POINTER_TYPE_P (etype) && !TYPE_THIN_POINTER_P (etype))
3840 || (TREE_CODE (etype) == RECORD_TYPE
3841 && TYPE_JUSTIFIED_MODULAR_P (etype))))
3842 || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3845 bool final_unchecked = false;
/* Strip a biased representation from the input by converting through an
   unbiased copy of its type first.  */
3847 if (TREE_CODE (etype) == INTEGER_TYPE
3848 && TYPE_BIASED_REPRESENTATION_P (etype))
3850 tree ntype = copy_type (etype);
3852 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
3853 TYPE_MAIN_VARIANT (ntype) = ntype;
3854 expr = build1 (NOP_EXPR, ntype, expr);
/* Likewise target an unbiased copy of the output type if it is biased.  */
3857 if (TREE_CODE (type) == INTEGER_TYPE
3858 && TYPE_BIASED_REPRESENTATION_P (type))
3860 rtype = copy_type (type);
3861 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
3862 TYPE_MAIN_VARIANT (rtype) = rtype;
3865 /* We have another special case: if we are unchecked converting subtype
3866 into a base type, we need to ensure that VRP doesn't propagate range
3867 information since this conversion may be done precisely to validate
3868 that the object is within the range it is supposed to have. */
3869 else if (TREE_CODE (expr) != INTEGER_CST
3870 && TREE_CODE (type) == INTEGER_TYPE && !TREE_TYPE (type)
3871 && ((TREE_CODE (etype) == INTEGER_TYPE && TREE_TYPE (etype))
3872 || TREE_CODE (etype) == ENUMERAL_TYPE
3873 || TREE_CODE (etype) == BOOLEAN_TYPE))
3875 /* The optimization barrier is a VIEW_CONVERT_EXPR node; moreover,
3876 in order not to be deemed a useless type conversion, it must
3877 be from subtype to base type.
3879 ??? This may raise addressability and/or aliasing issues because
3880 VIEW_CONVERT_EXPR gets gimplified as an lvalue, thus causing the
3881 address of its operand to be taken if it is deemed addressable
3882 and not already in GIMPLE form. */
3883 rtype = gnat_type_for_mode (TYPE_MODE (type), TYPE_UNSIGNED (type));
3884 rtype = copy_type (rtype);
3885 TYPE_MAIN_VARIANT (rtype) = rtype;
3886 TREE_TYPE (rtype) = type;
3887 final_unchecked = true;
3890 expr = convert (rtype, expr);
3892 expr = fold_build1 (final_unchecked ? VIEW_CONVERT_EXPR : NOP_EXPR,
3896 /* If we are converting TO an integral type whose precision is not the
3897 same as its size, first unchecked convert to a record that contains
3898 an object of the output type. Then extract the field. */
3899 else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
3900 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
3901 GET_MODE_BITSIZE (TYPE_MODE (type))))
3903 tree rec_type = make_node (RECORD_TYPE);
3904 tree field = create_field_decl (get_identifier ("OBJ"), type,
3905 rec_type, 1, 0, 0, 0);
3907 TYPE_FIELDS (rec_type) = field;
3908 layout_type (rec_type);
/* Recurse: convert to the wrapper record, then read the field back.  */
3910 expr = unchecked_convert (rec_type, expr, notrunc_p);
3911 expr = build_component_ref (expr, NULL_TREE, field, 0);
3914 /* Similarly for integral input type whose precision is not equal to its
3916 else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype)
3917 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
3918 GET_MODE_BITSIZE (TYPE_MODE (etype))))
3920 tree rec_type = make_node (RECORD_TYPE);
3922 = create_field_decl (get_identifier ("OBJ"), etype, rec_type,
3925 TYPE_FIELDS (rec_type) = field;
3926 layout_type (rec_type);
/* Wrap the input in the record, then convert the record to TYPE.  */
3928 expr = gnat_build_constructor (rec_type, build_tree_list (field, expr));
3929 expr = unchecked_convert (type, expr, notrunc_p);
3932 /* We have a special case when we are converting between two
3933 unconstrained array types. In that case, take the address,
3934 convert the fat pointer types, and dereference. */
3935 else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
3936 && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3937 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
3938 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
3939 build_unary_op (ADDR_EXPR, NULL_TREE,
3943 expr = maybe_unconstrained_array (expr);
3944 etype = TREE_TYPE (expr);
3945 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
3948 /* If the result is an integral type whose size is not equal to
3949 the size of the underlying machine type, sign- or zero-extend
3950 the result. We need not do this in the case where the input is
3951 an integral type of the same precision and signedness or if the output
3952 is a biased type or if both the input and output are unsigned. */
3954 && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
3955 && !(TREE_CODE (type) == INTEGER_TYPE
3956 && TYPE_BIASED_REPRESENTATION_P (type))
3957 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
3958 GET_MODE_BITSIZE (TYPE_MODE (type)))
3959 && !(INTEGRAL_TYPE_P (etype)
3960 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
3961 && operand_equal_p (TYPE_RM_SIZE (type),
3962 (TYPE_RM_SIZE (etype) != 0
3963 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
3965 && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
3967 tree base_type = gnat_type_for_mode (TYPE_MODE (type),
3968 TYPE_UNSIGNED (type));
/* Shift count: number of padding bits above the RM size in the mode.  */
3970 = convert (base_type,
3971 size_binop (MINUS_EXPR,
3973 (GET_MODE_BITSIZE (TYPE_MODE (type))),
3974 TYPE_RM_SIZE (type)));
/* A left shift followed by an arithmetic/logical right shift performs
   the sign- or zero-extension from the RM size.  */
3977 build_binary_op (RSHIFT_EXPR, base_type,
3978 build_binary_op (LSHIFT_EXPR, base_type,
3979 convert (base_type, expr),
3984 /* An unchecked conversion should never raise Constraint_Error. The code
3985 below assumes that GCC's conversion routines overflow the same way that
3986 the underlying hardware does. This is probably true. In the rare case
3987 when it is false, we can rely on the fact that such conversions are
3988 erroneous anyway. */
3989 if (TREE_CODE (expr) == INTEGER_CST)
3990 TREE_OVERFLOW (expr) = 0;
3992 /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
3993 show no longer constant. */
3994 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3995 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
3997 TREE_CONSTANT (expr) = 0;
4002 /* Search the chain of currently available builtin declarations for a node
4003 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
4004 found, if any, or NULL_TREE otherwise. */
4006 builtin_decl_for (tree name)
/* Linear search over the recorded builtin declarations; identifier nodes
   are unique, so pointer comparison of DECL_NAME suffices.  */
4011 for (i = 0; VEC_iterate(tree, builtin_decls, i, decl); i++)
4012 if (DECL_NAME (decl) == name)
4018 /* Return the appropriate GCC tree code for the specified GNAT type,
4019 the latter being a record type as predicated by Is_Record_Type. */
4022 tree_code_for_record_type (Entity_Id gnat_type)
4024 Node_Id component_list
4025 = Component_List (Type_Definition
4027 (Implementation_Base_Type (gnat_type))));
4030 /* Make this a UNION_TYPE unless it's either not an Unchecked_Union or
4031 we have a non-discriminant field outside a variant. In either case,
4032 it's a RECORD_TYPE. */
4034 if (!Is_Unchecked_Union (gnat_type))
/* Scan the non-pragma component items for an E_Component, i.e. a real
   field outside the variant part.  */
4037 for (component = First_Non_Pragma (Component_Items (component_list));
4038 Present (component);
4039 component = Next_Non_Pragma (component))
4040 if (Ekind (Defining_Entity (component)) == E_Component)
4046 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
4047 component of an aggregate type. */
4050 type_for_nonaliased_component_p (tree gnu_type)
4052 /* If the type is passed by reference, we may have pointers to the
4053 component so it cannot be made non-aliased. */
4054 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
4057 /* We used to say that any component of aggregate type is aliased
4058 because the front-end may take 'Reference of it. The front-end
4059 has been enhanced in the meantime so as to use a renaming instead
4060 in most cases, but the back-end can probably take the address of
4061 such a component too so we go for the conservative stance.
4063 For instance, we might need the address of any array type, even
4064 if normally passed by copy, to construct a fat pointer if the
4065 component is used as an actual for an unconstrained formal.
4067 Likewise for record types: even if a specific record subtype is
4068 passed by copy, the parent type might be passed by ref (e.g. if
4069 it's of variable size) and we might take the address of a child
4070 component to pass to a parent formal. We have no way to check
4071 for such conditions here. */
4072 if (AGGREGATE_TYPE_P (gnu_type))
4078 /* Perform final processing on global variables. */
4081 gnat_write_global_declarations (void)
4083 /* Proceed to optimize and emit assembly.
4084 FIXME: shouldn't be the front end's responsibility to call this. */
4087 /* Emit debug info for all global declarations. */
4088 emit_debug_global_declarations (VEC_address (tree, global_decls),
4089 VEC_length (tree, global_decls));
4092 #include "gt-ada-utils.h"
4093 #include "gtype-ada.h"