1 /****************************************************************************
3 * GNAT COMPILER COMPONENTS *
7 * C Implementation File *
9 * Copyright (C) 1992-2008, Free Software Foundation, Inc. *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
24 ****************************************************************************/
28 #include "coretypes.h"
41 #include "tree-inline.h"
42 #include "tree-gimple.h"
43 #include "tree-dump.h"
44 #include "pointer-set.h"
60 #ifndef MAX_FIXED_MODE_SIZE
61 #define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
64 #ifndef MAX_BITS_PER_WORD
65 #define MAX_BITS_PER_WORD BITS_PER_WORD
68 /* If nonzero, pretend we are allocating at global level. */
71 /* Tree nodes for the various types and decls we create. */
72 tree gnat_std_decls[(int) ADT_LAST];
74 /* Functions to call for each of the possible raise reasons. */
75 tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
77 /* Forward declarations for handlers of attributes. */
78 static tree handle_const_attribute (tree *, tree, tree, int, bool *);
79 static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
81 /* Table of machine-independent internal attributes for Ada. We support
82 this minimal set of attributes to accommodate the Alpha back-end which
83 unconditionally puts them on its builtins. */
84 const struct attribute_spec gnat_internal_attribute_table[] =
86 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
87 { "const", 0, 0, true, false, false, handle_const_attribute },
88 { "nothrow", 0, 0, true, false, false, handle_nothrow_attribute },
89 { NULL, 0, 0, false, false, false, NULL }
92 /* Associates a GNAT tree node to a GCC tree node. It is used in
93 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
94 of `save_gnu_tree' for more info. */
95 static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
97 #define GET_GNU_TREE(GNAT_ENTITY) \
98 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
100 #define SET_GNU_TREE(GNAT_ENTITY,VAL) \
101 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
103 #define PRESENT_GNU_TREE(GNAT_ENTITY) \
104 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
106 /* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
107 static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;
109 #define GET_DUMMY_NODE(GNAT_ENTITY) \
110 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
112 #define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
113 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
115 #define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
116 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
118 /* This variable keeps a table for types for each precision so that we only
119 allocate each of them once. Signed and unsigned types are kept separate.
121 Note that these types are only used when fold-const requests something
122 special. Perhaps we should NOT share these types; we'll see how it
124 static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
126 /* Likewise for float types, but record these by mode. */
127 static GTY(()) tree float_types[NUM_MACHINE_MODES];
129 /* For each binding contour we allocate a binding_level structure to indicate
130 the binding depth. */
132 struct gnat_binding_level GTY((chain_next ("%h.chain")))
134 /* The binding level containing this one (the enclosing binding level). */
135 struct gnat_binding_level *chain;
136 /* The BLOCK node for this level. */
138 /* If nonzero, the setjmp buffer that needs to be updated for any
139 variable-sized definition within this context. */
143 /* The binding level currently in effect. */
144 static GTY(()) struct gnat_binding_level *current_binding_level;
146 /* A chain of gnat_binding_level structures awaiting reuse. */
147 static GTY((deletable)) struct gnat_binding_level *free_binding_level;
149 /* An array of global declarations. */
150 static GTY(()) VEC(tree,gc) *global_decls;
152 /* An array of builtin declarations. */
153 static GTY(()) VEC(tree,gc) *builtin_decls;
155 /* An array of global renaming pointers. */
156 static GTY(()) VEC(tree,gc) *global_renaming_pointers;
158 /* A chain of unused BLOCK nodes. */
159 static GTY((deletable)) tree free_block_chain;
161 static void gnat_install_builtins (void);
162 static tree merge_sizes (tree, tree, tree, bool, bool);
163 static tree compute_related_constant (tree, tree);
164 static tree split_plus (tree, tree *);
165 static void gnat_gimplify_function (tree);
166 static tree float_type_for_precision (int, enum machine_mode);
167 static tree convert_to_fat_pointer (tree, tree);
168 static tree convert_to_thin_pointer (tree, tree);
169 static tree make_descriptor_field (const char *,tree, tree, tree);
170 static bool potential_alignment_gap (tree, tree, tree);
172 /* Initialize the association of GNAT nodes to GCC trees. */
175 init_gnat_to_gnu (void)
177 associate_gnat_to_gnu
178 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
181 /* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
182 which is to be associated with GNAT_ENTITY. Such GCC tree node is always
183 a ..._DECL node. If NO_CHECK is nonzero, the latter check is suppressed.
185 If GNU_DECL is zero, a previous association is to be reset. */
188 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
190 /* Check that GNAT_ENTITY is not already defined and that it is being set
191 to something which is a decl. Raise gigi 401 if not. Usually, this
192 means GNAT_ENTITY is defined twice, but occasionally is due to some
194 gcc_assert (!(gnu_decl
195 && (PRESENT_GNU_TREE (gnat_entity)
196 || (!no_check && !DECL_P (gnu_decl)))));
198 SET_GNU_TREE (gnat_entity, gnu_decl);
201 /* GNAT_ENTITY is a GNAT tree node for a defining identifier.
202 Return the ..._DECL node that was associated with it. If there is no tree
203 node associated with GNAT_ENTITY, abort.
205 In some cases, such as delayed elaboration or expressions that need to
206 be elaborated only once, GNAT_ENTITY is really not an entity. */
209 get_gnu_tree (Entity_Id gnat_entity)
211 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
212 return GET_GNU_TREE (gnat_entity);
215 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
218 present_gnu_tree (Entity_Id gnat_entity)
220 return PRESENT_GNU_TREE (gnat_entity);
223 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
226 init_dummy_type (void)
229 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
232 /* Make a dummy type corresponding to GNAT_TYPE. */
235 make_dummy_type (Entity_Id gnat_type)
237 Entity_Id gnat_underlying = Gigi_Equivalent_Type (gnat_type);
240 /* If there is an equivalent type, get its underlying type. */
241 if (Present (gnat_underlying))
242 gnat_underlying = Underlying_Type (gnat_underlying);
244 /* If there was no equivalent type (can only happen when just annotating
245 types) or underlying type, go back to the original type. */
246 if (No (gnat_underlying))
247 gnat_underlying = gnat_type;
249 /* If it there already a dummy type, use that one. Else make one. */
250 if (PRESENT_DUMMY_NODE (gnat_underlying))
251 return GET_DUMMY_NODE (gnat_underlying);
253 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
255 gnu_type = make_node (Is_Record_Type (gnat_underlying)
256 ? tree_code_for_record_type (gnat_underlying)
258 TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
259 TYPE_DUMMY_P (gnu_type) = 1;
260 if (AGGREGATE_TYPE_P (gnu_type))
262 TYPE_STUB_DECL (gnu_type) = build_decl (TYPE_DECL, NULL_TREE, gnu_type);
263 TYPE_BY_REFERENCE_P (gnu_type) = Is_By_Reference_Type (gnat_type);
266 SET_DUMMY_NODE (gnat_underlying, gnu_type);
271 /* Return nonzero if we are currently in the global binding level. */
274 global_bindings_p (void)
276 return ((force_global || !current_function_decl) ? -1 : 0);
279 /* Enter a new binding level. */
284 struct gnat_binding_level *newlevel = NULL;
286 /* Reuse a struct for this binding level, if there is one. */
287 if (free_binding_level)
289 newlevel = free_binding_level;
290 free_binding_level = free_binding_level->chain;
294 = (struct gnat_binding_level *)
295 ggc_alloc (sizeof (struct gnat_binding_level));
297 /* Use a free BLOCK, if any; otherwise, allocate one. */
298 if (free_block_chain)
300 newlevel->block = free_block_chain;
301 free_block_chain = BLOCK_CHAIN (free_block_chain);
302 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
305 newlevel->block = make_node (BLOCK);
307 /* Point the BLOCK we just made to its parent. */
308 if (current_binding_level)
309 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
311 BLOCK_VARS (newlevel->block) = BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
312 TREE_USED (newlevel->block) = 1;
314 /* Add this level to the front of the chain (stack) of levels that are
316 newlevel->chain = current_binding_level;
317 newlevel->jmpbuf_decl = NULL_TREE;
318 current_binding_level = newlevel;
321 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
322 and point FNDECL to this BLOCK. */
325 set_current_block_context (tree fndecl)
327 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
328 DECL_INITIAL (fndecl) = current_binding_level->block;
331 /* Set the jmpbuf_decl for the current binding level to DECL. */
334 set_block_jmpbuf_decl (tree decl)
336 current_binding_level->jmpbuf_decl = decl;
339 /* Get the jmpbuf_decl, if any, for the current binding level. */
342 get_block_jmpbuf_decl ()
344 return current_binding_level->jmpbuf_decl;
347 /* Exit a binding level. Set any BLOCK into the current code group. */
352 struct gnat_binding_level *level = current_binding_level;
353 tree block = level->block;
355 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
356 BLOCK_SUBBLOCKS (block) = nreverse (BLOCK_SUBBLOCKS (block));
358 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
359 are no variables free the block and merge its subblocks into those of its
360 parent block. Otherwise, add it to the list of its parent. */
361 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
363 else if (BLOCK_VARS (block) == NULL_TREE)
365 BLOCK_SUBBLOCKS (level->chain->block)
366 = chainon (BLOCK_SUBBLOCKS (block),
367 BLOCK_SUBBLOCKS (level->chain->block));
368 BLOCK_CHAIN (block) = free_block_chain;
369 free_block_chain = block;
373 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
374 BLOCK_SUBBLOCKS (level->chain->block) = block;
375 TREE_USED (block) = 1;
376 set_block_for_group (block);
379 /* Free this binding structure. */
380 current_binding_level = level->chain;
381 level->chain = free_binding_level;
382 free_binding_level = level;
386 /* Records a ..._DECL node DECL as belonging to the current lexical scope
387 and uses GNAT_NODE for location information and propagating flags. */
390 gnat_pushdecl (tree decl, Node_Id gnat_node)
392 /* If at top level, there is no context. But PARM_DECLs always go in the
393 level of its function. */
394 if (global_bindings_p () && TREE_CODE (decl) != PARM_DECL)
395 DECL_CONTEXT (decl) = 0;
398 DECL_CONTEXT (decl) = current_function_decl;
400 /* Functions imported in another function are not really nested. */
401 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_PUBLIC (decl))
402 DECL_NO_STATIC_CHAIN (decl) = 1;
405 TREE_NO_WARNING (decl) = (gnat_node == Empty || Warnings_Off (gnat_node));
407 /* Set the location of DECL and emit a declaration for it. */
408 if (Present (gnat_node))
409 Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
410 add_decl_expr (decl, gnat_node);
412 /* Put the declaration on the list. The list of declarations is in reverse
413 order. The list will be reversed later. Put global variables in the
414 globals list and builtin functions in a dedicated list to speed up
415 further lookups. Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into
416 the list, as they will cause trouble with the debugger and aren't needed
418 if (TREE_CODE (decl) != TYPE_DECL
419 || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
421 if (global_bindings_p ())
423 VEC_safe_push (tree, gc, global_decls, decl);
425 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
426 VEC_safe_push (tree, gc, builtin_decls, decl);
430 TREE_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
431 BLOCK_VARS (current_binding_level->block) = decl;
435 /* For the declaration of a type, set its name if it either is not already
436 set, was set to an IDENTIFIER_NODE, indicating an internal name,
437 or if the previous type name was not derived from a source name.
438 We'd rather have the type named with a real name and all the pointer
439 types to the same object have the same POINTER_TYPE node. Code in the
440 equivalent function of c-decl.c makes a copy of the type node here, but
441 that may cause us trouble with incomplete types. We make an exception
442 for fat pointer types because the compiler automatically builds them
443 for unconstrained array types and the debugger uses them to represent
444 both these and pointers to these. */
445 if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
447 tree t = TREE_TYPE (decl);
449 if (!TYPE_NAME (t) || TREE_CODE (TYPE_NAME (t)) == IDENTIFIER_NODE)
451 else if (TYPE_FAT_POINTER_P (t))
453 tree tt = build_variant_type_copy (t);
454 TYPE_NAME (tt) = decl;
455 TREE_USED (tt) = TREE_USED (t);
456 TREE_TYPE (decl) = tt;
457 DECL_ORIGINAL_TYPE (decl) = t;
460 else if (DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
465 /* Propagate the name to all the variants. This is needed for
466 the type qualifiers machinery to work properly. */
468 for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
469 TYPE_NAME (t) = decl;
473 /* Do little here. Set up the standard declarations later after the
474 front end has been run. */
477 gnat_init_decl_processing (void)
479 /* Make the binding_level structure for global names. */
480 current_function_decl = 0;
481 current_binding_level = 0;
482 free_binding_level = 0;
485 build_common_tree_nodes (true, true);
487 /* In Ada, we use a signed type for SIZETYPE. Use the signed type
488 corresponding to the size of Pmode. In most cases when ptr_mode and
489 Pmode differ, C will use the width of ptr_mode as sizetype. But we get
490 far better code using the width of Pmode. Make this here since we need
491 this before we can expand the GNAT types. */
492 size_type_node = gnat_type_for_size (GET_MODE_BITSIZE (Pmode), 0);
493 set_sizetype (size_type_node);
494 build_common_tree_nodes_2 (0);
496 ptr_void_type_node = build_pointer_type (void_type_node);
498 gnat_install_builtins ();
501 /* Install the builtin functions we might need. */
504 gnat_install_builtins ()
506 /* Builtins used by generic middle-end optimizers. */
507 build_common_builtin_nodes ();
509 /* Target specific builtins, such as the AltiVec family on ppc. */
510 targetm.init_builtins ();
513 /* Create the predefined scalar types such as `integer_type_node' needed
514 in the gcc back-end and initialize the global binding level. */
517 init_gigi_decls (tree long_long_float_type, tree exception_type)
522 /* Set the types that GCC and Gigi use from the front end. We would like
523 to do this for char_type_node, but it needs to correspond to the C
525 if (TREE_CODE (TREE_TYPE (long_long_float_type)) == INTEGER_TYPE)
527 /* In this case, the builtin floating point types are VAX float,
528 so make up a type for use. */
529 longest_float_type_node = make_node (REAL_TYPE);
530 TYPE_PRECISION (longest_float_type_node) = LONG_DOUBLE_TYPE_SIZE;
531 layout_type (longest_float_type_node);
532 create_type_decl (get_identifier ("longest float type"),
533 longest_float_type_node, NULL, false, true, Empty);
536 longest_float_type_node = TREE_TYPE (long_long_float_type);
538 except_type_node = TREE_TYPE (exception_type);
540 unsigned_type_node = gnat_type_for_size (INT_TYPE_SIZE, 1);
541 create_type_decl (get_identifier ("unsigned int"), unsigned_type_node,
542 NULL, false, true, Empty);
544 void_type_decl_node = create_type_decl (get_identifier ("void"),
545 void_type_node, NULL, false, true,
548 void_ftype = build_function_type (void_type_node, NULL_TREE);
549 ptr_void_ftype = build_pointer_type (void_ftype);
551 /* Build the special descriptor type and its null node if needed. */
552 if (TARGET_VTABLE_USES_DESCRIPTORS)
554 tree field_list = NULL_TREE, null_list = NULL_TREE;
557 fdesc_type_node = make_node (RECORD_TYPE);
559 for (j = 0; j < TARGET_VTABLE_USES_DESCRIPTORS; j++)
561 tree field = create_field_decl (NULL_TREE, ptr_void_ftype,
562 fdesc_type_node, 0, 0, 0, 1);
563 TREE_CHAIN (field) = field_list;
565 null_list = tree_cons (field, null_pointer_node, null_list);
568 finish_record_type (fdesc_type_node, nreverse (field_list), 0, false);
569 null_fdesc_node = gnat_build_constructor (fdesc_type_node, null_list);
572 /* Now declare runtime functions. */
573 endlink = tree_cons (NULL_TREE, void_type_node, NULL_TREE);
575 /* malloc is a function declaration tree for a function to allocate
577 malloc_decl = create_subprog_decl (get_identifier ("__gnat_malloc"),
579 build_function_type (ptr_void_type_node,
580 tree_cons (NULL_TREE,
583 NULL_TREE, false, true, true, NULL,
585 DECL_IS_MALLOC (malloc_decl) = 1;
587 /* free is a function declaration tree for a function to free memory. */
589 = create_subprog_decl (get_identifier ("__gnat_free"), NULL_TREE,
590 build_function_type (void_type_node,
591 tree_cons (NULL_TREE,
594 NULL_TREE, false, true, true, NULL, Empty);
596 /* Make the types and functions used for exception processing. */
598 = build_array_type (gnat_type_for_mode (Pmode, 0),
599 build_index_type (build_int_cst (NULL_TREE, 5)));
600 create_type_decl (get_identifier ("JMPBUF_T"), jmpbuf_type, NULL,
602 jmpbuf_ptr_type = build_pointer_type (jmpbuf_type);
604 /* Functions to get and set the jumpbuf pointer for the current thread. */
606 = create_subprog_decl
607 (get_identifier ("system__soft_links__get_jmpbuf_address_soft"),
608 NULL_TREE, build_function_type (jmpbuf_ptr_type, NULL_TREE),
609 NULL_TREE, false, true, true, NULL, Empty);
610 /* Avoid creating superfluous edges to __builtin_setjmp receivers. */
611 DECL_IS_PURE (get_jmpbuf_decl) = 1;
614 = create_subprog_decl
615 (get_identifier ("system__soft_links__set_jmpbuf_address_soft"),
617 build_function_type (void_type_node,
618 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
619 NULL_TREE, false, true, true, NULL, Empty);
621 /* Function to get the current exception. */
623 = create_subprog_decl
624 (get_identifier ("system__soft_links__get_gnat_exception"),
626 build_function_type (build_pointer_type (except_type_node), NULL_TREE),
627 NULL_TREE, false, true, true, NULL, Empty);
628 /* Avoid creating superfluous edges to __builtin_setjmp receivers. */
629 DECL_IS_PURE (get_excptr_decl) = 1;
631 /* Functions that raise exceptions. */
633 = create_subprog_decl
634 (get_identifier ("__gnat_raise_nodefer_with_msg"), NULL_TREE,
635 build_function_type (void_type_node,
636 tree_cons (NULL_TREE,
637 build_pointer_type (except_type_node),
639 NULL_TREE, false, true, true, NULL, Empty);
641 /* Dummy objects to materialize "others" and "all others" in the exception
642 tables. These are exported by a-exexpr.adb, so see this unit for the
646 = create_var_decl (get_identifier ("OTHERS"),
647 get_identifier ("__gnat_others_value"),
648 integer_type_node, 0, 1, 0, 1, 1, 0, Empty);
651 = create_var_decl (get_identifier ("ALL_OTHERS"),
652 get_identifier ("__gnat_all_others_value"),
653 integer_type_node, 0, 1, 0, 1, 1, 0, Empty);
655 /* Hooks to call when entering/leaving an exception handler. */
657 = create_subprog_decl (get_identifier ("__gnat_begin_handler"), NULL_TREE,
658 build_function_type (void_type_node,
659 tree_cons (NULL_TREE,
662 NULL_TREE, false, true, true, NULL, Empty);
665 = create_subprog_decl (get_identifier ("__gnat_end_handler"), NULL_TREE,
666 build_function_type (void_type_node,
667 tree_cons (NULL_TREE,
670 NULL_TREE, false, true, true, NULL, Empty);
672 /* If in no exception handlers mode, all raise statements are redirected to
673 __gnat_last_chance_handler. No need to redefine raise_nodefer_decl, since
674 this procedure will never be called in this mode. */
675 if (No_Exception_Handlers_Set ())
678 = create_subprog_decl
679 (get_identifier ("__gnat_last_chance_handler"), NULL_TREE,
680 build_function_type (void_type_node,
681 tree_cons (NULL_TREE,
682 build_pointer_type (char_type_node),
683 tree_cons (NULL_TREE,
686 NULL_TREE, false, true, true, NULL, Empty);
688 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
689 gnat_raise_decls[i] = decl;
692 /* Otherwise, make one decl for each exception reason. */
693 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
697 sprintf (name, "__gnat_rcheck_%.2d", i);
699 = create_subprog_decl
700 (get_identifier (name), NULL_TREE,
701 build_function_type (void_type_node,
702 tree_cons (NULL_TREE,
705 tree_cons (NULL_TREE,
708 NULL_TREE, false, true, true, NULL, Empty);
711 /* Indicate that these never return. */
712 TREE_THIS_VOLATILE (raise_nodefer_decl) = 1;
713 TREE_SIDE_EFFECTS (raise_nodefer_decl) = 1;
714 TREE_TYPE (raise_nodefer_decl)
715 = build_qualified_type (TREE_TYPE (raise_nodefer_decl),
718 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
720 TREE_THIS_VOLATILE (gnat_raise_decls[i]) = 1;
721 TREE_SIDE_EFFECTS (gnat_raise_decls[i]) = 1;
722 TREE_TYPE (gnat_raise_decls[i])
723 = build_qualified_type (TREE_TYPE (gnat_raise_decls[i]),
727 /* setjmp returns an integer and has one operand, which is a pointer to
730 = create_subprog_decl
731 (get_identifier ("__builtin_setjmp"), NULL_TREE,
732 build_function_type (integer_type_node,
733 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
734 NULL_TREE, false, true, true, NULL, Empty);
736 DECL_BUILT_IN_CLASS (setjmp_decl) = BUILT_IN_NORMAL;
737 DECL_FUNCTION_CODE (setjmp_decl) = BUILT_IN_SETJMP;
739 /* update_setjmp_buf updates a setjmp buffer from the current stack pointer
741 update_setjmp_buf_decl
742 = create_subprog_decl
743 (get_identifier ("__builtin_update_setjmp_buf"), NULL_TREE,
744 build_function_type (void_type_node,
745 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
746 NULL_TREE, false, true, true, NULL, Empty);
748 DECL_BUILT_IN_CLASS (update_setjmp_buf_decl) = BUILT_IN_NORMAL;
749 DECL_FUNCTION_CODE (update_setjmp_buf_decl) = BUILT_IN_UPDATE_SETJMP_BUF;
751 main_identifier_node = get_identifier ("main");
754 /* Given a record type RECORD_TYPE and a chain of FIELD_DECL nodes FIELDLIST,
755 finish constructing the record or union type. If REP_LEVEL is zero, this
756 record has no representation clause and so will be entirely laid out here.
757 If REP_LEVEL is one, this record has a representation clause and has been
758 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
759 this record is derived from a parent record and thus inherits its layout;
760 only make a pass on the fields to finalize them. If DO_NOT_FINALIZE is
761 true, the record type is expected to be modified afterwards so it will
762 not be sent to the back-end for finalization. */
765 finish_record_type (tree record_type, tree fieldlist, int rep_level,
766 bool do_not_finalize)
768 enum tree_code code = TREE_CODE (record_type);
769 tree name = TYPE_NAME (record_type);
770 tree ada_size = bitsize_zero_node;
771 tree size = bitsize_zero_node;
772 bool had_size = TYPE_SIZE (record_type) != 0;
773 bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
774 bool had_align = TYPE_ALIGN (record_type) != 0;
777 if (name && TREE_CODE (name) == TYPE_DECL)
778 name = DECL_NAME (name);
780 TYPE_FIELDS (record_type) = fieldlist;
781 TYPE_STUB_DECL (record_type) = build_decl (TYPE_DECL, name, record_type);
783 /* We don't need both the typedef name and the record name output in
784 the debugging information, since they are the same. */
785 DECL_ARTIFICIAL (TYPE_STUB_DECL (record_type)) = 1;
787 /* Globally initialize the record first. If this is a rep'ed record,
788 that just means some initializations; otherwise, layout the record. */
791 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
792 TYPE_MODE (record_type) = BLKmode;
795 TYPE_SIZE_UNIT (record_type) = size_zero_node;
797 TYPE_SIZE (record_type) = bitsize_zero_node;
799 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
800 out just like a UNION_TYPE, since the size will be fixed. */
801 else if (code == QUAL_UNION_TYPE)
806 /* Ensure there isn't a size already set. There can be in an error
807 case where there is a rep clause but all fields have errors and
808 no longer have a position. */
809 TYPE_SIZE (record_type) = 0;
810 layout_type (record_type);
813 /* At this point, the position and size of each field is known. It was
814 either set before entry by a rep clause, or by laying out the type above.
816 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
817 to compute the Ada size; the GCC size and alignment (for rep'ed records
818 that are not padding types); and the mode (for rep'ed records). We also
819 clear the DECL_BIT_FIELD indication for the cases we know have not been
820 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
822 if (code == QUAL_UNION_TYPE)
823 fieldlist = nreverse (fieldlist);
825 for (field = fieldlist; field; field = TREE_CHAIN (field))
827 tree type = TREE_TYPE (field);
828 tree pos = bit_position (field);
829 tree this_size = DECL_SIZE (field);
832 if ((TREE_CODE (type) == RECORD_TYPE
833 || TREE_CODE (type) == UNION_TYPE
834 || TREE_CODE (type) == QUAL_UNION_TYPE)
835 && !TYPE_IS_FAT_POINTER_P (type)
836 && !TYPE_CONTAINS_TEMPLATE_P (type)
837 && TYPE_ADA_SIZE (type))
838 this_ada_size = TYPE_ADA_SIZE (type);
840 this_ada_size = this_size;
842 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
843 if (DECL_BIT_FIELD (field)
844 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
846 unsigned int align = TYPE_ALIGN (type);
848 /* In the general case, type alignment is required. */
849 if (value_factor_p (pos, align))
851 /* The enclosing record type must be sufficiently aligned.
852 Otherwise, if no alignment was specified for it and it
853 has been laid out already, bump its alignment to the
854 desired one if this is compatible with its size. */
855 if (TYPE_ALIGN (record_type) >= align)
857 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
858 DECL_BIT_FIELD (field) = 0;
862 && value_factor_p (TYPE_SIZE (record_type), align))
864 TYPE_ALIGN (record_type) = align;
865 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
866 DECL_BIT_FIELD (field) = 0;
870 /* In the non-strict alignment case, only byte alignment is. */
871 if (!STRICT_ALIGNMENT
872 && DECL_BIT_FIELD (field)
873 && value_factor_p (pos, BITS_PER_UNIT))
874 DECL_BIT_FIELD (field) = 0;
877 /* If we still have DECL_BIT_FIELD set at this point, we know the field
878 is technically not addressable. Except that it can actually be
879 addressed if the field is BLKmode and happens to be properly
881 DECL_NONADDRESSABLE_P (field)
882 |= DECL_BIT_FIELD (field) && DECL_MODE (field) != BLKmode;
884 /* A type must be as aligned as its most aligned field that is not
885 a bit-field. But this is already enforced by layout_type. */
886 if (rep_level > 0 && !DECL_BIT_FIELD (field))
887 TYPE_ALIGN (record_type)
888 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
893 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
894 size = size_binop (MAX_EXPR, size, this_size);
897 case QUAL_UNION_TYPE:
899 = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
900 this_ada_size, ada_size);
901 size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
906 /* Since we know here that all fields are sorted in order of
907 increasing bit position, the size of the record is one
908 higher than the ending bit of the last field processed
909 unless we have a rep clause, since in that case we might
910 have a field outside a QUAL_UNION_TYPE that has a higher ending
911 position. So use a MAX in that case. Also, if this field is a
912 QUAL_UNION_TYPE, we need to take into account the previous size in
913 the case of empty variants. */
915 = merge_sizes (ada_size, pos, this_ada_size,
916 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
918 = merge_sizes (size, pos, this_size,
919 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
927 if (code == QUAL_UNION_TYPE)
928 nreverse (fieldlist);
932 /* If this is a padding record, we never want to make the size smaller
933 than what was specified in it, if any. */
934 if (TREE_CODE (record_type) == RECORD_TYPE
935 && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
936 size = TYPE_SIZE (record_type);
938 /* Now set any of the values we've just computed that apply. */
939 if (!TYPE_IS_FAT_POINTER_P (record_type)
940 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
941 SET_TYPE_ADA_SIZE (record_type, ada_size);
945 tree size_unit = had_size_unit
946 ? TYPE_SIZE_UNIT (record_type)
948 size_binop (CEIL_DIV_EXPR, size,
950 unsigned int align = TYPE_ALIGN (record_type);
952 TYPE_SIZE (record_type) = variable_size (round_up (size, align));
953 TYPE_SIZE_UNIT (record_type)
954 = variable_size (round_up (size_unit, align / BITS_PER_UNIT));
956 compute_record_mode (record_type);
960 if (!do_not_finalize)
961 rest_of_record_type_compilation (record_type);
964 /* Wrap up compilation of RECORD_TYPE, i.e. most notably output all
965 the debug information associated with it. It need not be invoked
966 directly in most cases since finish_record_type takes care of doing
967 so, unless explicitly requested not to through DO_NOT_FINALIZE. */
970 rest_of_record_type_compilation (tree record_type)
972 tree fieldlist = TYPE_FIELDS (record_type);
974 enum tree_code code = TREE_CODE (record_type);
975 bool var_size = false;
977 for (field = fieldlist; field; field = TREE_CHAIN (field))
979 /* We need to make an XVE/XVU record if any field has variable size,
980 whether or not the record does. For example, if we have a union,
981 it may be that all fields, rounded up to the alignment, have the
982 same size, in which case we'll use that size. But the debug
983 output routines (except Dwarf2) won't be able to output the fields,
984 so we need to make the special record. */
985 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
986 /* If a field has a non-constant qualifier, the record will have
987 variable size too. */
988 || (code == QUAL_UNION_TYPE
989 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
996 /* If this record is of variable size, rename it so that the
997 debugger knows it is and make a new, parallel, record
998 that tells the debugger how the record is laid out. See
999 exp_dbug.ads. But don't do this for records that are padding
1000 since they confuse GDB. */
1002 && !(TREE_CODE (record_type) == RECORD_TYPE
1003 && TYPE_IS_PADDING_P (record_type)))
1005 tree new_record_type
1006 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
1007 ? UNION_TYPE : TREE_CODE (record_type));
1008 tree orig_name = TYPE_NAME (record_type);
1010 = (TREE_CODE (orig_name) == TYPE_DECL ? DECL_NAME (orig_name)
1013 = concat_id_with_name (orig_id,
1014 TREE_CODE (record_type) == QUAL_UNION_TYPE
1016 tree last_pos = bitsize_zero_node;
1018 tree prev_old_field = 0;
1020 TYPE_NAME (new_record_type) = new_id;
1021 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
1022 TYPE_STUB_DECL (new_record_type)
1023 = build_decl (TYPE_DECL, new_id, new_record_type);
1024 DECL_ARTIFICIAL (TYPE_STUB_DECL (new_record_type)) = 1;
1025 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
1026 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
1027 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
1028 TYPE_SIZE_UNIT (new_record_type)
1029 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
1031 /* Now scan all the fields, replacing each field with a new
1032 field corresponding to the new encoding. */
1033 for (old_field = TYPE_FIELDS (record_type); old_field;
1034 old_field = TREE_CHAIN (old_field))
1036 tree field_type = TREE_TYPE (old_field);
1037 tree field_name = DECL_NAME (old_field);
1039 tree curpos = bit_position (old_field);
1041 unsigned int align = 0;
1044 /* See how the position was modified from the last position.
1046 There are two basic cases we support: a value was added
1047 to the last position or the last position was rounded to
1048 a boundary and they something was added. Check for the
1049 first case first. If not, see if there is any evidence
1050 of rounding. If so, round the last position and try
1053 If this is a union, the position can be taken as zero. */
1055 if (TREE_CODE (new_record_type) == UNION_TYPE)
1056 pos = bitsize_zero_node, align = 0;
1058 pos = compute_related_constant (curpos, last_pos);
1060 if (!pos && TREE_CODE (curpos) == MULT_EXPR
1061 && host_integerp (TREE_OPERAND (curpos, 1), 1))
1063 tree offset = TREE_OPERAND (curpos, 0);
1064 align = tree_low_cst (TREE_OPERAND (curpos, 1), 1);
1066 /* Strip off any conversions. */
1067 while (TREE_CODE (offset) == NON_LVALUE_EXPR
1068 || TREE_CODE (offset) == NOP_EXPR
1069 || TREE_CODE (offset) == CONVERT_EXPR)
1070 offset = TREE_OPERAND (offset, 0);
1072 /* An offset which is a bitwise AND with a negative power of 2
1073 means an alignment corresponding to this power of 2. */
1074 if (TREE_CODE (offset) == BIT_AND_EXPR
1075 && host_integerp (TREE_OPERAND (offset, 1), 0)
1076 && tree_int_cst_sgn (TREE_OPERAND (offset, 1)) < 0)
1079 = - tree_low_cst (TREE_OPERAND (offset, 1), 0);
1080 if (exact_log2 (pow) > 0)
1084 pos = compute_related_constant (curpos,
1085 round_up (last_pos, align));
1087 else if (!pos && TREE_CODE (curpos) == PLUS_EXPR
1088 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
1089 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
1090 && host_integerp (TREE_OPERAND
1091 (TREE_OPERAND (curpos, 0), 1),
1096 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
1097 pos = compute_related_constant (curpos,
1098 round_up (last_pos, align));
1100 else if (potential_alignment_gap (prev_old_field, old_field,
1103 align = TYPE_ALIGN (field_type);
1104 pos = compute_related_constant (curpos,
1105 round_up (last_pos, align));
1108 /* If we can't compute a position, set it to zero.
1110 ??? We really should abort here, but it's too much work
1111 to get this correct for all cases. */
1114 pos = bitsize_zero_node;
1116 /* See if this type is variable-sized and make a pointer type
1117 and indicate the indirection if so. Beware that the debug
1118 back-end may adjust the position computed above according
1119 to the alignment of the field type, i.e. the pointer type
1120 in this case, if we don't preventively counter that. */
1121 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
1123 field_type = build_pointer_type (field_type);
1124 if (align != 0 && TYPE_ALIGN (field_type) > align)
1126 field_type = copy_node (field_type);
1127 TYPE_ALIGN (field_type) = align;
1132 /* Make a new field name, if necessary. */
1133 if (var || align != 0)
1138 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
1139 align / BITS_PER_UNIT);
1141 strcpy (suffix, "XVL");
1143 field_name = concat_id_with_name (field_name, suffix);
1146 new_field = create_field_decl (field_name, field_type,
1148 DECL_SIZE (old_field), pos, 0);
1149 TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
1150 TYPE_FIELDS (new_record_type) = new_field;
1152 /* If old_field is a QUAL_UNION_TYPE, take its size as being
1153 zero. The only time it's not the last field of the record
1154 is when there are other components at fixed positions after
1155 it (meaning there was a rep clause for every field) and we
1156 want to be able to encode them. */
1157 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
1158 (TREE_CODE (TREE_TYPE (old_field))
1161 : DECL_SIZE (old_field));
1162 prev_old_field = old_field;
1165 TYPE_FIELDS (new_record_type)
1166 = nreverse (TYPE_FIELDS (new_record_type));
1168 rest_of_type_decl_compilation (TYPE_STUB_DECL (new_record_type));
1171 rest_of_type_decl_compilation (TYPE_STUB_DECL (record_type));
1174 /* Utility function of above to merge LAST_SIZE, the previous size of a record
1175 with FIRST_BIT and SIZE that describe a field. SPECIAL is nonzero
1176 if this represents a QUAL_UNION_TYPE in which case we must look for
1177 COND_EXPRs and replace a value of zero with the old size. If HAS_REP
1178 is nonzero, we must take the MAX of the end position of this field
1179 with LAST_SIZE. In all other cases, we use FIRST_BIT plus SIZE.
1181 We return an expression for the size. */
1184 merge_sizes (tree last_size, tree first_bit, tree size, bool special,
1187 tree type = TREE_TYPE (last_size);
1190 if (!special || TREE_CODE (size) != COND_EXPR)
1192 new = size_binop (PLUS_EXPR, first_bit, size);
1194 new = size_binop (MAX_EXPR, last_size, new);
1198 new = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
1199 integer_zerop (TREE_OPERAND (size, 1))
1200 ? last_size : merge_sizes (last_size, first_bit,
1201 TREE_OPERAND (size, 1),
1203 integer_zerop (TREE_OPERAND (size, 2))
1204 ? last_size : merge_sizes (last_size, first_bit,
1205 TREE_OPERAND (size, 2),
1208 /* We don't need any NON_VALUE_EXPRs and they can confuse us (especially
1209 when fed through substitute_in_expr) into thinking that a constant
1210 size is not constant. */
1211 while (TREE_CODE (new) == NON_LVALUE_EXPR)
1212 new = TREE_OPERAND (new, 0);
1217 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1218 related by the addition of a constant. Return that constant if so. */
1221 compute_related_constant (tree op0, tree op1)
1223 tree op0_var, op1_var;
1224 tree op0_con = split_plus (op0, &op0_var);
1225 tree op1_con = split_plus (op1, &op1_var);
1226 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1228 if (operand_equal_p (op0_var, op1_var, 0))
1230 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1236 /* Utility function of above to split a tree OP which may be a sum, into a
1237 constant part, which is returned, and a variable part, which is stored
1238 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1242 split_plus (tree in, tree *pvar)
1244 /* Strip NOPS in order to ease the tree traversal and maximize the
1245 potential for constant or plus/minus discovery. We need to be careful
1246 to always return and set *pvar to bitsizetype trees, but it's worth
1250 *pvar = convert (bitsizetype, in);
1252 if (TREE_CODE (in) == INTEGER_CST)
1254 *pvar = bitsize_zero_node;
1255 return convert (bitsizetype, in);
1257 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1259 tree lhs_var, rhs_var;
1260 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1261 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
1263 if (lhs_var == TREE_OPERAND (in, 0)
1264 && rhs_var == TREE_OPERAND (in, 1))
1265 return bitsize_zero_node;
1267 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1268 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
1271 return bitsize_zero_node;
1274 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1275 subprogram. If it is void_type_node, then we are dealing with a procedure,
1276 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1277 PARM_DECL nodes that are the subprogram arguments. CICO_LIST is the
1278 copy-in/copy-out list to be stored into TYPE_CICO_LIST.
1279 RETURNS_UNCONSTRAINED is true if the function returns an unconstrained
1280 object. RETURNS_BY_REF is true if the function returns by reference.
1281 RETURNS_BY_TARGET_PTR is true if the function is to be passed (as its
1282 first parameter) the address of the place to copy its result. */
1285 create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
1286 bool returns_unconstrained, bool returns_by_ref,
1287 bool returns_by_target_ptr)
1289 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1290 the subprogram formal parameters. This list is generated by traversing the
1291 input list of PARM_DECL nodes. */
1292 tree param_type_list = NULL;
1296 for (param_decl = param_decl_list; param_decl;
1297 param_decl = TREE_CHAIN (param_decl))
1298 param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
1301 /* The list of the function parameter types has to be terminated by the void
1302 type to signal to the back-end that we are not dealing with a variable
1303 parameter subprogram, but that the subprogram has a fixed number of
1305 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1307 /* The list of argument types has been created in reverse
1309 param_type_list = nreverse (param_type_list);
1311 type = build_function_type (return_type, param_type_list);
1313 /* TYPE may have been shared since GCC hashes types. If it has a CICO_LIST
1314 or the new type should, make a copy of TYPE. Likewise for
1315 RETURNS_UNCONSTRAINED and RETURNS_BY_REF. */
1316 if (TYPE_CI_CO_LIST (type) || cico_list
1317 || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
1318 || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref
1319 || TYPE_RETURNS_BY_TARGET_PTR_P (type) != returns_by_target_ptr)
1320 type = copy_type (type);
1322 TYPE_CI_CO_LIST (type) = cico_list;
1323 TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
1324 TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
1325 TYPE_RETURNS_BY_TARGET_PTR_P (type) = returns_by_target_ptr;
1329 /* Return a copy of TYPE but safe to modify in any way. */
1332 copy_type (tree type)
1334 tree new = copy_node (type);
1336 /* copy_node clears this field instead of copying it, because it is
1337 aliased with TREE_CHAIN. */
1338 TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type);
1340 TYPE_POINTER_TO (new) = 0;
1341 TYPE_REFERENCE_TO (new) = 0;
1342 TYPE_MAIN_VARIANT (new) = new;
1343 TYPE_NEXT_VARIANT (new) = 0;
1348 /* Return an INTEGER_TYPE of SIZETYPE with range MIN to MAX and whose
1349 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position of
1353 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
1355 /* First build a type for the desired range. */
1356 tree type = build_index_2_type (min, max);
1358 /* If this type has the TYPE_INDEX_TYPE we want, return it. Otherwise, if it
1359 doesn't have TYPE_INDEX_TYPE set, set it to INDEX. If TYPE_INDEX_TYPE
1360 is set, but not to INDEX, make a copy of this type with the requested
1361 index type. Note that we have no way of sharing these types, but that's
1362 only a small hole. */
1363 if (TYPE_INDEX_TYPE (type) == index)
1365 else if (TYPE_INDEX_TYPE (type))
1366 type = copy_type (type);
1368 SET_TYPE_INDEX_TYPE (type, index);
1369 create_type_decl (NULL_TREE, type, NULL, true, false, gnat_node);
1373 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type (a character
1374 string) and TYPE is a ..._TYPE node giving its data type.
1375 ARTIFICIAL_P is true if this is a declaration that was generated
1376 by the compiler. DEBUG_INFO_P is true if we need to write debugging
1377 information about this type. GNAT_NODE is used for the position of
1381 create_type_decl (tree type_name, tree type, struct attrib *attr_list,
1382 bool artificial_p, bool debug_info_p, Node_Id gnat_node)
1384 tree type_decl = build_decl (TYPE_DECL, type_name, type);
1385 enum tree_code code = TREE_CODE (type);
1387 DECL_ARTIFICIAL (type_decl) = artificial_p;
1389 if (!TYPE_IS_DUMMY_P (type))
1390 gnat_pushdecl (type_decl, gnat_node);
1392 process_attributes (type_decl, attr_list);
1394 /* Pass type declaration information to the debugger unless this is an
1395 UNCONSTRAINED_ARRAY_TYPE, which the debugger does not support,
1396 and ENUMERAL_TYPE or RECORD_TYPE which is handled separately, or
1397 type for which debugging information was not requested. */
1398 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
1399 DECL_IGNORED_P (type_decl) = 1;
1400 else if (code != ENUMERAL_TYPE
1401 && (code != RECORD_TYPE || TYPE_IS_FAT_POINTER_P (type))
1402 && !((code == POINTER_TYPE || code == REFERENCE_TYPE)
1403 && TYPE_IS_DUMMY_P (TREE_TYPE (type))))
1404 rest_of_type_decl_compilation (type_decl);
1409 /* Helper for create_var_decl and create_true_var_decl. Returns a GCC VAR_DECL
1412 VAR_NAME gives the name of the variable. ASM_NAME is its assembler name
1413 (if provided). TYPE is its data type (a GCC ..._TYPE node). VAR_INIT is
1414 the GCC tree for an optional initial expression; NULL_TREE if none.
1416 CONST_FLAG is true if this variable is constant, in which case we might
1417 return a CONST_DECL node unless CONST_DECL_ALLOWED_FLAG is false.
1419 PUBLIC_FLAG is true if this definition is to be made visible outside of
1420 the current compilation unit. This flag should be set when processing the
1421 variable definitions in a package specification. EXTERN_FLAG is nonzero
1422 when processing an external variable declaration (as opposed to a
1423 definition: no storage is to be allocated for the variable here).
1425 STATIC_FLAG is only relevant when not at top level. In that case
1426 it indicates whether to always allocate storage to the variable.
1428 GNAT_NODE is used for the position of the decl. */
1431 create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
1432 bool const_flag, bool const_decl_allowed_flag,
1433 bool public_flag, bool extern_flag, bool static_flag,
1434 struct attrib *attr_list, Node_Id gnat_node)
1438 && TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
1439 && (global_bindings_p () || static_flag
1440 ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init)) != 0
1441 : TREE_CONSTANT (var_init)));
1443 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
1444 case the initializer may be used in-lieu of the DECL node (as done in
1445 Identifier_to_gnu). This is useful to prevent the need of elaboration
1446 code when an identifier for which such a decl is made is in turn used as
1447 an initializer. We used to rely on CONST vs VAR_DECL for this purpose,
1448 but extra constraints apply to this choice (see below) and are not
1449 relevant to the distinction we wish to make. */
1450 bool constant_p = const_flag && init_const;
1452 /* The actual DECL node. CONST_DECL was initially intended for enumerals
1453 and may be used for scalars in general but not for aggregates. */
1455 = build_decl ((constant_p && const_decl_allowed_flag
1456 && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
1459 /* If this is external, throw away any initializations (they will be done
1460 elsewhere) unless this is a a constant for which we would like to remain
1461 able to get the initializer. If we are defining a global here, leave a
1462 constant initialization and save any variable elaborations for the
1463 elaboration routine. If we are just annotating types, throw away the
1464 initialization if it isn't a constant. */
1465 if ((extern_flag && !constant_p)
1466 || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
1467 var_init = NULL_TREE;
1469 /* At the global level, an initializer requiring code to be generated
1470 produces elaboration statements. Check that such statements are allowed,
1471 that is, not violating a No_Elaboration_Code restriction. */
1472 if (global_bindings_p () && var_init != 0 && ! init_const)
1473 Check_Elaboration_Code_Allowed (gnat_node);
1475 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
1476 try to fiddle with DECL_COMMON. However, on platforms that don't
1477 support global BSS sections, uninitialized global variables would
1478 go in DATA instead, thus increasing the size of the executable. */
1480 && TREE_CODE (var_decl) == VAR_DECL
1481 && !have_global_bss_p ())
1482 DECL_COMMON (var_decl) = 1;
1483 DECL_INITIAL (var_decl) = var_init;
1484 TREE_READONLY (var_decl) = const_flag;
1485 DECL_EXTERNAL (var_decl) = extern_flag;
1486 TREE_PUBLIC (var_decl) = public_flag || extern_flag;
1487 TREE_CONSTANT (var_decl) = constant_p;
1488 TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
1489 = TYPE_VOLATILE (type);
1491 /* If it's public and not external, always allocate storage for it.
1492 At the global binding level we need to allocate static storage for the
1493 variable if and only if it's not external. If we are not at the top level
1494 we allocate automatic storage unless requested not to. */
1495 TREE_STATIC (var_decl)
1496 = public_flag || (global_bindings_p () ? !extern_flag : static_flag);
1498 if (asm_name && VAR_OR_FUNCTION_DECL_P (var_decl))
1499 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
1501 process_attributes (var_decl, attr_list);
1503 /* Add this decl to the current binding level. */
1504 gnat_pushdecl (var_decl, gnat_node);
1506 if (TREE_SIDE_EFFECTS (var_decl))
1507 TREE_ADDRESSABLE (var_decl) = 1;
1509 if (TREE_CODE (var_decl) != CONST_DECL)
1511 if (global_bindings_p ())
1512 rest_of_decl_compilation (var_decl, true, 0);
1515 expand_decl (var_decl);
1520 /* Wrapper around create_var_decl_1 for cases where we don't care whether
1521 a VAR or a CONST decl node is created. */
1524 create_var_decl (tree var_name, tree asm_name, tree type, tree var_init,
1525 bool const_flag, bool public_flag, bool extern_flag,
1526 bool static_flag, struct attrib *attr_list,
1529 return create_var_decl_1 (var_name, asm_name, type, var_init,
1531 public_flag, extern_flag, static_flag,
1532 attr_list, gnat_node);
1535 /* Wrapper around create_var_decl_1 for cases where a VAR_DECL node is
1536 required. The primary intent is for DECL_CONST_CORRESPONDING_VARs, which
1537 must be VAR_DECLs and on which we want TREE_READONLY set to have them
1538 possibly assigned to a readonly data section. */
1541 create_true_var_decl (tree var_name, tree asm_name, tree type, tree var_init,
1542 bool const_flag, bool public_flag, bool extern_flag,
1543 bool static_flag, struct attrib *attr_list,
1546 return create_var_decl_1 (var_name, asm_name, type, var_init,
1548 public_flag, extern_flag, static_flag,
1549 attr_list, gnat_node);
1552 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
1555 aggregate_type_contains_array_p (tree type)
1557 switch (TREE_CODE (type))
1561 case QUAL_UNION_TYPE:
1564 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1565 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
1566 && aggregate_type_contains_array_p (TREE_TYPE (field)))
1579 /* Returns a FIELD_DECL node. FIELD_NAME the field name, FIELD_TYPE is its
1580 type, and RECORD_TYPE is the type of the parent. PACKED is nonzero if
1581 this field is in a record type with a "pragma pack". If SIZE is nonzero
1582 it is the specified size for this field. If POS is nonzero, it is the bit
1583 position. If ADDRESSABLE is nonzero, it means we are allowed to take
1584 the address of this field for aliasing purposes. If it is negative, we
1585 should not make a bitfield, which is used by make_aligning_type. */
1588 create_field_decl (tree field_name, tree field_type, tree record_type,
1589 int packed, tree size, tree pos, int addressable)
1591 tree field_decl = build_decl (FIELD_DECL, field_name, field_type);
1593 DECL_CONTEXT (field_decl) = record_type;
1594 TREE_READONLY (field_decl) = TYPE_READONLY (field_type);
1596 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
1597 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
1598 Likewise for an aggregate without specified position that contains an
1599 array, because in this case slices of variable length of this array
1600 must be handled by GCC and variable-sized objects need to be aligned
1601 to at least a byte boundary. */
1602 if (packed && (TYPE_MODE (field_type) == BLKmode
1604 && AGGREGATE_TYPE_P (field_type)
1605 && aggregate_type_contains_array_p (field_type))))
1606 DECL_ALIGN (field_decl) = BITS_PER_UNIT;
1608 /* If a size is specified, use it. Otherwise, if the record type is packed
1609 compute a size to use, which may differ from the object's natural size.
1610 We always set a size in this case to trigger the checks for bitfield
1611 creation below, which is typically required when no position has been
1614 size = convert (bitsizetype, size);
1615 else if (packed == 1)
1617 size = rm_size (field_type);
1619 /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
1621 if (TREE_CODE (size) == INTEGER_CST
1622 && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
1623 size = round_up (size, BITS_PER_UNIT);
1626 /* If we may, according to ADDRESSABLE, make a bitfield if a size is
1627 specified for two reasons: first if the size differs from the natural
1628 size. Second, if the alignment is insufficient. There are a number of
1629 ways the latter can be true.
1631 We never make a bitfield if the type of the field has a nonconstant size,
1632 because no such entity requiring bitfield operations should reach here.
1634 We do *preventively* make a bitfield when there might be the need for it
1635 but we don't have all the necessary information to decide, as is the case
1636 of a field with no specified position in a packed record.
1638 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
1639 in layout_decl or finish_record_type to clear the bit_field indication if
1640 it is in fact not needed. */
1641 if (addressable >= 0
1643 && TREE_CODE (size) == INTEGER_CST
1644 && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
1645 && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
1646 || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
1648 || (TYPE_ALIGN (record_type) != 0
1649 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
1651 DECL_BIT_FIELD (field_decl) = 1;
1652 DECL_SIZE (field_decl) = size;
1653 if (!packed && !pos)
1654 DECL_ALIGN (field_decl)
1655 = (TYPE_ALIGN (record_type) != 0
1656 ? MIN (TYPE_ALIGN (record_type), TYPE_ALIGN (field_type))
1657 : TYPE_ALIGN (field_type));
1660 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
1662 /* Bump the alignment if need be, either for bitfield/packing purposes or
1663 to satisfy the type requirements if no such consideration applies. When
1664 we get the alignment from the type, indicate if this is from an explicit
1665 user request, which prevents stor-layout from lowering it later on. */
1668 = (DECL_BIT_FIELD (field_decl) ? 1
1669 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT : 0);
1671 if (bit_align > DECL_ALIGN (field_decl))
1672 DECL_ALIGN (field_decl) = bit_align;
1673 else if (!bit_align && TYPE_ALIGN (field_type) > DECL_ALIGN (field_decl))
1675 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
1676 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (field_type);
1682 /* We need to pass in the alignment the DECL is known to have.
1683 This is the lowest-order bit set in POS, but no more than
1684 the alignment of the record, if one is specified. Note
1685 that an alignment of 0 is taken as infinite. */
1686 unsigned int known_align;
1688 if (host_integerp (pos, 1))
1689 known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
1691 known_align = BITS_PER_UNIT;
1693 if (TYPE_ALIGN (record_type)
1694 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
1695 known_align = TYPE_ALIGN (record_type);
1697 layout_decl (field_decl, known_align);
1698 SET_DECL_OFFSET_ALIGN (field_decl,
1699 host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
1701 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
1702 &DECL_FIELD_BIT_OFFSET (field_decl),
1703 DECL_OFFSET_ALIGN (field_decl), pos);
1705 DECL_HAS_REP_P (field_decl) = 1;
1708 /* In addition to what our caller says, claim the field is addressable if we
1709 know that its type is not suitable.
1711 The field may also be "technically" nonaddressable, meaning that even if
1712 we attempt to take the field's address we will actually get the address
1713 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
1714 value we have at this point is not accurate enough, so we don't account
1715 for this here and let finish_record_type decide. */
1716 if (!type_for_nonaliased_component_p (field_type))
1719 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
1724 /* Returns a PARM_DECL node. PARAM_NAME is the name of the parameter,
1725 PARAM_TYPE is its type. READONLY is true if the parameter is
1726 readonly (either an In parameter or an address of a pass-by-ref
1730 create_param_decl (tree param_name, tree param_type, bool readonly)
1732 tree param_decl = build_decl (PARM_DECL, param_name, param_type);
1734 /* Honor targetm.calls.promote_prototypes(), as not doing so can
1735 lead to various ABI violations. */
1736 if (targetm.calls.promote_prototypes (param_type)
1737 && (TREE_CODE (param_type) == INTEGER_TYPE
1738 || TREE_CODE (param_type) == ENUMERAL_TYPE)
1739 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
1741 /* We have to be careful about biased types here. Make a subtype
1742 of integer_type_node with the proper biasing. */
1743 if (TREE_CODE (param_type) == INTEGER_TYPE
1744 && TYPE_BIASED_REPRESENTATION_P (param_type))
1747 = copy_type (build_range_type (integer_type_node,
1748 TYPE_MIN_VALUE (param_type),
1749 TYPE_MAX_VALUE (param_type)));
1751 TYPE_BIASED_REPRESENTATION_P (param_type) = 1;
1754 param_type = integer_type_node;
1757 DECL_ARG_TYPE (param_decl) = param_type;
1758 TREE_READONLY (param_decl) = readonly;
1762 /* Given a DECL and ATTR_LIST, process the listed attributes. */
1765 process_attributes (tree decl, struct attrib *attr_list)
1767 for (; attr_list; attr_list = attr_list->next)
1768 switch (attr_list->type)
1770 case ATTR_MACHINE_ATTRIBUTE:
1771 decl_attributes (&decl, tree_cons (attr_list->name, attr_list->args,
1773 ATTR_FLAG_TYPE_IN_PLACE);
1776 case ATTR_LINK_ALIAS:
1777 if (! DECL_EXTERNAL (decl))
1779 TREE_STATIC (decl) = 1;
1780 assemble_alias (decl, attr_list->name);
1784 case ATTR_WEAK_EXTERNAL:
1786 declare_weak (decl);
1788 post_error ("?weak declarations not supported on this target",
1789 attr_list->error_point);
1792 case ATTR_LINK_SECTION:
1793 if (targetm.have_named_sections)
1795 DECL_SECTION_NAME (decl)
1796 = build_string (IDENTIFIER_LENGTH (attr_list->name),
1797 IDENTIFIER_POINTER (attr_list->name));
1798 DECL_COMMON (decl) = 0;
1801 post_error ("?section attributes are not supported for this target",
1802 attr_list->error_point);
1805 case ATTR_LINK_CONSTRUCTOR:
1806 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1807 TREE_USED (decl) = 1;
1810 case ATTR_LINK_DESTRUCTOR:
1811 DECL_STATIC_DESTRUCTOR (decl) = 1;
1812 TREE_USED (decl) = 1;
1817 /* Record a global renaming pointer. */
1820 record_global_renaming_pointer (tree decl)
1822 gcc_assert (DECL_RENAMED_OBJECT (decl));
1823 VEC_safe_push (tree, gc, global_renaming_pointers, decl);
1826 /* Invalidate the global renaming pointers. */
1829 invalidate_global_renaming_pointers (void)
1834 for (i = 0; VEC_iterate(tree, global_renaming_pointers, i, iter); i++)
1835 SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);
1837 VEC_free (tree, gc, global_renaming_pointers);
1840 /* Return true if VALUE is a known to be a multiple of FACTOR, which must be
1844 value_factor_p (tree value, HOST_WIDE_INT factor)
1846 if (host_integerp (value, 1))
1847 return tree_low_cst (value, 1) % factor == 0;
1849 if (TREE_CODE (value) == MULT_EXPR)
1850 return (value_factor_p (TREE_OPERAND (value, 0), factor)
1851 || value_factor_p (TREE_OPERAND (value, 1), factor));
1856 /* Given 2 consecutive field decls PREV_FIELD and CURR_FIELD, return true
1857 unless we can prove these 2 fields are laid out in such a way that no gap
1858 exist between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
1859 is the distance in bits between the end of PREV_FIELD and the starting
1860 position of CURR_FIELD. It is ignored if null. */
1863 potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
1865 /* If this is the first field of the record, there cannot be any gap */
1869 /* If the previous field is a union type, then return False: The only
1870 time when such a field is not the last field of the record is when
1871 there are other components at fixed positions after it (meaning there
1872 was a rep clause for every field), in which case we don't want the
1873 alignment constraint to override them. */
1874 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
1877 /* If the distance between the end of prev_field and the beginning of
1878 curr_field is constant, then there is a gap if the value of this
1879 constant is not null. */
1880 if (offset && host_integerp (offset, 1))
1881 return !integer_zerop (offset);
1883 /* If the size and position of the previous field are constant,
1884 then check the sum of this size and position. There will be a gap
1885 iff it is not multiple of the current field alignment. */
1886 if (host_integerp (DECL_SIZE (prev_field), 1)
1887 && host_integerp (bit_position (prev_field), 1))
1888 return ((tree_low_cst (bit_position (prev_field), 1)
1889 + tree_low_cst (DECL_SIZE (prev_field), 1))
1890 % DECL_ALIGN (curr_field) != 0);
1892 /* If both the position and size of the previous field are multiples
1893 of the current field alignment, there cannot be any gap. */
1894 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
1895 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
1898 /* Fallback, return that there may be a potential gap */
1902 /* Returns a LABEL_DECL node for LABEL_NAME. */
1905 create_label_decl (tree label_name)
1907 tree label_decl = build_decl (LABEL_DECL, label_name, void_type_node);
1909 DECL_CONTEXT (label_decl) = current_function_decl;
1910 DECL_MODE (label_decl) = VOIDmode;
1911 DECL_SOURCE_LOCATION (label_decl) = input_location;
1916 /* Returns a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
1917 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1918 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1919 PARM_DECL nodes chained through the TREE_CHAIN field).
1921 INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
1922 appropriate fields in the FUNCTION_DECL. GNAT_NODE gives the location. */
1925 create_subprog_decl (tree subprog_name, tree asm_name,
1926 tree subprog_type, tree param_decl_list, bool inline_flag,
1927 bool public_flag, bool extern_flag,
1928 struct attrib *attr_list, Node_Id gnat_node)
1930 tree return_type = TREE_TYPE (subprog_type);
1931 tree subprog_decl = build_decl (FUNCTION_DECL, subprog_name, subprog_type);
1933 /* If this is a function nested inside an inlined external function, it
1934 means we aren't going to compile the outer function unless it is
1935 actually inlined, so do the same for us. */
1936 if (current_function_decl && DECL_INLINE (current_function_decl)
1937 && DECL_EXTERNAL (current_function_decl))
1940 DECL_EXTERNAL (subprog_decl) = extern_flag;
1941 TREE_PUBLIC (subprog_decl) = public_flag;
1942 TREE_STATIC (subprog_decl) = 1;
1943 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
1944 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
1945 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
1946 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
1947 DECL_RESULT (subprog_decl) = build_decl (RESULT_DECL, 0, return_type);
1948 DECL_ARTIFICIAL (DECL_RESULT (subprog_decl)) = 1;
1949 DECL_IGNORED_P (DECL_RESULT (subprog_decl)) = 1;
1951 /* TREE_ADDRESSABLE is set on the result type to request the use of the
1952 target by-reference return mechanism. This is not supported all the
1953 way down to RTL expansion with GCC 4, which ICEs on temporary creation
1954 attempts with such a type and expects DECL_BY_REFERENCE to be set on
1955 the RESULT_DECL instead - see gnat_genericize for more details. */
1956 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (subprog_decl))))
1958 tree result_decl = DECL_RESULT (subprog_decl);
1960 TREE_ADDRESSABLE (TREE_TYPE (result_decl)) = 0;
1961 DECL_BY_REFERENCE (result_decl) = 1;
1965 DECL_DECLARED_INLINE_P (subprog_decl) = 1;
1968 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
1970 process_attributes (subprog_decl, attr_list);
1972 /* Add this decl to the current binding level. */
1973 gnat_pushdecl (subprog_decl, gnat_node);
1975 /* Output the assembler code and/or RTL for the declaration. */
1976 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
1978 return subprog_decl;
1981 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
1982 body. This routine needs to be invoked before processing the declarations
1983 appearing in the subprogram. */
1986 begin_subprog_body (tree subprog_decl)
1990 current_function_decl = subprog_decl;
1991 announce_function (subprog_decl);
1993 /* Enter a new binding level and show that all the parameters belong to
1996 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
1997 param_decl = TREE_CHAIN (param_decl))
1998 DECL_CONTEXT (param_decl) = subprog_decl;
2000 make_decl_rtl (subprog_decl);
2002 /* We handle pending sizes via the elaboration of types, so we don't need to
2003 save them. This causes them to be marked as part of the outer function
2004 and then discarded. */
2005 get_pending_sizes ();
2009 /* Helper for the genericization callback. Return a dereference of VAL
2010 if it is of a reference type. */
2013 convert_from_reference (tree val)
2015 tree value_type, ref;
2017 if (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE)
2020 value_type = TREE_TYPE (TREE_TYPE (val));
2021 ref = build1 (INDIRECT_REF, value_type, val);
2023 /* See if what we reference is CONST or VOLATILE, which requires
2024 looking into array types to get to the component type. */
2026 while (TREE_CODE (value_type) == ARRAY_TYPE)
2027 value_type = TREE_TYPE (value_type);
2030 = (TYPE_QUALS (value_type) & TYPE_QUAL_CONST);
2031 TREE_THIS_VOLATILE (ref)
2032 = (TYPE_QUALS (value_type) & TYPE_QUAL_VOLATILE);
2034 TREE_SIDE_EFFECTS (ref)
2035 = (TREE_THIS_VOLATILE (ref) || TREE_SIDE_EFFECTS (val));
2040 /* Helper for the genericization callback. Returns true if T denotes
2041 a RESULT_DECL with DECL_BY_REFERENCE set. */
2044 is_byref_result (tree t)
2046 return (TREE_CODE (t) == RESULT_DECL && DECL_BY_REFERENCE (t));
2050 /* Tree walking callback for gnat_genericize. Currently ...
2052 o Adjust references to the function's DECL_RESULT if it is marked
2053 DECL_BY_REFERENCE and so has had its type turned into a reference
2054 type at the end of the function compilation. */
2057 gnat_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
2059 /* This implementation is modeled after what the C++ front-end is
2060 doing, basis of the downstream passes behavior. */
2062 tree stmt = *stmt_p;
2063 struct pointer_set_t *p_set = (struct pointer_set_t*) data;
2065 /* If we have a direct mention of the result decl, dereference. */
2066 if (is_byref_result (stmt))
2068 *stmt_p = convert_from_reference (stmt);
2073 /* Otherwise, no need to walk the the same tree twice. */
2074 if (pointer_set_contains (p_set, stmt))
2080 /* If we are taking the address of what now is a reference, just get the
2082 if (TREE_CODE (stmt) == ADDR_EXPR
2083 && is_byref_result (TREE_OPERAND (stmt, 0)))
2085 *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
2089 /* Don't dereference an by-reference RESULT_DECL inside a RETURN_EXPR. */
2090 else if (TREE_CODE (stmt) == RETURN_EXPR
2091 && TREE_OPERAND (stmt, 0)
2092 && is_byref_result (TREE_OPERAND (stmt, 0)))
2095 /* Don't look inside trees that cannot embed references of interest. */
2096 else if (IS_TYPE_OR_DECL_P (stmt))
2099 pointer_set_insert (p_set, *stmt_p);
2104 /* Perform lowering of Ada trees to GENERIC. In particular:
2106 o Turn a DECL_BY_REFERENCE RESULT_DECL into a real by-reference decl
2107 and adjust all the references to this decl accordingly. */
2110 gnat_genericize (tree fndecl)
2112 /* Prior to GCC 4, an explicit By_Reference result mechanism for a function
2113 was handled by simply setting TREE_ADDRESSABLE on the result type.
2114 Everything required to actually pass by invisible ref using the target
2115 mechanism (e.g. extra parameter) was handled at RTL expansion time.
2117 This doesn't work with GCC 4 any more for several reasons. First, the
2118 gimplification process might need the creation of temporaries of this
2119 type, and the gimplifier ICEs on such attempts. Second, the middle-end
2120 now relies on a different attribute for such cases (DECL_BY_REFERENCE on
2121 RESULT/PARM_DECLs), and expects the user invisible by-reference-ness to
2122 be explicitly accounted for by the front-end in the function body.
2124 We achieve the complete transformation in two steps:
2126 1/ create_subprog_decl performs early attribute tweaks: it clears
2127 TREE_ADDRESSABLE from the result type and sets DECL_BY_REFERENCE on
2128 the result decl. The former ensures that the bit isn't set in the GCC
2129 tree saved for the function, so prevents ICEs on temporary creation.
2130 The latter we use here to trigger the rest of the processing.
2132 2/ This function performs the type transformation on the result decl
2133 and adjusts all the references to this decl from the function body
2136 Clearing TREE_ADDRESSABLE from the type differs from the C++ front-end
2137 strategy, which escapes the gimplifier temporary creation issues by
2138 creating it's own temporaries using TARGET_EXPR nodes. Our way relies
2139 on simple specific support code in aggregate_value_p to look at the
2140 target function result decl explicitly. */
2142 struct pointer_set_t *p_set;
2143 tree decl_result = DECL_RESULT (fndecl);
2145 if (!DECL_BY_REFERENCE (decl_result))
2148 /* Make the DECL_RESULT explicitly by-reference and adjust all the
2149 occurrences in the function body using the common tree-walking facility.
2150 We want to see every occurrence of the result decl to adjust the
2151 referencing tree, so need to use our own pointer set to control which
2152 trees should be visited again or not. */
2154 p_set = pointer_set_create ();
2156 TREE_TYPE (decl_result) = build_reference_type (TREE_TYPE (decl_result));
2157 TREE_ADDRESSABLE (decl_result) = 0;
2158 relayout_decl (decl_result);
2160 walk_tree (&DECL_SAVED_TREE (fndecl), gnat_genericize_r, p_set, NULL);
2162 pointer_set_destroy (p_set);
2165 /* Finish the definition of the current subprogram and compile it all the way
2166 to assembler language output. BODY is the tree corresponding to
2170 end_subprog_body (tree body)
2172 tree fndecl = current_function_decl;
2174 /* Mark the BLOCK for this level as being for this function and pop the
2175 level. Since the vars in it are the parameters, clear them. */
2176 BLOCK_VARS (current_binding_level->block) = 0;
2177 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
2178 DECL_INITIAL (fndecl) = current_binding_level->block;
2181 /* Deal with inline. If declared inline or we should default to inline,
2182 set the flag in the decl. */
2183 DECL_INLINE (fndecl)
2184 = DECL_DECLARED_INLINE_P (fndecl) || flag_inline_trees == 2;
2186 /* We handle pending sizes via the elaboration of types, so we don't
2187 need to save them. */
2188 get_pending_sizes ();
2190 /* Mark the RESULT_DECL as being in this subprogram. */
2191 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
2193 DECL_SAVED_TREE (fndecl) = body;
2195 current_function_decl = DECL_CONTEXT (fndecl);
2198 /* We cannot track the location of errors past this point. */
2199 error_gnat_node = Empty;
2201 /* If we're only annotating types, don't actually compile this function. */
2202 if (type_annotate_only)
2205 /* Perform the required pre-gimplfication transformations on the tree. */
2206 gnat_genericize (fndecl);
2208 /* We do different things for nested and non-nested functions.
2209 ??? This should be in cgraph. */
2210 if (!DECL_CONTEXT (fndecl))
2212 gnat_gimplify_function (fndecl);
2213 cgraph_finalize_function (fndecl, false);
2216 /* Register this function with cgraph just far enough to get it
2217 added to our parent's nested function list. */
2218 (void) cgraph_node (fndecl);
2221 /* Convert FNDECL's code to GIMPLE and handle any nested functions. */
2224 gnat_gimplify_function (tree fndecl)
2226 struct cgraph_node *cgn;
2228 dump_function (TDI_original, fndecl);
2229 gimplify_function_tree (fndecl);
2230 dump_function (TDI_generic, fndecl);
2232 /* Convert all nested functions to GIMPLE now. We do things in this order
2233 so that items like VLA sizes are expanded properly in the context of the
2234 correct function. */
2235 cgn = cgraph_node (fndecl);
2236 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
2237 gnat_gimplify_function (cgn->decl);
2242 gnat_builtin_function (tree decl)
2244 gnat_pushdecl (decl, Empty);
2248 /* Handle a "const" attribute; arguments as in
2249 struct attribute_spec.handler. */
2252 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
2253 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
2256 if (TREE_CODE (*node) == FUNCTION_DECL)
2257 TREE_READONLY (*node) = 1;
2259 *no_add_attrs = true;
2264 /* Handle a "nothrow" attribute; arguments as in
2265 struct attribute_spec.handler. */
2268 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
2269 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
2272 if (TREE_CODE (*node) == FUNCTION_DECL)
2273 TREE_NOTHROW (*node) = 1;
2275 *no_add_attrs = true;
2280 /* Return an integer type with the number of bits of precision given by
2281 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
2282 it is a signed type. */
2285 gnat_type_for_size (unsigned precision, int unsignedp)
2290 if (precision <= 2 * MAX_BITS_PER_WORD
2291 && signed_and_unsigned_types[precision][unsignedp])
2292 return signed_and_unsigned_types[precision][unsignedp];
2295 t = make_unsigned_type (precision);
2297 t = make_signed_type (precision);
2299 if (precision <= 2 * MAX_BITS_PER_WORD)
2300 signed_and_unsigned_types[precision][unsignedp] = t;
2304 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
2305 TYPE_NAME (t) = get_identifier (type_name);
2311 /* Likewise for floating-point types. */
2314 float_type_for_precision (int precision, enum machine_mode mode)
2319 if (float_types[(int) mode])
2320 return float_types[(int) mode];
2322 float_types[(int) mode] = t = make_node (REAL_TYPE);
2323 TYPE_PRECISION (t) = precision;
2326 gcc_assert (TYPE_MODE (t) == mode);
2329 sprintf (type_name, "FLOAT_%d", precision);
2330 TYPE_NAME (t) = get_identifier (type_name);
2336 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
2337 an unsigned type; otherwise a signed type is returned. */
2340 gnat_type_for_mode (enum machine_mode mode, int unsignedp)
2342 if (mode == BLKmode)
2344 else if (mode == VOIDmode)
2345 return void_type_node;
2346 else if (COMPLEX_MODE_P (mode))
2348 else if (SCALAR_FLOAT_MODE_P (mode))
2349 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
2350 else if (SCALAR_INT_MODE_P (mode))
2351 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2356 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
2359 gnat_unsigned_type (tree type_node)
2361 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
2363 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2365 type = copy_node (type);
2366 TREE_TYPE (type) = type_node;
2368 else if (TREE_TYPE (type_node)
2369 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2370 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2372 type = copy_node (type);
2373 TREE_TYPE (type) = TREE_TYPE (type_node);
2379 /* Return the signed version of a TYPE_NODE, a scalar type. */
2382 gnat_signed_type (tree type_node)
2384 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
2386 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2388 type = copy_node (type);
2389 TREE_TYPE (type) = type_node;
2391 else if (TREE_TYPE (type_node)
2392 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2393 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2395 type = copy_node (type);
2396 TREE_TYPE (type) = TREE_TYPE (type_node);
2402 /* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
2403 transparently converted to each other. */
2406 gnat_types_compatible_p (tree t1, tree t2)
2408 enum tree_code code;
2410 /* This is the default criterion. */
2411 if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
2414 /* We only check structural equivalence here. */
2415 if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
2418 /* Array types are also compatible if they are constrained and have
2419 the same component type and the same domain. */
2420 if (code == ARRAY_TYPE
2421 && TREE_TYPE (t1) == TREE_TYPE (t2)
2422 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
2423 TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
2424 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
2425 TYPE_MAX_VALUE (TYPE_DOMAIN (t2))))
2428 /* Padding record types are also compatible if they pad the same
2429 type and have the same constant size. */
2430 if (code == RECORD_TYPE
2431 && TYPE_IS_PADDING_P (t1) && TYPE_IS_PADDING_P (t2)
2432 && TREE_TYPE (TYPE_FIELDS (t1)) == TREE_TYPE (TYPE_FIELDS (t2))
2433 && tree_int_cst_equal (TYPE_SIZE (t1), TYPE_SIZE (t2)))
2439 /* EXP is an expression for the size of an object. If this size contains
2440 discriminant references, replace them with the maximum (if MAX_P) or
2441 minimum (if !MAX_P) possible value of the discriminant. */
2444 max_size (tree exp, bool max_p)
2446 enum tree_code code = TREE_CODE (exp);
2447 tree type = TREE_TYPE (exp);
2449 switch (TREE_CODE_CLASS (code))
2451 case tcc_declaration:
2456 if (code == CALL_EXPR)
2459 int i, n = call_expr_nargs (exp);
2462 argarray = (tree *) alloca (n * sizeof (tree));
2463 for (i = 0; i < n; i++)
2464 argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
2465 return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
2470 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
2471 modify. Otherwise, we treat it like a variable. */
2472 if (!CONTAINS_PLACEHOLDER_P (exp))
2475 type = TREE_TYPE (TREE_OPERAND (exp, 1));
2477 max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), true);
2479 case tcc_comparison:
2480 return max_p ? size_one_node : size_zero_node;
2484 case tcc_expression:
2485 switch (TREE_CODE_LENGTH (code))
2488 if (code == NON_LVALUE_EXPR)
2489 return max_size (TREE_OPERAND (exp, 0), max_p);
2492 fold_build1 (code, type,
2493 max_size (TREE_OPERAND (exp, 0),
2494 code == NEGATE_EXPR ? !max_p : max_p));
2497 if (code == COMPOUND_EXPR)
2498 return max_size (TREE_OPERAND (exp, 1), max_p);
2500 /* Calculate "(A ? B : C) - D" as "A ? B - D : C - D" which
2501 may provide a tighter bound on max_size. */
2502 if (code == MINUS_EXPR
2503 && TREE_CODE (TREE_OPERAND (exp, 0)) == COND_EXPR)
2505 tree lhs = fold_build2 (MINUS_EXPR, type,
2506 TREE_OPERAND (TREE_OPERAND (exp, 0), 1),
2507 TREE_OPERAND (exp, 1));
2508 tree rhs = fold_build2 (MINUS_EXPR, type,
2509 TREE_OPERAND (TREE_OPERAND (exp, 0), 2),
2510 TREE_OPERAND (exp, 1));
2511 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2512 max_size (lhs, max_p),
2513 max_size (rhs, max_p));
2517 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
2518 tree rhs = max_size (TREE_OPERAND (exp, 1),
2519 code == MINUS_EXPR ? !max_p : max_p);
2521 /* Special-case wanting the maximum value of a MIN_EXPR.
2522 In that case, if one side overflows, return the other.
2523 sizetype is signed, but we know sizes are non-negative.
2524 Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
2525 overflowing or the maximum possible value and the RHS
2529 && TREE_CODE (rhs) == INTEGER_CST
2530 && TREE_OVERFLOW (rhs))
2534 && TREE_CODE (lhs) == INTEGER_CST
2535 && TREE_OVERFLOW (lhs))
2537 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
2538 && ((TREE_CODE (lhs) == INTEGER_CST
2539 && TREE_OVERFLOW (lhs))
2540 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
2541 && !TREE_CONSTANT (rhs))
2544 return fold_build2 (code, type, lhs, rhs);
2548 if (code == SAVE_EXPR)
2550 else if (code == COND_EXPR)
2551 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2552 max_size (TREE_OPERAND (exp, 1), max_p),
2553 max_size (TREE_OPERAND (exp, 2), max_p));
2556 /* Other tree classes cannot happen. */
2564 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2565 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2566 Return a constructor for the template. */
2569 build_template (tree template_type, tree array_type, tree expr)
2571 tree template_elts = NULL_TREE;
2572 tree bound_list = NULL_TREE;
2575 while (TREE_CODE (array_type) == RECORD_TYPE
2576 && (TYPE_IS_PADDING_P (array_type)
2577 || TYPE_JUSTIFIED_MODULAR_P (array_type)))
2578 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
2580 if (TREE_CODE (array_type) == ARRAY_TYPE
2581 || (TREE_CODE (array_type) == INTEGER_TYPE
2582 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
2583 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
2585 /* First make the list for a CONSTRUCTOR for the template. Go down the
2586 field list of the template instead of the type chain because this
2587 array might be an Ada array of arrays and we can't tell where the
2588 nested arrays stop being the underlying object. */
2590 for (field = TYPE_FIELDS (template_type); field;
2592 ? (bound_list = TREE_CHAIN (bound_list))
2593 : (array_type = TREE_TYPE (array_type))),
2594 field = TREE_CHAIN (TREE_CHAIN (field)))
2596 tree bounds, min, max;
2598 /* If we have a bound list, get the bounds from there. Likewise
2599 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
2600 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
2601 This will give us a maximum range. */
2603 bounds = TREE_VALUE (bound_list);
2604 else if (TREE_CODE (array_type) == ARRAY_TYPE)
2605 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
2606 else if (expr && TREE_CODE (expr) == PARM_DECL
2607 && DECL_BY_COMPONENT_PTR_P (expr))
2608 bounds = TREE_TYPE (field);
2612 min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
2613 max = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MAX_VALUE (bounds));
2615 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
2616 substitute it from OBJECT. */
2617 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
2618 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
2620 template_elts = tree_cons (TREE_CHAIN (field), max,
2621 tree_cons (field, min, template_elts));
2624 return gnat_build_constructor (template_type, nreverse (template_elts));
2627 /* Build a VMS descriptor from a Mechanism_Type, which must specify
2628 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2629 in the type contains in its DECL_INITIAL the expression to use when
2630 a constructor is made for the type. GNAT_ENTITY is an entity used
2631 to print out an error message if the mechanism cannot be applied to
2632 an object of that type and also for the name. */
2635 build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2637 tree record_type = make_node (RECORD_TYPE);
2638 tree pointer32_type;
2639 tree field_list = 0;
2648 /* If TYPE is an unconstrained array, use the underlying array type. */
2649 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2650 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2652 /* If this is an array, compute the number of dimensions in the array,
2653 get the index types, and point to the inner type. */
2654 if (TREE_CODE (type) != ARRAY_TYPE)
2657 for (ndim = 1, inner_type = type;
2658 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2659 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2660 ndim++, inner_type = TREE_TYPE (inner_type))
2663 idx_arr = (tree *) alloca (ndim * sizeof (tree));
2665 if (mech != By_Descriptor_NCA
2666 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2667 for (i = ndim - 1, inner_type = type;
2669 i--, inner_type = TREE_TYPE (inner_type))
2670 idx_arr[i] = TYPE_DOMAIN (inner_type);
2672 for (i = 0, inner_type = type;
2674 i++, inner_type = TREE_TYPE (inner_type))
2675 idx_arr[i] = TYPE_DOMAIN (inner_type);
2677 /* Now get the DTYPE value. */
2678 switch (TREE_CODE (type))
2682 if (TYPE_VAX_FLOATING_POINT_P (type))
2683 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2696 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2699 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2702 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2705 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2708 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2711 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2717 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2721 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2722 && TYPE_VAX_FLOATING_POINT_P (type))
2723 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2735 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2746 /* Get the CLASS value. */
2749 case By_Descriptor_A:
2752 case By_Descriptor_NCA:
2755 case By_Descriptor_SB:
2759 case By_Descriptor_S:
2765 /* Make the type for a descriptor for VMS. The first four fields
2766 are the same for all types. */
2769 = chainon (field_list,
2770 make_descriptor_field
2771 ("LENGTH", gnat_type_for_size (16, 1), record_type,
2772 size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));
2774 field_list = chainon (field_list,
2775 make_descriptor_field ("DTYPE",
2776 gnat_type_for_size (8, 1),
2777 record_type, size_int (dtype)));
2778 field_list = chainon (field_list,
2779 make_descriptor_field ("CLASS",
2780 gnat_type_for_size (8, 1),
2781 record_type, size_int (class)));
2783 /* Of course this will crash at run-time if the address space is not
2784 within the low 32 bits, but there is nothing else we can do. */
2785 pointer32_type = build_pointer_type_for_mode (type, SImode, false);
2788 = chainon (field_list,
2789 make_descriptor_field
2790 ("POINTER", pointer32_type, record_type,
2791 build_unary_op (ADDR_EXPR,
2793 build0 (PLACEHOLDER_EXPR, type))));
2798 case By_Descriptor_S:
2801 case By_Descriptor_SB:
2803 = chainon (field_list,
2804 make_descriptor_field
2805 ("SB_L1", gnat_type_for_size (32, 1), record_type,
2806 TREE_CODE (type) == ARRAY_TYPE
2807 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2809 = chainon (field_list,
2810 make_descriptor_field
2811 ("SB_U1", gnat_type_for_size (32, 1), record_type,
2812 TREE_CODE (type) == ARRAY_TYPE
2813 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2816 case By_Descriptor_A:
2817 case By_Descriptor_NCA:
2818 field_list = chainon (field_list,
2819 make_descriptor_field ("SCALE",
2820 gnat_type_for_size (8, 1),
2824 field_list = chainon (field_list,
2825 make_descriptor_field ("DIGITS",
2826 gnat_type_for_size (8, 1),
2831 = chainon (field_list,
2832 make_descriptor_field
2833 ("AFLAGS", gnat_type_for_size (8, 1), record_type,
2834 size_int (mech == By_Descriptor_NCA
2836 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2837 : (TREE_CODE (type) == ARRAY_TYPE
2838 && TYPE_CONVENTION_FORTRAN_P (type)
2841 field_list = chainon (field_list,
2842 make_descriptor_field ("DIMCT",
2843 gnat_type_for_size (8, 1),
2847 field_list = chainon (field_list,
2848 make_descriptor_field ("ARSIZE",
2849 gnat_type_for_size (32, 1),
2851 size_in_bytes (type)));
2853 /* Now build a pointer to the 0,0,0... element. */
2854 tem = build0 (PLACEHOLDER_EXPR, type);
2855 for (i = 0, inner_type = type; i < ndim;
2856 i++, inner_type = TREE_TYPE (inner_type))
2857 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
2858 convert (TYPE_DOMAIN (inner_type), size_zero_node),
2859 NULL_TREE, NULL_TREE);
2862 = chainon (field_list,
2863 make_descriptor_field
2865 build_pointer_type_for_mode (inner_type, SImode, false),
2868 build_pointer_type_for_mode (inner_type, SImode,
2872 /* Next come the addressing coefficients. */
2873 tem = size_one_node;
2874 for (i = 0; i < ndim; i++)
2878 = size_binop (MULT_EXPR, tem,
2879 size_binop (PLUS_EXPR,
2880 size_binop (MINUS_EXPR,
2881 TYPE_MAX_VALUE (idx_arr[i]),
2882 TYPE_MIN_VALUE (idx_arr[i])),
2885 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
2886 fname[1] = '0' + i, fname[2] = 0;
2888 = chainon (field_list,
2889 make_descriptor_field (fname,
2890 gnat_type_for_size (32, 1),
2891 record_type, idx_length));
2893 if (mech == By_Descriptor_NCA)
2897 /* Finally here are the bounds. */
2898 for (i = 0; i < ndim; i++)
2902 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2904 = chainon (field_list,
2905 make_descriptor_field
2906 (fname, gnat_type_for_size (32, 1), record_type,
2907 TYPE_MIN_VALUE (idx_arr[i])));
2911 = chainon (field_list,
2912 make_descriptor_field
2913 (fname, gnat_type_for_size (32, 1), record_type,
2914 TYPE_MAX_VALUE (idx_arr[i])));
2919 post_error ("unsupported descriptor type for &", gnat_entity);
2922 finish_record_type (record_type, field_list, 0, true);
2923 create_type_decl (create_concat_name (gnat_entity, "DESC"), record_type,
2924 NULL, true, false, gnat_entity);
2929 /* Utility routine for above code to make a field. */
2932 make_descriptor_field (const char *name, tree type,
2933 tree rec_type, tree initial)
2936 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
2938 DECL_INITIAL (field) = initial;
2942 /* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
2943 pointer or fat pointer type. GNAT_SUBPROG is the subprogram to which
2944 the VMS descriptor is passed. */
2947 convert_vms_descriptor (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
2949 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
2950 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
2951 /* The CLASS field is the 3rd field in the descriptor. */
2952 tree class = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type)));
2953 /* The POINTER field is the 4th field in the descriptor. */
2954 tree pointer = TREE_CHAIN (class);
2956 /* Retrieve the value of the POINTER field. */
2958 = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);
2960 if (POINTER_TYPE_P (gnu_type))
2961 return convert (gnu_type, gnu_expr);
2963 else if (TYPE_FAT_POINTER_P (gnu_type))
2965 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
2966 tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
2967 tree template_type = TREE_TYPE (p_bounds_type);
2968 tree min_field = TYPE_FIELDS (template_type);
2969 tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
2970 tree template, template_addr, aflags, dimct, t, u;
2971 /* See the head comment of build_vms_descriptor. */
2972 int iclass = TREE_INT_CST_LOW (DECL_INITIAL (class));
2974 /* Convert POINTER to the type of the P_ARRAY field. */
2975 gnu_expr = convert (p_array_type, gnu_expr);
2979 case 1: /* Class S */
2980 case 15: /* Class SB */
2981 /* Build {1, LENGTH} template; LENGTH is the 1st field. */
2982 t = TYPE_FIELDS (desc_type);
2983 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
2984 t = tree_cons (min_field,
2985 convert (TREE_TYPE (min_field), integer_one_node),
2986 tree_cons (max_field,
2987 convert (TREE_TYPE (max_field), t),
2989 template = gnat_build_constructor (template_type, t);
2990 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);
2992 /* For class S, we are done. */
2996 /* Test that we really have a SB descriptor, like DEC Ada. */
2997 t = build3 (COMPONENT_REF, TREE_TYPE (class), desc, class, NULL);
2998 u = convert (TREE_TYPE (class), DECL_INITIAL (class));
2999 u = build_binary_op (EQ_EXPR, integer_type_node, t, u);
3000 /* If so, there is already a template in the descriptor and
3001 it is located right after the POINTER field. */
3002 t = TREE_CHAIN (pointer);
3003 template = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3004 /* Otherwise use the {1, LENGTH} template we build above. */
3005 template_addr = build3 (COND_EXPR, p_bounds_type, u,
3006 build_unary_op (ADDR_EXPR, p_bounds_type,
3011 case 4: /* Class A */
3012 /* The AFLAGS field is the 7th field in the descriptor. */
3013 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer)));
3014 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3015 /* The DIMCT field is the 8th field in the descriptor. */
3017 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3018 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
3019 or FL_COEFF or FL_BOUNDS not set. */
3020 u = build_int_cst (TREE_TYPE (aflags), 192);
3021 u = build_binary_op (TRUTH_OR_EXPR, integer_type_node,
3022 build_binary_op (NE_EXPR, integer_type_node,
3024 convert (TREE_TYPE (dimct),
3026 build_binary_op (NE_EXPR, integer_type_node,
3027 build2 (BIT_AND_EXPR,
3031 add_stmt (build3 (COND_EXPR, void_type_node, u,
3032 build_call_raise (CE_Length_Check_Failed, Empty,
3033 N_Raise_Constraint_Error),
3035 /* There is already a template in the descriptor and it is
3036 located at the start of block 3 (12th field). */
3037 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (t))));
3038 template = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3039 template_addr = build_unary_op (ADDR_EXPR, p_bounds_type, template);
3042 case 10: /* Class NCA */
3044 post_error ("unsupported descriptor type for &", gnat_subprog);
3045 template_addr = integer_zero_node;
3049 /* Build the fat pointer in the form of a constructor. */
3050 t = tree_cons (TYPE_FIELDS (gnu_type), gnu_expr,
3051 tree_cons (TREE_CHAIN (TYPE_FIELDS (gnu_type)),
3052 template_addr, NULL_TREE));
3053 return gnat_build_constructor (gnu_type, t);
3060 /* Build a stub for the subprogram specified by the GCC tree GNU_SUBPROG
3061 and the GNAT node GNAT_SUBPROG. */
3064 build_function_stub (tree gnu_subprog, Entity_Id gnat_subprog)
3066 tree gnu_subprog_type, gnu_subprog_addr, gnu_subprog_call;
3067 tree gnu_stub_param, gnu_param_list, gnu_arg_types, gnu_param;
3068 tree gnu_stub_decl = DECL_FUNCTION_STUB (gnu_subprog);
3071 gnu_subprog_type = TREE_TYPE (gnu_subprog);
3072 gnu_param_list = NULL_TREE;
3074 begin_subprog_body (gnu_stub_decl);
3077 start_stmt_group ();
3079 /* Loop over the parameters of the stub and translate any of them
3080 passed by descriptor into a by reference one. */
3081 for (gnu_stub_param = DECL_ARGUMENTS (gnu_stub_decl),
3082 gnu_arg_types = TYPE_ARG_TYPES (gnu_subprog_type);
3084 gnu_stub_param = TREE_CHAIN (gnu_stub_param),
3085 gnu_arg_types = TREE_CHAIN (gnu_arg_types))
3087 if (DECL_BY_DESCRIPTOR_P (gnu_stub_param))
3088 gnu_param = convert_vms_descriptor (TREE_VALUE (gnu_arg_types),
3089 gnu_stub_param, gnat_subprog);
3091 gnu_param = gnu_stub_param;
3093 gnu_param_list = tree_cons (NULL_TREE, gnu_param, gnu_param_list);
3096 gnu_body = end_stmt_group ();
3098 /* Invoke the internal subprogram. */
3099 gnu_subprog_addr = build1 (ADDR_EXPR, build_pointer_type (gnu_subprog_type),
3101 gnu_subprog_call = build_call_list (TREE_TYPE (gnu_subprog_type),
3103 nreverse (gnu_param_list));
3105 /* Propagate the return value, if any. */
3106 if (VOID_TYPE_P (TREE_TYPE (gnu_subprog_type)))
3107 append_to_statement_list (gnu_subprog_call, &gnu_body);
3109 append_to_statement_list (build_return_expr (DECL_RESULT (gnu_stub_decl),
3115 allocate_struct_function (gnu_stub_decl, false);
3116 end_subprog_body (gnu_body);
3119 /* Build a type to be used to represent an aliased object whose nominal
3120 type is an unconstrained array. This consists of a RECORD_TYPE containing
3121 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
3122 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
3123 is used to represent an arbitrary unconstrained object. Use NAME
3124 as the name of the record. */
3127 build_unc_object_type (tree template_type, tree object_type, tree name)
3129 tree type = make_node (RECORD_TYPE);
3130 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
3131 template_type, type, 0, 0, 0, 1);
3132 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
3135 TYPE_NAME (type) = name;
3136 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
3137 finish_record_type (type,
3138 chainon (chainon (NULL_TREE, template_field),
3145 /* Same, taking a thin or fat pointer type instead of a template type. */
3148 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
3153 gcc_assert (TYPE_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
3156 = (TYPE_FAT_POINTER_P (thin_fat_ptr_type)
3157 ? TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
3158 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
3159 return build_unc_object_type (template_type, object_type, name);
3162 /* Shift the component offsets within an unconstrained object TYPE to make it
3163 suitable for use as a designated type for thin pointers. */
3166 shift_unc_components_for_thin_pointers (tree type)
3168 /* Thin pointer values designate the ARRAY data of an unconstrained object,
3169 allocated past the BOUNDS template. The designated type is adjusted to
3170 have ARRAY at position zero and the template at a negative offset, so
3171 that COMPONENT_REFs on (*thin_ptr) designate the proper location. */
3173 tree bounds_field = TYPE_FIELDS (type);
3174 tree array_field = TREE_CHAIN (TYPE_FIELDS (type));
3176 DECL_FIELD_OFFSET (bounds_field)
3177 = size_binop (MINUS_EXPR, size_zero_node, byte_position (array_field));
3179 DECL_FIELD_OFFSET (array_field) = size_zero_node;
3180 DECL_FIELD_BIT_OFFSET (array_field) = bitsize_zero_node;
3183 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE. In
3184 the normal case this is just two adjustments, but we have more to do
3185 if NEW is an UNCONSTRAINED_ARRAY_TYPE. */
3188 update_pointer_to (tree old_type, tree new_type)
3190 tree ptr = TYPE_POINTER_TO (old_type);
3191 tree ref = TYPE_REFERENCE_TO (old_type);
3195 /* If this is the main variant, process all the other variants first. */
3196 if (TYPE_MAIN_VARIANT (old_type) == old_type)
3197 for (type = TYPE_NEXT_VARIANT (old_type); type;
3198 type = TYPE_NEXT_VARIANT (type))
3199 update_pointer_to (type, new_type);
3201 /* If no pointer or reference, we are done. */
3205 /* Merge the old type qualifiers in the new type.
3207 Each old variant has qualifiers for specific reasons, and the new
3208 designated type as well. Each set of qualifiers represents useful
3209 information grabbed at some point, and merging the two simply unifies
3210 these inputs into the final type description.
3212 Consider for instance a volatile type frozen after an access to constant
3213 type designating it. After the designated type freeze, we get here with a
3214 volatile new_type and a dummy old_type with a readonly variant, created
3215 when the access type was processed. We shall make a volatile and readonly
3216 designated type, because that's what it really is.
3218 We might also get here for a non-dummy old_type variant with different
3219 qualifiers than the new_type ones, for instance in some cases of pointers
3220 to private record type elaboration (see the comments around the call to
3221 this routine from gnat_to_gnu_entity/E_Access_Type). We have to merge the
3222 qualifiers in thoses cases too, to avoid accidentally discarding the
3223 initial set, and will often end up with old_type == new_type then. */
3224 new_type = build_qualified_type (new_type,
3225 TYPE_QUALS (old_type)
3226 | TYPE_QUALS (new_type));
3228 /* If the new type and the old one are identical, there is nothing to
3230 if (old_type == new_type)
3233 /* Otherwise, first handle the simple case. */
3234 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
3236 TYPE_POINTER_TO (new_type) = ptr;
3237 TYPE_REFERENCE_TO (new_type) = ref;
3239 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
3240 for (ptr1 = TYPE_MAIN_VARIANT (ptr); ptr1;
3241 ptr1 = TYPE_NEXT_VARIANT (ptr1))
3242 TREE_TYPE (ptr1) = new_type;
3244 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
3245 for (ref1 = TYPE_MAIN_VARIANT (ref); ref1;
3246 ref1 = TYPE_NEXT_VARIANT (ref1))
3247 TREE_TYPE (ref1) = new_type;
3250 /* Now deal with the unconstrained array case. In this case the "pointer"
3251 is actually a RECORD_TYPE where both fields are pointers to dummy nodes.
3252 Turn them into pointers to the correct types using update_pointer_to. */
3253 else if (TREE_CODE (ptr) != RECORD_TYPE || !TYPE_IS_FAT_POINTER_P (ptr))
3258 tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
3259 tree array_field = TYPE_FIELDS (ptr);
3260 tree bounds_field = TREE_CHAIN (TYPE_FIELDS (ptr));
3261 tree new_ptr = TYPE_POINTER_TO (new_type);
3265 /* Make pointers to the dummy template point to the real template. */
3267 (TREE_TYPE (TREE_TYPE (bounds_field)),
3268 TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_ptr)))));
3270 /* The references to the template bounds present in the array type
3271 are made through a PLACEHOLDER_EXPR of type new_ptr. Since we
3272 are updating ptr to make it a full replacement for new_ptr as
3273 pointer to new_type, we must rework the PLACEHOLDER_EXPR so as
3274 to make it of type ptr. */
3275 new_ref = build3 (COMPONENT_REF, TREE_TYPE (bounds_field),
3276 build0 (PLACEHOLDER_EXPR, ptr),
3277 bounds_field, NULL_TREE);
3279 /* Create the new array for the new PLACEHOLDER_EXPR and make
3280 pointers to the dummy array point to it.
3282 ??? This is now the only use of substitute_in_type,
3283 which is a very "heavy" routine to do this, so it
3284 should be replaced at some point. */
3286 (TREE_TYPE (TREE_TYPE (array_field)),
3287 substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))),
3288 TREE_CHAIN (TYPE_FIELDS (new_ptr)), new_ref));
3290 /* Make ptr the pointer to new_type. */
3291 TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
3292 = TREE_TYPE (new_type) = ptr;
3294 for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
3295 SET_TYPE_UNCONSTRAINED_ARRAY (var, new_type);
3297 /* Now handle updating the allocation record, what the thin pointer
3298 points to. Update all pointers from the old record into the new
3299 one, update the type of the array field, and recompute the size. */
3300 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);
3302 TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
3303 = TREE_TYPE (TREE_TYPE (array_field));
3305 /* The size recomputation needs to account for alignment constraints, so
3306 we let layout_type work it out. This will reset the field offsets to
3307 what they would be in a regular record, so we shift them back to what
3308 we want them to be for a thin pointer designated type afterwards. */
3309 DECL_SIZE (TYPE_FIELDS (new_obj_rec)) = 0;
3310 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))) = 0;
3311 TYPE_SIZE (new_obj_rec) = 0;
3312 layout_type (new_obj_rec);
3314 shift_unc_components_for_thin_pointers (new_obj_rec);
3316 /* We are done, at last. */
3317 rest_of_record_type_compilation (ptr);
3321 /* Convert a pointer to a constrained array into a pointer to a fat
3322 pointer. This involves making or finding a template. */
3325 convert_to_fat_pointer (tree type, tree expr)
3327 tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
3328 tree template, template_addr;
3329 tree etype = TREE_TYPE (expr);
3331 /* If EXPR is a constant of zero, we make a fat pointer that has a null
3332 pointer to the template and array. */
3333 if (integer_zerop (expr))
3335 gnat_build_constructor
3337 tree_cons (TYPE_FIELDS (type),
3338 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
3339 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3340 convert (build_pointer_type (template_type),
3344 /* If EXPR is a thin pointer, make the template and data from the record. */
3346 else if (TYPE_THIN_POINTER_P (etype))
3348 tree fields = TYPE_FIELDS (TREE_TYPE (etype));
3350 expr = save_expr (expr);
3351 if (TREE_CODE (expr) == ADDR_EXPR)
3352 expr = TREE_OPERAND (expr, 0);
3354 expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
3356 template = build_component_ref (expr, NULL_TREE, fields, false);
3357 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
3358 build_component_ref (expr, NULL_TREE,
3359 TREE_CHAIN (fields), false));
3362 /* Otherwise, build the constructor for the template. */
3363 template = build_template (template_type, TREE_TYPE (etype), expr);
3365 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);
3367 /* The result is a CONSTRUCTOR for the fat pointer.
3369 If expr is an argument of a foreign convention subprogram, the type it
3370 points to is directly the component type. In this case, the expression
3371 type may not match the corresponding FIELD_DECL type at this point, so we
3372 call "convert" here to fix that up if necessary. This type consistency is
3373 required, for instance because it ensures that possible later folding of
3374 component_refs against this constructor always yields something of the
3375 same type as the initial reference.
3377 Note that the call to "build_template" above is still fine, because it
3378 will only refer to the provided template_type in this case. */
3380 gnat_build_constructor
3381 (type, tree_cons (TYPE_FIELDS (type),
3382 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
3383 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3384 template_addr, NULL_TREE)));
3387 /* Convert to a thin pointer type, TYPE. The only thing we know how to convert
3388 is something that is a fat pointer, so convert to it first if it EXPR
3389 is not already a fat pointer. */
3392 convert_to_thin_pointer (tree type, tree expr)
3394 if (!TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
3396 = convert_to_fat_pointer
3397 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
3399 /* We get the pointer to the data and use a NOP_EXPR to make it the
3401 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)),
3403 expr = build1 (NOP_EXPR, type, expr);
3408 /* Create an expression whose value is that of EXPR,
3409 converted to type TYPE. The TREE_TYPE of the value
3410 is always TYPE. This function implements all reasonable
3411 conversions; callers should filter out those that are
3412 not permitted by the language being compiled. */
3415 convert (tree type, tree expr)
3417 enum tree_code code = TREE_CODE (type);
3418 tree etype = TREE_TYPE (expr);
3419 enum tree_code ecode = TREE_CODE (etype);
3421 /* If EXPR is already the right type, we are done. */
3425 /* If both input and output have padding and are of variable size, do this
3426 as an unchecked conversion. Likewise if one is a mere variant of the
3427 other, so we avoid a pointless unpad/repad sequence. */
3428 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
3429 && TYPE_IS_PADDING_P (type) && TYPE_IS_PADDING_P (etype)
3430 && (!TREE_CONSTANT (TYPE_SIZE (type))
3431 || !TREE_CONSTANT (TYPE_SIZE (etype))
3432 || gnat_types_compatible_p (type, etype)))
3435 /* If the output type has padding, convert to the inner type and
3436 make a constructor to build the record. */
3437 else if (code == RECORD_TYPE && TYPE_IS_PADDING_P (type))
3439 /* If we previously converted from another type and our type is
3440 of variable size, remove the conversion to avoid the need for
3441 variable-size temporaries. */
3442 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3443 && !TREE_CONSTANT (TYPE_SIZE (type)))
3444 expr = TREE_OPERAND (expr, 0);
3446 /* If we are just removing the padding from expr, convert the original
3447 object if we have variable size in order to avoid the need for some
3448 variable-size temporaries. Likewise if the padding is a mere variant
3449 of the other, so we avoid a pointless unpad/repad sequence. */
3450 if (TREE_CODE (expr) == COMPONENT_REF
3451 && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE
3452 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
3453 && (!TREE_CONSTANT (TYPE_SIZE (type))
3454 || gnat_types_compatible_p (type,
3455 TREE_TYPE (TREE_OPERAND (expr, 0)))))
3456 return convert (type, TREE_OPERAND (expr, 0));
3458 /* If the result type is a padded type with a self-referentially-sized
3459 field and the expression type is a record, do this as an
3460 unchecked conversion. */
3461 else if (TREE_CODE (etype) == RECORD_TYPE
3462 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
3463 return unchecked_convert (type, expr, false);
3467 gnat_build_constructor (type,
3468 tree_cons (TYPE_FIELDS (type),
3470 (TYPE_FIELDS (type)),
3475 /* If the input type has padding, remove it and convert to the output type.
3476 The conditions ordering is arranged to ensure that the output type is not
3477 a padding type here, as it is not clear whether the conversion would
3478 always be correct if this was to happen. */
3479 else if (ecode == RECORD_TYPE && TYPE_IS_PADDING_P (etype))
3483 /* If we have just converted to this padded type, just get the
3484 inner expression. */
3485 if (TREE_CODE (expr) == CONSTRUCTOR
3486 && !VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (expr))
3487 && VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->index
3488 == TYPE_FIELDS (etype))
3490 = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->value;
3492 /* Otherwise, build an explicit component reference. */
3495 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
3497 return convert (type, unpadded);
3500 /* If the input is a biased type, adjust first. */
3501 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
3502 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
3503 fold_convert (TREE_TYPE (etype),
3505 TYPE_MIN_VALUE (etype)));
3507 /* If the input is a justified modular type, we need to extract the actual
3508 object before converting it to any other type with the exceptions of an
3509 unconstrained array or of a mere type variant. It is useful to avoid the
3510 extraction and conversion in the type variant case because it could end
3511 up replacing a VAR_DECL expr by a constructor and we might be about the
3512 take the address of the result. */
3513 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
3514 && code != UNCONSTRAINED_ARRAY_TYPE
3515 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
3516 return convert (type, build_component_ref (expr, NULL_TREE,
3517 TYPE_FIELDS (etype), false));
3519 /* If converting to a type that contains a template, convert to the data
3520 type and then build the template. */
3521 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
3523 tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));
3525 /* If the source already has a template, get a reference to the
3526 associated array only, as we are going to rebuild a template
3527 for the target type anyway. */
3528 expr = maybe_unconstrained_array (expr);
3531 gnat_build_constructor
3533 tree_cons (TYPE_FIELDS (type),
3534 build_template (TREE_TYPE (TYPE_FIELDS (type)),
3535 obj_type, NULL_TREE),
3536 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3537 convert (obj_type, expr), NULL_TREE)));
3540 /* There are some special cases of expressions that we process
3542 switch (TREE_CODE (expr))
3548 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
3549 conversion in gnat_expand_expr. NULL_EXPR does not represent
3550 and actual value, so no conversion is needed. */
3551 expr = copy_node (expr);
3552 TREE_TYPE (expr) = type;
3556 /* If we are converting a STRING_CST to another constrained array type,
3557 just make a new one in the proper type. */
3558 if (code == ecode && AGGREGATE_TYPE_P (etype)
3559 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
3560 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
3562 expr = copy_node (expr);
3563 TREE_TYPE (expr) = type;
3569 /* If we are converting a CONSTRUCTOR to a mere variant type, just make
3570 a new one in the proper type. */
3571 if (gnat_types_compatible_p (type, etype))
3573 expr = copy_node (expr);
3574 TREE_TYPE (expr) = type;
3579 case UNCONSTRAINED_ARRAY_REF:
3580 /* Convert this to the type of the inner array by getting the address of
3581 the array from the template. */
3582 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
3583 build_component_ref (TREE_OPERAND (expr, 0),
3584 get_identifier ("P_ARRAY"),
3586 etype = TREE_TYPE (expr);
3587 ecode = TREE_CODE (etype);
3590 case VIEW_CONVERT_EXPR:
3592 /* GCC 4.x is very sensitive to type consistency overall, and view
3593 conversions thus are very frequent. Even though just "convert"ing
3594 the inner operand to the output type is fine in most cases, it
3595 might expose unexpected input/output type mismatches in special
3596 circumstances so we avoid such recursive calls when we can. */
3597 tree op0 = TREE_OPERAND (expr, 0);
3599 /* If we are converting back to the original type, we can just
3600 lift the input conversion. This is a common occurrence with
3601 switches back-and-forth amongst type variants. */
3602 if (type == TREE_TYPE (op0))
3605 /* Otherwise, if we're converting between two aggregate types, we
3606 might be allowed to substitute the VIEW_CONVERT_EXPR target type
3607 in place or to just convert the inner expression. */
3608 if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
3610 /* If we are converting between mere variants, we can just
3611 substitute the VIEW_CONVERT_EXPR in place. */
3612 if (gnat_types_compatible_p (type, etype))
3613 return build1 (VIEW_CONVERT_EXPR, type, op0);
3615 /* Otherwise, we may just bypass the input view conversion unless
3616 one of the types is a fat pointer, which is handled by
3617 specialized code below which relies on exact type matching. */
3618 else if (!TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
3619 return convert (type, op0);
3625 /* If both types are record types, just convert the pointer and
3626 make a new INDIRECT_REF.
3628 ??? Disable this for now since it causes problems with the
3629 code in build_binary_op for MODIFY_EXPR which wants to
3630 strip off conversions. But that code really is a mess and
3631 we need to do this a much better way some time. */
3633 && (TREE_CODE (type) == RECORD_TYPE
3634 || TREE_CODE (type) == UNION_TYPE)
3635 && (TREE_CODE (etype) == RECORD_TYPE
3636 || TREE_CODE (etype) == UNION_TYPE)
3637 && !TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
3638 return build_unary_op (INDIRECT_REF, NULL_TREE,
3639 convert (build_pointer_type (type),
3640 TREE_OPERAND (expr, 0)));
3647 /* Check for converting to a pointer to an unconstrained array. */
3648 if (TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
3649 return convert_to_fat_pointer (type, expr);
3651 /* If we're converting between two aggregate types that are mere
3652 variants, just make a VIEW_CONVERT_EXPR. */
3653 else if (AGGREGATE_TYPE_P (type)
3654 && gnat_types_compatible_p (type, etype))
3655 return build1 (VIEW_CONVERT_EXPR, type, expr);
3657 /* In all other cases of related types, make a NOP_EXPR. */
3658 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
3659 || (code == INTEGER_CST && ecode == INTEGER_CST
3660 && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
3661 return fold_convert (type, expr);
3666 return fold_build1 (CONVERT_EXPR, type, expr);
3669 return fold_convert (type, gnat_truthvalue_conversion (expr));
3672 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
3673 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
3674 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
3675 return unchecked_convert (type, expr, false);
3676 else if (TYPE_BIASED_REPRESENTATION_P (type))
3677 return fold_convert (type,
3678 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
3679 convert (TREE_TYPE (type), expr),
3680 TYPE_MIN_VALUE (type)));
3682 /* ... fall through ... */
3685 return fold (convert_to_integer (type, expr));
3688 case REFERENCE_TYPE:
3689 /* If converting between two pointers to records denoting
3690 both a template and type, adjust if needed to account
3691 for any differing offsets, since one might be negative. */
3692 if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
3695 = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
3696 bit_position (TYPE_FIELDS (TREE_TYPE (type))));
3697 tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
3698 sbitsize_int (BITS_PER_UNIT));
3700 expr = build1 (NOP_EXPR, type, expr);
3701 TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
3702 if (integer_zerop (byte_diff))
3705 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
3706 fold (convert (sizetype, byte_diff)));
3709 /* If converting to a thin pointer, handle specially. */
3710 if (TYPE_THIN_POINTER_P (type)
3711 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
3712 return convert_to_thin_pointer (type, expr);
3714 /* If converting fat pointer to normal pointer, get the pointer to the
3715 array and then convert it. */
3716 else if (TYPE_FAT_POINTER_P (etype))
3717 expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
3720 return fold (convert_to_pointer (type, expr));
3723 return fold (convert_to_real (type, expr));
3726 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
3728 gnat_build_constructor
3729 (type, tree_cons (TYPE_FIELDS (type),
3730 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
3733 /* ... fall through ... */
3736 /* In these cases, assume the front-end has validated the conversion.
3737 If the conversion is valid, it will be a bit-wise conversion, so
3738 it can be viewed as an unchecked conversion. */
3739 return unchecked_convert (type, expr, false);
3742 /* This is a either a conversion between a tagged type and some
3743 subtype, which we have to mark as a UNION_TYPE because of
3744 overlapping fields or a conversion of an Unchecked_Union. */
3745 return unchecked_convert (type, expr, false);
3747 case UNCONSTRAINED_ARRAY_TYPE:
3748 /* If EXPR is a constrained array, take its address, convert it to a
3749 fat pointer, and then dereference it. Likewise if EXPR is a
3750 record containing both a template and a constrained array.
3751 Note that a record representing a justified modular type
3752 always represents a packed constrained array. */
3753 if (ecode == ARRAY_TYPE
3754 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
3755 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
3756 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
3759 (INDIRECT_REF, NULL_TREE,
3760 convert_to_fat_pointer (TREE_TYPE (type),
3761 build_unary_op (ADDR_EXPR,
3764 /* Do something very similar for converting one unconstrained
3765 array to another. */
3766 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
3768 build_unary_op (INDIRECT_REF, NULL_TREE,
3769 convert (TREE_TYPE (type),
3770 build_unary_op (ADDR_EXPR,
3776 return fold (convert_to_complex (type, expr));
3783 /* Remove all conversions that are done in EXP. This includes converting
3784 from a padded type or to a justified modular type. If TRUE_ADDRESS
3785 is true, always return the address of the containing object even if
3786 the address is not bit-aligned. */
3789 remove_conversions (tree exp, bool true_address)
3791 switch (TREE_CODE (exp))
3795 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3796 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
3798 remove_conversions (VEC_index (constructor_elt,
3799 CONSTRUCTOR_ELTS (exp), 0)->value,
3804 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
3805 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
3806 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3809 case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
3810 case NOP_EXPR: case CONVERT_EXPR:
3811 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3820 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
3821 refers to the underlying array. If its type has TYPE_CONTAINS_TEMPLATE_P,
3822 likewise return an expression pointing to the underlying array. */
3825 maybe_unconstrained_array (tree exp)
3827 enum tree_code code = TREE_CODE (exp);
3830 switch (TREE_CODE (TREE_TYPE (exp)))
3832 case UNCONSTRAINED_ARRAY_TYPE:
3833 if (code == UNCONSTRAINED_ARRAY_REF)
3836 = build_unary_op (INDIRECT_REF, NULL_TREE,
3837 build_component_ref (TREE_OPERAND (exp, 0),
3838 get_identifier ("P_ARRAY"),
3840 TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp);
3844 else if (code == NULL_EXPR)
3845 return build1 (NULL_EXPR,
3846 TREE_TYPE (TREE_TYPE (TYPE_FIELDS
3847 (TREE_TYPE (TREE_TYPE (exp))))),
3848 TREE_OPERAND (exp, 0));
3851 /* If this is a padded type, convert to the unpadded type and see if
3852 it contains a template. */
3853 if (TYPE_IS_PADDING_P (TREE_TYPE (exp)))
3855 new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp))), exp);
3856 if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
3857 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (new)))
3859 build_component_ref (new, NULL_TREE,
3860 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (new))),
3863 else if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
3865 build_component_ref (exp, NULL_TREE,
3866 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))), 0);
3876 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
3877 If NOTRUNC_P is true, truncation operations should be suppressed. */
3880 unchecked_convert (tree type, tree expr, bool notrunc_p)
3882 tree etype = TREE_TYPE (expr);
3884 /* If the expression is already the right type, we are done. */
3888 /* If both types types are integral just do a normal conversion.
3889 Likewise for a conversion to an unconstrained array. */
3890 if ((((INTEGRAL_TYPE_P (type)
3891 && !(TREE_CODE (type) == INTEGER_TYPE
3892 && TYPE_VAX_FLOATING_POINT_P (type)))
3893 || (POINTER_TYPE_P (type) && ! TYPE_THIN_POINTER_P (type))
3894 || (TREE_CODE (type) == RECORD_TYPE
3895 && TYPE_JUSTIFIED_MODULAR_P (type)))
3896 && ((INTEGRAL_TYPE_P (etype)
3897 && !(TREE_CODE (etype) == INTEGER_TYPE
3898 && TYPE_VAX_FLOATING_POINT_P (etype)))
3899 || (POINTER_TYPE_P (etype) && !TYPE_THIN_POINTER_P (etype))
3900 || (TREE_CODE (etype) == RECORD_TYPE
3901 && TYPE_JUSTIFIED_MODULAR_P (etype))))
3902 || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3905 bool final_unchecked = false;
3907 if (TREE_CODE (etype) == INTEGER_TYPE
3908 && TYPE_BIASED_REPRESENTATION_P (etype))
3910 tree ntype = copy_type (etype);
3912 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
3913 TYPE_MAIN_VARIANT (ntype) = ntype;
3914 expr = build1 (NOP_EXPR, ntype, expr);
3917 if (TREE_CODE (type) == INTEGER_TYPE
3918 && TYPE_BIASED_REPRESENTATION_P (type))
3920 rtype = copy_type (type);
3921 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
3922 TYPE_MAIN_VARIANT (rtype) = rtype;
3925 /* We have another special case: if we are unchecked converting subtype
3926 into a base type, we need to ensure that VRP doesn't propagate range
3927 information since this conversion may be done precisely to validate
3928 that the object is within the range it is supposed to have. */
3929 else if (TREE_CODE (expr) != INTEGER_CST
3930 && TREE_CODE (type) == INTEGER_TYPE && !TREE_TYPE (type)
3931 && ((TREE_CODE (etype) == INTEGER_TYPE && TREE_TYPE (etype))
3932 || TREE_CODE (etype) == ENUMERAL_TYPE
3933 || TREE_CODE (etype) == BOOLEAN_TYPE))
3935 /* The optimization barrier is a VIEW_CONVERT_EXPR node; moreover,
3936 in order not to be deemed an useless type conversion, it must
3937 be from subtype to base type.
3939 ??? This may raise addressability and/or aliasing issues because
3940 VIEW_CONVERT_EXPR gets gimplified as an lvalue, thus causing the
3941 address of its operand to be taken if it is deemed addressable
3942 and not already in GIMPLE form. */
3943 rtype = gnat_type_for_mode (TYPE_MODE (type), TYPE_UNSIGNED (type));
3944 rtype = copy_type (rtype);
3945 TYPE_MAIN_VARIANT (rtype) = rtype;
3946 TREE_TYPE (rtype) = type;
3947 final_unchecked = true;
3950 expr = convert (rtype, expr);
3952 expr = fold_build1 (final_unchecked ? VIEW_CONVERT_EXPR : NOP_EXPR,
3956 /* If we are converting TO an integral type whose precision is not the
3957 same as its size, first unchecked convert to a record that contains
3958 an object of the output type. Then extract the field. */
3959 else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
3960 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
3961 GET_MODE_BITSIZE (TYPE_MODE (type))))
3963 tree rec_type = make_node (RECORD_TYPE);
3964 tree field = create_field_decl (get_identifier ("OBJ"), type,
3965 rec_type, 1, 0, 0, 0);
3967 TYPE_FIELDS (rec_type) = field;
3968 layout_type (rec_type);
3970 expr = unchecked_convert (rec_type, expr, notrunc_p);
3971 expr = build_component_ref (expr, NULL_TREE, field, 0);
3974 /* Similarly for integral input type whose precision is not equal to its
3976 else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype)
3977 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
3978 GET_MODE_BITSIZE (TYPE_MODE (etype))))
3980 tree rec_type = make_node (RECORD_TYPE);
3982 = create_field_decl (get_identifier ("OBJ"), etype, rec_type,
3985 TYPE_FIELDS (rec_type) = field;
3986 layout_type (rec_type);
3988 expr = gnat_build_constructor (rec_type, build_tree_list (field, expr));
3989 expr = unchecked_convert (type, expr, notrunc_p);
3992 /* We have a special case when we are converting between two
3993 unconstrained array types. In that case, take the address,
3994 convert the fat pointer types, and dereference. */
3995 else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
3996 && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3997 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
3998 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
3999 build_unary_op (ADDR_EXPR, NULL_TREE,
4003 expr = maybe_unconstrained_array (expr);
4004 etype = TREE_TYPE (expr);
4005 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
4008 /* If the result is an integral type whose size is not equal to
4009 the size of the underlying machine type, sign- or zero-extend
4010 the result. We need not do this in the case where the input is
4011 an integral type of the same precision and signedness or if the output
4012 is a biased type or if both the input and output are unsigned. */
4014 && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
4015 && !(TREE_CODE (type) == INTEGER_TYPE
4016 && TYPE_BIASED_REPRESENTATION_P (type))
4017 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4018 GET_MODE_BITSIZE (TYPE_MODE (type)))
4019 && !(INTEGRAL_TYPE_P (etype)
4020 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
4021 && operand_equal_p (TYPE_RM_SIZE (type),
4022 (TYPE_RM_SIZE (etype) != 0
4023 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
4025 && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
4027 tree base_type = gnat_type_for_mode (TYPE_MODE (type),
4028 TYPE_UNSIGNED (type));
4030 = convert (base_type,
4031 size_binop (MINUS_EXPR,
4033 (GET_MODE_BITSIZE (TYPE_MODE (type))),
4034 TYPE_RM_SIZE (type)));
4037 build_binary_op (RSHIFT_EXPR, base_type,
4038 build_binary_op (LSHIFT_EXPR, base_type,
4039 convert (base_type, expr),
4044 /* An unchecked conversion should never raise Constraint_Error. The code
4045 below assumes that GCC's conversion routines overflow the same way that
4046 the underlying hardware does. This is probably true. In the rare case
4047 when it is false, we can rely on the fact that such conversions are
4048 erroneous anyway. */
4049 if (TREE_CODE (expr) == INTEGER_CST)
4050 TREE_OVERFLOW (expr) = 0;
4052 /* If the sizes of the types differ and this is an VIEW_CONVERT_EXPR,
4053 show no longer constant. */
4054 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4055 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
4057 TREE_CONSTANT (expr) = 0;
4062 /* Search the chain of currently available builtin declarations for a node
4063 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
4064 found, if any, or NULL_TREE otherwise. */
4066 builtin_decl_for (tree name)
4071 for (i = 0; VEC_iterate(tree, builtin_decls, i, decl); i++)
4072 if (DECL_NAME (decl) == name)
4078 /* Return the appropriate GCC tree code for the specified GNAT type,
4079 the latter being a record type as predicated by Is_Record_Type. */
4082 tree_code_for_record_type (Entity_Id gnat_type)
4084 Node_Id component_list
4085 = Component_List (Type_Definition
4087 (Implementation_Base_Type (gnat_type))));
4090 /* Make this a UNION_TYPE unless it's either not an Unchecked_Union or
4091 we have a non-discriminant field outside a variant. In either case,
4092 it's a RECORD_TYPE. */
4094 if (!Is_Unchecked_Union (gnat_type))
4097 for (component = First_Non_Pragma (Component_Items (component_list));
4098 Present (component);
4099 component = Next_Non_Pragma (component))
4100 if (Ekind (Defining_Entity (component)) == E_Component)
4106 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
4107 component of an aggregate type. */
4110 type_for_nonaliased_component_p (tree gnu_type)
4112 /* If the type is passed by reference, we may have pointers to the
4113 component so it cannot be made non-aliased. */
4114 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
4117 /* We used to say that any component of aggregate type is aliased
4118 because the front-end may take 'Reference of it. The front-end
4119 has been enhanced in the meantime so as to use a renaming instead
4120 in most cases, but the back-end can probably take the address of
4121 such a component too so we go for the conservative stance.
4123 For instance, we might need the address of any array type, even
4124 if normally passed by copy, to construct a fat pointer if the
4125 component is used as an actual for an unconstrained formal.
4127 Likewise for record types: even if a specific record subtype is
4128 passed by copy, the parent type might be passed by ref (e.g. if
4129 it's of variable size) and we might take the address of a child
4130 component to pass to a parent formal. We have no way to check
4131 for such conditions here. */
4132 if (AGGREGATE_TYPE_P (gnu_type))
4138 /* Perform final processing on global variables. */
4141 gnat_write_global_declarations (void)
4143 /* Proceed to optimize and emit assembly.
4144 FIXME: shouldn't be the front end's responsibility to call this. */
4147 /* Emit debug info for all global declarations. */
4148 emit_debug_global_declarations (VEC_address (tree, global_decls),
4149 VEC_length (tree, global_decls));
4152 #include "gt-ada-utils.h"
4153 #include "gtype-ada.h"