1 /****************************************************************************
3 * GNAT COMPILER COMPONENTS *
7 * C Implementation File *
9 * Copyright (C) 1992-2008, Free Software Foundation, Inc. *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
24 ****************************************************************************/
28 #include "coretypes.h"
41 #include "tree-inline.h"
42 #include "tree-gimple.h"
43 #include "tree-dump.h"
44 #include "pointer-set.h"
/* Provide target-independent fallbacks for limits some back-ends do not
   define.  NOTE(review): the matching #endif lines are not visible in
   this excerpt.  */
60 #ifndef MAX_FIXED_MODE_SIZE
61 #define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
64 #ifndef MAX_BITS_PER_WORD
65 #define MAX_BITS_PER_WORD BITS_PER_WORD
68 /* If nonzero, pretend we are allocating at global level. */
71 /* Tree nodes for the various types and decls we create.  Indexed by the
   ADT_* enumerators; ADT_LAST bounds the array. */
72 tree gnat_std_decls[(int) ADT_LAST];
74 /* Functions to call for each of the possible raise reasons.  One decl per
   reason code, hence LAST_REASON_CODE + 1 slots. */
75 tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
77 /* Forward declarations for handlers of attributes. */
78 static tree handle_const_attribute (tree *, tree, tree, int, bool *);
79 static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
81 /* Table of machine-independent internal attributes for Ada. We support
82 this minimal set of attributes to accommodate the Alpha back-end which
83 unconditionally puts them on its builtins.  The table is terminated by
   the all-NULL sentinel entry, as the attribute machinery requires. */
84 const struct attribute_spec gnat_internal_attribute_table[] =
86 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
87 { "const", 0, 0, true, false, false, handle_const_attribute },
88 { "nothrow", 0, 0, true, false, false, handle_nothrow_attribute },
89 { NULL, 0, 0, false, false, false, NULL }
92 /* Associates a GNAT tree node to a GCC tree node. It is used in
93 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
94 of `save_gnu_tree' for more info.  The GTY length marker keeps the GC
   aware that this is an array of max_gnat_nodes trees. */
95 static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
/* Accessors for the association table; entries are indexed by GNAT node id
   relative to First_Node_Id. */
97 #define GET_GNU_TREE(GNAT_ENTITY) \
98 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
100 #define SET_GNU_TREE(GNAT_ENTITY,VAL) \
101 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
103 #define PRESENT_GNU_TREE(GNAT_ENTITY) \
104 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
106 /* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
107 static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;
/* Same accessor pattern as above, for the dummy-type table. */
109 #define GET_DUMMY_NODE(GNAT_ENTITY) \
110 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
112 #define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
113 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
115 #define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
116 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
118 /* This variable keeps a table for types for each precision so that we only
119 allocate each of them once. Signed and unsigned types are kept separate.
121 Note that these types are only used when fold-const requests something
122 special. Perhaps we should NOT share these types; we'll see how it
124 static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
126 /* Likewise for float types, but record these by mode. */
127 static GTY(()) tree float_types[NUM_MACHINE_MODES];
129 /* For each binding contour we allocate a binding_level structure to indicate
130 the binding depth.  NOTE(review): some member declarations of this struct
   (e.g. the BLOCK field documented below) are not visible in this excerpt. */
132 struct gnat_binding_level GTY((chain_next ("%h.chain")))
134 /* The binding level containing this one (the enclosing binding level). */
135 struct gnat_binding_level *chain;
136 /* The BLOCK node for this level. */
138 /* If nonzero, the setjmp buffer that needs to be updated for any
139 variable-sized definition within this context. */
143 /* The binding level currently in effect. */
144 static GTY(()) struct gnat_binding_level *current_binding_level;
146 /* A chain of gnat_binding_level structures awaiting reuse.  Marked
   deletable so the GC may reclaim it wholesale. */
147 static GTY((deletable)) struct gnat_binding_level *free_binding_level;
149 /* An array of global declarations. */
150 static GTY(()) VEC(tree,gc) *global_decls;
152 /* An array of builtin declarations. */
153 static GTY(()) VEC(tree,gc) *builtin_decls;
155 /* An array of global renaming pointers. */
156 static GTY(()) VEC(tree,gc) *global_renaming_pointers;
158 /* A chain of unused BLOCK nodes. */
159 static GTY((deletable)) tree free_block_chain;
/* Forward declarations of the local helpers defined later in this file. */
161 static void gnat_install_builtins (void);
162 static tree merge_sizes (tree, tree, tree, bool, bool);
163 static tree compute_related_constant (tree, tree);
164 static tree split_plus (tree, tree *);
165 static void gnat_gimplify_function (tree);
166 static tree float_type_for_precision (int, enum machine_mode);
167 static tree convert_to_fat_pointer (tree, tree);
168 static tree convert_to_thin_pointer (tree, tree);
169 static tree make_descriptor_field (const char *,tree, tree, tree);
170 static bool potential_alignment_gap (tree, tree, tree);
172 /* Initialize the association of GNAT nodes to GCC trees.  Allocates a
   zeroed, GC-tracked array of max_gnat_nodes tree slots. */
175 init_gnat_to_gnu (void)
177 associate_gnat_to_gnu
178 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
181 /* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
182 which is to be associated with GNAT_ENTITY. Such GCC tree node is always
183 a ..._DECL node. If NO_CHECK is nonzero, the latter check is suppressed.
185 If GNU_DECL is zero, a previous association is to be reset. */
188 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
190 /* Check that GNAT_ENTITY is not already defined and that it is being set
191 to something which is a decl. Raise gigi 401 if not. Usually, this
192 means GNAT_ENTITY is defined twice, but occasionally is due to some
194 gcc_assert (!(gnu_decl
195 && (PRESENT_GNU_TREE (gnat_entity)
196 || (!no_check && !DECL_P (gnu_decl)))));
/* Record (or reset, when GNU_DECL is zero) the association. */
198 SET_GNU_TREE (gnat_entity, gnu_decl);
201 /* GNAT_ENTITY is a GNAT tree node for a defining identifier.
202 Return the ..._DECL node that was associated with it. If there is no tree
203 node associated with GNAT_ENTITY, abort.
205 In some cases, such as delayed elaboration or expressions that need to
206 be elaborated only once, GNAT_ENTITY is really not an entity. */
209 get_gnu_tree (Entity_Id gnat_entity)
/* Aborts (via gcc_assert) if no association was recorded. */
211 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
212 return GET_GNU_TREE (gnat_entity);
215 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
218 present_gnu_tree (Entity_Id gnat_entity)
220 return PRESENT_GNU_TREE (gnat_entity);
223 /* Initialize the association of GNAT nodes to GCC trees as dummies.
   Allocates the zeroed, GC-tracked dummy_node_table. */
226 init_dummy_type (void)
229 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
232 /* Make a dummy type corresponding to GNAT_TYPE.  The dummy is cached in
   dummy_node_table so at most one is created per underlying entity. */
235 make_dummy_type (Entity_Id gnat_type)
237 Entity_Id gnat_underlying = Gigi_Equivalent_Type (gnat_type);
240 /* If there is an equivalent type, get its underlying type. */
241 if (Present (gnat_underlying))
242 gnat_underlying = Underlying_Type (gnat_underlying);
244 /* If there was no equivalent type (can only happen when just annotating
245 types) or underlying type, go back to the original type. */
246 if (No (gnat_underlying))
247 gnat_underlying = gnat_type;
249 /* If there is already a dummy type, use that one.  Else make one. */
250 if (PRESENT_DUMMY_NODE (gnat_underlying))
251 return GET_DUMMY_NODE (gnat_underlying);
253 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
255 gnu_type = make_node (Is_Record_Type (gnat_underlying)
256 ? tree_code_for_record_type (gnat_underlying)
258 TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
259 TYPE_DUMMY_P (gnu_type) = 1;
260 if (AGGREGATE_TYPE_P (gnu_type))
262 TYPE_STUB_DECL (gnu_type) = build_decl (TYPE_DECL, NULL_TREE, gnu_type);
263 TYPE_BY_REFERENCE_P (gnu_type) = Is_By_Reference_Type (gnat_type);
/* Cache the dummy under the underlying entity, not the original. */
266 SET_DUMMY_NODE (gnat_underlying, gnu_type);
271 /* Return nonzero if we are currently in the global binding level.
   Note the "true" value is -1, not 1; callers must test for nonzero.
   force_global lets clients pretend allocation is at global level even
   inside a function. */
274 global_bindings_p (void)
276 return ((force_global || !current_function_decl) ? -1 : 0);
279 /* Enter a new binding level.  Reuses structures and BLOCK nodes from the
   free lists when possible and pushes the new level onto
   current_binding_level.  NOTE(review): the function header line is not
   visible in this excerpt; presumably this is gnat_pushlevel. */
284 struct gnat_binding_level *newlevel = NULL;
286 /* Reuse a struct for this binding level, if there is one. */
287 if (free_binding_level)
289 newlevel = free_binding_level;
290 free_binding_level = free_binding_level->chain;
294 = (struct gnat_binding_level *)
295 ggc_alloc (sizeof (struct gnat_binding_level))
;
297 /* Use a free BLOCK, if any; otherwise, allocate one. */
298 if (free_block_chain)
300 newlevel->block = free_block_chain;
301 free_block_chain = BLOCK_CHAIN (free_block_chain);
302 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
305 newlevel->block = make_node (BLOCK);
307 /* Point the BLOCK we just made to its parent. */
308 if (current_binding_level)
309 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
/* A recycled BLOCK may still carry stale vars/subblocks; clear them. */
311 BLOCK_VARS (newlevel->block) = BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
312 TREE_USED (newlevel->block) = 1;
314 /* Add this level to the front of the chain (stack) of levels that are
316 newlevel->chain = current_binding_level;
317 newlevel->jmpbuf_decl = NULL_TREE;
318 current_binding_level = newlevel;
321 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
322 and point FNDECL to this BLOCK. */
325 set_current_block_context (tree fndecl)
327 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
328 DECL_INITIAL (fndecl) = current_binding_level->block;
331 /* Set the jmpbuf_decl for the current binding level to DECL. */
334 set_block_jmpbuf_decl (tree decl)
336 current_binding_level->jmpbuf_decl = decl;
339 /* Get the jmpbuf_decl, if any, for the current binding level. */
342 get_block_jmpbuf_decl ()
344 return current_binding_level->jmpbuf_decl;
347 /* Exit a binding level. Set any BLOCK into the current code group.
   NOTE(review): the function header line is not visible in this excerpt;
   presumably this is gnat_poplevel. */
352 struct gnat_binding_level *level = current_binding_level;
353 tree block = level->block;
/* Lists were built in reverse (see gnat_pushdecl); restore source order. */
355 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
356 BLOCK_SUBBLOCKS (block) = nreverse (BLOCK_SUBBLOCKS (block));
358 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
359 are no variables free the block and merge its subblocks into those of its
360 parent block. Otherwise, add it to the list of its parent. */
361 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
363 else if (BLOCK_VARS (block) == NULL_TREE)
365 BLOCK_SUBBLOCKS (level->chain->block)
366 = chainon (BLOCK_SUBBLOCKS (block),
367 BLOCK_SUBBLOCKS (level->chain->block));
/* The emptied BLOCK goes back on the free list for reuse. */
368 BLOCK_CHAIN (block) = free_block_chain;
369 free_block_chain = block;
373 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
374 BLOCK_SUBBLOCKS (level->chain->block) = block;
375 TREE_USED (block) = 1;
376 set_block_for_group (block);
379 /* Free this binding structure. */
380 current_binding_level = level->chain;
381 level->chain = free_binding_level;
382 free_binding_level = level;
386 /* Records a ..._DECL node DECL as belonging to the current lexical scope
387 and uses GNAT_NODE for location information and propagating flags. */
390 gnat_pushdecl (tree decl, Node_Id gnat_node)
392 /* If at top level, there is no context. But PARM_DECLs always go in the
393 level of its function. */
394 if (global_bindings_p () && TREE_CODE (decl) != PARM_DECL)
395 DECL_CONTEXT (decl) = 0;
398 DECL_CONTEXT (decl) = current_function_decl;
400 /* Functions imported in another function are not really nested. */
401 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_PUBLIC (decl))
402 DECL_NO_STATIC_CHAIN (decl) = 1;
/* Suppress warnings when the front end says they are off, or when there
   is no GNAT node to attach them to. */
405 TREE_NO_WARNING (decl) = (gnat_node == Empty || Warnings_Off (gnat_node));
407 /* Set the location of DECL and emit a declaration for it. */
408 if (Present (gnat_node))
409 Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
410 add_decl_expr (decl, gnat_node);
412 /* Put the declaration on the list. The list of declarations is in reverse
413 order. The list will be reversed later. Put global variables in the
414 globals list and builtin functions in a dedicated list to speed up
415 further lookups. Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into
416 the list, as they will cause trouble with the debugger and aren't needed
418 if (TREE_CODE (decl) != TYPE_DECL
419 || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
421 if (global_bindings_p ())
423 VEC_safe_push (tree, gc, global_decls, decl);
425 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
426 VEC_safe_push (tree, gc, builtin_decls, decl);
/* Local decls are chained, newest first, on the current BLOCK; the list
   is reversed when the level is popped. */
430 TREE_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
431 BLOCK_VARS (current_binding_level->block) = decl;
435 /* For the declaration of a type, set its name if it either is not already
436 set, was set to an IDENTIFIER_NODE, indicating an internal name,
437 or if the previous type name was not derived from a source name.
438 We'd rather have the type named with a real name and all the pointer
439 types to the same object have the same POINTER_TYPE node. Code in the
440 equivalent function of c-decl.c makes a copy of the type node here, but
441 that may cause us trouble with incomplete types. We make an exception
442 for fat pointer types because the compiler automatically builds them
443 for unconstrained array types and the debugger uses them to represent
444 both these and pointers to these. */
445 if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
447 tree t = TREE_TYPE (decl);
449 if (!TYPE_NAME (t) || TREE_CODE (TYPE_NAME (t)) == IDENTIFIER_NODE)
451 else if (TYPE_FAT_POINTER_P (t))
/* Name a variant copy so the original fat pointer type keeps its
   automatically-built identity for the debugger. */
453 tree tt = build_variant_type_copy (t);
454 TYPE_NAME (tt) = decl;
455 TREE_USED (tt) = TREE_USED (t);
456 TREE_TYPE (decl) = tt;
457 DECL_ORIGINAL_TYPE (decl) = t;
460 else if (DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
465 /* Propagate the name to all the variants. This is needed for
466 the type qualifiers machinery to work properly. */
468 for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
469 TYPE_NAME (t) = decl;
473 /* Do little here. Set up the standard declarations later after the
474 front end has been run. */
477 gnat_init_decl_processing (void)
479 /* Make the binding_level structure for global names. */
480 current_function_decl = 0;
481 current_binding_level = 0;
482 free_binding_level = 0;
485 build_common_tree_nodes (true, true);
487 /* In Ada, we use a signed type for SIZETYPE. Use the signed type
488 corresponding to the size of Pmode. In most cases when ptr_mode and
489 Pmode differ, C will use the width of ptr_mode as sizetype. But we get
490 far better code using the width of Pmode. Make this here since we need
491 this before we can expand the GNAT types. */
492 size_type_node = gnat_type_for_size (GET_MODE_BITSIZE (Pmode), 0);
493 set_sizetype (size_type_node);
494 build_common_tree_nodes_2 (0);
/* Cache a "void *" type node for later use throughout gigi. */
496 ptr_void_type_node = build_pointer_type (void_type_node);
498 gnat_install_builtins ();
501 /* Install the builtin functions we might need. */
504 gnat_install_builtins ()
506 /* Builtins used by generic middle-end optimizers. */
507 build_common_builtin_nodes ();
509 /* Target specific builtins, such as the AltiVec family on ppc. */
510 targetm.init_builtins ();
513 /* Create the predefined scalar types such as `integer_type_node' needed
514 in the gcc back-end and initialize the global binding level.
   Also declares the GNAT runtime support routines (allocation, exception
   handling, raise functions) visible in the body below. */
517 init_gigi_decls (tree long_long_float_type, tree exception_type)
522 /* Set the types that GCC and Gigi use from the front end. We would like
523 to do this for char_type_node, but it needs to correspond to the C
525 if (TREE_CODE (TREE_TYPE (long_long_float_type)) == INTEGER_TYPE)
527 /* In this case, the builtin floating point types are VAX float,
528 so make up a type for use. */
529 longest_float_type_node = make_node (REAL_TYPE);
530 TYPE_PRECISION (longest_float_type_node) = LONG_DOUBLE_TYPE_SIZE;
531 layout_type (longest_float_type_node);
532 create_type_decl (get_identifier ("longest float type"),
533 longest_float_type_node, NULL, false, true, Empty);
536 longest_float_type_node = TREE_TYPE (long_long_float_type);
538 except_type_node = TREE_TYPE (exception_type);
540 unsigned_type_node = gnat_type_for_size (INT_TYPE_SIZE, 1);
541 create_type_decl (get_identifier ("unsigned int"), unsigned_type_node,
542 NULL, false, true, Empty);
544 void_type_decl_node = create_type_decl (get_identifier ("void"),
545 void_type_node, NULL, false, true,
/* Function type "void f(void)" and a pointer to it, used below. */
548 void_ftype = build_function_type (void_type_node, NULL_TREE);
549 ptr_void_ftype = build_pointer_type (void_ftype);
551 /* Now declare runtime functions. */
552 endlink = tree_cons (NULL_TREE, void_type_node, NULL_TREE);
554 /* malloc is a function declaration tree for a function to allocate
556 malloc_decl = create_subprog_decl (get_identifier ("__gnat_malloc"),
558 build_function_type (ptr_void_type_node,
559 tree_cons (NULL_TREE,
562 NULL_TREE, false, true, true, NULL,
/* Mark __gnat_malloc as malloc-like so alias analysis can exploit it. */
564 DECL_IS_MALLOC (malloc_decl) = 1;
566 /* free is a function declaration tree for a function to free memory. */
568 = create_subprog_decl (get_identifier ("__gnat_free"), NULL_TREE,
569 build_function_type (void_type_node,
570 tree_cons (NULL_TREE,
573 NULL_TREE, false, true, true, NULL, Empty);
575 /* Make the types and functions used for exception processing. */
577 = build_array_type (gnat_type_for_mode (Pmode, 0),
578 build_index_type (build_int_cst (NULL_TREE, 5)));
579 create_type_decl (get_identifier ("JMPBUF_T"), jmpbuf_type, NULL,
581 jmpbuf_ptr_type = build_pointer_type (jmpbuf_type);
583 /* Functions to get and set the jumpbuf pointer for the current thread. */
585 = create_subprog_decl
586 (get_identifier ("system__soft_links__get_jmpbuf_address_soft"),
587 NULL_TREE, build_function_type (jmpbuf_ptr_type, NULL_TREE),
588 NULL_TREE, false, true, true, NULL, Empty);
589 /* Avoid creating superfluous edges to __builtin_setjmp receivers. */
590 DECL_IS_PURE (get_jmpbuf_decl) = 1;
593 = create_subprog_decl
594 (get_identifier ("system__soft_links__set_jmpbuf_address_soft"),
596 build_function_type (void_type_node,
597 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
598 NULL_TREE, false, true, true, NULL, Empty);
600 /* Function to get the current exception. */
602 = create_subprog_decl
603 (get_identifier ("system__soft_links__get_gnat_exception"),
605 build_function_type (build_pointer_type (except_type_node), NULL_TREE),
606 NULL_TREE, false, true, true, NULL, Empty);
607 /* Avoid creating superfluous edges to __builtin_setjmp receivers. */
608 DECL_IS_PURE (get_excptr_decl) = 1;
610 /* Functions that raise exceptions. */
612 = create_subprog_decl
613 (get_identifier ("__gnat_raise_nodefer_with_msg"), NULL_TREE,
614 build_function_type (void_type_node,
615 tree_cons (NULL_TREE,
616 build_pointer_type (except_type_node),
618 NULL_TREE, false, true, true, NULL, Empty);
620 /* Dummy objects to materialize "others" and "all others" in the exception
621 tables. These are exported by a-exexpr.adb, so see this unit for the
625 = create_var_decl (get_identifier ("OTHERS"),
626 get_identifier ("__gnat_others_value"),
627 integer_type_node, 0, 1, 0, 1, 1, 0, Empty);
630 = create_var_decl (get_identifier ("ALL_OTHERS"),
631 get_identifier ("__gnat_all_others_value"),
632 integer_type_node, 0, 1, 0, 1, 1, 0, Empty);
634 /* Hooks to call when entering/leaving an exception handler. */
636 = create_subprog_decl (get_identifier ("__gnat_begin_handler"), NULL_TREE,
637 build_function_type (void_type_node,
638 tree_cons (NULL_TREE,
641 NULL_TREE, false, true, true, NULL, Empty);
644 = create_subprog_decl (get_identifier ("__gnat_end_handler"), NULL_TREE,
645 build_function_type (void_type_node,
646 tree_cons (NULL_TREE,
649 NULL_TREE, false, true, true, NULL, Empty);
651 /* If in no exception handlers mode, all raise statements are redirected to
652 __gnat_last_chance_handler. No need to redefine raise_nodefer_decl, since
653 this procedure will never be called in this mode. */
654 if (No_Exception_Handlers_Set ())
657 = create_subprog_decl
658 (get_identifier ("__gnat_last_chance_handler"), NULL_TREE,
659 build_function_type (void_type_node,
660 tree_cons (NULL_TREE,
661 build_pointer_type (char_type_node),
662 tree_cons (NULL_TREE,
665 NULL_TREE, false, true, true, NULL, Empty);
/* Every raise reason shares the single last-chance handler. */
667 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
668 gnat_raise_decls[i] = decl;
671 /* Otherwise, make one decl for each exception reason. */
672 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
/* Runtime routines are named __gnat_rcheck_NN by reason code. */
676 sprintf (name, "__gnat_rcheck_%.2d", i);
678 = create_subprog_decl
679 (get_identifier (name), NULL_TREE,
680 build_function_type (void_type_node,
681 tree_cons (NULL_TREE,
684 tree_cons (NULL_TREE,
687 NULL_TREE, false, true, true, NULL, Empty);
690 /* Indicate that these never return. */
691 TREE_THIS_VOLATILE (raise_nodefer_decl) = 1;
692 TREE_SIDE_EFFECTS (raise_nodefer_decl) = 1;
693 TREE_TYPE (raise_nodefer_decl)
694 = build_qualified_type (TREE_TYPE (raise_nodefer_decl),
697 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
699 TREE_THIS_VOLATILE (gnat_raise_decls[i]) = 1;
700 TREE_SIDE_EFFECTS (gnat_raise_decls[i]) = 1;
701 TREE_TYPE (gnat_raise_decls[i])
702 = build_qualified_type (TREE_TYPE (gnat_raise_decls[i]),
706 /* setjmp returns an integer and has one operand, which is a pointer to
709 = create_subprog_decl
710 (get_identifier ("__builtin_setjmp"), NULL_TREE,
711 build_function_type (integer_type_node,
712 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
713 NULL_TREE, false, true, true, NULL, Empty);
/* Mark it as the real builtin so the back-end expands it specially. */
715 DECL_BUILT_IN_CLASS (setjmp_decl) = BUILT_IN_NORMAL;
716 DECL_FUNCTION_CODE (setjmp_decl) = BUILT_IN_SETJMP;
718 /* update_setjmp_buf updates a setjmp buffer from the current stack pointer
720 update_setjmp_buf_decl
721 = create_subprog_decl
722 (get_identifier ("__builtin_update_setjmp_buf"), NULL_TREE,
723 build_function_type (void_type_node,
724 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
725 NULL_TREE, false, true, true, NULL, Empty);
727 DECL_BUILT_IN_CLASS (update_setjmp_buf_decl) = BUILT_IN_NORMAL;
728 DECL_FUNCTION_CODE (update_setjmp_buf_decl) = BUILT_IN_UPDATE_SETJMP_BUF;
730 main_identifier_node = get_identifier ("main");
733 /* Given a record type RECORD_TYPE and a chain of FIELD_DECL nodes FIELDLIST,
734 finish constructing the record or union type. If REP_LEVEL is zero, this
735 record has no representation clause and so will be entirely laid out here.
736 If REP_LEVEL is one, this record has a representation clause and has been
737 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
738 this record is derived from a parent record and thus inherits its layout;
739 only make a pass on the fields to finalize them. If DO_NOT_FINALIZE is
740 true, the record type is expected to be modified afterwards so it will
741 not be sent to the back-end for finalization. */
744 finish_record_type (tree record_type, tree fieldlist, int rep_level,
745 bool do_not_finalize)
747 enum tree_code code = TREE_CODE (record_type);
748 tree name = TYPE_NAME (record_type);
749 tree ada_size = bitsize_zero_node;
750 tree size = bitsize_zero_node;
/* Remember which layout quantities were already set on entry. */
751 bool had_size = TYPE_SIZE (record_type) != 0;
752 bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
753 bool had_align = TYPE_ALIGN (record_type) != 0;
756 if (name && TREE_CODE (name) == TYPE_DECL)
757 name = DECL_NAME (name);
759 TYPE_FIELDS (record_type) = fieldlist;
760 TYPE_STUB_DECL (record_type) = build_decl (TYPE_DECL, name, record_type);
762 /* We don't need both the typedef name and the record name output in
763 the debugging information, since they are the same. */
764 DECL_ARTIFICIAL (TYPE_STUB_DECL (record_type)) = 1;
766 /* Globally initialize the record first. If this is a rep'ed record,
767 that just means some initializations; otherwise, layout the record. */
770 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
771 TYPE_MODE (record_type) = BLKmode;
774 TYPE_SIZE_UNIT (record_type) = size_zero_node;
776 TYPE_SIZE (record_type) = bitsize_zero_node;
778 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
779 out just like a UNION_TYPE, since the size will be fixed. */
780 else if (code == QUAL_UNION_TYPE)
785 /* Ensure there isn't a size already set. There can be in an error
786 case where there is a rep clause but all fields have errors and
787 no longer have a position. */
788 TYPE_SIZE (record_type) = 0;
789 layout_type (record_type);
792 /* At this point, the position and size of each field is known. It was
793 either set before entry by a rep clause, or by laying out the type above.
795 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
796 to compute the Ada size; the GCC size and alignment (for rep'ed records
797 that are not padding types); and the mode (for rep'ed records). We also
798 clear the DECL_BIT_FIELD indication for the cases we know have not been
799 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
801 if (code == QUAL_UNION_TYPE)
802 fieldlist = nreverse (fieldlist);
804 for (field = fieldlist; field; field = TREE_CHAIN (field))
806 tree type = TREE_TYPE (field);
807 tree pos = bit_position (field);
808 tree this_size = DECL_SIZE (field);
/* Prefer the Ada size of a nested record when it has one. */
811 if ((TREE_CODE (type) == RECORD_TYPE
812 || TREE_CODE (type) == UNION_TYPE
813 || TREE_CODE (type) == QUAL_UNION_TYPE)
814 && !TYPE_IS_FAT_POINTER_P (type)
815 && !TYPE_CONTAINS_TEMPLATE_P (type)
816 && TYPE_ADA_SIZE (type))
817 this_ada_size = TYPE_ADA_SIZE (type);
819 this_ada_size = this_size;
821 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
822 if (DECL_BIT_FIELD (field)
823 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
825 unsigned int align = TYPE_ALIGN (type);
827 /* In the general case, type alignment is required. */
828 if (value_factor_p (pos, align))
830 /* The enclosing record type must be sufficiently aligned.
831 Otherwise, if no alignment was specified for it and it
832 has been laid out already, bump its alignment to the
833 desired one if this is compatible with its size. */
834 if (TYPE_ALIGN (record_type) >= align)
836 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
837 DECL_BIT_FIELD (field) = 0;
841 && value_factor_p (TYPE_SIZE (record_type), align))
843 TYPE_ALIGN (record_type) = align;
844 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
845 DECL_BIT_FIELD (field) = 0;
849 /* In the non-strict alignment case, only byte alignment is. */
850 if (!STRICT_ALIGNMENT
851 && DECL_BIT_FIELD (field)
852 && value_factor_p (pos, BITS_PER_UNIT))
853 DECL_BIT_FIELD (field) = 0;
856 /* If we still have DECL_BIT_FIELD set at this point, we know the field
857 is technically not addressable. Except that it can actually be
858 addressed if the field is BLKmode and happens to be properly
860 DECL_NONADDRESSABLE_P (field)
861 |= DECL_BIT_FIELD (field) && DECL_MODE (field) != BLKmode;
863 /* A type must be as aligned as its most aligned field that is not
864 a bit-field. But this is already enforced by layout_type. */
865 if (rep_level > 0 && !DECL_BIT_FIELD (field))
866 TYPE_ALIGN (record_type)
867 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
/* Sizes accumulate differently per record kind (union vs. record). */
872 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
873 size = size_binop (MAX_EXPR, size, this_size);
876 case QUAL_UNION_TYPE:
878 = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
879 this_ada_size, ada_size);
880 size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
885 /* Since we know here that all fields are sorted in order of
886 increasing bit position, the size of the record is one
887 higher than the ending bit of the last field processed
888 unless we have a rep clause, since in that case we might
889 have a field outside a QUAL_UNION_TYPE that has a higher ending
890 position. So use a MAX in that case. Also, if this field is a
891 QUAL_UNION_TYPE, we need to take into account the previous size in
892 the case of empty variants. */
894 = merge_sizes (ada_size, pos, this_ada_size,
895 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
897 = merge_sizes (size, pos, this_size,
898 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
/* Restore original field order for QUAL_UNION_TYPE. */
906 if (code == QUAL_UNION_TYPE)
907 nreverse (fieldlist);
911 /* If this is a padding record, we never want to make the size smaller
912 than what was specified in it, if any. */
913 if (TREE_CODE (record_type) == RECORD_TYPE
914 && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
915 size = TYPE_SIZE (record_type);
917 /* Now set any of the values we've just computed that apply. */
918 if (!TYPE_IS_FAT_POINTER_P (record_type)
919 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
920 SET_TYPE_ADA_SIZE (record_type, ada_size);
924 tree size_unit = had_size_unit
925 ? TYPE_SIZE_UNIT (record_type)
927 size_binop (CEIL_DIV_EXPR, size,
929 unsigned int align = TYPE_ALIGN (record_type);
/* Round both size forms up to the alignment. */
931 TYPE_SIZE (record_type) = variable_size (round_up (size, align));
932 TYPE_SIZE_UNIT (record_type)
933 = variable_size (round_up (size_unit, align / BITS_PER_UNIT));
935 compute_record_mode (record_type);
939 if (!do_not_finalize)
940 rest_of_record_type_compilation (record_type);
943 /* Wrap up compilation of RECORD_TYPE, i.e. most notably output all
944 the debug information associated with it. It need not be invoked
945 directly in most cases since finish_record_type takes care of doing
946 so, unless explicitly requested not to through DO_NOT_FINALIZE. */
949 rest_of_record_type_compilation (tree record_type)
951 tree fieldlist = TYPE_FIELDS (record_type);
953 enum tree_code code = TREE_CODE (record_type);
954 bool var_size = false;
956 for (field = fieldlist; field; field = TREE_CHAIN (field))
958 /* We need to make an XVE/XVU record if any field has variable size,
959 whether or not the record does. For example, if we have a union,
960 it may be that all fields, rounded up to the alignment, have the
961 same size, in which case we'll use that size. But the debug
962 output routines (except Dwarf2) won't be able to output the fields,
963 so we need to make the special record. */
964 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
965 /* If a field has a non-constant qualifier, the record will have
966 variable size too. */
967 || (code == QUAL_UNION_TYPE
968 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
975 /* If this record is of variable size, rename it so that the
976 debugger knows it is and make a new, parallel, record
977 that tells the debugger how the record is laid out. See
978 exp_dbug.ads. But don't do this for records that are padding
979 since they confuse GDB. */
981 && !(TREE_CODE (record_type) == RECORD_TYPE
982 && TYPE_IS_PADDING_P (record_type)))
985 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
986 ? UNION_TYPE : TREE_CODE (record_type));
987 tree orig_name = TYPE_NAME (record_type);
989 = (TREE_CODE (orig_name) == TYPE_DECL ? DECL_NAME (orig_name)
992 = concat_id_with_name (orig_id,
993 TREE_CODE (record_type) == QUAL_UNION_TYPE
995 tree last_pos = bitsize_zero_node;
997 tree prev_old_field = 0;
999 TYPE_NAME (new_record_type) = new_id;
1000 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
1001 TYPE_STUB_DECL (new_record_type)
1002 = build_decl (TYPE_DECL, new_id, new_record_type);
1003 DECL_ARTIFICIAL (TYPE_STUB_DECL (new_record_type)) = 1;
1004 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
1005 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
1006 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
1007 TYPE_SIZE_UNIT (new_record_type)
1008 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
1010 /* Now scan all the fields, replacing each field with a new
1011 field corresponding to the new encoding. */
1012 for (old_field = TYPE_FIELDS (record_type); old_field;
1013 old_field = TREE_CHAIN (old_field))
1015 tree field_type = TREE_TYPE (old_field);
1016 tree field_name = DECL_NAME (old_field);
1018 tree curpos = bit_position (old_field);
1020 unsigned int align = 0;
1023 /* See how the position was modified from the last position.
1025 There are two basic cases we support: a value was added
1026 to the last position or the last position was rounded to
1027 a boundary and they something was added. Check for the
1028 first case first. If not, see if there is any evidence
1029 of rounding. If so, round the last position and try
1032 If this is a union, the position can be taken as zero. */
1034 if (TREE_CODE (new_record_type) == UNION_TYPE)
1035 pos = bitsize_zero_node, align = 0;
1037 pos = compute_related_constant (curpos, last_pos);
1039 if (!pos && TREE_CODE (curpos) == MULT_EXPR
1040 && host_integerp (TREE_OPERAND (curpos, 1), 1))
1042 tree offset = TREE_OPERAND (curpos, 0);
1043 align = tree_low_cst (TREE_OPERAND (curpos, 1), 1);
1045 /* Strip off any conversions. */
1046 while (TREE_CODE (offset) == NON_LVALUE_EXPR
1047 || TREE_CODE (offset) == NOP_EXPR
1048 || TREE_CODE (offset) == CONVERT_EXPR)
1049 offset = TREE_OPERAND (offset, 0);
1051 /* An offset which is a bitwise AND with a negative power of 2
1052 means an alignment corresponding to this power of 2. */
1053 if (TREE_CODE (offset) == BIT_AND_EXPR
1054 && host_integerp (TREE_OPERAND (offset, 1), 0)
1055 && tree_int_cst_sgn (TREE_OPERAND (offset, 1)) < 0)
1058 = - tree_low_cst (TREE_OPERAND (offset, 1), 0);
1059 if (exact_log2 (pow) > 0)
1063 pos = compute_related_constant (curpos,
1064 round_up (last_pos, align));
1066 else if (!pos && TREE_CODE (curpos) == PLUS_EXPR
1067 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
1068 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
1069 && host_integerp (TREE_OPERAND
1070 (TREE_OPERAND (curpos, 0), 1),
1075 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
1076 pos = compute_related_constant (curpos,
1077 round_up (last_pos, align));
1079 else if (potential_alignment_gap (prev_old_field, old_field,
1082 align = TYPE_ALIGN (field_type);
1083 pos = compute_related_constant (curpos,
1084 round_up (last_pos, align));
1087 /* If we can't compute a position, set it to zero.
1089 ??? We really should abort here, but it's too much work
1090 to get this correct for all cases. */
1093 pos = bitsize_zero_node;
1095 /* See if this type is variable-sized and make a pointer type
1096 and indicate the indirection if so. Beware that the debug
1097 back-end may adjust the position computed above according
1098 to the alignment of the field type, i.e. the pointer type
1099 in this case, if we don't preventively counter that. */
1100 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
1102 field_type = build_pointer_type (field_type);
1103 if (align != 0 && TYPE_ALIGN (field_type) > align)
1105 field_type = copy_node (field_type);
1106 TYPE_ALIGN (field_type) = align;
1111 /* Make a new field name, if necessary. */
1112 if (var || align != 0)
1117 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
1118 align / BITS_PER_UNIT);
1120 strcpy (suffix, "XVL");
1122 field_name = concat_id_with_name (field_name, suffix);
1125 new_field = create_field_decl (field_name, field_type,
1127 DECL_SIZE (old_field), pos, 0);
1128 TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
1129 TYPE_FIELDS (new_record_type) = new_field;
1131 /* If old_field is a QUAL_UNION_TYPE, take its size as being
1132 zero. The only time it's not the last field of the record
1133 is when there are other components at fixed positions after
1134 it (meaning there was a rep clause for every field) and we
1135 want to be able to encode them. */
1136 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
1137 (TREE_CODE (TREE_TYPE (old_field))
1140 : DECL_SIZE (old_field));
1141 prev_old_field = old_field;
1144 TYPE_FIELDS (new_record_type)
1145 = nreverse (TYPE_FIELDS (new_record_type));
1147 rest_of_type_decl_compilation (TYPE_STUB_DECL (new_record_type));
1150 rest_of_type_decl_compilation (TYPE_STUB_DECL (record_type));
1153 /* Utility function of above to merge LAST_SIZE, the previous size of a record
1154 with FIRST_BIT and SIZE that describe a field. SPECIAL is nonzero
1155 if this represents a QUAL_UNION_TYPE in which case we must look for
1156 COND_EXPRs and replace a value of zero with the old size. If HAS_REP
1157 is nonzero, we must take the MAX of the end position of this field
1158 with LAST_SIZE. In all other cases, we use FIRST_BIT plus SIZE.
1160 We return an expression for the size. */
1163 merge_sizes (tree last_size, tree first_bit, tree size, bool special,
1166 tree type = TREE_TYPE (last_size);
/* Ordinary case: the field ends at FIRST_BIT + SIZE; with a rep clause
   (HAS_REP, whose test line is elided in this listing) the record size
   is the MAX of that end position and the size accumulated so far.  */
1169 if (!special || TREE_CODE (size) != COND_EXPR)
1171 new = size_binop (PLUS_EXPR, first_bit, size);
1173 new = size_binop (MAX_EXPR, last_size, new);
/* QUAL_UNION_TYPE case: SIZE is a COND_EXPR, so recurse on each arm,
   keeping LAST_SIZE unchanged for arms whose size is zero as the
   function comment requires.  */
1177 new = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
1178 integer_zerop (TREE_OPERAND (size, 1))
1179 ? last_size : merge_sizes (last_size, first_bit,
1180 TREE_OPERAND (size, 1),
1182 integer_zerop (TREE_OPERAND (size, 2))
1183 ? last_size : merge_sizes (last_size, first_bit,
1184 TREE_OPERAND (size, 2),
1187 /* We don't need any NON_VALUE_EXPRs and they can confuse us (especially
1188 when fed through substitute_in_expr) into thinking that a constant
1189 size is not constant. */
1190 while (TREE_CODE (new) == NON_LVALUE_EXPR)
1191 new = TREE_OPERAND (new, 0);
1196 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1197 related by the addition of a constant. Return that constant if so. */
1200 compute_related_constant (tree op0, tree op1)
1202 tree op0_var, op1_var;
/* Split each operand into a constant part (returned) and a variable
   part (stored through the pointer); see split_plus below.  */
1203 tree op0_con = split_plus (op0, &op0_var);
1204 tree op1_con = split_plus (op1, &op1_var);
1205 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
/* If the variable parts are structurally identical, the operands differ
   exactly by the difference of their constant parts.  Otherwise, verify
   directly that OP0 == OP1 + RESULT.  (The return statements are elided
   in this listing.)  */
1207 if (operand_equal_p (op0_var, op1_var, 0))
1209 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1215 /* Utility function of above to split a tree OP which may be a sum, into a
1216 constant part, which is returned, and a variable part, which is stored
1217 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1221 split_plus (tree in, tree *pvar)
1223 /* Strip NOPS in order to ease the tree traversal and maximize the
1224 potential for constant or plus/minus discovery. We need to be careful
1225 to always return and set *pvar to bitsizetype trees, but it's worth
/* Default: treat the whole of IN as the variable part and return a zero
   constant part (overridden in the specific cases below).  */
1229 *pvar = convert (bitsizetype, in);
/* A bare constant is all constant part, no variable part.  */
1231 if (TREE_CODE (in) == INTEGER_CST)
1233 *pvar = bitsize_zero_node;
1234 return convert (bitsizetype, in);
/* For a sum or difference, split both operands recursively and
   recombine the two constant parts and the two variable parts with the
   same PLUS/MINUS operation.  */
1236 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1238 tree lhs_var, rhs_var;
1239 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1240 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
/* If neither operand contributed a constant part, keep *PVAR as the
   whole expression (set above) and report a zero constant part.  */
1242 if (lhs_var == TREE_OPERAND (in, 0)
1243 && rhs_var == TREE_OPERAND (in, 1))
1244 return bitsize_zero_node;
1246 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1247 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
1250 return bitsize_zero_node;
1253 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1254 subprogram. If it is void_type_node, then we are dealing with a procedure,
1255 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1256 PARM_DECL nodes that are the subprogram arguments. CICO_LIST is the
1257 copy-in/copy-out list to be stored into TYPE_CICO_LIST.
1258 RETURNS_UNCONSTRAINED is true if the function returns an unconstrained
1259 object. RETURNS_BY_REF is true if the function returns by reference.
1260 RETURNS_BY_TARGET_PTR is true if the function is to be passed (as its
1261 first parameter) the address of the place to copy its result. */
1264 create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
1265 bool returns_unconstrained, bool returns_by_ref,
1266 bool returns_by_target_ptr)
1268 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1269 the subprogram formal parameters. This list is generated by traversing the
1270 input list of PARM_DECL nodes. */
1271 tree param_type_list = NULL;
/* Collect the type of each PARM_DECL, building the list in reverse
   (it is reversed with nreverse below).  */
1275 for (param_decl = param_decl_list; param_decl;
1276 param_decl = TREE_CHAIN (param_decl))
1277 param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
1280 /* The list of the function parameter types has to be terminated by the void
1281 type to signal to the back-end that we are not dealing with a variable
1282 parameter subprogram, but that the subprogram has a fixed number of
1284 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1286 /* The list of argument types has been created in reverse
1288 param_type_list = nreverse (param_type_list);
1290 type = build_function_type (return_type, param_type_list);
1292 /* TYPE may have been shared since GCC hashes types. If it has a CICO_LIST
1293 or the new type should, make a copy of TYPE. Likewise for
1294 RETURNS_UNCONSTRAINED and RETURNS_BY_REF. */
1295 if (TYPE_CI_CO_LIST (type) || cico_list
1296 || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
1297 || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref
1298 || TYPE_RETURNS_BY_TARGET_PTR_P (type) != returns_by_target_ptr)
1299 type = copy_type (type);
/* Stamp the Ada-specific calling-convention flags on the (possibly
   copied) FUNCTION_TYPE.  */
1301 TYPE_CI_CO_LIST (type) = cico_list;
1302 TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
1303 TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
1304 TYPE_RETURNS_BY_TARGET_PTR_P (type) = returns_by_target_ptr;
1308 /* Return a copy of TYPE but safe to modify in any way. */
1311 copy_type (tree type)
1313 tree new = copy_node (type);
1315 /* copy_node clears this field instead of copying it, because it is
1316 aliased with TREE_CHAIN. */
1317 TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type);
/* Detach the copy from the original's pointer/reference caches and make
   it its own main variant, so later modifications cannot leak into the
   shared original type.  */
1319 TYPE_POINTER_TO (new) = 0;
1320 TYPE_REFERENCE_TO (new) = 0;
1321 TYPE_MAIN_VARIANT (new) = new;
1322 TYPE_NEXT_VARIANT (new) = 0;
1327 /* Return an INTEGER_TYPE of SIZETYPE with range MIN to MAX and whose
1328 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position of
1332 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
1334 /* First build a type for the desired range. */
1335 tree type = build_index_2_type (min, max);
1337 /* If this type has the TYPE_INDEX_TYPE we want, return it. Otherwise, if it
1338 doesn't have TYPE_INDEX_TYPE set, set it to INDEX. If TYPE_INDEX_TYPE
1339 is set, but not to INDEX, make a copy of this type with the requested
1340 index type. Note that we have no way of sharing these types, but that's
1341 only a small hole. */
/* NOTE(review): the return statement for the "already has INDEX" case is
   elided in this listing.  */
1342 if (TYPE_INDEX_TYPE (type) == index)
1344 else if (TYPE_INDEX_TYPE (type))
1345 type = copy_type (type);
/* Install INDEX and declare the type so debug info gets emitted at the
   position of GNAT_NODE.  */
1347 SET_TYPE_INDEX_TYPE (type, index);
1348 create_type_decl (NULL_TREE, type, NULL, true, false, gnat_node);
1352 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type (a character
1353 string) and TYPE is a ..._TYPE node giving its data type.
1354 ARTIFICIAL_P is true if this is a declaration that was generated
1355 by the compiler. DEBUG_INFO_P is true if we need to write debugging
1356 information about this type. GNAT_NODE is used for the position of
1360 create_type_decl (tree type_name, tree type, struct attrib *attr_list,
1361 bool artificial_p, bool debug_info_p, Node_Id gnat_node)
1363 tree type_decl = build_decl (TYPE_DECL, type_name, type);
1364 enum tree_code code = TREE_CODE (type);
1366 DECL_ARTIFICIAL (type_decl) = artificial_p;
/* Dummy (placeholder) types are not entered into the current binding
   level.  */
1368 if (!TYPE_IS_DUMMY_P (type))
1369 gnat_pushdecl (type_decl, gnat_node);
1371 process_attributes (type_decl, attr_list);
1373 /* Pass type declaration information to the debugger unless this is an
1374 UNCONSTRAINED_ARRAY_TYPE, which the debugger does not support,
1375 and ENUMERAL_TYPE or RECORD_TYPE which is handled separately, or
1376 type for which debugging information was not requested. */
1377 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
1378 DECL_IGNORED_P (type_decl) = 1;
1379 else if (code != ENUMERAL_TYPE
1380 && (code != RECORD_TYPE || TYPE_IS_FAT_POINTER_P (type))
1381 && !((code == POINTER_TYPE || code == REFERENCE_TYPE)
1382 && TYPE_IS_DUMMY_P (TREE_TYPE (type))))
1383 rest_of_type_decl_compilation (type_decl);
1388 /* Helper for create_var_decl and create_true_var_decl. Returns a GCC VAR_DECL
1391 VAR_NAME gives the name of the variable. ASM_NAME is its assembler name
1392 (if provided). TYPE is its data type (a GCC ..._TYPE node). VAR_INIT is
1393 the GCC tree for an optional initial expression; NULL_TREE if none.
1395 CONST_FLAG is true if this variable is constant, in which case we might
1396 return a CONST_DECL node unless CONST_DECL_ALLOWED_FLAG is false.
1398 PUBLIC_FLAG is true if this definition is to be made visible outside of
1399 the current compilation unit. This flag should be set when processing the
1400 variable definitions in a package specification. EXTERN_FLAG is nonzero
1401 when processing an external variable declaration (as opposed to a
1402 definition: no storage is to be allocated for the variable here).
1404 STATIC_FLAG is only relevant when not at top level. In that case
1405 it indicates whether to always allocate storage to the variable.
1407 GNAT_NODE is used for the position of the decl. */
1410 create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
1411 bool const_flag, bool const_decl_allowed_flag,
1412 bool public_flag, bool extern_flag, bool static_flag,
1413 struct attrib *attr_list, Node_Id gnat_node)
/* Whether VAR_INIT qualifies as a constant initializer: at global level
   (or for a static) it must be valid for static initialization; locally
   TREE_CONSTANT suffices.  (The start of this declaration is elided in
   this listing.)  */
1417 && TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
1418 && (global_bindings_p () || static_flag
1419 ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init)) != 0
1420 : TREE_CONSTANT (var_init)));
1422 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
1423 case the initializer may be used in-lieu of the DECL node (as done in
1424 Identifier_to_gnu). This is useful to prevent the need of elaboration
1425 code when an identifier for which such a decl is made is in turn used as
1426 an initializer. We used to rely on CONST vs VAR_DECL for this purpose,
1427 but extra constraints apply to this choice (see below) and are not
1428 relevant to the distinction we wish to make. */
1429 bool constant_p = const_flag && init_const;
1431 /* The actual DECL node. CONST_DECL was initially intended for enumerals
1432 and may be used for scalars in general but not for aggregates. */
1434 = build_decl ((constant_p && const_decl_allowed_flag
1435 && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
1438 /* If this is external, throw away any initializations (they will be done
1439 elsewhere) unless this is a a constant for which we would like to remain
1440 able to get the initializer. If we are defining a global here, leave a
1441 constant initialization and save any variable elaborations for the
1442 elaboration routine. If we are just annotating types, throw away the
1443 initialization if it isn't a constant. */
1444 if ((extern_flag && !constant_p)
1445 || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
1446 var_init = NULL_TREE;
1448 /* At the global level, an initializer requiring code to be generated
1449 produces elaboration statements. Check that such statements are allowed,
1450 that is, not violating a No_Elaboration_Code restriction. */
1451 if (global_bindings_p () && var_init != 0 && ! init_const)
1452 Check_Elaboration_Code_Allowed (gnat_node);
1454 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
1455 try to fiddle with DECL_COMMON. However, on platforms that don't
1456 support global BSS sections, uninitialized global variables would
1457 go in DATA instead, thus increasing the size of the executable. */
1459 && TREE_CODE (var_decl) == VAR_DECL
1460 && !have_global_bss_p ())
1461 DECL_COMMON (var_decl) = 1;
/* Fill in the standard DECL flags from the caller's arguments.  */
1462 DECL_INITIAL (var_decl) = var_init;
1463 TREE_READONLY (var_decl) = const_flag;
1464 DECL_EXTERNAL (var_decl) = extern_flag;
1465 TREE_PUBLIC (var_decl) = public_flag || extern_flag;
1466 TREE_CONSTANT (var_decl) = constant_p;
1467 TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
1468 = TYPE_VOLATILE (type);
1470 /* If it's public and not external, always allocate storage for it.
1471 At the global binding level we need to allocate static storage for the
1472 variable if and only if it's not external. If we are not at the top level
1473 we allocate automatic storage unless requested not to. */
1474 TREE_STATIC (var_decl)
1475 = public_flag || (global_bindings_p () ? !extern_flag : static_flag);
1477 if (asm_name && VAR_OR_FUNCTION_DECL_P (var_decl))
1478 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
1480 process_attributes (var_decl, attr_list);
1482 /* Add this decl to the current binding level. */
1483 gnat_pushdecl (var_decl, gnat_node);
/* A volatile object must be addressable so its accesses are performed
   in memory.  */
1485 if (TREE_SIDE_EFFECTS (var_decl))
1486 TREE_ADDRESSABLE (var_decl) = 1;
/* Emit the definition: full decl compilation for globals, storage
   expansion for locals.  CONST_DECLs need neither.  */
1488 if (TREE_CODE (var_decl) != CONST_DECL)
1490 if (global_bindings_p ())
1491 rest_of_decl_compilation (var_decl, true, 0);
1494 expand_decl (var_decl);
1499 /* Wrapper around create_var_decl_1 for cases where we don't care whether
1500 a VAR or a CONST decl node is created. */
1503 create_var_decl (tree var_name, tree asm_name, tree type, tree var_init,
1504 bool const_flag, bool public_flag, bool extern_flag,
1505 bool static_flag, struct attrib *attr_list,
/* Forward everything to the worker; the CONST_DECL_ALLOWED_FLAG argument
   (true here, per the comment above) sits on the elided line 1509.  */
1508 return create_var_decl_1 (var_name, asm_name, type, var_init,
1510 public_flag, extern_flag, static_flag,
1511 attr_list, gnat_node);
1514 /* Wrapper around create_var_decl_1 for cases where a VAR_DECL node is
1515 required. The primary intent is for DECL_CONST_CORRESPONDING_VARs, which
1516 must be VAR_DECLs and on which we want TREE_READONLY set to have them
1517 possibly assigned to a readonly data section. */
1520 create_true_var_decl (tree var_name, tree asm_name, tree type, tree var_init,
1521 bool const_flag, bool public_flag, bool extern_flag,
1522 bool static_flag, struct attrib *attr_list,
/* Same forwarding as create_var_decl, but CONST_DECL_ALLOWED_FLAG (on the
   elided line 1526) must be false so a VAR_DECL is always produced.  */
1525 return create_var_decl_1 (var_name, asm_name, type, var_init,
1527 public_flag, extern_flag, static_flag,
1528 attr_list, gnat_node);
1531 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
1534 aggregate_type_contains_array_p (tree type)
1536 switch (TREE_CODE (type))
/* Record-like aggregates: recurse into every aggregate-typed field.
   (The RECORD_TYPE/UNION_TYPE case labels and the ARRAY_TYPE case,
   which presumably returns true, are elided in this listing.)  */
1540 case QUAL_UNION_TYPE:
1543 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1544 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
1545 && aggregate_type_contains_array_p (TREE_TYPE (field)))
1558 /* Returns a FIELD_DECL node. FIELD_NAME the field name, FIELD_TYPE is its
1559 type, and RECORD_TYPE is the type of the parent. PACKED is nonzero if
1560 this field is in a record type with a "pragma pack". If SIZE is nonzero
1561 it is the specified size for this field. If POS is nonzero, it is the bit
1562 position. If ADDRESSABLE is nonzero, it means we are allowed to take
1563 the address of this field for aliasing purposes. If it is negative, we
1564 should not make a bitfield, which is used by make_aligning_type. */
1567 create_field_decl (tree field_name, tree field_type, tree record_type,
1568 int packed, tree size, tree pos, int addressable)
1570 tree field_decl = build_decl (FIELD_DECL, field_name, field_type);
1572 DECL_CONTEXT (field_decl) = record_type;
1573 TREE_READONLY (field_decl) = TYPE_READONLY (field_type);
1575 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
1576 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
1577 Likewise for an aggregate without specified position that contains an
1578 array, because in this case slices of variable length of this array
1579 must be handled by GCC and variable-sized objects need to be aligned
1580 to at least a byte boundary. */
1581 if (packed && (TYPE_MODE (field_type) == BLKmode
1583 && AGGREGATE_TYPE_P (field_type)
1584 && aggregate_type_contains_array_p (field_type))))
1585 DECL_ALIGN (field_decl) = BITS_PER_UNIT;
1587 /* If a size is specified, use it. Otherwise, if the record type is packed
1588 compute a size to use, which may differ from the object's natural size.
1589 We always set a size in this case to trigger the checks for bitfield
1590 creation below, which is typically required when no position has been
1593 size = convert (bitsizetype, size);
1594 else if (packed == 1)
/* PACKED == 1 means bit-packing: use the RM size of the type rather
   than its padded object size.  */
1596 size = rm_size (field_type);
1598 /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
1600 if (TREE_CODE (size) == INTEGER_CST
1601 && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
1602 size = round_up (size, BITS_PER_UNIT);
1605 /* If we may, according to ADDRESSABLE, make a bitfield if a size is
1606 specified for two reasons: first if the size differs from the natural
1607 size. Second, if the alignment is insufficient. There are a number of
1608 ways the latter can be true.
1610 We never make a bitfield if the type of the field has a nonconstant size,
1611 because no such entity requiring bitfield operations should reach here.
1613 We do *preventively* make a bitfield when there might be the need for it
1614 but we don't have all the necessary information to decide, as is the case
1615 of a field with no specified position in a packed record.
1617 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
1618 in layout_decl or finish_record_type to clear the bit_field indication if
1619 it is in fact not needed. */
1620 if (addressable >= 0
1622 && TREE_CODE (size) == INTEGER_CST
1623 && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
1624 && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
1625 || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
1627 || (TYPE_ALIGN (record_type) != 0
1628 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
1630 DECL_BIT_FIELD (field_decl) = 1;
1631 DECL_SIZE (field_decl) = size;
/* Without packing or an explicit position, cap the bitfield's
   alignment at the record's alignment (when the record has one).  */
1632 if (!packed && !pos)
1633 DECL_ALIGN (field_decl)
1634 = (TYPE_ALIGN (record_type) != 0
1635 ? MIN (TYPE_ALIGN (record_type), TYPE_ALIGN (field_type))
1636 : TYPE_ALIGN (field_type));
1639 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
1641 /* Bump the alignment if need be, either for bitfield/packing purposes or
1642 to satisfy the type requirements if no such consideration applies. When
1643 we get the alignment from the type, indicate if this is from an explicit
1644 user request, which prevents stor-layout from lowering it later on. */
1647 = (DECL_BIT_FIELD (field_decl) ? 1
1648 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT : 0);
1650 if (bit_align > DECL_ALIGN (field_decl))
1651 DECL_ALIGN (field_decl) = bit_align;
1652 else if (!bit_align && TYPE_ALIGN (field_type) > DECL_ALIGN (field_decl))
1654 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
1655 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (field_type);
1661 /* We need to pass in the alignment the DECL is known to have.
1662 This is the lowest-order bit set in POS, but no more than
1663 the alignment of the record, if one is specified. Note
1664 that an alignment of 0 is taken as infinite. */
1665 unsigned int known_align;
1667 if (host_integerp (pos, 1))
1668 known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
1670 known_align = BITS_PER_UNIT;
1672 if (TYPE_ALIGN (record_type)
1673 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
1674 known_align = TYPE_ALIGN (record_type);
1676 layout_decl (field_decl, known_align);
1677 SET_DECL_OFFSET_ALIGN (field_decl,
1678 host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
1680 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
1681 &DECL_FIELD_BIT_OFFSET (field_decl),
1682 DECL_OFFSET_ALIGN (field_decl), pos);
/* Remember that this field's position came from a representation
   clause.  */
1684 DECL_HAS_REP_P (field_decl) = 1;
1687 /* In addition to what our caller says, claim the field is addressable if we
1688 know that its type is not suitable.
1690 The field may also be "technically" nonaddressable, meaning that even if
1691 we attempt to take the field's address we will actually get the address
1692 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
1693 value we have at this point is not accurate enough, so we don't account
1694 for this here and let finish_record_type decide. */
1695 if (!type_for_nonaliased_component_p (field_type))
1698 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
1703 /* Returns a PARM_DECL node. PARAM_NAME is the name of the parameter,
1704 PARAM_TYPE is its type. READONLY is true if the parameter is
1705 readonly (either an In parameter or an address of a pass-by-ref
1709 create_param_decl (tree param_name, tree param_type, bool readonly)
1711 tree param_decl = build_decl (PARM_DECL, param_name, param_type);
1713 /* Honor targetm.calls.promote_prototypes(), as not doing so can
1714 lead to various ABI violations. */
1715 if (targetm.calls.promote_prototypes (param_type)
1716 && (TREE_CODE (param_type) == INTEGER_TYPE
1717 || TREE_CODE (param_type) == ENUMERAL_TYPE)
1718 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
1720 /* We have to be careful about biased types here. Make a subtype
1721 of integer_type_node with the proper biasing. */
1722 if (TREE_CODE (param_type) == INTEGER_TYPE
1723 && TYPE_BIASED_REPRESENTATION_P (param_type))
/* Build a copied range type over integer_type_node so the biased
   flag can be set without touching a shared type.  */
1726 = copy_type (build_range_type (integer_type_node,
1727 TYPE_MIN_VALUE (param_type),
1728 TYPE_MAX_VALUE (param_type)));
1730 TYPE_BIASED_REPRESENTATION_P (param_type) = 1;
/* Unbiased narrow integers are simply promoted to int.  */
1733 param_type = integer_type_node;
/* DECL_ARG_TYPE is the type used for the call protocol, which may be
   the promoted type computed above.  */
1736 DECL_ARG_TYPE (param_decl) = param_type;
1737 TREE_READONLY (param_decl) = readonly;
1741 /* Given a DECL and ATTR_LIST, process the listed attributes. */
1744 process_attributes (tree decl, struct attrib *attr_list)
1746 for (; attr_list; attr_list = attr_list->next)
1747 switch (attr_list->type)
/* Machine attributes are handed to the generic GCC attribute
   machinery.  */
1749 case ATTR_MACHINE_ATTRIBUTE:
1750 decl_attributes (&decl, tree_cons (attr_list->name, attr_list->args,
1752 ATTR_FLAG_TYPE_IN_PLACE);
/* Link alias: emit an assembler alias for a locally defined entity.  */
1755 case ATTR_LINK_ALIAS:
1756 if (! DECL_EXTERNAL (decl))
1758 TREE_STATIC (decl) = 1;
1759 assemble_alias (decl, attr_list->name);
/* Weak external: only on targets that support weak symbols; otherwise
   warn at the attribute's source position.  */
1763 case ATTR_WEAK_EXTERNAL:
1765 declare_weak (decl);
1767 post_error ("?weak declarations not supported on this target",
1768 attr_list->error_point);
/* Link section: place the entity in a named section when the target
   supports them; clearing DECL_COMMON keeps it out of common.  */
1771 case ATTR_LINK_SECTION:
1772 if (targetm.have_named_sections)
1774 DECL_SECTION_NAME (decl)
1775 = build_string (IDENTIFIER_LENGTH (attr_list->name),
1776 IDENTIFIER_POINTER (attr_list->name));
1777 DECL_COMMON (decl) = 0;
1780 post_error ("?section attributes are not supported for this target",
1781 attr_list->error_point);
/* Constructor/destructor: run the subprogram at program start/exit;
   TREE_USED prevents it from being discarded as unreferenced.  */
1784 case ATTR_LINK_CONSTRUCTOR:
1785 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1786 TREE_USED (decl) = 1;
1789 case ATTR_LINK_DESTRUCTOR:
1790 DECL_STATIC_DESTRUCTOR (decl) = 1;
1791 TREE_USED (decl) = 1;
1796 /* Record a global renaming pointer. */
1799 record_global_renaming_pointer (tree decl)
/* DECL must already carry its renamed object; it is queued so it can be
   invalidated later by invalidate_global_renaming_pointers.  */
1801 gcc_assert (DECL_RENAMED_OBJECT (decl));
1802 VEC_safe_push (tree, gc, global_renaming_pointers, decl);
1805 /* Invalidate the global renaming pointers. */
1808 invalidate_global_renaming_pointers (void)
/* Clear the renamed-object link of every recorded decl, then release
   the vector itself.  */
1813 for (i = 0; VEC_iterate(tree, global_renaming_pointers, i, iter); i++)
1814 SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);
1816 VEC_free (tree, gc, global_renaming_pointers);
1819 /* Return true if VALUE is a known to be a multiple of FACTOR, which must be
1823 value_factor_p (tree value, HOST_WIDE_INT factor)
/* A constant is a multiple iff the remainder is zero.  */
1825 if (host_integerp (value, 1))
1826 return tree_low_cst (value, 1) % factor == 0;
/* A product is a multiple if either operand is known to be one.  */
1828 if (TREE_CODE (value) == MULT_EXPR)
1829 return (value_factor_p (TREE_OPERAND (value, 0), factor)
1830 || value_factor_p (TREE_OPERAND (value, 1), factor));
1835 /* Given 2 consecutive field decls PREV_FIELD and CURR_FIELD, return true
1836 unless we can prove these 2 fields are laid out in such a way that no gap
1837 exist between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
1838 is the distance in bits between the end of PREV_FIELD and the starting
1839 position of CURR_FIELD. It is ignored if null. */
1842 potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
1844 /* If this is the first field of the record, there cannot be any gap */
1848 /* If the previous field is a union type, then return False: The only
1849 time when such a field is not the last field of the record is when
1850 there are other components at fixed positions after it (meaning there
1851 was a rep clause for every field), in which case we don't want the
1852 alignment constraint to override them. */
1853 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
1856 /* If the distance between the end of prev_field and the beginning of
1857 curr_field is constant, then there is a gap if the value of this
1858 constant is not null. */
1859 if (offset && host_integerp (offset, 1))
1860 return !integer_zerop (offset);
1862 /* If the size and position of the previous field are constant,
1863 then check the sum of this size and position. There will be a gap
1864 iff it is not multiple of the current field alignment. */
1865 if (host_integerp (DECL_SIZE (prev_field), 1)
1866 && host_integerp (bit_position (prev_field), 1))
1867 return ((tree_low_cst (bit_position (prev_field), 1)
1868 + tree_low_cst (DECL_SIZE (prev_field), 1))
1869 % DECL_ALIGN (curr_field) != 0);
1871 /* If both the position and size of the previous field are multiples
1872 of the current field alignment, there cannot be any gap. */
1873 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
1874 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
1877 /* Fallback, return that there may be a potential gap */
1881 /* Returns a LABEL_DECL node for LABEL_NAME. */
1884 create_label_decl (tree label_name)
1886 tree label_decl = build_decl (LABEL_DECL, label_name, void_type_node);
/* Labels belong to the function being compiled, have no machine mode,
   and take their source position from the current input location.  */
1888 DECL_CONTEXT (label_decl) = current_function_decl;
1889 DECL_MODE (label_decl) = VOIDmode;
1890 DECL_SOURCE_LOCATION (label_decl) = input_location;
1895 /* Returns a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
1896 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1897 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1898 PARM_DECL nodes chained through the TREE_CHAIN field).
1900 INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
1901 appropriate fields in the FUNCTION_DECL. GNAT_NODE gives the location. */
1904 create_subprog_decl (tree subprog_name, tree asm_name,
1905 tree subprog_type, tree param_decl_list, bool inline_flag,
1906 bool public_flag, bool extern_flag,
1907 struct attrib *attr_list, Node_Id gnat_node)
1909 tree return_type = TREE_TYPE (subprog_type);
1910 tree subprog_decl = build_decl (FUNCTION_DECL, subprog_name, subprog_type);
1912 /* If this is a function nested inside an inlined external function, it
1913 means we aren't going to compile the outer function unless it is
1914 actually inlined, so do the same for us. */
1915 if (current_function_decl && DECL_INLINE (current_function_decl)
1916 && DECL_EXTERNAL (current_function_decl))
/* Fill in the standard FUNCTION_DECL flags; READONLY/VOLATILE mirror
   the "pure"/"noreturn"-style qualifiers carried on the type.  */
1919 DECL_EXTERNAL (subprog_decl) = extern_flag;
1920 TREE_PUBLIC (subprog_decl) = public_flag;
1921 TREE_STATIC (subprog_decl) = 1;
1922 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
1923 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
1924 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
1925 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
1926 DECL_RESULT (subprog_decl) = build_decl (RESULT_DECL, 0, return_type);
1927 DECL_ARTIFICIAL (DECL_RESULT (subprog_decl)) = 1;
1928 DECL_IGNORED_P (DECL_RESULT (subprog_decl)) = 1;
1930 /* TREE_ADDRESSABLE is set on the result type to request the use of the
1931 target by-reference return mechanism. This is not supported all the
1932 way down to RTL expansion with GCC 4, which ICEs on temporary creation
1933 attempts with such a type and expects DECL_BY_REFERENCE to be set on
1934 the RESULT_DECL instead - see gnat_genericize for more details. */
1935 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (subprog_decl))))
1937 tree result_decl = DECL_RESULT (subprog_decl);
1939 TREE_ADDRESSABLE (TREE_TYPE (result_decl)) = 0;
1940 DECL_BY_REFERENCE (result_decl) = 1;
/* NOTE(review): the INLINE_FLAG test guarding this line is elided in
   this listing.  */
1944 DECL_DECLARED_INLINE_P (subprog_decl) = 1;
1947 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
1949 process_attributes (subprog_decl, attr_list);
1951 /* Add this decl to the current binding level. */
1952 gnat_pushdecl (subprog_decl, gnat_node);
1954 /* Output the assembler code and/or RTL for the declaration. */
1955 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
1957 return subprog_decl;
/* Set up the framework for generating code for SUBPROG_DECL, a subprogram
body. This routine needs to be invoked before processing the declarations
appearing in the subprogram. */
begin_subprog_body (tree subprog_decl)
current_function_decl = subprog_decl;
announce_function (subprog_decl);
/* Enter a new binding level and show that all the parameters belong to
   this function: each PARM_DECL gets the subprogram as its context. */
for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
param_decl = TREE_CHAIN (param_decl))
DECL_CONTEXT (param_decl) = subprog_decl;
make_decl_rtl (subprog_decl);
/* We handle pending sizes via the elaboration of types, so we don't need to
save them. This causes them to be marked as part of the outer function
and then discarded. */
get_pending_sizes ();
/* Helper for the genericization callback. Return a dereference of VAL
if it is of a reference type. */
convert_from_reference (tree val)
tree value_type, ref;
/* Not a reference: nothing to do, hand VAL back unchanged. */
if (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE)
value_type = TREE_TYPE (TREE_TYPE (val));
ref = build1 (INDIRECT_REF, value_type, val);
/* See if what we reference is CONST or VOLATILE, which requires
looking into array types to get to the component type. */
while (TREE_CODE (value_type) == ARRAY_TYPE)
value_type = TREE_TYPE (value_type);
/* Propagate the component qualifiers onto the INDIRECT_REF node. */
= (TYPE_QUALS (value_type) & TYPE_QUAL_CONST);
TREE_THIS_VOLATILE (ref)
= (TYPE_QUALS (value_type) & TYPE_QUAL_VOLATILE);
/* A volatile dereference, or one of an expression that itself has side
   effects, must itself be flagged as having side effects. */
TREE_SIDE_EFFECTS (ref)
= (TREE_THIS_VOLATILE (ref) || TREE_SIDE_EFFECTS (val));
2019 /* Helper for the genericization callback. Returns true if T denotes
2020 a RESULT_DECL with DECL_BY_REFERENCE set. */
2023 is_byref_result (tree t)
2025 return (TREE_CODE (t) == RESULT_DECL && DECL_BY_REFERENCE (t));
/* Tree walking callback for gnat_genericize. Currently ...
o Adjust references to the function's DECL_RESULT if it is marked
DECL_BY_REFERENCE and so has had its type turned into a reference
type at the end of the function compilation. */
gnat_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
/* This implementation is modeled after what the C++ front-end is
doing, basis of the downstream passes behavior. */
tree stmt = *stmt_p;
struct pointer_set_t *p_set = (struct pointer_set_t*) data;
/* If we have a direct mention of the result decl, dereference. */
if (is_byref_result (stmt))
*stmt_p = convert_from_reference (stmt);
/* Otherwise, no need to walk the same tree twice. */
if (pointer_set_contains (p_set, stmt))
/* If we are taking the address of what now is a reference, just get the
   reference value itself, converted to the expected pointer type. */
if (TREE_CODE (stmt) == ADDR_EXPR
&& is_byref_result (TREE_OPERAND (stmt, 0)))
*stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
/* Don't dereference a by-reference RESULT_DECL inside a RETURN_EXPR. */
else if (TREE_CODE (stmt) == RETURN_EXPR
&& TREE_OPERAND (stmt, 0)
&& is_byref_result (TREE_OPERAND (stmt, 0)))
/* Don't look inside trees that cannot embed references of interest. */
else if (IS_TYPE_OR_DECL_P (stmt))
/* Remember we visited this node so later walks skip it (see the
   pointer_set_contains test above). */
pointer_set_insert (p_set, *stmt_p);
/* Perform lowering of Ada trees to GENERIC. In particular:
o Turn a DECL_BY_REFERENCE RESULT_DECL into a real by-reference decl
and adjust all the references to this decl accordingly. */
gnat_genericize (tree fndecl)
/* Prior to GCC 4, an explicit By_Reference result mechanism for a function
was handled by simply setting TREE_ADDRESSABLE on the result type.
Everything required to actually pass by invisible ref using the target
mechanism (e.g. extra parameter) was handled at RTL expansion time.
This doesn't work with GCC 4 any more for several reasons. First, the
gimplification process might need the creation of temporaries of this
type, and the gimplifier ICEs on such attempts. Second, the middle-end
now relies on a different attribute for such cases (DECL_BY_REFERENCE on
RESULT/PARM_DECLs), and expects the user invisible by-reference-ness to
be explicitly accounted for by the front-end in the function body.
We achieve the complete transformation in two steps:
1/ create_subprog_decl performs early attribute tweaks: it clears
TREE_ADDRESSABLE from the result type and sets DECL_BY_REFERENCE on
the result decl. The former ensures that the bit isn't set in the GCC
tree saved for the function, so prevents ICEs on temporary creation.
The latter we use here to trigger the rest of the processing.
2/ This function performs the type transformation on the result decl
and adjusts all the references to this decl from the function body
Clearing TREE_ADDRESSABLE from the type differs from the C++ front-end
strategy, which escapes the gimplifier temporary creation issues by
creating its own temporaries using TARGET_EXPR nodes. Our way relies
on simple specific support code in aggregate_value_p to look at the
target function result decl explicitly. */
struct pointer_set_t *p_set;
tree decl_result = DECL_RESULT (fndecl);
/* Nothing to do unless create_subprog_decl flagged the result. */
if (!DECL_BY_REFERENCE (decl_result))
/* Make the DECL_RESULT explicitly by-reference and adjust all the
occurrences in the function body using the common tree-walking facility.
We want to see every occurrence of the result decl to adjust the
referencing tree, so need to use our own pointer set to control which
trees should be visited again or not. */
p_set = pointer_set_create ();
TREE_TYPE (decl_result) = build_reference_type (TREE_TYPE (decl_result));
TREE_ADDRESSABLE (decl_result) = 0;
relayout_decl (decl_result);
walk_tree (&DECL_SAVED_TREE (fndecl), gnat_genericize_r, p_set, NULL);
pointer_set_destroy (p_set);
/* Finish the definition of the current subprogram and compile it all the way
to assembler language output. BODY is the tree corresponding to
   the subprogram body. */
end_subprog_body (tree body)
tree fndecl = current_function_decl;
/* Mark the BLOCK for this level as being for this function and pop the
level. Since the vars in it are the parameters, clear them. */
BLOCK_VARS (current_binding_level->block) = 0;
BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
DECL_INITIAL (fndecl) = current_binding_level->block;
/* Deal with inline. If declared inline or we should default to inline,
set the flag in the decl. */
DECL_INLINE (fndecl)
= DECL_DECLARED_INLINE_P (fndecl) || flag_inline_trees == 2;
/* We handle pending sizes via the elaboration of types, so we don't
need to save them. */
get_pending_sizes ();
/* Mark the RESULT_DECL as being in this subprogram. */
DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
DECL_SAVED_TREE (fndecl) = body;
/* Pop back to the enclosing function (or NULL at top level). */
current_function_decl = DECL_CONTEXT (fndecl);
/* We cannot track the location of errors past this point. */
error_gnat_node = Empty;
/* If we're only annotating types, don't actually compile this function. */
if (type_annotate_only)
/* Perform the required pre-gimplification transformations on the tree. */
gnat_genericize (fndecl);
/* We do different things for nested and non-nested functions.
??? This should be in cgraph. */
if (!DECL_CONTEXT (fndecl))
gnat_gimplify_function (fndecl);
cgraph_finalize_function (fndecl, false);
/* Register this function with cgraph just far enough to get it
added to our parent's nested function list. */
(void) cgraph_node (fndecl);
/* Convert FNDECL's code to GIMPLE and handle any nested functions. */
gnat_gimplify_function (tree fndecl)
struct cgraph_node *cgn;
/* Dump the tree before and after gimplification for -fdump-tree-*. */
dump_function (TDI_original, fndecl);
gimplify_function_tree (fndecl);
dump_function (TDI_generic, fndecl);
/* Convert all nested functions to GIMPLE now. We do things in this order
so that items like VLA sizes are expanded properly in the context of the
correct function. */
cgn = cgraph_node (fndecl);
for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
gnat_gimplify_function (cgn->decl);
/* Record DECL, a builtin function decl, in the current binding level.
   Empty is passed as the GNAT node since there is no Ada source location
   associated with a compiler builtin. */
gnat_builtin_function (tree decl)
gnat_pushdecl (decl, Empty);
/* Handle a "const" attribute; arguments as in
struct attribute_spec.handler. */
handle_const_attribute (tree *node, tree ARG_UNUSED (name),
tree ARG_UNUSED (args), int ARG_UNUSED (flags),
/* "const" only makes sense on a function: mark it readonly (pure
   in the RTL sense); otherwise drop the attribute. */
if (TREE_CODE (*node) == FUNCTION_DECL)
TREE_READONLY (*node) = 1;
*no_add_attrs = true;
/* Handle a "nothrow" attribute; arguments as in
struct attribute_spec.handler. */
handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
tree ARG_UNUSED (args), int ARG_UNUSED (flags),
/* "nothrow" only makes sense on a function: record that it cannot
   propagate exceptions; otherwise drop the attribute. */
if (TREE_CODE (*node) == FUNCTION_DECL)
TREE_NOTHROW (*node) = 1;
*no_add_attrs = true;
/* Return an integer type with the number of bits of precision given by
PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
it is a signed type. */
gnat_type_for_size (unsigned precision, int unsignedp)
/* Small precisions are cached; reuse a previously made type if any. */
if (precision <= 2 * MAX_BITS_PER_WORD
&& signed_and_unsigned_types[precision][unsignedp])
return signed_and_unsigned_types[precision][unsignedp];
t = make_unsigned_type (precision);
t = make_signed_type (precision);
/* Cache the new type when it fits in the table. */
if (precision <= 2 * MAX_BITS_PER_WORD)
signed_and_unsigned_types[precision][unsignedp] = t;
/* Give the type a name like UNSIGNED_32 / SIGNED_32 for debug output. */
sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
TYPE_NAME (t) = get_identifier (type_name);
/* Likewise for floating-point types: return a REAL_TYPE with the given
   PRECISION and machine MODE, caching one type per mode. */
float_type_for_precision (int precision, enum machine_mode mode)
if (float_types[(int) mode])
return float_types[(int) mode];
float_types[(int) mode] = t = make_node (REAL_TYPE);
TYPE_PRECISION (t) = precision;
/* Laying out the type must have picked exactly the requested mode. */
gcc_assert (TYPE_MODE (t) == mode);
/* Name it FLOAT_<precision> for debug output. */
sprintf (type_name, "FLOAT_%d", precision);
TYPE_NAME (t) = get_identifier (type_name);
/* Return a data type that has machine mode MODE. UNSIGNEDP selects
an unsigned type; otherwise a signed type is returned. */
gnat_type_for_mode (enum machine_mode mode, int unsignedp)
/* Dispatch on the kind of mode; BLKmode and complex modes have no
   corresponding GNAT type here. */
if (mode == BLKmode)
else if (mode == VOIDmode)
return void_type_node;
else if (COMPLEX_MODE_P (mode))
else if (SCALAR_FLOAT_MODE_P (mode))
return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
else if (SCALAR_INT_MODE_P (mode))
return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
/* Return the unsigned version of a TYPE_NODE, a scalar type. */
gnat_unsigned_type (tree type_node)
tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
/* For modular types, keep the link to the original (base) type in
   TREE_TYPE of a fresh copy so modular semantics are preserved. */
if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
type = copy_node (type);
TREE_TYPE (type) = type_node;
/* Likewise when the input is a subtype of a modular type. */
else if (TREE_TYPE (type_node)
&& TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
&& TYPE_MODULAR_P (TREE_TYPE (type_node)))
type = copy_node (type);
TREE_TYPE (type) = TREE_TYPE (type_node);
/* Return the signed version of a TYPE_NODE, a scalar type.
   Mirror image of gnat_unsigned_type above. */
gnat_signed_type (tree type_node)
tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
/* For modular types, keep the link to the original (base) type in
   TREE_TYPE of a fresh copy so modular semantics are preserved. */
if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
type = copy_node (type);
TREE_TYPE (type) = type_node;
/* Likewise when the input is a subtype of a modular type. */
else if (TREE_TYPE (type_node)
&& TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
&& TYPE_MODULAR_P (TREE_TYPE (type_node)))
type = copy_node (type);
TREE_TYPE (type) = TREE_TYPE (type_node);
/* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
transparently converted to each other. */
gnat_types_compatible_p (tree t1, tree t2)
enum tree_code code;
/* This is the default criterion. */
if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
/* We only check structural equivalence here. */
if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
/* Array types are also compatible if they are constrained and have
the same component type and the same domain. */
if (code == ARRAY_TYPE
&& TREE_TYPE (t1) == TREE_TYPE (t2)
&& tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
&& tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
TYPE_MAX_VALUE (TYPE_DOMAIN (t2))))
/* Padding record types are also compatible if they pad the same
type and have the same constant size. */
if (code == RECORD_TYPE
&& TYPE_IS_PADDING_P (t1) && TYPE_IS_PADDING_P (t2)
&& TREE_TYPE (TYPE_FIELDS (t1)) == TREE_TYPE (TYPE_FIELDS (t2))
&& tree_int_cst_equal (TYPE_SIZE (t1), TYPE_SIZE (t2)))
/* EXP is an expression for the size of an object. If this size contains
discriminant references, replace them with the maximum (if MAX_P) or
minimum (if !MAX_P) possible value of the discriminant. */
max_size (tree exp, bool max_p)
enum tree_code code = TREE_CODE (exp);
tree type = TREE_TYPE (exp);
/* Dispatch on the tree-code class; recursion handles the operands. */
switch (TREE_CODE_CLASS (code))
case tcc_declaration:
/* For a call, rebuild it with each argument maximized in turn. */
if (code == CALL_EXPR)
int i, n = call_expr_nargs (exp);
argarray = (tree *) alloca (n * sizeof (tree));
for (i = 0; i < n; i++)
argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
/* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
modify. Otherwise, we treat it like a variable. */
if (!CONTAINS_PLACEHOLDER_P (exp))
type = TREE_TYPE (TREE_OPERAND (exp, 1));
/* A discriminant reference: substitute the bound of its type. */
max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), true);
case tcc_comparison:
return max_p ? size_one_node : size_zero_node;
case tcc_expression:
/* Handle operators by arity. */
switch (TREE_CODE_LENGTH (code))
if (code == NON_LVALUE_EXPR)
return max_size (TREE_OPERAND (exp, 0), max_p);
/* NEGATE flips the direction of the bound being sought. */
fold_build1 (code, type,
max_size (TREE_OPERAND (exp, 0),
code == NEGATE_EXPR ? !max_p : max_p));
if (code == COMPOUND_EXPR)
return max_size (TREE_OPERAND (exp, 1), max_p);
/* Calculate "(A ? B : C) - D" as "A ? B - D : C - D" which
may provide a tighter bound on max_size. */
if (code == MINUS_EXPR
&& TREE_CODE (TREE_OPERAND (exp, 0)) == COND_EXPR)
tree lhs = fold_build2 (MINUS_EXPR, type,
TREE_OPERAND (TREE_OPERAND (exp, 0), 1),
TREE_OPERAND (exp, 1));
tree rhs = fold_build2 (MINUS_EXPR, type,
TREE_OPERAND (TREE_OPERAND (exp, 0), 2),
TREE_OPERAND (exp, 1));
return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
max_size (lhs, max_p),
max_size (rhs, max_p));
/* General binary case: maximize both sides, flipping the direction
   for the subtrahend of a MINUS_EXPR. */
tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
tree rhs = max_size (TREE_OPERAND (exp, 1),
code == MINUS_EXPR ? !max_p : max_p);
/* Special-case wanting the maximum value of a MIN_EXPR.
In that case, if one side overflows, return the other.
sizetype is signed, but we know sizes are non-negative.
Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
overflowing or the maximum possible value and the RHS
&& TREE_CODE (rhs) == INTEGER_CST
&& TREE_OVERFLOW (rhs))
&& TREE_CODE (lhs) == INTEGER_CST
&& TREE_OVERFLOW (lhs))
else if ((code == MINUS_EXPR || code == PLUS_EXPR)
&& ((TREE_CODE (lhs) == INTEGER_CST
&& TREE_OVERFLOW (lhs))
|| operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
&& !TREE_CONSTANT (rhs))
return fold_build2 (code, type, lhs, rhs);
if (code == SAVE_EXPR)
else if (code == COND_EXPR)
/* MAX of the two arms when maximizing, MIN when minimizing. */
return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
max_size (TREE_OPERAND (exp, 1), max_p),
max_size (TREE_OPERAND (exp, 2), max_p));
/* Other tree classes cannot happen. */
/* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
Return a constructor for the template. */
build_template (tree template_type, tree array_type, tree expr)
tree template_elts = NULL_TREE;
tree bound_list = NULL_TREE;
/* Strip padding and justified-modular wrappers to reach the array. */
while (TREE_CODE (array_type) == RECORD_TYPE
&& (TYPE_IS_PADDING_P (array_type)
|| TYPE_JUSTIFIED_MODULAR_P (array_type)))
array_type = TREE_TYPE (TYPE_FIELDS (array_type));
if (TREE_CODE (array_type) == ARRAY_TYPE
|| (TREE_CODE (array_type) == INTEGER_TYPE
&& TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
bound_list = TYPE_ACTUAL_BOUNDS (array_type);
/* First make the list for a CONSTRUCTOR for the template. Go down the
field list of the template instead of the type chain because this
array might be an Ada array of arrays and we can't tell where the
nested arrays stop being the underlying object. */
for (field = TYPE_FIELDS (template_type); field;
? (bound_list = TREE_CHAIN (bound_list))
: (array_type = TREE_TYPE (array_type))),
field = TREE_CHAIN (TREE_CHAIN (field)))
tree bounds, min, max;
/* If we have a bound list, get the bounds from there. Likewise
for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
This will give us a maximum range. */
bounds = TREE_VALUE (bound_list);
else if (TREE_CODE (array_type) == ARRAY_TYPE)
bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
else if (expr && TREE_CODE (expr) == PARM_DECL
&& DECL_BY_COMPONENT_PTR_P (expr))
bounds = TREE_TYPE (field);
/* Convert the bounds to the types of the min/max field pair. */
min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
max = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MAX_VALUE (bounds));
/* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
substitute it from OBJECT. */
min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
/* Accumulate in reverse; nreverse below restores field order. */
template_elts = tree_cons (TREE_CHAIN (field), max,
tree_cons (field, min, template_elts));
return gnat_build_constructor (template_type, nreverse (template_elts));
/* Build a VMS descriptor from a Mechanism_Type, which must specify
a descriptor type, and the GCC type of an object. Each FIELD_DECL
in the type contains in its DECL_INITIAL the expression to use when
a constructor is made for the type. GNAT_ENTITY is an entity used
to print out an error message if the mechanism cannot be applied to
an object of that type and also for the name. */
build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
tree record_type = make_node (RECORD_TYPE);
tree pointer32_type;
tree field_list = 0;
/* If TYPE is an unconstrained array, use the underlying array type. */
if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
/* If this is an array, compute the number of dimensions in the array,
get the index types, and point to the inner type. */
if (TREE_CODE (type) != ARRAY_TYPE)
for (ndim = 1, inner_type = type;
TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
&& TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
ndim++, inner_type = TREE_TYPE (inner_type))
idx_arr = (tree *) alloca (ndim * sizeof (tree));
/* Fortran arrays store dimensions in reverse order except for NCA. */
if (mech != By_Descriptor_NCA
&& TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
for (i = ndim - 1, inner_type = type;
i--, inner_type = TREE_TYPE (inner_type))
idx_arr[i] = TYPE_DOMAIN (inner_type);
for (i = 0, inner_type = type;
i++, inner_type = TREE_TYPE (inner_type))
idx_arr[i] = TYPE_DOMAIN (inner_type);
/* Now get the DTYPE value. */
switch (TREE_CODE (type))
/* VAX floating-point types are distinguished by their digits value;
   the numeric codes below are the VMS DTYPE$ codes. */
if (TYPE_VAX_FLOATING_POINT_P (type))
switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
dtype = TYPE_UNSIGNED (type) ? 2 : 6;
dtype = TYPE_UNSIGNED (type) ? 3 : 7;
dtype = TYPE_UNSIGNED (type) ? 4 : 8;
dtype = TYPE_UNSIGNED (type) ? 5 : 9;
dtype = TYPE_UNSIGNED (type) ? 25 : 26;
dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
&& TYPE_VAX_FLOATING_POINT_P (type))
switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
/* Get the CLASS value. */
case By_Descriptor_A:
case By_Descriptor_NCA:
case By_Descriptor_SB:
case By_Descriptor_S:
/* Make the type for a descriptor for VMS. The first four fields
are the same for all types. */
= chainon (field_list,
make_descriptor_field
("LENGTH", gnat_type_for_size (16, 1), record_type,
size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));
field_list = chainon (field_list,
make_descriptor_field ("DTYPE",
gnat_type_for_size (8, 1),
record_type, size_int (dtype)));
field_list = chainon (field_list,
make_descriptor_field ("CLASS",
gnat_type_for_size (8, 1),
record_type, size_int (class)));
/* Of course this will crash at run-time if the address space is not
within the low 32 bits, but there is nothing else we can do. */
pointer32_type = build_pointer_type_for_mode (type, SImode, false);
= chainon (field_list,
make_descriptor_field
("POINTER", pointer32_type, record_type,
build_unary_op (ADDR_EXPR,
build0 (PLACEHOLDER_EXPR, type))));
/* Descriptor-class-specific trailing fields. */
case By_Descriptor_S:
case By_Descriptor_SB:
= chainon (field_list,
make_descriptor_field
("SB_L1", gnat_type_for_size (32, 1), record_type,
TREE_CODE (type) == ARRAY_TYPE
? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
= chainon (field_list,
make_descriptor_field
("SB_U1", gnat_type_for_size (32, 1), record_type,
TREE_CODE (type) == ARRAY_TYPE
? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
case By_Descriptor_A:
case By_Descriptor_NCA:
field_list = chainon (field_list,
make_descriptor_field ("SCALE",
gnat_type_for_size (8, 1),
field_list = chainon (field_list,
make_descriptor_field ("DIGITS",
gnat_type_for_size (8, 1),
= chainon (field_list,
make_descriptor_field
("AFLAGS", gnat_type_for_size (8, 1), record_type,
size_int (mech == By_Descriptor_NCA
/* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
: (TREE_CODE (type) == ARRAY_TYPE
&& TYPE_CONVENTION_FORTRAN_P (type)
field_list = chainon (field_list,
make_descriptor_field ("DIMCT",
gnat_type_for_size (8, 1),
field_list = chainon (field_list,
make_descriptor_field ("ARSIZE",
gnat_type_for_size (32, 1),
size_in_bytes (type)));
/* Now build a pointer to the 0,0,0... element. */
tem = build0 (PLACEHOLDER_EXPR, type);
for (i = 0, inner_type = type; i < ndim;
i++, inner_type = TREE_TYPE (inner_type))
tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
convert (TYPE_DOMAIN (inner_type), size_zero_node),
NULL_TREE, NULL_TREE);
= chainon (field_list,
make_descriptor_field
build_pointer_type_for_mode (inner_type, SImode, false),
build_pointer_type_for_mode (inner_type, SImode,
/* Next come the addressing coefficients. */
tem = size_one_node;
for (i = 0; i < ndim; i++)
/* Multiply by the extent of each dimension: (max - min + 1). */
= size_binop (MULT_EXPR, tem,
size_binop (PLUS_EXPR,
size_binop (MINUS_EXPR,
TYPE_MAX_VALUE (idx_arr[i]),
TYPE_MIN_VALUE (idx_arr[i])),
/* NCA uses S<i> stride fields, A uses M<i> multiplier fields. */
fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
fname[1] = '0' + i, fname[2] = 0;
= chainon (field_list,
make_descriptor_field (fname,
gnat_type_for_size (32, 1),
record_type, idx_length));
if (mech == By_Descriptor_NCA)
/* Finally here are the bounds. */
for (i = 0; i < ndim; i++)
fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
= chainon (field_list,
make_descriptor_field
(fname, gnat_type_for_size (32, 1), record_type,
TYPE_MIN_VALUE (idx_arr[i])));
= chainon (field_list,
make_descriptor_field
(fname, gnat_type_for_size (32, 1), record_type,
TYPE_MAX_VALUE (idx_arr[i])));
post_error ("unsupported descriptor type for &", gnat_entity);
finish_record_type (record_type, field_list, 0, true);
/* Declare the record under the name <entity>___DESC for debugging. */
create_type_decl (create_concat_name (gnat_entity, "DESC"), record_type,
NULL, true, false, gnat_entity);
2908 /* Utility routine for above code to make a field. */
2911 make_descriptor_field (const char *name, tree type,
2912 tree rec_type, tree initial)
2915 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
2917 DECL_INITIAL (field) = initial;
/* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
pointer or fat pointer type. GNAT_SUBPROG is the subprogram to which
the VMS descriptor is passed. */
convert_vms_descriptor (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
/* The CLASS field is the 3rd field in the descriptor. */
tree class = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type)));
/* The POINTER field is the 4th field in the descriptor. */
tree pointer = TREE_CHAIN (class);
/* Retrieve the value of the POINTER field. */
= build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);
/* A thin pointer target: the data pointer converts directly. */
if (POINTER_TYPE_P (gnu_type))
return convert (gnu_type, gnu_expr);
else if (TYPE_FAT_POINTER_P (gnu_type))
tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
tree template_type = TREE_TYPE (p_bounds_type);
tree min_field = TYPE_FIELDS (template_type);
tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
tree template, template_addr, aflags, dimct, t, u;
/* See the head comment of build_vms_descriptor. */
int iclass = TREE_INT_CST_LOW (DECL_INITIAL (class));
/* Convert POINTER to the type of the P_ARRAY field. */
gnu_expr = convert (p_array_type, gnu_expr);
case 1: /* Class S */
case 15: /* Class SB */
/* Build {1, LENGTH} template; LENGTH is the 1st field. */
t = TYPE_FIELDS (desc_type);
t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
t = tree_cons (min_field,
convert (TREE_TYPE (min_field), integer_one_node),
tree_cons (max_field,
convert (TREE_TYPE (max_field), t),
template = gnat_build_constructor (template_type, t);
template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);
/* For class S, we are done. */
/* Test that we really have a SB descriptor, like DEC Ada. */
t = build3 (COMPONENT_REF, TREE_TYPE (class), desc, class, NULL);
u = convert (TREE_TYPE (class), DECL_INITIAL (class));
u = build_binary_op (EQ_EXPR, integer_type_node, t, u);
/* If so, there is already a template in the descriptor and
it is located right after the POINTER field. */
t = TREE_CHAIN (pointer);
template = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
/* Otherwise use the {1, LENGTH} template we build above. */
template_addr = build3 (COND_EXPR, p_bounds_type, u,
build_unary_op (ADDR_EXPR, p_bounds_type,
case 4: /* Class A */
/* The AFLAGS field is the 7th field in the descriptor. */
t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer)));
aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
/* The DIMCT field is the 8th field in the descriptor. */
dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
/* Raise CONSTRAINT_ERROR if either more than 1 dimension
or FL_COEFF or FL_BOUNDS not set. */
u = build_int_cst (TREE_TYPE (aflags), 192);
u = build_binary_op (TRUTH_OR_EXPR, integer_type_node,
build_binary_op (NE_EXPR, integer_type_node,
convert (TREE_TYPE (dimct),
build_binary_op (NE_EXPR, integer_type_node,
build2 (BIT_AND_EXPR,
add_stmt (build3 (COND_EXPR, void_type_node, u,
build_call_raise (CE_Length_Check_Failed, Empty,
N_Raise_Constraint_Error),
/* There is already a template in the descriptor and it is
located at the start of block 3 (12th field). */
t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (t))));
template = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
template_addr = build_unary_op (ADDR_EXPR, p_bounds_type, template);
case 10: /* Class NCA */
post_error ("unsupported descriptor type for &", gnat_subprog);
template_addr = integer_zero_node;
/* Build the fat pointer in the form of a constructor. */
t = tree_cons (TYPE_FIELDS (gnu_type), gnu_expr,
tree_cons (TREE_CHAIN (TYPE_FIELDS (gnu_type)),
template_addr, NULL_TREE));
return gnat_build_constructor (gnu_type, t);
/* Build a stub for the subprogram specified by the GCC tree GNU_SUBPROG
and the GNAT node GNAT_SUBPROG. */
build_function_stub (tree gnu_subprog, Entity_Id gnat_subprog)
tree gnu_subprog_type, gnu_subprog_addr, gnu_subprog_call;
tree gnu_stub_param, gnu_param_list, gnu_arg_types, gnu_param;
tree gnu_stub_decl = DECL_FUNCTION_STUB (gnu_subprog);
gnu_subprog_type = TREE_TYPE (gnu_subprog);
gnu_param_list = NULL_TREE;
begin_subprog_body (gnu_stub_decl);
start_stmt_group ();
/* Loop over the parameters of the stub and translate any of them
passed by descriptor into a by reference one. */
for (gnu_stub_param = DECL_ARGUMENTS (gnu_stub_decl),
gnu_arg_types = TYPE_ARG_TYPES (gnu_subprog_type);
gnu_stub_param = TREE_CHAIN (gnu_stub_param),
gnu_arg_types = TREE_CHAIN (gnu_arg_types))
if (DECL_BY_DESCRIPTOR_P (gnu_stub_param))
gnu_param = convert_vms_descriptor (TREE_VALUE (gnu_arg_types),
gnu_stub_param, gnat_subprog)
else
gnu_param = gnu_stub_param;
/* Accumulate in reverse; nreverse below restores argument order. */
gnu_param_list = tree_cons (NULL_TREE, gnu_param, gnu_param_list);
gnu_body = end_stmt_group ();
/* Invoke the internal subprogram. */
gnu_subprog_addr = build1 (ADDR_EXPR, build_pointer_type (gnu_subprog_type),
gnu_subprog_call = build_call_list (TREE_TYPE (gnu_subprog_type),
nreverse (gnu_param_list));
/* Propagate the return value, if any. */
if (VOID_TYPE_P (TREE_TYPE (gnu_subprog_type)))
append_to_statement_list (gnu_subprog_call, &gnu_body);
append_to_statement_list (build_return_expr (DECL_RESULT (gnu_stub_decl),
allocate_struct_function (gnu_stub_decl, false);
end_subprog_body (gnu_body);
/* Build a type to be used to represent an aliased object whose nominal
type is an unconstrained array. This consists of a RECORD_TYPE containing
a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
is used to represent an arbitrary unconstrained object. Use NAME
as the name of the record. */
build_unc_object_type (tree template_type, tree object_type, tree name)
tree type = make_node (RECORD_TYPE);
/* BOUNDS (the template) comes first, then the ARRAY data. */
tree template_field = create_field_decl (get_identifier ("BOUNDS"),
template_type, type, 0, 0, 0, 1);
tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
TYPE_NAME (type) = name;
TYPE_CONTAINS_TEMPLATE_P (type) = 1;
finish_record_type (type,
chainon (chainon (NULL_TREE, template_field),
3124 /* Same, taking a thin or fat pointer type instead of a template type. */
3127 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
3132 gcc_assert (TYPE_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
/* Recover the template type from the pointer: for a fat pointer it is
   the type designated by the second field (presumably the bounds
   pointer); for a thin pointer it is the type of the first field of
   the designated record.  */
3135 = (TYPE_FAT_POINTER_P (thin_fat_ptr_type)
3136 ? TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
3137 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
/* Then delegate to the template-type variant of this routine.  */
3138 return build_unc_object_type (template_type, object_type, name);
3141 /* Shift the component offsets within an unconstrained object TYPE to make it
3142 suitable for use as a designated type for thin pointers. */
3145 shift_unc_components_for_thin_pointers (tree type)
3147 /* Thin pointer values designate the ARRAY data of an unconstrained object,
3148 allocated past the BOUNDS template. The designated type is adjusted to
3149 have ARRAY at position zero and the template at a negative offset, so
3150 that COMPONENT_REFs on (*thin_ptr) designate the proper location. */
/* Layout is BOUNDS first, ARRAY second (see build_unc_object_type).  */
3152 tree bounds_field = TYPE_FIELDS (type);
3153 tree array_field = TREE_CHAIN (TYPE_FIELDS (type));
/* Move BOUNDS to minus the current byte position of ARRAY ...  */
3155 DECL_FIELD_OFFSET (bounds_field)
3156 = size_binop (MINUS_EXPR, size_zero_node, byte_position (array_field));
/* ... and put ARRAY at offset zero, both byte- and bit-wise.  */
3158 DECL_FIELD_OFFSET (array_field) = size_zero_node;
3159 DECL_FIELD_BIT_OFFSET (array_field) = bitsize_zero_node;
3162 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE. In
3163 the normal case this is just two adjustments, but we have more to do
3164 if NEW is an UNCONSTRAINED_ARRAY_TYPE. */
3167 update_pointer_to (tree old_type, tree new_type)
3169 tree ptr = TYPE_POINTER_TO (old_type);
3170 tree ref = TYPE_REFERENCE_TO (old_type);
3174 /* If this is the main variant, process all the other variants first. */
3175 if (TYPE_MAIN_VARIANT (old_type) == old_type)
3176 for (type = TYPE_NEXT_VARIANT (old_type); type;
3177 type = TYPE_NEXT_VARIANT (type))
3178 update_pointer_to (type, new_type);
3180 /* If no pointer or reference, we are done. */
3184 /* Merge the old type qualifiers in the new type.
3186 Each old variant has qualifiers for specific reasons, and the new
3187 designated type as well. Each set of qualifiers represents useful
3188 information grabbed at some point, and merging the two simply unifies
3189 these inputs into the final type description.
3191 Consider for instance a volatile type frozen after an access to constant
3192 type designating it. After the designated type freeze, we get here with a
3193 volatile new_type and a dummy old_type with a readonly variant, created
3194 when the access type was processed. We shall make a volatile and readonly
3195 designated type, because that's what it really is.
3197 We might also get here for a non-dummy old_type variant with different
3198 qualifiers than the new_type ones, for instance in some cases of pointers
3199 to private record type elaboration (see the comments around the call to
3200 this routine from gnat_to_gnu_entity/E_Access_Type). We have to merge the
3201 qualifiers in those cases too, to avoid accidentally discarding the
3202 initial set, and will often end up with old_type == new_type then. */
3203 new_type = build_qualified_type (new_type,
3204 TYPE_QUALS (old_type)
3205 | TYPE_QUALS (new_type));
3207 /* If the new type and the old one are identical, there is nothing to do. */
3209 if (old_type == new_type)
3212 /* Otherwise, first handle the simple case. */
3213 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
3215 TYPE_POINTER_TO (new_type) = ptr;
3216 TYPE_REFERENCE_TO (new_type) = ref;
/* Walk every pointer to OLD_TYPE and every variant of each, making
   them designate NEW_TYPE instead.  */
3218 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
3219 for (ptr1 = TYPE_MAIN_VARIANT (ptr); ptr1;
3220 ptr1 = TYPE_NEXT_VARIANT (ptr1))
3221 TREE_TYPE (ptr1) = new_type;
/* Likewise for every reference to OLD_TYPE.  */
3223 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
3224 for (ref1 = TYPE_MAIN_VARIANT (ref); ref1;
3225 ref1 = TYPE_NEXT_VARIANT (ref1))
3226 TREE_TYPE (ref1) = new_type;
3229 /* Now deal with the unconstrained array case. In this case the "pointer"
3230 is actually a RECORD_TYPE where both fields are pointers to dummy nodes.
3231 Turn them into pointers to the correct types using update_pointer_to. */
/* Anything other than that fat-pointer RECORD_TYPE is unexpected here.  */
3232 else if (TREE_CODE (ptr) != RECORD_TYPE || !TYPE_IS_FAT_POINTER_P (ptr))
3237 tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
3238 tree array_field = TYPE_FIELDS (ptr);
3239 tree bounds_field = TREE_CHAIN (TYPE_FIELDS (ptr));
3240 tree new_ptr = TYPE_POINTER_TO (new_type);
3244 /* Make pointers to the dummy template point to the real template. */
3246 (TREE_TYPE (TREE_TYPE (bounds_field)),
3247 TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_ptr)))));
3249 /* The references to the template bounds present in the array type
3250 are made through a PLACEHOLDER_EXPR of type new_ptr. Since we
3251 are updating ptr to make it a full replacement for new_ptr as
3252 pointer to new_type, we must rework the PLACEHOLDER_EXPR so as
3253 to make it of type ptr. */
3254 new_ref = build3 (COMPONENT_REF, TREE_TYPE (bounds_field),
3255 build0 (PLACEHOLDER_EXPR, ptr),
3256 bounds_field, NULL_TREE);
3258 /* Create the new array for the new PLACEHOLDER_EXPR and make
3259 pointers to the dummy array point to it.
3261 ??? This is now the only use of substitute_in_type,
3262 which is a very "heavy" routine to do this, so it
3263 should be replaced at some point. */
3265 (TREE_TYPE (TREE_TYPE (array_field)),
3266 substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))),
3267 TREE_CHAIN (TYPE_FIELDS (new_ptr)), new_ref));
3269 /* Make ptr the pointer to new_type. */
3270 TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
3271 = TREE_TYPE (new_type) = ptr;
/* Record the unconstrained array on every variant of the fat pointer.  */
3273 for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
3274 SET_TYPE_UNCONSTRAINED_ARRAY (var, new_type);
3276 /* Now handle updating the allocation record, what the thin pointer
3277 points to. Update all pointers from the old record into the new
3278 one, update the type of the array field, and recompute the size. */
3279 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);
3281 TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
3282 = TREE_TYPE (TREE_TYPE (array_field));
3284 /* The size recomputation needs to account for alignment constraints, so
3285 we let layout_type work it out. This will reset the field offsets to
3286 what they would be in a regular record, so we shift them back to what
3287 we want them to be for a thin pointer designated type afterwards. */
/* Clear the cached sizes first so layout_type recomputes them.  */
3288 DECL_SIZE (TYPE_FIELDS (new_obj_rec)) = 0;
3289 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))) = 0;
3290 TYPE_SIZE (new_obj_rec) = 0;
3291 layout_type (new_obj_rec);
3293 shift_unc_components_for_thin_pointers (new_obj_rec);
3295 /* We are done, at last. */
3296 rest_of_record_type_compilation (ptr);
3300 /* Convert a pointer to a constrained array into a pointer to a fat
3301 pointer. This involves making or finding a template. */
3304 convert_to_fat_pointer (tree type, tree expr)
/* The template type is what the fat pointer's second field designates.  */
3306 tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
3307 tree template, template_addr;
3308 tree etype = TREE_TYPE (expr);
3310 /* If EXPR is a constant of zero, we make a fat pointer that has a null
3311 pointer to the template and array. */
3312 if (integer_zerop (expr))
3314 gnat_build_constructor
3316 tree_cons (TYPE_FIELDS (type),
3317 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
3318 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3319 convert (build_pointer_type (template_type),
3323 /* If EXPR is a thin pointer, make the template and data from the record. */
3325 else if (TYPE_THIN_POINTER_P (etype))
3327 tree fields = TYPE_FIELDS (TREE_TYPE (etype));
/* Save EXPR since it is referenced twice below; if it is directly
   an ADDR_EXPR, dereference by hand instead of building INDIRECT_REF.  */
3329 expr = save_expr (expr);
3330 if (TREE_CODE (expr) == ADDR_EXPR)
3331 expr = TREE_OPERAND (expr, 0);
3333 expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
/* First field of the designated record is the template, second is
   the array data whose address we take.  */
3335 template = build_component_ref (expr, NULL_TREE, fields, false);
3336 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
3337 build_component_ref (expr, NULL_TREE,
3338 TREE_CHAIN (fields), false));
3341 /* Otherwise, build the constructor for the template. */
3342 template = build_template (template_type, TREE_TYPE (etype), expr);
3344 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);
3346 /* The result is a CONSTRUCTOR for the fat pointer.
3348 If expr is an argument of a foreign convention subprogram, the type it
3349 points to is directly the component type. In this case, the expression
3350 type may not match the corresponding FIELD_DECL type at this point, so we
3351 call "convert" here to fix that up if necessary. This type consistency is
3352 required, for instance because it ensures that possible later folding of
3353 component_refs against this constructor always yields something of the
3354 same type as the initial reference.
3356 Note that the call to "build_template" above is still fine, because it
3357 will only refer to the provided template_type in this case. */
3359 gnat_build_constructor
3360 (type, tree_cons (TYPE_FIELDS (type),
3361 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
3362 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3363 template_addr, NULL_TREE)));
3366 /* Convert to a thin pointer type, TYPE. The only thing we know how to convert
3367 is something that is a fat pointer, so convert to it first if EXPR
3368 is not already a fat pointer. */
3371 convert_to_thin_pointer (tree type, tree expr)
3373 if (!TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
3375 = convert_to_fat_pointer
3376 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
3378 /* We get the pointer to the data and use a NOP_EXPR to make it the
thin pointer. */
3380 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)),
3382 expr = build1 (NOP_EXPR, type, expr);
3387 /* Create an expression whose value is that of EXPR,
3388 converted to type TYPE. The TREE_TYPE of the value
3389 is always TYPE. This function implements all reasonable
3390 conversions; callers should filter out those that are
3391 not permitted by the language being compiled. */
3394 convert (tree type, tree expr)
3396 enum tree_code code = TREE_CODE (type);
3397 tree etype = TREE_TYPE (expr);
3398 enum tree_code ecode = TREE_CODE (etype);
3400 /* If EXPR is already the right type, we are done. */
3404 /* If both input and output have padding and are of variable size, do this
3405 as an unchecked conversion. Likewise if one is a mere variant of the
3406 other, so we avoid a pointless unpad/repad sequence. */
3407 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
3408 && TYPE_IS_PADDING_P (type) && TYPE_IS_PADDING_P (etype)
3409 && (!TREE_CONSTANT (TYPE_SIZE (type))
3410 || !TREE_CONSTANT (TYPE_SIZE (etype))
3411 || gnat_types_compatible_p (type, etype)))
3414 /* If the output type has padding, convert to the inner type and
3415 make a constructor to build the record. */
3416 else if (code == RECORD_TYPE && TYPE_IS_PADDING_P (type))
3418 /* If we previously converted from another type and our type is
3419 of variable size, remove the conversion to avoid the need for
3420 variable-size temporaries. */
3421 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3422 && !TREE_CONSTANT (TYPE_SIZE (type)))
3423 expr = TREE_OPERAND (expr, 0);
3425 /* If we are just removing the padding from expr, convert the original
3426 object if we have variable size in order to avoid the need for some
3427 variable-size temporaries. Likewise if the padding is a mere variant
3428 of the other, so we avoid a pointless unpad/repad sequence. */
3429 if (TREE_CODE (expr) == COMPONENT_REF
3430 && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE
3431 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
3432 && (!TREE_CONSTANT (TYPE_SIZE (type))
3433 || gnat_types_compatible_p (type,
3434 TREE_TYPE (TREE_OPERAND (expr, 0)))))
3435 return convert (type, TREE_OPERAND (expr, 0));
3437 /* If the result type is a padded type with a self-referentially-sized
3438 field and the expression type is a record, do this as an
3439 unchecked conversion. */
3440 else if (TREE_CODE (etype) == RECORD_TYPE
3441 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
3442 return unchecked_convert (type, expr, false);
3446 gnat_build_constructor (type,
3447 tree_cons (TYPE_FIELDS (type),
3449 (TYPE_FIELDS (type)),
3454 /* If the input type has padding, remove it and convert to the output type.
3455 The conditions ordering is arranged to ensure that the output type is not
3456 a padding type here, as it is not clear whether the conversion would
3457 always be correct if this was to happen. */
3458 else if (ecode == RECORD_TYPE && TYPE_IS_PADDING_P (etype))
3462 /* If we have just converted to this padded type, just get the
3463 inner expression. */
3464 if (TREE_CODE (expr) == CONSTRUCTOR
3465 && !VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (expr))
3466 && VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->index
3467 == TYPE_FIELDS (etype))
3469 = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->value;
3471 /* Otherwise, build an explicit component reference. */
3474 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
3476 return convert (type, unpadded);
3479 /* If the input is a biased type, adjust first. */
3480 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
3481 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
3482 fold_convert (TREE_TYPE (etype),
3484 TYPE_MIN_VALUE (etype)));
3486 /* If the input is a justified modular type, we need to extract the actual
3487 object before converting it to any other type with the exceptions of an
3488 unconstrained array or of a mere type variant. It is useful to avoid the
3489 extraction and conversion in the type variant case because it could end
3490 up replacing a VAR_DECL expr by a constructor and we might be about to
3491 take the address of the result. */
3492 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
3493 && code != UNCONSTRAINED_ARRAY_TYPE
3494 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
3495 return convert (type, build_component_ref (expr, NULL_TREE,
3496 TYPE_FIELDS (etype), false));
3498 /* If converting to a type that contains a template, convert to the data
3499 type and then build the template. */
3500 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
3502 tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));
3504 /* If the source already has a template, get a reference to the
3505 associated array only, as we are going to rebuild a template
3506 for the target type anyway. */
3507 expr = maybe_unconstrained_array (expr);
3510 gnat_build_constructor
3512 tree_cons (TYPE_FIELDS (type),
3513 build_template (TREE_TYPE (TYPE_FIELDS (type)),
3514 obj_type, NULL_TREE),
3515 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3516 convert (obj_type, expr), NULL_TREE)));
3519 /* There are some special cases of expressions that we process specially. */
3521 switch (TREE_CODE (expr))
3527 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
3528 conversion in gnat_expand_expr. NULL_EXPR does not represent
3529 an actual value, so no conversion is needed. */
3530 expr = copy_node (expr);
3531 TREE_TYPE (expr) = type;
3535 /* If we are converting a STRING_CST to another constrained array type,
3536 just make a new one in the proper type. */
3537 if (code == ecode && AGGREGATE_TYPE_P (etype)
3538 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
3539 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
3541 expr = copy_node (expr);
3542 TREE_TYPE (expr) = type;
3548 /* If we are converting a CONSTRUCTOR to a mere variant type, just make
3549 a new one in the proper type. */
3550 if (gnat_types_compatible_p (type, etype))
3552 expr = copy_node (expr);
3553 TREE_TYPE (expr) = type;
3558 case UNCONSTRAINED_ARRAY_REF:
3559 /* Convert this to the type of the inner array by getting the address of
3560 the array from the template. */
3561 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
3562 build_component_ref (TREE_OPERAND (expr, 0),
3563 get_identifier ("P_ARRAY"),
/* EXPR changed, so recompute the cached type/code before falling out
   of the switch into the generic handling below.  */
3565 etype = TREE_TYPE (expr);
3566 ecode = TREE_CODE (etype);
3569 case VIEW_CONVERT_EXPR:
3571 /* GCC 4.x is very sensitive to type consistency overall, and view
3572 conversions thus are very frequent. Even though just "convert"ing
3573 the inner operand to the output type is fine in most cases, it
3574 might expose unexpected input/output type mismatches in special
3575 circumstances so we avoid such recursive calls when we can. */
3576 tree op0 = TREE_OPERAND (expr, 0);
3578 /* If we are converting back to the original type, we can just
3579 lift the input conversion. This is a common occurrence with
3580 switches back-and-forth amongst type variants. */
3581 if (type == TREE_TYPE (op0))
3584 /* Otherwise, if we're converting between two aggregate types, we
3585 might be allowed to substitute the VIEW_CONVERT_EXPR target type
3586 in place or to just convert the inner expression. */
3587 if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
3589 /* If we are converting between mere variants, we can just
3590 substitute the VIEW_CONVERT_EXPR in place. */
3591 if (gnat_types_compatible_p (type, etype))
3592 return build1 (VIEW_CONVERT_EXPR, type, op0);
3594 /* Otherwise, we may just bypass the input view conversion unless
3595 one of the types is a fat pointer, which is handled by
3596 specialized code below which relies on exact type matching. */
3597 else if (!TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
3598 return convert (type, op0);
3604 /* If both types are record types, just convert the pointer and
3605 make a new INDIRECT_REF.
3607 ??? Disable this for now since it causes problems with the
3608 code in build_binary_op for MODIFY_EXPR which wants to
3609 strip off conversions. But that code really is a mess and
3610 we need to do this a much better way some time. */
3612 && (TREE_CODE (type) == RECORD_TYPE
3613 || TREE_CODE (type) == UNION_TYPE)
3614 && (TREE_CODE (etype) == RECORD_TYPE
3615 || TREE_CODE (etype) == UNION_TYPE)
3616 && !TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
3617 return build_unary_op (INDIRECT_REF, NULL_TREE,
3618 convert (build_pointer_type (type),
3619 TREE_OPERAND (expr, 0)));
3626 /* Check for converting to a pointer to an unconstrained array. */
3627 if (TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
3628 return convert_to_fat_pointer (type, expr);
3630 /* If we're converting between two aggregate types that are mere
3631 variants, just make a VIEW_CONVERT_EXPR. */
3632 else if (AGGREGATE_TYPE_P (type)
3633 && gnat_types_compatible_p (type, etype))
3634 return build1 (VIEW_CONVERT_EXPR, type, expr);
3636 /* In all other cases of related types, make a NOP_EXPR. */
3637 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
3638 || (code == INTEGER_CST && ecode == INTEGER_CST
3639 && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
3640 return fold_convert (type, expr);
3645 return fold_build1 (CONVERT_EXPR, type, expr);
3648 return fold_convert (type, gnat_truthvalue_conversion (expr));
3651 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
3652 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
3653 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
3654 return unchecked_convert (type, expr, false);
/* Undo the bias before the plain integer conversion.  */
3655 else if (TYPE_BIASED_REPRESENTATION_P (type))
3656 return fold_convert (type,
3657 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
3658 convert (TREE_TYPE (type), expr),
3659 TYPE_MIN_VALUE (type)));
3661 /* ... fall through ... */
3664 return fold (convert_to_integer (type, expr));
3667 case REFERENCE_TYPE:
3668 /* If converting between two pointers to records denoting
3669 both a template and type, adjust if needed to account
3670 for any differing offsets, since one might be negative. */
3671 if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
3674 = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
3675 bit_position (TYPE_FIELDS (TREE_TYPE (type))));
3676 tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
3677 sbitsize_int (BITS_PER_UNIT));
3679 expr = build1 (NOP_EXPR, type, expr);
3680 TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
3681 if (integer_zerop (byte_diff))
3684 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
3685 fold (convert (sizetype, byte_diff)));
3688 /* If converting to a thin pointer, handle specially. */
3689 if (TYPE_THIN_POINTER_P (type)
3690 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
3691 return convert_to_thin_pointer (type, expr);
3693 /* If converting fat pointer to normal pointer, get the pointer to the
3694 array and then convert it. */
3695 else if (TYPE_FAT_POINTER_P (etype))
3696 expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
3699 return fold (convert_to_pointer (type, expr));
3702 return fold (convert_to_real (type, expr));
3705 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
3707 gnat_build_constructor
3708 (type, tree_cons (TYPE_FIELDS (type),
3709 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
3712 /* ... fall through ... */
3715 /* In these cases, assume the front-end has validated the conversion.
3716 If the conversion is valid, it will be a bit-wise conversion, so
3717 it can be viewed as an unchecked conversion. */
3718 return unchecked_convert (type, expr, false);
3721 /* This is either a conversion between a tagged type and some
3722 subtype, which we have to mark as a UNION_TYPE because of
3723 overlapping fields or a conversion of an Unchecked_Union. */
3724 return unchecked_convert (type, expr, false);
3726 case UNCONSTRAINED_ARRAY_TYPE:
3727 /* If EXPR is a constrained array, take its address, convert it to a
3728 fat pointer, and then dereference it. Likewise if EXPR is a
3729 record containing both a template and a constrained array.
3730 Note that a record representing a justified modular type
3731 always represents a packed constrained array. */
3732 if (ecode == ARRAY_TYPE
3733 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
3734 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
3735 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
3738 (INDIRECT_REF, NULL_TREE,
3739 convert_to_fat_pointer (TREE_TYPE (type),
3740 build_unary_op (ADDR_EXPR,
3743 /* Do something very similar for converting one unconstrained
3744 array to another. */
3745 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
3747 build_unary_op (INDIRECT_REF, NULL_TREE,
3748 convert (TREE_TYPE (type),
3749 build_unary_op (ADDR_EXPR,
3755 return fold (convert_to_complex (type, expr));
3762 /* Remove all conversions that are done in EXP. This includes converting
3763 from a padded type or to a justified modular type. If TRUE_ADDRESS
3764 is true, always return the address of the containing object even if
3765 the address is not bit-aligned. */
3768 remove_conversions (tree exp, bool true_address)
3770 switch (TREE_CODE (exp))
/* A CONSTRUCTOR for a justified modular record wraps the real value
   in its single field: strip it and recurse on that value.  */
3774 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3775 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
3777 remove_conversions (VEC_index (constructor_elt,
3778 CONSTRUCTOR_ELTS (exp), 0)->value,
/* A COMPONENT_REF out of a padded record is an unpadding conversion:
   recurse on the record object itself.  */
3783 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
3784 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
3785 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
/* Plain conversion nodes are stripped unconditionally.  */
3788 case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
3789 case NOP_EXPR: case CONVERT_EXPR:
3790 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3799 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
3800 refers to the underlying array. If its type has TYPE_CONTAINS_TEMPLATE_P,
3801 likewise return an expression pointing to the underlying array. */
3804 maybe_unconstrained_array (tree exp)
3806 enum tree_code code = TREE_CODE (exp);
3809 switch (TREE_CODE (TREE_TYPE (exp)))
3811 case UNCONSTRAINED_ARRAY_TYPE:
/* Dereference the P_ARRAY pointer of the fat pointer to reach the
   array data, preserving the readonly flag on the result.  */
3812 if (code == UNCONSTRAINED_ARRAY_REF)
3815 = build_unary_op (INDIRECT_REF, NULL_TREE,
3816 build_component_ref (TREE_OPERAND (exp, 0),
3817 get_identifier ("P_ARRAY"),
3819 TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp);
/* For a NULL_EXPR, rebuild one with the underlying array type.  */
3823 else if (code == NULL_EXPR)
3824 return build1 (NULL_EXPR,
3825 TREE_TYPE (TREE_TYPE (TYPE_FIELDS
3826 (TREE_TYPE (TREE_TYPE (exp))))),
3827 TREE_OPERAND (exp, 0));
3830 /* If this is a padded type, convert to the unpadded type and see if
3831 it contains a template. */
3832 if (TYPE_IS_PADDING_P (TREE_TYPE (exp)))
3834 new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp))), exp);
3835 if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
3836 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (new)))
/* The array data is the second field, past the template.  */
3838 build_component_ref (new, NULL_TREE,
3839 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (new))),
3842 else if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
3844 build_component_ref (exp, NULL_TREE,
3845 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))), 0);
3855 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
3856 If NOTRUNC_P is true, truncation operations should be suppressed. */
3859 unchecked_convert (tree type, tree expr, bool notrunc_p)
3861 tree etype = TREE_TYPE (expr);
3863 /* If the expression is already the right type, we are done. */
3867 /* If both types are integral just do a normal conversion.
3868 Likewise for a conversion to an unconstrained array. */
3869 if ((((INTEGRAL_TYPE_P (type)
3870 && !(TREE_CODE (type) == INTEGER_TYPE
3871 && TYPE_VAX_FLOATING_POINT_P (type)))
3872 || (POINTER_TYPE_P (type) && ! TYPE_THIN_POINTER_P (type))
3873 || (TREE_CODE (type) == RECORD_TYPE
3874 && TYPE_JUSTIFIED_MODULAR_P (type)))
3875 && ((INTEGRAL_TYPE_P (etype)
3876 && !(TREE_CODE (etype) == INTEGER_TYPE
3877 && TYPE_VAX_FLOATING_POINT_P (etype)))
3878 || (POINTER_TYPE_P (etype) && !TYPE_THIN_POINTER_P (etype))
3879 || (TREE_CODE (etype) == RECORD_TYPE
3880 && TYPE_JUSTIFIED_MODULAR_P (etype))))
3881 || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3884 bool final_unchecked = false;
/* Strip the bias from a biased input type by converting to an
   unbiased copy of it first.  */
3886 if (TREE_CODE (etype) == INTEGER_TYPE
3887 && TYPE_BIASED_REPRESENTATION_P (etype))
3889 tree ntype = copy_type (etype);
3891 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
3892 TYPE_MAIN_VARIANT (ntype) = ntype;
3893 expr = build1 (NOP_EXPR, ntype, expr);
/* Likewise target a non-biased copy of a biased output type.  */
3896 if (TREE_CODE (type) == INTEGER_TYPE
3897 && TYPE_BIASED_REPRESENTATION_P (type))
3899 rtype = copy_type (type);
3900 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
3901 TYPE_MAIN_VARIANT (rtype) = rtype;
3904 /* We have another special case: if we are unchecked converting subtype
3905 into a base type, we need to ensure that VRP doesn't propagate range
3906 information since this conversion may be done precisely to validate
3907 that the object is within the range it is supposed to have. */
3908 else if (TREE_CODE (expr) != INTEGER_CST
3909 && TREE_CODE (type) == INTEGER_TYPE && !TREE_TYPE (type)
3910 && ((TREE_CODE (etype) == INTEGER_TYPE && TREE_TYPE (etype))
3911 || TREE_CODE (etype) == ENUMERAL_TYPE
3912 || TREE_CODE (etype) == BOOLEAN_TYPE))
3914 /* The optimization barrier is a VIEW_CONVERT_EXPR node; moreover,
3915 in order not to be deemed a useless type conversion, it must
3916 be from subtype to base type.
3918 ??? This may raise addressability and/or aliasing issues because
3919 VIEW_CONVERT_EXPR gets gimplified as an lvalue, thus causing the
3920 address of its operand to be taken if it is deemed addressable
3921 and not already in GIMPLE form. */
3922 rtype = gnat_type_for_mode (TYPE_MODE (type), TYPE_UNSIGNED (type));
3923 rtype = copy_type (rtype);
3924 TYPE_MAIN_VARIANT (rtype) = rtype;
3925 TREE_TYPE (rtype) = type;
3926 final_unchecked = true;
3929 expr = convert (rtype, expr);
3931 expr = fold_build1 (final_unchecked ? VIEW_CONVERT_EXPR : NOP_EXPR,
3935 /* If we are converting TO an integral type whose precision is not the
3936 same as its size, first unchecked convert to a record that contains
3937 an object of the output type. Then extract the field. */
3938 else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
3939 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
3940 GET_MODE_BITSIZE (TYPE_MODE (type))))
3942 tree rec_type = make_node (RECORD_TYPE);
3943 tree field = create_field_decl (get_identifier ("OBJ"), type,
3944 rec_type, 1, 0, 0, 0);
3946 TYPE_FIELDS (rec_type) = field;
3947 layout_type (rec_type);
3949 expr = unchecked_convert (rec_type, expr, notrunc_p);
3950 expr = build_component_ref (expr, NULL_TREE, field, 0);
3953 /* Similarly for integral input type whose precision is not equal to its
size. */
3955 else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype)
3956 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
3957 GET_MODE_BITSIZE (TYPE_MODE (etype))))
3959 tree rec_type = make_node (RECORD_TYPE);
3961 = create_field_decl (get_identifier ("OBJ"), etype, rec_type,
3964 TYPE_FIELDS (rec_type) = field;
3965 layout_type (rec_type);
/* Wrap the input in the record, then convert the record.  */
3967 expr = gnat_build_constructor (rec_type, build_tree_list (field, expr));
3968 expr = unchecked_convert (type, expr, notrunc_p);
3971 /* We have a special case when we are converting between two
3972 unconstrained array types. In that case, take the address,
3973 convert the fat pointer types, and dereference. */
3974 else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
3975 && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3976 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
3977 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
3978 build_unary_op (ADDR_EXPR, NULL_TREE,
3982 expr = maybe_unconstrained_array (expr);
3983 etype = TREE_TYPE (expr);
3984 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
3987 /* If the result is an integral type whose size is not equal to
3988 the size of the underlying machine type, sign- or zero-extend
3989 the result. We need not do this in the case where the input is
3990 an integral type of the same precision and signedness or if the output
3991 is a biased type or if both the input and output are unsigned. */
3993 && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
3994 && !(TREE_CODE (type) == INTEGER_TYPE
3995 && TYPE_BIASED_REPRESENTATION_P (type))
3996 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
3997 GET_MODE_BITSIZE (TYPE_MODE (type)))
3998 && !(INTEGRAL_TYPE_P (etype)
3999 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
4000 && operand_equal_p (TYPE_RM_SIZE (type),
4001 (TYPE_RM_SIZE (etype) != 0
4002 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
4004 && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
/* Extend via a left shift followed by an arithmetic/logical right
   shift in the full machine type.  */
4006 tree base_type = gnat_type_for_mode (TYPE_MODE (type),
4007 TYPE_UNSIGNED (type));
4009 = convert (base_type,
4010 size_binop (MINUS_EXPR,
4012 (GET_MODE_BITSIZE (TYPE_MODE (type))),
4013 TYPE_RM_SIZE (type)));
4016 build_binary_op (RSHIFT_EXPR, base_type,
4017 build_binary_op (LSHIFT_EXPR, base_type,
4018 convert (base_type, expr),
4023 /* An unchecked conversion should never raise Constraint_Error. The code
4024 below assumes that GCC's conversion routines overflow the same way that
4025 the underlying hardware does. This is probably true. In the rare case
4026 when it is false, we can rely on the fact that such conversions are
4027 erroneous anyway. */
4028 if (TREE_CODE (expr) == INTEGER_CST)
4029 TREE_OVERFLOW (expr) = 0;
4031 /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
4032 show no longer constant. */
4033 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4034 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
4036 TREE_CONSTANT (expr) = 0;
4041 /* Search the chain of currently available builtin declarations for a node
4042 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
4043 found, if any, or NULL_TREE otherwise. */
4045 builtin_decl_for (tree name)
/* Linear scan; identifiers are interned, so pointer comparison of
   DECL_NAME against NAME suffices.  */
4050 for (i = 0; VEC_iterate(tree, builtin_decls, i, decl); i++)
4051 if (DECL_NAME (decl) == name)
4057 /* Return the appropriate GCC tree code for the specified GNAT type,
4058 the latter being a record type as predicated by Is_Record_Type. */
/* NOTE(review): elided listing -- the return-type line, opening brace,
   the declaration of COMPONENT, an intermediate accessor in the
   COMPONENT_LIST initializer, and the return statements are not visible.
   The visible logic decides between RECORD_TYPE and UNION_TYPE per the
   comment below -- confirm exact returns against the full source.  */
4061 tree_code_for_record_type (Entity_Id gnat_type)
/* Fetch the component list from the type definition of the
   implementation base type of GNAT_TYPE (an elided line sits between
   Type_Definition and Implementation_Base_Type).  */
4063 Node_Id component_list
4064 = Component_List (Type_Definition
4066 (Implementation_Base_Type (gnat_type))));
4069 /* Make this a UNION_TYPE unless it's either not an Unchecked_Union or
4070 we have a non-discriminant field outside a variant. In either case,
4071 it's a RECORD_TYPE. */
/* Not an Unchecked_Union: plain record (the taken branch is elided).  */
4073 if (!Is_Unchecked_Union (gnat_type))
/* Walk the non-pragma component items looking for a plain E_Component
   field outside the variant part.  */
4076 for (component = First_Non_Pragma (Component_Items (component_list));
4077 Present (component);
4078 component = Next_Non_Pragma (component))
4079 if (Ekind (Defining_Entity (component)) == E_Component)
4085 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
4086 component of an aggregate type. */
/* NOTE(review): elided listing -- the bool return-type line, opening
   brace, and the return statements under each condition are not visible;
   presumably both visible tests return false and the fall-through returns
   true, per the comments -- confirm against the full source.  */
4089 type_for_nonaliased_component_p (tree gnu_type)
4091 /* If the type is passed by reference, we may have pointers to the
4092 component so it cannot be made non-aliased. */
4093 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
4096 /* We used to say that any component of aggregate type is aliased
4097 because the front-end may take 'Reference of it. The front-end
4098 has been enhanced in the meantime so as to use a renaming instead
4099 in most cases, but the back-end can probably take the address of
4100 such a component too so we go for the conservative stance.
4102 For instance, we might need the address of any array type, even
4103 if normally passed by copy, to construct a fat pointer if the
4104 component is used as an actual for an unconstrained formal.
4106 Likewise for record types: even if a specific record subtype is
4107 passed by copy, the parent type might be passed by ref (e.g. if
4108 it's of variable size) and we might take the address of a child
4109 component to pass to a parent formal. We have no way to check
4110 for such conditions here. */
4111 if (AGGREGATE_TYPE_P (gnu_type))
4117 /* Perform final processing on global variables. */
/* NOTE(review): elided listing -- the void return-type line, opening
   brace, the actual optimize/emit call referenced by the FIXME below,
   and the closing brace are not visible here.  */
4120 gnat_write_global_declarations (void)
4122 /* Proceed to optimize and emit assembly.
4123 FIXME: shouldn't be the front end's responsibility to call this. */
4126 /* Emit debug info for all global declarations. */
/* Hand the debug back-end the flat array view and length of the
   global_decls vector.  */
4127 emit_debug_global_declarations (VEC_address (tree, global_decls),
4128 VEC_length (tree, global_decls));
4131 #include "gt-ada-utils.h"
4132 #include "gtype-ada.h"