1 /****************************************************************************
3 * GNAT COMPILER COMPONENTS *
7 * C Implementation File *
9 * Copyright (C) 1992-2009, Free Software Foundation, Inc. *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
24 ****************************************************************************/
26 /* We have attribute handlers using C specific format specifiers in warning
27 messages. Make sure they are properly recognized. */
28 #define GCC_DIAG_STYLE __gcc_cdiag__
32 #include "coretypes.h"
44 #include "langhooks.h"
45 #include "pointer-set.h"
47 #include "tree-dump.h"
48 #include "tree-inline.h"
49 #include "tree-iterator.h"
66 #ifndef MAX_FIXED_MODE_SIZE
67 #define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
70 #ifndef MAX_BITS_PER_WORD
71 #define MAX_BITS_PER_WORD BITS_PER_WORD
74 /* If nonzero, pretend we are allocating at global level. */
77 /* The default alignment of "double" floating-point types, i.e. floating
78 point types whose size is equal to 64 bits, or 0 if this alignment is
79 not specifically capped. */
80 int double_float_alignment;
82 /* The default alignment of "double" or larger scalar types, i.e. scalar
83 types whose size is greater or equal to 64 bits, or 0 if this alignment
84 is not specifically capped. */
85 int double_scalar_alignment;
87 /* Tree nodes for the various types and decls we create. */
88 tree gnat_std_decls[(int) ADT_LAST];
90 /* Functions to call for each of the possible raise reasons. */
91 tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
93 /* Forward declarations for handlers of attributes. */
94 static tree handle_const_attribute (tree *, tree, tree, int, bool *);
95 static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
96 static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
97 static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
98 static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
99 static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
100 static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
101 static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
102 static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
104 /* Fake handler for attributes we don't properly support, typically because
105 they'd require dragging a lot of the common-c front-end circuitry. */
106 static tree fake_attribute_handler (tree *, tree, tree, int, bool *);
108 /* Table of machine-independent internal attributes for Ada. We support
109 this minimal set of attributes to accommodate the needs of builtins. */
110 const struct attribute_spec gnat_internal_attribute_table[] =
112 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
113 { "const", 0, 0, true, false, false, handle_const_attribute },
114 { "nothrow", 0, 0, true, false, false, handle_nothrow_attribute },
115 { "pure", 0, 0, true, false, false, handle_pure_attribute },
116 { "no vops", 0, 0, true, false, false, handle_novops_attribute },
117 { "nonnull", 0, -1, false, true, true, handle_nonnull_attribute },
118 { "sentinel", 0, 1, false, true, true, handle_sentinel_attribute },
119 { "noreturn", 0, 0, true, false, false, handle_noreturn_attribute },
120 { "malloc", 0, 0, true, false, false, handle_malloc_attribute },
121 { "type generic", 0, 0, false, true, true, handle_type_generic_attribute },
123 /* ??? format and format_arg are heavy and not supported, which actually
124 prevents support for stdio builtins, which we however declare as part
125 of the common builtins.def contents. */
126 { "format", 3, 3, false, true, true, fake_attribute_handler },
127 { "format_arg", 1, 1, false, true, true, fake_attribute_handler },
129 { NULL, 0, 0, false, false, false, NULL }
132 /* Associates a GNAT tree node to a GCC tree node. It is used in
133 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
134 of `save_gnu_tree' for more info. */
135 static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
137 #define GET_GNU_TREE(GNAT_ENTITY) \
138 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
140 #define SET_GNU_TREE(GNAT_ENTITY,VAL) \
141 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
143 #define PRESENT_GNU_TREE(GNAT_ENTITY) \
144 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
146 /* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
147 static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;
149 #define GET_DUMMY_NODE(GNAT_ENTITY) \
150 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
152 #define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
153 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
155 #define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
156 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
158 /* This variable keeps a table for types for each precision so that we only
159 allocate each of them once. Signed and unsigned types are kept separate.
161 Note that these types are only used when fold-const requests something
162 special. Perhaps we should NOT share these types; we'll see how it
164 static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
166 /* Likewise for float types, but record these by mode. */
167 static GTY(()) tree float_types[NUM_MACHINE_MODES];
169 /* For each binding contour we allocate a binding_level structure to indicate
170 the binding depth. */
172 struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
173 /* The binding level containing this one (the enclosing binding level). */
174 struct gnat_binding_level *chain;
175 /* The BLOCK node for this level. */
177 /* If nonzero, the setjmp buffer that needs to be updated for any
178 variable-sized definition within this context. */
182 /* The binding level currently in effect. */
183 static GTY(()) struct gnat_binding_level *current_binding_level;
185 /* A chain of gnat_binding_level structures awaiting reuse. */
186 static GTY((deletable)) struct gnat_binding_level *free_binding_level;
188 /* An array of global declarations. */
189 static GTY(()) VEC(tree,gc) *global_decls;
191 /* An array of builtin function declarations. */
192 static GTY(()) VEC(tree,gc) *builtin_decls;
194 /* An array of global renaming pointers. */
195 static GTY(()) VEC(tree,gc) *global_renaming_pointers;
197 /* A chain of unused BLOCK nodes. */
198 static GTY((deletable)) tree free_block_chain;
200 static tree merge_sizes (tree, tree, tree, bool, bool);
201 static tree compute_related_constant (tree, tree);
202 static tree split_plus (tree, tree *);
203 static void gnat_gimplify_function (tree);
204 static tree float_type_for_precision (int, enum machine_mode);
205 static tree convert_to_fat_pointer (tree, tree);
206 static tree convert_to_thin_pointer (tree, tree);
207 static tree make_descriptor_field (const char *,tree, tree, tree);
208 static bool potential_alignment_gap (tree, tree, tree);
210 /* Initialize the association of GNAT nodes to GCC trees. */
213 init_gnat_to_gnu (void)
215 associate_gnat_to_gnu
216 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
219 /* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
220 which is to be associated with GNAT_ENTITY. Such GCC tree node is always
221 a ..._DECL node. If NO_CHECK is true, the latter check is suppressed.
223 If GNU_DECL is zero, a previous association is to be reset. */
226 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
228 /* Check that GNAT_ENTITY is not already defined and that it is being set
229 to something which is a decl. Raise gigi 401 if not. Usually, this
230 means GNAT_ENTITY is defined twice, but occasionally is due to some
232 gcc_assert (!(gnu_decl
233 && (PRESENT_GNU_TREE (gnat_entity)
234 || (!no_check && !DECL_P (gnu_decl)))));
236 SET_GNU_TREE (gnat_entity, gnu_decl);
239 /* GNAT_ENTITY is a GNAT tree node for a defining identifier.
240 Return the ..._DECL node that was associated with it. If there is no tree
241 node associated with GNAT_ENTITY, abort.
243 In some cases, such as delayed elaboration or expressions that need to
244 be elaborated only once, GNAT_ENTITY is really not an entity. */
247 get_gnu_tree (Entity_Id gnat_entity)
249 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
250 return GET_GNU_TREE (gnat_entity);
253 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
256 present_gnu_tree (Entity_Id gnat_entity)
258 return PRESENT_GNU_TREE (gnat_entity);
261 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
264 init_dummy_type (void)
267 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
270 /* Make a dummy type corresponding to GNAT_TYPE. */
273 make_dummy_type (Entity_Id gnat_type)
275 Entity_Id gnat_underlying = Gigi_Equivalent_Type (gnat_type);
278 /* If there is an equivalent type, get its underlying type. */
279 if (Present (gnat_underlying))
280 gnat_underlying = Underlying_Type (gnat_underlying);
282 /* If there was no equivalent type (can only happen when just annotating
283 types) or underlying type, go back to the original type. */
284 if (No (gnat_underlying))
285 gnat_underlying = gnat_type;
287 /* If it there already a dummy type, use that one. Else make one. */
288 if (PRESENT_DUMMY_NODE (gnat_underlying))
289 return GET_DUMMY_NODE (gnat_underlying);
291 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
293 gnu_type = make_node (Is_Record_Type (gnat_underlying)
294 ? tree_code_for_record_type (gnat_underlying)
296 TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
297 TYPE_DUMMY_P (gnu_type) = 1;
298 TYPE_STUB_DECL (gnu_type)
299 = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
300 if (AGGREGATE_TYPE_P (gnu_type))
301 TYPE_BY_REFERENCE_P (gnu_type) = Is_By_Reference_Type (gnat_type);
303 SET_DUMMY_NODE (gnat_underlying, gnu_type);
308 /* Return nonzero if we are currently in the global binding level. */
311 global_bindings_p (void)
313 return ((force_global || !current_function_decl) ? -1 : 0);
316 /* Enter a new binding level. */
321 struct gnat_binding_level *newlevel = NULL;
323 /* Reuse a struct for this binding level, if there is one. */
324 if (free_binding_level)
326 newlevel = free_binding_level;
327 free_binding_level = free_binding_level->chain;
331 = (struct gnat_binding_level *)
332 ggc_alloc (sizeof (struct gnat_binding_level));
334 /* Use a free BLOCK, if any; otherwise, allocate one. */
335 if (free_block_chain)
337 newlevel->block = free_block_chain;
338 free_block_chain = BLOCK_CHAIN (free_block_chain);
339 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
342 newlevel->block = make_node (BLOCK);
344 /* Point the BLOCK we just made to its parent. */
345 if (current_binding_level)
346 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
348 BLOCK_VARS (newlevel->block) = BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
349 TREE_USED (newlevel->block) = 1;
351 /* Add this level to the front of the chain (stack) of levels that are
353 newlevel->chain = current_binding_level;
354 newlevel->jmpbuf_decl = NULL_TREE;
355 current_binding_level = newlevel;
358 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
359 and point FNDECL to this BLOCK. */
362 set_current_block_context (tree fndecl)
364 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
365 DECL_INITIAL (fndecl) = current_binding_level->block;
368 /* Set the jmpbuf_decl for the current binding level to DECL. */
371 set_block_jmpbuf_decl (tree decl)
373 current_binding_level->jmpbuf_decl = decl;
376 /* Get the jmpbuf_decl, if any, for the current binding level. */
379 get_block_jmpbuf_decl ()
381 return current_binding_level->jmpbuf_decl;
384 /* Exit a binding level. Set any BLOCK into the current code group. */
389 struct gnat_binding_level *level = current_binding_level;
390 tree block = level->block;
392 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
393 BLOCK_SUBBLOCKS (block) = nreverse (BLOCK_SUBBLOCKS (block));
395 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
396 are no variables free the block and merge its subblocks into those of its
397 parent block. Otherwise, add it to the list of its parent. */
398 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
400 else if (BLOCK_VARS (block) == NULL_TREE)
402 BLOCK_SUBBLOCKS (level->chain->block)
403 = chainon (BLOCK_SUBBLOCKS (block),
404 BLOCK_SUBBLOCKS (level->chain->block));
405 BLOCK_CHAIN (block) = free_block_chain;
406 free_block_chain = block;
410 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
411 BLOCK_SUBBLOCKS (level->chain->block) = block;
412 TREE_USED (block) = 1;
413 set_block_for_group (block);
416 /* Free this binding structure. */
417 current_binding_level = level->chain;
418 level->chain = free_binding_level;
419 free_binding_level = level;
423 /* Records a ..._DECL node DECL as belonging to the current lexical scope
424 and uses GNAT_NODE for location information and propagating flags. */
427 gnat_pushdecl (tree decl, Node_Id gnat_node)
429 /* If this decl is public external or at toplevel, there is no context.
430 But PARM_DECLs always go in the level of its function. */
431 if (TREE_CODE (decl) != PARM_DECL
432 && ((DECL_EXTERNAL (decl) && TREE_PUBLIC (decl))
433 || global_bindings_p ()))
434 DECL_CONTEXT (decl) = 0;
437 DECL_CONTEXT (decl) = current_function_decl;
439 /* Functions imported in another function are not really nested. */
440 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_PUBLIC (decl))
441 DECL_NO_STATIC_CHAIN (decl) = 1;
444 TREE_NO_WARNING (decl) = (gnat_node == Empty || Warnings_Off (gnat_node));
446 /* Set the location of DECL and emit a declaration for it. */
447 if (Present (gnat_node))
448 Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
449 add_decl_expr (decl, gnat_node);
451 /* Put the declaration on the list. The list of declarations is in reverse
452 order. The list will be reversed later. Put global variables in the
453 globals list and builtin functions in a dedicated list to speed up
454 further lookups. Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into
455 the list, as they will cause trouble with the debugger and aren't needed
457 if (TREE_CODE (decl) != TYPE_DECL
458 || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
460 if (global_bindings_p ())
462 VEC_safe_push (tree, gc, global_decls, decl);
464 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
465 VEC_safe_push (tree, gc, builtin_decls, decl);
469 TREE_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
470 BLOCK_VARS (current_binding_level->block) = decl;
474 /* For the declaration of a type, set its name if it either is not already
475 set or if the previous type name was not derived from a source name.
476 We'd rather have the type named with a real name and all the pointer
477 types to the same object have the same POINTER_TYPE node. Code in the
478 equivalent function of c-decl.c makes a copy of the type node here, but
479 that may cause us trouble with incomplete types. We make an exception
480 for fat pointer types because the compiler automatically builds them
481 for unconstrained array types and the debugger uses them to represent
482 both these and pointers to these. */
483 if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
485 tree t = TREE_TYPE (decl);
487 if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
489 else if (TYPE_FAT_POINTER_P (t))
491 tree tt = build_variant_type_copy (t);
492 TYPE_NAME (tt) = decl;
493 TREE_USED (tt) = TREE_USED (t);
494 TREE_TYPE (decl) = tt;
495 DECL_ORIGINAL_TYPE (decl) = t;
498 else if (DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
503 /* Propagate the name to all the variants. This is needed for
504 the type qualifiers machinery to work properly. */
506 for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
507 TYPE_NAME (t) = decl;
511 /* Do little here. Set up the standard declarations later after the
512 front end has been run. */
515 gnat_init_decl_processing (void)
517 /* Make the binding_level structure for global names. */
518 current_function_decl = 0;
519 current_binding_level = 0;
520 free_binding_level = 0;
523 build_common_tree_nodes (true, true);
525 /* In Ada, we use a signed type for SIZETYPE. Use the signed type
526 corresponding to the width of Pmode. In most cases when ptr_mode
527 and Pmode differ, C will use the width of ptr_mode for SIZETYPE.
528 But we get far better code using the width of Pmode. */
529 size_type_node = gnat_type_for_mode (Pmode, 0);
530 set_sizetype (size_type_node);
532 /* In Ada, we use an unsigned 8-bit type for the default boolean type. */
533 boolean_type_node = make_unsigned_type (8);
534 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
535 SET_TYPE_RM_MAX_VALUE (boolean_type_node,
536 build_int_cst (boolean_type_node, 1));
537 SET_TYPE_RM_SIZE (boolean_type_node, bitsize_int (1));
539 build_common_tree_nodes_2 (0);
540 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
542 ptr_void_type_node = build_pointer_type (void_type_node);
545 /* Record TYPE as a builtin type for Ada. NAME is the name of the type. */
548 record_builtin_type (const char *name, tree type)
550 tree type_decl = build_decl (TYPE_DECL, get_identifier (name), type);
552 gnat_pushdecl (type_decl, Empty);
554 if (debug_hooks->type_decl)
555 debug_hooks->type_decl (type_decl, false);
558 /* Given a record type RECORD_TYPE and a chain of FIELD_DECL nodes FIELDLIST,
559 finish constructing the record or union type. If REP_LEVEL is zero, this
560 record has no representation clause and so will be entirely laid out here.
561 If REP_LEVEL is one, this record has a representation clause and has been
562 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
563 this record is derived from a parent record and thus inherits its layout;
564 only make a pass on the fields to finalize them. If DO_NOT_FINALIZE is
565 true, the record type is expected to be modified afterwards so it will
566 not be sent to the back-end for finalization. */
569 finish_record_type (tree record_type, tree fieldlist, int rep_level,
570 bool do_not_finalize)
572 enum tree_code code = TREE_CODE (record_type);
573 tree name = TYPE_NAME (record_type);
574 tree ada_size = bitsize_zero_node;
575 tree size = bitsize_zero_node;
576 bool had_size = TYPE_SIZE (record_type) != 0;
577 bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
578 bool had_align = TYPE_ALIGN (record_type) != 0;
581 TYPE_FIELDS (record_type) = fieldlist;
583 /* Always attach the TYPE_STUB_DECL for a record type. It is required to
584 generate debug info and have a parallel type. */
585 if (name && TREE_CODE (name) == TYPE_DECL)
586 name = DECL_NAME (name);
587 TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);
589 /* Globally initialize the record first. If this is a rep'ed record,
590 that just means some initializations; otherwise, layout the record. */
593 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
594 SET_TYPE_MODE (record_type, BLKmode);
597 TYPE_SIZE_UNIT (record_type) = size_zero_node;
599 TYPE_SIZE (record_type) = bitsize_zero_node;
601 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
602 out just like a UNION_TYPE, since the size will be fixed. */
603 else if (code == QUAL_UNION_TYPE)
608 /* Ensure there isn't a size already set. There can be in an error
609 case where there is a rep clause but all fields have errors and
610 no longer have a position. */
611 TYPE_SIZE (record_type) = 0;
612 layout_type (record_type);
615 /* At this point, the position and size of each field is known. It was
616 either set before entry by a rep clause, or by laying out the type above.
618 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
619 to compute the Ada size; the GCC size and alignment (for rep'ed records
620 that are not padding types); and the mode (for rep'ed records). We also
621 clear the DECL_BIT_FIELD indication for the cases we know have not been
622 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
624 if (code == QUAL_UNION_TYPE)
625 fieldlist = nreverse (fieldlist);
627 for (field = fieldlist; field; field = TREE_CHAIN (field))
629 tree type = TREE_TYPE (field);
630 tree pos = bit_position (field);
631 tree this_size = DECL_SIZE (field);
634 if ((TREE_CODE (type) == RECORD_TYPE
635 || TREE_CODE (type) == UNION_TYPE
636 || TREE_CODE (type) == QUAL_UNION_TYPE)
637 && !TYPE_IS_FAT_POINTER_P (type)
638 && !TYPE_CONTAINS_TEMPLATE_P (type)
639 && TYPE_ADA_SIZE (type))
640 this_ada_size = TYPE_ADA_SIZE (type);
642 this_ada_size = this_size;
644 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
645 if (DECL_BIT_FIELD (field)
646 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
648 unsigned int align = TYPE_ALIGN (type);
650 /* In the general case, type alignment is required. */
651 if (value_factor_p (pos, align))
653 /* The enclosing record type must be sufficiently aligned.
654 Otherwise, if no alignment was specified for it and it
655 has been laid out already, bump its alignment to the
656 desired one if this is compatible with its size. */
657 if (TYPE_ALIGN (record_type) >= align)
659 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
660 DECL_BIT_FIELD (field) = 0;
664 && value_factor_p (TYPE_SIZE (record_type), align))
666 TYPE_ALIGN (record_type) = align;
667 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
668 DECL_BIT_FIELD (field) = 0;
672 /* In the non-strict alignment case, only byte alignment is. */
673 if (!STRICT_ALIGNMENT
674 && DECL_BIT_FIELD (field)
675 && value_factor_p (pos, BITS_PER_UNIT))
676 DECL_BIT_FIELD (field) = 0;
679 /* If we still have DECL_BIT_FIELD set at this point, we know that the
680 field is technically not addressable. Except that it can actually
681 be addressed if it is BLKmode and happens to be properly aligned. */
682 if (DECL_BIT_FIELD (field)
683 && !(DECL_MODE (field) == BLKmode
684 && value_factor_p (pos, BITS_PER_UNIT)))
685 DECL_NONADDRESSABLE_P (field) = 1;
687 /* A type must be as aligned as its most aligned field that is not
688 a bit-field. But this is already enforced by layout_type. */
689 if (rep_level > 0 && !DECL_BIT_FIELD (field))
690 TYPE_ALIGN (record_type)
691 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
696 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
697 size = size_binop (MAX_EXPR, size, this_size);
700 case QUAL_UNION_TYPE:
702 = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
703 this_ada_size, ada_size);
704 size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
709 /* Since we know here that all fields are sorted in order of
710 increasing bit position, the size of the record is one
711 higher than the ending bit of the last field processed
712 unless we have a rep clause, since in that case we might
713 have a field outside a QUAL_UNION_TYPE that has a higher ending
714 position. So use a MAX in that case. Also, if this field is a
715 QUAL_UNION_TYPE, we need to take into account the previous size in
716 the case of empty variants. */
718 = merge_sizes (ada_size, pos, this_ada_size,
719 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
721 = merge_sizes (size, pos, this_size,
722 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
730 if (code == QUAL_UNION_TYPE)
731 nreverse (fieldlist);
733 /* If the type is discriminated, it can be used to access all its
734 constrained subtypes, so force structural equality checks. */
735 if (CONTAINS_PLACEHOLDER_P (size))
736 SET_TYPE_STRUCTURAL_EQUALITY (record_type);
740 /* If this is a padding record, we never want to make the size smaller
741 than what was specified in it, if any. */
742 if (TREE_CODE (record_type) == RECORD_TYPE
743 && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
744 size = TYPE_SIZE (record_type);
746 /* Now set any of the values we've just computed that apply. */
747 if (!TYPE_IS_FAT_POINTER_P (record_type)
748 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
749 SET_TYPE_ADA_SIZE (record_type, ada_size);
753 tree size_unit = had_size_unit
754 ? TYPE_SIZE_UNIT (record_type)
756 size_binop (CEIL_DIV_EXPR, size,
758 unsigned int align = TYPE_ALIGN (record_type);
760 TYPE_SIZE (record_type) = variable_size (round_up (size, align));
761 TYPE_SIZE_UNIT (record_type)
762 = variable_size (round_up (size_unit, align / BITS_PER_UNIT));
764 compute_record_mode (record_type);
768 if (!do_not_finalize)
769 rest_of_record_type_compilation (record_type);
772 /* Wrap up compilation of RECORD_TYPE, i.e. most notably output all
773 the debug information associated with it. It need not be invoked
774 directly in most cases since finish_record_type takes care of doing
775 so, unless explicitly requested not to through DO_NOT_FINALIZE. */
778 rest_of_record_type_compilation (tree record_type)
780 tree fieldlist = TYPE_FIELDS (record_type);
782 enum tree_code code = TREE_CODE (record_type);
783 bool var_size = false;
785 for (field = fieldlist; field; field = TREE_CHAIN (field))
787 /* We need to make an XVE/XVU record if any field has variable size,
788 whether or not the record does. For example, if we have a union,
789 it may be that all fields, rounded up to the alignment, have the
790 same size, in which case we'll use that size. But the debug
791 output routines (except Dwarf2) won't be able to output the fields,
792 so we need to make the special record. */
793 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
794 /* If a field has a non-constant qualifier, the record will have
795 variable size too. */
796 || (code == QUAL_UNION_TYPE
797 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
804 /* If this record is of variable size, rename it so that the
805 debugger knows it is and make a new, parallel, record
806 that tells the debugger how the record is laid out. See
807 exp_dbug.ads. But don't do this for records that are padding
808 since they confuse GDB. */
810 && !(TREE_CODE (record_type) == RECORD_TYPE
811 && TYPE_IS_PADDING_P (record_type)))
814 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
815 ? UNION_TYPE : TREE_CODE (record_type));
816 tree orig_name = TYPE_NAME (record_type), new_name;
817 tree last_pos = bitsize_zero_node;
818 tree old_field, prev_old_field = NULL_TREE;
820 if (TREE_CODE (orig_name) == TYPE_DECL)
821 orig_name = DECL_NAME (orig_name);
824 = concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
826 TYPE_NAME (new_record_type) = new_name;
827 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
828 TYPE_STUB_DECL (new_record_type)
829 = create_type_stub_decl (new_name, new_record_type);
830 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
831 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
832 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
833 TYPE_SIZE_UNIT (new_record_type)
834 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
836 add_parallel_type (TYPE_STUB_DECL (record_type), new_record_type);
838 /* Now scan all the fields, replacing each field with a new
839 field corresponding to the new encoding. */
840 for (old_field = TYPE_FIELDS (record_type); old_field;
841 old_field = TREE_CHAIN (old_field))
843 tree field_type = TREE_TYPE (old_field);
844 tree field_name = DECL_NAME (old_field);
846 tree curpos = bit_position (old_field);
848 unsigned int align = 0;
851 /* See how the position was modified from the last position.
853 There are two basic cases we support: a value was added
854 to the last position or the last position was rounded to
855 a boundary and they something was added. Check for the
856 first case first. If not, see if there is any evidence
857 of rounding. If so, round the last position and try
860 If this is a union, the position can be taken as zero. */
862 /* Some computations depend on the shape of the position expression,
863 so strip conversions to make sure it's exposed. */
864 curpos = remove_conversions (curpos, true);
866 if (TREE_CODE (new_record_type) == UNION_TYPE)
867 pos = bitsize_zero_node, align = 0;
869 pos = compute_related_constant (curpos, last_pos);
871 if (!pos && TREE_CODE (curpos) == MULT_EXPR
872 && host_integerp (TREE_OPERAND (curpos, 1), 1))
874 tree offset = TREE_OPERAND (curpos, 0);
875 align = tree_low_cst (TREE_OPERAND (curpos, 1), 1);
877 /* An offset which is a bitwise AND with a negative power of 2
878 means an alignment corresponding to this power of 2. */
879 offset = remove_conversions (offset, true);
880 if (TREE_CODE (offset) == BIT_AND_EXPR
881 && host_integerp (TREE_OPERAND (offset, 1), 0)
882 && tree_int_cst_sgn (TREE_OPERAND (offset, 1)) < 0)
885 = - tree_low_cst (TREE_OPERAND (offset, 1), 0);
886 if (exact_log2 (pow) > 0)
890 pos = compute_related_constant (curpos,
891 round_up (last_pos, align));
893 else if (!pos && TREE_CODE (curpos) == PLUS_EXPR
894 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
895 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
896 && host_integerp (TREE_OPERAND
897 (TREE_OPERAND (curpos, 0), 1),
902 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
903 pos = compute_related_constant (curpos,
904 round_up (last_pos, align));
906 else if (potential_alignment_gap (prev_old_field, old_field,
909 align = TYPE_ALIGN (field_type);
910 pos = compute_related_constant (curpos,
911 round_up (last_pos, align));
914 /* If we can't compute a position, set it to zero.
916 ??? We really should abort here, but it's too much work
917 to get this correct for all cases. */
920 pos = bitsize_zero_node;
922 /* See if this type is variable-sized and make a pointer type
923 and indicate the indirection if so. Beware that the debug
924 back-end may adjust the position computed above according
925 to the alignment of the field type, i.e. the pointer type
926 in this case, if we don't preventively counter that. */
927 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
929 field_type = build_pointer_type (field_type);
930 if (align != 0 && TYPE_ALIGN (field_type) > align)
932 field_type = copy_node (field_type);
933 TYPE_ALIGN (field_type) = align;
938 /* Make a new field name, if necessary. */
939 if (var || align != 0)
944 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
945 align / BITS_PER_UNIT);
947 strcpy (suffix, "XVL");
949 field_name = concat_name (field_name, suffix);
952 new_field = create_field_decl (field_name, field_type,
954 DECL_SIZE (old_field), pos, 0);
955 TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
956 TYPE_FIELDS (new_record_type) = new_field;
958 /* If old_field is a QUAL_UNION_TYPE, take its size as being
959 zero. The only time it's not the last field of the record
960 is when there are other components at fixed positions after
961 it (meaning there was a rep clause for every field) and we
962 want to be able to encode them. */
963 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
964 (TREE_CODE (TREE_TYPE (old_field))
967 : DECL_SIZE (old_field));
968 prev_old_field = old_field;
971 TYPE_FIELDS (new_record_type)
972 = nreverse (TYPE_FIELDS (new_record_type));
974 rest_of_type_decl_compilation (TYPE_STUB_DECL (new_record_type));
977 rest_of_type_decl_compilation (TYPE_STUB_DECL (record_type));
980 /* Append PARALLEL_TYPE on the chain of parallel types for decl. */
983 add_parallel_type (tree decl, tree parallel_type)
987 while (DECL_PARALLEL_TYPE (d))
988 d = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (d));
990 SET_DECL_PARALLEL_TYPE (d, parallel_type);
993 /* Return the parallel type associated to a type, if any. */
996 get_parallel_type (tree type)
998 if (TYPE_STUB_DECL (type))
999 return DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type));
1004 /* Utility function of above to merge LAST_SIZE, the previous size of a record
1005 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
1006 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
1007 replace a value of zero with the old size. If HAS_REP is true, we take the
1008 MAX of the end position of this field with LAST_SIZE. In all other cases,
1009 we use FIRST_BIT plus SIZE. Return an expression for the size. */
1012 merge_sizes (tree last_size, tree first_bit, tree size, bool special,
1015 tree type = TREE_TYPE (last_size);
1018 if (!special || TREE_CODE (size) != COND_EXPR)
1020 new = size_binop (PLUS_EXPR, first_bit, size);
1022 new = size_binop (MAX_EXPR, last_size, new);
1026 new = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
1027 integer_zerop (TREE_OPERAND (size, 1))
1028 ? last_size : merge_sizes (last_size, first_bit,
1029 TREE_OPERAND (size, 1),
1031 integer_zerop (TREE_OPERAND (size, 2))
1032 ? last_size : merge_sizes (last_size, first_bit,
1033 TREE_OPERAND (size, 2),
1036 /* We don't need any NON_VALUE_EXPRs and they can confuse us (especially
1037 when fed through substitute_in_expr) into thinking that a constant
1038 size is not constant. */
1039 while (TREE_CODE (new) == NON_LVALUE_EXPR)
1040 new = TREE_OPERAND (new, 0);
1045 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1046 related by the addition of a constant. Return that constant if so. */
1049 compute_related_constant (tree op0, tree op1)
1051 tree op0_var, op1_var;
1052 tree op0_con = split_plus (op0, &op0_var);
1053 tree op1_con = split_plus (op1, &op1_var);
1054 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1056 if (operand_equal_p (op0_var, op1_var, 0))
1058 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1064 /* Utility function of above to split a tree OP which may be a sum, into a
1065 constant part, which is returned, and a variable part, which is stored
1066 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1070 split_plus (tree in, tree *pvar)
1072 /* Strip NOPS in order to ease the tree traversal and maximize the
1073 potential for constant or plus/minus discovery. We need to be careful
1074 to always return and set *pvar to bitsizetype trees, but it's worth
1078 *pvar = convert (bitsizetype, in);
1080 if (TREE_CODE (in) == INTEGER_CST)
1082 *pvar = bitsize_zero_node;
1083 return convert (bitsizetype, in);
1085 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1087 tree lhs_var, rhs_var;
1088 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1089 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
1091 if (lhs_var == TREE_OPERAND (in, 0)
1092 && rhs_var == TREE_OPERAND (in, 1))
1093 return bitsize_zero_node;
1095 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1096 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
1099 return bitsize_zero_node;
1102 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1103 subprogram. If it is void_type_node, then we are dealing with a procedure,
1104 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1105 PARM_DECL nodes that are the subprogram arguments. CICO_LIST is the
1106 copy-in/copy-out list to be stored into TYPE_CICO_LIST.
1107 RETURNS_UNCONSTRAINED is true if the function returns an unconstrained
1108 object. RETURNS_BY_REF is true if the function returns by reference.
1109 RETURNS_BY_TARGET_PTR is true if the function is to be passed (as its
1110 first parameter) the address of the place to copy its result. */
1113 create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
1114 bool returns_unconstrained, bool returns_by_ref,
1115 bool returns_by_target_ptr)
1117 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1118 the subprogram formal parameters. This list is generated by traversing the
1119 input list of PARM_DECL nodes. */
1120 tree param_type_list = NULL;
1124 for (param_decl = param_decl_list; param_decl;
1125 param_decl = TREE_CHAIN (param_decl))
1126 param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
1129 /* The list of the function parameter types has to be terminated by the void
1130 type to signal to the back-end that we are not dealing with a variable
1131 parameter subprogram, but that the subprogram has a fixed number of
1133 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1135 /* The list of argument types has been created in reverse
1137 param_type_list = nreverse (param_type_list);
1139 type = build_function_type (return_type, param_type_list);
1141 /* TYPE may have been shared since GCC hashes types. If it has a CICO_LIST
1142 or the new type should, make a copy of TYPE. Likewise for
1143 RETURNS_UNCONSTRAINED and RETURNS_BY_REF. */
1144 if (TYPE_CI_CO_LIST (type) || cico_list
1145 || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
1146 || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref
1147 || TYPE_RETURNS_BY_TARGET_PTR_P (type) != returns_by_target_ptr)
1148 type = copy_type (type);
1150 TYPE_CI_CO_LIST (type) = cico_list;
1151 TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
1152 TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
1153 TYPE_RETURNS_BY_TARGET_PTR_P (type) = returns_by_target_ptr;
1157 /* Return a copy of TYPE but safe to modify in any way. */
1160 copy_type (tree type)
1162 tree new = copy_node (type);
1164 /* copy_node clears this field instead of copying it, because it is
1165 aliased with TREE_CHAIN. */
1166 TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type);
1168 TYPE_POINTER_TO (new) = 0;
1169 TYPE_REFERENCE_TO (new) = 0;
1170 TYPE_MAIN_VARIANT (new) = new;
1171 TYPE_NEXT_VARIANT (new) = 0;
1176 /* Return a subtype of sizetype with range MIN to MAX and whose
1177 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
1178 of the associated TYPE_DECL. */
1181 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
1183 /* First build a type for the desired range. */
1184 tree type = build_index_2_type (min, max);
1186 /* If this type has the TYPE_INDEX_TYPE we want, return it. */
1187 if (TYPE_INDEX_TYPE (type) == index)
1190 /* Otherwise, if TYPE_INDEX_TYPE is set, make a copy. Note that we have
1191 no way of sharing these types, but that's only a small hole. */
1192 if (TYPE_INDEX_TYPE (type))
1193 type = copy_type (type);
1195 SET_TYPE_INDEX_TYPE (type, index);
1196 create_type_decl (NULL_TREE, type, NULL, true, false, gnat_node);
1201 /* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
1202 sizetype is used. */
1205 create_range_type (tree type, tree min, tree max)
1209 if (type == NULL_TREE)
1212 /* First build a type with the base range. */
1214 = build_range_type (type, TYPE_MIN_VALUE (type), TYPE_MAX_VALUE (type));
1216 min = convert (type, min);
1217 max = convert (type, max);
1219 /* If this type has the TYPE_RM_{MIN,MAX}_VALUE we want, return it. */
1220 if (TYPE_RM_MIN_VALUE (range_type)
1221 && TYPE_RM_MAX_VALUE (range_type)
1222 && operand_equal_p (TYPE_RM_MIN_VALUE (range_type), min, 0)
1223 && operand_equal_p (TYPE_RM_MAX_VALUE (range_type), max, 0))
1226 /* Otherwise, if TYPE_RM_{MIN,MAX}_VALUE is set, make a copy. */
1227 if (TYPE_RM_MIN_VALUE (range_type) || TYPE_RM_MAX_VALUE (range_type))
1228 range_type = copy_type (range_type);
1230 /* Then set the actual range. */
1231 SET_TYPE_RM_MIN_VALUE (range_type, min);
1232 SET_TYPE_RM_MAX_VALUE (range_type, max);
1237 /* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of a type.
1238 TYPE_NAME gives the name of the type and TYPE is a ..._TYPE node giving
1242 create_type_stub_decl (tree type_name, tree type)
1244 /* Using a named TYPE_DECL ensures that a type name marker is emitted in
1245 STABS while setting DECL_ARTIFICIAL ensures that no DW_TAG_typedef is
1246 emitted in DWARF. */
1247 tree type_decl = build_decl (TYPE_DECL, type_name, type);
1248 DECL_ARTIFICIAL (type_decl) = 1;
1252 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type and TYPE
1253 is a ..._TYPE node giving its data type. ARTIFICIAL_P is true if this
1254 is a declaration that was generated by the compiler. DEBUG_INFO_P is
1255 true if we need to write debug information about this type. GNAT_NODE
1256 is used for the position of the decl. */
1259 create_type_decl (tree type_name, tree type, struct attrib *attr_list,
1260 bool artificial_p, bool debug_info_p, Node_Id gnat_node)
1262 enum tree_code code = TREE_CODE (type);
1263 bool named = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
1266 /* Only the builtin TYPE_STUB_DECL should be used for dummy types. */
1267 gcc_assert (!TYPE_IS_DUMMY_P (type));
1269 /* If the type hasn't been named yet, we're naming it; preserve an existing
1270 TYPE_STUB_DECL that has been attached to it for some purpose. */
1271 if (!named && TYPE_STUB_DECL (type))
1273 type_decl = TYPE_STUB_DECL (type);
1274 DECL_NAME (type_decl) = type_name;
1277 type_decl = build_decl (TYPE_DECL, type_name, type);
1279 DECL_ARTIFICIAL (type_decl) = artificial_p;
1280 gnat_pushdecl (type_decl, gnat_node);
1281 process_attributes (type_decl, attr_list);
1283 /* If we're naming the type, equate the TYPE_STUB_DECL to the name.
1284 This causes the name to be also viewed as a "tag" by the debug
1285 back-end, with the advantage that no DW_TAG_typedef is emitted
1286 for artificial "tagged" types in DWARF. */
1288 TYPE_STUB_DECL (type) = type_decl;
1290 /* Pass the type declaration to the debug back-end unless this is an
1291 UNCONSTRAINED_ARRAY_TYPE that the back-end does not support, or a
1292 type for which debugging information was not requested, or else an
1293 ENUMERAL_TYPE or RECORD_TYPE (except for fat pointers) which are
1294 handled separately. And do not pass dummy types either. */
1295 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
1296 DECL_IGNORED_P (type_decl) = 1;
1297 else if (code != ENUMERAL_TYPE
1298 && (code != RECORD_TYPE || TYPE_IS_FAT_POINTER_P (type))
1299 && !((code == POINTER_TYPE || code == REFERENCE_TYPE)
1300 && TYPE_IS_DUMMY_P (TREE_TYPE (type)))
1301 && !(code == RECORD_TYPE
1303 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))))))
1304 rest_of_type_decl_compilation (type_decl);
1309 /* Return a VAR_DECL or CONST_DECL node.
1311 VAR_NAME gives the name of the variable. ASM_NAME is its assembler name
1312 (if provided). TYPE is its data type (a GCC ..._TYPE node). VAR_INIT is
1313 the GCC tree for an optional initial expression; NULL_TREE if none.
1315 CONST_FLAG is true if this variable is constant, in which case we might
1316 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
1318 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
1319 definition to be made visible outside of the current compilation unit, for
1320 instance variable definitions in a package specification.
1322 EXTERN_FLAG is true when processing an external variable declaration (as
1323 opposed to a definition: no storage is to be allocated for the variable).
1325 STATIC_FLAG is only relevant when not at top level. In that case
1326 it indicates whether to always allocate storage to the variable.
1328 GNAT_NODE is used for the position of the decl. */
1331 create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
1332 bool const_flag, bool public_flag, bool extern_flag,
1333 bool static_flag, bool const_decl_allowed_p,
1334 struct attrib *attr_list, Node_Id gnat_node)
1338 && gnat_types_compatible_p (type, TREE_TYPE (var_init))
1339 && (global_bindings_p () || static_flag
1340 ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init)) != 0
1341 : TREE_CONSTANT (var_init)));
1343 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
1344 case the initializer may be used in-lieu of the DECL node (as done in
1345 Identifier_to_gnu). This is useful to prevent the need of elaboration
1346 code when an identifier for which such a decl is made is in turn used as
1347 an initializer. We used to rely on CONST vs VAR_DECL for this purpose,
1348 but extra constraints apply to this choice (see below) and are not
1349 relevant to the distinction we wish to make. */
1350 bool constant_p = const_flag && init_const;
1352 /* The actual DECL node. CONST_DECL was initially intended for enumerals
1353 and may be used for scalars in general but not for aggregates. */
1355 = build_decl ((constant_p && const_decl_allowed_p
1356 && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
1359 /* If this is external, throw away any initializations (they will be done
1360 elsewhere) unless this is a constant for which we would like to remain
1361 able to get the initializer. If we are defining a global here, leave a
1362 constant initialization and save any variable elaborations for the
1363 elaboration routine. If we are just annotating types, throw away the
1364 initialization if it isn't a constant. */
1365 if ((extern_flag && !constant_p)
1366 || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
1367 var_init = NULL_TREE;
1369 /* At the global level, an initializer requiring code to be generated
1370 produces elaboration statements. Check that such statements are allowed,
1371 that is, not violating a No_Elaboration_Code restriction. */
1372 if (global_bindings_p () && var_init != 0 && ! init_const)
1373 Check_Elaboration_Code_Allowed (gnat_node);
1375 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
1376 try to fiddle with DECL_COMMON. However, on platforms that don't
1377 support global BSS sections, uninitialized global variables would
1378 go in DATA instead, thus increasing the size of the executable. */
1380 && TREE_CODE (var_decl) == VAR_DECL
1381 && !have_global_bss_p ())
1382 DECL_COMMON (var_decl) = 1;
1383 DECL_INITIAL (var_decl) = var_init;
1384 TREE_READONLY (var_decl) = const_flag;
1385 DECL_EXTERNAL (var_decl) = extern_flag;
1386 TREE_PUBLIC (var_decl) = public_flag || extern_flag;
1387 TREE_CONSTANT (var_decl) = constant_p;
1388 TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
1389 = TYPE_VOLATILE (type);
1391 /* If it's public and not external, always allocate storage for it.
1392 At the global binding level we need to allocate static storage for the
1393 variable if and only if it's not external. If we are not at the top level
1394 we allocate automatic storage unless requested not to. */
1395 TREE_STATIC (var_decl)
1396 = !extern_flag && (public_flag || static_flag || global_bindings_p ());
1398 /* For an external constant whose initializer is not absolute, do not emit
1399 debug info. In DWARF this would mean a global relocation in a read-only
1400 section which runs afoul of the PE-COFF runtime relocation mechanism. */
1403 && initializer_constant_valid_p (var_init, TREE_TYPE (var_init))
1404 != null_pointer_node)
1405 DECL_IGNORED_P (var_decl) = 1;
1407 if (asm_name && VAR_OR_FUNCTION_DECL_P (var_decl))
1408 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
1410 process_attributes (var_decl, attr_list);
1412 /* Add this decl to the current binding level. */
1413 gnat_pushdecl (var_decl, gnat_node);
1415 if (TREE_SIDE_EFFECTS (var_decl))
1416 TREE_ADDRESSABLE (var_decl) = 1;
1418 if (TREE_CODE (var_decl) != CONST_DECL)
1420 if (global_bindings_p ())
1421 rest_of_decl_compilation (var_decl, true, 0);
1424 expand_decl (var_decl);
1429 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
1432 aggregate_type_contains_array_p (tree type)
1434 switch (TREE_CODE (type))
1438 case QUAL_UNION_TYPE:
1441 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1442 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
1443 && aggregate_type_contains_array_p (TREE_TYPE (field)))
1456 /* Return a FIELD_DECL node. FIELD_NAME the field name, FIELD_TYPE is its
1457 type, and RECORD_TYPE is the type of the parent. PACKED is nonzero if
1458 this field is in a record type with a "pragma pack". If SIZE is nonzero
1459 it is the specified size for this field. If POS is nonzero, it is the bit
1460 position. If ADDRESSABLE is nonzero, it means we are allowed to take
1461 the address of this field for aliasing purposes. If it is negative, we
1462 should not make a bitfield, which is used by make_aligning_type. */
1465 create_field_decl (tree field_name, tree field_type, tree record_type,
1466 int packed, tree size, tree pos, int addressable)
1468 tree field_decl = build_decl (FIELD_DECL, field_name, field_type);
1470 DECL_CONTEXT (field_decl) = record_type;
1471 TREE_READONLY (field_decl) = TYPE_READONLY (field_type);
1473 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
1474 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
1475 Likewise for an aggregate without specified position that contains an
1476 array, because in this case slices of variable length of this array
1477 must be handled by GCC and variable-sized objects need to be aligned
1478 to at least a byte boundary. */
1479 if (packed && (TYPE_MODE (field_type) == BLKmode
1481 && AGGREGATE_TYPE_P (field_type)
1482 && aggregate_type_contains_array_p (field_type))))
1483 DECL_ALIGN (field_decl) = BITS_PER_UNIT;
1485 /* If a size is specified, use it. Otherwise, if the record type is packed
1486 compute a size to use, which may differ from the object's natural size.
1487 We always set a size in this case to trigger the checks for bitfield
1488 creation below, which is typically required when no position has been
1491 size = convert (bitsizetype, size);
1492 else if (packed == 1)
1494 size = rm_size (field_type);
1496 /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
1498 if (TREE_CODE (size) == INTEGER_CST
1499 && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
1500 size = round_up (size, BITS_PER_UNIT);
1503 /* If we may, according to ADDRESSABLE, make a bitfield if a size is
1504 specified for two reasons: first if the size differs from the natural
1505 size. Second, if the alignment is insufficient. There are a number of
1506 ways the latter can be true.
1508 We never make a bitfield if the type of the field has a nonconstant size,
1509 because no such entity requiring bitfield operations should reach here.
1511 We do *preventively* make a bitfield when there might be the need for it
1512 but we don't have all the necessary information to decide, as is the case
1513 of a field with no specified position in a packed record.
1515 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
1516 in layout_decl or finish_record_type to clear the bit_field indication if
1517 it is in fact not needed. */
1518 if (addressable >= 0
1520 && TREE_CODE (size) == INTEGER_CST
1521 && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
1522 && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
1523 || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
1525 || (TYPE_ALIGN (record_type) != 0
1526 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
1528 DECL_BIT_FIELD (field_decl) = 1;
1529 DECL_SIZE (field_decl) = size;
1530 if (!packed && !pos)
1532 if (TYPE_ALIGN (record_type) != 0
1533 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))
1534 DECL_ALIGN (field_decl) = TYPE_ALIGN (record_type);
1536 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
1540 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
1542 /* Bump the alignment if need be, either for bitfield/packing purposes or
1543 to satisfy the type requirements if no such consideration applies. When
1544 we get the alignment from the type, indicate if this is from an explicit
1545 user request, which prevents stor-layout from lowering it later on. */
1547 unsigned int bit_align
1548 = (DECL_BIT_FIELD (field_decl) ? 1
1549 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT : 0);
1551 if (bit_align > DECL_ALIGN (field_decl))
1552 DECL_ALIGN (field_decl) = bit_align;
1553 else if (!bit_align && TYPE_ALIGN (field_type) > DECL_ALIGN (field_decl))
1555 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
1556 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (field_type);
1562 /* We need to pass in the alignment the DECL is known to have.
1563 This is the lowest-order bit set in POS, but no more than
1564 the alignment of the record, if one is specified. Note
1565 that an alignment of 0 is taken as infinite. */
1566 unsigned int known_align;
1568 if (host_integerp (pos, 1))
1569 known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
1571 known_align = BITS_PER_UNIT;
1573 if (TYPE_ALIGN (record_type)
1574 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
1575 known_align = TYPE_ALIGN (record_type);
1577 layout_decl (field_decl, known_align);
1578 SET_DECL_OFFSET_ALIGN (field_decl,
1579 host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
1581 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
1582 &DECL_FIELD_BIT_OFFSET (field_decl),
1583 DECL_OFFSET_ALIGN (field_decl), pos);
1586 /* In addition to what our caller says, claim the field is addressable if we
1587 know that its type is not suitable.
1589 The field may also be "technically" nonaddressable, meaning that even if
1590 we attempt to take the field's address we will actually get the address
1591 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
1592 value we have at this point is not accurate enough, so we don't account
1593 for this here and let finish_record_type decide. */
1594 if (!addressable && !type_for_nonaliased_component_p (field_type))
1597 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
1602 /* Return a PARM_DECL node. PARAM_NAME is the name of the parameter and
1603 PARAM_TYPE is its type. READONLY is true if the parameter is readonly
1604 (either an In parameter or an address of a pass-by-ref parameter). */
1607 create_param_decl (tree param_name, tree param_type, bool readonly)
1609 tree param_decl = build_decl (PARM_DECL, param_name, param_type);
1611 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
1612 can lead to various ABI violations. */
1613 if (targetm.calls.promote_prototypes (NULL_TREE)
1614 && INTEGRAL_TYPE_P (param_type)
1615 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
1617 /* We have to be careful about biased types here. Make a subtype
1618 of integer_type_node with the proper biasing. */
1619 if (TREE_CODE (param_type) == INTEGER_TYPE
1620 && TYPE_BIASED_REPRESENTATION_P (param_type))
1623 = make_unsigned_type (TYPE_PRECISION (integer_type_node));
1624 TREE_TYPE (subtype) = integer_type_node;
1625 TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
1626 SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (param_type));
1627 SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (param_type));
1628 param_type = subtype;
1631 param_type = integer_type_node;
1634 DECL_ARG_TYPE (param_decl) = param_type;
1635 TREE_READONLY (param_decl) = readonly;
1639 /* Given a DECL and ATTR_LIST, process the listed attributes. */
1642 process_attributes (tree decl, struct attrib *attr_list)
1644 for (; attr_list; attr_list = attr_list->next)
1645 switch (attr_list->type)
1647 case ATTR_MACHINE_ATTRIBUTE:
1648 decl_attributes (&decl, tree_cons (attr_list->name, attr_list->args,
1650 ATTR_FLAG_TYPE_IN_PLACE);
1653 case ATTR_LINK_ALIAS:
1654 if (! DECL_EXTERNAL (decl))
1656 TREE_STATIC (decl) = 1;
1657 assemble_alias (decl, attr_list->name);
1661 case ATTR_WEAK_EXTERNAL:
1663 declare_weak (decl);
1665 post_error ("?weak declarations not supported on this target",
1666 attr_list->error_point);
1669 case ATTR_LINK_SECTION:
1670 if (targetm.have_named_sections)
1672 DECL_SECTION_NAME (decl)
1673 = build_string (IDENTIFIER_LENGTH (attr_list->name),
1674 IDENTIFIER_POINTER (attr_list->name));
1675 DECL_COMMON (decl) = 0;
1678 post_error ("?section attributes are not supported for this target",
1679 attr_list->error_point);
1682 case ATTR_LINK_CONSTRUCTOR:
1683 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1684 TREE_USED (decl) = 1;
1687 case ATTR_LINK_DESTRUCTOR:
1688 DECL_STATIC_DESTRUCTOR (decl) = 1;
1689 TREE_USED (decl) = 1;
1692 case ATTR_THREAD_LOCAL_STORAGE:
1693 DECL_TLS_MODEL (decl) = decl_default_tls_model (decl);
1694 DECL_COMMON (decl) = 0;
1699 /* Record DECL as a global renaming pointer. */
1702 record_global_renaming_pointer (tree decl)
1704 gcc_assert (DECL_RENAMED_OBJECT (decl));
1705 VEC_safe_push (tree, gc, global_renaming_pointers, decl);
1708 /* Invalidate the global renaming pointers. */
1711 invalidate_global_renaming_pointers (void)
1716 for (i = 0; VEC_iterate(tree, global_renaming_pointers, i, iter); i++)
1717 SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);
1719 VEC_free (tree, gc, global_renaming_pointers);
1722 /* Return true if VALUE is a known to be a multiple of FACTOR, which must be
1726 value_factor_p (tree value, HOST_WIDE_INT factor)
1728 if (host_integerp (value, 1))
1729 return tree_low_cst (value, 1) % factor == 0;
1731 if (TREE_CODE (value) == MULT_EXPR)
1732 return (value_factor_p (TREE_OPERAND (value, 0), factor)
1733 || value_factor_p (TREE_OPERAND (value, 1), factor));
1738 /* Given 2 consecutive field decls PREV_FIELD and CURR_FIELD, return true
1739 unless we can prove these 2 fields are laid out in such a way that no gap
1740 exist between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
1741 is the distance in bits between the end of PREV_FIELD and the starting
1742 position of CURR_FIELD. It is ignored if null. */
1745 potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
1747 /* If this is the first field of the record, there cannot be any gap */
1751 /* If the previous field is a union type, then return False: The only
1752 time when such a field is not the last field of the record is when
1753 there are other components at fixed positions after it (meaning there
1754 was a rep clause for every field), in which case we don't want the
1755 alignment constraint to override them. */
1756 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
1759 /* If the distance between the end of prev_field and the beginning of
1760 curr_field is constant, then there is a gap if the value of this
1761 constant is not null. */
1762 if (offset && host_integerp (offset, 1))
1763 return !integer_zerop (offset);
1765 /* If the size and position of the previous field are constant,
1766 then check the sum of this size and position. There will be a gap
1767 iff it is not multiple of the current field alignment. */
1768 if (host_integerp (DECL_SIZE (prev_field), 1)
1769 && host_integerp (bit_position (prev_field), 1))
1770 return ((tree_low_cst (bit_position (prev_field), 1)
1771 + tree_low_cst (DECL_SIZE (prev_field), 1))
1772 % DECL_ALIGN (curr_field) != 0);
1774 /* If both the position and size of the previous field are multiples
1775 of the current field alignment, there cannot be any gap. */
1776 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
1777 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
1780 /* Fallback, return that there may be a potential gap */
1784 /* Returns a LABEL_DECL node for LABEL_NAME. */
1787 create_label_decl (tree label_name)
1789 tree label_decl = build_decl (LABEL_DECL, label_name, void_type_node);
1791 DECL_CONTEXT (label_decl) = current_function_decl;
1792 DECL_MODE (label_decl) = VOIDmode;
1793 DECL_SOURCE_LOCATION (label_decl) = input_location;
1798 /* Returns a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
1799 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1800 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1801 PARM_DECL nodes chained through the TREE_CHAIN field).
1803 INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
1804 appropriate fields in the FUNCTION_DECL. GNAT_NODE gives the location. */
1807 create_subprog_decl (tree subprog_name, tree asm_name,
1808 tree subprog_type, tree param_decl_list, bool inline_flag,
1809 bool public_flag, bool extern_flag,
1810 struct attrib *attr_list, Node_Id gnat_node)
1812 tree return_type = TREE_TYPE (subprog_type);
1813 tree subprog_decl = build_decl (FUNCTION_DECL, subprog_name, subprog_type);
1815 /* If this is a non-inline function nested inside an inlined external
1816 function, we cannot honor both requests without cloning the nested
1817 function in the current unit since it is private to the other unit.
1818 We could inline the nested function as well but it's probably better
1819 to err on the side of too little inlining. */
1821 && current_function_decl
1822 && DECL_DECLARED_INLINE_P (current_function_decl)
1823 && DECL_EXTERNAL (current_function_decl))
1824 DECL_DECLARED_INLINE_P (current_function_decl) = 0;
1826 DECL_EXTERNAL (subprog_decl) = extern_flag;
1827 TREE_PUBLIC (subprog_decl) = public_flag;
1828 TREE_STATIC (subprog_decl) = 1;
1829 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
1830 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
1831 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
1832 DECL_DECLARED_INLINE_P (subprog_decl) = inline_flag;
1833 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
1834 DECL_RESULT (subprog_decl) = build_decl (RESULT_DECL, 0, return_type);
1835 DECL_ARTIFICIAL (DECL_RESULT (subprog_decl)) = 1;
1836 DECL_IGNORED_P (DECL_RESULT (subprog_decl)) = 1;
1838 /* TREE_ADDRESSABLE is set on the result type to request the use of the
1839 target by-reference return mechanism. This is not supported all the
1840 way down to RTL expansion with GCC 4, which ICEs on temporary creation
1841 attempts with such a type and expects DECL_BY_REFERENCE to be set on
1842 the RESULT_DECL instead - see gnat_genericize for more details. */
1843 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (subprog_decl))))
1845 tree result_decl = DECL_RESULT (subprog_decl);
1847 TREE_ADDRESSABLE (TREE_TYPE (result_decl)) = 0;
1848 DECL_BY_REFERENCE (result_decl) = 1;
1853 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
1855 /* The expand_main_function circuitry expects "main_identifier_node" to
1856 designate the DECL_NAME of the 'main' entry point, in turn expected
1857 to be declared as the "main" function literally by default. Ada
1858 program entry points are typically declared with a different name
1859 within the binder generated file, exported as 'main' to satisfy the
1860 system expectations. Redirect main_identifier_node in this case. */
1861 if (asm_name == main_identifier_node)
1862 main_identifier_node = DECL_NAME (subprog_decl);
1865 process_attributes (subprog_decl, attr_list);
1867 /* Add this decl to the current binding level. */
1868 gnat_pushdecl (subprog_decl, gnat_node);
1870 /* Output the assembler code and/or RTL for the declaration. */
1871 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
1873 return subprog_decl;
1876 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
1877 body. This routine needs to be invoked before processing the declarations
1878 appearing in the subprogram. */
1881 begin_subprog_body (tree subprog_decl)
1885 current_function_decl = subprog_decl;
1886 announce_function (subprog_decl);
1888 /* Enter a new binding level and show that all the parameters belong to
1891 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
1892 param_decl = TREE_CHAIN (param_decl))
1893 DECL_CONTEXT (param_decl) = subprog_decl;
1895 make_decl_rtl (subprog_decl);
1897 /* We handle pending sizes via the elaboration of types, so we don't need to
1898 save them. This causes them to be marked as part of the outer function
1899 and then discarded. */
1900 get_pending_sizes ();
1904 /* Helper for the genericization callback. Return a dereference of VAL
1905 if it is of a reference type. */
1908 convert_from_reference (tree val)
1910 tree value_type, ref;
1912 if (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE)
1915 value_type = TREE_TYPE (TREE_TYPE (val));
1916 ref = build1 (INDIRECT_REF, value_type, val);
1918 /* See if what we reference is CONST or VOLATILE, which requires
1919 looking into array types to get to the component type. */
1921 while (TREE_CODE (value_type) == ARRAY_TYPE)
1922 value_type = TREE_TYPE (value_type);
1925 = (TYPE_QUALS (value_type) & TYPE_QUAL_CONST);
1926 TREE_THIS_VOLATILE (ref)
1927 = (TYPE_QUALS (value_type) & TYPE_QUAL_VOLATILE);
1929 TREE_SIDE_EFFECTS (ref)
1930 = (TREE_THIS_VOLATILE (ref) || TREE_SIDE_EFFECTS (val));
1935 /* Helper for the genericization callback. Returns true if T denotes
1936 a RESULT_DECL with DECL_BY_REFERENCE set. */
1939 is_byref_result (tree t)
1941 return (TREE_CODE (t) == RESULT_DECL && DECL_BY_REFERENCE (t));
1945 /* Tree walking callback for gnat_genericize. Currently ...
1947 o Adjust references to the function's DECL_RESULT if it is marked
1948 DECL_BY_REFERENCE and so has had its type turned into a reference
1949 type at the end of the function compilation. */
1952 gnat_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
1954 /* This implementation is modeled after what the C++ front-end is
1955 doing, basis of the downstream passes behavior. */
1957 tree stmt = *stmt_p;
1958 struct pointer_set_t *p_set = (struct pointer_set_t*) data;
1960 /* If we have a direct mention of the result decl, dereference. */
1961 if (is_byref_result (stmt))
1963 *stmt_p = convert_from_reference (stmt);
1968 /* Otherwise, no need to walk the same tree twice. */
1969 if (pointer_set_contains (p_set, stmt))
1975 /* If we are taking the address of what now is a reference, just get the
1977 if (TREE_CODE (stmt) == ADDR_EXPR
1978 && is_byref_result (TREE_OPERAND (stmt, 0)))
1980 *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
1984 /* Don't dereference an by-reference RESULT_DECL inside a RETURN_EXPR. */
1985 else if (TREE_CODE (stmt) == RETURN_EXPR
1986 && TREE_OPERAND (stmt, 0)
1987 && is_byref_result (TREE_OPERAND (stmt, 0)))
1990 /* Don't look inside trees that cannot embed references of interest. */
1991 else if (IS_TYPE_OR_DECL_P (stmt))
1994 pointer_set_insert (p_set, *stmt_p);
1999 /* Perform lowering of Ada trees to GENERIC. In particular:
2001 o Turn a DECL_BY_REFERENCE RESULT_DECL into a real by-reference decl
2002 and adjust all the references to this decl accordingly. */
2005 gnat_genericize (tree fndecl)
2007 /* Prior to GCC 4, an explicit By_Reference result mechanism for a function
2008 was handled by simply setting TREE_ADDRESSABLE on the result type.
2009 Everything required to actually pass by invisible ref using the target
2010 mechanism (e.g. extra parameter) was handled at RTL expansion time.
2012 This doesn't work with GCC 4 any more for several reasons. First, the
2013 gimplification process might need the creation of temporaries of this
2014 type, and the gimplifier ICEs on such attempts. Second, the middle-end
2015 now relies on a different attribute for such cases (DECL_BY_REFERENCE on
2016 RESULT/PARM_DECLs), and expects the user invisible by-reference-ness to
2017 be explicitly accounted for by the front-end in the function body.
2019 We achieve the complete transformation in two steps:
2021 1/ create_subprog_decl performs early attribute tweaks: it clears
2022 TREE_ADDRESSABLE from the result type and sets DECL_BY_REFERENCE on
2023 the result decl. The former ensures that the bit isn't set in the GCC
2024 tree saved for the function, so prevents ICEs on temporary creation.
2025 The latter we use here to trigger the rest of the processing.
2027 2/ This function performs the type transformation on the result decl
2028 and adjusts all the references to this decl from the function body
2031 Clearing TREE_ADDRESSABLE from the type differs from the C++ front-end
2032 strategy, which escapes the gimplifier temporary creation issues by
2033 creating it's own temporaries using TARGET_EXPR nodes. Our way relies
2034 on simple specific support code in aggregate_value_p to look at the
2035 target function result decl explicitly. */
2037 struct pointer_set_t *p_set;
2038 tree decl_result = DECL_RESULT (fndecl);
2040 if (!DECL_BY_REFERENCE (decl_result))
2043 /* Make the DECL_RESULT explicitly by-reference and adjust all the
2044 occurrences in the function body using the common tree-walking facility.
2045 We want to see every occurrence of the result decl to adjust the
2046 referencing tree, so need to use our own pointer set to control which
2047 trees should be visited again or not. */
2049 p_set = pointer_set_create ();
2051 TREE_TYPE (decl_result) = build_reference_type (TREE_TYPE (decl_result));
2052 TREE_ADDRESSABLE (decl_result) = 0;
2053 relayout_decl (decl_result);
2055 walk_tree (&DECL_SAVED_TREE (fndecl), gnat_genericize_r, p_set, NULL);
2057 pointer_set_destroy (p_set);
2060 /* Finish the definition of the current subprogram BODY and compile it all the
2061 way to assembler language output. ELAB_P tells if this is called for an
2062 elaboration routine, to be entirely discarded if empty. */
2065 end_subprog_body (tree body, bool elab_p)
2067 tree fndecl = current_function_decl;
2069 /* Mark the BLOCK for this level as being for this function and pop the
2070 level. Since the vars in it are the parameters, clear them. */
2071 BLOCK_VARS (current_binding_level->block) = 0;
2072 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
2073 DECL_INITIAL (fndecl) = current_binding_level->block;
2076 /* We handle pending sizes via the elaboration of types, so we don't
2077 need to save them. */
2078 get_pending_sizes ();
2080 /* Mark the RESULT_DECL as being in this subprogram. */
2081 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
2083 DECL_SAVED_TREE (fndecl) = body;
2085 current_function_decl = DECL_CONTEXT (fndecl);
2088 /* We cannot track the location of errors past this point. */
2089 error_gnat_node = Empty;
2091 /* If we're only annotating types, don't actually compile this function. */
2092 if (type_annotate_only)
2095 /* Perform the required pre-gimplification transformations on the tree. */
2096 gnat_genericize (fndecl);
2098 /* We do different things for nested and non-nested functions.
2099 ??? This should be in cgraph. */
2100 if (!DECL_CONTEXT (fndecl))
2102 gnat_gimplify_function (fndecl);
2104 /* If this is an empty elaboration proc, just discard the node.
2105 Otherwise, compile further. */
2106 if (elab_p && empty_body_p (gimple_body (fndecl)))
2107 cgraph_remove_node (cgraph_node (fndecl));
2109 cgraph_finalize_function (fndecl, false);
2112 /* Register this function with cgraph just far enough to get it
2113 added to our parent's nested function list. */
2114 (void) cgraph_node (fndecl);
2117 /* Convert FNDECL's code to GIMPLE and handle any nested functions. */
2120 gnat_gimplify_function (tree fndecl)
2122 struct cgraph_node *cgn;
2124 dump_function (TDI_original, fndecl);
2125 gimplify_function_tree (fndecl);
2126 dump_function (TDI_generic, fndecl);
2128 /* Convert all nested functions to GIMPLE now. We do things in this order
2129 so that items like VLA sizes are expanded properly in the context of the
2130 correct function. */
2131 cgn = cgraph_node (fndecl);
2132 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
2133 gnat_gimplify_function (cgn->decl);
2137 gnat_builtin_function (tree decl)
2139 gnat_pushdecl (decl, Empty);
2143 /* Return an integer type with the number of bits of precision given by
2144 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
2145 it is a signed type. */
2148 gnat_type_for_size (unsigned precision, int unsignedp)
2153 if (precision <= 2 * MAX_BITS_PER_WORD
2154 && signed_and_unsigned_types[precision][unsignedp])
2155 return signed_and_unsigned_types[precision][unsignedp];
2158 t = make_unsigned_type (precision);
2160 t = make_signed_type (precision);
2162 if (precision <= 2 * MAX_BITS_PER_WORD)
2163 signed_and_unsigned_types[precision][unsignedp] = t;
2167 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
2168 TYPE_NAME (t) = get_identifier (type_name);
2174 /* Likewise for floating-point types. */
2177 float_type_for_precision (int precision, enum machine_mode mode)
2182 if (float_types[(int) mode])
2183 return float_types[(int) mode];
2185 float_types[(int) mode] = t = make_node (REAL_TYPE);
2186 TYPE_PRECISION (t) = precision;
2189 gcc_assert (TYPE_MODE (t) == mode);
2192 sprintf (type_name, "FLOAT_%d", precision);
2193 TYPE_NAME (t) = get_identifier (type_name);
2199 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
2200 an unsigned type; otherwise a signed type is returned. */
2203 gnat_type_for_mode (enum machine_mode mode, int unsignedp)
2205 if (mode == BLKmode)
2207 else if (mode == VOIDmode)
2208 return void_type_node;
2209 else if (COMPLEX_MODE_P (mode))
2211 else if (SCALAR_FLOAT_MODE_P (mode))
2212 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
2213 else if (SCALAR_INT_MODE_P (mode))
2214 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2219 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
2222 gnat_unsigned_type (tree type_node)
2224 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
2226 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2228 type = copy_node (type);
2229 TREE_TYPE (type) = type_node;
2231 else if (TREE_TYPE (type_node)
2232 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2233 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2235 type = copy_node (type);
2236 TREE_TYPE (type) = TREE_TYPE (type_node);
2242 /* Return the signed version of a TYPE_NODE, a scalar type. */
2245 gnat_signed_type (tree type_node)
2247 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
2249 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2251 type = copy_node (type);
2252 TREE_TYPE (type) = type_node;
2254 else if (TREE_TYPE (type_node)
2255 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2256 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2258 type = copy_node (type);
2259 TREE_TYPE (type) = TREE_TYPE (type_node);
2265 /* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
2266 transparently converted to each other. */
2269 gnat_types_compatible_p (tree t1, tree t2)
2271 enum tree_code code;
2273 /* This is the default criterion. */
2274 if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
2277 /* We only check structural equivalence here. */
2278 if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
2281 /* Array types are also compatible if they are constrained and have
2282 the same component type and the same domain. */
2283 if (code == ARRAY_TYPE
2284 && TREE_TYPE (t1) == TREE_TYPE (t2)
2285 && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
2286 || (TYPE_DOMAIN (t1)
2288 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
2289 TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
2290 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
2291 TYPE_MAX_VALUE (TYPE_DOMAIN (t2))))))
2294 /* Padding record types are also compatible if they pad the same
2295 type and have the same constant size. */
2296 if (code == RECORD_TYPE
2297 && TYPE_IS_PADDING_P (t1) && TYPE_IS_PADDING_P (t2)
2298 && TREE_TYPE (TYPE_FIELDS (t1)) == TREE_TYPE (TYPE_FIELDS (t2))
2299 && tree_int_cst_equal (TYPE_SIZE (t1), TYPE_SIZE (t2)))
2305 /* EXP is an expression for the size of an object. If this size contains
2306 discriminant references, replace them with the maximum (if MAX_P) or
2307 minimum (if !MAX_P) possible value of the discriminant. */
2310 max_size (tree exp, bool max_p)
2312 enum tree_code code = TREE_CODE (exp);
2313 tree type = TREE_TYPE (exp);
2315 switch (TREE_CODE_CLASS (code))
2317 case tcc_declaration:
2322 if (code == CALL_EXPR)
2325 int i, n = call_expr_nargs (exp);
2328 argarray = (tree *) alloca (n * sizeof (tree));
2329 for (i = 0; i < n; i++)
2330 argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
2331 return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
2336 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
2337 modify. Otherwise, we treat it like a variable. */
2338 if (!CONTAINS_PLACEHOLDER_P (exp))
2341 type = TREE_TYPE (TREE_OPERAND (exp, 1));
2343 max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), true);
2345 case tcc_comparison:
2346 return max_p ? size_one_node : size_zero_node;
2350 case tcc_expression:
2351 switch (TREE_CODE_LENGTH (code))
2354 if (code == NON_LVALUE_EXPR)
2355 return max_size (TREE_OPERAND (exp, 0), max_p);
2358 fold_build1 (code, type,
2359 max_size (TREE_OPERAND (exp, 0),
2360 code == NEGATE_EXPR ? !max_p : max_p));
2363 if (code == COMPOUND_EXPR)
2364 return max_size (TREE_OPERAND (exp, 1), max_p);
2366 /* Calculate "(A ? B : C) - D" as "A ? B - D : C - D" which
2367 may provide a tighter bound on max_size. */
2368 if (code == MINUS_EXPR
2369 && TREE_CODE (TREE_OPERAND (exp, 0)) == COND_EXPR)
2371 tree lhs = fold_build2 (MINUS_EXPR, type,
2372 TREE_OPERAND (TREE_OPERAND (exp, 0), 1),
2373 TREE_OPERAND (exp, 1));
2374 tree rhs = fold_build2 (MINUS_EXPR, type,
2375 TREE_OPERAND (TREE_OPERAND (exp, 0), 2),
2376 TREE_OPERAND (exp, 1));
2377 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2378 max_size (lhs, max_p),
2379 max_size (rhs, max_p));
2383 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
2384 tree rhs = max_size (TREE_OPERAND (exp, 1),
2385 code == MINUS_EXPR ? !max_p : max_p);
2387 /* Special-case wanting the maximum value of a MIN_EXPR.
2388 In that case, if one side overflows, return the other.
2389 sizetype is signed, but we know sizes are non-negative.
2390 Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
2391 overflowing or the maximum possible value and the RHS
2395 && TREE_CODE (rhs) == INTEGER_CST
2396 && TREE_OVERFLOW (rhs))
2400 && TREE_CODE (lhs) == INTEGER_CST
2401 && TREE_OVERFLOW (lhs))
2403 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
2404 && ((TREE_CODE (lhs) == INTEGER_CST
2405 && TREE_OVERFLOW (lhs))
2406 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
2407 && !TREE_CONSTANT (rhs))
2410 return fold_build2 (code, type, lhs, rhs);
2414 if (code == SAVE_EXPR)
2416 else if (code == COND_EXPR)
2417 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2418 max_size (TREE_OPERAND (exp, 1), max_p),
2419 max_size (TREE_OPERAND (exp, 2), max_p));
2422 /* Other tree classes cannot happen. */
2430 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2431 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2432 Return a constructor for the template. */
2435 build_template (tree template_type, tree array_type, tree expr)
2437 tree template_elts = NULL_TREE;
2438 tree bound_list = NULL_TREE;
2441 while (TREE_CODE (array_type) == RECORD_TYPE
2442 && (TYPE_IS_PADDING_P (array_type)
2443 || TYPE_JUSTIFIED_MODULAR_P (array_type)))
2444 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
2446 if (TREE_CODE (array_type) == ARRAY_TYPE
2447 || (TREE_CODE (array_type) == INTEGER_TYPE
2448 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
2449 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
2451 /* First make the list for a CONSTRUCTOR for the template. Go down the
2452 field list of the template instead of the type chain because this
2453 array might be an Ada array of arrays and we can't tell where the
2454 nested arrays stop being the underlying object. */
2456 for (field = TYPE_FIELDS (template_type); field;
2458 ? (bound_list = TREE_CHAIN (bound_list))
2459 : (array_type = TREE_TYPE (array_type))),
2460 field = TREE_CHAIN (TREE_CHAIN (field)))
2462 tree bounds, min, max;
2464 /* If we have a bound list, get the bounds from there. Likewise
2465 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
2466 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
2467 This will give us a maximum range. */
2469 bounds = TREE_VALUE (bound_list);
2470 else if (TREE_CODE (array_type) == ARRAY_TYPE)
2471 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
2472 else if (expr && TREE_CODE (expr) == PARM_DECL
2473 && DECL_BY_COMPONENT_PTR_P (expr))
2474 bounds = TREE_TYPE (field);
2478 min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
2479 max = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MAX_VALUE (bounds));
2481 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
2482 substitute it from OBJECT. */
2483 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
2484 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
2486 template_elts = tree_cons (TREE_CHAIN (field), max,
2487 tree_cons (field, min, template_elts));
2490 return gnat_build_constructor (template_type, nreverse (template_elts));
2493 /* Build a 32bit VMS descriptor from a Mechanism_Type, which must specify
2494 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2495 in the type contains in its DECL_INITIAL the expression to use when
2496 a constructor is made for the type. GNAT_ENTITY is an entity used
2497 to print out an error message if the mechanism cannot be applied to
2498 an object of that type and also for the name. */
2501 build_vms_descriptor32 (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2503 tree record_type = make_node (RECORD_TYPE);
2504 tree pointer32_type;
2505 tree field_list = 0;
2514 /* If TYPE is an unconstrained array, use the underlying array type. */
2515 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2516 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2518 /* If this is an array, compute the number of dimensions in the array,
2519 get the index types, and point to the inner type. */
2520 if (TREE_CODE (type) != ARRAY_TYPE)
2523 for (ndim = 1, inner_type = type;
2524 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2525 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2526 ndim++, inner_type = TREE_TYPE (inner_type))
2529 idx_arr = (tree *) alloca (ndim * sizeof (tree));
2531 if (mech != By_Descriptor_NCA && mech != By_Short_Descriptor_NCA
2532 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2533 for (i = ndim - 1, inner_type = type;
2535 i--, inner_type = TREE_TYPE (inner_type))
2536 idx_arr[i] = TYPE_DOMAIN (inner_type);
2538 for (i = 0, inner_type = type;
2540 i++, inner_type = TREE_TYPE (inner_type))
2541 idx_arr[i] = TYPE_DOMAIN (inner_type);
2543 /* Now get the DTYPE value. */
2544 switch (TREE_CODE (type))
2549 if (TYPE_VAX_FLOATING_POINT_P (type))
2550 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2563 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2566 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2569 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2572 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2575 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2578 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2584 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2588 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2589 && TYPE_VAX_FLOATING_POINT_P (type))
2590 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2602 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2613 /* Get the CLASS value. */
2616 case By_Descriptor_A:
2617 case By_Short_Descriptor_A:
2620 case By_Descriptor_NCA:
2621 case By_Short_Descriptor_NCA:
2624 case By_Descriptor_SB:
2625 case By_Short_Descriptor_SB:
2629 case By_Short_Descriptor:
2630 case By_Descriptor_S:
2631 case By_Short_Descriptor_S:
2637 /* Make the type for a descriptor for VMS. The first four fields
2638 are the same for all types. */
2641 = chainon (field_list,
2642 make_descriptor_field
2643 ("LENGTH", gnat_type_for_size (16, 1), record_type,
2644 size_in_bytes ((mech == By_Descriptor_A ||
2645 mech == By_Short_Descriptor_A)
2646 ? inner_type : type)));
2648 field_list = chainon (field_list,
2649 make_descriptor_field ("DTYPE",
2650 gnat_type_for_size (8, 1),
2651 record_type, size_int (dtype)));
2652 field_list = chainon (field_list,
2653 make_descriptor_field ("CLASS",
2654 gnat_type_for_size (8, 1),
2655 record_type, size_int (class)));
2657 /* Of course this will crash at run-time if the address space is not
2658 within the low 32 bits, but there is nothing else we can do. */
2659 pointer32_type = build_pointer_type_for_mode (type, SImode, false);
2662 = chainon (field_list,
2663 make_descriptor_field
2664 ("POINTER", pointer32_type, record_type,
2665 build_unary_op (ADDR_EXPR,
2667 build0 (PLACEHOLDER_EXPR, type))));
2672 case By_Short_Descriptor:
2673 case By_Descriptor_S:
2674 case By_Short_Descriptor_S:
2677 case By_Descriptor_SB:
2678 case By_Short_Descriptor_SB:
2680 = chainon (field_list,
2681 make_descriptor_field
2682 ("SB_L1", gnat_type_for_size (32, 1), record_type,
2683 TREE_CODE (type) == ARRAY_TYPE
2684 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2686 = chainon (field_list,
2687 make_descriptor_field
2688 ("SB_U1", gnat_type_for_size (32, 1), record_type,
2689 TREE_CODE (type) == ARRAY_TYPE
2690 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2693 case By_Descriptor_A:
2694 case By_Short_Descriptor_A:
2695 case By_Descriptor_NCA:
2696 case By_Short_Descriptor_NCA:
2697 field_list = chainon (field_list,
2698 make_descriptor_field ("SCALE",
2699 gnat_type_for_size (8, 1),
2703 field_list = chainon (field_list,
2704 make_descriptor_field ("DIGITS",
2705 gnat_type_for_size (8, 1),
2710 = chainon (field_list,
2711 make_descriptor_field
2712 ("AFLAGS", gnat_type_for_size (8, 1), record_type,
2713 size_int ((mech == By_Descriptor_NCA ||
2714 mech == By_Short_Descriptor_NCA)
2716 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2717 : (TREE_CODE (type) == ARRAY_TYPE
2718 && TYPE_CONVENTION_FORTRAN_P (type)
2721 field_list = chainon (field_list,
2722 make_descriptor_field ("DIMCT",
2723 gnat_type_for_size (8, 1),
2727 field_list = chainon (field_list,
2728 make_descriptor_field ("ARSIZE",
2729 gnat_type_for_size (32, 1),
2731 size_in_bytes (type)));
2733 /* Now build a pointer to the 0,0,0... element. */
2734 tem = build0 (PLACEHOLDER_EXPR, type);
2735 for (i = 0, inner_type = type; i < ndim;
2736 i++, inner_type = TREE_TYPE (inner_type))
2737 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
2738 convert (TYPE_DOMAIN (inner_type), size_zero_node),
2739 NULL_TREE, NULL_TREE);
2742 = chainon (field_list,
2743 make_descriptor_field
2745 build_pointer_type_for_mode (inner_type, SImode, false),
2748 build_pointer_type_for_mode (inner_type, SImode,
2752 /* Next come the addressing coefficients. */
2753 tem = size_one_node;
2754 for (i = 0; i < ndim; i++)
2758 = size_binop (MULT_EXPR, tem,
2759 size_binop (PLUS_EXPR,
2760 size_binop (MINUS_EXPR,
2761 TYPE_MAX_VALUE (idx_arr[i]),
2762 TYPE_MIN_VALUE (idx_arr[i])),
2765 fname[0] = ((mech == By_Descriptor_NCA ||
2766 mech == By_Short_Descriptor_NCA) ? 'S' : 'M');
2767 fname[1] = '0' + i, fname[2] = 0;
2769 = chainon (field_list,
2770 make_descriptor_field (fname,
2771 gnat_type_for_size (32, 1),
2772 record_type, idx_length));
2774 if (mech == By_Descriptor_NCA || mech == By_Short_Descriptor_NCA)
2778 /* Finally here are the bounds. */
2779 for (i = 0; i < ndim; i++)
2783 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2785 = chainon (field_list,
2786 make_descriptor_field
2787 (fname, gnat_type_for_size (32, 1), record_type,
2788 TYPE_MIN_VALUE (idx_arr[i])));
2792 = chainon (field_list,
2793 make_descriptor_field
2794 (fname, gnat_type_for_size (32, 1), record_type,
2795 TYPE_MAX_VALUE (idx_arr[i])));
2800 post_error ("unsupported descriptor type for &", gnat_entity);
2803 TYPE_NAME (record_type) = create_concat_name (gnat_entity, "DESC");
2804 finish_record_type (record_type, field_list, 0, true);
2808 /* Build a 64bit VMS descriptor from a Mechanism_Type, which must specify
2809 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2810 in the type contains in its DECL_INITIAL the expression to use when
2811 a constructor is made for the type. GNAT_ENTITY is an entity used
2812 to print out an error message if the mechanism cannot be applied to
2813 an object of that type and also for the name. */
2816 build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2818 tree record64_type = make_node (RECORD_TYPE);
2819 tree pointer64_type;
2820 tree field_list64 = 0;
2829 /* If TYPE is an unconstrained array, use the underlying array type. */
2830 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2831 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2833 /* If this is an array, compute the number of dimensions in the array,
2834 get the index types, and point to the inner type. */
2835 if (TREE_CODE (type) != ARRAY_TYPE)
2838 for (ndim = 1, inner_type = type;
2839 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2840 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2841 ndim++, inner_type = TREE_TYPE (inner_type))
2844 idx_arr = (tree *) alloca (ndim * sizeof (tree));
2846 if (mech != By_Descriptor_NCA
2847 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2848 for (i = ndim - 1, inner_type = type;
2850 i--, inner_type = TREE_TYPE (inner_type))
2851 idx_arr[i] = TYPE_DOMAIN (inner_type);
2853 for (i = 0, inner_type = type;
2855 i++, inner_type = TREE_TYPE (inner_type))
2856 idx_arr[i] = TYPE_DOMAIN (inner_type);
2858 /* Now get the DTYPE value. */
2859 switch (TREE_CODE (type))
2864 if (TYPE_VAX_FLOATING_POINT_P (type))
2865 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2878 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2881 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2884 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2887 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2890 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2893 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2899 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2903 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2904 && TYPE_VAX_FLOATING_POINT_P (type))
2905 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2917 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2928 /* Get the CLASS value. */
2931 case By_Descriptor_A:
2934 case By_Descriptor_NCA:
2937 case By_Descriptor_SB:
2941 case By_Descriptor_S:
2947 /* Make the type for a 64bit descriptor for VMS. The first six fields
2948 are the same for all types. */
2950 field_list64 = chainon (field_list64,
2951 make_descriptor_field ("MBO",
2952 gnat_type_for_size (16, 1),
2953 record64_type, size_int (1)));
2955 field_list64 = chainon (field_list64,
2956 make_descriptor_field ("DTYPE",
2957 gnat_type_for_size (8, 1),
2958 record64_type, size_int (dtype)));
2959 field_list64 = chainon (field_list64,
2960 make_descriptor_field ("CLASS",
2961 gnat_type_for_size (8, 1),
2962 record64_type, size_int (class)));
2964 field_list64 = chainon (field_list64,
2965 make_descriptor_field ("MBMO",
2966 gnat_type_for_size (32, 1),
2967 record64_type, ssize_int (-1)));
2970 = chainon (field_list64,
2971 make_descriptor_field
2972 ("LENGTH", gnat_type_for_size (64, 1), record64_type,
2973 size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));
2975 pointer64_type = build_pointer_type_for_mode (type, DImode, false);
2978 = chainon (field_list64,
2979 make_descriptor_field
2980 ("POINTER", pointer64_type, record64_type,
2981 build_unary_op (ADDR_EXPR,
2983 build0 (PLACEHOLDER_EXPR, type))));
2988 case By_Descriptor_S:
2991 case By_Descriptor_SB:
2993 = chainon (field_list64,
2994 make_descriptor_field
2995 ("SB_L1", gnat_type_for_size (64, 1), record64_type,
2996 TREE_CODE (type) == ARRAY_TYPE
2997 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2999 = chainon (field_list64,
3000 make_descriptor_field
3001 ("SB_U1", gnat_type_for_size (64, 1), record64_type,
3002 TREE_CODE (type) == ARRAY_TYPE
3003 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
3006 case By_Descriptor_A:
3007 case By_Descriptor_NCA:
3008 field_list64 = chainon (field_list64,
3009 make_descriptor_field ("SCALE",
3010 gnat_type_for_size (8, 1),
3014 field_list64 = chainon (field_list64,
3015 make_descriptor_field ("DIGITS",
3016 gnat_type_for_size (8, 1),
3021 = chainon (field_list64,
3022 make_descriptor_field
3023 ("AFLAGS", gnat_type_for_size (8, 1), record64_type,
3024 size_int (mech == By_Descriptor_NCA
3026 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
3027 : (TREE_CODE (type) == ARRAY_TYPE
3028 && TYPE_CONVENTION_FORTRAN_P (type)
3031 field_list64 = chainon (field_list64,
3032 make_descriptor_field ("DIMCT",
3033 gnat_type_for_size (8, 1),
3037 field_list64 = chainon (field_list64,
3038 make_descriptor_field ("MBZ",
3039 gnat_type_for_size (32, 1),
3042 field_list64 = chainon (field_list64,
3043 make_descriptor_field ("ARSIZE",
3044 gnat_type_for_size (64, 1),
3046 size_in_bytes (type)));
3048 /* Now build a pointer to the 0,0,0... element. */
3049 tem = build0 (PLACEHOLDER_EXPR, type);
3050 for (i = 0, inner_type = type; i < ndim;
3051 i++, inner_type = TREE_TYPE (inner_type))
3052 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
3053 convert (TYPE_DOMAIN (inner_type), size_zero_node),
3054 NULL_TREE, NULL_TREE);
3057 = chainon (field_list64,
3058 make_descriptor_field
3060 build_pointer_type_for_mode (inner_type, DImode, false),
3063 build_pointer_type_for_mode (inner_type, DImode,
3067 /* Next come the addressing coefficients. */
3068 tem = size_one_node;
3069 for (i = 0; i < ndim; i++)
3073 = size_binop (MULT_EXPR, tem,
3074 size_binop (PLUS_EXPR,
3075 size_binop (MINUS_EXPR,
3076 TYPE_MAX_VALUE (idx_arr[i]),
3077 TYPE_MIN_VALUE (idx_arr[i])),
3080 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
3081 fname[1] = '0' + i, fname[2] = 0;
3083 = chainon (field_list64,
3084 make_descriptor_field (fname,
3085 gnat_type_for_size (64, 1),
3086 record64_type, idx_length));
3088 if (mech == By_Descriptor_NCA)
3092 /* Finally here are the bounds. */
3093 for (i = 0; i < ndim; i++)
3097 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
3099 = chainon (field_list64,
3100 make_descriptor_field
3101 (fname, gnat_type_for_size (64, 1), record64_type,
3102 TYPE_MIN_VALUE (idx_arr[i])));
3106 = chainon (field_list64,
3107 make_descriptor_field
3108 (fname, gnat_type_for_size (64, 1), record64_type,
3109 TYPE_MAX_VALUE (idx_arr[i])));
3114 post_error ("unsupported descriptor type for &", gnat_entity);
3117 TYPE_NAME (record64_type) = create_concat_name (gnat_entity, "DESC64");
3118 finish_record_type (record64_type, field_list64, 0, true);
3119 return record64_type;
3122 /* Utility routine for above code to make a field. */
3125 make_descriptor_field (const char *name, tree type,
3126 tree rec_type, tree initial)
3129 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
3131 DECL_INITIAL (field) = initial;
3135 /* Convert GNU_EXPR, a pointer to a 64bit VMS descriptor, to GNU_TYPE, a
3136 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3137 which the VMS descriptor is passed. */
3140 convert_vms_descriptor64 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
3142 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3143 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3144 /* The CLASS field is the 3rd field in the descriptor. */
3145 tree class = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type)));
3146 /* The POINTER field is the 6th field in the descriptor. */
3147 tree pointer64 = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (class)));
3149 /* Retrieve the value of the POINTER field. */
3151 = build3 (COMPONENT_REF, TREE_TYPE (pointer64), desc, pointer64, NULL_TREE);
3153 if (POINTER_TYPE_P (gnu_type))
3154 return convert (gnu_type, gnu_expr64);
3156 else if (TYPE_FAT_POINTER_P (gnu_type))
3158 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
3159 tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
3160 tree template_type = TREE_TYPE (p_bounds_type);
3161 tree min_field = TYPE_FIELDS (template_type);
3162 tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
3163 tree template, template_addr, aflags, dimct, t, u;
3164 /* See the head comment of build_vms_descriptor. */
3165 int iclass = TREE_INT_CST_LOW (DECL_INITIAL (class));
3166 tree lfield, ufield;
3168 /* Convert POINTER to the type of the P_ARRAY field. */
3169 gnu_expr64 = convert (p_array_type, gnu_expr64);
3173 case 1: /* Class S */
3174 case 15: /* Class SB */
3175 /* Build {1, LENGTH} template; LENGTH64 is the 5th field. */
3176 t = TREE_CHAIN (TREE_CHAIN (class));
3177 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3178 t = tree_cons (min_field,
3179 convert (TREE_TYPE (min_field), integer_one_node),
3180 tree_cons (max_field,
3181 convert (TREE_TYPE (max_field), t),
3183 template = gnat_build_constructor (template_type, t);
3184 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);
3186 /* For class S, we are done. */
3190 /* Test that we really have a SB descriptor, like DEC Ada. */
3191 t = build3 (COMPONENT_REF, TREE_TYPE (class), desc, class, NULL);
3192 u = convert (TREE_TYPE (class), DECL_INITIAL (class));
3193 u = build_binary_op (EQ_EXPR, integer_type_node, t, u);
3194 /* If so, there is already a template in the descriptor and
3195 it is located right after the POINTER field. The fields are
3196 64bits so they must be repacked. */
3197 t = TREE_CHAIN (pointer64);
3198 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3199 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3202 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3204 (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (template_type))), ufield);
3206 /* Build the template in the form of a constructor. */
3207 t = tree_cons (TYPE_FIELDS (template_type), lfield,
3208 tree_cons (TREE_CHAIN (TYPE_FIELDS (template_type)),
3209 ufield, NULL_TREE));
3210 template = gnat_build_constructor (template_type, t);
3212 /* Otherwise use the {1, LENGTH} template we build above. */
3213 template_addr = build3 (COND_EXPR, p_bounds_type, u,
3214 build_unary_op (ADDR_EXPR, p_bounds_type,
3219 case 4: /* Class A */
3220 /* The AFLAGS field is the 3rd field after the pointer in the
3222 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer64)));
3223 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3224 /* The DIMCT field is the next field in the descriptor after
3227 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3228 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
3229 or FL_COEFF or FL_BOUNDS not set. */
3230 u = build_int_cst (TREE_TYPE (aflags), 192);
3231 u = build_binary_op (TRUTH_OR_EXPR, integer_type_node,
3232 build_binary_op (NE_EXPR, integer_type_node,
3234 convert (TREE_TYPE (dimct),
3236 build_binary_op (NE_EXPR, integer_type_node,
3237 build2 (BIT_AND_EXPR,
3241 /* There is already a template in the descriptor and it is located
3242 in block 3. The fields are 64bits so they must be repacked. */
3243 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN
3245 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3246 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3249 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3251 (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (template_type))), ufield);
3253 /* Build the template in the form of a constructor. */
3254 t = tree_cons (TYPE_FIELDS (template_type), lfield,
3255 tree_cons (TREE_CHAIN (TYPE_FIELDS (template_type)),
3256 ufield, NULL_TREE));
3257 template = gnat_build_constructor (template_type, t);
3258 template = build3 (COND_EXPR, p_bounds_type, u,
3259 build_call_raise (CE_Length_Check_Failed, Empty,
3260 N_Raise_Constraint_Error),
3262 template_addr = build_unary_op (ADDR_EXPR, p_bounds_type, template);
3265 case 10: /* Class NCA */
3267 post_error ("unsupported descriptor type for &", gnat_subprog);
3268 template_addr = integer_zero_node;
3272 /* Build the fat pointer in the form of a constructor. */
3273 t = tree_cons (TYPE_FIELDS (gnu_type), gnu_expr64,
3274 tree_cons (TREE_CHAIN (TYPE_FIELDS (gnu_type)),
3275 template_addr, NULL_TREE));
3276 return gnat_build_constructor (gnu_type, t);
3283 /* Convert GNU_EXPR, a pointer to a 32bit VMS descriptor, to GNU_TYPE, a
3284 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3285 which the VMS descriptor is passed. */
3288 convert_vms_descriptor32 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
3290 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3291 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3292 /* The CLASS field is the 3rd field in the descriptor. */
3293 tree class = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type)));
3294 /* The POINTER field is the 4th field in the descriptor. */
3295 tree pointer = TREE_CHAIN (class);
3297 /* Retrieve the value of the POINTER field. */
3299 = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);
3301 if (POINTER_TYPE_P (gnu_type))
3302 return convert (gnu_type, gnu_expr32);
3304 else if (TYPE_FAT_POINTER_P (gnu_type))
3306 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
3307 tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
3308 tree template_type = TREE_TYPE (p_bounds_type);
3309 tree min_field = TYPE_FIELDS (template_type);
3310 tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
3311 tree template, template_addr, aflags, dimct, t, u;
3312 /* See the head comment of build_vms_descriptor. */
3313 int iclass = TREE_INT_CST_LOW (DECL_INITIAL (class));
3315 /* Convert POINTER to the type of the P_ARRAY field. */
3316 gnu_expr32 = convert (p_array_type, gnu_expr32);
3320 case 1: /* Class S */
3321 case 15: /* Class SB */
3322 /* Build {1, LENGTH} template; LENGTH is the 1st field. */
3323 t = TYPE_FIELDS (desc_type);
3324 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3325 t = tree_cons (min_field,
3326 convert (TREE_TYPE (min_field), integer_one_node),
3327 tree_cons (max_field,
3328 convert (TREE_TYPE (max_field), t),
3330 template = gnat_build_constructor (template_type, t);
3331 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);
3333 /* For class S, we are done. */
3337 /* Test that we really have a SB descriptor, like DEC Ada. */
3338 t = build3 (COMPONENT_REF, TREE_TYPE (class), desc, class, NULL);
3339 u = convert (TREE_TYPE (class), DECL_INITIAL (class));
3340 u = build_binary_op (EQ_EXPR, integer_type_node, t, u);
3341 /* If so, there is already a template in the descriptor and
3342 it is located right after the POINTER field. */
3343 t = TREE_CHAIN (pointer);
3344 template = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3345 /* Otherwise use the {1, LENGTH} template we build above. */
3346 template_addr = build3 (COND_EXPR, p_bounds_type, u,
3347 build_unary_op (ADDR_EXPR, p_bounds_type,
3352 case 4: /* Class A */
3353 /* The AFLAGS field is the 7th field in the descriptor. */
3354 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer)));
3355 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3356 /* The DIMCT field is the 8th field in the descriptor. */
3358 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3359 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
3360 or FL_COEFF or FL_BOUNDS not set. */
3361 u = build_int_cst (TREE_TYPE (aflags), 192);
3362 u = build_binary_op (TRUTH_OR_EXPR, integer_type_node,
3363 build_binary_op (NE_EXPR, integer_type_node,
3365 convert (TREE_TYPE (dimct),
3367 build_binary_op (NE_EXPR, integer_type_node,
3368 build2 (BIT_AND_EXPR,
3372 /* There is already a template in the descriptor and it is
3373 located at the start of block 3 (12th field). */
3374 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (t))));
3375 template = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3376 template = build3 (COND_EXPR, p_bounds_type, u,
3377 build_call_raise (CE_Length_Check_Failed, Empty,
3378 N_Raise_Constraint_Error),
3380 template_addr = build_unary_op (ADDR_EXPR, p_bounds_type, template);
3383 case 10: /* Class NCA */
3385 post_error ("unsupported descriptor type for &", gnat_subprog);
3386 template_addr = integer_zero_node;
3390 /* Build the fat pointer in the form of a constructor. */
3391 t = tree_cons (TYPE_FIELDS (gnu_type), gnu_expr32,
3392 tree_cons (TREE_CHAIN (TYPE_FIELDS (gnu_type)),
3393 template_addr, NULL_TREE));
3395 return gnat_build_constructor (gnu_type, t);
3402 /* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
3403 pointer or fat pointer type. GNU_EXPR_ALT_TYPE is the alternate (32-bit)
3404 pointer type of GNU_EXPR. GNAT_SUBPROG is the subprogram to which the
3405 VMS descriptor is passed. */
3408 convert_vms_descriptor (tree gnu_type, tree gnu_expr, tree gnu_expr_alt_type,
3409 Entity_Id gnat_subprog)
3411 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3412 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3413 tree mbo = TYPE_FIELDS (desc_type);
3414 const char *mbostr = IDENTIFIER_POINTER (DECL_NAME (mbo));
3415 tree mbmo = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (mbo)));
3416 tree is64bit, gnu_expr32, gnu_expr64;
3418 /* If the field name is not MBO, it must be 32-bit and no alternate.
3419 Otherwise primary must be 64-bit and alternate 32-bit. */
3420 if (strcmp (mbostr, "MBO") != 0)
3421 return convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);
3423 /* Build the test for 64-bit descriptor. */
3424 mbo = build3 (COMPONENT_REF, TREE_TYPE (mbo), desc, mbo, NULL_TREE);
3425 mbmo = build3 (COMPONENT_REF, TREE_TYPE (mbmo), desc, mbmo, NULL_TREE);
3427 = build_binary_op (TRUTH_ANDIF_EXPR, integer_type_node,
3428 build_binary_op (EQ_EXPR, integer_type_node,
3429 convert (integer_type_node, mbo),
3431 build_binary_op (EQ_EXPR, integer_type_node,
3432 convert (integer_type_node, mbmo),
3433 integer_minus_one_node));
3435 /* Build the 2 possible end results. */
3436 gnu_expr64 = convert_vms_descriptor64 (gnu_type, gnu_expr, gnat_subprog);
3437 gnu_expr = fold_convert (gnu_expr_alt_type, gnu_expr);
3438 gnu_expr32 = convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);
3440 return build3 (COND_EXPR, gnu_type, is64bit, gnu_expr64, gnu_expr32);
3443 /* Build a stub for the subprogram specified by the GCC tree GNU_SUBPROG
3444 and the GNAT node GNAT_SUBPROG. */
3447 build_function_stub (tree gnu_subprog, Entity_Id gnat_subprog)
3449 tree gnu_subprog_type, gnu_subprog_addr, gnu_subprog_call;
3450 tree gnu_stub_param, gnu_param_list, gnu_arg_types, gnu_param;
3451 tree gnu_stub_decl = DECL_FUNCTION_STUB (gnu_subprog);
3454 gnu_subprog_type = TREE_TYPE (gnu_subprog);
3455 gnu_param_list = NULL_TREE;
3457 begin_subprog_body (gnu_stub_decl);
3460 start_stmt_group ();
3462 /* Loop over the parameters of the stub and translate any of them
3463 passed by descriptor into a by reference one. */
3464 for (gnu_stub_param = DECL_ARGUMENTS (gnu_stub_decl),
3465 gnu_arg_types = TYPE_ARG_TYPES (gnu_subprog_type);
3467 gnu_stub_param = TREE_CHAIN (gnu_stub_param),
3468 gnu_arg_types = TREE_CHAIN (gnu_arg_types))
3470 if (DECL_BY_DESCRIPTOR_P (gnu_stub_param))
3472 = convert_vms_descriptor (TREE_VALUE (gnu_arg_types),
3474 DECL_PARM_ALT_TYPE (gnu_stub_param),
3477 gnu_param = gnu_stub_param;
3479 gnu_param_list = tree_cons (NULL_TREE, gnu_param, gnu_param_list);
3482 gnu_body = end_stmt_group ();
3484 /* Invoke the internal subprogram. */
3485 gnu_subprog_addr = build1 (ADDR_EXPR, build_pointer_type (gnu_subprog_type),
3487 gnu_subprog_call = build_call_list (TREE_TYPE (gnu_subprog_type),
3489 nreverse (gnu_param_list));
3491 /* Propagate the return value, if any. */
3492 if (VOID_TYPE_P (TREE_TYPE (gnu_subprog_type)))
3493 append_to_statement_list (gnu_subprog_call, &gnu_body);
3495 append_to_statement_list (build_return_expr (DECL_RESULT (gnu_stub_decl),
3501 allocate_struct_function (gnu_stub_decl, false);
3502 end_subprog_body (gnu_body, false);
3505 /* Build a type to be used to represent an aliased object whose nominal
3506 type is an unconstrained array. This consists of a RECORD_TYPE containing
3507 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
3508 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
3509 is used to represent an arbitrary unconstrained object. Use NAME
3510 as the name of the record. */
3513 build_unc_object_type (tree template_type, tree object_type, tree name)
3515 tree type = make_node (RECORD_TYPE);
3516 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
3517 template_type, type, 0, 0, 0, 1);
3518 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
3521 TYPE_NAME (type) = name;
3522 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
3523 finish_record_type (type,
3524 chainon (chainon (NULL_TREE, template_field),
3531 /* Same, taking a thin or fat pointer type instead of a template type. */
3534 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
3539 gcc_assert (TYPE_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
3542 = (TYPE_FAT_POINTER_P (thin_fat_ptr_type)
3543 ? TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
3544 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
3545 return build_unc_object_type (template_type, object_type, name);
3548 /* Shift the component offsets within an unconstrained object TYPE to make it
3549 suitable for use as a designated type for thin pointers. */
3552 shift_unc_components_for_thin_pointers (tree type)
3554 /* Thin pointer values designate the ARRAY data of an unconstrained object,
3555 allocated past the BOUNDS template. The designated type is adjusted to
3556 have ARRAY at position zero and the template at a negative offset, so
3557 that COMPONENT_REFs on (*thin_ptr) designate the proper location. */
3559 tree bounds_field = TYPE_FIELDS (type);
3560 tree array_field = TREE_CHAIN (TYPE_FIELDS (type));
3562 DECL_FIELD_OFFSET (bounds_field)
3563 = size_binop (MINUS_EXPR, size_zero_node, byte_position (array_field));
3565 DECL_FIELD_OFFSET (array_field) = size_zero_node;
3566 DECL_FIELD_BIT_OFFSET (array_field) = bitsize_zero_node;
3569 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
3570 In the normal case this is just two adjustments, but we have more to
3571 do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
3574 update_pointer_to (tree old_type, tree new_type)
3576 tree ptr = TYPE_POINTER_TO (old_type);
3577 tree ref = TYPE_REFERENCE_TO (old_type);
3581 /* If this is the main variant, process all the other variants first. */
3582 if (TYPE_MAIN_VARIANT (old_type) == old_type)
3583 for (type = TYPE_NEXT_VARIANT (old_type); type;
3584 type = TYPE_NEXT_VARIANT (type))
3585 update_pointer_to (type, new_type);
3587 /* If no pointers and no references, we are done. */
3591 /* Merge the old type qualifiers in the new type.
3593 Each old variant has qualifiers for specific reasons, and the new
3594 designated type as well. Each set of qualifiers represents useful
3595 information grabbed at some point, and merging the two simply unifies
3596 these inputs into the final type description.
3598 Consider for instance a volatile type frozen after an access to constant
3599 type designating it; after the designated type's freeze, we get here with
3600 a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
3601 when the access type was processed. We will make a volatile and readonly
3602 designated type, because that's what it really is.
3604 We might also get here for a non-dummy OLD_TYPE variant with different
3605 qualifiers than those of NEW_TYPE, for instance in some cases of pointers
3606 to private record type elaboration (see the comments around the call to
3607 this routine in gnat_to_gnu_entity <E_Access_Type>). We have to merge
3608 the qualifiers in those cases too, to avoid accidentally discarding the
3609 initial set, and will often end up with OLD_TYPE == NEW_TYPE then. */
3611 = build_qualified_type (new_type,
3612 TYPE_QUALS (old_type) | TYPE_QUALS (new_type));
3614 /* If old type and new type are identical, there is nothing to do. */
3615 if (old_type == new_type)
3618 /* Otherwise, first handle the simple case. */
3619 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
3621 TYPE_POINTER_TO (new_type) = ptr;
3622 TYPE_REFERENCE_TO (new_type) = ref;
3624 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
3625 for (ptr1 = TYPE_MAIN_VARIANT (ptr); ptr1;
3626 ptr1 = TYPE_NEXT_VARIANT (ptr1))
3627 TREE_TYPE (ptr1) = new_type;
3629 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
3630 for (ref1 = TYPE_MAIN_VARIANT (ref); ref1;
3631 ref1 = TYPE_NEXT_VARIANT (ref1))
3632 TREE_TYPE (ref1) = new_type;
3635 /* Now deal with the unconstrained array case. In this case the "pointer"
3636 is actually a RECORD_TYPE where both fields are pointers to dummy nodes.
3637 Turn them into pointers to the correct types using update_pointer_to. */
3638 else if (!TYPE_FAT_POINTER_P (ptr))
3643 tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
3644 tree array_field = TYPE_FIELDS (ptr);
3645 tree bounds_field = TREE_CHAIN (TYPE_FIELDS (ptr));
3646 tree new_ptr = TYPE_POINTER_TO (new_type);
3650 /* Make pointers to the dummy template point to the real template. */
3652 (TREE_TYPE (TREE_TYPE (bounds_field)),
3653 TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_ptr)))));
3655 /* The references to the template bounds present in the array type
3656 are made through a PLACEHOLDER_EXPR of type NEW_PTR. Since we
3657 are updating PTR to make it a full replacement for NEW_PTR as
3658 pointer to NEW_TYPE, we must rework the PLACEHOLDER_EXPR so as
3659 to make it of type PTR. */
3660 new_ref = build3 (COMPONENT_REF, TREE_TYPE (bounds_field),
3661 build0 (PLACEHOLDER_EXPR, ptr),
3662 bounds_field, NULL_TREE);
3664 /* Create the new array for the new PLACEHOLDER_EXPR and make pointers
3665 to the dummy array point to it. */
3667 (TREE_TYPE (TREE_TYPE (array_field)),
3668 substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))),
3669 TREE_CHAIN (TYPE_FIELDS (new_ptr)), new_ref));
3671 /* Make PTR the pointer to NEW_TYPE. */
3672 TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
3673 = TREE_TYPE (new_type) = ptr;
3675 for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
3676 SET_TYPE_UNCONSTRAINED_ARRAY (var, new_type);
3678 /* Now handle updating the allocation record, what the thin pointer
3679 points to. Update all pointers from the old record into the new
3680 one, update the type of the array field, and recompute the size. */
3681 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);
3683 TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
3684 = TREE_TYPE (TREE_TYPE (array_field));
3686 /* The size recomputation needs to account for alignment constraints, so
3687 we let layout_type work it out. This will reset the field offsets to
3688 what they would be in a regular record, so we shift them back to what
3689 we want them to be for a thin pointer designated type afterwards. */
3690 DECL_SIZE (TYPE_FIELDS (new_obj_rec)) = 0;
3691 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))) = 0;
3692 TYPE_SIZE (new_obj_rec) = 0;
3693 layout_type (new_obj_rec);
3695 shift_unc_components_for_thin_pointers (new_obj_rec);
3697 /* We are done, at last. */
3698 rest_of_record_type_compilation (ptr);
3702 /* Convert EXPR, a pointer to a constrained array, into a pointer to an
3703 unconstrained one. This involves making or finding a template. */
3706 convert_to_fat_pointer (tree type, tree expr)
3708 tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
3709 tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
3710 tree etype = TREE_TYPE (expr);
3713 /* If EXPR is null, make a fat pointer that contains null pointers to the
3714 template and array. */
3715 if (integer_zerop (expr))
3717 gnat_build_constructor
3719 tree_cons (TYPE_FIELDS (type),
3720 convert (p_array_type, expr),
3721 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3722 convert (build_pointer_type (template_type),
3726 /* If EXPR is a thin pointer, make template and data from the record.. */
3727 else if (TYPE_THIN_POINTER_P (etype))
3729 tree fields = TYPE_FIELDS (TREE_TYPE (etype));
3731 expr = save_expr (expr);
3732 if (TREE_CODE (expr) == ADDR_EXPR)
3733 expr = TREE_OPERAND (expr, 0);
3735 expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
3737 template = build_component_ref (expr, NULL_TREE, fields, false);
3738 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
3739 build_component_ref (expr, NULL_TREE,
3740 TREE_CHAIN (fields), false));
3743 /* Otherwise, build the constructor for the template. */
3745 template = build_template (template_type, TREE_TYPE (etype), expr);
3747 /* The final result is a constructor for the fat pointer.
3749 If EXPR is an argument of a foreign convention subprogram, the type it
3750 points to is directly the component type. In this case, the expression
3751 type may not match the corresponding FIELD_DECL type at this point, so we
3752 call "convert" here to fix that up if necessary. This type consistency is
3753 required, for instance because it ensures that possible later folding of
3754 COMPONENT_REFs against this constructor always yields something of the
3755 same type as the initial reference.
3757 Note that the call to "build_template" above is still fine because it
3758 will only refer to the provided TEMPLATE_TYPE in this case. */
3760 gnat_build_constructor
3762 tree_cons (TYPE_FIELDS (type),
3763 convert (p_array_type, expr),
3764 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3765 build_unary_op (ADDR_EXPR, NULL_TREE, template),
3769 /* Convert to a thin pointer type, TYPE. The only thing we know how to convert
3770 is something that is a fat pointer, so convert to it first if it EXPR
3771 is not already a fat pointer. */
3774 convert_to_thin_pointer (tree type, tree expr)
3776 if (!TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
3778 = convert_to_fat_pointer
3779 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
3781 /* We get the pointer to the data and use a NOP_EXPR to make it the
3783 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)),
3785 expr = build1 (NOP_EXPR, type, expr);
3790 /* Create an expression whose value is that of EXPR,
3791 converted to type TYPE. The TREE_TYPE of the value
3792 is always TYPE. This function implements all reasonable
3793 conversions; callers should filter out those that are
3794 not permitted by the language being compiled. */
3797 convert (tree type, tree expr)
3799 enum tree_code code = TREE_CODE (type);
3800 tree etype = TREE_TYPE (expr);
3801 enum tree_code ecode = TREE_CODE (etype);
3803 /* If EXPR is already the right type, we are done. */
3807 /* If both input and output have padding and are of variable size, do this
3808 as an unchecked conversion. Likewise if one is a mere variant of the
3809 other, so we avoid a pointless unpad/repad sequence. */
3810 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
3811 && TYPE_IS_PADDING_P (type) && TYPE_IS_PADDING_P (etype)
3812 && (!TREE_CONSTANT (TYPE_SIZE (type))
3813 || !TREE_CONSTANT (TYPE_SIZE (etype))
3814 || gnat_types_compatible_p (type, etype)
3815 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
3816 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
3819 /* If the output type has padding, convert to the inner type and
3820 make a constructor to build the record. */
3821 else if (code == RECORD_TYPE && TYPE_IS_PADDING_P (type))
3823 /* If we previously converted from another type and our type is
3824 of variable size, remove the conversion to avoid the need for
3825 variable-size temporaries. Likewise for a conversion between
3826 original and packable version. */
3827 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3828 && (!TREE_CONSTANT (TYPE_SIZE (type))
3829 || (ecode == RECORD_TYPE
3830 && TYPE_NAME (etype)
3831 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
3832 expr = TREE_OPERAND (expr, 0);
3834 /* If we are just removing the padding from expr, convert the original
3835 object if we have variable size in order to avoid the need for some
3836 variable-size temporaries. Likewise if the padding is a mere variant
3837 of the other, so we avoid a pointless unpad/repad sequence. */
3838 if (TREE_CODE (expr) == COMPONENT_REF
3839 && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE
3840 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
3841 && (!TREE_CONSTANT (TYPE_SIZE (type))
3842 || gnat_types_compatible_p (type,
3843 TREE_TYPE (TREE_OPERAND (expr, 0)))
3844 || (ecode == RECORD_TYPE
3845 && TYPE_NAME (etype)
3846 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
3847 return convert (type, TREE_OPERAND (expr, 0));
3849 /* If the result type is a padded type with a self-referentially-sized
3850 field and the expression type is a record, do this as an
3851 unchecked conversion. */
3852 else if (TREE_CODE (etype) == RECORD_TYPE
3853 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
3854 return unchecked_convert (type, expr, false);
3858 gnat_build_constructor (type,
3859 tree_cons (TYPE_FIELDS (type),
3861 (TYPE_FIELDS (type)),
3866 /* If the input type has padding, remove it and convert to the output type.
3867 The conditions ordering is arranged to ensure that the output type is not
3868 a padding type here, as it is not clear whether the conversion would
3869 always be correct if this was to happen. */
3870 else if (ecode == RECORD_TYPE && TYPE_IS_PADDING_P (etype))
3874 /* If we have just converted to this padded type, just get the
3875 inner expression. */
3876 if (TREE_CODE (expr) == CONSTRUCTOR
3877 && !VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (expr))
3878 && VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->index
3879 == TYPE_FIELDS (etype))
3881 = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->value;
3883 /* Otherwise, build an explicit component reference. */
3886 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
3888 return convert (type, unpadded);
3891 /* If the input is a biased type, adjust first. */
3892 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
3893 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
3894 fold_convert (TREE_TYPE (etype),
3896 TYPE_MIN_VALUE (etype)));
3898 /* If the input is a justified modular type, we need to extract the actual
3899 object before converting it to any other type with the exceptions of an
3900 unconstrained array or of a mere type variant. It is useful to avoid the
3901 extraction and conversion in the type variant case because it could end
3902 up replacing a VAR_DECL expr by a constructor and we might be about the
3903 take the address of the result. */
3904 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
3905 && code != UNCONSTRAINED_ARRAY_TYPE
3906 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
3907 return convert (type, build_component_ref (expr, NULL_TREE,
3908 TYPE_FIELDS (etype), false));
3910 /* If converting to a type that contains a template, convert to the data
3911 type and then build the template. */
3912 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
3914 tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));
3916 /* If the source already has a template, get a reference to the
3917 associated array only, as we are going to rebuild a template
3918 for the target type anyway. */
3919 expr = maybe_unconstrained_array (expr);
3922 gnat_build_constructor
3924 tree_cons (TYPE_FIELDS (type),
3925 build_template (TREE_TYPE (TYPE_FIELDS (type)),
3926 obj_type, NULL_TREE),
3927 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3928 convert (obj_type, expr), NULL_TREE)));
3931 /* There are some special cases of expressions that we process
3933 switch (TREE_CODE (expr))
3939 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
3940 conversion in gnat_expand_expr. NULL_EXPR does not represent
3941 and actual value, so no conversion is needed. */
3942 expr = copy_node (expr);
3943 TREE_TYPE (expr) = type;
3947 /* If we are converting a STRING_CST to another constrained array type,
3948 just make a new one in the proper type. */
3949 if (code == ecode && AGGREGATE_TYPE_P (etype)
3950 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
3951 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
3953 expr = copy_node (expr);
3954 TREE_TYPE (expr) = type;
3960 /* If we are converting a CONSTRUCTOR to a mere variant type, just make
3961 a new one in the proper type. */
3962 if (code == ecode && gnat_types_compatible_p (type, etype))
3964 expr = copy_node (expr);
3965 TREE_TYPE (expr) = type;
3969 /* Likewise for a conversion between original and packable version, but
3970 we have to work harder in order to preserve type consistency. */
3972 && code == RECORD_TYPE
3973 && TYPE_NAME (type) == TYPE_NAME (etype))
3975 VEC(constructor_elt,gc) *e = CONSTRUCTOR_ELTS (expr);
3976 unsigned HOST_WIDE_INT len = VEC_length (constructor_elt, e);
3977 VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, len);
3978 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
3979 unsigned HOST_WIDE_INT idx;
3982 FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
3984 constructor_elt *elt = VEC_quick_push (constructor_elt, v, NULL);
3985 /* We expect only simple constructors. Otherwise, punt. */
3986 if (!(index == efield || index == DECL_ORIGINAL_FIELD (efield)))
3989 elt->value = convert (TREE_TYPE (field), value);
3990 efield = TREE_CHAIN (efield);
3991 field = TREE_CHAIN (field);
3996 expr = copy_node (expr);
3997 TREE_TYPE (expr) = type;
3998 CONSTRUCTOR_ELTS (expr) = v;
4004 case UNCONSTRAINED_ARRAY_REF:
4005 /* Convert this to the type of the inner array by getting the address of
4006 the array from the template. */
4007 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
4008 build_component_ref (TREE_OPERAND (expr, 0),
4009 get_identifier ("P_ARRAY"),
4011 etype = TREE_TYPE (expr);
4012 ecode = TREE_CODE (etype);
4015 case VIEW_CONVERT_EXPR:
4017 /* GCC 4.x is very sensitive to type consistency overall, and view
4018 conversions thus are very frequent. Even though just "convert"ing
4019 the inner operand to the output type is fine in most cases, it
4020 might expose unexpected input/output type mismatches in special
4021 circumstances so we avoid such recursive calls when we can. */
4022 tree op0 = TREE_OPERAND (expr, 0);
4024 /* If we are converting back to the original type, we can just
4025 lift the input conversion. This is a common occurrence with
4026 switches back-and-forth amongst type variants. */
4027 if (type == TREE_TYPE (op0))
4030 /* Otherwise, if we're converting between two aggregate types, we
4031 might be allowed to substitute the VIEW_CONVERT_EXPR target type
4032 in place or to just convert the inner expression. */
4033 if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
4035 /* If we are converting between mere variants, we can just
4036 substitute the VIEW_CONVERT_EXPR in place. */
4037 if (gnat_types_compatible_p (type, etype))
4038 return build1 (VIEW_CONVERT_EXPR, type, op0);
4040 /* Otherwise, we may just bypass the input view conversion unless
4041 one of the types is a fat pointer, which is handled by
4042 specialized code below which relies on exact type matching. */
4043 else if (!TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
4044 return convert (type, op0);
4050 /* If both types are record types, just convert the pointer and
4051 make a new INDIRECT_REF.
4053 ??? Disable this for now since it causes problems with the
4054 code in build_binary_op for MODIFY_EXPR which wants to
4055 strip off conversions. But that code really is a mess and
4056 we need to do this a much better way some time. */
4058 && (TREE_CODE (type) == RECORD_TYPE
4059 || TREE_CODE (type) == UNION_TYPE)
4060 && (TREE_CODE (etype) == RECORD_TYPE
4061 || TREE_CODE (etype) == UNION_TYPE)
4062 && !TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
4063 return build_unary_op (INDIRECT_REF, NULL_TREE,
4064 convert (build_pointer_type (type),
4065 TREE_OPERAND (expr, 0)));
4072 /* Check for converting to a pointer to an unconstrained array. */
4073 if (TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
4074 return convert_to_fat_pointer (type, expr);
4076 /* If we are converting between two aggregate types that are mere
4077 variants, just make a VIEW_CONVERT_EXPR. */
4078 else if (code == ecode
4079 && AGGREGATE_TYPE_P (type)
4080 && gnat_types_compatible_p (type, etype))
4081 return build1 (VIEW_CONVERT_EXPR, type, expr);
4083 /* In all other cases of related types, make a NOP_EXPR. */
4084 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
4085 || (code == INTEGER_CST && ecode == INTEGER_CST
4086 && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
4087 return fold_convert (type, expr);
4092 return fold_build1 (CONVERT_EXPR, type, expr);
4095 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4096 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4097 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4098 return unchecked_convert (type, expr, false);
4099 else if (TYPE_BIASED_REPRESENTATION_P (type))
4100 return fold_convert (type,
4101 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4102 convert (TREE_TYPE (type), expr),
4103 TYPE_MIN_VALUE (type)));
4105 /* ... fall through ... */
4109 /* If we are converting an additive expression to an integer type
4110 with lower precision, be wary of the optimization that can be
4111 applied by convert_to_integer. There are 2 problematic cases:
4112 - if the first operand was originally of a biased type,
4113 because we could be recursively called to convert it
4114 to an intermediate type and thus rematerialize the
4115 additive operator endlessly,
4116 - if the expression contains a placeholder, because an
4117 intermediate conversion that changes the sign could
4118 be inserted and thus introduce an artificial overflow
4119 at compile time when the placeholder is substituted. */
4120 if (code == INTEGER_TYPE
4121 && ecode == INTEGER_TYPE
4122 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4123 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4125 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4127 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4128 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4129 || CONTAINS_PLACEHOLDER_P (expr))
4130 return build1 (NOP_EXPR, type, expr);
4133 return fold (convert_to_integer (type, expr));
4136 case REFERENCE_TYPE:
4137 /* If converting between two pointers to records denoting
4138 both a template and type, adjust if needed to account
4139 for any differing offsets, since one might be negative. */
4140 if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
4143 = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
4144 bit_position (TYPE_FIELDS (TREE_TYPE (type))));
4145 tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
4146 sbitsize_int (BITS_PER_UNIT));
4148 expr = build1 (NOP_EXPR, type, expr);
4149 TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
4150 if (integer_zerop (byte_diff))
4153 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
4154 fold (convert (sizetype, byte_diff)));
4157 /* If converting to a thin pointer, handle specially. */
4158 if (TYPE_THIN_POINTER_P (type)
4159 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
4160 return convert_to_thin_pointer (type, expr);
4162 /* If converting fat pointer to normal pointer, get the pointer to the
4163 array and then convert it. */
4164 else if (TYPE_FAT_POINTER_P (etype))
4165 expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
4168 return fold (convert_to_pointer (type, expr));
4171 return fold (convert_to_real (type, expr));
4174 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
4176 gnat_build_constructor
4177 (type, tree_cons (TYPE_FIELDS (type),
4178 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
4181 /* ... fall through ... */
4184 /* In these cases, assume the front-end has validated the conversion.
4185 If the conversion is valid, it will be a bit-wise conversion, so
4186 it can be viewed as an unchecked conversion. */
4187 return unchecked_convert (type, expr, false);
4190 /* This is a either a conversion between a tagged type and some
4191 subtype, which we have to mark as a UNION_TYPE because of
4192 overlapping fields or a conversion of an Unchecked_Union. */
4193 return unchecked_convert (type, expr, false);
4195 case UNCONSTRAINED_ARRAY_TYPE:
4196 /* If EXPR is a constrained array, take its address, convert it to a
4197 fat pointer, and then dereference it. Likewise if EXPR is a
4198 record containing both a template and a constrained array.
4199 Note that a record representing a justified modular type
4200 always represents a packed constrained array. */
4201 if (ecode == ARRAY_TYPE
4202 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
4203 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
4204 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
4207 (INDIRECT_REF, NULL_TREE,
4208 convert_to_fat_pointer (TREE_TYPE (type),
4209 build_unary_op (ADDR_EXPR,
4212 /* Do something very similar for converting one unconstrained
4213 array to another. */
4214 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
4216 build_unary_op (INDIRECT_REF, NULL_TREE,
4217 convert (TREE_TYPE (type),
4218 build_unary_op (ADDR_EXPR,
4224 return fold (convert_to_complex (type, expr));
4231 /* Remove all conversions that are done in EXP. This includes converting
4232 from a padded type or to a justified modular type. If TRUE_ADDRESS
4233 is true, always return the address of the containing object even if
4234 the address is not bit-aligned. */
4237 remove_conversions (tree exp, bool true_address)
4239 switch (TREE_CODE (exp))
4243 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4244 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
4246 remove_conversions (VEC_index (constructor_elt,
4247 CONSTRUCTOR_ELTS (exp), 0)->value,
4252 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
4253 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
4254 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4257 case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
4259 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4268 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
4269 refers to the underlying array. If its type has TYPE_CONTAINS_TEMPLATE_P,
4270 likewise return an expression pointing to the underlying array. */
4273 maybe_unconstrained_array (tree exp)
4275 enum tree_code code = TREE_CODE (exp);
4278 switch (TREE_CODE (TREE_TYPE (exp)))
4280 case UNCONSTRAINED_ARRAY_TYPE:
4281 if (code == UNCONSTRAINED_ARRAY_REF)
4284 = build_unary_op (INDIRECT_REF, NULL_TREE,
4285 build_component_ref (TREE_OPERAND (exp, 0),
4286 get_identifier ("P_ARRAY"),
4288 TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp);
4292 else if (code == NULL_EXPR)
4293 return build1 (NULL_EXPR,
4294 TREE_TYPE (TREE_TYPE (TYPE_FIELDS
4295 (TREE_TYPE (TREE_TYPE (exp))))),
4296 TREE_OPERAND (exp, 0));
4299 /* If this is a padded type, convert to the unpadded type and see if
4300 it contains a template. */
4301 if (TYPE_IS_PADDING_P (TREE_TYPE (exp)))
4303 new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp))), exp);
4304 if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
4305 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (new)))
4307 build_component_ref (new, NULL_TREE,
4308 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (new))),
4311 else if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
4313 build_component_ref (exp, NULL_TREE,
4314 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))), 0);
4324 /* Return true if EXPR is an expression that can be folded as an operand
4325 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
4328 can_fold_for_view_convert_p (tree expr)
4332 /* The folder will fold NOP_EXPRs between integral types with the same
4333 precision (in the middle-end's sense). We cannot allow it if the
4334 types don't have the same precision in the Ada sense as well. */
4335 if (TREE_CODE (expr) != NOP_EXPR)
4338 t1 = TREE_TYPE (expr);
4339 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
4341 /* Defer to the folder for non-integral conversions. */
4342 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
4345 /* Only fold conversions that preserve both precisions. */
4346 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
4347 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
4353 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
4354 If NOTRUNC_P is true, truncation operations should be suppressed.
4356 Special care is required with (source or target) integral types whose
4357 precision is not equal to their size, to make sure we fetch or assign
4358 the value bits whose location might depend on the endianness, e.g.
4360 Rmsize : constant := 8;
4361 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
4363 type Bit_Array is array (1 .. Rmsize) of Boolean;
4364 pragma Pack (Bit_Array);
4366 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
4368 Value : Int := 2#1000_0001#;
4369 Vbits : Bit_Array := To_Bit_Array (Value);
4371 we expect the 8 bits at Vbits'Address to always contain Value, while
4372 their original location depends on the endianness, at Value'Address
4373 on a little-endian architecture but not on a big-endian one. */
4376 unchecked_convert (tree type, tree expr, bool notrunc_p)
4378 tree etype = TREE_TYPE (expr);
4380 /* If the expression is already the right type, we are done. */
4384 /* If both types types are integral just do a normal conversion.
4385 Likewise for a conversion to an unconstrained array. */
4386 if ((((INTEGRAL_TYPE_P (type)
4387 && !(TREE_CODE (type) == INTEGER_TYPE
4388 && TYPE_VAX_FLOATING_POINT_P (type)))
4389 || (POINTER_TYPE_P (type) && ! TYPE_THIN_POINTER_P (type))
4390 || (TREE_CODE (type) == RECORD_TYPE
4391 && TYPE_JUSTIFIED_MODULAR_P (type)))
4392 && ((INTEGRAL_TYPE_P (etype)
4393 && !(TREE_CODE (etype) == INTEGER_TYPE
4394 && TYPE_VAX_FLOATING_POINT_P (etype)))
4395 || (POINTER_TYPE_P (etype) && !TYPE_THIN_POINTER_P (etype))
4396 || (TREE_CODE (etype) == RECORD_TYPE
4397 && TYPE_JUSTIFIED_MODULAR_P (etype))))
4398 || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
4400 if (TREE_CODE (etype) == INTEGER_TYPE
4401 && TYPE_BIASED_REPRESENTATION_P (etype))
4403 tree ntype = copy_type (etype);
4404 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
4405 TYPE_MAIN_VARIANT (ntype) = ntype;
4406 expr = build1 (NOP_EXPR, ntype, expr);
4409 if (TREE_CODE (type) == INTEGER_TYPE
4410 && TYPE_BIASED_REPRESENTATION_P (type))
4412 tree rtype = copy_type (type);
4413 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
4414 TYPE_MAIN_VARIANT (rtype) = rtype;
4415 expr = convert (rtype, expr);
4416 expr = build1 (NOP_EXPR, type, expr);
4419 expr = convert (type, expr);
4422 /* If we are converting to an integral type whose precision is not equal
4423 to its size, first unchecked convert to a record that contains an
4424 object of the output type. Then extract the field. */
4425 else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
4426 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4427 GET_MODE_BITSIZE (TYPE_MODE (type))))
4429 tree rec_type = make_node (RECORD_TYPE);
4430 tree field = create_field_decl (get_identifier ("OBJ"), type,
4431 rec_type, 1, 0, 0, 0);
4433 TYPE_FIELDS (rec_type) = field;
4434 layout_type (rec_type);
4436 expr = unchecked_convert (rec_type, expr, notrunc_p);
4437 expr = build_component_ref (expr, NULL_TREE, field, 0);
4440 /* Similarly if we are converting from an integral type whose precision
4441 is not equal to its size. */
4442 else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype)
4443 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
4444 GET_MODE_BITSIZE (TYPE_MODE (etype))))
4446 tree rec_type = make_node (RECORD_TYPE);
4448 = create_field_decl (get_identifier ("OBJ"), etype, rec_type,
4451 TYPE_FIELDS (rec_type) = field;
4452 layout_type (rec_type);
4454 expr = gnat_build_constructor (rec_type, build_tree_list (field, expr));
4455 expr = unchecked_convert (type, expr, notrunc_p);
4458 /* We have a special case when we are converting between two
4459 unconstrained array types. In that case, take the address,
4460 convert the fat pointer types, and dereference. */
4461 else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
4462 && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
4463 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
4464 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
4465 build_unary_op (ADDR_EXPR, NULL_TREE,
4469 expr = maybe_unconstrained_array (expr);
4470 etype = TREE_TYPE (expr);
4471 if (can_fold_for_view_convert_p (expr))
4472 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
4474 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
4477 /* If the result is an integral type whose precision is not equal to its
4478 size, sign- or zero-extend the result. We need not do this if the input
4479 is an integral type of the same precision and signedness or if the output
4480 is a biased type or if both the input and output are unsigned. */
4482 && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
4483 && !(TREE_CODE (type) == INTEGER_TYPE
4484 && TYPE_BIASED_REPRESENTATION_P (type))
4485 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4486 GET_MODE_BITSIZE (TYPE_MODE (type)))
4487 && !(INTEGRAL_TYPE_P (etype)
4488 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
4489 && operand_equal_p (TYPE_RM_SIZE (type),
4490 (TYPE_RM_SIZE (etype) != 0
4491 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
4493 && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
4495 tree base_type = gnat_type_for_mode (TYPE_MODE (type),
4496 TYPE_UNSIGNED (type));
4498 = convert (base_type,
4499 size_binop (MINUS_EXPR,
4501 (GET_MODE_BITSIZE (TYPE_MODE (type))),
4502 TYPE_RM_SIZE (type)));
4505 build_binary_op (RSHIFT_EXPR, base_type,
4506 build_binary_op (LSHIFT_EXPR, base_type,
4507 convert (base_type, expr),
4512 /* An unchecked conversion should never raise Constraint_Error. The code
4513 below assumes that GCC's conversion routines overflow the same way that
4514 the underlying hardware does. This is probably true. In the rare case
4515 when it is false, we can rely on the fact that such conversions are
4516 erroneous anyway. */
4517 if (TREE_CODE (expr) == INTEGER_CST)
4518 TREE_OVERFLOW (expr) = 0;
4520 /* If the sizes of the types differ and this is an VIEW_CONVERT_EXPR,
4521 show no longer constant. */
4522 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4523 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
4525 TREE_CONSTANT (expr) = 0;
4530 /* Return the appropriate GCC tree code for the specified GNAT_TYPE,
4531 the latter being a record type as predicated by Is_Record_Type. */
4534 tree_code_for_record_type (Entity_Id gnat_type)
4536 Node_Id component_list
4537 = Component_List (Type_Definition
4539 (Implementation_Base_Type (gnat_type))));
4542 /* Make this a UNION_TYPE unless it's either not an Unchecked_Union or
4543 we have a non-discriminant field outside a variant. In either case,
4544 it's a RECORD_TYPE. */
4546 if (!Is_Unchecked_Union (gnat_type))
4549 for (component = First_Non_Pragma (Component_Items (component_list));
4550 Present (component);
4551 component = Next_Non_Pragma (component))
4552 if (Ekind (Defining_Entity (component)) == E_Component)
4558 /* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
4559 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
4560 according to the presence of an alignment clause on the type or, if it
4561 is an array, on the component type. */
4564 is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
4566 gnat_type = Underlying_Type (gnat_type);
4568 *align_clause = Present (Alignment_Clause (gnat_type));
4570 if (Is_Array_Type (gnat_type))
4572 gnat_type = Underlying_Type (Component_Type (gnat_type));
4573 if (Present (Alignment_Clause (gnat_type)))
4574 *align_clause = true;
4577 if (!Is_Floating_Point_Type (gnat_type))
4580 if (UI_To_Int (Esize (gnat_type)) != 64)
4586 /* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
4587 size is greater or equal to 64 bits, or an array of such a type. Set
4588 ALIGN_CLAUSE according to the presence of an alignment clause on the
4589 type or, if it is an array, on the component type. */
4592 is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
4594 gnat_type = Underlying_Type (gnat_type);
4596 *align_clause = Present (Alignment_Clause (gnat_type));
4598 if (Is_Array_Type (gnat_type))
4600 gnat_type = Underlying_Type (Component_Type (gnat_type));
4601 if (Present (Alignment_Clause (gnat_type)))
4602 *align_clause = true;
4605 if (!Is_Scalar_Type (gnat_type))
4608 if (UI_To_Int (Esize (gnat_type)) < 64)
4614 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
4615 component of an aggregate type. */
4618 type_for_nonaliased_component_p (tree gnu_type)
4620 /* If the type is passed by reference, we may have pointers to the
4621 component so it cannot be made non-aliased. */
4622 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
4625 /* We used to say that any component of aggregate type is aliased
4626 because the front-end may take 'Reference of it. The front-end
4627 has been enhanced in the meantime so as to use a renaming instead
4628 in most cases, but the back-end can probably take the address of
4629 such a component too so we go for the conservative stance.
4631 For instance, we might need the address of any array type, even
4632 if normally passed by copy, to construct a fat pointer if the
4633 component is used as an actual for an unconstrained formal.
4635 Likewise for record types: even if a specific record subtype is
4636 passed by copy, the parent type might be passed by ref (e.g. if
4637 it's of variable size) and we might take the address of a child
4638 component to pass to a parent formal. We have no way to check
4639 for such conditions here. */
4640 if (AGGREGATE_TYPE_P (gnu_type))
4646 /* Perform final processing on global variables. */
4649 gnat_write_global_declarations (void)
4651 /* Proceed to optimize and emit assembly.
4652 FIXME: shouldn't be the front end's responsibility to call this. */
4655 /* Emit debug info for all global declarations. */
4656 emit_debug_global_declarations (VEC_address (tree, global_decls),
4657 VEC_length (tree, global_decls));
4660 /* ************************************************************************
4661 * * GCC builtins support *
4662 * ************************************************************************ */
4664 /* The general scheme is fairly simple:
4666 For each builtin function/type to be declared, gnat_install_builtins calls
4667 internal facilities which eventually get to gnat_push_decl, which in turn
4668 tracks the so declared builtin function decls in the 'builtin_decls' global
4669 datastructure. When an Intrinsic subprogram declaration is processed, we
4670 search this global datastructure to retrieve the associated BUILT_IN DECL
4673 /* Search the chain of currently available builtin declarations for a node
4674 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
4675 found, if any, or NULL_TREE otherwise. */
4677 builtin_decl_for (tree name)
4682 for (i = 0; VEC_iterate(tree, builtin_decls, i, decl); i++)
4683 if (DECL_NAME (decl) == name)
4689 /* The code below eventually exposes gnat_install_builtins, which declares
4690 the builtin types and functions we might need, either internally or as
4691 user accessible facilities.
4693 ??? This is a first implementation shot, still in rough shape. It is
4694 heavily inspired from the "C" family implementation, with chunks copied
4695 verbatim from there.
4697 Two obvious TODO candidates are
4698 o Use a more efficient name/decl mapping scheme
4699 o Devise a middle-end infrastructure to avoid having to copy
4700 pieces between front-ends. */
4702 /* ----------------------------------------------------------------------- *
4703 * BUILTIN ELEMENTARY TYPES *
4704 * ----------------------------------------------------------------------- */
4706 /* Standard data types to be used in builtin argument declarations. */
4710 CTI_SIGNED_SIZE_TYPE, /* For format checking only. */
4712 CTI_CONST_STRING_TYPE,
4717 static tree c_global_trees[CTI_MAX];
4719 #define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
4720 #define string_type_node c_global_trees[CTI_STRING_TYPE]
4721 #define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
4723 /* ??? In addition some attribute handlers, we currently don't support a
4724 (small) number of builtin-types, which in turns inhibits support for a
4725 number of builtin functions. */
4726 #define wint_type_node void_type_node
4727 #define intmax_type_node void_type_node
4728 #define uintmax_type_node void_type_node
4730 /* Build the void_list_node (void_type_node having been created). */
4733 build_void_list_node (void)
4735 tree t = build_tree_list (NULL_TREE, void_type_node);
4739 /* Used to help initialize the builtin-types.def table. When a type of
4740 the correct size doesn't exist, use error_mark_node instead of NULL.
4741 The later results in segfaults even when a decl using the type doesn't
4745 builtin_type_for_size (int size, bool unsignedp)
4747 tree type = lang_hooks.types.type_for_size (size, unsignedp);
4748 return type ? type : error_mark_node;
4751 /* Build/push the elementary type decls that builtin functions/types
4755 install_builtin_elementary_types (void)
4757 signed_size_type_node = size_type_node;
4758 pid_type_node = integer_type_node;
4759 void_list_node = build_void_list_node ();
4761 string_type_node = build_pointer_type (char_type_node);
4762 const_string_type_node
4763 = build_pointer_type (build_qualified_type
4764 (char_type_node, TYPE_QUAL_CONST));
4767 /* ----------------------------------------------------------------------- *
4768 * BUILTIN FUNCTION TYPES *
4769 * ----------------------------------------------------------------------- */
4771 /* Now, builtin function types per se. */
4775 #define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
4776 #define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
4777 #define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
4778 #define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
4779 #define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
4780 #define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
4781 #define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
4782 #define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6) NAME,
4783 #define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7) NAME,
4784 #define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
4785 #define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
4786 #define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
4787 #define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
4788 #define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
4789 #define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG6) \
4791 #define DEF_POINTER_TYPE(NAME, TYPE) NAME,
4792 #include "builtin-types.def"
4793 #undef DEF_PRIMITIVE_TYPE
4794 #undef DEF_FUNCTION_TYPE_0
4795 #undef DEF_FUNCTION_TYPE_1
4796 #undef DEF_FUNCTION_TYPE_2
4797 #undef DEF_FUNCTION_TYPE_3
4798 #undef DEF_FUNCTION_TYPE_4
4799 #undef DEF_FUNCTION_TYPE_5
4800 #undef DEF_FUNCTION_TYPE_6
4801 #undef DEF_FUNCTION_TYPE_7
4802 #undef DEF_FUNCTION_TYPE_VAR_0
4803 #undef DEF_FUNCTION_TYPE_VAR_1
4804 #undef DEF_FUNCTION_TYPE_VAR_2
4805 #undef DEF_FUNCTION_TYPE_VAR_3
4806 #undef DEF_FUNCTION_TYPE_VAR_4
4807 #undef DEF_FUNCTION_TYPE_VAR_5
4808 #undef DEF_POINTER_TYPE
4812 typedef enum c_builtin_type builtin_type;
4814 /* A temporary array used in communication with def_fn_type. */
4815 static GTY(()) tree builtin_types[(int) BT_LAST + 1];
4817 /* A helper function for install_builtin_types. Build function type
4818 for DEF with return type RET and N arguments. If VAR is true, then the
4819 function should be variadic after those N arguments.
4821 Takes special care not to ICE if any of the types involved are
4822 error_mark_node, which indicates that said type is not in fact available
4823 (see builtin_type_for_size). In which case the function type as a whole
4824 should be error_mark_node. */
4827 def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
4829 tree args = NULL, t;
4834 for (i = 0; i < n; ++i)
4836 builtin_type a = va_arg (list, builtin_type);
4837 t = builtin_types[a];
4838 if (t == error_mark_node)
4840 args = tree_cons (NULL_TREE, t, args);
4844 args = nreverse (args);
4846 args = chainon (args, void_list_node);
4848 t = builtin_types[ret];
4849 if (t == error_mark_node)
4851 t = build_function_type (t, args);
4854 builtin_types[def] = t;
4857 /* Build the builtin function types and install them in the builtin_types
4858 array for later use in builtin function decls. */
4861 install_builtin_function_types (void)
4863 tree va_list_ref_type_node;
4864 tree va_list_arg_type_node;
4866 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4868 va_list_arg_type_node = va_list_ref_type_node =
4869 build_pointer_type (TREE_TYPE (va_list_type_node));
4873 va_list_arg_type_node = va_list_type_node;
4874 va_list_ref_type_node = build_reference_type (va_list_type_node);
4877 #define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
4878 builtin_types[ENUM] = VALUE;
4879 #define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
4880 def_fn_type (ENUM, RETURN, 0, 0);
4881 #define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
4882 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
4883 #define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
4884 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
4885 #define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
4886 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
4887 #define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
4888 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
4889 #define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
4890 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
4891 #define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
4893 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
4894 #define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
4896 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
4897 #define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
4898 def_fn_type (ENUM, RETURN, 1, 0);
4899 #define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
4900 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
4901 #define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
4902 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
4903 #define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
4904 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
4905 #define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
4906 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
4907 #define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
4908 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
4909 #define DEF_POINTER_TYPE(ENUM, TYPE) \
4910 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
4912 #include "builtin-types.def"
4914 #undef DEF_PRIMITIVE_TYPE
4915 #undef DEF_FUNCTION_TYPE_1
4916 #undef DEF_FUNCTION_TYPE_2
4917 #undef DEF_FUNCTION_TYPE_3
4918 #undef DEF_FUNCTION_TYPE_4
4919 #undef DEF_FUNCTION_TYPE_5
4920 #undef DEF_FUNCTION_TYPE_6
4921 #undef DEF_FUNCTION_TYPE_VAR_0
4922 #undef DEF_FUNCTION_TYPE_VAR_1
4923 #undef DEF_FUNCTION_TYPE_VAR_2
4924 #undef DEF_FUNCTION_TYPE_VAR_3
4925 #undef DEF_FUNCTION_TYPE_VAR_4
4926 #undef DEF_FUNCTION_TYPE_VAR_5
4927 #undef DEF_POINTER_TYPE
4928 builtin_types[(int) BT_LAST] = NULL_TREE;
4931 /* ----------------------------------------------------------------------- *
4932 * BUILTIN ATTRIBUTES *
4933 * ----------------------------------------------------------------------- */
4935 enum built_in_attribute
4937 #define DEF_ATTR_NULL_TREE(ENUM) ENUM,
4938 #define DEF_ATTR_INT(ENUM, VALUE) ENUM,
4939 #define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
4940 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
4941 #include "builtin-attrs.def"
4942 #undef DEF_ATTR_NULL_TREE
4944 #undef DEF_ATTR_IDENT
4945 #undef DEF_ATTR_TREE_LIST
4949 static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
4952 install_builtin_attributes (void)
4954 /* Fill in the built_in_attributes array. */
4955 #define DEF_ATTR_NULL_TREE(ENUM) \
4956 built_in_attributes[(int) ENUM] = NULL_TREE;
4957 #define DEF_ATTR_INT(ENUM, VALUE) \
4958 built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
4959 #define DEF_ATTR_IDENT(ENUM, STRING) \
4960 built_in_attributes[(int) ENUM] = get_identifier (STRING);
4961 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \
4962 built_in_attributes[(int) ENUM] \
4963 = tree_cons (built_in_attributes[(int) PURPOSE], \
4964 built_in_attributes[(int) VALUE], \
4965 built_in_attributes[(int) CHAIN]);
4966 #include "builtin-attrs.def"
4967 #undef DEF_ATTR_NULL_TREE
4969 #undef DEF_ATTR_IDENT
4970 #undef DEF_ATTR_TREE_LIST
4973 /* Handle a "const" attribute; arguments as in
4974 struct attribute_spec.handler. */
4977 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
4978 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
4981 if (TREE_CODE (*node) == FUNCTION_DECL)
4982 TREE_READONLY (*node) = 1;
4984 *no_add_attrs = true;
4989 /* Handle a "nothrow" attribute; arguments as in
4990 struct attribute_spec.handler. */
4993 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
4994 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
4997 if (TREE_CODE (*node) == FUNCTION_DECL)
4998 TREE_NOTHROW (*node) = 1;
5000 *no_add_attrs = true;
5005 /* Handle a "pure" attribute; arguments as in
5006 struct attribute_spec.handler. */
5009 handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5010 int ARG_UNUSED (flags), bool *no_add_attrs)
5012 if (TREE_CODE (*node) == FUNCTION_DECL)
5013 DECL_PURE_P (*node) = 1;
5014 /* ??? TODO: Support types. */
5017 warning (OPT_Wattributes, "%qE attribute ignored", name);
5018 *no_add_attrs = true;
5024 /* Handle a "no vops" attribute; arguments as in
5025 struct attribute_spec.handler. */
5028 handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
5029 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5030 bool *ARG_UNUSED (no_add_attrs))
5032 gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
5033 DECL_IS_NOVOPS (*node) = 1;
5037 /* Helper for nonnull attribute handling; fetch the operand number
5038 from the attribute argument list. */
5041 get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
5043 /* Verify the arg number is a constant. */
5044 if (TREE_CODE (arg_num_expr) != INTEGER_CST
5045 || TREE_INT_CST_HIGH (arg_num_expr) != 0)
5048 *valp = TREE_INT_CST_LOW (arg_num_expr);
5052 /* Handle the "nonnull" attribute. */
5054 handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
5055 tree args, int ARG_UNUSED (flags),
5059 unsigned HOST_WIDE_INT attr_arg_num;
5061 /* If no arguments are specified, all pointer arguments should be
5062 non-null. Verify a full prototype is given so that the arguments
5063 will have the correct types when we actually check them later. */
5066 if (!TYPE_ARG_TYPES (type))
5068 error ("nonnull attribute without arguments on a non-prototype");
5069 *no_add_attrs = true;
5074 /* Argument list specified. Verify that each argument number references
5075 a pointer argument. */
5076 for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
5079 unsigned HOST_WIDE_INT arg_num = 0, ck_num;
5081 if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
5083 error ("nonnull argument has invalid operand number (argument %lu)",
5084 (unsigned long) attr_arg_num);
5085 *no_add_attrs = true;
5089 argument = TYPE_ARG_TYPES (type);
5092 for (ck_num = 1; ; ck_num++)
5094 if (!argument || ck_num == arg_num)
5096 argument = TREE_CHAIN (argument);
5100 || TREE_CODE (TREE_VALUE (argument)) == VOID_TYPE)
5102 error ("nonnull argument with out-of-range operand number (argument %lu, operand %lu)",
5103 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5104 *no_add_attrs = true;
5108 if (TREE_CODE (TREE_VALUE (argument)) != POINTER_TYPE)
5110 error ("nonnull argument references non-pointer operand (argument %lu, operand %lu)",
5111 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5112 *no_add_attrs = true;
5121 /* Handle a "sentinel" attribute. */
5124 handle_sentinel_attribute (tree *node, tree name, tree args,
5125 int ARG_UNUSED (flags), bool *no_add_attrs)
5127 tree params = TYPE_ARG_TYPES (*node);
5131 warning (OPT_Wattributes,
5132 "%qE attribute requires prototypes with named arguments", name);
5133 *no_add_attrs = true;
5137 while (TREE_CHAIN (params))
5138 params = TREE_CHAIN (params);
5140 if (VOID_TYPE_P (TREE_VALUE (params)))
5142 warning (OPT_Wattributes,
5143 "%qE attribute only applies to variadic functions", name);
5144 *no_add_attrs = true;
5150 tree position = TREE_VALUE (args);
5152 if (TREE_CODE (position) != INTEGER_CST)
5154 warning (0, "requested position is not an integer constant");
5155 *no_add_attrs = true;
5159 if (tree_int_cst_lt (position, integer_zero_node))
5161 warning (0, "requested position is less than zero");
5162 *no_add_attrs = true;
5170 /* Handle a "noreturn" attribute; arguments as in
5171 struct attribute_spec.handler. */
5174 handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5175 int ARG_UNUSED (flags), bool *no_add_attrs)
5177 tree type = TREE_TYPE (*node);
5179 /* See FIXME comment in c_common_attribute_table. */
5180 if (TREE_CODE (*node) == FUNCTION_DECL)
5181 TREE_THIS_VOLATILE (*node) = 1;
5182 else if (TREE_CODE (type) == POINTER_TYPE
5183 && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
5185 = build_pointer_type
5186 (build_type_variant (TREE_TYPE (type),
5187 TYPE_READONLY (TREE_TYPE (type)), 1));
5190 warning (OPT_Wattributes, "%qE attribute ignored", name);
5191 *no_add_attrs = true;
5197 /* Handle a "malloc" attribute; arguments as in
5198 struct attribute_spec.handler. */
5201 handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5202 int ARG_UNUSED (flags), bool *no_add_attrs)
5204 if (TREE_CODE (*node) == FUNCTION_DECL
5205 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
5206 DECL_IS_MALLOC (*node) = 1;
5209 warning (OPT_Wattributes, "%qE attribute ignored", name);
5210 *no_add_attrs = true;
5216 /* Fake handler for attributes we don't properly support. */
5219 fake_attribute_handler (tree * ARG_UNUSED (node),
5220 tree ARG_UNUSED (name),
5221 tree ARG_UNUSED (args),
5222 int ARG_UNUSED (flags),
5223 bool * ARG_UNUSED (no_add_attrs))
5228 /* Handle a "type_generic" attribute. */
5231 handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
5232 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5233 bool * ARG_UNUSED (no_add_attrs))
5237 /* Ensure we have a function type. */
5238 gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);
5240 params = TYPE_ARG_TYPES (*node);
5241 while (params && ! VOID_TYPE_P (TREE_VALUE (params)))
5242 params = TREE_CHAIN (params);
5244 /* Ensure we have a variadic function. */
5245 gcc_assert (!params);
5250 /* ----------------------------------------------------------------------- *
5251 * BUILTIN FUNCTIONS *
5252 * ----------------------------------------------------------------------- */
5254 /* Worker for DEF_BUILTIN. Possibly define a builtin function with one or two
5255 names. Does not declare a non-__builtin_ function if flag_no_builtin, or
5256 if nonansi_p and flag_no_nonansi_builtin. */
5259 def_builtin_1 (enum built_in_function fncode,
5261 enum built_in_class fnclass,
5262 tree fntype, tree libtype,
5263 bool both_p, bool fallback_p,
5264 bool nonansi_p ATTRIBUTE_UNUSED,
5265 tree fnattrs, bool implicit_p)
5268 const char *libname;
5270 /* Preserve an already installed decl. It most likely was setup in advance
5271 (e.g. as part of the internal builtins) for specific reasons. */
5272 if (built_in_decls[(int) fncode] != NULL_TREE)
5275 gcc_assert ((!both_p && !fallback_p)
5276 || !strncmp (name, "__builtin_",
5277 strlen ("__builtin_")));
5279 libname = name + strlen ("__builtin_");
5280 decl = add_builtin_function (name, fntype, fncode, fnclass,
5281 (fallback_p ? libname : NULL),
5284 /* ??? This is normally further controlled by command-line options
5285 like -fno-builtin, but we don't have them for Ada. */
5286 add_builtin_function (libname, libtype, fncode, fnclass,
5289 built_in_decls[(int) fncode] = decl;
5291 implicit_built_in_decls[(int) fncode] = decl;
5294 static int flag_isoc94 = 0;
5295 static int flag_isoc99 = 0;
5297 /* Install what the common builtins.def offers. */
5300 install_builtin_functions (void)
5302 #define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
5303 NONANSI_P, ATTRS, IMPLICIT, COND) \
5305 def_builtin_1 (ENUM, NAME, CLASS, \
5306 builtin_types[(int) TYPE], \
5307 builtin_types[(int) LIBTYPE], \
5308 BOTH_P, FALLBACK_P, NONANSI_P, \
5309 built_in_attributes[(int) ATTRS], IMPLICIT);
5310 #include "builtins.def"
5314 /* ----------------------------------------------------------------------- *
5315 * BUILTIN FUNCTIONS *
5316 * ----------------------------------------------------------------------- */
5318 /* Install the builtin functions we might need. */
5321 gnat_install_builtins (void)
5323 install_builtin_elementary_types ();
5324 install_builtin_function_types ();
5325 install_builtin_attributes ();
5327 /* Install builtins used by generic middle-end pieces first. Some of these
5328 know about internal specificities and control attributes accordingly, for
5329 instance __builtin_alloca vs no-throw and -fstack-check. We will ignore
5330 the generic definition from builtins.def. */
5331 build_common_builtin_nodes ();
5333 /* Now, install the target specific builtins, such as the AltiVec family on
5334 ppc, and the common set as exposed by builtins.def. */
5335 targetm.init_builtins ();
5336 install_builtin_functions ();
5339 #include "gt-ada-utils.h"
5340 #include "gtype-ada.h"