1 /****************************************************************************
3 * GNAT COMPILER COMPONENTS *
7 * C Implementation File *
9 * Copyright (C) 1992-2011, Free Software Foundation, Inc. *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
24 ****************************************************************************/
28 #include "coretypes.h"
33 #include "diagnostic-core.h"
39 #include "langhooks.h"
41 #include "tree-dump.h"
42 #include "tree-inline.h"
43 #include "tree-iterator.h"
59 #ifndef MAX_BITS_PER_WORD
60 #define MAX_BITS_PER_WORD BITS_PER_WORD
63 /* If nonzero, pretend we are allocating at global level. */
66 /* The default alignment of "double" floating-point types, i.e. floating
67 point types whose size is equal to 64 bits, or 0 if this alignment is
68 not specifically capped. */
69 int double_float_alignment;
71 /* The default alignment of "double" or larger scalar types, i.e. scalar
72 types whose size is greater or equal to 64 bits, or 0 if this alignment
73 is not specifically capped. */
74 int double_scalar_alignment;
76 /* Tree nodes for the various types and decls we create. */
77 tree gnat_std_decls[(int) ADT_LAST];
79 /* Functions to call for each of the possible raise reasons. */
80 tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
82 /* Likewise, but with extra info for each of the possible raise reasons. */
83 tree gnat_raise_decls_ext[(int) LAST_REASON_CODE + 1];
85 /* Forward declarations for handlers of attributes. */
86 static tree handle_const_attribute (tree *, tree, tree, int, bool *);
87 static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
88 static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
89 static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
90 static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
91 static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
92 static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
93 static tree handle_leaf_attribute (tree *, tree, tree, int, bool *);
94 static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
95 static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
96 static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
97 static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);
99 /* Fake handler for attributes we don't properly support, typically because
100 they'd require dragging a lot of the common-c front-end circuitry. */
101 static tree fake_attribute_handler (tree *, tree, tree, int, bool *);
103 /* Table of machine-independent internal attributes for Ada. We support
104 this minimal set of attributes to accommodate the needs of builtins. */
105 const struct attribute_spec gnat_internal_attribute_table[] =
107 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
108 affects_type_identity } */
109 { "const", 0, 0, true, false, false, handle_const_attribute,
111 { "nothrow", 0, 0, true, false, false, handle_nothrow_attribute,
113 { "pure", 0, 0, true, false, false, handle_pure_attribute,
115 { "no vops", 0, 0, true, false, false, handle_novops_attribute,
117 { "nonnull", 0, -1, false, true, true, handle_nonnull_attribute,
119 { "sentinel", 0, 1, false, true, true, handle_sentinel_attribute,
121 { "noreturn", 0, 0, true, false, false, handle_noreturn_attribute,
123 { "leaf", 0, 0, true, false, false, handle_leaf_attribute,
125 { "malloc", 0, 0, true, false, false, handle_malloc_attribute,
127 { "type generic", 0, 0, false, true, true, handle_type_generic_attribute,
130 { "vector_size", 1, 1, false, true, false, handle_vector_size_attribute,
132 { "vector_type", 0, 0, false, true, false, handle_vector_type_attribute,
134 { "may_alias", 0, 0, false, true, false, NULL, false },
136 /* ??? format and format_arg are heavy and not supported, which actually
137 prevents support for stdio builtins, which we however declare as part
138 of the common builtins.def contents. */
139 { "format", 3, 3, false, true, true, fake_attribute_handler, false },
140 { "format_arg", 1, 1, false, true, true, fake_attribute_handler, false },
142 { NULL, 0, 0, false, false, false, NULL, false }
145 /* Associates a GNAT tree node to a GCC tree node. It is used in
146 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
147 of `save_gnu_tree' for more info. */
148 static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
150 #define GET_GNU_TREE(GNAT_ENTITY) \
151 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
153 #define SET_GNU_TREE(GNAT_ENTITY,VAL) \
154 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
156 #define PRESENT_GNU_TREE(GNAT_ENTITY) \
157 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
159 /* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
160 static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;
162 #define GET_DUMMY_NODE(GNAT_ENTITY) \
163 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
165 #define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
166 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
168 #define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
169 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
171 /* This variable keeps a table for types for each precision so that we only
172 allocate each of them once. Signed and unsigned types are kept separate.
174 Note that these types are only used when fold-const requests something
175 special. Perhaps we should NOT share these types; we'll see how it
177 static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
179 /* Likewise for float types, but record these by mode. */
180 static GTY(()) tree float_types[NUM_MACHINE_MODES];
182 /* For each binding contour we allocate a binding_level structure to indicate
183 the binding depth. */
185 struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
186 /* The binding level containing this one (the enclosing binding level). */
187 struct gnat_binding_level *chain;
188 /* The BLOCK node for this level. */
190 /* If nonzero, the setjmp buffer that needs to be updated for any
191 variable-sized definition within this context. */
195 /* The binding level currently in effect. */
196 static GTY(()) struct gnat_binding_level *current_binding_level;
198 /* A chain of gnat_binding_level structures awaiting reuse. */
199 static GTY((deletable)) struct gnat_binding_level *free_binding_level;
201 /* An array of global declarations. */
202 static GTY(()) VEC(tree,gc) *global_decls;
204 /* An array of builtin function declarations. */
205 static GTY(()) VEC(tree,gc) *builtin_decls;
207 /* An array of global renaming pointers. */
208 static GTY(()) VEC(tree,gc) *global_renaming_pointers;
210 /* A chain of unused BLOCK nodes. */
211 static GTY((deletable)) tree free_block_chain;
213 static tree merge_sizes (tree, tree, tree, bool, bool);
214 static tree compute_related_constant (tree, tree);
215 static tree split_plus (tree, tree *);
216 static tree float_type_for_precision (int, enum machine_mode);
217 static tree convert_to_fat_pointer (tree, tree);
218 static tree convert_to_thin_pointer (tree, tree);
219 static bool potential_alignment_gap (tree, tree, tree);
220 static void process_attributes (tree, struct attrib *);
222 /* Initialize the association of GNAT nodes to GCC trees. */
225 init_gnat_to_gnu (void)
227 associate_gnat_to_gnu = ggc_alloc_cleared_vec_tree (max_gnat_nodes);
230 /* GNAT_ENTITY is a GNAT tree node for an entity. Associate GNU_DECL, a GCC
231 tree node, with GNAT_ENTITY. If GNU_DECL is not a ..._DECL node, abort.
232 If NO_CHECK is true, the latter check is suppressed.
234 If GNU_DECL is zero, reset a previous association. */
237 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
239 /* Check that GNAT_ENTITY is not already defined and that it is being set
240 to something which is a decl. If that is not the case, this usually
241 means GNAT_ENTITY is defined twice, but occasionally is due to some
243 gcc_assert (!(gnu_decl
244 && (PRESENT_GNU_TREE (gnat_entity)
245 || (!no_check && !DECL_P (gnu_decl)))));
247 SET_GNU_TREE (gnat_entity, gnu_decl);
250 /* GNAT_ENTITY is a GNAT tree node for an entity. Return the GCC tree node
251 that was associated with it. If there is no such tree node, abort.
253 In some cases, such as delayed elaboration or expressions that need to
254 be elaborated only once, GNAT_ENTITY is really not an entity. */
257 get_gnu_tree (Entity_Id gnat_entity)
259 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
260 return GET_GNU_TREE (gnat_entity);
263 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
266 present_gnu_tree (Entity_Id gnat_entity)
268 return PRESENT_GNU_TREE (gnat_entity);
271 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
274 init_dummy_type (void)
276 dummy_node_table = ggc_alloc_cleared_vec_tree (max_gnat_nodes);
279 /* Make a dummy type corresponding to GNAT_TYPE. */
282 make_dummy_type (Entity_Id gnat_type)
284 Entity_Id gnat_underlying = Gigi_Equivalent_Type (gnat_type);
287 /* If there is an equivalent type, get its underlying type. */
288 if (Present (gnat_underlying))
289 gnat_underlying = Underlying_Type (gnat_underlying);
291 /* If there was no equivalent type (can only happen when just annotating
292 types) or underlying type, go back to the original type. */
293 if (No (gnat_underlying))
294 gnat_underlying = gnat_type;
296 /* If it there already a dummy type, use that one. Else make one. */
297 if (PRESENT_DUMMY_NODE (gnat_underlying))
298 return GET_DUMMY_NODE (gnat_underlying);
300 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
302 gnu_type = make_node (Is_Record_Type (gnat_underlying)
303 ? tree_code_for_record_type (gnat_underlying)
305 TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
306 TYPE_DUMMY_P (gnu_type) = 1;
307 TYPE_STUB_DECL (gnu_type)
308 = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
309 if (Is_By_Reference_Type (gnat_type))
310 TREE_ADDRESSABLE (gnu_type) = 1;
312 SET_DUMMY_NODE (gnat_underlying, gnu_type);
317 /* Return nonzero if we are currently in the global binding level. */
320 global_bindings_p (void)
322 return ((force_global || !current_function_decl) ? -1 : 0);
325 /* Enter a new binding level. */
328 gnat_pushlevel (void)
330 struct gnat_binding_level *newlevel = NULL;
332 /* Reuse a struct for this binding level, if there is one. */
333 if (free_binding_level)
335 newlevel = free_binding_level;
336 free_binding_level = free_binding_level->chain;
339 newlevel = ggc_alloc_gnat_binding_level ();
341 /* Use a free BLOCK, if any; otherwise, allocate one. */
342 if (free_block_chain)
344 newlevel->block = free_block_chain;
345 free_block_chain = BLOCK_CHAIN (free_block_chain);
346 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
349 newlevel->block = make_node (BLOCK);
351 /* Point the BLOCK we just made to its parent. */
352 if (current_binding_level)
353 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
355 BLOCK_VARS (newlevel->block) = NULL_TREE;
356 BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
357 TREE_USED (newlevel->block) = 1;
359 /* Add this level to the front of the chain (stack) of active levels. */
360 newlevel->chain = current_binding_level;
361 newlevel->jmpbuf_decl = NULL_TREE;
362 current_binding_level = newlevel;
365 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
366 and point FNDECL to this BLOCK. */
369 set_current_block_context (tree fndecl)
371 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
372 DECL_INITIAL (fndecl) = current_binding_level->block;
373 set_block_for_group (current_binding_level->block);
376 /* Set the jmpbuf_decl for the current binding level to DECL. */
379 set_block_jmpbuf_decl (tree decl)
381 current_binding_level->jmpbuf_decl = decl;
384 /* Get the jmpbuf_decl, if any, for the current binding level. */
387 get_block_jmpbuf_decl (void)
389 return current_binding_level->jmpbuf_decl;
392 /* Exit a binding level. Set any BLOCK into the current code group. */
397 struct gnat_binding_level *level = current_binding_level;
398 tree block = level->block;
400 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
401 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
403 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
404 are no variables free the block and merge its subblocks into those of its
405 parent block. Otherwise, add it to the list of its parent. */
406 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
408 else if (BLOCK_VARS (block) == NULL_TREE)
410 BLOCK_SUBBLOCKS (level->chain->block)
411 = chainon (BLOCK_SUBBLOCKS (block),
412 BLOCK_SUBBLOCKS (level->chain->block));
413 BLOCK_CHAIN (block) = free_block_chain;
414 free_block_chain = block;
418 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
419 BLOCK_SUBBLOCKS (level->chain->block) = block;
420 TREE_USED (block) = 1;
421 set_block_for_group (block);
424 /* Free this binding structure. */
425 current_binding_level = level->chain;
426 level->chain = free_binding_level;
427 free_binding_level = level;
430 /* Exit a binding level and discard the associated BLOCK. */
435 struct gnat_binding_level *level = current_binding_level;
436 tree block = level->block;
438 BLOCK_CHAIN (block) = free_block_chain;
439 free_block_chain = block;
441 /* Free this binding structure. */
442 current_binding_level = level->chain;
443 level->chain = free_binding_level;
444 free_binding_level = level;
447 /* Records a ..._DECL node DECL as belonging to the current lexical scope
448 and uses GNAT_NODE for location information and propagating flags. */
451 gnat_pushdecl (tree decl, Node_Id gnat_node)
453 /* If this decl is public external or at toplevel, there is no context. */
454 if ((TREE_PUBLIC (decl) && DECL_EXTERNAL (decl)) || global_bindings_p ())
455 DECL_CONTEXT (decl) = 0;
458 DECL_CONTEXT (decl) = current_function_decl;
460 /* Functions imported in another function are not really nested.
461 For really nested functions mark them initially as needing
462 a static chain for uses of that flag before unnesting;
463 lower_nested_functions will then recompute it. */
464 if (TREE_CODE (decl) == FUNCTION_DECL && !TREE_PUBLIC (decl))
465 DECL_STATIC_CHAIN (decl) = 1;
468 TREE_NO_WARNING (decl) = (gnat_node == Empty || Warnings_Off (gnat_node));
470 /* Set the location of DECL and emit a declaration for it. */
471 if (Present (gnat_node))
472 Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
473 add_decl_expr (decl, gnat_node);
475 /* Put the declaration on the list. The list of declarations is in reverse
476 order. The list will be reversed later. Put global declarations in the
477 globals list and local ones in the current block. But skip TYPE_DECLs
478 for UNCONSTRAINED_ARRAY_TYPE in both cases, as they will cause trouble
479 with the debugger and aren't needed anyway. */
480 if (!(TREE_CODE (decl) == TYPE_DECL
481 && TREE_CODE (TREE_TYPE (decl)) == UNCONSTRAINED_ARRAY_TYPE))
483 if (global_bindings_p ())
485 VEC_safe_push (tree, gc, global_decls, decl);
487 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
488 VEC_safe_push (tree, gc, builtin_decls, decl);
490 else if (!DECL_EXTERNAL (decl))
492 DECL_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
493 BLOCK_VARS (current_binding_level->block) = decl;
497 /* For the declaration of a type, set its name if it either is not already
498 set or if the previous type name was not derived from a source name.
499 We'd rather have the type named with a real name and all the pointer
500 types to the same object have the same POINTER_TYPE node. Code in the
501 equivalent function of c-decl.c makes a copy of the type node here, but
502 that may cause us trouble with incomplete types. We make an exception
503 for fat pointer types because the compiler automatically builds them
504 for unconstrained array types and the debugger uses them to represent
505 both these and pointers to these. */
506 if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
508 tree t = TREE_TYPE (decl);
510 if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
512 /* Array types aren't tagged types in the C sense so we force the
513 type to be associated with its typedef in the DWARF back-end,
514 in order to make sure that the latter is always preserved. */
515 if (!DECL_ARTIFICIAL (decl) && TREE_CODE (t) == ARRAY_TYPE)
517 tree tt = build_distinct_type_copy (t);
518 TYPE_NAME (tt) = DECL_NAME (decl);
519 TYPE_STUB_DECL (tt) = TYPE_STUB_DECL (t);
520 DECL_ORIGINAL_TYPE (decl) = tt;
523 else if (TYPE_IS_FAT_POINTER_P (t))
525 tree tt = build_variant_type_copy (t);
526 TYPE_NAME (tt) = decl;
527 TREE_USED (tt) = TREE_USED (t);
528 TREE_TYPE (decl) = tt;
529 if (DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
530 DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
532 DECL_ORIGINAL_TYPE (decl) = t;
534 DECL_ARTIFICIAL (decl) = 0;
536 else if (DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
541 /* Propagate the name to all the variants. This is needed for
542 the type qualifiers machinery to work properly. */
544 for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
545 TYPE_NAME (t) = decl;
549 /* Record TYPE as a builtin type for Ada. NAME is the name of the type.
550 ARTIFICIAL_P is true if it's a type that was generated by the compiler. */
553 record_builtin_type (const char *name, tree type, bool artificial_p)
555 tree type_decl = build_decl (input_location,
556 TYPE_DECL, get_identifier (name), type);
557 DECL_ARTIFICIAL (type_decl) = artificial_p;
558 gnat_pushdecl (type_decl, Empty);
560 if (debug_hooks->type_decl)
561 debug_hooks->type_decl (type_decl, false);
564 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
565 finish constructing the record or union type. If REP_LEVEL is zero, this
566 record has no representation clause and so will be entirely laid out here.
567 If REP_LEVEL is one, this record has a representation clause and has been
568 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
569 this record is derived from a parent record and thus inherits its layout;
570 only make a pass on the fields to finalize them. DEBUG_INFO_P is true if
571 we need to write debug information about this type. */
574 finish_record_type (tree record_type, tree field_list, int rep_level,
577 enum tree_code code = TREE_CODE (record_type);
578 tree name = TYPE_NAME (record_type);
579 tree ada_size = bitsize_zero_node;
580 tree size = bitsize_zero_node;
581 bool had_size = TYPE_SIZE (record_type) != 0;
582 bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
583 bool had_align = TYPE_ALIGN (record_type) != 0;
586 TYPE_FIELDS (record_type) = field_list;
588 /* Always attach the TYPE_STUB_DECL for a record type. It is required to
589 generate debug info and have a parallel type. */
590 if (name && TREE_CODE (name) == TYPE_DECL)
591 name = DECL_NAME (name);
592 TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);
594 /* Globally initialize the record first. If this is a rep'ed record,
595 that just means some initializations; otherwise, layout the record. */
598 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
601 TYPE_SIZE_UNIT (record_type) = size_zero_node;
604 TYPE_SIZE (record_type) = bitsize_zero_node;
606 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
607 out just like a UNION_TYPE, since the size will be fixed. */
608 else if (code == QUAL_UNION_TYPE)
613 /* Ensure there isn't a size already set. There can be in an error
614 case where there is a rep clause but all fields have errors and
615 no longer have a position. */
616 TYPE_SIZE (record_type) = 0;
617 layout_type (record_type);
620 /* At this point, the position and size of each field is known. It was
621 either set before entry by a rep clause, or by laying out the type above.
623 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
624 to compute the Ada size; the GCC size and alignment (for rep'ed records
625 that are not padding types); and the mode (for rep'ed records). We also
626 clear the DECL_BIT_FIELD indication for the cases we know have not been
627 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
629 if (code == QUAL_UNION_TYPE)
630 field_list = nreverse (field_list);
632 for (field = field_list; field; field = DECL_CHAIN (field))
634 tree type = TREE_TYPE (field);
635 tree pos = bit_position (field);
636 tree this_size = DECL_SIZE (field);
639 if ((TREE_CODE (type) == RECORD_TYPE
640 || TREE_CODE (type) == UNION_TYPE
641 || TREE_CODE (type) == QUAL_UNION_TYPE)
642 && !TYPE_FAT_POINTER_P (type)
643 && !TYPE_CONTAINS_TEMPLATE_P (type)
644 && TYPE_ADA_SIZE (type))
645 this_ada_size = TYPE_ADA_SIZE (type);
647 this_ada_size = this_size;
649 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
650 if (DECL_BIT_FIELD (field)
651 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
653 unsigned int align = TYPE_ALIGN (type);
655 /* In the general case, type alignment is required. */
656 if (value_factor_p (pos, align))
658 /* The enclosing record type must be sufficiently aligned.
659 Otherwise, if no alignment was specified for it and it
660 has been laid out already, bump its alignment to the
661 desired one if this is compatible with its size. */
662 if (TYPE_ALIGN (record_type) >= align)
664 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
665 DECL_BIT_FIELD (field) = 0;
669 && value_factor_p (TYPE_SIZE (record_type), align))
671 TYPE_ALIGN (record_type) = align;
672 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
673 DECL_BIT_FIELD (field) = 0;
677 /* In the non-strict alignment case, only byte alignment is. */
678 if (!STRICT_ALIGNMENT
679 && DECL_BIT_FIELD (field)
680 && value_factor_p (pos, BITS_PER_UNIT))
681 DECL_BIT_FIELD (field) = 0;
684 /* If we still have DECL_BIT_FIELD set at this point, we know that the
685 field is technically not addressable. Except that it can actually
686 be addressed if it is BLKmode and happens to be properly aligned. */
687 if (DECL_BIT_FIELD (field)
688 && !(DECL_MODE (field) == BLKmode
689 && value_factor_p (pos, BITS_PER_UNIT)))
690 DECL_NONADDRESSABLE_P (field) = 1;
692 /* A type must be as aligned as its most aligned field that is not
693 a bit-field. But this is already enforced by layout_type. */
694 if (rep_level > 0 && !DECL_BIT_FIELD (field))
695 TYPE_ALIGN (record_type)
696 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
701 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
702 size = size_binop (MAX_EXPR, size, this_size);
705 case QUAL_UNION_TYPE:
707 = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
708 this_ada_size, ada_size);
709 size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
714 /* Since we know here that all fields are sorted in order of
715 increasing bit position, the size of the record is one
716 higher than the ending bit of the last field processed
717 unless we have a rep clause, since in that case we might
718 have a field outside a QUAL_UNION_TYPE that has a higher ending
719 position. So use a MAX in that case. Also, if this field is a
720 QUAL_UNION_TYPE, we need to take into account the previous size in
721 the case of empty variants. */
723 = merge_sizes (ada_size, pos, this_ada_size,
724 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
726 = merge_sizes (size, pos, this_size,
727 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
735 if (code == QUAL_UNION_TYPE)
736 nreverse (field_list);
740 /* If this is a padding record, we never want to make the size smaller
741 than what was specified in it, if any. */
742 if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
743 size = TYPE_SIZE (record_type);
745 /* Now set any of the values we've just computed that apply. */
746 if (!TYPE_FAT_POINTER_P (record_type)
747 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
748 SET_TYPE_ADA_SIZE (record_type, ada_size);
752 tree size_unit = had_size_unit
753 ? TYPE_SIZE_UNIT (record_type)
755 size_binop (CEIL_DIV_EXPR, size,
757 unsigned int align = TYPE_ALIGN (record_type);
759 TYPE_SIZE (record_type) = variable_size (round_up (size, align));
760 TYPE_SIZE_UNIT (record_type)
761 = variable_size (round_up (size_unit, align / BITS_PER_UNIT));
763 compute_record_mode (record_type);
768 rest_of_record_type_compilation (record_type);
771 /* Wrap up compilation of RECORD_TYPE, i.e. output all the debug information
772 associated with it. It need not be invoked directly in most cases since
773 finish_record_type takes care of doing so, but this can be necessary if
774 a parallel type is to be attached to the record type. */
777 rest_of_record_type_compilation (tree record_type)
779 tree field_list = TYPE_FIELDS (record_type);
781 enum tree_code code = TREE_CODE (record_type);
782 bool var_size = false;
784 for (field = field_list; field; field = DECL_CHAIN (field))
786 /* We need to make an XVE/XVU record if any field has variable size,
787 whether or not the record does. For example, if we have a union,
788 it may be that all fields, rounded up to the alignment, have the
789 same size, in which case we'll use that size. But the debug
790 output routines (except Dwarf2) won't be able to output the fields,
791 so we need to make the special record. */
792 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
793 /* If a field has a non-constant qualifier, the record will have
794 variable size too. */
795 || (code == QUAL_UNION_TYPE
796 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
803 /* If this record is of variable size, rename it so that the
804 debugger knows it is and make a new, parallel, record
805 that tells the debugger how the record is laid out. See
806 exp_dbug.ads. But don't do this for records that are padding
807 since they confuse GDB. */
808 if (var_size && !TYPE_IS_PADDING_P (record_type))
811 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
812 ? UNION_TYPE : TREE_CODE (record_type));
813 tree orig_name = TYPE_NAME (record_type), new_name;
814 tree last_pos = bitsize_zero_node;
815 tree old_field, prev_old_field = NULL_TREE;
817 if (TREE_CODE (orig_name) == TYPE_DECL)
818 orig_name = DECL_NAME (orig_name);
821 = concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
823 TYPE_NAME (new_record_type) = new_name;
824 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
825 TYPE_STUB_DECL (new_record_type)
826 = create_type_stub_decl (new_name, new_record_type);
827 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
828 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
829 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
830 TYPE_SIZE_UNIT (new_record_type)
831 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
833 add_parallel_type (TYPE_STUB_DECL (record_type), new_record_type);
835 /* Now scan all the fields, replacing each field with a new
836 field corresponding to the new encoding. */
837 for (old_field = TYPE_FIELDS (record_type); old_field;
838 old_field = DECL_CHAIN (old_field))
840 tree field_type = TREE_TYPE (old_field);
841 tree field_name = DECL_NAME (old_field);
843 tree curpos = bit_position (old_field);
845 unsigned int align = 0;
848 /* See how the position was modified from the last position.
850 There are two basic cases we support: a value was added
851 to the last position or the last position was rounded to
852 a boundary and they something was added. Check for the
853 first case first. If not, see if there is any evidence
854 of rounding. If so, round the last position and try
857 If this is a union, the position can be taken as zero. */
859 /* Some computations depend on the shape of the position expression,
860 so strip conversions to make sure it's exposed. */
861 curpos = remove_conversions (curpos, true);
863 if (TREE_CODE (new_record_type) == UNION_TYPE)
864 pos = bitsize_zero_node, align = 0;
866 pos = compute_related_constant (curpos, last_pos);
868 if (!pos && TREE_CODE (curpos) == MULT_EXPR
869 && host_integerp (TREE_OPERAND (curpos, 1), 1))
871 tree offset = TREE_OPERAND (curpos, 0);
872 align = tree_low_cst (TREE_OPERAND (curpos, 1), 1);
874 /* An offset which is a bitwise AND with a negative power of 2
875 means an alignment corresponding to this power of 2. Note
876 that, as sizetype is sign-extended but nonetheless unsigned,
877 we don't directly use tree_int_cst_sgn. */
878 offset = remove_conversions (offset, true);
879 if (TREE_CODE (offset) == BIT_AND_EXPR
880 && host_integerp (TREE_OPERAND (offset, 1), 0)
881 && TREE_INT_CST_HIGH (TREE_OPERAND (offset, 1)) < 0)
884 = - tree_low_cst (TREE_OPERAND (offset, 1), 0);
885 if (exact_log2 (pow) > 0)
889 pos = compute_related_constant (curpos,
890 round_up (last_pos, align));
892 else if (!pos && TREE_CODE (curpos) == PLUS_EXPR
893 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
894 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
895 && host_integerp (TREE_OPERAND
896 (TREE_OPERAND (curpos, 0), 1),
901 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
902 pos = compute_related_constant (curpos,
903 round_up (last_pos, align));
905 else if (potential_alignment_gap (prev_old_field, old_field,
908 align = TYPE_ALIGN (field_type);
909 pos = compute_related_constant (curpos,
910 round_up (last_pos, align));
913 /* If we can't compute a position, set it to zero.
915 ??? We really should abort here, but it's too much work
916 to get this correct for all cases. */
919 pos = bitsize_zero_node;
921 /* See if this type is variable-sized and make a pointer type
922 and indicate the indirection if so. Beware that the debug
923 back-end may adjust the position computed above according
924 to the alignment of the field type, i.e. the pointer type
925 in this case, if we don't preventively counter that. */
926 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
928 field_type = build_pointer_type (field_type);
929 if (align != 0 && TYPE_ALIGN (field_type) > align)
931 field_type = copy_node (field_type);
932 TYPE_ALIGN (field_type) = align;
937 /* Make a new field name, if necessary. */
938 if (var || align != 0)
943 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
944 align / BITS_PER_UNIT);
946 strcpy (suffix, "XVL");
948 field_name = concat_name (field_name, suffix);
952 = create_field_decl (field_name, field_type, new_record_type,
953 DECL_SIZE (old_field), pos, 0, 0);
954 DECL_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
955 TYPE_FIELDS (new_record_type) = new_field;
957 /* If old_field is a QUAL_UNION_TYPE, take its size as being
958 zero. The only time it's not the last field of the record
959 is when there are other components at fixed positions after
960 it (meaning there was a rep clause for every field) and we
961 want to be able to encode them. */
962 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
963 (TREE_CODE (TREE_TYPE (old_field))
966 : DECL_SIZE (old_field));
967 prev_old_field = old_field;
970 TYPE_FIELDS (new_record_type)
971 = nreverse (TYPE_FIELDS (new_record_type));
973 rest_of_type_decl_compilation (TYPE_STUB_DECL (new_record_type));
976 rest_of_type_decl_compilation (TYPE_STUB_DECL (record_type));
979 /* Append PARALLEL_TYPE on the chain of parallel types for decl. */
982 add_parallel_type (tree decl, tree parallel_type)
986 while (DECL_PARALLEL_TYPE (d))
987 d = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (d));
989 SET_DECL_PARALLEL_TYPE (d, parallel_type);
992 /* Utility function of above to merge LAST_SIZE, the previous size of a record
993 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
994 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
995 replace a value of zero with the old size. If HAS_REP is true, we take the
996 MAX of the end position of this field with LAST_SIZE. In all other cases,
997 we use FIRST_BIT plus SIZE. Return an expression for the size. */
1000 merge_sizes (tree last_size, tree first_bit, tree size, bool special,
1003 tree type = TREE_TYPE (last_size);
1006 if (!special || TREE_CODE (size) != COND_EXPR)
1008 new_size = size_binop (PLUS_EXPR, first_bit, size);
1010 new_size = size_binop (MAX_EXPR, last_size, new_size);
1014 new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
1015 integer_zerop (TREE_OPERAND (size, 1))
1016 ? last_size : merge_sizes (last_size, first_bit,
1017 TREE_OPERAND (size, 1),
1019 integer_zerop (TREE_OPERAND (size, 2))
1020 ? last_size : merge_sizes (last_size, first_bit,
1021 TREE_OPERAND (size, 2),
1024 /* We don't need any NON_VALUE_EXPRs and they can confuse us (especially
1025 when fed through substitute_in_expr) into thinking that a constant
1026 size is not constant. */
1027 while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
1028 new_size = TREE_OPERAND (new_size, 0);
1033 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1034 related by the addition of a constant. Return that constant if so. */
1037 compute_related_constant (tree op0, tree op1)
1039 tree op0_var, op1_var;
1040 tree op0_con = split_plus (op0, &op0_var);
1041 tree op1_con = split_plus (op1, &op1_var);
1042 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1044 if (operand_equal_p (op0_var, op1_var, 0))
1046 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1052 /* Utility function of above to split a tree OP which may be a sum, into a
1053 constant part, which is returned, and a variable part, which is stored
1054 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1058 split_plus (tree in, tree *pvar)
1060 /* Strip NOPS in order to ease the tree traversal and maximize the
1061 potential for constant or plus/minus discovery. We need to be careful
1062 to always return and set *pvar to bitsizetype trees, but it's worth
1066 *pvar = convert (bitsizetype, in);
1068 if (TREE_CODE (in) == INTEGER_CST)
1070 *pvar = bitsize_zero_node;
1071 return convert (bitsizetype, in);
1073 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1075 tree lhs_var, rhs_var;
1076 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1077 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
1079 if (lhs_var == TREE_OPERAND (in, 0)
1080 && rhs_var == TREE_OPERAND (in, 1))
1081 return bitsize_zero_node;
1083 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1084 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
1087 return bitsize_zero_node;
1090 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1091 subprogram. If it is VOID_TYPE, then we are dealing with a procedure,
1092 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1093 PARM_DECL nodes that are the subprogram parameters. CICO_LIST is the
1094 copy-in/copy-out list to be stored into the TYPE_CICO_LIST field.
1095 RETURN_UNCONSTRAINED_P is true if the function returns an unconstrained
1096 object. RETURN_BY_DIRECT_REF_P is true if the function returns by direct
1097 reference. RETURN_BY_INVISI_REF_P is true if the function returns by
1098 invisible reference. */
1101 create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
1102 bool return_unconstrained_p, bool return_by_direct_ref_p,
1103 bool return_by_invisi_ref_p)
1105 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1106 the subprogram formal parameters. This list is generated by traversing
1107 the input list of PARM_DECL nodes. */
1108 tree param_type_list = NULL_TREE;
1111 for (t = param_decl_list; t; t = DECL_CHAIN (t))
1112 param_type_list = tree_cons (NULL_TREE, TREE_TYPE (t), param_type_list);
1114 /* The list of the function parameter types has to be terminated by the void
1115 type to signal to the back-end that we are not dealing with a variable
1116 parameter subprogram, but that it has a fixed number of parameters. */
1117 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1119 /* The list of argument types has been created in reverse so reverse it. */
1120 param_type_list = nreverse (param_type_list);
1122 type = build_function_type (return_type, param_type_list);
1124 /* TYPE may have been shared since GCC hashes types. If it has a different
1125 CICO_LIST, make a copy. Likewise for the various flags. */
1126 if (!fntype_same_flags_p (type, cico_list, return_unconstrained_p,
1127 return_by_direct_ref_p, return_by_invisi_ref_p))
1129 type = copy_type (type);
1130 TYPE_CI_CO_LIST (type) = cico_list;
1131 TYPE_RETURN_UNCONSTRAINED_P (type) = return_unconstrained_p;
1132 TYPE_RETURN_BY_DIRECT_REF_P (type) = return_by_direct_ref_p;
1133 TREE_ADDRESSABLE (type) = return_by_invisi_ref_p;
1139 /* Return a copy of TYPE but safe to modify in any way. */
1142 copy_type (tree type)
1144 tree new_type = copy_node (type);
1146 /* Unshare the language-specific data. */
1147 if (TYPE_LANG_SPECIFIC (type))
1149 TYPE_LANG_SPECIFIC (new_type) = NULL;
1150 SET_TYPE_LANG_SPECIFIC (new_type, GET_TYPE_LANG_SPECIFIC (type));
1153 /* And the contents of the language-specific slot if needed. */
1154 if ((INTEGRAL_TYPE_P (type) || TREE_CODE (type) == REAL_TYPE)
1155 && TYPE_RM_VALUES (type))
1157 TYPE_RM_VALUES (new_type) = NULL_TREE;
1158 SET_TYPE_RM_SIZE (new_type, TYPE_RM_SIZE (type));
1159 SET_TYPE_RM_MIN_VALUE (new_type, TYPE_RM_MIN_VALUE (type));
1160 SET_TYPE_RM_MAX_VALUE (new_type, TYPE_RM_MAX_VALUE (type));
1163 /* copy_node clears this field instead of copying it, because it is
1164 aliased with TREE_CHAIN. */
1165 TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);
1167 TYPE_POINTER_TO (new_type) = 0;
1168 TYPE_REFERENCE_TO (new_type) = 0;
1169 TYPE_MAIN_VARIANT (new_type) = new_type;
1170 TYPE_NEXT_VARIANT (new_type) = 0;
1175 /* Return a subtype of sizetype with range MIN to MAX and whose
1176 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
1177 of the associated TYPE_DECL. */
1180 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
1182 /* First build a type for the desired range. */
1183 tree type = build_nonshared_range_type (sizetype, min, max);
1185 /* Then set the index type. */
1186 SET_TYPE_INDEX_TYPE (type, index);
1187 create_type_decl (NULL_TREE, type, NULL, true, false, gnat_node);
1192 /* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
1193 sizetype is used. */
1196 create_range_type (tree type, tree min, tree max)
1200 if (type == NULL_TREE)
1203 /* First build a type with the base range. */
1204 range_type = build_nonshared_range_type (type, TYPE_MIN_VALUE (type),
1205 TYPE_MAX_VALUE (type));
1207 /* Then set the actual range. */
1208 SET_TYPE_RM_MIN_VALUE (range_type, convert (type, min));
1209 SET_TYPE_RM_MAX_VALUE (range_type, convert (type, max));
1214 /* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of a type.
1215 TYPE_NAME gives the name of the type and TYPE is a ..._TYPE node giving
1219 create_type_stub_decl (tree type_name, tree type)
1221 /* Using a named TYPE_DECL ensures that a type name marker is emitted in
1222 STABS while setting DECL_ARTIFICIAL ensures that no DW_TAG_typedef is
1223 emitted in DWARF. */
1224 tree type_decl = build_decl (input_location,
1225 TYPE_DECL, type_name, type);
1226 DECL_ARTIFICIAL (type_decl) = 1;
1230 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type and TYPE
1231 is a ..._TYPE node giving its data type. ARTIFICIAL_P is true if this
1232 is a declaration that was generated by the compiler. DEBUG_INFO_P is
1233 true if we need to write debug information about this type. GNAT_NODE
1234 is used for the position of the decl. */
1237 create_type_decl (tree type_name, tree type, struct attrib *attr_list,
1238 bool artificial_p, bool debug_info_p, Node_Id gnat_node)
1240 enum tree_code code = TREE_CODE (type);
1241 bool named = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
1244 /* Only the builtin TYPE_STUB_DECL should be used for dummy types. */
1245 gcc_assert (!TYPE_IS_DUMMY_P (type));
1247 /* If the type hasn't been named yet, we're naming it; preserve an existing
1248 TYPE_STUB_DECL that has been attached to it for some purpose. */
1249 if (!named && TYPE_STUB_DECL (type))
1251 type_decl = TYPE_STUB_DECL (type);
1252 DECL_NAME (type_decl) = type_name;
1255 type_decl = build_decl (input_location,
1256 TYPE_DECL, type_name, type);
1258 DECL_ARTIFICIAL (type_decl) = artificial_p;
1260 /* Add this decl to the current binding level. */
1261 gnat_pushdecl (type_decl, gnat_node);
1263 process_attributes (type_decl, attr_list);
1265 /* If we're naming the type, equate the TYPE_STUB_DECL to the name.
1266 This causes the name to be also viewed as a "tag" by the debug
1267 back-end, with the advantage that no DW_TAG_typedef is emitted
1268 for artificial "tagged" types in DWARF. */
1270 TYPE_STUB_DECL (type) = type_decl;
1272 /* Pass the type declaration to the debug back-end unless this is an
1273 UNCONSTRAINED_ARRAY_TYPE that the back-end does not support, or a
1274 type for which debugging information was not requested, or else an
1275 ENUMERAL_TYPE or RECORD_TYPE (except for fat pointers) which are
1276 handled separately. And do not pass dummy types either. */
1277 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
1278 DECL_IGNORED_P (type_decl) = 1;
1279 else if (code != ENUMERAL_TYPE
1280 && (code != RECORD_TYPE || TYPE_FAT_POINTER_P (type))
1281 && !((code == POINTER_TYPE || code == REFERENCE_TYPE)
1282 && TYPE_IS_DUMMY_P (TREE_TYPE (type)))
1283 && !(code == RECORD_TYPE
1285 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))))))
1286 rest_of_type_decl_compilation (type_decl);
1291 /* Return a VAR_DECL or CONST_DECL node.
1293 VAR_NAME gives the name of the variable. ASM_NAME is its assembler name
1294 (if provided). TYPE is its data type (a GCC ..._TYPE node). VAR_INIT is
1295 the GCC tree for an optional initial expression; NULL_TREE if none.
1297 CONST_FLAG is true if this variable is constant, in which case we might
1298 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
1300 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
1301 definition to be made visible outside of the current compilation unit, for
1302 instance variable definitions in a package specification.
1304 EXTERN_FLAG is true when processing an external variable declaration (as
1305 opposed to a definition: no storage is to be allocated for the variable).
1307 STATIC_FLAG is only relevant when not at top level. In that case
1308 it indicates whether to always allocate storage to the variable.
1310 GNAT_NODE is used for the position of the decl. */
1313 create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
1314 bool const_flag, bool public_flag, bool extern_flag,
1315 bool static_flag, bool const_decl_allowed_p,
1316 struct attrib *attr_list, Node_Id gnat_node)
1320 && gnat_types_compatible_p (type, TREE_TYPE (var_init))
1321 && (global_bindings_p () || static_flag
1322 ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init)) != 0
1323 : TREE_CONSTANT (var_init)));
1325 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
1326 case the initializer may be used in-lieu of the DECL node (as done in
1327 Identifier_to_gnu). This is useful to prevent the need of elaboration
1328 code when an identifier for which such a decl is made is in turn used as
1329 an initializer. We used to rely on CONST vs VAR_DECL for this purpose,
1330 but extra constraints apply to this choice (see below) and are not
1331 relevant to the distinction we wish to make. */
1332 bool constant_p = const_flag && init_const;
1334 /* The actual DECL node. CONST_DECL was initially intended for enumerals
1335 and may be used for scalars in general but not for aggregates. */
1337 = build_decl (input_location,
1338 (constant_p && const_decl_allowed_p
1339 && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
1342 /* If this is external, throw away any initializations (they will be done
1343 elsewhere) unless this is a constant for which we would like to remain
1344 able to get the initializer. If we are defining a global here, leave a
1345 constant initialization and save any variable elaborations for the
1346 elaboration routine. If we are just annotating types, throw away the
1347 initialization if it isn't a constant. */
1348 if ((extern_flag && !constant_p)
1349 || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
1350 var_init = NULL_TREE;
1352 /* At the global level, an initializer requiring code to be generated
1353 produces elaboration statements. Check that such statements are allowed,
1354 that is, not violating a No_Elaboration_Code restriction. */
1355 if (global_bindings_p () && var_init != 0 && !init_const)
1356 Check_Elaboration_Code_Allowed (gnat_node);
1358 DECL_INITIAL (var_decl) = var_init;
1359 TREE_READONLY (var_decl) = const_flag;
1360 DECL_EXTERNAL (var_decl) = extern_flag;
1361 TREE_PUBLIC (var_decl) = public_flag || extern_flag;
1362 TREE_CONSTANT (var_decl) = constant_p;
1363 TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
1364 = TYPE_VOLATILE (type);
1366 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
1367 try to fiddle with DECL_COMMON. However, on platforms that don't
1368 support global BSS sections, uninitialized global variables would
1369 go in DATA instead, thus increasing the size of the executable. */
1371 && TREE_CODE (var_decl) == VAR_DECL
1372 && TREE_PUBLIC (var_decl)
1373 && !have_global_bss_p ())
1374 DECL_COMMON (var_decl) = 1;
1376 /* At the global binding level, we need to allocate static storage for the
1377 variable if it isn't external. Otherwise, we allocate automatic storage
1378 unless requested not to. */
1379 TREE_STATIC (var_decl)
1380 = !extern_flag && (static_flag || global_bindings_p ());
1382 /* For an external constant whose initializer is not absolute, do not emit
1383 debug info. In DWARF this would mean a global relocation in a read-only
1384 section which runs afoul of the PE-COFF run-time relocation mechanism. */
1387 && initializer_constant_valid_p (var_init, TREE_TYPE (var_init))
1388 != null_pointer_node)
1389 DECL_IGNORED_P (var_decl) = 1;
1391 /* Add this decl to the current binding level. */
1392 gnat_pushdecl (var_decl, gnat_node);
1394 if (TREE_SIDE_EFFECTS (var_decl))
1395 TREE_ADDRESSABLE (var_decl) = 1;
1397 if (TREE_CODE (var_decl) == VAR_DECL)
1400 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
1401 process_attributes (var_decl, attr_list);
1402 if (global_bindings_p ())
1403 rest_of_decl_compilation (var_decl, true, 0);
1406 expand_decl (var_decl);
1411 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
1414 aggregate_type_contains_array_p (tree type)
1416 switch (TREE_CODE (type))
1420 case QUAL_UNION_TYPE:
1423 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
1424 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
1425 && aggregate_type_contains_array_p (TREE_TYPE (field)))
1438 /* Return a FIELD_DECL node. FIELD_NAME is the field's name, FIELD_TYPE is
1439 its type and RECORD_TYPE is the type of the enclosing record. If SIZE is
1440 nonzero, it is the specified size of the field. If POS is nonzero, it is
1441 the bit position. PACKED is 1 if the enclosing record is packed, -1 if it
1442 has Component_Alignment of Storage_Unit. If ADDRESSABLE is nonzero, it
1443 means we are allowed to take the address of the field; if it is negative,
1444 we should not make a bitfield, which is used by make_aligning_type. */
1447 create_field_decl (tree field_name, tree field_type, tree record_type,
1448 tree size, tree pos, int packed, int addressable)
1450 tree field_decl = build_decl (input_location,
1451 FIELD_DECL, field_name, field_type);
1453 DECL_CONTEXT (field_decl) = record_type;
1454 TREE_READONLY (field_decl) = TYPE_READONLY (field_type);
1456 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
1457 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
1458 Likewise for an aggregate without specified position that contains an
1459 array, because in this case slices of variable length of this array
1460 must be handled by GCC and variable-sized objects need to be aligned
1461 to at least a byte boundary. */
1462 if (packed && (TYPE_MODE (field_type) == BLKmode
1464 && AGGREGATE_TYPE_P (field_type)
1465 && aggregate_type_contains_array_p (field_type))))
1466 DECL_ALIGN (field_decl) = BITS_PER_UNIT;
1468 /* If a size is specified, use it. Otherwise, if the record type is packed
1469 compute a size to use, which may differ from the object's natural size.
1470 We always set a size in this case to trigger the checks for bitfield
1471 creation below, which is typically required when no position has been
1474 size = convert (bitsizetype, size);
1475 else if (packed == 1)
1477 size = rm_size (field_type);
1478 if (TYPE_MODE (field_type) == BLKmode)
1479 size = round_up (size, BITS_PER_UNIT);
1482 /* If we may, according to ADDRESSABLE, make a bitfield if a size is
1483 specified for two reasons: first if the size differs from the natural
1484 size. Second, if the alignment is insufficient. There are a number of
1485 ways the latter can be true.
1487 We never make a bitfield if the type of the field has a nonconstant size,
1488 because no such entity requiring bitfield operations should reach here.
1490 We do *preventively* make a bitfield when there might be the need for it
1491 but we don't have all the necessary information to decide, as is the case
1492 of a field with no specified position in a packed record.
1494 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
1495 in layout_decl or finish_record_type to clear the bit_field indication if
1496 it is in fact not needed. */
1497 if (addressable >= 0
1499 && TREE_CODE (size) == INTEGER_CST
1500 && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
1501 && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
1502 || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
1504 || (TYPE_ALIGN (record_type) != 0
1505 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
1507 DECL_BIT_FIELD (field_decl) = 1;
1508 DECL_SIZE (field_decl) = size;
1509 if (!packed && !pos)
1511 if (TYPE_ALIGN (record_type) != 0
1512 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))
1513 DECL_ALIGN (field_decl) = TYPE_ALIGN (record_type);
1515 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
1519 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
1521 /* Bump the alignment if need be, either for bitfield/packing purposes or
1522 to satisfy the type requirements if no such consideration applies. When
1523 we get the alignment from the type, indicate if this is from an explicit
1524 user request, which prevents stor-layout from lowering it later on. */
1526 unsigned int bit_align
1527 = (DECL_BIT_FIELD (field_decl) ? 1
1528 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT : 0);
1530 if (bit_align > DECL_ALIGN (field_decl))
1531 DECL_ALIGN (field_decl) = bit_align;
1532 else if (!bit_align && TYPE_ALIGN (field_type) > DECL_ALIGN (field_decl))
1534 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
1535 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (field_type);
1541 /* We need to pass in the alignment the DECL is known to have.
1542 This is the lowest-order bit set in POS, but no more than
1543 the alignment of the record, if one is specified. Note
1544 that an alignment of 0 is taken as infinite. */
1545 unsigned int known_align;
1547 if (host_integerp (pos, 1))
1548 known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
1550 known_align = BITS_PER_UNIT;
1552 if (TYPE_ALIGN (record_type)
1553 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
1554 known_align = TYPE_ALIGN (record_type);
1556 layout_decl (field_decl, known_align);
1557 SET_DECL_OFFSET_ALIGN (field_decl,
1558 host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
1560 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
1561 &DECL_FIELD_BIT_OFFSET (field_decl),
1562 DECL_OFFSET_ALIGN (field_decl), pos);
1565 /* In addition to what our caller says, claim the field is addressable if we
1566 know that its type is not suitable.
1568 The field may also be "technically" nonaddressable, meaning that even if
1569 we attempt to take the field's address we will actually get the address
1570 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
1571 value we have at this point is not accurate enough, so we don't account
1572 for this here and let finish_record_type decide. */
1573 if (!addressable && !type_for_nonaliased_component_p (field_type))
1576 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
1581 /* Return a PARM_DECL node. PARAM_NAME is the name of the parameter and
1582 PARAM_TYPE is its type. READONLY is true if the parameter is readonly
1583 (either an In parameter or an address of a pass-by-ref parameter). */
1586 create_param_decl (tree param_name, tree param_type, bool readonly)
1588 tree param_decl = build_decl (input_location,
1589 PARM_DECL, param_name, param_type);
1591 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
1592 can lead to various ABI violations. */
1593 if (targetm.calls.promote_prototypes (NULL_TREE)
1594 && INTEGRAL_TYPE_P (param_type)
1595 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
1597 /* We have to be careful about biased types here. Make a subtype
1598 of integer_type_node with the proper biasing. */
1599 if (TREE_CODE (param_type) == INTEGER_TYPE
1600 && TYPE_BIASED_REPRESENTATION_P (param_type))
1603 = make_unsigned_type (TYPE_PRECISION (integer_type_node));
1604 TREE_TYPE (subtype) = integer_type_node;
1605 TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
1606 SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (param_type));
1607 SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (param_type));
1608 param_type = subtype;
1611 param_type = integer_type_node;
1614 DECL_ARG_TYPE (param_decl) = param_type;
1615 TREE_READONLY (param_decl) = readonly;
1619 /* Given a DECL and ATTR_LIST, process the listed attributes. */
1622 process_attributes (tree decl, struct attrib *attr_list)
1624 for (; attr_list; attr_list = attr_list->next)
1625 switch (attr_list->type)
1627 case ATTR_MACHINE_ATTRIBUTE:
1628 input_location = DECL_SOURCE_LOCATION (decl);
1629 decl_attributes (&decl, tree_cons (attr_list->name, attr_list->args,
1631 ATTR_FLAG_TYPE_IN_PLACE);
1634 case ATTR_LINK_ALIAS:
1635 if (! DECL_EXTERNAL (decl))
1637 TREE_STATIC (decl) = 1;
1638 assemble_alias (decl, attr_list->name);
1642 case ATTR_WEAK_EXTERNAL:
1644 declare_weak (decl);
1646 post_error ("?weak declarations not supported on this target",
1647 attr_list->error_point);
1650 case ATTR_LINK_SECTION:
1651 if (targetm.have_named_sections)
1653 DECL_SECTION_NAME (decl)
1654 = build_string (IDENTIFIER_LENGTH (attr_list->name),
1655 IDENTIFIER_POINTER (attr_list->name));
1656 DECL_COMMON (decl) = 0;
1659 post_error ("?section attributes are not supported for this target",
1660 attr_list->error_point);
1663 case ATTR_LINK_CONSTRUCTOR:
1664 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1665 TREE_USED (decl) = 1;
1668 case ATTR_LINK_DESTRUCTOR:
1669 DECL_STATIC_DESTRUCTOR (decl) = 1;
1670 TREE_USED (decl) = 1;
1673 case ATTR_THREAD_LOCAL_STORAGE:
1674 DECL_TLS_MODEL (decl) = decl_default_tls_model (decl);
1675 DECL_COMMON (decl) = 0;
1680 /* Record DECL as a global renaming pointer. */
1683 record_global_renaming_pointer (tree decl)
1685 gcc_assert (DECL_RENAMED_OBJECT (decl));
1686 VEC_safe_push (tree, gc, global_renaming_pointers, decl);
1689 /* Invalidate the global renaming pointers. */
1692 invalidate_global_renaming_pointers (void)
1697 FOR_EACH_VEC_ELT (tree, global_renaming_pointers, i, iter)
1698 SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);
1700 VEC_free (tree, gc, global_renaming_pointers);
1703 /* Return true if VALUE is a known to be a multiple of FACTOR, which must be
1707 value_factor_p (tree value, HOST_WIDE_INT factor)
1709 if (host_integerp (value, 1))
1710 return tree_low_cst (value, 1) % factor == 0;
1712 if (TREE_CODE (value) == MULT_EXPR)
1713 return (value_factor_p (TREE_OPERAND (value, 0), factor)
1714 || value_factor_p (TREE_OPERAND (value, 1), factor));
1719 /* Given 2 consecutive field decls PREV_FIELD and CURR_FIELD, return true
1720 unless we can prove these 2 fields are laid out in such a way that no gap
1721 exist between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
1722 is the distance in bits between the end of PREV_FIELD and the starting
1723 position of CURR_FIELD. It is ignored if null. */
1726 potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
1728 /* If this is the first field of the record, there cannot be any gap */
1732 /* If the previous field is a union type, then return False: The only
1733 time when such a field is not the last field of the record is when
1734 there are other components at fixed positions after it (meaning there
1735 was a rep clause for every field), in which case we don't want the
1736 alignment constraint to override them. */
1737 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
1740 /* If the distance between the end of prev_field and the beginning of
1741 curr_field is constant, then there is a gap if the value of this
1742 constant is not null. */
1743 if (offset && host_integerp (offset, 1))
1744 return !integer_zerop (offset);
1746 /* If the size and position of the previous field are constant,
1747 then check the sum of this size and position. There will be a gap
1748 iff it is not multiple of the current field alignment. */
1749 if (host_integerp (DECL_SIZE (prev_field), 1)
1750 && host_integerp (bit_position (prev_field), 1))
1751 return ((tree_low_cst (bit_position (prev_field), 1)
1752 + tree_low_cst (DECL_SIZE (prev_field), 1))
1753 % DECL_ALIGN (curr_field) != 0);
1755 /* If both the position and size of the previous field are multiples
1756 of the current field alignment, there cannot be any gap. */
1757 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
1758 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
1761 /* Fallback, return that there may be a potential gap */
1765 /* Returns a LABEL_DECL node for LABEL_NAME. */
1768 create_label_decl (tree label_name)
1770 tree label_decl = build_decl (input_location,
1771 LABEL_DECL, label_name, void_type_node);
1773 DECL_CONTEXT (label_decl) = current_function_decl;
1774 DECL_MODE (label_decl) = VOIDmode;
1775 DECL_SOURCE_LOCATION (label_decl) = input_location;
1780 /* Returns a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
1781 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1782 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1783 PARM_DECL nodes chained through the TREE_CHAIN field).
1785 INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
1786 appropriate fields in the FUNCTION_DECL. GNAT_NODE gives the location. */
1789 create_subprog_decl (tree subprog_name, tree asm_name,
1790 tree subprog_type, tree param_decl_list, bool inline_flag,
1791 bool public_flag, bool extern_flag,
1792 struct attrib *attr_list, Node_Id gnat_node)
1794 tree subprog_decl = build_decl (input_location, FUNCTION_DECL, subprog_name,
1796 tree result_decl = build_decl (input_location, RESULT_DECL, NULL_TREE,
1797 TREE_TYPE (subprog_type));
1799 /* If this is a non-inline function nested inside an inlined external
1800 function, we cannot honor both requests without cloning the nested
1801 function in the current unit since it is private to the other unit.
1802 We could inline the nested function as well but it's probably better
1803 to err on the side of too little inlining. */
1806 && current_function_decl
1807 && DECL_DECLARED_INLINE_P (current_function_decl)
1808 && DECL_EXTERNAL (current_function_decl))
1809 DECL_DECLARED_INLINE_P (current_function_decl) = 0;
1811 DECL_EXTERNAL (subprog_decl) = extern_flag;
1812 TREE_PUBLIC (subprog_decl) = public_flag;
1813 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
1814 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
1815 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
1816 DECL_DECLARED_INLINE_P (subprog_decl) = inline_flag;
1817 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
1819 DECL_ARTIFICIAL (result_decl) = 1;
1820 DECL_IGNORED_P (result_decl) = 1;
1821 DECL_BY_REFERENCE (result_decl) = TREE_ADDRESSABLE (subprog_type);
1822 DECL_RESULT (subprog_decl) = result_decl;
1826 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
1828 /* The expand_main_function circuitry expects "main_identifier_node" to
1829 designate the DECL_NAME of the 'main' entry point, in turn expected
1830 to be declared as the "main" function literally by default. Ada
1831 program entry points are typically declared with a different name
1832 within the binder generated file, exported as 'main' to satisfy the
1833 system expectations. Force main_identifier_node in this case. */
1834 if (asm_name == main_identifier_node)
1835 DECL_NAME (subprog_decl) = main_identifier_node;
1838 /* Add this decl to the current binding level. */
1839 gnat_pushdecl (subprog_decl, gnat_node);
1841 process_attributes (subprog_decl, attr_list);
1843 /* Output the assembler code and/or RTL for the declaration. */
1844 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
1846 return subprog_decl;
1849 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
1850 body. This routine needs to be invoked before processing the declarations
1851 appearing in the subprogram. */
1854 begin_subprog_body (tree subprog_decl)
1858 announce_function (subprog_decl);
1860 /* This function is being defined. */
1861 TREE_STATIC (subprog_decl) = 1;
1863 current_function_decl = subprog_decl;
1865 /* Enter a new binding level and show that all the parameters belong to
1869 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
1870 param_decl = DECL_CHAIN (param_decl))
1871 DECL_CONTEXT (param_decl) = subprog_decl;
1873 make_decl_rtl (subprog_decl);
1875 /* We handle pending sizes via the elaboration of types, so we don't need to
1876 save them. This causes them to be marked as part of the outer function
1877 and then discarded. */
1878 get_pending_sizes ();
1881 /* Finish the definition of the current subprogram BODY and finalize it. */
1884 end_subprog_body (tree body)
1886 tree fndecl = current_function_decl;
1888 /* Attach the BLOCK for this level to the function and pop the level. */
1889 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
1890 DECL_INITIAL (fndecl) = current_binding_level->block;
1893 /* We handle pending sizes via the elaboration of types, so we don't
1894 need to save them. */
1895 get_pending_sizes ();
1897 /* Mark the RESULT_DECL as being in this subprogram. */
1898 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
1900 /* The body should be a BIND_EXPR whose BLOCK is the top-level one. */
1901 if (TREE_CODE (body) == BIND_EXPR)
1903 BLOCK_SUPERCONTEXT (BIND_EXPR_BLOCK (body)) = fndecl;
1904 DECL_INITIAL (fndecl) = BIND_EXPR_BLOCK (body);
1907 DECL_SAVED_TREE (fndecl) = body;
1909 current_function_decl = DECL_CONTEXT (fndecl);
1911 /* We cannot track the location of errors past this point. */
1912 error_gnat_node = Empty;
1914 /* If we're only annotating types, don't actually compile this function. */
1915 if (type_annotate_only)
1918 /* Dump functions before gimplification. */
1919 dump_function (TDI_original, fndecl);
1921 /* ??? This special handling of nested functions is probably obsolete. */
1922 if (!DECL_CONTEXT (fndecl))
1923 cgraph_finalize_function (fndecl, false);
1925 /* Register this function with cgraph just far enough to get it
1926 added to our parent's nested function list. */
1927 (void) cgraph_node (fndecl);
1931 gnat_builtin_function (tree decl)
1933 gnat_pushdecl (decl, Empty);
1937 /* Return an integer type with the number of bits of precision given by
1938 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
1939 it is a signed type. */
1942 gnat_type_for_size (unsigned precision, int unsignedp)
1947 if (precision <= 2 * MAX_BITS_PER_WORD
1948 && signed_and_unsigned_types[precision][unsignedp])
1949 return signed_and_unsigned_types[precision][unsignedp];
1952 t = make_unsigned_type (precision);
1954 t = make_signed_type (precision);
1956 if (precision <= 2 * MAX_BITS_PER_WORD)
1957 signed_and_unsigned_types[precision][unsignedp] = t;
1961 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
1962 TYPE_NAME (t) = get_identifier (type_name);
1968 /* Likewise for floating-point types. */
1971 float_type_for_precision (int precision, enum machine_mode mode)
1976 if (float_types[(int) mode])
1977 return float_types[(int) mode];
1979 float_types[(int) mode] = t = make_node (REAL_TYPE);
1980 TYPE_PRECISION (t) = precision;
1983 gcc_assert (TYPE_MODE (t) == mode);
1986 sprintf (type_name, "FLOAT_%d", precision);
1987 TYPE_NAME (t) = get_identifier (type_name);
1993 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
1994 an unsigned type; otherwise a signed type is returned. */
1997 gnat_type_for_mode (enum machine_mode mode, int unsignedp)
1999 if (mode == BLKmode)
2002 if (mode == VOIDmode)
2003 return void_type_node;
2005 if (COMPLEX_MODE_P (mode))
2008 if (SCALAR_FLOAT_MODE_P (mode))
2009 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
2011 if (SCALAR_INT_MODE_P (mode))
2012 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2014 if (VECTOR_MODE_P (mode))
2016 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2017 tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
2019 return build_vector_type_for_mode (inner_type, mode);
2025 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
2028 gnat_unsigned_type (tree type_node)
2030 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
2032 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2034 type = copy_node (type);
2035 TREE_TYPE (type) = type_node;
2037 else if (TREE_TYPE (type_node)
2038 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2039 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2041 type = copy_node (type);
2042 TREE_TYPE (type) = TREE_TYPE (type_node);
2048 /* Return the signed version of a TYPE_NODE, a scalar type. */
2051 gnat_signed_type (tree type_node)
2053 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
2055 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2057 type = copy_node (type);
2058 TREE_TYPE (type) = type_node;
2060 else if (TREE_TYPE (type_node)
2061 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2062 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2064 type = copy_node (type);
2065 TREE_TYPE (type) = TREE_TYPE (type_node);
2071 /* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
2072 transparently converted to each other. */
2075 gnat_types_compatible_p (tree t1, tree t2)
2077 enum tree_code code;
2079 /* This is the default criterion. */
2080 if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
2083 /* We only check structural equivalence here. */
2084 if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
2087 /* Vector types are also compatible if they have the same number of subparts
2088 and the same form of (scalar) element type. */
2089 if (code == VECTOR_TYPE
2090 && TYPE_VECTOR_SUBPARTS (t1) == TYPE_VECTOR_SUBPARTS (t2)
2091 && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
2092 && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
2095 /* Array types are also compatible if they are constrained and have the same
2096 domain(s) and the same component type. */
2097 if (code == ARRAY_TYPE
2098 && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
2099 || (TYPE_DOMAIN (t1)
2101 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
2102 TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
2103 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
2104 TYPE_MAX_VALUE (TYPE_DOMAIN (t2)))))
2105 && (TREE_TYPE (t1) == TREE_TYPE (t2)
2106 || (TREE_CODE (TREE_TYPE (t1)) == ARRAY_TYPE
2107 && gnat_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))))
2110 /* Padding record types are also compatible if they pad the same
2111 type and have the same constant size. */
2112 if (code == RECORD_TYPE
2113 && TYPE_PADDING_P (t1) && TYPE_PADDING_P (t2)
2114 && TREE_TYPE (TYPE_FIELDS (t1)) == TREE_TYPE (TYPE_FIELDS (t2))
2115 && tree_int_cst_equal (TYPE_SIZE (t1), TYPE_SIZE (t2)))
2121 /* Return true if T, a FUNCTION_TYPE, has the specified list of flags. */
2124 fntype_same_flags_p (const_tree t, tree cico_list, bool return_unconstrained_p,
2125 bool return_by_direct_ref_p, bool return_by_invisi_ref_p)
2127 return TYPE_CI_CO_LIST (t) == cico_list
2128 && TYPE_RETURN_UNCONSTRAINED_P (t) == return_unconstrained_p
2129 && TYPE_RETURN_BY_DIRECT_REF_P (t) == return_by_direct_ref_p
2130 && TREE_ADDRESSABLE (t) == return_by_invisi_ref_p;
2133 /* EXP is an expression for the size of an object. If this size contains
2134 discriminant references, replace them with the maximum (if MAX_P) or
2135 minimum (if !MAX_P) possible value of the discriminant. */
2138 max_size (tree exp, bool max_p)
2140 enum tree_code code = TREE_CODE (exp);
2141 tree type = TREE_TYPE (exp);
2143 switch (TREE_CODE_CLASS (code))
2145 case tcc_declaration:
2150 if (code == CALL_EXPR)
2155 t = maybe_inline_call_in_expr (exp);
2157 return max_size (t, max_p);
2159 n = call_expr_nargs (exp);
2161 argarray = XALLOCAVEC (tree, n);
2162 for (i = 0; i < n; i++)
2163 argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
2164 return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
2169 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
2170 modify. Otherwise, we treat it like a variable. */
2171 if (!CONTAINS_PLACEHOLDER_P (exp))
2174 type = TREE_TYPE (TREE_OPERAND (exp, 1));
2176 max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), true);
2178 case tcc_comparison:
2179 return max_p ? size_one_node : size_zero_node;
2183 case tcc_expression:
2184 switch (TREE_CODE_LENGTH (code))
2187 if (code == NON_LVALUE_EXPR)
2188 return max_size (TREE_OPERAND (exp, 0), max_p);
2191 fold_build1 (code, type,
2192 max_size (TREE_OPERAND (exp, 0),
2193 code == NEGATE_EXPR ? !max_p : max_p));
2196 if (code == COMPOUND_EXPR)
2197 return max_size (TREE_OPERAND (exp, 1), max_p);
2200 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
2201 tree rhs = max_size (TREE_OPERAND (exp, 1),
2202 code == MINUS_EXPR ? !max_p : max_p);
2204 /* Special-case wanting the maximum value of a MIN_EXPR.
2205 In that case, if one side overflows, return the other.
2206 sizetype is signed, but we know sizes are non-negative.
2207 Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
2208 overflowing and the RHS a variable. */
2211 && TREE_CODE (rhs) == INTEGER_CST
2212 && TREE_OVERFLOW (rhs))
2216 && TREE_CODE (lhs) == INTEGER_CST
2217 && TREE_OVERFLOW (lhs))
2219 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
2220 && TREE_CODE (lhs) == INTEGER_CST
2221 && TREE_OVERFLOW (lhs)
2222 && !TREE_CONSTANT (rhs))
2225 return fold_build2 (code, type, lhs, rhs);
2229 if (code == SAVE_EXPR)
2231 else if (code == COND_EXPR)
2232 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2233 max_size (TREE_OPERAND (exp, 1), max_p),
2234 max_size (TREE_OPERAND (exp, 2), max_p));
2237 /* Other tree classes cannot happen. */
2245 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2246 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2247 Return a constructor for the template. */
2250 build_template (tree template_type, tree array_type, tree expr)
2252 VEC(constructor_elt,gc) *template_elts = NULL;
2253 tree bound_list = NULL_TREE;
2256 while (TREE_CODE (array_type) == RECORD_TYPE
2257 && (TYPE_PADDING_P (array_type)
2258 || TYPE_JUSTIFIED_MODULAR_P (array_type)))
2259 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
2261 if (TREE_CODE (array_type) == ARRAY_TYPE
2262 || (TREE_CODE (array_type) == INTEGER_TYPE
2263 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
2264 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
2266 /* First make the list for a CONSTRUCTOR for the template. Go down the
2267 field list of the template instead of the type chain because this
2268 array might be an Ada array of arrays and we can't tell where the
2269 nested arrays stop being the underlying object. */
2271 for (field = TYPE_FIELDS (template_type); field;
2273 ? (bound_list = TREE_CHAIN (bound_list))
2274 : (array_type = TREE_TYPE (array_type))),
2275 field = DECL_CHAIN (DECL_CHAIN (field)))
2277 tree bounds, min, max;
2279 /* If we have a bound list, get the bounds from there. Likewise
2280 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
2281 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
2282 This will give us a maximum range. */
2284 bounds = TREE_VALUE (bound_list);
2285 else if (TREE_CODE (array_type) == ARRAY_TYPE)
2286 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
2287 else if (expr && TREE_CODE (expr) == PARM_DECL
2288 && DECL_BY_COMPONENT_PTR_P (expr))
2289 bounds = TREE_TYPE (field);
2293 min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
2294 max = convert (TREE_TYPE (DECL_CHAIN (field)), TYPE_MAX_VALUE (bounds));
2296 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
2297 substitute it from OBJECT. */
2298 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
2299 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
2301 CONSTRUCTOR_APPEND_ELT (template_elts, field, min);
2302 CONSTRUCTOR_APPEND_ELT (template_elts, DECL_CHAIN (field), max);
2305 return gnat_build_constructor (template_type, template_elts);
2308 /* Helper routine to make a descriptor field. FIELD_LIST is the list of decls
2309 being built; the new decl is chained on to the front of the list. */
2312 make_descriptor_field (const char *name, tree type, tree rec_type,
2313 tree initial, tree field_list)
2316 = create_field_decl (get_identifier (name), type, rec_type, NULL_TREE,
2319 DECL_INITIAL (field) = initial;
2320 DECL_CHAIN (field) = field_list;
2324 /* Build a 32-bit VMS descriptor from a Mechanism_Type, which must specify a
2325 descriptor type, and the GCC type of an object. Each FIELD_DECL in the
2326 type contains in its DECL_INITIAL the expression to use when a constructor
2327 is made for the type. GNAT_ENTITY is an entity used to print out an error
2328 message if the mechanism cannot be applied to an object of that type and
2329 also for the name. */
2332 build_vms_descriptor32 (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2334 tree record_type = make_node (RECORD_TYPE);
2335 tree pointer32_type, pointer64_type;
2336 tree field_list = NULL_TREE;
2337 int klass, ndim, i, dtype = 0;
2338 tree inner_type, tem;
2341 /* If TYPE is an unconstrained array, use the underlying array type. */
2342 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2343 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2345 /* If this is an array, compute the number of dimensions in the array,
2346 get the index types, and point to the inner type. */
2347 if (TREE_CODE (type) != ARRAY_TYPE)
2350 for (ndim = 1, inner_type = type;
2351 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2352 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2353 ndim++, inner_type = TREE_TYPE (inner_type))
2356 idx_arr = XALLOCAVEC (tree, ndim);
2358 if (mech != By_Descriptor_NCA && mech != By_Short_Descriptor_NCA
2359 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2360 for (i = ndim - 1, inner_type = type;
2362 i--, inner_type = TREE_TYPE (inner_type))
2363 idx_arr[i] = TYPE_DOMAIN (inner_type);
2365 for (i = 0, inner_type = type;
2367 i++, inner_type = TREE_TYPE (inner_type))
2368 idx_arr[i] = TYPE_DOMAIN (inner_type);
2370 /* Now get the DTYPE value. */
2371 switch (TREE_CODE (type))
2376 if (TYPE_VAX_FLOATING_POINT_P (type))
2377 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2390 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2393 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2396 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2399 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2402 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2405 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2411 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2415 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2416 && TYPE_VAX_FLOATING_POINT_P (type))
2417 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2429 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2440 /* Get the CLASS value. */
2443 case By_Descriptor_A:
2444 case By_Short_Descriptor_A:
2447 case By_Descriptor_NCA:
2448 case By_Short_Descriptor_NCA:
2451 case By_Descriptor_SB:
2452 case By_Short_Descriptor_SB:
2456 case By_Short_Descriptor:
2457 case By_Descriptor_S:
2458 case By_Short_Descriptor_S:
2464 /* Make the type for a descriptor for VMS. The first four fields are the
2465 same for all types. */
2467 = make_descriptor_field ("LENGTH", gnat_type_for_size (16, 1), record_type,
2468 size_in_bytes ((mech == By_Descriptor_A
2469 || mech == By_Short_Descriptor_A)
2470 ? inner_type : type),
2473 = make_descriptor_field ("DTYPE", gnat_type_for_size (8, 1), record_type,
2474 size_int (dtype), field_list);
2476 = make_descriptor_field ("CLASS", gnat_type_for_size (8, 1), record_type,
2477 size_int (klass), field_list);
2479 pointer32_type = build_pointer_type_for_mode (type, SImode, false);
2480 pointer64_type = build_pointer_type_for_mode (type, DImode, false);
2482 /* Ensure that only 32-bit pointers are passed in 32-bit descriptors. Note
2483 that we cannot build a template call to the CE routine as it would get a
2484 wrong source location; instead we use a second placeholder for it. */
2485 tem = build_unary_op (ADDR_EXPR, pointer64_type,
2486 build0 (PLACEHOLDER_EXPR, type));
2487 tem = build3 (COND_EXPR, pointer32_type,
2488 build_binary_op (GE_EXPR, boolean_type_node, tem,
2489 build_int_cstu (pointer64_type, 0x80000000)),
2490 build0 (PLACEHOLDER_EXPR, void_type_node),
2491 convert (pointer32_type, tem));
2494 = make_descriptor_field ("POINTER", pointer32_type, record_type, tem,
2500 case By_Short_Descriptor:
2501 case By_Descriptor_S:
2502 case By_Short_Descriptor_S:
2505 case By_Descriptor_SB:
2506 case By_Short_Descriptor_SB:
2508 = make_descriptor_field ("SB_L1", gnat_type_for_size (32, 1),
2510 (TREE_CODE (type) == ARRAY_TYPE
2511 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type))
2515 = make_descriptor_field ("SB_U1", gnat_type_for_size (32, 1),
2517 (TREE_CODE (type) == ARRAY_TYPE
2518 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type))
2523 case By_Descriptor_A:
2524 case By_Short_Descriptor_A:
2525 case By_Descriptor_NCA:
2526 case By_Short_Descriptor_NCA:
2528 = make_descriptor_field ("SCALE", gnat_type_for_size (8, 1),
2529 record_type, size_zero_node, field_list);
2532 = make_descriptor_field ("DIGITS", gnat_type_for_size (8, 1),
2533 record_type, size_zero_node, field_list);
2536 = make_descriptor_field ("AFLAGS", gnat_type_for_size (8, 1),
2538 size_int ((mech == By_Descriptor_NCA
2539 || mech == By_Short_Descriptor_NCA)
2541 /* Set FL_COLUMN, FL_COEFF, and
2543 : (TREE_CODE (type) == ARRAY_TYPE
2544 && TYPE_CONVENTION_FORTRAN_P
2550 = make_descriptor_field ("DIMCT", gnat_type_for_size (8, 1),
2551 record_type, size_int (ndim), field_list);
2554 = make_descriptor_field ("ARSIZE", gnat_type_for_size (32, 1),
2555 record_type, size_in_bytes (type),
2558 /* Now build a pointer to the 0,0,0... element. */
2559 tem = build0 (PLACEHOLDER_EXPR, type);
2560 for (i = 0, inner_type = type; i < ndim;
2561 i++, inner_type = TREE_TYPE (inner_type))
2562 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
2563 convert (TYPE_DOMAIN (inner_type), size_zero_node),
2564 NULL_TREE, NULL_TREE);
2567 = make_descriptor_field ("A0", pointer32_type, record_type,
2568 build1 (ADDR_EXPR, pointer32_type, tem),
2571 /* Next come the addressing coefficients. */
2572 tem = size_one_node;
2573 for (i = 0; i < ndim; i++)
2577 = size_binop (MULT_EXPR, tem,
2578 size_binop (PLUS_EXPR,
2579 size_binop (MINUS_EXPR,
2580 TYPE_MAX_VALUE (idx_arr[i]),
2581 TYPE_MIN_VALUE (idx_arr[i])),
2584 fname[0] = ((mech == By_Descriptor_NCA ||
2585 mech == By_Short_Descriptor_NCA) ? 'S' : 'M');
2586 fname[1] = '0' + i, fname[2] = 0;
2588 = make_descriptor_field (fname, gnat_type_for_size (32, 1),
2589 record_type, idx_length, field_list);
2591 if (mech == By_Descriptor_NCA || mech == By_Short_Descriptor_NCA)
2595 /* Finally here are the bounds. */
2596 for (i = 0; i < ndim; i++)
2600 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2602 = make_descriptor_field (fname, gnat_type_for_size (32, 1),
2603 record_type, TYPE_MIN_VALUE (idx_arr[i]),
2608 = make_descriptor_field (fname, gnat_type_for_size (32, 1),
2609 record_type, TYPE_MAX_VALUE (idx_arr[i]),
2615 post_error ("unsupported descriptor type for &", gnat_entity);
2618 TYPE_NAME (record_type) = create_concat_name (gnat_entity, "DESC");
2619 finish_record_type (record_type, nreverse (field_list), 0, false);
2623 /* Build a 64-bit VMS descriptor from a Mechanism_Type, which must specify a
2624 descriptor type, and the GCC type of an object. Each FIELD_DECL in the
2625 type contains in its DECL_INITIAL the expression to use when a constructor
2626 is made for the type. GNAT_ENTITY is an entity used to print out an error
2627 message if the mechanism cannot be applied to an object of that type and
2628 also for the name. */
2631 build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2633 tree record_type = make_node (RECORD_TYPE);
2634 tree pointer64_type;
2635 tree field_list = NULL_TREE;
2636 int klass, ndim, i, dtype = 0;
2637 tree inner_type, tem;
2640 /* If TYPE is an unconstrained array, use the underlying array type. */
2641 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2642 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2644 /* If this is an array, compute the number of dimensions in the array,
2645 get the index types, and point to the inner type. */
2646 if (TREE_CODE (type) != ARRAY_TYPE)
2649 for (ndim = 1, inner_type = type;
2650 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2651 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2652 ndim++, inner_type = TREE_TYPE (inner_type))
2655 idx_arr = XALLOCAVEC (tree, ndim);
2657 if (mech != By_Descriptor_NCA
2658 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2659 for (i = ndim - 1, inner_type = type;
2661 i--, inner_type = TREE_TYPE (inner_type))
2662 idx_arr[i] = TYPE_DOMAIN (inner_type);
2664 for (i = 0, inner_type = type;
2666 i++, inner_type = TREE_TYPE (inner_type))
2667 idx_arr[i] = TYPE_DOMAIN (inner_type);
2669 /* Now get the DTYPE value. */
2670 switch (TREE_CODE (type))
2675 if (TYPE_VAX_FLOATING_POINT_P (type))
2676 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2689 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2692 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2695 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2698 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2701 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2704 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2710 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2714 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2715 && TYPE_VAX_FLOATING_POINT_P (type))
2716 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2728 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2739 /* Get the CLASS value. */
2742 case By_Descriptor_A:
2745 case By_Descriptor_NCA:
2748 case By_Descriptor_SB:
2752 case By_Descriptor_S:
2758 /* Make the type for a 64-bit descriptor for VMS. The first six fields
2759 are the same for all types. */
2761 = make_descriptor_field ("MBO", gnat_type_for_size (16, 1),
2762 record_type, size_int (1), field_list);
2764 = make_descriptor_field ("DTYPE", gnat_type_for_size (8, 1),
2765 record_type, size_int (dtype), field_list);
2767 = make_descriptor_field ("CLASS", gnat_type_for_size (8, 1),
2768 record_type, size_int (klass), field_list);
2770 = make_descriptor_field ("MBMO", gnat_type_for_size (32, 1),
2771 record_type, ssize_int (-1), field_list);
2773 = make_descriptor_field ("LENGTH", gnat_type_for_size (64, 1),
2775 size_in_bytes (mech == By_Descriptor_A
2776 ? inner_type : type),
2779 pointer64_type = build_pointer_type_for_mode (type, DImode, false);
2782 = make_descriptor_field ("POINTER", pointer64_type, record_type,
2783 build_unary_op (ADDR_EXPR, pointer64_type,
2784 build0 (PLACEHOLDER_EXPR, type)),
2790 case By_Descriptor_S:
2793 case By_Descriptor_SB:
2795 = make_descriptor_field ("SB_L1", gnat_type_for_size (64, 1),
2797 (TREE_CODE (type) == ARRAY_TYPE
2798 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type))
2802 = make_descriptor_field ("SB_U1", gnat_type_for_size (64, 1),
2804 (TREE_CODE (type) == ARRAY_TYPE
2805 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type))
2810 case By_Descriptor_A:
2811 case By_Descriptor_NCA:
2813 = make_descriptor_field ("SCALE", gnat_type_for_size (8, 1),
2814 record_type, size_zero_node, field_list);
2817 = make_descriptor_field ("DIGITS", gnat_type_for_size (8, 1),
2818 record_type, size_zero_node, field_list);
2820 dtype = (mech == By_Descriptor_NCA
2822 /* Set FL_COLUMN, FL_COEFF, and
2824 : (TREE_CODE (type) == ARRAY_TYPE
2825 && TYPE_CONVENTION_FORTRAN_P (type)
2828 = make_descriptor_field ("AFLAGS", gnat_type_for_size (8, 1),
2829 record_type, size_int (dtype),
2833 = make_descriptor_field ("DIMCT", gnat_type_for_size (8, 1),
2834 record_type, size_int (ndim), field_list);
2837 = make_descriptor_field ("MBZ", gnat_type_for_size (32, 1),
2838 record_type, size_int (0), field_list);
2840 = make_descriptor_field ("ARSIZE", gnat_type_for_size (64, 1),
2841 record_type, size_in_bytes (type),
2844 /* Now build a pointer to the 0,0,0... element. */
2845 tem = build0 (PLACEHOLDER_EXPR, type);
2846 for (i = 0, inner_type = type; i < ndim;
2847 i++, inner_type = TREE_TYPE (inner_type))
2848 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
2849 convert (TYPE_DOMAIN (inner_type), size_zero_node),
2850 NULL_TREE, NULL_TREE);
2853 = make_descriptor_field ("A0", pointer64_type, record_type,
2854 build1 (ADDR_EXPR, pointer64_type, tem),
2857 /* Next come the addressing coefficients. */
2858 tem = size_one_node;
2859 for (i = 0; i < ndim; i++)
2863 = size_binop (MULT_EXPR, tem,
2864 size_binop (PLUS_EXPR,
2865 size_binop (MINUS_EXPR,
2866 TYPE_MAX_VALUE (idx_arr[i]),
2867 TYPE_MIN_VALUE (idx_arr[i])),
2870 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
2871 fname[1] = '0' + i, fname[2] = 0;
2873 = make_descriptor_field (fname, gnat_type_for_size (64, 1),
2874 record_type, idx_length, field_list);
2876 if (mech == By_Descriptor_NCA)
2880 /* Finally here are the bounds. */
2881 for (i = 0; i < ndim; i++)
2885 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2887 = make_descriptor_field (fname, gnat_type_for_size (64, 1),
2889 TYPE_MIN_VALUE (idx_arr[i]), field_list);
2893 = make_descriptor_field (fname, gnat_type_for_size (64, 1),
2895 TYPE_MAX_VALUE (idx_arr[i]), field_list);
2900 post_error ("unsupported descriptor type for &", gnat_entity);
2903 TYPE_NAME (record_type) = create_concat_name (gnat_entity, "DESC64");
2904 finish_record_type (record_type, nreverse (field_list), 0, false);
2908 /* Fill in a VMS descriptor of GNU_TYPE for GNU_EXPR and return the result.
2909 GNAT_ACTUAL is the actual parameter for which the descriptor is built. */
2912 fill_vms_descriptor (tree gnu_type, tree gnu_expr, Node_Id gnat_actual)
2914 VEC(constructor_elt,gc) *v = NULL;
2917 gnu_expr = maybe_unconstrained_array (gnu_expr);
2918 gnu_expr = gnat_protect_expr (gnu_expr);
2919 gnat_mark_addressable (gnu_expr);
2921 /* We may need to substitute both GNU_EXPR and a CALL_EXPR to the raise CE
2922 routine in case we have a 32-bit descriptor. */
2923 gnu_expr = build2 (COMPOUND_EXPR, void_type_node,
2924 build_call_raise (CE_Range_Check_Failed, gnat_actual,
2925 N_Raise_Constraint_Error),
2928 for (field = TYPE_FIELDS (gnu_type); field; field = DECL_CHAIN (field))
2931 = convert (TREE_TYPE (field),
2932 SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_INITIAL (field),
2934 CONSTRUCTOR_APPEND_ELT (v, field, value);
2937 return gnat_build_constructor (gnu_type, v);
2940 /* Convert GNU_EXPR, a pointer to a 64bit VMS descriptor, to GNU_TYPE, a
2941 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
2942 which the VMS descriptor is passed. */
2945 convert_vms_descriptor64 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
2947 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
2948 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
2949 /* The CLASS field is the 3rd field in the descriptor. */
2950 tree klass = DECL_CHAIN (DECL_CHAIN (TYPE_FIELDS (desc_type)));
2951 /* The POINTER field is the 6th field in the descriptor. */
2952 tree pointer = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (klass)));
2954 /* Retrieve the value of the POINTER field. */
2956 = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);
2958 if (POINTER_TYPE_P (gnu_type))
2959 return convert (gnu_type, gnu_expr64);
2961 else if (TYPE_IS_FAT_POINTER_P (gnu_type))
2963 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
2964 tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
2965 tree template_type = TREE_TYPE (p_bounds_type);
2966 tree min_field = TYPE_FIELDS (template_type);
2967 tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
2968 tree template_tree, template_addr, aflags, dimct, t, u;
2969 /* See the head comment of build_vms_descriptor. */
2970 int iklass = TREE_INT_CST_LOW (DECL_INITIAL (klass));
2971 tree lfield, ufield;
2972 VEC(constructor_elt,gc) *v;
2974 /* Convert POINTER to the pointer-to-array type. */
2975 gnu_expr64 = convert (p_array_type, gnu_expr64);
2979 case 1: /* Class S */
2980 case 15: /* Class SB */
2981 /* Build {1, LENGTH} template; LENGTH64 is the 5th field. */
2982 v = VEC_alloc (constructor_elt, gc, 2);
2983 t = DECL_CHAIN (DECL_CHAIN (klass));
2984 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
2985 CONSTRUCTOR_APPEND_ELT (v, min_field,
2986 convert (TREE_TYPE (min_field),
2988 CONSTRUCTOR_APPEND_ELT (v, max_field,
2989 convert (TREE_TYPE (max_field), t));
2990 template_tree = gnat_build_constructor (template_type, v);
2991 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template_tree);
2993 /* For class S, we are done. */
2997 /* Test that we really have a SB descriptor, like DEC Ada. */
2998 t = build3 (COMPONENT_REF, TREE_TYPE (klass), desc, klass, NULL);
2999 u = convert (TREE_TYPE (klass), DECL_INITIAL (klass));
3000 u = build_binary_op (EQ_EXPR, boolean_type_node, t, u);
3001 /* If so, there is already a template in the descriptor and
3002 it is located right after the POINTER field. The fields are
3003 64bits so they must be repacked. */
3004 t = TREE_CHAIN (pointer);
3005 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3006 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3009 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3011 (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (template_type))), ufield);
3013 /* Build the template in the form of a constructor. */
3014 v = VEC_alloc (constructor_elt, gc, 2);
3015 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (template_type), lfield);
3016 CONSTRUCTOR_APPEND_ELT (v, TREE_CHAIN (TYPE_FIELDS (template_type)),
3018 template_tree = gnat_build_constructor (template_type, v);
3020 /* Otherwise use the {1, LENGTH} template we build above. */
3021 template_addr = build3 (COND_EXPR, p_bounds_type, u,
3022 build_unary_op (ADDR_EXPR, p_bounds_type,
3027 case 4: /* Class A */
3028 /* The AFLAGS field is the 3rd field after the pointer in the
3030 t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (pointer)));
3031 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3032 /* The DIMCT field is the next field in the descriptor after
3035 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3036 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
3037 or FL_COEFF or FL_BOUNDS not set. */
3038 u = build_int_cst (TREE_TYPE (aflags), 192);
3039 u = build_binary_op (TRUTH_OR_EXPR, boolean_type_node,
3040 build_binary_op (NE_EXPR, boolean_type_node,
3042 convert (TREE_TYPE (dimct),
3044 build_binary_op (NE_EXPR, boolean_type_node,
3045 build2 (BIT_AND_EXPR,
3049 /* There is already a template in the descriptor and it is located
3050 in block 3. The fields are 64bits so they must be repacked. */
3051 t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN
3053 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3054 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3057 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3059 (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (template_type))), ufield);
3061 /* Build the template in the form of a constructor. */
3062 v = VEC_alloc (constructor_elt, gc, 2);
3063 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (template_type), lfield);
3064 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (template_type)),
3066 template_tree = gnat_build_constructor (template_type, v);
3067 template_tree = build3 (COND_EXPR, template_type, u,
3068 build_call_raise (CE_Length_Check_Failed, Empty,
3069 N_Raise_Constraint_Error),
3072 = build_unary_op (ADDR_EXPR, p_bounds_type, template_tree);
3075 case 10: /* Class NCA */
3077 post_error ("unsupported descriptor type for &", gnat_subprog);
3078 template_addr = integer_zero_node;
3082 /* Build the fat pointer in the form of a constructor. */
3083 v = VEC_alloc (constructor_elt, gc, 2);
3084 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (gnu_type), gnu_expr64);
3085 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (gnu_type)),
3087 return gnat_build_constructor (gnu_type, v);
3094 /* Convert GNU_EXPR, a pointer to a 32bit VMS descriptor, to GNU_TYPE, a
3095 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3096 which the VMS descriptor is passed. */
3099 convert_vms_descriptor32 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
3101 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3102 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3103 /* The CLASS field is the 3rd field in the descriptor. */
3104 tree klass = DECL_CHAIN (DECL_CHAIN (TYPE_FIELDS (desc_type)));
3105 /* The POINTER field is the 4th field in the descriptor. */
3106 tree pointer = DECL_CHAIN (klass);
3108 /* Retrieve the value of the POINTER field. */
3110 = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);
3112 if (POINTER_TYPE_P (gnu_type))
3113 return convert (gnu_type, gnu_expr32);
3115 else if (TYPE_IS_FAT_POINTER_P (gnu_type))
3117 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
3118 tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
3119 tree template_type = TREE_TYPE (p_bounds_type);
3120 tree min_field = TYPE_FIELDS (template_type);
3121 tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
3122 tree template_tree, template_addr, aflags, dimct, t, u;
3123 /* See the head comment of build_vms_descriptor. */
3124 int iklass = TREE_INT_CST_LOW (DECL_INITIAL (klass));
3125 VEC(constructor_elt,gc) *v;
3127 /* Convert POINTER to the pointer-to-array type. */
3128 gnu_expr32 = convert (p_array_type, gnu_expr32);
3132 case 1: /* Class S */
3133 case 15: /* Class SB */
3134 /* Build {1, LENGTH} template; LENGTH is the 1st field. */
3135 v = VEC_alloc (constructor_elt, gc, 2);
3136 t = TYPE_FIELDS (desc_type);
3137 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3138 CONSTRUCTOR_APPEND_ELT (v, min_field,
3139 convert (TREE_TYPE (min_field),
3141 CONSTRUCTOR_APPEND_ELT (v, max_field,
3142 convert (TREE_TYPE (max_field), t));
3143 template_tree = gnat_build_constructor (template_type, v);
3144 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template_tree);
3146 /* For class S, we are done. */
3150 /* Test that we really have a SB descriptor, like DEC Ada. */
3151 t = build3 (COMPONENT_REF, TREE_TYPE (klass), desc, klass, NULL);
3152 u = convert (TREE_TYPE (klass), DECL_INITIAL (klass));
3153 u = build_binary_op (EQ_EXPR, boolean_type_node, t, u);
3154 /* If so, there is already a template in the descriptor and
3155 it is located right after the POINTER field. */
3156 t = TREE_CHAIN (pointer);
3158 = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3159 /* Otherwise use the {1, LENGTH} template we build above. */
3160 template_addr = build3 (COND_EXPR, p_bounds_type, u,
3161 build_unary_op (ADDR_EXPR, p_bounds_type,
3166 case 4: /* Class A */
3167 /* The AFLAGS field is the 7th field in the descriptor. */
3168 t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (pointer)));
3169 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3170 /* The DIMCT field is the 8th field in the descriptor. */
3172 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3173 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
3174 or FL_COEFF or FL_BOUNDS not set. */
3175 u = build_int_cst (TREE_TYPE (aflags), 192);
3176 u = build_binary_op (TRUTH_OR_EXPR, boolean_type_node,
3177 build_binary_op (NE_EXPR, boolean_type_node,
3179 convert (TREE_TYPE (dimct),
3181 build_binary_op (NE_EXPR, boolean_type_node,
3182 build2 (BIT_AND_EXPR,
3186 /* There is already a template in the descriptor and it is
3187 located at the start of block 3 (12th field). */
3188 t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (t))));
3190 = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3191 template_tree = build3 (COND_EXPR, TREE_TYPE (t), u,
3192 build_call_raise (CE_Length_Check_Failed, Empty,
3193 N_Raise_Constraint_Error),
3196 = build_unary_op (ADDR_EXPR, p_bounds_type, template_tree);
3199 case 10: /* Class NCA */
3201 post_error ("unsupported descriptor type for &", gnat_subprog);
3202 template_addr = integer_zero_node;
3206 /* Build the fat pointer in the form of a constructor. */
3207 v = VEC_alloc (constructor_elt, gc, 2);
3208 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (gnu_type), gnu_expr32);
3209 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (gnu_type)),
3212 return gnat_build_constructor (gnu_type, v);
3219 /* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
3220 pointer or fat pointer type. GNU_EXPR_ALT_TYPE is the alternate (32-bit)
3221 pointer type of GNU_EXPR. BY_REF is true if the result is to be used by
3222 reference. GNAT_SUBPROG is the subprogram to which the VMS descriptor is
3226 convert_vms_descriptor (tree gnu_type, tree gnu_expr, tree gnu_expr_alt_type,
3227 bool by_ref, Entity_Id gnat_subprog)
3229 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3230 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3231 tree mbo = TYPE_FIELDS (desc_type);
3232 const char *mbostr = IDENTIFIER_POINTER (DECL_NAME (mbo));
3233 tree mbmo = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (mbo)));
3234 tree real_type, is64bit, gnu_expr32, gnu_expr64;
3237 real_type = TREE_TYPE (gnu_type);
3239 real_type = gnu_type;
3241 /* If the field name is not MBO, it must be 32-bit and no alternate.
3242 Otherwise primary must be 64-bit and alternate 32-bit. */
3243 if (strcmp (mbostr, "MBO") != 0)
3245 tree ret = convert_vms_descriptor32 (real_type, gnu_expr, gnat_subprog);
3247 ret = build_unary_op (ADDR_EXPR, gnu_type, ret);
3251 /* Build the test for 64-bit descriptor. */
3252 mbo = build3 (COMPONENT_REF, TREE_TYPE (mbo), desc, mbo, NULL_TREE);
3253 mbmo = build3 (COMPONENT_REF, TREE_TYPE (mbmo), desc, mbmo, NULL_TREE);
3255 = build_binary_op (TRUTH_ANDIF_EXPR, boolean_type_node,
3256 build_binary_op (EQ_EXPR, boolean_type_node,
3257 convert (integer_type_node, mbo),
3259 build_binary_op (EQ_EXPR, boolean_type_node,
3260 convert (integer_type_node, mbmo),
3261 integer_minus_one_node));
3263 /* Build the 2 possible end results. */
3264 gnu_expr64 = convert_vms_descriptor64 (real_type, gnu_expr, gnat_subprog);
3266 gnu_expr64 = build_unary_op (ADDR_EXPR, gnu_type, gnu_expr64);
3267 gnu_expr = fold_convert (gnu_expr_alt_type, gnu_expr);
3268 gnu_expr32 = convert_vms_descriptor32 (real_type, gnu_expr, gnat_subprog);
3270 gnu_expr32 = build_unary_op (ADDR_EXPR, gnu_type, gnu_expr32);
3272 return build3 (COND_EXPR, gnu_type, is64bit, gnu_expr64, gnu_expr32);
3275 /* Build a stub for the subprogram specified by the GCC tree GNU_SUBPROG
3276 and the GNAT node GNAT_SUBPROG. */
3279 build_function_stub (tree gnu_subprog, Entity_Id gnat_subprog)
3281 tree gnu_subprog_type, gnu_subprog_addr, gnu_subprog_call;
3282 tree gnu_subprog_param, gnu_stub_param, gnu_param;
3283 tree gnu_stub_decl = DECL_FUNCTION_STUB (gnu_subprog);
3284 VEC(tree,gc) *gnu_param_vec = NULL;
3286 gnu_subprog_type = TREE_TYPE (gnu_subprog);
3288 /* Initialize the information structure for the function. */
3289 allocate_struct_function (gnu_stub_decl, false);
3292 begin_subprog_body (gnu_stub_decl);
3294 start_stmt_group ();
3297 /* Loop over the parameters of the stub and translate any of them
3298 passed by descriptor into a by reference one. */
3299 for (gnu_stub_param = DECL_ARGUMENTS (gnu_stub_decl),
3300 gnu_subprog_param = DECL_ARGUMENTS (gnu_subprog);
3302 gnu_stub_param = TREE_CHAIN (gnu_stub_param),
3303 gnu_subprog_param = TREE_CHAIN (gnu_subprog_param))
3305 if (DECL_BY_DESCRIPTOR_P (gnu_stub_param))
3307 gcc_assert (DECL_BY_REF_P (gnu_subprog_param));
3309 = convert_vms_descriptor (TREE_TYPE (gnu_subprog_param),
3311 DECL_PARM_ALT_TYPE (gnu_stub_param),
3312 DECL_BY_DOUBLE_REF_P (gnu_subprog_param),
3316 gnu_param = gnu_stub_param;
3318 VEC_safe_push (tree, gc, gnu_param_vec, gnu_param);
3321 /* Invoke the internal subprogram. */
3322 gnu_subprog_addr = build1 (ADDR_EXPR, build_pointer_type (gnu_subprog_type),
3324 gnu_subprog_call = build_call_vec (TREE_TYPE (gnu_subprog_type),
3325 gnu_subprog_addr, gnu_param_vec);
3327 /* Propagate the return value, if any. */
3328 if (VOID_TYPE_P (TREE_TYPE (gnu_subprog_type)))
3329 add_stmt (gnu_subprog_call);
3331 add_stmt (build_return_expr (DECL_RESULT (gnu_stub_decl),
3335 end_subprog_body (end_stmt_group ());
3338 /* Build a type to be used to represent an aliased object whose nominal type
3339 is an unconstrained array. This consists of a RECORD_TYPE containing a
3340 field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
3341 If ARRAY_TYPE is that of an unconstrained array, this is used to represent
3342 an arbitrary unconstrained object. Use NAME as the name of the record.
3343 DEBUG_INFO_P is true if we need to write debug information for the type. */
3346 build_unc_object_type (tree template_type, tree object_type, tree name,
3349 tree type = make_node (RECORD_TYPE);
3351 = create_field_decl (get_identifier ("BOUNDS"), template_type, type,
3352 NULL_TREE, NULL_TREE, 0, 1);
3354 = create_field_decl (get_identifier ("ARRAY"), object_type, type,
3355 NULL_TREE, NULL_TREE, 0, 1);
3357 TYPE_NAME (type) = name;
3358 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
3359 DECL_CHAIN (template_field) = array_field;
3360 finish_record_type (type, template_field, 0, true);
3362 /* Declare it now since it will never be declared otherwise. This is
3363 necessary to ensure that its subtrees are properly marked. */
3364 create_type_decl (name, type, NULL, true, debug_info_p, Empty);
3369 /* Same, taking a thin or fat pointer type instead of a template type. */
3372 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
3373 tree name, bool debug_info_p)
3377 gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
3380 = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
3381 ? TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
3382 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
3385 build_unc_object_type (template_type, object_type, name, debug_info_p);
3388 /* Shift the component offsets within an unconstrained object TYPE to make it
3389 suitable for use as a designated type for thin pointers. */
3392 shift_unc_components_for_thin_pointers (tree type)
3394 /* Thin pointer values designate the ARRAY data of an unconstrained object,
3395 allocated past the BOUNDS template. The designated type is adjusted to
3396 have ARRAY at position zero and the template at a negative offset, so
3397 that COMPONENT_REFs on (*thin_ptr) designate the proper location. */
3399 tree bounds_field = TYPE_FIELDS (type);
3400 tree array_field = DECL_CHAIN (TYPE_FIELDS (type));
3402 DECL_FIELD_OFFSET (bounds_field)
3403 = size_binop (MINUS_EXPR, size_zero_node, byte_position (array_field));
3405 DECL_FIELD_OFFSET (array_field) = size_zero_node;
3406 DECL_FIELD_BIT_OFFSET (array_field) = bitsize_zero_node;
3409 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
3410 In the normal case this is just two adjustments, but we have more to
3411 do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
3414 update_pointer_to (tree old_type, tree new_type)
3416 tree ptr = TYPE_POINTER_TO (old_type);
3417 tree ref = TYPE_REFERENCE_TO (old_type);
3420 /* If this is the main variant, process all the other variants first. */
3421 if (TYPE_MAIN_VARIANT (old_type) == old_type)
3422 for (t = TYPE_NEXT_VARIANT (old_type); t; t = TYPE_NEXT_VARIANT (t))
3423 update_pointer_to (t, new_type);
3425 /* If no pointers and no references, we are done. */
3429 /* Merge the old type qualifiers in the new type.
3431 Each old variant has qualifiers for specific reasons, and the new
3432 designated type as well. Each set of qualifiers represents useful
3433 information grabbed at some point, and merging the two simply unifies
3434 these inputs into the final type description.
3436 Consider for instance a volatile type frozen after an access to constant
3437 type designating it; after the designated type's freeze, we get here with
3438 a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
3439 when the access type was processed. We will make a volatile and readonly
3440 designated type, because that's what it really is.
3442 We might also get here for a non-dummy OLD_TYPE variant with different
3443 qualifiers than those of NEW_TYPE, for instance in some cases of pointers
3444 to private record type elaboration (see the comments around the call to
3445 this routine in gnat_to_gnu_entity <E_Access_Type>). We have to merge
3446 the qualifiers in those cases too, to avoid accidentally discarding the
3447 initial set, and will often end up with OLD_TYPE == NEW_TYPE then. */
3449 = build_qualified_type (new_type,
3450 TYPE_QUALS (old_type) | TYPE_QUALS (new_type));
3452 /* If old type and new type are identical, there is nothing to do. */
3453 if (old_type == new_type)
3456 /* Otherwise, first handle the simple case. */
3457 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
3459 tree new_ptr, new_ref;
3461 /* If pointer or reference already points to new type, nothing to do.
3462 This can happen as update_pointer_to can be invoked multiple times
3463 on the same couple of types because of the type variants. */
3464 if ((ptr && TREE_TYPE (ptr) == new_type)
3465 || (ref && TREE_TYPE (ref) == new_type))
3468 /* Chain PTR and its variants at the end. */
3469 new_ptr = TYPE_POINTER_TO (new_type);
3472 while (TYPE_NEXT_PTR_TO (new_ptr))
3473 new_ptr = TYPE_NEXT_PTR_TO (new_ptr);
3474 TYPE_NEXT_PTR_TO (new_ptr) = ptr;
3477 TYPE_POINTER_TO (new_type) = ptr;
3479 /* Now adjust them. */
3480 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
3481 for (t = TYPE_MAIN_VARIANT (ptr); t; t = TYPE_NEXT_VARIANT (t))
3482 TREE_TYPE (t) = new_type;
3483 TYPE_POINTER_TO (old_type) = NULL_TREE;
3485 /* Chain REF and its variants at the end. */
3486 new_ref = TYPE_REFERENCE_TO (new_type);
3489 while (TYPE_NEXT_REF_TO (new_ref))
3490 new_ref = TYPE_NEXT_REF_TO (new_ref);
3491 TYPE_NEXT_REF_TO (new_ref) = ref;
3494 TYPE_REFERENCE_TO (new_type) = ref;
3496 /* Now adjust them. */
3497 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
3498 for (t = TYPE_MAIN_VARIANT (ref); t; t = TYPE_NEXT_VARIANT (t))
3499 TREE_TYPE (t) = new_type;
3500 TYPE_REFERENCE_TO (old_type) = NULL_TREE;
3503 /* Now deal with the unconstrained array case. In this case the pointer
3504 is actually a record where both fields are pointers to dummy nodes.
3505 Turn them into pointers to the correct types using update_pointer_to. */
3508 tree new_ptr = TYPE_MAIN_VARIANT (TYPE_POINTER_TO (new_type));
3509 tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
3510 tree array_field, bounds_field, new_ref, last = NULL_TREE;
3512 gcc_assert (TYPE_IS_FAT_POINTER_P (ptr));
3514 /* If PTR already points to new type, nothing to do. This can happen
3515 since update_pointer_to can be invoked multiple times on the same
3516 couple of types because of the type variants. */
3517 if (TYPE_UNCONSTRAINED_ARRAY (ptr) == new_type)
3520 array_field = TYPE_FIELDS (ptr);
3521 bounds_field = DECL_CHAIN (array_field);
3523 /* Make pointers to the dummy template point to the real template. */
3525 (TREE_TYPE (TREE_TYPE (bounds_field)),
3526 TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_ptr)))));
3528 /* The references to the template bounds present in the array type use
3529 the bounds field of NEW_PTR through a PLACEHOLDER_EXPR. Since we
3530 are going to merge PTR in NEW_PTR, we must rework these references
3531 to use the bounds field of PTR instead. */
3532 new_ref = build3 (COMPONENT_REF, TREE_TYPE (bounds_field),
3533 build0 (PLACEHOLDER_EXPR, new_ptr),
3534 bounds_field, NULL_TREE);
3536 /* Create the new array for the new PLACEHOLDER_EXPR and make pointers
3537 to the dummy array point to it. */
3539 (TREE_TYPE (TREE_TYPE (array_field)),
3540 substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))),
3541 DECL_CHAIN (TYPE_FIELDS (new_ptr)), new_ref));
3543 /* Merge PTR in NEW_PTR. */
3544 DECL_FIELD_CONTEXT (array_field) = new_ptr;
3545 DECL_FIELD_CONTEXT (bounds_field) = new_ptr;
3546 for (t = new_ptr; t; last = t, t = TYPE_NEXT_VARIANT (t))
3547 TYPE_FIELDS (t) = TYPE_FIELDS (ptr);
3548 TYPE_ALIAS_SET (new_ptr) = TYPE_ALIAS_SET (ptr);
3550 /* Chain PTR and its variants at the end. */
3551 TYPE_NEXT_VARIANT (last) = TYPE_MAIN_VARIANT (ptr);
3553 /* Now adjust them. */
3554 for (t = TYPE_MAIN_VARIANT (ptr); t; t = TYPE_NEXT_VARIANT (t))
3556 TYPE_MAIN_VARIANT (t) = new_ptr;
3557 SET_TYPE_UNCONSTRAINED_ARRAY (t, new_type);
3559 /* And show the original pointer NEW_PTR to the debugger. This is
3560 the counterpart of the special processing for fat pointer types
3561 in gnat_pushdecl, but when the unconstrained array type is only
3562 frozen after access types to it. */
3563 if (TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL)
3565 DECL_ORIGINAL_TYPE (TYPE_NAME (t)) = new_ptr;
3566 DECL_ARTIFICIAL (TYPE_NAME (t)) = 0;
3570 /* Now handle updating the allocation record, what the thin pointer
3571 points to. Update all pointers from the old record into the new
3572 one, update the type of the array field, and recompute the size. */
3573 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);
3574 TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_obj_rec)))
3575 = TREE_TYPE (TREE_TYPE (array_field));
3577 /* The size recomputation needs to account for alignment constraints, so
3578 we let layout_type work it out. This will reset the field offsets to
3579 what they would be in a regular record, so we shift them back to what
3580 we want them to be for a thin pointer designated type afterwards. */
3581 DECL_SIZE (TYPE_FIELDS (new_obj_rec)) = NULL_TREE;
3582 DECL_SIZE (DECL_CHAIN (TYPE_FIELDS (new_obj_rec))) = NULL_TREE;
3583 TYPE_SIZE (new_obj_rec) = NULL_TREE;
3584 layout_type (new_obj_rec);
3585 shift_unc_components_for_thin_pointers (new_obj_rec);
3587 /* We are done, at last. */
3588 rest_of_record_type_compilation (ptr);
3592 /* Convert EXPR, a pointer to a constrained array, into a pointer to an
3593 unconstrained one. This involves making or finding a template. */
3596 convert_to_fat_pointer (tree type, tree expr)
3598 tree template_type = TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))));
3599 tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
3600 tree etype = TREE_TYPE (expr);
3602 VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, 2);
3604 /* If EXPR is null, make a fat pointer that contains null pointers to the
3605 template and array. */
3606 if (integer_zerop (expr))
3608 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
3609 convert (p_array_type, expr));
3610 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
3611 convert (build_pointer_type (template_type),
3613 return gnat_build_constructor (type, v);
3616 /* If EXPR is a thin pointer, make template and data from the record.. */
3617 else if (TYPE_IS_THIN_POINTER_P (etype))
3619 tree fields = TYPE_FIELDS (TREE_TYPE (etype));
3621 expr = gnat_protect_expr (expr);
3622 if (TREE_CODE (expr) == ADDR_EXPR)
3623 expr = TREE_OPERAND (expr, 0);
3625 expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
3627 template_tree = build_component_ref (expr, NULL_TREE, fields, false);
3628 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
3629 build_component_ref (expr, NULL_TREE,
3630 DECL_CHAIN (fields), false));
3633 /* Otherwise, build the constructor for the template. */
3635 template_tree = build_template (template_type, TREE_TYPE (etype), expr);
3637 /* The final result is a constructor for the fat pointer.
3639 If EXPR is an argument of a foreign convention subprogram, the type it
3640 points to is directly the component type. In this case, the expression
3641 type may not match the corresponding FIELD_DECL type at this point, so we
3642 call "convert" here to fix that up if necessary. This type consistency is
3643 required, for instance because it ensures that possible later folding of
3644 COMPONENT_REFs against this constructor always yields something of the
3645 same type as the initial reference.
3647 Note that the call to "build_template" above is still fine because it
3648 will only refer to the provided TEMPLATE_TYPE in this case. */
3649 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
3650 convert (p_array_type, expr));
3651 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
3652 build_unary_op (ADDR_EXPR, NULL_TREE,
3654 return gnat_build_constructor (type, v);
3657 /* Convert to a thin pointer type, TYPE. The only thing we know how to convert
3658 is something that is a fat pointer, so convert to it first if it EXPR
3659 is not already a fat pointer. */
3662 convert_to_thin_pointer (tree type, tree expr)
3664 if (!TYPE_IS_FAT_POINTER_P (TREE_TYPE (expr)))
3666 = convert_to_fat_pointer
3667 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
3669 /* We get the pointer to the data and use a NOP_EXPR to make it the
3671 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)),
3673 expr = build1 (NOP_EXPR, type, expr);
3678 /* Create an expression whose value is that of EXPR,
3679 converted to type TYPE. The TREE_TYPE of the value
3680 is always TYPE. This function implements all reasonable
3681 conversions; callers should filter out those that are
3682 not permitted by the language being compiled. */
3685 convert (tree type, tree expr)
3687 tree etype = TREE_TYPE (expr);
3688 enum tree_code ecode = TREE_CODE (etype);
3689 enum tree_code code = TREE_CODE (type);
3691 /* If the expression is already of the right type, we are done. */
3695 /* If both input and output have padding and are of variable size, do this
3696 as an unchecked conversion. Likewise if one is a mere variant of the
3697 other, so we avoid a pointless unpad/repad sequence. */
3698 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
3699 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
3700 && (!TREE_CONSTANT (TYPE_SIZE (type))
3701 || !TREE_CONSTANT (TYPE_SIZE (etype))
3702 || gnat_types_compatible_p (type, etype)
3703 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
3704 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
3707 /* If the output type has padding, convert to the inner type and make a
3708 constructor to build the record, unless a variable size is involved. */
3709 else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
3711 VEC(constructor_elt,gc) *v;
3713 /* If we previously converted from another type and our type is
3714 of variable size, remove the conversion to avoid the need for
3715 variable-sized temporaries. Likewise for a conversion between
3716 original and packable version. */
3717 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3718 && (!TREE_CONSTANT (TYPE_SIZE (type))
3719 || (ecode == RECORD_TYPE
3720 && TYPE_NAME (etype)
3721 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
3722 expr = TREE_OPERAND (expr, 0);
3724 /* If we are just removing the padding from expr, convert the original
3725 object if we have variable size in order to avoid the need for some
3726 variable-sized temporaries. Likewise if the padding is a variant
3727 of the other, so we avoid a pointless unpad/repad sequence. */
3728 if (TREE_CODE (expr) == COMPONENT_REF
3729 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
3730 && (!TREE_CONSTANT (TYPE_SIZE (type))
3731 || gnat_types_compatible_p (type,
3732 TREE_TYPE (TREE_OPERAND (expr, 0)))
3733 || (ecode == RECORD_TYPE
3734 && TYPE_NAME (etype)
3735 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
3736 return convert (type, TREE_OPERAND (expr, 0));
3738 /* If the inner type is of self-referential size and the expression type
3739 is a record, do this as an unchecked conversion. But first pad the
3740 expression if possible to have the same size on both sides. */
3741 if (ecode == RECORD_TYPE
3742 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
3744 if (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST)
3745 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
3746 false, false, false, true),
3748 return unchecked_convert (type, expr, false);
3751 /* If we are converting between array types with variable size, do the
3752 final conversion as an unchecked conversion, again to avoid the need
3753 for some variable-sized temporaries. If valid, this conversion is
3754 very likely purely technical and without real effects. */
3755 if (ecode == ARRAY_TYPE
3756 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
3757 && !TREE_CONSTANT (TYPE_SIZE (etype))
3758 && !TREE_CONSTANT (TYPE_SIZE (type)))
3759 return unchecked_convert (type,
3760 convert (TREE_TYPE (TYPE_FIELDS (type)),
3764 v = VEC_alloc (constructor_elt, gc, 1);
3765 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
3766 convert (TREE_TYPE (TYPE_FIELDS (type)), expr));
3767 return gnat_build_constructor (type, v);
3770 /* If the input type has padding, remove it and convert to the output type.
3771 The conditions ordering is arranged to ensure that the output type is not
3772 a padding type here, as it is not clear whether the conversion would
3773 always be correct if this was to happen. */
3774 else if (ecode == RECORD_TYPE && TYPE_PADDING_P (etype))
3778 /* If we have just converted to this padded type, just get the
3779 inner expression. */
3780 if (TREE_CODE (expr) == CONSTRUCTOR
3781 && !VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (expr))
3782 && VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->index
3783 == TYPE_FIELDS (etype))
3785 = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->value;
3787 /* Otherwise, build an explicit component reference. */
3790 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
3792 return convert (type, unpadded);
3795 /* If the input is a biased type, adjust first. */
3796 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
3797 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
3798 fold_convert (TREE_TYPE (etype),
3800 TYPE_MIN_VALUE (etype)));
3802 /* If the input is a justified modular type, we need to extract the actual
3803 object before converting it to any other type with the exceptions of an
3804 unconstrained array or of a mere type variant. It is useful to avoid the
3805 extraction and conversion in the type variant case because it could end
3806 up replacing a VAR_DECL expr by a constructor and we might be about the
3807 take the address of the result. */
3808 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
3809 && code != UNCONSTRAINED_ARRAY_TYPE
3810 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
3811 return convert (type, build_component_ref (expr, NULL_TREE,
3812 TYPE_FIELDS (etype), false));
3814 /* If converting to a type that contains a template, convert to the data
3815 type and then build the template. */
3816 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
3818 tree obj_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
3819 VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, 2);
3821 /* If the source already has a template, get a reference to the
3822 associated array only, as we are going to rebuild a template
3823 for the target type anyway. */
3824 expr = maybe_unconstrained_array (expr);
3826 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
3827 build_template (TREE_TYPE (TYPE_FIELDS (type)),
3828 obj_type, NULL_TREE));
3829 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
3830 convert (obj_type, expr));
3831 return gnat_build_constructor (type, v);
3834 /* There are some special cases of expressions that we process
3836 switch (TREE_CODE (expr))
3842 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
3843 conversion in gnat_expand_expr. NULL_EXPR does not represent
3844 and actual value, so no conversion is needed. */
3845 expr = copy_node (expr);
3846 TREE_TYPE (expr) = type;
3850 /* If we are converting a STRING_CST to another constrained array type,
3851 just make a new one in the proper type. */
3852 if (code == ecode && AGGREGATE_TYPE_P (etype)
3853 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
3854 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
3856 expr = copy_node (expr);
3857 TREE_TYPE (expr) = type;
3863 /* If we are converting a VECTOR_CST to a mere variant type, just make
3864 a new one in the proper type. */
3865 if (code == ecode && gnat_types_compatible_p (type, etype))
3867 expr = copy_node (expr);
3868 TREE_TYPE (expr) = type;
3873 /* If we are converting a CONSTRUCTOR to a mere variant type, just make
3874 a new one in the proper type. */
3875 if (code == ecode && gnat_types_compatible_p (type, etype))
3877 expr = copy_node (expr);
3878 TREE_TYPE (expr) = type;
3882 /* Likewise for a conversion between original and packable version, or
3883 conversion between types of the same size and with the same list of
3884 fields, but we have to work harder to preserve type consistency. */
3886 && code == RECORD_TYPE
3887 && (TYPE_NAME (type) == TYPE_NAME (etype)
3888 || tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (etype))))
3891 VEC(constructor_elt,gc) *e = CONSTRUCTOR_ELTS (expr);
3892 unsigned HOST_WIDE_INT len = VEC_length (constructor_elt, e);
3893 VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, len);
3894 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
3895 unsigned HOST_WIDE_INT idx;
3898 /* Whether we need to clear TREE_CONSTANT et al. on the output
3899 constructor when we convert in place. */
3900 bool clear_constant = false;
3902 FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
3904 constructor_elt *elt;
3905 /* We expect only simple constructors. */
3906 if (!SAME_FIELD_P (index, efield))
3908 /* The field must be the same. */
3909 if (!SAME_FIELD_P (efield, field))
3911 elt = VEC_quick_push (constructor_elt, v, NULL);
3913 elt->value = convert (TREE_TYPE (field), value);
3915 /* If packing has made this field a bitfield and the input
3916 value couldn't be emitted statically any more, we need to
3917 clear TREE_CONSTANT on our output. */
3919 && TREE_CONSTANT (expr)
3920 && !CONSTRUCTOR_BITFIELD_P (efield)
3921 && CONSTRUCTOR_BITFIELD_P (field)
3922 && !initializer_constant_valid_for_bitfield_p (value))
3923 clear_constant = true;
3925 efield = DECL_CHAIN (efield);
3926 field = DECL_CHAIN (field);
3929 /* If we have been able to match and convert all the input fields
3930 to their output type, convert in place now. We'll fallback to a
3931 view conversion downstream otherwise. */
3934 expr = copy_node (expr);
3935 TREE_TYPE (expr) = type;
3936 CONSTRUCTOR_ELTS (expr) = v;
3938 TREE_CONSTANT (expr) = TREE_STATIC (expr) = 0;
3943 /* Likewise for a conversion between array type and vector type with a
3944 compatible representative array. */
3945 else if (code == VECTOR_TYPE
3946 && ecode == ARRAY_TYPE
3947 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
3950 VEC(constructor_elt,gc) *e = CONSTRUCTOR_ELTS (expr);
3951 unsigned HOST_WIDE_INT len = VEC_length (constructor_elt, e);
3952 VEC(constructor_elt,gc) *v;
3953 unsigned HOST_WIDE_INT ix;
3956 /* Build a VECTOR_CST from a *constant* array constructor. */
3957 if (TREE_CONSTANT (expr))
3959 bool constant_p = true;
3961 /* Iterate through elements and check if all constructor
3962 elements are *_CSTs. */
3963 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
3964 if (!CONSTANT_CLASS_P (value))
3971 return build_vector_from_ctor (type,
3972 CONSTRUCTOR_ELTS (expr));
3975 /* Otherwise, build a regular vector constructor. */
3976 v = VEC_alloc (constructor_elt, gc, len);
3977 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
3979 constructor_elt *elt = VEC_quick_push (constructor_elt, v, NULL);
3980 elt->index = NULL_TREE;
3983 expr = copy_node (expr);
3984 TREE_TYPE (expr) = type;
3985 CONSTRUCTOR_ELTS (expr) = v;
3990 case UNCONSTRAINED_ARRAY_REF:
3991 /* Convert this to the type of the inner array by getting the address of
3992 the array from the template. */
3993 expr = TREE_OPERAND (expr, 0);
3994 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
3995 build_component_ref (expr, NULL_TREE,
3999 etype = TREE_TYPE (expr);
4000 ecode = TREE_CODE (etype);
4003 case VIEW_CONVERT_EXPR:
4005 /* GCC 4.x is very sensitive to type consistency overall, and view
4006 conversions thus are very frequent. Even though just "convert"ing
4007 the inner operand to the output type is fine in most cases, it
4008 might expose unexpected input/output type mismatches in special
4009 circumstances so we avoid such recursive calls when we can. */
4010 tree op0 = TREE_OPERAND (expr, 0);
4012 /* If we are converting back to the original type, we can just
4013 lift the input conversion. This is a common occurrence with
4014 switches back-and-forth amongst type variants. */
4015 if (type == TREE_TYPE (op0))
4018 /* Otherwise, if we're converting between two aggregate or vector
4019 types, we might be allowed to substitute the VIEW_CONVERT_EXPR
4020 target type in place or to just convert the inner expression. */
4021 if ((AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
4022 || (VECTOR_TYPE_P (type) && VECTOR_TYPE_P (etype)))
4024 /* If we are converting between mere variants, we can just
4025 substitute the VIEW_CONVERT_EXPR in place. */
4026 if (gnat_types_compatible_p (type, etype))
4027 return build1 (VIEW_CONVERT_EXPR, type, op0);
4029 /* Otherwise, we may just bypass the input view conversion unless
4030 one of the types is a fat pointer, which is handled by
4031 specialized code below which relies on exact type matching. */
4032 else if (!TYPE_IS_FAT_POINTER_P (type)
4033 && !TYPE_IS_FAT_POINTER_P (etype))
4034 return convert (type, op0);
4043 /* Check for converting to a pointer to an unconstrained array. */
4044 if (TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
4045 return convert_to_fat_pointer (type, expr);
4047 /* If we are converting between two aggregate or vector types that are mere
4048 variants, just make a VIEW_CONVERT_EXPR. Likewise when we are converting
4049 to a vector type from its representative array type. */
4050 else if ((code == ecode
4051 && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
4052 && gnat_types_compatible_p (type, etype))
4053 || (code == VECTOR_TYPE
4054 && ecode == ARRAY_TYPE
4055 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4057 return build1 (VIEW_CONVERT_EXPR, type, expr);
4059 /* If we are converting between tagged types, try to upcast properly. */
4060 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4061 && TYPE_ALIGN_OK (etype) && TYPE_ALIGN_OK (type))
4063 tree child_etype = etype;
4065 tree field = TYPE_FIELDS (child_etype);
4066 if (DECL_NAME (field) == parent_name_id && TREE_TYPE (field) == type)
4067 return build_component_ref (expr, NULL_TREE, field, false);
4068 child_etype = TREE_TYPE (field);
4069 } while (TREE_CODE (child_etype) == RECORD_TYPE);
4072 /* If we are converting from a smaller form of record type back to it, just
4073 make a VIEW_CONVERT_EXPR. But first pad the expression to have the same
4074 size on both sides. */
4075 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4076 && smaller_form_type_p (etype, type))
4078 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4079 false, false, false, true),
4081 return build1 (VIEW_CONVERT_EXPR, type, expr);
4084 /* In all other cases of related types, make a NOP_EXPR. */
4085 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
4086 return fold_convert (type, expr);
4091 return fold_build1 (CONVERT_EXPR, type, expr);
4094 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4095 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4096 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4097 return unchecked_convert (type, expr, false);
4098 else if (TYPE_BIASED_REPRESENTATION_P (type))
4099 return fold_convert (type,
4100 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4101 convert (TREE_TYPE (type), expr),
4102 TYPE_MIN_VALUE (type)));
4104 /* ... fall through ... */
4108 /* If we are converting an additive expression to an integer type
4109 with lower precision, be wary of the optimization that can be
4110 applied by convert_to_integer. There are 2 problematic cases:
4111 - if the first operand was originally of a biased type,
4112 because we could be recursively called to convert it
4113 to an intermediate type and thus rematerialize the
4114 additive operator endlessly,
4115 - if the expression contains a placeholder, because an
4116 intermediate conversion that changes the sign could
4117 be inserted and thus introduce an artificial overflow
4118 at compile time when the placeholder is substituted. */
4119 if (code == INTEGER_TYPE
4120 && ecode == INTEGER_TYPE
4121 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4122 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4124 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4126 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4127 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4128 || CONTAINS_PLACEHOLDER_P (expr))
4129 return build1 (NOP_EXPR, type, expr);
4132 return fold (convert_to_integer (type, expr));
4135 case REFERENCE_TYPE:
4136 /* If converting between two pointers to records denoting
4137 both a template and type, adjust if needed to account
4138 for any differing offsets, since one might be negative. */
4139 if (TYPE_IS_THIN_POINTER_P (etype) && TYPE_IS_THIN_POINTER_P (type))
4142 = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
4143 bit_position (TYPE_FIELDS (TREE_TYPE (type))));
4145 = size_binop (CEIL_DIV_EXPR, bit_diff, sbitsize_unit_node);
4146 expr = build1 (NOP_EXPR, type, expr);
4147 TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
4148 if (integer_zerop (byte_diff))
4151 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
4152 fold (convert (sizetype, byte_diff)));
4155 /* If converting to a thin pointer, handle specially. */
4156 if (TYPE_IS_THIN_POINTER_P (type)
4157 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
4158 return convert_to_thin_pointer (type, expr);
4160 /* If converting fat pointer to normal pointer, get the pointer to the
4161 array and then convert it. */
4162 else if (TYPE_IS_FAT_POINTER_P (etype))
4164 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
4166 return fold (convert_to_pointer (type, expr));
4169 return fold (convert_to_real (type, expr));
4172 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
4174 VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, 1);
4176 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4177 convert (TREE_TYPE (TYPE_FIELDS (type)),
4179 return gnat_build_constructor (type, v);
4182 /* ... fall through ... */
4185 /* In these cases, assume the front-end has validated the conversion.
4186 If the conversion is valid, it will be a bit-wise conversion, so
4187 it can be viewed as an unchecked conversion. */
4188 return unchecked_convert (type, expr, false);
4191 /* This is a either a conversion between a tagged type and some
4192 subtype, which we have to mark as a UNION_TYPE because of
4193 overlapping fields or a conversion of an Unchecked_Union. */
4194 return unchecked_convert (type, expr, false);
4196 case UNCONSTRAINED_ARRAY_TYPE:
4197 /* If the input is a VECTOR_TYPE, convert to the representative
4198 array type first. */
4199 if (ecode == VECTOR_TYPE)
4201 expr = convert (TYPE_REPRESENTATIVE_ARRAY (etype), expr);
4202 etype = TREE_TYPE (expr);
4203 ecode = TREE_CODE (etype);
4206 /* If EXPR is a constrained array, take its address, convert it to a
4207 fat pointer, and then dereference it. Likewise if EXPR is a
4208 record containing both a template and a constrained array.
4209 Note that a record representing a justified modular type
4210 always represents a packed constrained array. */
4211 if (ecode == ARRAY_TYPE
4212 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
4213 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
4214 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
4217 (INDIRECT_REF, NULL_TREE,
4218 convert_to_fat_pointer (TREE_TYPE (type),
4219 build_unary_op (ADDR_EXPR,
4222 /* Do something very similar for converting one unconstrained
4223 array to another. */
4224 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
4226 build_unary_op (INDIRECT_REF, NULL_TREE,
4227 convert (TREE_TYPE (type),
4228 build_unary_op (ADDR_EXPR,
4234 return fold (convert_to_complex (type, expr));
4241 /* Remove all conversions that are done in EXP. This includes converting
4242 from a padded type or to a justified modular type. If TRUE_ADDRESS
4243 is true, always return the address of the containing object even if
4244 the address is not bit-aligned. */
4247 remove_conversions (tree exp, bool true_address)
4249 switch (TREE_CODE (exp))
4253 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4254 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
4256 remove_conversions (VEC_index (constructor_elt,
4257 CONSTRUCTOR_ELTS (exp), 0)->value,
4262 if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
4263 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4266 case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
4268 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4277 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
4278 refers to the underlying array. If it has TYPE_CONTAINS_TEMPLATE_P,
4279 likewise return an expression pointing to the underlying array. */
4282 maybe_unconstrained_array (tree exp)
4284 enum tree_code code = TREE_CODE (exp);
4287 switch (TREE_CODE (TREE_TYPE (exp)))
4289 case UNCONSTRAINED_ARRAY_TYPE:
4290 if (code == UNCONSTRAINED_ARRAY_REF)
4292 new_exp = TREE_OPERAND (exp, 0);
4294 = build_unary_op (INDIRECT_REF, NULL_TREE,
4295 build_component_ref (new_exp, NULL_TREE,
4297 (TREE_TYPE (new_exp)),
4299 TREE_READONLY (new_exp) = TREE_READONLY (exp);
4303 else if (code == NULL_EXPR)
4304 return build1 (NULL_EXPR,
4305 TREE_TYPE (TREE_TYPE (TYPE_FIELDS
4306 (TREE_TYPE (TREE_TYPE (exp))))),
4307 TREE_OPERAND (exp, 0));
4310 /* If this is a padded type, convert to the unpadded type and see if
4311 it contains a template. */
4312 if (TYPE_PADDING_P (TREE_TYPE (exp)))
4314 new_exp = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp))), exp);
4315 if (TREE_CODE (TREE_TYPE (new_exp)) == RECORD_TYPE
4316 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (new_exp)))
4318 build_component_ref (new_exp, NULL_TREE,
4320 (TYPE_FIELDS (TREE_TYPE (new_exp))),
4323 else if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
4325 build_component_ref (exp, NULL_TREE,
4326 DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))),
4337 /* If EXP's type is a VECTOR_TYPE, return EXP converted to the associated
4338 TYPE_REPRESENTATIVE_ARRAY. */
4341 maybe_vector_array (tree exp)
4343 tree etype = TREE_TYPE (exp);
4345 if (VECTOR_TYPE_P (etype))
4346 exp = convert (TYPE_REPRESENTATIVE_ARRAY (etype), exp);
4351 /* Return true if EXPR is an expression that can be folded as an operand
4352 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
4355 can_fold_for_view_convert_p (tree expr)
4359 /* The folder will fold NOP_EXPRs between integral types with the same
4360 precision (in the middle-end's sense). We cannot allow it if the
4361 types don't have the same precision in the Ada sense as well. */
4362 if (TREE_CODE (expr) != NOP_EXPR)
4365 t1 = TREE_TYPE (expr);
4366 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
4368 /* Defer to the folder for non-integral conversions. */
4369 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
4372 /* Only fold conversions that preserve both precisions. */
4373 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
4374 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
4380 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
4381 If NOTRUNC_P is true, truncation operations should be suppressed.
4383 Special care is required with (source or target) integral types whose
4384 precision is not equal to their size, to make sure we fetch or assign
4385 the value bits whose location might depend on the endianness, e.g.
4387 Rmsize : constant := 8;
4388 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
4390 type Bit_Array is array (1 .. Rmsize) of Boolean;
4391 pragma Pack (Bit_Array);
4393 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
4395 Value : Int := 2#1000_0001#;
4396 Vbits : Bit_Array := To_Bit_Array (Value);
4398 we expect the 8 bits at Vbits'Address to always contain Value, while
4399 their original location depends on the endianness, at Value'Address
4400 on a little-endian architecture but not on a big-endian one. */
4403 unchecked_convert (tree type, tree expr, bool notrunc_p)
4405 tree etype = TREE_TYPE (expr);
4406 enum tree_code ecode = TREE_CODE (etype);
4407 enum tree_code code = TREE_CODE (type);
4410 /* If the expression is already of the right type, we are done. */
4414 /* If both types types are integral just do a normal conversion.
4415 Likewise for a conversion to an unconstrained array. */
4416 if ((((INTEGRAL_TYPE_P (type)
4417 && !(code == INTEGER_TYPE && TYPE_VAX_FLOATING_POINT_P (type)))
4418 || (POINTER_TYPE_P (type) && ! TYPE_IS_THIN_POINTER_P (type))
4419 || (code == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (type)))
4420 && ((INTEGRAL_TYPE_P (etype)
4421 && !(ecode == INTEGER_TYPE && TYPE_VAX_FLOATING_POINT_P (etype)))
4422 || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype))
4423 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype))))
4424 || code == UNCONSTRAINED_ARRAY_TYPE)
4426 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
4428 tree ntype = copy_type (etype);
4429 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
4430 TYPE_MAIN_VARIANT (ntype) = ntype;
4431 expr = build1 (NOP_EXPR, ntype, expr);
4434 if (code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
4436 tree rtype = copy_type (type);
4437 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
4438 TYPE_MAIN_VARIANT (rtype) = rtype;
4439 expr = convert (rtype, expr);
4440 expr = build1 (NOP_EXPR, type, expr);
4443 expr = convert (type, expr);
4446 /* If we are converting to an integral type whose precision is not equal
4447 to its size, first unchecked convert to a record that contains an
4448 object of the output type. Then extract the field. */
4449 else if (INTEGRAL_TYPE_P (type)
4450 && TYPE_RM_SIZE (type)
4451 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4452 GET_MODE_BITSIZE (TYPE_MODE (type))))
4454 tree rec_type = make_node (RECORD_TYPE);
4455 tree field = create_field_decl (get_identifier ("OBJ"), type, rec_type,
4456 NULL_TREE, NULL_TREE, 1, 0);
4458 TYPE_FIELDS (rec_type) = field;
4459 layout_type (rec_type);
4461 expr = unchecked_convert (rec_type, expr, notrunc_p);
4462 expr = build_component_ref (expr, NULL_TREE, field, false);
4465 /* Similarly if we are converting from an integral type whose precision
4466 is not equal to its size. */
4467 else if (INTEGRAL_TYPE_P (etype)
4468 && TYPE_RM_SIZE (etype)
4469 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
4470 GET_MODE_BITSIZE (TYPE_MODE (etype))))
4472 tree rec_type = make_node (RECORD_TYPE);
4473 tree field = create_field_decl (get_identifier ("OBJ"), etype, rec_type,
4474 NULL_TREE, NULL_TREE, 1, 0);
4475 VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, 1);
4477 TYPE_FIELDS (rec_type) = field;
4478 layout_type (rec_type);
4480 CONSTRUCTOR_APPEND_ELT (v, field, expr);
4481 expr = gnat_build_constructor (rec_type, v);
4482 expr = unchecked_convert (type, expr, notrunc_p);
4485 /* If we are converting from a scalar type to a type with a different size,
4486 we need to pad to have the same size on both sides.
4488 ??? We cannot do it unconditionally because unchecked conversions are
4489 used liberally by the front-end to implement polymorphism, e.g. in:
4491 S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
4492 return p___size__4 (p__object!(S191s.all));
4494 so we skip all expressions that are references. */
4495 else if (!REFERENCE_CLASS_P (expr)
4496 && !AGGREGATE_TYPE_P (etype)
4497 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
4498 && (c = tree_int_cst_compare (TYPE_SIZE (etype), TYPE_SIZE (type))))
4502 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4503 false, false, false, true),
4505 expr = unchecked_convert (type, expr, notrunc_p);
4509 tree rec_type = maybe_pad_type (type, TYPE_SIZE (etype), 0, Empty,
4510 false, false, false, true);
4511 expr = unchecked_convert (rec_type, expr, notrunc_p);
4512 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (rec_type),
4517 /* We have a special case when we are converting between two unconstrained
4518 array types. In that case, take the address, convert the fat pointer
4519 types, and dereference. */
4520 else if (ecode == code && code == UNCONSTRAINED_ARRAY_TYPE)
4521 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
4522 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
4523 build_unary_op (ADDR_EXPR, NULL_TREE,
4526 /* Another special case is when we are converting to a vector type from its
4527 representative array type; this a regular conversion. */
4528 else if (code == VECTOR_TYPE
4529 && ecode == ARRAY_TYPE
4530 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4532 expr = convert (type, expr);
4536 expr = maybe_unconstrained_array (expr);
4537 etype = TREE_TYPE (expr);
4538 ecode = TREE_CODE (etype);
4539 if (can_fold_for_view_convert_p (expr))
4540 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
4542 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
4545 /* If the result is an integral type whose precision is not equal to its
4546 size, sign- or zero-extend the result. We need not do this if the input
4547 is an integral type of the same precision and signedness or if the output
4548 is a biased type or if both the input and output are unsigned. */
4550 && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
4551 && !(code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
4552 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4553 GET_MODE_BITSIZE (TYPE_MODE (type)))
4554 && !(INTEGRAL_TYPE_P (etype)
4555 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
4556 && operand_equal_p (TYPE_RM_SIZE (type),
4557 (TYPE_RM_SIZE (etype) != 0
4558 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
4560 && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
4563 = gnat_type_for_mode (TYPE_MODE (type), TYPE_UNSIGNED (type));
4565 = convert (base_type,
4566 size_binop (MINUS_EXPR,
4568 (GET_MODE_BITSIZE (TYPE_MODE (type))),
4569 TYPE_RM_SIZE (type)));
4572 build_binary_op (RSHIFT_EXPR, base_type,
4573 build_binary_op (LSHIFT_EXPR, base_type,
4574 convert (base_type, expr),
4579 /* An unchecked conversion should never raise Constraint_Error. The code
4580 below assumes that GCC's conversion routines overflow the same way that
4581 the underlying hardware does. This is probably true. In the rare case
4582 when it is false, we can rely on the fact that such conversions are
4583 erroneous anyway. */
4584 if (TREE_CODE (expr) == INTEGER_CST)
4585 TREE_OVERFLOW (expr) = 0;
4587 /* If the sizes of the types differ and this is an VIEW_CONVERT_EXPR,
4588 show no longer constant. */
4589 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4590 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
4592 TREE_CONSTANT (expr) = 0;
4597 /* Return the appropriate GCC tree code for the specified GNAT_TYPE,
4598 the latter being a record type as predicated by Is_Record_Type. */
4601 tree_code_for_record_type (Entity_Id gnat_type)
4603 Node_Id component_list
4604 = Component_List (Type_Definition
4606 (Implementation_Base_Type (gnat_type))));
4609 /* Make this a UNION_TYPE unless it's either not an Unchecked_Union or
4610 we have a non-discriminant field outside a variant. In either case,
4611 it's a RECORD_TYPE. */
4613 if (!Is_Unchecked_Union (gnat_type))
4616 for (component = First_Non_Pragma (Component_Items (component_list));
4617 Present (component);
4618 component = Next_Non_Pragma (component))
4619 if (Ekind (Defining_Entity (component)) == E_Component)
4625 /* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
4626 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
4627 according to the presence of an alignment clause on the type or, if it
4628 is an array, on the component type. */
4631 is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
4633 gnat_type = Underlying_Type (gnat_type);
4635 *align_clause = Present (Alignment_Clause (gnat_type));
4637 if (Is_Array_Type (gnat_type))
4639 gnat_type = Underlying_Type (Component_Type (gnat_type));
4640 if (Present (Alignment_Clause (gnat_type)))
4641 *align_clause = true;
4644 if (!Is_Floating_Point_Type (gnat_type))
4647 if (UI_To_Int (Esize (gnat_type)) != 64)
4653 /* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
4654 size is greater or equal to 64 bits, or an array of such a type. Set
4655 ALIGN_CLAUSE according to the presence of an alignment clause on the
4656 type or, if it is an array, on the component type. */
4659 is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
4661 gnat_type = Underlying_Type (gnat_type);
4663 *align_clause = Present (Alignment_Clause (gnat_type));
4665 if (Is_Array_Type (gnat_type))
4667 gnat_type = Underlying_Type (Component_Type (gnat_type));
4668 if (Present (Alignment_Clause (gnat_type)))
4669 *align_clause = true;
4672 if (!Is_Scalar_Type (gnat_type))
4675 if (UI_To_Int (Esize (gnat_type)) < 64)
4681 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
4682 component of an aggregate type. */
4685 type_for_nonaliased_component_p (tree gnu_type)
4687 /* If the type is passed by reference, we may have pointers to the
4688 component so it cannot be made non-aliased. */
4689 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
4692 /* We used to say that any component of aggregate type is aliased
4693 because the front-end may take 'Reference of it. The front-end
4694 has been enhanced in the meantime so as to use a renaming instead
4695 in most cases, but the back-end can probably take the address of
4696 such a component too so we go for the conservative stance.
4698 For instance, we might need the address of any array type, even
4699 if normally passed by copy, to construct a fat pointer if the
4700 component is used as an actual for an unconstrained formal.
4702 Likewise for record types: even if a specific record subtype is
4703 passed by copy, the parent type might be passed by ref (e.g. if
4704 it's of variable size) and we might take the address of a child
4705 component to pass to a parent formal. We have no way to check
4706 for such conditions here. */
4707 if (AGGREGATE_TYPE_P (gnu_type))
4713 /* Return true if TYPE is a smaller form of ORIG_TYPE. */
4716 smaller_form_type_p (tree type, tree orig_type)
4720 /* We're not interested in variants here. */
4721 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig_type))
4724 /* Like a variant, a packable version keeps the original TYPE_NAME. */
4725 if (TYPE_NAME (type) != TYPE_NAME (orig_type))
4728 size = TYPE_SIZE (type);
4729 osize = TYPE_SIZE (orig_type);
4731 if (!(TREE_CODE (size) == INTEGER_CST && TREE_CODE (osize) == INTEGER_CST))
4734 return tree_int_cst_lt (size, osize) != 0;
4737 /* Perform final processing on global variables. */
4740 gnat_write_global_declarations (void)
4742 /* Proceed to optimize and emit assembly.
4743 FIXME: shouldn't be the front end's responsibility to call this. */
4744 cgraph_finalize_compilation_unit ();
4746 /* Emit debug info for all global declarations. */
4747 emit_debug_global_declarations (VEC_address (tree, global_decls),
4748 VEC_length (tree, global_decls));
4751 /* ************************************************************************
4752 * * GCC builtins support *
4753 * ************************************************************************ */
4755 /* The general scheme is fairly simple:
4757 For each builtin function/type to be declared, gnat_install_builtins calls
4758 internal facilities which eventually get to gnat_push_decl, which in turn
4759 tracks the so declared builtin function decls in the 'builtin_decls' global
4760 datastructure. When an Intrinsic subprogram declaration is processed, we
4761 search this global datastructure to retrieve the associated BUILT_IN DECL
4764 /* Search the chain of currently available builtin declarations for a node
4765 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
4766 found, if any, or NULL_TREE otherwise. */
4768 builtin_decl_for (tree name)
4773 FOR_EACH_VEC_ELT (tree, builtin_decls, i, decl)
4774 if (DECL_NAME (decl) == name)
4780 /* The code below eventually exposes gnat_install_builtins, which declares
4781 the builtin types and functions we might need, either internally or as
4782 user accessible facilities.
4784 ??? This is a first implementation shot, still in rough shape. It is
4785 heavily inspired from the "C" family implementation, with chunks copied
4786 verbatim from there.
4788 Two obvious TODO candidates are
4789 o Use a more efficient name/decl mapping scheme
4790 o Devise a middle-end infrastructure to avoid having to copy
4791 pieces between front-ends. */
4793 /* ----------------------------------------------------------------------- *
4794 * BUILTIN ELEMENTARY TYPES *
4795 * ----------------------------------------------------------------------- */
4797 /* Standard data types to be used in builtin argument declarations. */
4801 CTI_SIGNED_SIZE_TYPE, /* For format checking only. */
4803 CTI_CONST_STRING_TYPE,
4808 static tree c_global_trees[CTI_MAX];
4810 #define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
4811 #define string_type_node c_global_trees[CTI_STRING_TYPE]
4812 #define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
4814 /* ??? In addition some attribute handlers, we currently don't support a
4815 (small) number of builtin-types, which in turns inhibits support for a
4816 number of builtin functions. */
4817 #define wint_type_node void_type_node
4818 #define intmax_type_node void_type_node
4819 #define uintmax_type_node void_type_node
4821 /* Build the void_list_node (void_type_node having been created). */
4824 build_void_list_node (void)
4826 tree t = build_tree_list (NULL_TREE, void_type_node);
4830 /* Used to help initialize the builtin-types.def table. When a type of
4831 the correct size doesn't exist, use error_mark_node instead of NULL.
4832 The later results in segfaults even when a decl using the type doesn't
4836 builtin_type_for_size (int size, bool unsignedp)
4838 tree type = gnat_type_for_size (size, unsignedp);
4839 return type ? type : error_mark_node;
4842 /* Build/push the elementary type decls that builtin functions/types
4846 install_builtin_elementary_types (void)
4848 signed_size_type_node = gnat_signed_type (size_type_node);
4849 pid_type_node = integer_type_node;
4850 void_list_node = build_void_list_node ();
4852 string_type_node = build_pointer_type (char_type_node);
4853 const_string_type_node
4854 = build_pointer_type (build_qualified_type
4855 (char_type_node, TYPE_QUAL_CONST));
4858 /* ----------------------------------------------------------------------- *
4859 * BUILTIN FUNCTION TYPES *
4860 * ----------------------------------------------------------------------- */
4862 /* Now, builtin function types per se. */
4866 #define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
4867 #define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
4868 #define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
4869 #define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
4870 #define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
4871 #define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
4872 #define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
4873 #define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6) NAME,
4874 #define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7) NAME,
4875 #define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
4876 #define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
4877 #define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
4878 #define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
4879 #define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
4880 #define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG6) \
4882 #define DEF_POINTER_TYPE(NAME, TYPE) NAME,
4883 #include "builtin-types.def"
4884 #undef DEF_PRIMITIVE_TYPE
4885 #undef DEF_FUNCTION_TYPE_0
4886 #undef DEF_FUNCTION_TYPE_1
4887 #undef DEF_FUNCTION_TYPE_2
4888 #undef DEF_FUNCTION_TYPE_3
4889 #undef DEF_FUNCTION_TYPE_4
4890 #undef DEF_FUNCTION_TYPE_5
4891 #undef DEF_FUNCTION_TYPE_6
4892 #undef DEF_FUNCTION_TYPE_7
4893 #undef DEF_FUNCTION_TYPE_VAR_0
4894 #undef DEF_FUNCTION_TYPE_VAR_1
4895 #undef DEF_FUNCTION_TYPE_VAR_2
4896 #undef DEF_FUNCTION_TYPE_VAR_3
4897 #undef DEF_FUNCTION_TYPE_VAR_4
4898 #undef DEF_FUNCTION_TYPE_VAR_5
4899 #undef DEF_POINTER_TYPE
4903 typedef enum c_builtin_type builtin_type;
4905 /* A temporary array used in communication with def_fn_type. */
4906 static GTY(()) tree builtin_types[(int) BT_LAST + 1];
4908 /* A helper function for install_builtin_types. Build function type
4909 for DEF with return type RET and N arguments. If VAR is true, then the
4910 function should be variadic after those N arguments.
4912 Takes special care not to ICE if any of the types involved are
4913 error_mark_node, which indicates that said type is not in fact available
4914 (see builtin_type_for_size). In which case the function type as a whole
4915 should be error_mark_node. */
4918 def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
4920 tree args = NULL, t;
4925 for (i = 0; i < n; ++i)
4927 builtin_type a = (builtin_type) va_arg (list, int);
4928 t = builtin_types[a];
4929 if (t == error_mark_node)
4931 args = tree_cons (NULL_TREE, t, args);
4935 args = nreverse (args);
4937 args = chainon (args, void_list_node);
4939 t = builtin_types[ret];
4940 if (t == error_mark_node)
4942 t = build_function_type (t, args);
4945 builtin_types[def] = t;
4949 /* Build the builtin function types and install them in the builtin_types
4950 array for later use in builtin function decls. */
4953 install_builtin_function_types (void)
4955 tree va_list_ref_type_node;
4956 tree va_list_arg_type_node;
4958 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4960 va_list_arg_type_node = va_list_ref_type_node =
4961 build_pointer_type (TREE_TYPE (va_list_type_node));
4965 va_list_arg_type_node = va_list_type_node;
4966 va_list_ref_type_node = build_reference_type (va_list_type_node);
4969 #define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
4970 builtin_types[ENUM] = VALUE;
4971 #define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
4972 def_fn_type (ENUM, RETURN, 0, 0);
4973 #define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
4974 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
4975 #define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
4976 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
4977 #define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
4978 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
4979 #define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
4980 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
4981 #define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
4982 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
4983 #define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
4985 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
4986 #define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
4988 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
4989 #define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
4990 def_fn_type (ENUM, RETURN, 1, 0);
4991 #define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
4992 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
4993 #define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
4994 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
4995 #define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
4996 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
4997 #define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
4998 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
4999 #define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5000 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5001 #define DEF_POINTER_TYPE(ENUM, TYPE) \
5002 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
5004 #include "builtin-types.def"
5006 #undef DEF_PRIMITIVE_TYPE
5007 #undef DEF_FUNCTION_TYPE_1
5008 #undef DEF_FUNCTION_TYPE_2
5009 #undef DEF_FUNCTION_TYPE_3
5010 #undef DEF_FUNCTION_TYPE_4
5011 #undef DEF_FUNCTION_TYPE_5
5012 #undef DEF_FUNCTION_TYPE_6
5013 #undef DEF_FUNCTION_TYPE_VAR_0
5014 #undef DEF_FUNCTION_TYPE_VAR_1
5015 #undef DEF_FUNCTION_TYPE_VAR_2
5016 #undef DEF_FUNCTION_TYPE_VAR_3
5017 #undef DEF_FUNCTION_TYPE_VAR_4
5018 #undef DEF_FUNCTION_TYPE_VAR_5
5019 #undef DEF_POINTER_TYPE
5020 builtin_types[(int) BT_LAST] = NULL_TREE;
5023 /* ----------------------------------------------------------------------- *
5024 * BUILTIN ATTRIBUTES *
5025 * ----------------------------------------------------------------------- */
5027 enum built_in_attribute
5029 #define DEF_ATTR_NULL_TREE(ENUM) ENUM,
5030 #define DEF_ATTR_INT(ENUM, VALUE) ENUM,
5031 #define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
5032 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
5033 #include "builtin-attrs.def"
5034 #undef DEF_ATTR_NULL_TREE
5036 #undef DEF_ATTR_IDENT
5037 #undef DEF_ATTR_TREE_LIST
5041 static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
5044 install_builtin_attributes (void)
5046 /* Fill in the built_in_attributes array. */
5047 #define DEF_ATTR_NULL_TREE(ENUM) \
5048 built_in_attributes[(int) ENUM] = NULL_TREE;
5049 #define DEF_ATTR_INT(ENUM, VALUE) \
5050 built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
5051 #define DEF_ATTR_IDENT(ENUM, STRING) \
5052 built_in_attributes[(int) ENUM] = get_identifier (STRING);
5053 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \
5054 built_in_attributes[(int) ENUM] \
5055 = tree_cons (built_in_attributes[(int) PURPOSE], \
5056 built_in_attributes[(int) VALUE], \
5057 built_in_attributes[(int) CHAIN]);
5058 #include "builtin-attrs.def"
5059 #undef DEF_ATTR_NULL_TREE
5061 #undef DEF_ATTR_IDENT
5062 #undef DEF_ATTR_TREE_LIST
5065 /* Handle a "const" attribute; arguments as in
5066 struct attribute_spec.handler. */
5069 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
5070 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5073 if (TREE_CODE (*node) == FUNCTION_DECL)
5074 TREE_READONLY (*node) = 1;
5076 *no_add_attrs = true;
5081 /* Handle a "nothrow" attribute; arguments as in
5082 struct attribute_spec.handler. */
5085 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
5086 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5089 if (TREE_CODE (*node) == FUNCTION_DECL)
5090 TREE_NOTHROW (*node) = 1;
5092 *no_add_attrs = true;
5097 /* Handle a "pure" attribute; arguments as in
5098 struct attribute_spec.handler. */
5101 handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5102 int ARG_UNUSED (flags), bool *no_add_attrs)
5104 if (TREE_CODE (*node) == FUNCTION_DECL)
5105 DECL_PURE_P (*node) = 1;
5106 /* ??? TODO: Support types. */
5109 warning (OPT_Wattributes, "%qs attribute ignored",
5110 IDENTIFIER_POINTER (name));
5111 *no_add_attrs = true;
5117 /* Handle a "no vops" attribute; arguments as in
5118 struct attribute_spec.handler. */
5121 handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
5122 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5123 bool *ARG_UNUSED (no_add_attrs))
5125 gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
5126 DECL_IS_NOVOPS (*node) = 1;
5130 /* Helper for nonnull attribute handling; fetch the operand number
5131 from the attribute argument list. */
5134 get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
5136 /* Verify the arg number is a constant. */
5137 if (TREE_CODE (arg_num_expr) != INTEGER_CST
5138 || TREE_INT_CST_HIGH (arg_num_expr) != 0)
5141 *valp = TREE_INT_CST_LOW (arg_num_expr);
5145 /* Handle the "nonnull" attribute. */
5147 handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
5148 tree args, int ARG_UNUSED (flags),
5152 unsigned HOST_WIDE_INT attr_arg_num;
5154 /* If no arguments are specified, all pointer arguments should be
5155 non-null. Verify a full prototype is given so that the arguments
5156 will have the correct types when we actually check them later. */
5159 if (!prototype_p (type))
5161 error ("nonnull attribute without arguments on a non-prototype");
5162 *no_add_attrs = true;
5167 /* Argument list specified. Verify that each argument number references
5168 a pointer argument. */
5169 for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
5172 unsigned HOST_WIDE_INT arg_num = 0, ck_num;
5174 if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
5176 error ("nonnull argument has invalid operand number (argument %lu)",
5177 (unsigned long) attr_arg_num);
5178 *no_add_attrs = true;
5182 argument = TYPE_ARG_TYPES (type);
5185 for (ck_num = 1; ; ck_num++)
5187 if (!argument || ck_num == arg_num)
5189 argument = TREE_CHAIN (argument);
5193 || TREE_CODE (TREE_VALUE (argument)) == VOID_TYPE)
5195 error ("nonnull argument with out-of-range operand number "
5196 "(argument %lu, operand %lu)",
5197 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5198 *no_add_attrs = true;
5202 if (TREE_CODE (TREE_VALUE (argument)) != POINTER_TYPE)
5204 error ("nonnull argument references non-pointer operand "
5205 "(argument %lu, operand %lu)",
5206 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5207 *no_add_attrs = true;
5216 /* Handle a "sentinel" attribute. */
5219 handle_sentinel_attribute (tree *node, tree name, tree args,
5220 int ARG_UNUSED (flags), bool *no_add_attrs)
5222 tree params = TYPE_ARG_TYPES (*node);
5224 if (!prototype_p (*node))
5226 warning (OPT_Wattributes,
5227 "%qs attribute requires prototypes with named arguments",
5228 IDENTIFIER_POINTER (name));
5229 *no_add_attrs = true;
5233 while (TREE_CHAIN (params))
5234 params = TREE_CHAIN (params);
5236 if (VOID_TYPE_P (TREE_VALUE (params)))
5238 warning (OPT_Wattributes,
5239 "%qs attribute only applies to variadic functions",
5240 IDENTIFIER_POINTER (name));
5241 *no_add_attrs = true;
5247 tree position = TREE_VALUE (args);
5249 if (TREE_CODE (position) != INTEGER_CST)
5251 warning (0, "requested position is not an integer constant");
5252 *no_add_attrs = true;
5256 if (tree_int_cst_lt (position, integer_zero_node))
5258 warning (0, "requested position is less than zero");
5259 *no_add_attrs = true;
5267 /* Handle a "noreturn" attribute; arguments as in
5268 struct attribute_spec.handler. */
5271 handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5272 int ARG_UNUSED (flags), bool *no_add_attrs)
5274 tree type = TREE_TYPE (*node);
5276 /* See FIXME comment in c_common_attribute_table. */
5277 if (TREE_CODE (*node) == FUNCTION_DECL)
5278 TREE_THIS_VOLATILE (*node) = 1;
5279 else if (TREE_CODE (type) == POINTER_TYPE
5280 && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
5282 = build_pointer_type
5283 (build_type_variant (TREE_TYPE (type),
5284 TYPE_READONLY (TREE_TYPE (type)), 1));
5287 warning (OPT_Wattributes, "%qs attribute ignored",
5288 IDENTIFIER_POINTER (name));
5289 *no_add_attrs = true;
5295 /* Handle a "leaf" attribute; arguments as in
5296 struct attribute_spec.handler. */
5299 handle_leaf_attribute (tree *node, tree name,
5300 tree ARG_UNUSED (args),
5301 int ARG_UNUSED (flags), bool *no_add_attrs)
5303 if (TREE_CODE (*node) != FUNCTION_DECL)
5305 warning (OPT_Wattributes, "%qE attribute ignored", name);
5306 *no_add_attrs = true;
5308 if (!TREE_PUBLIC (*node))
5310 warning (OPT_Wattributes, "%qE attribute has no effect", name);
5311 *no_add_attrs = true;
5317 /* Handle a "malloc" attribute; arguments as in
5318 struct attribute_spec.handler. */
5321 handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5322 int ARG_UNUSED (flags), bool *no_add_attrs)
5324 if (TREE_CODE (*node) == FUNCTION_DECL
5325 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
5326 DECL_IS_MALLOC (*node) = 1;
5329 warning (OPT_Wattributes, "%qs attribute ignored",
5330 IDENTIFIER_POINTER (name));
5331 *no_add_attrs = true;
5337 /* Fake handler for attributes we don't properly support. */
5340 fake_attribute_handler (tree * ARG_UNUSED (node),
5341 tree ARG_UNUSED (name),
5342 tree ARG_UNUSED (args),
5343 int ARG_UNUSED (flags),
5344 bool * ARG_UNUSED (no_add_attrs))
5349 /* Handle a "type_generic" attribute. */
5352 handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
5353 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5354 bool * ARG_UNUSED (no_add_attrs))
5358 /* Ensure we have a function type. */
5359 gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);
5361 params = TYPE_ARG_TYPES (*node);
5362 while (params && ! VOID_TYPE_P (TREE_VALUE (params)))
5363 params = TREE_CHAIN (params);
5365 /* Ensure we have a variadic function. */
5366 gcc_assert (!params);
5371 /* Handle a "vector_size" attribute; arguments as in
5372 struct attribute_spec.handler. */
5375 handle_vector_size_attribute (tree *node, tree name, tree args,
5376 int ARG_UNUSED (flags),
5379 unsigned HOST_WIDE_INT vecsize, nunits;
5380 enum machine_mode orig_mode;
5381 tree type = *node, new_type, size;
5383 *no_add_attrs = true;
5385 size = TREE_VALUE (args);
5387 if (!host_integerp (size, 1))
5389 warning (OPT_Wattributes, "%qs attribute ignored",
5390 IDENTIFIER_POINTER (name));
5394 /* Get the vector size (in bytes). */
5395 vecsize = tree_low_cst (size, 1);
5397 /* We need to provide for vector pointers, vector arrays, and
5398 functions returning vectors. For example:
5400 __attribute__((vector_size(16))) short *foo;
5402 In this case, the mode is SI, but the type being modified is
5403 HI, so we need to look further. */
5405 while (POINTER_TYPE_P (type)
5406 || TREE_CODE (type) == FUNCTION_TYPE
5407 || TREE_CODE (type) == ARRAY_TYPE)
5408 type = TREE_TYPE (type);
5410 /* Get the mode of the type being modified. */
5411 orig_mode = TYPE_MODE (type);
5413 if ((!INTEGRAL_TYPE_P (type)
5414 && !SCALAR_FLOAT_TYPE_P (type)
5415 && !FIXED_POINT_TYPE_P (type))
5416 || (!SCALAR_FLOAT_MODE_P (orig_mode)
5417 && GET_MODE_CLASS (orig_mode) != MODE_INT
5418 && !ALL_SCALAR_FIXED_POINT_MODE_P (orig_mode))
5419 || !host_integerp (TYPE_SIZE_UNIT (type), 1)
5420 || TREE_CODE (type) == BOOLEAN_TYPE)
5422 error ("invalid vector type for attribute %qs",
5423 IDENTIFIER_POINTER (name));
5427 if (vecsize % tree_low_cst (TYPE_SIZE_UNIT (type), 1))
5429 error ("vector size not an integral multiple of component size");
5435 error ("zero vector size");
5439 /* Calculate how many units fit in the vector. */
5440 nunits = vecsize / tree_low_cst (TYPE_SIZE_UNIT (type), 1);
5441 if (nunits & (nunits - 1))
5443 error ("number of components of the vector not a power of two");
5447 new_type = build_vector_type (type, nunits);
5449 /* Build back pointers if needed. */
5450 *node = reconstruct_complex_type (*node, new_type);
5455 /* Handle a "vector_type" attribute; arguments as in
5456 struct attribute_spec.handler. */
5459 handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5460 int ARG_UNUSED (flags),
5463 /* Vector representative type and size. */
5464 tree rep_type = *node;
5465 tree rep_size = TYPE_SIZE_UNIT (rep_type);
5468 /* Vector size in bytes and number of units. */
5469 unsigned HOST_WIDE_INT vec_bytes, vec_units;
5471 /* Vector element type and mode. */
5473 enum machine_mode elem_mode;
5475 *no_add_attrs = true;
5477 /* Get the representative array type, possibly nested within a
5478 padding record e.g. for alignment purposes. */
5480 if (TYPE_IS_PADDING_P (rep_type))
5481 rep_type = TREE_TYPE (TYPE_FIELDS (rep_type));
5483 if (TREE_CODE (rep_type) != ARRAY_TYPE)
5485 error ("attribute %qs applies to array types only",
5486 IDENTIFIER_POINTER (name));
5490 /* Silently punt on variable sizes. We can't make vector types for them,
5491 need to ignore them on front-end generated subtypes of unconstrained
5492 bases, and this attribute is for binding implementors, not end-users, so
5493 we should never get there from legitimate explicit uses. */
5495 if (!host_integerp (rep_size, 1))
5498 /* Get the element type/mode and check this is something we know
5499 how to make vectors of. */
5501 elem_type = TREE_TYPE (rep_type);
5502 elem_mode = TYPE_MODE (elem_type);
5504 if ((!INTEGRAL_TYPE_P (elem_type)
5505 && !SCALAR_FLOAT_TYPE_P (elem_type)
5506 && !FIXED_POINT_TYPE_P (elem_type))
5507 || (!SCALAR_FLOAT_MODE_P (elem_mode)
5508 && GET_MODE_CLASS (elem_mode) != MODE_INT
5509 && !ALL_SCALAR_FIXED_POINT_MODE_P (elem_mode))
5510 || !host_integerp (TYPE_SIZE_UNIT (elem_type), 1))
5512 error ("invalid element type for attribute %qs",
5513 IDENTIFIER_POINTER (name));
5517 /* Sanity check the vector size and element type consistency. */
5519 vec_bytes = tree_low_cst (rep_size, 1);
5521 if (vec_bytes % tree_low_cst (TYPE_SIZE_UNIT (elem_type), 1))
5523 error ("vector size not an integral multiple of component size");
5529 error ("zero vector size");
5533 vec_units = vec_bytes / tree_low_cst (TYPE_SIZE_UNIT (elem_type), 1);
5534 if (vec_units & (vec_units - 1))
5536 error ("number of components of the vector not a power of two");
5540 /* Build the vector type and replace. */
5542 *node = build_vector_type (elem_type, vec_units);
5543 rep_name = TYPE_NAME (rep_type);
5544 if (TREE_CODE (rep_name) == TYPE_DECL)
5545 rep_name = DECL_NAME (rep_name);
5546 TYPE_NAME (*node) = rep_name;
5547 TYPE_REPRESENTATIVE_ARRAY (*node) = rep_type;
5552 /* ----------------------------------------------------------------------- *
5553 * BUILTIN FUNCTIONS *
5554 * ----------------------------------------------------------------------- */
5556 /* Worker for DEF_BUILTIN. Possibly define a builtin function with one or two
5557 names. Does not declare a non-__builtin_ function if flag_no_builtin, or
5558 if nonansi_p and flag_no_nonansi_builtin. */
5561 def_builtin_1 (enum built_in_function fncode,
5563 enum built_in_class fnclass,
5564 tree fntype, tree libtype,
5565 bool both_p, bool fallback_p,
5566 bool nonansi_p ATTRIBUTE_UNUSED,
5567 tree fnattrs, bool implicit_p)
5570 const char *libname;
5572 /* Preserve an already installed decl. It most likely was setup in advance
5573 (e.g. as part of the internal builtins) for specific reasons. */
5574 if (built_in_decls[(int) fncode] != NULL_TREE)
5577 gcc_assert ((!both_p && !fallback_p)
5578 || !strncmp (name, "__builtin_",
5579 strlen ("__builtin_")));
5581 libname = name + strlen ("__builtin_");
5582 decl = add_builtin_function (name, fntype, fncode, fnclass,
5583 (fallback_p ? libname : NULL),
5586 /* ??? This is normally further controlled by command-line options
5587 like -fno-builtin, but we don't have them for Ada. */
5588 add_builtin_function (libname, libtype, fncode, fnclass,
5591 built_in_decls[(int) fncode] = decl;
5593 implicit_built_in_decls[(int) fncode] = decl;
5596 static int flag_isoc94 = 0;
5597 static int flag_isoc99 = 0;
5599 /* Install what the common builtins.def offers. */
5602 install_builtin_functions (void)
5604 #define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
5605 NONANSI_P, ATTRS, IMPLICIT, COND) \
5607 def_builtin_1 (ENUM, NAME, CLASS, \
5608 builtin_types[(int) TYPE], \
5609 builtin_types[(int) LIBTYPE], \
5610 BOTH_P, FALLBACK_P, NONANSI_P, \
5611 built_in_attributes[(int) ATTRS], IMPLICIT);
5612 #include "builtins.def"
5616 /* ----------------------------------------------------------------------- *
5617 * BUILTIN FUNCTIONS *
5618 * ----------------------------------------------------------------------- */
5620 /* Install the builtin functions we might need. */
5623 gnat_install_builtins (void)
5625 install_builtin_elementary_types ();
5626 install_builtin_function_types ();
5627 install_builtin_attributes ();
5629 /* Install builtins used by generic middle-end pieces first. Some of these
5630 know about internal specificities and control attributes accordingly, for
5631 instance __builtin_alloca vs no-throw and -fstack-check. We will ignore
5632 the generic definition from builtins.def. */
5633 build_common_builtin_nodes ();
5635 /* Now, install the target specific builtins, such as the AltiVec family on
5636 ppc, and the common set as exposed by builtins.def. */
5637 targetm.init_builtins ();
5638 install_builtin_functions ();
5641 #include "gt-ada-utils.h"
5642 #include "gtype-ada.h"