1 /****************************************************************************
3 * GNAT COMPILER COMPONENTS *
7 * C Implementation File *
9 * Copyright (C) 1992-2009, Free Software Foundation, Inc. *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
24 ****************************************************************************/
28 #include "coretypes.h"
40 #include "langhooks.h"
41 #include "pointer-set.h"
43 #include "tree-dump.h"
44 #include "tree-inline.h"
45 #include "tree-iterator.h"
62 #ifndef MAX_BITS_PER_WORD
63 #define MAX_BITS_PER_WORD BITS_PER_WORD
66 /* If nonzero, pretend we are allocating at global level. */
69 /* The default alignment of "double" floating-point types, i.e. floating
70 point types whose size is equal to 64 bits, or 0 if this alignment is
71 not specifically capped. */
72 int double_float_alignment;
74 /* The default alignment of "double" or larger scalar types, i.e. scalar
75 types whose size is greater or equal to 64 bits, or 0 if this alignment
76 is not specifically capped. */
77 int double_scalar_alignment;
79 /* Tree nodes for the various types and decls we create. */
80 tree gnat_std_decls[(int) ADT_LAST];
82 /* Functions to call for each of the possible raise reasons. */
83 tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
85 /* Forward declarations for handlers of attributes. */
86 static tree handle_const_attribute (tree *, tree, tree, int, bool *);
87 static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
88 static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
89 static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
90 static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
91 static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
92 static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
93 static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
94 static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
95 static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
96 static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);
98 /* Fake handler for attributes we don't properly support, typically because
99 they'd require dragging a lot of the common-c front-end circuitry. */
100 static tree fake_attribute_handler (tree *, tree, tree, int, bool *);
102 /* Table of machine-independent internal attributes for Ada. We support
103 this minimal set of attributes to accommodate the needs of builtins. */
104 const struct attribute_spec gnat_internal_attribute_table[] =
106 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
107 { "const", 0, 0, true, false, false, handle_const_attribute },
108 { "nothrow", 0, 0, true, false, false, handle_nothrow_attribute },
109 { "pure", 0, 0, true, false, false, handle_pure_attribute },
110 { "no vops", 0, 0, true, false, false, handle_novops_attribute },
111 { "nonnull", 0, -1, false, true, true, handle_nonnull_attribute },
112 { "sentinel", 0, 1, false, true, true, handle_sentinel_attribute },
113 { "noreturn", 0, 0, true, false, false, handle_noreturn_attribute },
114 { "malloc", 0, 0, true, false, false, handle_malloc_attribute },
115 { "type generic", 0, 0, false, true, true, handle_type_generic_attribute },
117 { "vector_size", 1, 1, false, true, false, handle_vector_size_attribute },
118 { "vector_type", 0, 0, false, true, false, handle_vector_type_attribute },
119 { "may_alias", 0, 0, false, true, false, NULL },
121 /* ??? format and format_arg are heavy and not supported, which actually
122 prevents support for stdio builtins, which we however declare as part
123 of the common builtins.def contents. */
124 { "format", 3, 3, false, true, true, fake_attribute_handler },
125 { "format_arg", 1, 1, false, true, true, fake_attribute_handler },
127 { NULL, 0, 0, false, false, false, NULL }
130 /* Associates a GNAT tree node to a GCC tree node. It is used in
131 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
132 of `save_gnu_tree' for more info. */
133 static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
135 #define GET_GNU_TREE(GNAT_ENTITY) \
136 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
138 #define SET_GNU_TREE(GNAT_ENTITY,VAL) \
139 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
141 #define PRESENT_GNU_TREE(GNAT_ENTITY) \
142 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
144 /* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
145 static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;
147 #define GET_DUMMY_NODE(GNAT_ENTITY) \
148 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
150 #define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
151 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
153 #define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
154 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
156 /* This variable keeps a table for types for each precision so that we only
157 allocate each of them once. Signed and unsigned types are kept separate.
159 Note that these types are only used when fold-const requests something
160 special. Perhaps we should NOT share these types; we'll see how it
162 static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
164 /* Likewise for float types, but record these by mode. */
165 static GTY(()) tree float_types[NUM_MACHINE_MODES];
167 /* For each binding contour we allocate a binding_level structure to indicate
168 the binding depth. */
170 struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
171 /* The binding level containing this one (the enclosing binding level). */
172 struct gnat_binding_level *chain;
173 /* The BLOCK node for this level. */
175 /* If nonzero, the setjmp buffer that needs to be updated for any
176 variable-sized definition within this context. */
180 /* The binding level currently in effect. */
181 static GTY(()) struct gnat_binding_level *current_binding_level;
183 /* A chain of gnat_binding_level structures awaiting reuse. */
184 static GTY((deletable)) struct gnat_binding_level *free_binding_level;
186 /* An array of global declarations. */
187 static GTY(()) VEC(tree,gc) *global_decls;
189 /* An array of builtin function declarations. */
190 static GTY(()) VEC(tree,gc) *builtin_decls;
192 /* An array of global renaming pointers. */
193 static GTY(()) VEC(tree,gc) *global_renaming_pointers;
195 /* A chain of unused BLOCK nodes. */
196 static GTY((deletable)) tree free_block_chain;
198 static tree merge_sizes (tree, tree, tree, bool, bool);
199 static tree compute_related_constant (tree, tree);
200 static tree split_plus (tree, tree *);
201 static tree float_type_for_precision (int, enum machine_mode);
202 static tree convert_to_fat_pointer (tree, tree);
203 static tree convert_to_thin_pointer (tree, tree);
204 static tree make_descriptor_field (const char *,tree, tree, tree);
205 static bool potential_alignment_gap (tree, tree, tree);
207 /* Initialize the association of GNAT nodes to GCC trees. */
210 init_gnat_to_gnu (void)
212 associate_gnat_to_gnu
213 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
216 /* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
217 which is to be associated with GNAT_ENTITY. Such GCC tree node is always
218 a ..._DECL node. If NO_CHECK is true, the latter check is suppressed.
220 If GNU_DECL is zero, a previous association is to be reset. */
223 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
225 /* Check that GNAT_ENTITY is not already defined and that it is being set
226 to something which is a decl. Raise gigi 401 if not. Usually, this
227 means GNAT_ENTITY is defined twice, but occasionally is due to some
229 gcc_assert (!(gnu_decl
230 && (PRESENT_GNU_TREE (gnat_entity)
231 || (!no_check && !DECL_P (gnu_decl)))));
233 SET_GNU_TREE (gnat_entity, gnu_decl);
236 /* GNAT_ENTITY is a GNAT tree node for a defining identifier.
237 Return the ..._DECL node that was associated with it. If there is no tree
238 node associated with GNAT_ENTITY, abort.
240 In some cases, such as delayed elaboration or expressions that need to
241 be elaborated only once, GNAT_ENTITY is really not an entity. */
244 get_gnu_tree (Entity_Id gnat_entity)
246 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
247 return GET_GNU_TREE (gnat_entity);
250 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
253 present_gnu_tree (Entity_Id gnat_entity)
255 return PRESENT_GNU_TREE (gnat_entity);
258 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
261 init_dummy_type (void)
264 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
267 /* Make a dummy type corresponding to GNAT_TYPE. */
270 make_dummy_type (Entity_Id gnat_type)
272 Entity_Id gnat_underlying = Gigi_Equivalent_Type (gnat_type);
275 /* If there is an equivalent type, get its underlying type. */
276 if (Present (gnat_underlying))
277 gnat_underlying = Underlying_Type (gnat_underlying);
279 /* If there was no equivalent type (can only happen when just annotating
280 types) or underlying type, go back to the original type. */
281 if (No (gnat_underlying))
282 gnat_underlying = gnat_type;
284 /* If it there already a dummy type, use that one. Else make one. */
285 if (PRESENT_DUMMY_NODE (gnat_underlying))
286 return GET_DUMMY_NODE (gnat_underlying);
288 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
290 gnu_type = make_node (Is_Record_Type (gnat_underlying)
291 ? tree_code_for_record_type (gnat_underlying)
293 TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
294 TYPE_DUMMY_P (gnu_type) = 1;
295 TYPE_STUB_DECL (gnu_type)
296 = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
297 if (AGGREGATE_TYPE_P (gnu_type))
298 TYPE_BY_REFERENCE_P (gnu_type) = Is_By_Reference_Type (gnat_type);
300 SET_DUMMY_NODE (gnat_underlying, gnu_type);
305 /* Return nonzero if we are currently in the global binding level. */
308 global_bindings_p (void)
310 return ((force_global || !current_function_decl) ? -1 : 0);
313 /* Enter a new binding level. */
316 gnat_pushlevel (void)
318 struct gnat_binding_level *newlevel = NULL;
320 /* Reuse a struct for this binding level, if there is one. */
321 if (free_binding_level)
323 newlevel = free_binding_level;
324 free_binding_level = free_binding_level->chain;
328 = (struct gnat_binding_level *)
329 ggc_alloc (sizeof (struct gnat_binding_level));
331 /* Use a free BLOCK, if any; otherwise, allocate one. */
332 if (free_block_chain)
334 newlevel->block = free_block_chain;
335 free_block_chain = BLOCK_CHAIN (free_block_chain);
336 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
339 newlevel->block = make_node (BLOCK);
341 /* Point the BLOCK we just made to its parent. */
342 if (current_binding_level)
343 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
345 BLOCK_VARS (newlevel->block) = BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
346 TREE_USED (newlevel->block) = 1;
348 /* Add this level to the front of the chain (stack) of levels that are
350 newlevel->chain = current_binding_level;
351 newlevel->jmpbuf_decl = NULL_TREE;
352 current_binding_level = newlevel;
355 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
356 and point FNDECL to this BLOCK. */
359 set_current_block_context (tree fndecl)
361 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
362 DECL_INITIAL (fndecl) = current_binding_level->block;
365 /* Set the jmpbuf_decl for the current binding level to DECL. */
368 set_block_jmpbuf_decl (tree decl)
370 current_binding_level->jmpbuf_decl = decl;
373 /* Get the jmpbuf_decl, if any, for the current binding level. */
376 get_block_jmpbuf_decl (void)
378 return current_binding_level->jmpbuf_decl;
381 /* Exit a binding level. Set any BLOCK into the current code group. */
386 struct gnat_binding_level *level = current_binding_level;
387 tree block = level->block;
389 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
390 BLOCK_SUBBLOCKS (block) = nreverse (BLOCK_SUBBLOCKS (block));
392 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
393 are no variables free the block and merge its subblocks into those of its
394 parent block. Otherwise, add it to the list of its parent. */
395 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
397 else if (BLOCK_VARS (block) == NULL_TREE)
399 BLOCK_SUBBLOCKS (level->chain->block)
400 = chainon (BLOCK_SUBBLOCKS (block),
401 BLOCK_SUBBLOCKS (level->chain->block));
402 BLOCK_CHAIN (block) = free_block_chain;
403 free_block_chain = block;
407 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
408 BLOCK_SUBBLOCKS (level->chain->block) = block;
409 TREE_USED (block) = 1;
410 set_block_for_group (block);
413 /* Free this binding structure. */
414 current_binding_level = level->chain;
415 level->chain = free_binding_level;
416 free_binding_level = level;
420 /* Records a ..._DECL node DECL as belonging to the current lexical scope
421 and uses GNAT_NODE for location information and propagating flags. */
424 gnat_pushdecl (tree decl, Node_Id gnat_node)
426 /* If this decl is public external or at toplevel, there is no context.
427 But PARM_DECLs always go in the level of its function. */
428 if (TREE_CODE (decl) != PARM_DECL
429 && ((DECL_EXTERNAL (decl) && TREE_PUBLIC (decl))
430 || global_bindings_p ()))
431 DECL_CONTEXT (decl) = 0;
434 DECL_CONTEXT (decl) = current_function_decl;
436 /* Functions imported in another function are not really nested.
437 For really nested functions mark them initially as needing
438 a static chain for uses of that flag before unnesting;
439 lower_nested_functions will then recompute it. */
440 if (TREE_CODE (decl) == FUNCTION_DECL && !TREE_PUBLIC (decl))
441 DECL_STATIC_CHAIN (decl) = 1;
444 TREE_NO_WARNING (decl) = (gnat_node == Empty || Warnings_Off (gnat_node));
446 /* Set the location of DECL and emit a declaration for it. */
447 if (Present (gnat_node))
448 Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
449 add_decl_expr (decl, gnat_node);
451 /* Put the declaration on the list. The list of declarations is in reverse
452 order. The list will be reversed later. Put global variables in the
453 globals list and builtin functions in a dedicated list to speed up
454 further lookups. Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into
455 the list, as they will cause trouble with the debugger and aren't needed
457 if (TREE_CODE (decl) != TYPE_DECL
458 || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
460 if (global_bindings_p ())
462 VEC_safe_push (tree, gc, global_decls, decl);
464 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
465 VEC_safe_push (tree, gc, builtin_decls, decl);
469 TREE_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
470 BLOCK_VARS (current_binding_level->block) = decl;
474 /* For the declaration of a type, set its name if it either is not already
475 set or if the previous type name was not derived from a source name.
476 We'd rather have the type named with a real name and all the pointer
477 types to the same object have the same POINTER_TYPE node. Code in the
478 equivalent function of c-decl.c makes a copy of the type node here, but
479 that may cause us trouble with incomplete types. We make an exception
480 for fat pointer types because the compiler automatically builds them
481 for unconstrained array types and the debugger uses them to represent
482 both these and pointers to these. */
483 if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
485 tree t = TREE_TYPE (decl);
487 if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
489 else if (TYPE_IS_FAT_POINTER_P (t))
491 tree tt = build_variant_type_copy (t);
492 TYPE_NAME (tt) = decl;
493 TREE_USED (tt) = TREE_USED (t);
494 TREE_TYPE (decl) = tt;
495 if (DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
496 DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
498 DECL_ORIGINAL_TYPE (decl) = t;
500 DECL_ARTIFICIAL (decl) = 0;
502 else if (DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
507 /* Propagate the name to all the variants. This is needed for
508 the type qualifiers machinery to work properly. */
510 for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
511 TYPE_NAME (t) = decl;
515 /* Do little here. Set up the standard declarations later after the
516 front end has been run. */
519 gnat_init_decl_processing (void)
521 /* Make the binding_level structure for global names. */
522 current_function_decl = 0;
523 current_binding_level = 0;
524 free_binding_level = 0;
527 build_common_tree_nodes (true, true);
529 /* In Ada, we use a signed type for SIZETYPE. Use the signed type
530 corresponding to the width of Pmode. In most cases when ptr_mode
531 and Pmode differ, C will use the width of ptr_mode for SIZETYPE.
532 But we get far better code using the width of Pmode. */
533 size_type_node = gnat_type_for_mode (Pmode, 0);
534 set_sizetype (size_type_node);
536 /* In Ada, we use an unsigned 8-bit type for the default boolean type. */
537 boolean_type_node = make_unsigned_type (8);
538 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
539 SET_TYPE_RM_MAX_VALUE (boolean_type_node,
540 build_int_cst (boolean_type_node, 1));
541 SET_TYPE_RM_SIZE (boolean_type_node, bitsize_int (1));
543 build_common_tree_nodes_2 (0);
544 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
546 ptr_void_type_node = build_pointer_type (void_type_node);
549 /* Record TYPE as a builtin type for Ada. NAME is the name of the type. */
552 record_builtin_type (const char *name, tree type)
554 tree type_decl = build_decl (input_location,
555 TYPE_DECL, get_identifier (name), type);
557 gnat_pushdecl (type_decl, Empty);
559 if (debug_hooks->type_decl)
560 debug_hooks->type_decl (type_decl, false);
563 /* Given a record type RECORD_TYPE and a chain of FIELD_DECL nodes FIELDLIST,
564 finish constructing the record or union type. If REP_LEVEL is zero, this
565 record has no representation clause and so will be entirely laid out here.
566 If REP_LEVEL is one, this record has a representation clause and has been
567 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
568 this record is derived from a parent record and thus inherits its layout;
569 only make a pass on the fields to finalize them. If DO_NOT_FINALIZE is
570 true, the record type is expected to be modified afterwards so it will
571 not be sent to the back-end for finalization. */
574 finish_record_type (tree record_type, tree fieldlist, int rep_level,
575 bool do_not_finalize)
577 enum tree_code code = TREE_CODE (record_type);
578 tree name = TYPE_NAME (record_type);
579 tree ada_size = bitsize_zero_node;
580 tree size = bitsize_zero_node;
581 bool had_size = TYPE_SIZE (record_type) != 0;
582 bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
583 bool had_align = TYPE_ALIGN (record_type) != 0;
586 TYPE_FIELDS (record_type) = fieldlist;
588 /* Always attach the TYPE_STUB_DECL for a record type. It is required to
589 generate debug info and have a parallel type. */
590 if (name && TREE_CODE (name) == TYPE_DECL)
591 name = DECL_NAME (name);
592 TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);
594 /* Globally initialize the record first. If this is a rep'ed record,
595 that just means some initializations; otherwise, layout the record. */
598 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
599 SET_TYPE_MODE (record_type, BLKmode);
602 TYPE_SIZE_UNIT (record_type) = size_zero_node;
604 TYPE_SIZE (record_type) = bitsize_zero_node;
606 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
607 out just like a UNION_TYPE, since the size will be fixed. */
608 else if (code == QUAL_UNION_TYPE)
613 /* Ensure there isn't a size already set. There can be in an error
614 case where there is a rep clause but all fields have errors and
615 no longer have a position. */
616 TYPE_SIZE (record_type) = 0;
617 layout_type (record_type);
620 /* At this point, the position and size of each field is known. It was
621 either set before entry by a rep clause, or by laying out the type above.
623 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
624 to compute the Ada size; the GCC size and alignment (for rep'ed records
625 that are not padding types); and the mode (for rep'ed records). We also
626 clear the DECL_BIT_FIELD indication for the cases we know have not been
627 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
629 if (code == QUAL_UNION_TYPE)
630 fieldlist = nreverse (fieldlist);
632 for (field = fieldlist; field; field = TREE_CHAIN (field))
634 tree type = TREE_TYPE (field);
635 tree pos = bit_position (field);
636 tree this_size = DECL_SIZE (field);
639 if ((TREE_CODE (type) == RECORD_TYPE
640 || TREE_CODE (type) == UNION_TYPE
641 || TREE_CODE (type) == QUAL_UNION_TYPE)
642 && !TYPE_FAT_POINTER_P (type)
643 && !TYPE_CONTAINS_TEMPLATE_P (type)
644 && TYPE_ADA_SIZE (type))
645 this_ada_size = TYPE_ADA_SIZE (type);
647 this_ada_size = this_size;
649 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
650 if (DECL_BIT_FIELD (field)
651 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
653 unsigned int align = TYPE_ALIGN (type);
655 /* In the general case, type alignment is required. */
656 if (value_factor_p (pos, align))
658 /* The enclosing record type must be sufficiently aligned.
659 Otherwise, if no alignment was specified for it and it
660 has been laid out already, bump its alignment to the
661 desired one if this is compatible with its size. */
662 if (TYPE_ALIGN (record_type) >= align)
664 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
665 DECL_BIT_FIELD (field) = 0;
669 && value_factor_p (TYPE_SIZE (record_type), align))
671 TYPE_ALIGN (record_type) = align;
672 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
673 DECL_BIT_FIELD (field) = 0;
677 /* In the non-strict alignment case, only byte alignment is. */
678 if (!STRICT_ALIGNMENT
679 && DECL_BIT_FIELD (field)
680 && value_factor_p (pos, BITS_PER_UNIT))
681 DECL_BIT_FIELD (field) = 0;
684 /* If we still have DECL_BIT_FIELD set at this point, we know that the
685 field is technically not addressable. Except that it can actually
686 be addressed if it is BLKmode and happens to be properly aligned. */
687 if (DECL_BIT_FIELD (field)
688 && !(DECL_MODE (field) == BLKmode
689 && value_factor_p (pos, BITS_PER_UNIT)))
690 DECL_NONADDRESSABLE_P (field) = 1;
692 /* A type must be as aligned as its most aligned field that is not
693 a bit-field. But this is already enforced by layout_type. */
694 if (rep_level > 0 && !DECL_BIT_FIELD (field))
695 TYPE_ALIGN (record_type)
696 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
701 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
702 size = size_binop (MAX_EXPR, size, this_size);
705 case QUAL_UNION_TYPE:
707 = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
708 this_ada_size, ada_size);
709 size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
714 /* Since we know here that all fields are sorted in order of
715 increasing bit position, the size of the record is one
716 higher than the ending bit of the last field processed
717 unless we have a rep clause, since in that case we might
718 have a field outside a QUAL_UNION_TYPE that has a higher ending
719 position. So use a MAX in that case. Also, if this field is a
720 QUAL_UNION_TYPE, we need to take into account the previous size in
721 the case of empty variants. */
723 = merge_sizes (ada_size, pos, this_ada_size,
724 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
726 = merge_sizes (size, pos, this_size,
727 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
735 if (code == QUAL_UNION_TYPE)
736 nreverse (fieldlist);
740 /* If this is a padding record, we never want to make the size smaller
741 than what was specified in it, if any. */
742 if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
743 size = TYPE_SIZE (record_type);
745 /* Now set any of the values we've just computed that apply. */
746 if (!TYPE_FAT_POINTER_P (record_type)
747 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
748 SET_TYPE_ADA_SIZE (record_type, ada_size);
752 tree size_unit = had_size_unit
753 ? TYPE_SIZE_UNIT (record_type)
755 size_binop (CEIL_DIV_EXPR, size,
757 unsigned int align = TYPE_ALIGN (record_type);
759 TYPE_SIZE (record_type) = variable_size (round_up (size, align));
760 TYPE_SIZE_UNIT (record_type)
761 = variable_size (round_up (size_unit, align / BITS_PER_UNIT));
763 compute_record_mode (record_type);
767 if (!do_not_finalize)
768 rest_of_record_type_compilation (record_type);
771 /* Wrap up compilation of RECORD_TYPE, i.e. most notably output all
772 the debug information associated with it. It need not be invoked
773 directly in most cases since finish_record_type takes care of doing
774 so, unless explicitly requested not to through DO_NOT_FINALIZE. */
777 rest_of_record_type_compilation (tree record_type)
779 tree fieldlist = TYPE_FIELDS (record_type);
781 enum tree_code code = TREE_CODE (record_type);
782 bool var_size = false;
784 for (field = fieldlist; field; field = TREE_CHAIN (field))
786 /* We need to make an XVE/XVU record if any field has variable size,
787 whether or not the record does. For example, if we have a union,
788 it may be that all fields, rounded up to the alignment, have the
789 same size, in which case we'll use that size. But the debug
790 output routines (except Dwarf2) won't be able to output the fields,
791 so we need to make the special record. */
792 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
793 /* If a field has a non-constant qualifier, the record will have
794 variable size too. */
795 || (code == QUAL_UNION_TYPE
796 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
803 /* If this record is of variable size, rename it so that the
804 debugger knows it is and make a new, parallel, record
805 that tells the debugger how the record is laid out. See
806 exp_dbug.ads. But don't do this for records that are padding
807 since they confuse GDB. */
808 if (var_size && !TYPE_IS_PADDING_P (record_type))
811 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
812 ? UNION_TYPE : TREE_CODE (record_type));
813 tree orig_name = TYPE_NAME (record_type), new_name;
814 tree last_pos = bitsize_zero_node;
815 tree old_field, prev_old_field = NULL_TREE;
817 if (TREE_CODE (orig_name) == TYPE_DECL)
818 orig_name = DECL_NAME (orig_name);
821 = concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
823 TYPE_NAME (new_record_type) = new_name;
824 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
825 TYPE_STUB_DECL (new_record_type)
826 = create_type_stub_decl (new_name, new_record_type);
827 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
828 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
829 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
830 TYPE_SIZE_UNIT (new_record_type)
831 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
833 add_parallel_type (TYPE_STUB_DECL (record_type), new_record_type);
835 /* Now scan all the fields, replacing each field with a new
836 field corresponding to the new encoding. */
837 for (old_field = TYPE_FIELDS (record_type); old_field;
838 old_field = TREE_CHAIN (old_field))
840 tree field_type = TREE_TYPE (old_field);
841 tree field_name = DECL_NAME (old_field);
843 tree curpos = bit_position (old_field);
845 unsigned int align = 0;
848 /* See how the position was modified from the last position.
850 There are two basic cases we support: a value was added
851 to the last position or the last position was rounded to
852 a boundary and they something was added. Check for the
853 first case first. If not, see if there is any evidence
854 of rounding. If so, round the last position and try
857 If this is a union, the position can be taken as zero. */
859 /* Some computations depend on the shape of the position expression,
860 so strip conversions to make sure it's exposed. */
861 curpos = remove_conversions (curpos, true);
863 if (TREE_CODE (new_record_type) == UNION_TYPE)
864 pos = bitsize_zero_node, align = 0;
866 pos = compute_related_constant (curpos, last_pos);
868 if (!pos && TREE_CODE (curpos) == MULT_EXPR
869 && host_integerp (TREE_OPERAND (curpos, 1), 1))
871 tree offset = TREE_OPERAND (curpos, 0);
872 align = tree_low_cst (TREE_OPERAND (curpos, 1), 1);
874 /* An offset which is a bitwise AND with a negative power of 2
875 means an alignment corresponding to this power of 2. */
876 offset = remove_conversions (offset, true);
877 if (TREE_CODE (offset) == BIT_AND_EXPR
878 && host_integerp (TREE_OPERAND (offset, 1), 0)
879 && tree_int_cst_sgn (TREE_OPERAND (offset, 1)) < 0)
882 = - tree_low_cst (TREE_OPERAND (offset, 1), 0);
883 if (exact_log2 (pow) > 0)
887 pos = compute_related_constant (curpos,
888 round_up (last_pos, align));
890 else if (!pos && TREE_CODE (curpos) == PLUS_EXPR
891 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
892 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
893 && host_integerp (TREE_OPERAND
894 (TREE_OPERAND (curpos, 0), 1),
899 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
900 pos = compute_related_constant (curpos,
901 round_up (last_pos, align));
903 else if (potential_alignment_gap (prev_old_field, old_field,
906 align = TYPE_ALIGN (field_type);
907 pos = compute_related_constant (curpos,
908 round_up (last_pos, align));
911 /* If we can't compute a position, set it to zero.
913 ??? We really should abort here, but it's too much work
914 to get this correct for all cases. */
917 pos = bitsize_zero_node;
919 /* See if this type is variable-sized and make a pointer type
920 and indicate the indirection if so. Beware that the debug
921 back-end may adjust the position computed above according
922 to the alignment of the field type, i.e. the pointer type
923 in this case, if we don't preventively counter that. */
924 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
926 field_type = build_pointer_type (field_type);
927 if (align != 0 && TYPE_ALIGN (field_type) > align)
929 field_type = copy_node (field_type);
930 TYPE_ALIGN (field_type) = align;
935 /* Make a new field name, if necessary. */
936 if (var || align != 0)
941 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
942 align / BITS_PER_UNIT);
944 strcpy (suffix, "XVL");
946 field_name = concat_name (field_name, suffix);
949 new_field = create_field_decl (field_name, field_type,
951 DECL_SIZE (old_field), pos, 0);
952 TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
953 TYPE_FIELDS (new_record_type) = new_field;
955 /* If old_field is a QUAL_UNION_TYPE, take its size as being
956 zero. The only time it's not the last field of the record
957 is when there are other components at fixed positions after
958 it (meaning there was a rep clause for every field) and we
959 want to be able to encode them. */
960 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
961 (TREE_CODE (TREE_TYPE (old_field))
964 : DECL_SIZE (old_field));
965 prev_old_field = old_field;
968 TYPE_FIELDS (new_record_type)
969 = nreverse (TYPE_FIELDS (new_record_type));
971 rest_of_type_decl_compilation (TYPE_STUB_DECL (new_record_type));
974 rest_of_type_decl_compilation (TYPE_STUB_DECL (record_type));
977 /* Append PARALLEL_TYPE on the chain of parallel types for decl. */
980 add_parallel_type (tree decl, tree parallel_type)
984 while (DECL_PARALLEL_TYPE (d))
985 d = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (d));
987 SET_DECL_PARALLEL_TYPE (d, parallel_type);
990 /* Return the parallel type associated to a type, if any. */
993 get_parallel_type (tree type)
995 if (TYPE_STUB_DECL (type))
996 return DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type));
1001 /* Utility function of above to merge LAST_SIZE, the previous size of a record
1002 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
1003 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
1004 replace a value of zero with the old size. If HAS_REP is true, we take the
1005 MAX of the end position of this field with LAST_SIZE. In all other cases,
1006 we use FIRST_BIT plus SIZE. Return an expression for the size. */
1009 merge_sizes (tree last_size, tree first_bit, tree size, bool special,
1012 tree type = TREE_TYPE (last_size);
1015 if (!special || TREE_CODE (size) != COND_EXPR)
1017 new_size = size_binop (PLUS_EXPR, first_bit, size);
1019 new_size = size_binop (MAX_EXPR, last_size, new_size);
1023 new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
1024 integer_zerop (TREE_OPERAND (size, 1))
1025 ? last_size : merge_sizes (last_size, first_bit,
1026 TREE_OPERAND (size, 1),
1028 integer_zerop (TREE_OPERAND (size, 2))
1029 ? last_size : merge_sizes (last_size, first_bit,
1030 TREE_OPERAND (size, 2),
1033 /* We don't need any NON_VALUE_EXPRs and they can confuse us (especially
1034 when fed through substitute_in_expr) into thinking that a constant
1035 size is not constant. */
1036 while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
1037 new_size = TREE_OPERAND (new_size, 0);
1042 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1043 related by the addition of a constant. Return that constant if so. */
1046 compute_related_constant (tree op0, tree op1)
1048 tree op0_var, op1_var;
1049 tree op0_con = split_plus (op0, &op0_var);
1050 tree op1_con = split_plus (op1, &op1_var);
1051 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1053 if (operand_equal_p (op0_var, op1_var, 0))
1055 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1061 /* Utility function of above to split a tree OP which may be a sum, into a
1062 constant part, which is returned, and a variable part, which is stored
1063 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1067 split_plus (tree in, tree *pvar)
1069 /* Strip NOPS in order to ease the tree traversal and maximize the
1070 potential for constant or plus/minus discovery. We need to be careful
1071 to always return and set *pvar to bitsizetype trees, but it's worth
1075 *pvar = convert (bitsizetype, in);
1077 if (TREE_CODE (in) == INTEGER_CST)
1079 *pvar = bitsize_zero_node;
1080 return convert (bitsizetype, in);
1082 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1084 tree lhs_var, rhs_var;
1085 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1086 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
1088 if (lhs_var == TREE_OPERAND (in, 0)
1089 && rhs_var == TREE_OPERAND (in, 1))
1090 return bitsize_zero_node;
1092 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1093 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
1096 return bitsize_zero_node;
1099 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1100 subprogram. If it is void_type_node, then we are dealing with a procedure,
1101 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1102 PARM_DECL nodes that are the subprogram arguments. CICO_LIST is the
1103 copy-in/copy-out list to be stored into TYPE_CICO_LIST.
1104 RETURNS_UNCONSTRAINED is true if the function returns an unconstrained
1105 object. RETURNS_BY_REF is true if the function returns by reference.
1106 RETURNS_BY_TARGET_PTR is true if the function is to be passed (as its
1107 first parameter) the address of the place to copy its result. */
1110 create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
1111 bool returns_unconstrained, bool returns_by_ref,
1112 bool returns_by_target_ptr)
1114 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1115 the subprogram formal parameters. This list is generated by traversing the
1116 input list of PARM_DECL nodes. */
1117 tree param_type_list = NULL;
1121 for (param_decl = param_decl_list; param_decl;
1122 param_decl = TREE_CHAIN (param_decl))
1123 param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
1126 /* The list of the function parameter types has to be terminated by the void
1127 type to signal to the back-end that we are not dealing with a variable
1128 parameter subprogram, but that the subprogram has a fixed number of
1130 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1132 /* The list of argument types has been created in reverse
1134 param_type_list = nreverse (param_type_list);
1136 type = build_function_type (return_type, param_type_list);
1138 /* TYPE may have been shared since GCC hashes types. If it has a CICO_LIST
1139 or the new type should, make a copy of TYPE. Likewise for
1140 RETURNS_UNCONSTRAINED and RETURNS_BY_REF. */
1141 if (TYPE_CI_CO_LIST (type) || cico_list
1142 || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
1143 || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref
1144 || TYPE_RETURNS_BY_TARGET_PTR_P (type) != returns_by_target_ptr)
1145 type = copy_type (type);
1147 TYPE_CI_CO_LIST (type) = cico_list;
1148 TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
1149 TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
1150 TYPE_RETURNS_BY_TARGET_PTR_P (type) = returns_by_target_ptr;
1154 /* Return a copy of TYPE but safe to modify in any way. */
1157 copy_type (tree type)
1159 tree new_type = copy_node (type);
1161 /* copy_node clears this field instead of copying it, because it is
1162 aliased with TREE_CHAIN. */
1163 TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);
1165 TYPE_POINTER_TO (new_type) = 0;
1166 TYPE_REFERENCE_TO (new_type) = 0;
1167 TYPE_MAIN_VARIANT (new_type) = new_type;
1168 TYPE_NEXT_VARIANT (new_type) = 0;
1173 /* Return a subtype of sizetype with range MIN to MAX and whose
1174 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
1175 of the associated TYPE_DECL. */
1178 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
1180 /* First build a type for the desired range. */
1181 tree type = build_index_2_type (min, max);
1183 /* If this type has the TYPE_INDEX_TYPE we want, return it. */
1184 if (TYPE_INDEX_TYPE (type) == index)
1187 /* Otherwise, if TYPE_INDEX_TYPE is set, make a copy. Note that we have
1188 no way of sharing these types, but that's only a small hole. */
1189 if (TYPE_INDEX_TYPE (type))
1190 type = copy_type (type);
1192 SET_TYPE_INDEX_TYPE (type, index);
1193 create_type_decl (NULL_TREE, type, NULL, true, false, gnat_node);
1198 /* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
1199 sizetype is used. */
1202 create_range_type (tree type, tree min, tree max)
1206 if (type == NULL_TREE)
1209 /* First build a type with the base range. */
1211 = build_range_type (type, TYPE_MIN_VALUE (type), TYPE_MAX_VALUE (type));
1213 min = convert (type, min);
1214 max = convert (type, max);
1216 /* If this type has the TYPE_RM_{MIN,MAX}_VALUE we want, return it. */
1217 if (TYPE_RM_MIN_VALUE (range_type)
1218 && TYPE_RM_MAX_VALUE (range_type)
1219 && operand_equal_p (TYPE_RM_MIN_VALUE (range_type), min, 0)
1220 && operand_equal_p (TYPE_RM_MAX_VALUE (range_type), max, 0))
1223 /* Otherwise, if TYPE_RM_{MIN,MAX}_VALUE is set, make a copy. */
1224 if (TYPE_RM_MIN_VALUE (range_type) || TYPE_RM_MAX_VALUE (range_type))
1225 range_type = copy_type (range_type);
1227 /* Then set the actual range. */
1228 SET_TYPE_RM_MIN_VALUE (range_type, min);
1229 SET_TYPE_RM_MAX_VALUE (range_type, max);
1234 /* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of a type.
1235 TYPE_NAME gives the name of the type and TYPE is a ..._TYPE node giving
1239 create_type_stub_decl (tree type_name, tree type)
1241 /* Using a named TYPE_DECL ensures that a type name marker is emitted in
1242 STABS while setting DECL_ARTIFICIAL ensures that no DW_TAG_typedef is
1243 emitted in DWARF. */
1244 tree type_decl = build_decl (input_location,
1245 TYPE_DECL, type_name, type);
1246 DECL_ARTIFICIAL (type_decl) = 1;
1250 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type and TYPE
1251 is a ..._TYPE node giving its data type. ARTIFICIAL_P is true if this
1252 is a declaration that was generated by the compiler. DEBUG_INFO_P is
1253 true if we need to write debug information about this type. GNAT_NODE
1254 is used for the position of the decl. */
1257 create_type_decl (tree type_name, tree type, struct attrib *attr_list,
1258 bool artificial_p, bool debug_info_p, Node_Id gnat_node)
1260 enum tree_code code = TREE_CODE (type);
1261 bool named = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
1264 /* Only the builtin TYPE_STUB_DECL should be used for dummy types. */
1265 gcc_assert (!TYPE_IS_DUMMY_P (type));
1267 /* If the type hasn't been named yet, we're naming it; preserve an existing
1268 TYPE_STUB_DECL that has been attached to it for some purpose. */
1269 if (!named && TYPE_STUB_DECL (type))
1271 type_decl = TYPE_STUB_DECL (type);
1272 DECL_NAME (type_decl) = type_name;
1275 type_decl = build_decl (input_location,
1276 TYPE_DECL, type_name, type);
1278 DECL_ARTIFICIAL (type_decl) = artificial_p;
1279 gnat_pushdecl (type_decl, gnat_node);
1280 process_attributes (type_decl, attr_list);
1282 /* If we're naming the type, equate the TYPE_STUB_DECL to the name.
1283 This causes the name to be also viewed as a "tag" by the debug
1284 back-end, with the advantage that no DW_TAG_typedef is emitted
1285 for artificial "tagged" types in DWARF. */
1287 TYPE_STUB_DECL (type) = type_decl;
1289 /* Pass the type declaration to the debug back-end unless this is an
1290 UNCONSTRAINED_ARRAY_TYPE that the back-end does not support, or a
1291 type for which debugging information was not requested, or else an
1292 ENUMERAL_TYPE or RECORD_TYPE (except for fat pointers) which are
1293 handled separately. And do not pass dummy types either. */
1294 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
1295 DECL_IGNORED_P (type_decl) = 1;
1296 else if (code != ENUMERAL_TYPE
1297 && (code != RECORD_TYPE || TYPE_FAT_POINTER_P (type))
1298 && !((code == POINTER_TYPE || code == REFERENCE_TYPE)
1299 && TYPE_IS_DUMMY_P (TREE_TYPE (type)))
1300 && !(code == RECORD_TYPE
1302 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))))))
1303 rest_of_type_decl_compilation (type_decl);
1308 /* Return a VAR_DECL or CONST_DECL node.
1310 VAR_NAME gives the name of the variable. ASM_NAME is its assembler name
1311 (if provided). TYPE is its data type (a GCC ..._TYPE node). VAR_INIT is
1312 the GCC tree for an optional initial expression; NULL_TREE if none.
1314 CONST_FLAG is true if this variable is constant, in which case we might
1315 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
1317 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
1318 definition to be made visible outside of the current compilation unit, for
1319 instance variable definitions in a package specification.
1321 EXTERN_FLAG is true when processing an external variable declaration (as
1322 opposed to a definition: no storage is to be allocated for the variable).
1324 STATIC_FLAG is only relevant when not at top level. In that case
1325 it indicates whether to always allocate storage to the variable.
1327 GNAT_NODE is used for the position of the decl. */
1330 create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
1331 bool const_flag, bool public_flag, bool extern_flag,
1332 bool static_flag, bool const_decl_allowed_p,
1333 struct attrib *attr_list, Node_Id gnat_node)
1337 && gnat_types_compatible_p (type, TREE_TYPE (var_init))
1338 && (global_bindings_p () || static_flag
1339 ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init)) != 0
1340 : TREE_CONSTANT (var_init)));
1342 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
1343 case the initializer may be used in-lieu of the DECL node (as done in
1344 Identifier_to_gnu). This is useful to prevent the need of elaboration
1345 code when an identifier for which such a decl is made is in turn used as
1346 an initializer. We used to rely on CONST vs VAR_DECL for this purpose,
1347 but extra constraints apply to this choice (see below) and are not
1348 relevant to the distinction we wish to make. */
1349 bool constant_p = const_flag && init_const;
1351 /* The actual DECL node. CONST_DECL was initially intended for enumerals
1352 and may be used for scalars in general but not for aggregates. */
1354 = build_decl (input_location,
1355 (constant_p && const_decl_allowed_p
1356 && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
1359 /* If this is external, throw away any initializations (they will be done
1360 elsewhere) unless this is a constant for which we would like to remain
1361 able to get the initializer. If we are defining a global here, leave a
1362 constant initialization and save any variable elaborations for the
1363 elaboration routine. If we are just annotating types, throw away the
1364 initialization if it isn't a constant. */
1365 if ((extern_flag && !constant_p)
1366 || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
1367 var_init = NULL_TREE;
1369 /* At the global level, an initializer requiring code to be generated
1370 produces elaboration statements. Check that such statements are allowed,
1371 that is, not violating a No_Elaboration_Code restriction. */
1372 if (global_bindings_p () && var_init != 0 && ! init_const)
1373 Check_Elaboration_Code_Allowed (gnat_node);
1375 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
1376 try to fiddle with DECL_COMMON. However, on platforms that don't
1377 support global BSS sections, uninitialized global variables would
1378 go in DATA instead, thus increasing the size of the executable. */
1380 && TREE_CODE (var_decl) == VAR_DECL
1381 && !have_global_bss_p ())
1382 DECL_COMMON (var_decl) = 1;
1383 DECL_INITIAL (var_decl) = var_init;
1384 TREE_READONLY (var_decl) = const_flag;
1385 DECL_EXTERNAL (var_decl) = extern_flag;
1386 TREE_PUBLIC (var_decl) = public_flag || extern_flag;
1387 TREE_CONSTANT (var_decl) = constant_p;
1388 TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
1389 = TYPE_VOLATILE (type);
1391 /* If it's public and not external, always allocate storage for it.
1392 At the global binding level we need to allocate static storage for the
1393 variable if and only if it's not external. If we are not at the top level
1394 we allocate automatic storage unless requested not to. */
1395 TREE_STATIC (var_decl)
1396 = !extern_flag && (public_flag || static_flag || global_bindings_p ());
1398 /* For an external constant whose initializer is not absolute, do not emit
1399 debug info. In DWARF this would mean a global relocation in a read-only
1400 section which runs afoul of the PE-COFF runtime relocation mechanism. */
1403 && initializer_constant_valid_p (var_init, TREE_TYPE (var_init))
1404 != null_pointer_node)
1405 DECL_IGNORED_P (var_decl) = 1;
1407 if (asm_name && VAR_OR_FUNCTION_DECL_P (var_decl))
1408 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
1410 process_attributes (var_decl, attr_list);
1412 /* Add this decl to the current binding level. */
1413 gnat_pushdecl (var_decl, gnat_node);
1415 if (TREE_SIDE_EFFECTS (var_decl))
1416 TREE_ADDRESSABLE (var_decl) = 1;
1418 if (TREE_CODE (var_decl) != CONST_DECL)
1420 if (global_bindings_p ())
1421 rest_of_decl_compilation (var_decl, true, 0);
1424 expand_decl (var_decl);
1429 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
1432 aggregate_type_contains_array_p (tree type)
1434 switch (TREE_CODE (type))
1438 case QUAL_UNION_TYPE:
1441 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1442 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
1443 && aggregate_type_contains_array_p (TREE_TYPE (field)))
1456 /* Return a FIELD_DECL node. FIELD_NAME is the field's name, FIELD_TYPE is
1457 its type and RECORD_TYPE is the type of the enclosing record. PACKED is
1458 1 if the enclosing record is packed, -1 if it has Component_Alignment of
1459 Storage_Unit. If SIZE is nonzero, it is the specified size of the field.
1460 If POS is nonzero, it is the bit position. If ADDRESSABLE is nonzero, it
1461 means we are allowed to take the address of the field; if it is negative,
1462 we should not make a bitfield, which is used by make_aligning_type. */
1465 create_field_decl (tree field_name, tree field_type, tree record_type,
1466 int packed, tree size, tree pos, int addressable)
1468 tree field_decl = build_decl (input_location,
1469 FIELD_DECL, field_name, field_type);
1471 DECL_CONTEXT (field_decl) = record_type;
1472 TREE_READONLY (field_decl) = TYPE_READONLY (field_type);
1474 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
1475 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
1476 Likewise for an aggregate without specified position that contains an
1477 array, because in this case slices of variable length of this array
1478 must be handled by GCC and variable-sized objects need to be aligned
1479 to at least a byte boundary. */
1480 if (packed && (TYPE_MODE (field_type) == BLKmode
1482 && AGGREGATE_TYPE_P (field_type)
1483 && aggregate_type_contains_array_p (field_type))))
1484 DECL_ALIGN (field_decl) = BITS_PER_UNIT;
1486 /* If a size is specified, use it. Otherwise, if the record type is packed
1487 compute a size to use, which may differ from the object's natural size.
1488 We always set a size in this case to trigger the checks for bitfield
1489 creation below, which is typically required when no position has been
1492 size = convert (bitsizetype, size);
1493 else if (packed == 1)
1495 size = rm_size (field_type);
1496 if (TYPE_MODE (field_type) == BLKmode)
1497 size = round_up (size, BITS_PER_UNIT);
1500 /* If we may, according to ADDRESSABLE, make a bitfield if a size is
1501 specified for two reasons: first if the size differs from the natural
1502 size. Second, if the alignment is insufficient. There are a number of
1503 ways the latter can be true.
1505 We never make a bitfield if the type of the field has a nonconstant size,
1506 because no such entity requiring bitfield operations should reach here.
1508 We do *preventively* make a bitfield when there might be the need for it
1509 but we don't have all the necessary information to decide, as is the case
1510 of a field with no specified position in a packed record.
1512 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
1513 in layout_decl or finish_record_type to clear the bit_field indication if
1514 it is in fact not needed. */
1515 if (addressable >= 0
1517 && TREE_CODE (size) == INTEGER_CST
1518 && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
1519 && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
1520 || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
1522 || (TYPE_ALIGN (record_type) != 0
1523 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
1525 DECL_BIT_FIELD (field_decl) = 1;
1526 DECL_SIZE (field_decl) = size;
1527 if (!packed && !pos)
1529 if (TYPE_ALIGN (record_type) != 0
1530 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))
1531 DECL_ALIGN (field_decl) = TYPE_ALIGN (record_type);
1533 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
1537 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
1539 /* Bump the alignment if need be, either for bitfield/packing purposes or
1540 to satisfy the type requirements if no such consideration applies. When
1541 we get the alignment from the type, indicate if this is from an explicit
1542 user request, which prevents stor-layout from lowering it later on. */
1544 unsigned int bit_align
1545 = (DECL_BIT_FIELD (field_decl) ? 1
1546 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT : 0);
1548 if (bit_align > DECL_ALIGN (field_decl))
1549 DECL_ALIGN (field_decl) = bit_align;
1550 else if (!bit_align && TYPE_ALIGN (field_type) > DECL_ALIGN (field_decl))
1552 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
1553 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (field_type);
1559 /* We need to pass in the alignment the DECL is known to have.
1560 This is the lowest-order bit set in POS, but no more than
1561 the alignment of the record, if one is specified. Note
1562 that an alignment of 0 is taken as infinite. */
1563 unsigned int known_align;
1565 if (host_integerp (pos, 1))
1566 known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
1568 known_align = BITS_PER_UNIT;
1570 if (TYPE_ALIGN (record_type)
1571 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
1572 known_align = TYPE_ALIGN (record_type);
1574 layout_decl (field_decl, known_align);
1575 SET_DECL_OFFSET_ALIGN (field_decl,
1576 host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
1578 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
1579 &DECL_FIELD_BIT_OFFSET (field_decl),
1580 DECL_OFFSET_ALIGN (field_decl), pos);
1583 /* In addition to what our caller says, claim the field is addressable if we
1584 know that its type is not suitable.
1586 The field may also be "technically" nonaddressable, meaning that even if
1587 we attempt to take the field's address we will actually get the address
1588 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
1589 value we have at this point is not accurate enough, so we don't account
1590 for this here and let finish_record_type decide. */
1591 if (!addressable && !type_for_nonaliased_component_p (field_type))
1594 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
1599 /* Return a PARM_DECL node. PARAM_NAME is the name of the parameter and
1600 PARAM_TYPE is its type. READONLY is true if the parameter is readonly
1601 (either an In parameter or an address of a pass-by-ref parameter). */
1604 create_param_decl (tree param_name, tree param_type, bool readonly)
1606 tree param_decl = build_decl (input_location,
1607 PARM_DECL, param_name, param_type);
1609 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
1610 can lead to various ABI violations. */
1611 if (targetm.calls.promote_prototypes (NULL_TREE)
1612 && INTEGRAL_TYPE_P (param_type)
1613 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
1615 /* We have to be careful about biased types here. Make a subtype
1616 of integer_type_node with the proper biasing. */
1617 if (TREE_CODE (param_type) == INTEGER_TYPE
1618 && TYPE_BIASED_REPRESENTATION_P (param_type))
1621 = make_unsigned_type (TYPE_PRECISION (integer_type_node));
1622 TREE_TYPE (subtype) = integer_type_node;
1623 TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
1624 SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (param_type));
1625 SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (param_type));
1626 param_type = subtype;
1629 param_type = integer_type_node;
1632 DECL_ARG_TYPE (param_decl) = param_type;
1633 TREE_READONLY (param_decl) = readonly;
1637 /* Given a DECL and ATTR_LIST, process the listed attributes. */
1640 process_attributes (tree decl, struct attrib *attr_list)
1642 for (; attr_list; attr_list = attr_list->next)
1643 switch (attr_list->type)
1645 case ATTR_MACHINE_ATTRIBUTE:
1646 decl_attributes (&decl, tree_cons (attr_list->name, attr_list->args,
1648 ATTR_FLAG_TYPE_IN_PLACE);
1651 case ATTR_LINK_ALIAS:
1652 if (! DECL_EXTERNAL (decl))
1654 TREE_STATIC (decl) = 1;
1655 assemble_alias (decl, attr_list->name);
1659 case ATTR_WEAK_EXTERNAL:
1661 declare_weak (decl);
1663 post_error ("?weak declarations not supported on this target",
1664 attr_list->error_point);
1667 case ATTR_LINK_SECTION:
1668 if (targetm.have_named_sections)
1670 DECL_SECTION_NAME (decl)
1671 = build_string (IDENTIFIER_LENGTH (attr_list->name),
1672 IDENTIFIER_POINTER (attr_list->name));
1673 DECL_COMMON (decl) = 0;
1676 post_error ("?section attributes are not supported for this target",
1677 attr_list->error_point);
1680 case ATTR_LINK_CONSTRUCTOR:
1681 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1682 TREE_USED (decl) = 1;
1685 case ATTR_LINK_DESTRUCTOR:
1686 DECL_STATIC_DESTRUCTOR (decl) = 1;
1687 TREE_USED (decl) = 1;
1690 case ATTR_THREAD_LOCAL_STORAGE:
1691 DECL_TLS_MODEL (decl) = decl_default_tls_model (decl);
1692 DECL_COMMON (decl) = 0;
1697 /* Record DECL as a global renaming pointer. */
1700 record_global_renaming_pointer (tree decl)
1702 gcc_assert (DECL_RENAMED_OBJECT (decl));
1703 VEC_safe_push (tree, gc, global_renaming_pointers, decl);
1706 /* Invalidate the global renaming pointers. */
1709 invalidate_global_renaming_pointers (void)
1714 for (i = 0; VEC_iterate(tree, global_renaming_pointers, i, iter); i++)
1715 SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);
1717 VEC_free (tree, gc, global_renaming_pointers);
1720 /* Return true if VALUE is a known to be a multiple of FACTOR, which must be
1724 value_factor_p (tree value, HOST_WIDE_INT factor)
1726 if (host_integerp (value, 1))
1727 return tree_low_cst (value, 1) % factor == 0;
1729 if (TREE_CODE (value) == MULT_EXPR)
1730 return (value_factor_p (TREE_OPERAND (value, 0), factor)
1731 || value_factor_p (TREE_OPERAND (value, 1), factor));
1736 /* Given 2 consecutive field decls PREV_FIELD and CURR_FIELD, return true
1737 unless we can prove these 2 fields are laid out in such a way that no gap
1738 exist between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
1739 is the distance in bits between the end of PREV_FIELD and the starting
1740 position of CURR_FIELD. It is ignored if null. */
1743 potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
1745 /* If this is the first field of the record, there cannot be any gap */
1749 /* If the previous field is a union type, then return False: The only
1750 time when such a field is not the last field of the record is when
1751 there are other components at fixed positions after it (meaning there
1752 was a rep clause for every field), in which case we don't want the
1753 alignment constraint to override them. */
1754 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
1757 /* If the distance between the end of prev_field and the beginning of
1758 curr_field is constant, then there is a gap if the value of this
1759 constant is not null. */
1760 if (offset && host_integerp (offset, 1))
1761 return !integer_zerop (offset);
1763 /* If the size and position of the previous field are constant,
1764 then check the sum of this size and position. There will be a gap
1765 iff it is not multiple of the current field alignment. */
1766 if (host_integerp (DECL_SIZE (prev_field), 1)
1767 && host_integerp (bit_position (prev_field), 1))
1768 return ((tree_low_cst (bit_position (prev_field), 1)
1769 + tree_low_cst (DECL_SIZE (prev_field), 1))
1770 % DECL_ALIGN (curr_field) != 0);
1772 /* If both the position and size of the previous field are multiples
1773 of the current field alignment, there cannot be any gap. */
1774 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
1775 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
1778 /* Fallback, return that there may be a potential gap */
1782 /* Returns a LABEL_DECL node for LABEL_NAME. */
1785 create_label_decl (tree label_name)
1787 tree label_decl = build_decl (input_location,
1788 LABEL_DECL, label_name, void_type_node);
1790 DECL_CONTEXT (label_decl) = current_function_decl;
1791 DECL_MODE (label_decl) = VOIDmode;
1792 DECL_SOURCE_LOCATION (label_decl) = input_location;
1797 /* Returns a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
1798 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1799 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1800 PARM_DECL nodes chained through the TREE_CHAIN field).
1802 INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
1803 appropriate fields in the FUNCTION_DECL. GNAT_NODE gives the location. */
1806 create_subprog_decl (tree subprog_name, tree asm_name,
1807 tree subprog_type, tree param_decl_list, bool inline_flag,
1808 bool public_flag, bool extern_flag,
1809 struct attrib *attr_list, Node_Id gnat_node)
1811 tree return_type = TREE_TYPE (subprog_type);
1812 tree subprog_decl = build_decl (input_location,
1813 FUNCTION_DECL, subprog_name, subprog_type);
1815 /* If this is a non-inline function nested inside an inlined external
1816 function, we cannot honor both requests without cloning the nested
1817 function in the current unit since it is private to the other unit.
1818 We could inline the nested function as well but it's probably better
1819 to err on the side of too little inlining. */
1821 && current_function_decl
1822 && DECL_DECLARED_INLINE_P (current_function_decl)
1823 && DECL_EXTERNAL (current_function_decl))
1824 DECL_DECLARED_INLINE_P (current_function_decl) = 0;
1826 DECL_EXTERNAL (subprog_decl) = extern_flag;
1827 TREE_PUBLIC (subprog_decl) = public_flag;
1828 TREE_STATIC (subprog_decl) = 1;
1829 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
1830 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
1831 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
1832 DECL_DECLARED_INLINE_P (subprog_decl) = inline_flag;
1833 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
1834 DECL_RESULT (subprog_decl) = build_decl (input_location,
1835 RESULT_DECL, 0, return_type);
1836 DECL_ARTIFICIAL (DECL_RESULT (subprog_decl)) = 1;
1837 DECL_IGNORED_P (DECL_RESULT (subprog_decl)) = 1;
1839 /* TREE_ADDRESSABLE is set on the result type to request the use of the
1840 target by-reference return mechanism. This is not supported all the
1841 way down to RTL expansion with GCC 4, which ICEs on temporary creation
1842 attempts with such a type and expects DECL_BY_REFERENCE to be set on
1843 the RESULT_DECL instead - see gnat_genericize for more details. */
1844 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (subprog_decl))))
1846 tree result_decl = DECL_RESULT (subprog_decl);
1848 TREE_ADDRESSABLE (TREE_TYPE (result_decl)) = 0;
1849 DECL_BY_REFERENCE (result_decl) = 1;
1854 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
1856 /* The expand_main_function circuitry expects "main_identifier_node" to
1857 designate the DECL_NAME of the 'main' entry point, in turn expected
1858 to be declared as the "main" function literally by default. Ada
1859 program entry points are typically declared with a different name
1860 within the binder generated file, exported as 'main' to satisfy the
1861 system expectations. Force main_identifier_node in this case. */
1862 if (asm_name == main_identifier_node)
1863 DECL_NAME (subprog_decl) = main_identifier_node;
1866 process_attributes (subprog_decl, attr_list);
1868 /* Add this decl to the current binding level. */
1869 gnat_pushdecl (subprog_decl, gnat_node);
1871 /* Output the assembler code and/or RTL for the declaration. */
1872 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
1874 return subprog_decl;
1877 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
1878 body. This routine needs to be invoked before processing the declarations
1879 appearing in the subprogram. */
1882 begin_subprog_body (tree subprog_decl)
1886 current_function_decl = subprog_decl;
1887 announce_function (subprog_decl);
1889 /* Enter a new binding level and show that all the parameters belong to
1892 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
1893 param_decl = TREE_CHAIN (param_decl))
1894 DECL_CONTEXT (param_decl) = subprog_decl;
1896 make_decl_rtl (subprog_decl);
1898 /* We handle pending sizes via the elaboration of types, so we don't need to
1899 save them. This causes them to be marked as part of the outer function
1900 and then discarded. */
1901 get_pending_sizes ();
1905 /* Helper for the genericization callback. Return a dereference of VAL
1906 if it is of a reference type. */
1909 convert_from_reference (tree val)
1911 tree value_type, ref;
1913 if (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE)
1916 value_type = TREE_TYPE (TREE_TYPE (val));
1917 ref = build1 (INDIRECT_REF, value_type, val);
1919 /* See if what we reference is CONST or VOLATILE, which requires
1920 looking into array types to get to the component type. */
1922 while (TREE_CODE (value_type) == ARRAY_TYPE)
1923 value_type = TREE_TYPE (value_type);
1926 = (TYPE_QUALS (value_type) & TYPE_QUAL_CONST);
1927 TREE_THIS_VOLATILE (ref)
1928 = (TYPE_QUALS (value_type) & TYPE_QUAL_VOLATILE);
1930 TREE_SIDE_EFFECTS (ref)
1931 = (TREE_THIS_VOLATILE (ref) || TREE_SIDE_EFFECTS (val));
1936 /* Helper for the genericization callback. Returns true if T denotes
1937 a RESULT_DECL with DECL_BY_REFERENCE set. */
1940 is_byref_result (tree t)
1942 return (TREE_CODE (t) == RESULT_DECL && DECL_BY_REFERENCE (t));
1946 /* Tree walking callback for gnat_genericize. Currently ...
1948 o Adjust references to the function's DECL_RESULT if it is marked
1949 DECL_BY_REFERENCE and so has had its type turned into a reference
1950 type at the end of the function compilation. */
1953 gnat_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
1955 /* This implementation is modeled after what the C++ front-end is
1956 doing, basis of the downstream passes behavior. */
1958 tree stmt = *stmt_p;
1959 struct pointer_set_t *p_set = (struct pointer_set_t*) data;
1961 /* If we have a direct mention of the result decl, dereference. */
1962 if (is_byref_result (stmt))
1964 *stmt_p = convert_from_reference (stmt);
1969 /* Otherwise, no need to walk the same tree twice. */
1970 if (pointer_set_contains (p_set, stmt))
1976 /* If we are taking the address of what now is a reference, just get the
1978 if (TREE_CODE (stmt) == ADDR_EXPR
1979 && is_byref_result (TREE_OPERAND (stmt, 0)))
1981 *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
1985 /* Don't dereference an by-reference RESULT_DECL inside a RETURN_EXPR. */
1986 else if (TREE_CODE (stmt) == RETURN_EXPR
1987 && TREE_OPERAND (stmt, 0)
1988 && is_byref_result (TREE_OPERAND (stmt, 0)))
1991 /* Don't look inside trees that cannot embed references of interest. */
1992 else if (IS_TYPE_OR_DECL_P (stmt))
1995 pointer_set_insert (p_set, *stmt_p);
2000 /* Perform lowering of Ada trees to GENERIC. In particular:
2002 o Turn a DECL_BY_REFERENCE RESULT_DECL into a real by-reference decl
2003 and adjust all the references to this decl accordingly. */
2006 gnat_genericize (tree fndecl)
2008 /* Prior to GCC 4, an explicit By_Reference result mechanism for a function
2009 was handled by simply setting TREE_ADDRESSABLE on the result type.
2010 Everything required to actually pass by invisible ref using the target
2011 mechanism (e.g. extra parameter) was handled at RTL expansion time.
2013 This doesn't work with GCC 4 any more for several reasons. First, the
2014 gimplification process might need the creation of temporaries of this
2015 type, and the gimplifier ICEs on such attempts. Second, the middle-end
2016 now relies on a different attribute for such cases (DECL_BY_REFERENCE on
2017 RESULT/PARM_DECLs), and expects the user invisible by-reference-ness to
2018 be explicitly accounted for by the front-end in the function body.
2020 We achieve the complete transformation in two steps:
2022 1/ create_subprog_decl performs early attribute tweaks: it clears
2023 TREE_ADDRESSABLE from the result type and sets DECL_BY_REFERENCE on
2024 the result decl. The former ensures that the bit isn't set in the GCC
2025 tree saved for the function, so prevents ICEs on temporary creation.
2026 The latter we use here to trigger the rest of the processing.
2028 2/ This function performs the type transformation on the result decl
2029 and adjusts all the references to this decl from the function body
2032 Clearing TREE_ADDRESSABLE from the type differs from the C++ front-end
2033 strategy, which escapes the gimplifier temporary creation issues by
2034 creating it's own temporaries using TARGET_EXPR nodes. Our way relies
2035 on simple specific support code in aggregate_value_p to look at the
2036 target function result decl explicitly. */
2038 struct pointer_set_t *p_set;
2039 tree decl_result = DECL_RESULT (fndecl);
2041 if (!DECL_BY_REFERENCE (decl_result))
2044 /* Make the DECL_RESULT explicitly by-reference and adjust all the
2045 occurrences in the function body using the common tree-walking facility.
2046 We want to see every occurrence of the result decl to adjust the
2047 referencing tree, so need to use our own pointer set to control which
2048 trees should be visited again or not. */
2050 p_set = pointer_set_create ();
2052 TREE_TYPE (decl_result) = build_reference_type (TREE_TYPE (decl_result));
2053 TREE_ADDRESSABLE (decl_result) = 0;
2054 relayout_decl (decl_result);
2056 walk_tree (&DECL_SAVED_TREE (fndecl), gnat_genericize_r, p_set, NULL);
2058 pointer_set_destroy (p_set);
2061 /* Finish the definition of the current subprogram BODY and finalize it. */
2064 end_subprog_body (tree body)
2066 tree fndecl = current_function_decl;
2068 /* Mark the BLOCK for this level as being for this function and pop the
2069 level. Since the vars in it are the parameters, clear them. */
2070 BLOCK_VARS (current_binding_level->block) = 0;
2071 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
2072 DECL_INITIAL (fndecl) = current_binding_level->block;
2075 /* We handle pending sizes via the elaboration of types, so we don't
2076 need to save them. */
2077 get_pending_sizes ();
2079 /* Mark the RESULT_DECL as being in this subprogram. */
2080 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
2082 DECL_SAVED_TREE (fndecl) = body;
2084 current_function_decl = DECL_CONTEXT (fndecl);
2087 /* We cannot track the location of errors past this point. */
2088 error_gnat_node = Empty;
2090 /* If we're only annotating types, don't actually compile this function. */
2091 if (type_annotate_only)
2094 /* Perform the required pre-gimplification transformations on the tree. */
2095 gnat_genericize (fndecl);
2097 /* Dump functions before gimplification. */
2098 dump_function (TDI_original, fndecl);
2100 /* ??? This special handling of nested functions is probably obsolete. */
2101 if (!DECL_CONTEXT (fndecl))
2102 cgraph_finalize_function (fndecl, false);
2104 /* Register this function with cgraph just far enough to get it
2105 added to our parent's nested function list. */
2106 (void) cgraph_node (fndecl);
2110 gnat_builtin_function (tree decl)
2112 gnat_pushdecl (decl, Empty);
2116 /* Return an integer type with the number of bits of precision given by
2117 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
2118 it is a signed type. */
2121 gnat_type_for_size (unsigned precision, int unsignedp)
2126 if (precision <= 2 * MAX_BITS_PER_WORD
2127 && signed_and_unsigned_types[precision][unsignedp])
2128 return signed_and_unsigned_types[precision][unsignedp];
2131 t = make_unsigned_type (precision);
2133 t = make_signed_type (precision);
2135 if (precision <= 2 * MAX_BITS_PER_WORD)
2136 signed_and_unsigned_types[precision][unsignedp] = t;
2140 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
2141 TYPE_NAME (t) = get_identifier (type_name);
2147 /* Likewise for floating-point types. */
2150 float_type_for_precision (int precision, enum machine_mode mode)
2155 if (float_types[(int) mode])
2156 return float_types[(int) mode];
2158 float_types[(int) mode] = t = make_node (REAL_TYPE);
2159 TYPE_PRECISION (t) = precision;
2162 gcc_assert (TYPE_MODE (t) == mode);
2165 sprintf (type_name, "FLOAT_%d", precision);
2166 TYPE_NAME (t) = get_identifier (type_name);
2172 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
2173 an unsigned type; otherwise a signed type is returned. */
2176 gnat_type_for_mode (enum machine_mode mode, int unsignedp)
2178 if (mode == BLKmode)
2180 else if (mode == VOIDmode)
2181 return void_type_node;
2182 else if (COMPLEX_MODE_P (mode))
2184 else if (SCALAR_FLOAT_MODE_P (mode))
2185 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
2186 else if (SCALAR_INT_MODE_P (mode))
2187 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2192 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
2195 gnat_unsigned_type (tree type_node)
2197 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
2199 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2201 type = copy_node (type);
2202 TREE_TYPE (type) = type_node;
2204 else if (TREE_TYPE (type_node)
2205 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2206 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2208 type = copy_node (type);
2209 TREE_TYPE (type) = TREE_TYPE (type_node);
2215 /* Return the signed version of a TYPE_NODE, a scalar type. */
2218 gnat_signed_type (tree type_node)
2220 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
2222 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2224 type = copy_node (type);
2225 TREE_TYPE (type) = type_node;
2227 else if (TREE_TYPE (type_node)
2228 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2229 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2231 type = copy_node (type);
2232 TREE_TYPE (type) = TREE_TYPE (type_node);
2238 /* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
2239 transparently converted to each other. */
2242 gnat_types_compatible_p (tree t1, tree t2)
2244 enum tree_code code;
2246 /* This is the default criterion. */
2247 if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
2250 /* We only check structural equivalence here. */
2251 if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
2254 /* Vector types are also compatible if they have the same number of subparts
2255 and the same form of (scalar) element type. */
2256 if (code == VECTOR_TYPE
2257 && TYPE_VECTOR_SUBPARTS (t1) == TYPE_VECTOR_SUBPARTS (t2)
2258 && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
2259 && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
2262 /* Array types are also compatible if they are constrained and have
2263 the same component type and the same domain. */
2264 if (code == ARRAY_TYPE
2265 && TREE_TYPE (t1) == TREE_TYPE (t2)
2266 && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
2267 || (TYPE_DOMAIN (t1)
2269 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
2270 TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
2271 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
2272 TYPE_MAX_VALUE (TYPE_DOMAIN (t2))))))
2275 /* Padding record types are also compatible if they pad the same
2276 type and have the same constant size. */
2277 if (code == RECORD_TYPE
2278 && TYPE_PADDING_P (t1) && TYPE_PADDING_P (t2)
2279 && TREE_TYPE (TYPE_FIELDS (t1)) == TREE_TYPE (TYPE_FIELDS (t2))
2280 && tree_int_cst_equal (TYPE_SIZE (t1), TYPE_SIZE (t2)))
2286 /* EXP is an expression for the size of an object. If this size contains
2287 discriminant references, replace them with the maximum (if MAX_P) or
2288 minimum (if !MAX_P) possible value of the discriminant. */
2291 max_size (tree exp, bool max_p)
2293 enum tree_code code = TREE_CODE (exp);
2294 tree type = TREE_TYPE (exp);
2296 switch (TREE_CODE_CLASS (code))
2298 case tcc_declaration:
2303 if (code == CALL_EXPR)
2308 t = maybe_inline_call_in_expr (exp);
2310 return max_size (t, max_p);
2312 n = call_expr_nargs (exp);
2314 argarray = (tree *) alloca (n * sizeof (tree));
2315 for (i = 0; i < n; i++)
2316 argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
2317 return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
2322 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
2323 modify. Otherwise, we treat it like a variable. */
2324 if (!CONTAINS_PLACEHOLDER_P (exp))
2327 type = TREE_TYPE (TREE_OPERAND (exp, 1));
2329 max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), true);
2331 case tcc_comparison:
2332 return max_p ? size_one_node : size_zero_node;
2336 case tcc_expression:
2337 switch (TREE_CODE_LENGTH (code))
2340 if (code == NON_LVALUE_EXPR)
2341 return max_size (TREE_OPERAND (exp, 0), max_p);
2344 fold_build1 (code, type,
2345 max_size (TREE_OPERAND (exp, 0),
2346 code == NEGATE_EXPR ? !max_p : max_p));
2349 if (code == COMPOUND_EXPR)
2350 return max_size (TREE_OPERAND (exp, 1), max_p);
2352 /* Calculate "(A ? B : C) - D" as "A ? B - D : C - D" which
2353 may provide a tighter bound on max_size. */
2354 if (code == MINUS_EXPR
2355 && TREE_CODE (TREE_OPERAND (exp, 0)) == COND_EXPR)
2357 tree lhs = fold_build2 (MINUS_EXPR, type,
2358 TREE_OPERAND (TREE_OPERAND (exp, 0), 1),
2359 TREE_OPERAND (exp, 1));
2360 tree rhs = fold_build2 (MINUS_EXPR, type,
2361 TREE_OPERAND (TREE_OPERAND (exp, 0), 2),
2362 TREE_OPERAND (exp, 1));
2363 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2364 max_size (lhs, max_p),
2365 max_size (rhs, max_p));
2369 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
2370 tree rhs = max_size (TREE_OPERAND (exp, 1),
2371 code == MINUS_EXPR ? !max_p : max_p);
2373 /* Special-case wanting the maximum value of a MIN_EXPR.
2374 In that case, if one side overflows, return the other.
2375 sizetype is signed, but we know sizes are non-negative.
2376 Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
2377 overflowing or the maximum possible value and the RHS
2381 && TREE_CODE (rhs) == INTEGER_CST
2382 && TREE_OVERFLOW (rhs))
2386 && TREE_CODE (lhs) == INTEGER_CST
2387 && TREE_OVERFLOW (lhs))
2389 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
2390 && ((TREE_CODE (lhs) == INTEGER_CST
2391 && TREE_OVERFLOW (lhs))
2392 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
2393 && !TREE_CONSTANT (rhs))
2396 return fold_build2 (code, type, lhs, rhs);
2400 if (code == SAVE_EXPR)
2402 else if (code == COND_EXPR)
2403 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2404 max_size (TREE_OPERAND (exp, 1), max_p),
2405 max_size (TREE_OPERAND (exp, 2), max_p));
2408 /* Other tree classes cannot happen. */
2416 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2417 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2418 Return a constructor for the template. */
2421 build_template (tree template_type, tree array_type, tree expr)
2423 tree template_elts = NULL_TREE;
2424 tree bound_list = NULL_TREE;
2427 while (TREE_CODE (array_type) == RECORD_TYPE
2428 && (TYPE_PADDING_P (array_type)
2429 || TYPE_JUSTIFIED_MODULAR_P (array_type)))
2430 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
2432 if (TREE_CODE (array_type) == ARRAY_TYPE
2433 || (TREE_CODE (array_type) == INTEGER_TYPE
2434 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
2435 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
2437 /* First make the list for a CONSTRUCTOR for the template. Go down the
2438 field list of the template instead of the type chain because this
2439 array might be an Ada array of arrays and we can't tell where the
2440 nested arrays stop being the underlying object. */
2442 for (field = TYPE_FIELDS (template_type); field;
2444 ? (bound_list = TREE_CHAIN (bound_list))
2445 : (array_type = TREE_TYPE (array_type))),
2446 field = TREE_CHAIN (TREE_CHAIN (field)))
2448 tree bounds, min, max;
2450 /* If we have a bound list, get the bounds from there. Likewise
2451 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
2452 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
2453 This will give us a maximum range. */
2455 bounds = TREE_VALUE (bound_list);
2456 else if (TREE_CODE (array_type) == ARRAY_TYPE)
2457 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
2458 else if (expr && TREE_CODE (expr) == PARM_DECL
2459 && DECL_BY_COMPONENT_PTR_P (expr))
2460 bounds = TREE_TYPE (field);
2464 min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
2465 max = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MAX_VALUE (bounds));
2467 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
2468 substitute it from OBJECT. */
2469 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
2470 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
2472 template_elts = tree_cons (TREE_CHAIN (field), max,
2473 tree_cons (field, min, template_elts));
2476 return gnat_build_constructor (template_type, nreverse (template_elts));
2479 /* Build a 32bit VMS descriptor from a Mechanism_Type, which must specify
2480 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2481 in the type contains in its DECL_INITIAL the expression to use when
2482 a constructor is made for the type. GNAT_ENTITY is an entity used
2483 to print out an error message if the mechanism cannot be applied to
2484 an object of that type and also for the name. */
2487 build_vms_descriptor32 (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2489 tree record_type = make_node (RECORD_TYPE);
2490 tree pointer32_type;
2491 tree field_list = 0;
2500 /* If TYPE is an unconstrained array, use the underlying array type. */
2501 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2502 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2504 /* If this is an array, compute the number of dimensions in the array,
2505 get the index types, and point to the inner type. */
2506 if (TREE_CODE (type) != ARRAY_TYPE)
2509 for (ndim = 1, inner_type = type;
2510 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2511 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2512 ndim++, inner_type = TREE_TYPE (inner_type))
2515 idx_arr = (tree *) alloca (ndim * sizeof (tree));
2517 if (mech != By_Descriptor_NCA && mech != By_Short_Descriptor_NCA
2518 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2519 for (i = ndim - 1, inner_type = type;
2521 i--, inner_type = TREE_TYPE (inner_type))
2522 idx_arr[i] = TYPE_DOMAIN (inner_type);
2524 for (i = 0, inner_type = type;
2526 i++, inner_type = TREE_TYPE (inner_type))
2527 idx_arr[i] = TYPE_DOMAIN (inner_type);
2529 /* Now get the DTYPE value. */
2530 switch (TREE_CODE (type))
2535 if (TYPE_VAX_FLOATING_POINT_P (type))
2536 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2549 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2552 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2555 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2558 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2561 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2564 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2570 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2574 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2575 && TYPE_VAX_FLOATING_POINT_P (type))
2576 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2588 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2599 /* Get the CLASS value. */
2602 case By_Descriptor_A:
2603 case By_Short_Descriptor_A:
2606 case By_Descriptor_NCA:
2607 case By_Short_Descriptor_NCA:
2610 case By_Descriptor_SB:
2611 case By_Short_Descriptor_SB:
2615 case By_Short_Descriptor:
2616 case By_Descriptor_S:
2617 case By_Short_Descriptor_S:
2623 /* Make the type for a descriptor for VMS. The first four fields
2624 are the same for all types. */
2627 = chainon (field_list,
2628 make_descriptor_field
2629 ("LENGTH", gnat_type_for_size (16, 1), record_type,
2630 size_in_bytes ((mech == By_Descriptor_A ||
2631 mech == By_Short_Descriptor_A)
2632 ? inner_type : type)));
2634 field_list = chainon (field_list,
2635 make_descriptor_field ("DTYPE",
2636 gnat_type_for_size (8, 1),
2637 record_type, size_int (dtype)));
2638 field_list = chainon (field_list,
2639 make_descriptor_field ("CLASS",
2640 gnat_type_for_size (8, 1),
2641 record_type, size_int (klass)));
2643 /* Of course this will crash at run-time if the address space is not
2644 within the low 32 bits, but there is nothing else we can do. */
2645 pointer32_type = build_pointer_type_for_mode (type, SImode, false);
2648 = chainon (field_list,
2649 make_descriptor_field
2650 ("POINTER", pointer32_type, record_type,
2651 build_unary_op (ADDR_EXPR,
2653 build0 (PLACEHOLDER_EXPR, type))));
2658 case By_Short_Descriptor:
2659 case By_Descriptor_S:
2660 case By_Short_Descriptor_S:
2663 case By_Descriptor_SB:
2664 case By_Short_Descriptor_SB:
2666 = chainon (field_list,
2667 make_descriptor_field
2668 ("SB_L1", gnat_type_for_size (32, 1), record_type,
2669 TREE_CODE (type) == ARRAY_TYPE
2670 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2672 = chainon (field_list,
2673 make_descriptor_field
2674 ("SB_U1", gnat_type_for_size (32, 1), record_type,
2675 TREE_CODE (type) == ARRAY_TYPE
2676 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2679 case By_Descriptor_A:
2680 case By_Short_Descriptor_A:
2681 case By_Descriptor_NCA:
2682 case By_Short_Descriptor_NCA:
2683 field_list = chainon (field_list,
2684 make_descriptor_field ("SCALE",
2685 gnat_type_for_size (8, 1),
2689 field_list = chainon (field_list,
2690 make_descriptor_field ("DIGITS",
2691 gnat_type_for_size (8, 1),
2696 = chainon (field_list,
2697 make_descriptor_field
2698 ("AFLAGS", gnat_type_for_size (8, 1), record_type,
2699 size_int ((mech == By_Descriptor_NCA ||
2700 mech == By_Short_Descriptor_NCA)
2702 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2703 : (TREE_CODE (type) == ARRAY_TYPE
2704 && TYPE_CONVENTION_FORTRAN_P (type)
2707 field_list = chainon (field_list,
2708 make_descriptor_field ("DIMCT",
2709 gnat_type_for_size (8, 1),
2713 field_list = chainon (field_list,
2714 make_descriptor_field ("ARSIZE",
2715 gnat_type_for_size (32, 1),
2717 size_in_bytes (type)));
2719 /* Now build a pointer to the 0,0,0... element. */
2720 tem = build0 (PLACEHOLDER_EXPR, type);
2721 for (i = 0, inner_type = type; i < ndim;
2722 i++, inner_type = TREE_TYPE (inner_type))
2723 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
2724 convert (TYPE_DOMAIN (inner_type), size_zero_node),
2725 NULL_TREE, NULL_TREE);
2728 = chainon (field_list,
2729 make_descriptor_field
2731 build_pointer_type_for_mode (inner_type, SImode, false),
2734 build_pointer_type_for_mode (inner_type, SImode,
2738 /* Next come the addressing coefficients. */
2739 tem = size_one_node;
2740 for (i = 0; i < ndim; i++)
2744 = size_binop (MULT_EXPR, tem,
2745 size_binop (PLUS_EXPR,
2746 size_binop (MINUS_EXPR,
2747 TYPE_MAX_VALUE (idx_arr[i]),
2748 TYPE_MIN_VALUE (idx_arr[i])),
2751 fname[0] = ((mech == By_Descriptor_NCA ||
2752 mech == By_Short_Descriptor_NCA) ? 'S' : 'M');
2753 fname[1] = '0' + i, fname[2] = 0;
2755 = chainon (field_list,
2756 make_descriptor_field (fname,
2757 gnat_type_for_size (32, 1),
2758 record_type, idx_length));
2760 if (mech == By_Descriptor_NCA || mech == By_Short_Descriptor_NCA)
2764 /* Finally here are the bounds. */
2765 for (i = 0; i < ndim; i++)
2769 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2771 = chainon (field_list,
2772 make_descriptor_field
2773 (fname, gnat_type_for_size (32, 1), record_type,
2774 TYPE_MIN_VALUE (idx_arr[i])));
2778 = chainon (field_list,
2779 make_descriptor_field
2780 (fname, gnat_type_for_size (32, 1), record_type,
2781 TYPE_MAX_VALUE (idx_arr[i])));
2786 post_error ("unsupported descriptor type for &", gnat_entity);
2789 TYPE_NAME (record_type) = create_concat_name (gnat_entity, "DESC");
2790 finish_record_type (record_type, field_list, 0, true);
2794 /* Build a 64bit VMS descriptor from a Mechanism_Type, which must specify
2795 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2796 in the type contains in its DECL_INITIAL the expression to use when
2797 a constructor is made for the type. GNAT_ENTITY is an entity used
2798 to print out an error message if the mechanism cannot be applied to
2799 an object of that type and also for the name. */
2802 build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2804 tree record64_type = make_node (RECORD_TYPE);
2805 tree pointer64_type;
2806 tree field_list64 = 0;
2815 /* If TYPE is an unconstrained array, use the underlying array type. */
2816 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2817 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2819 /* If this is an array, compute the number of dimensions in the array,
2820 get the index types, and point to the inner type. */
2821 if (TREE_CODE (type) != ARRAY_TYPE)
2824 for (ndim = 1, inner_type = type;
2825 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2826 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2827 ndim++, inner_type = TREE_TYPE (inner_type))
2830 idx_arr = (tree *) alloca (ndim * sizeof (tree));
2832 if (mech != By_Descriptor_NCA
2833 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2834 for (i = ndim - 1, inner_type = type;
2836 i--, inner_type = TREE_TYPE (inner_type))
2837 idx_arr[i] = TYPE_DOMAIN (inner_type);
2839 for (i = 0, inner_type = type;
2841 i++, inner_type = TREE_TYPE (inner_type))
2842 idx_arr[i] = TYPE_DOMAIN (inner_type);
2844 /* Now get the DTYPE value. */
2845 switch (TREE_CODE (type))
2850 if (TYPE_VAX_FLOATING_POINT_P (type))
2851 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2864 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2867 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2870 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2873 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2876 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2879 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2885 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2889 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2890 && TYPE_VAX_FLOATING_POINT_P (type))
2891 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2903 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2914 /* Get the CLASS value. */
2917 case By_Descriptor_A:
2920 case By_Descriptor_NCA:
2923 case By_Descriptor_SB:
2927 case By_Descriptor_S:
2933 /* Make the type for a 64bit descriptor for VMS. The first six fields
2934 are the same for all types. */
2936 field_list64 = chainon (field_list64,
2937 make_descriptor_field ("MBO",
2938 gnat_type_for_size (16, 1),
2939 record64_type, size_int (1)));
2941 field_list64 = chainon (field_list64,
2942 make_descriptor_field ("DTYPE",
2943 gnat_type_for_size (8, 1),
2944 record64_type, size_int (dtype)));
2945 field_list64 = chainon (field_list64,
2946 make_descriptor_field ("CLASS",
2947 gnat_type_for_size (8, 1),
2948 record64_type, size_int (klass)));
2950 field_list64 = chainon (field_list64,
2951 make_descriptor_field ("MBMO",
2952 gnat_type_for_size (32, 1),
2953 record64_type, ssize_int (-1)));
2956 = chainon (field_list64,
2957 make_descriptor_field
2958 ("LENGTH", gnat_type_for_size (64, 1), record64_type,
2959 size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));
2961 pointer64_type = build_pointer_type_for_mode (type, DImode, false);
2964 = chainon (field_list64,
2965 make_descriptor_field
2966 ("POINTER", pointer64_type, record64_type,
2967 build_unary_op (ADDR_EXPR,
2969 build0 (PLACEHOLDER_EXPR, type))));
2974 case By_Descriptor_S:
2977 case By_Descriptor_SB:
2979 = chainon (field_list64,
2980 make_descriptor_field
2981 ("SB_L1", gnat_type_for_size (64, 1), record64_type,
2982 TREE_CODE (type) == ARRAY_TYPE
2983 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2985 = chainon (field_list64,
2986 make_descriptor_field
2987 ("SB_U1", gnat_type_for_size (64, 1), record64_type,
2988 TREE_CODE (type) == ARRAY_TYPE
2989 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2992 case By_Descriptor_A:
2993 case By_Descriptor_NCA:
2994 field_list64 = chainon (field_list64,
2995 make_descriptor_field ("SCALE",
2996 gnat_type_for_size (8, 1),
3000 field_list64 = chainon (field_list64,
3001 make_descriptor_field ("DIGITS",
3002 gnat_type_for_size (8, 1),
3007 = chainon (field_list64,
3008 make_descriptor_field
3009 ("AFLAGS", gnat_type_for_size (8, 1), record64_type,
3010 size_int (mech == By_Descriptor_NCA
3012 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
3013 : (TREE_CODE (type) == ARRAY_TYPE
3014 && TYPE_CONVENTION_FORTRAN_P (type)
3017 field_list64 = chainon (field_list64,
3018 make_descriptor_field ("DIMCT",
3019 gnat_type_for_size (8, 1),
3023 field_list64 = chainon (field_list64,
3024 make_descriptor_field ("MBZ",
3025 gnat_type_for_size (32, 1),
3028 field_list64 = chainon (field_list64,
3029 make_descriptor_field ("ARSIZE",
3030 gnat_type_for_size (64, 1),
3032 size_in_bytes (type)));
3034 /* Now build a pointer to the 0,0,0... element. */
3035 tem = build0 (PLACEHOLDER_EXPR, type);
3036 for (i = 0, inner_type = type; i < ndim;
3037 i++, inner_type = TREE_TYPE (inner_type))
3038 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
3039 convert (TYPE_DOMAIN (inner_type), size_zero_node),
3040 NULL_TREE, NULL_TREE);
3043 = chainon (field_list64,
3044 make_descriptor_field
3046 build_pointer_type_for_mode (inner_type, DImode, false),
3049 build_pointer_type_for_mode (inner_type, DImode,
3053 /* Next come the addressing coefficients. */
3054 tem = size_one_node;
3055 for (i = 0; i < ndim; i++)
3059 = size_binop (MULT_EXPR, tem,
3060 size_binop (PLUS_EXPR,
3061 size_binop (MINUS_EXPR,
3062 TYPE_MAX_VALUE (idx_arr[i]),
3063 TYPE_MIN_VALUE (idx_arr[i])),
3066 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
3067 fname[1] = '0' + i, fname[2] = 0;
3069 = chainon (field_list64,
3070 make_descriptor_field (fname,
3071 gnat_type_for_size (64, 1),
3072 record64_type, idx_length));
3074 if (mech == By_Descriptor_NCA)
3078 /* Finally here are the bounds. */
3079 for (i = 0; i < ndim; i++)
3083 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
3085 = chainon (field_list64,
3086 make_descriptor_field
3087 (fname, gnat_type_for_size (64, 1), record64_type,
3088 TYPE_MIN_VALUE (idx_arr[i])));
3092 = chainon (field_list64,
3093 make_descriptor_field
3094 (fname, gnat_type_for_size (64, 1), record64_type,
3095 TYPE_MAX_VALUE (idx_arr[i])));
3100 post_error ("unsupported descriptor type for &", gnat_entity);
3103 TYPE_NAME (record64_type) = create_concat_name (gnat_entity, "DESC64");
3104 finish_record_type (record64_type, field_list64, 0, true);
3105 return record64_type;
3108 /* Utility routine for above code to make a field. */
3111 make_descriptor_field (const char *name, tree type,
3112 tree rec_type, tree initial)
3115 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
3117 DECL_INITIAL (field) = initial;
3121 /* Convert GNU_EXPR, a pointer to a 64bit VMS descriptor, to GNU_TYPE, a
3122 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3123 which the VMS descriptor is passed. */
3126 convert_vms_descriptor64 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
3128 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3129 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3130 /* The CLASS field is the 3rd field in the descriptor. */
3131 tree klass = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type)));
3132 /* The POINTER field is the 6th field in the descriptor. */
3133 tree pointer64 = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (klass)));
3135 /* Retrieve the value of the POINTER field. */
3137 = build3 (COMPONENT_REF, TREE_TYPE (pointer64), desc, pointer64, NULL_TREE);
3139 if (POINTER_TYPE_P (gnu_type))
3140 return convert (gnu_type, gnu_expr64);
3142 else if (TYPE_IS_FAT_POINTER_P (gnu_type))
3144 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
3145 tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
3146 tree template_type = TREE_TYPE (p_bounds_type);
3147 tree min_field = TYPE_FIELDS (template_type);
3148 tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
3149 tree template_tree, template_addr, aflags, dimct, t, u;
3150 /* See the head comment of build_vms_descriptor. */
3151 int iklass = TREE_INT_CST_LOW (DECL_INITIAL (klass));
3152 tree lfield, ufield;
3154 /* Convert POINTER to the type of the P_ARRAY field. */
3155 gnu_expr64 = convert (p_array_type, gnu_expr64);
3159 case 1: /* Class S */
3160 case 15: /* Class SB */
3161 /* Build {1, LENGTH} template; LENGTH64 is the 5th field. */
3162 t = TREE_CHAIN (TREE_CHAIN (klass));
3163 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3164 t = tree_cons (min_field,
3165 convert (TREE_TYPE (min_field), integer_one_node),
3166 tree_cons (max_field,
3167 convert (TREE_TYPE (max_field), t),
3169 template_tree = gnat_build_constructor (template_type, t);
3170 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template_tree);
3172 /* For class S, we are done. */
3176 /* Test that we really have a SB descriptor, like DEC Ada. */
3177 t = build3 (COMPONENT_REF, TREE_TYPE (klass), desc, klass, NULL);
3178 u = convert (TREE_TYPE (klass), DECL_INITIAL (klass));
3179 u = build_binary_op (EQ_EXPR, integer_type_node, t, u);
3180 /* If so, there is already a template in the descriptor and
3181 it is located right after the POINTER field. The fields are
3182 64bits so they must be repacked. */
3183 t = TREE_CHAIN (pointer64);
3184 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3185 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3188 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3190 (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (template_type))), ufield);
3192 /* Build the template in the form of a constructor. */
3193 t = tree_cons (TYPE_FIELDS (template_type), lfield,
3194 tree_cons (TREE_CHAIN (TYPE_FIELDS (template_type)),
3195 ufield, NULL_TREE));
3196 template_tree = gnat_build_constructor (template_type, t);
3198 /* Otherwise use the {1, LENGTH} template we build above. */
3199 template_addr = build3 (COND_EXPR, p_bounds_type, u,
3200 build_unary_op (ADDR_EXPR, p_bounds_type,
3205 case 4: /* Class A */
3206 /* The AFLAGS field is the 3rd field after the pointer in the
3208 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer64)));
3209 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3210 /* The DIMCT field is the next field in the descriptor after
3213 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3214 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
3215 or FL_COEFF or FL_BOUNDS not set. */
3216 u = build_int_cst (TREE_TYPE (aflags), 192);
3217 u = build_binary_op (TRUTH_OR_EXPR, integer_type_node,
3218 build_binary_op (NE_EXPR, integer_type_node,
3220 convert (TREE_TYPE (dimct),
3222 build_binary_op (NE_EXPR, integer_type_node,
3223 build2 (BIT_AND_EXPR,
3227 /* There is already a template in the descriptor and it is located
3228 in block 3. The fields are 64bits so they must be repacked. */
3229 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN
3231 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3232 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3235 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3237 (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (template_type))), ufield);
3239 /* Build the template in the form of a constructor. */
3240 t = tree_cons (TYPE_FIELDS (template_type), lfield,
3241 tree_cons (TREE_CHAIN (TYPE_FIELDS (template_type)),
3242 ufield, NULL_TREE));
3243 template_tree = gnat_build_constructor (template_type, t);
3244 template_tree = build3 (COND_EXPR, template_type, u,
3245 build_call_raise (CE_Length_Check_Failed, Empty,
3246 N_Raise_Constraint_Error),
3249 = build_unary_op (ADDR_EXPR, p_bounds_type, template_tree);
3252 case 10: /* Class NCA */
3254 post_error ("unsupported descriptor type for &", gnat_subprog);
3255 template_addr = integer_zero_node;
3259 /* Build the fat pointer in the form of a constructor. */
3260 t = tree_cons (TYPE_FIELDS (gnu_type), gnu_expr64,
3261 tree_cons (TREE_CHAIN (TYPE_FIELDS (gnu_type)),
3262 template_addr, NULL_TREE));
3263 return gnat_build_constructor (gnu_type, t);
3270 /* Convert GNU_EXPR, a pointer to a 32bit VMS descriptor, to GNU_TYPE, a
3271 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3272 which the VMS descriptor is passed. */
3275 convert_vms_descriptor32 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
3277 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3278 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3279 /* The CLASS field is the 3rd field in the descriptor. */
3280 tree klass = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type)));
3281 /* The POINTER field is the 4th field in the descriptor. */
3282 tree pointer = TREE_CHAIN (klass);
3284 /* Retrieve the value of the POINTER field. */
3286 = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);
3288 if (POINTER_TYPE_P (gnu_type))
3289 return convert (gnu_type, gnu_expr32);
3291 else if (TYPE_IS_FAT_POINTER_P (gnu_type))
3293 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
3294 tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
3295 tree template_type = TREE_TYPE (p_bounds_type);
3296 tree min_field = TYPE_FIELDS (template_type);
3297 tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
3298 tree template_tree, template_addr, aflags, dimct, t, u;
3299 /* See the head comment of build_vms_descriptor. */
3300 int iklass = TREE_INT_CST_LOW (DECL_INITIAL (klass));
3302 /* Convert POINTER to the type of the P_ARRAY field. */
3303 gnu_expr32 = convert (p_array_type, gnu_expr32);
3307 case 1: /* Class S */
3308 case 15: /* Class SB */
3309 /* Build {1, LENGTH} template; LENGTH is the 1st field. */
3310 t = TYPE_FIELDS (desc_type);
3311 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3312 t = tree_cons (min_field,
3313 convert (TREE_TYPE (min_field), integer_one_node),
3314 tree_cons (max_field,
3315 convert (TREE_TYPE (max_field), t),
3317 template_tree = gnat_build_constructor (template_type, t);
3318 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template_tree);
3320 /* For class S, we are done. */
3324 /* Test that we really have a SB descriptor, like DEC Ada. */
3325 t = build3 (COMPONENT_REF, TREE_TYPE (klass), desc, klass, NULL);
3326 u = convert (TREE_TYPE (klass), DECL_INITIAL (klass));
3327 u = build_binary_op (EQ_EXPR, integer_type_node, t, u);
3328 /* If so, there is already a template in the descriptor and
3329 it is located right after the POINTER field. */
3330 t = TREE_CHAIN (pointer);
3332 = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3333 /* Otherwise use the {1, LENGTH} template we build above. */
3334 template_addr = build3 (COND_EXPR, p_bounds_type, u,
3335 build_unary_op (ADDR_EXPR, p_bounds_type,
3340 case 4: /* Class A */
3341 /* The AFLAGS field is the 7th field in the descriptor. */
3342 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer)));
3343 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3344 /* The DIMCT field is the 8th field in the descriptor. */
3346 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3347 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
3348 or FL_COEFF or FL_BOUNDS not set. */
3349 u = build_int_cst (TREE_TYPE (aflags), 192);
3350 u = build_binary_op (TRUTH_OR_EXPR, integer_type_node,
3351 build_binary_op (NE_EXPR, integer_type_node,
3353 convert (TREE_TYPE (dimct),
3355 build_binary_op (NE_EXPR, integer_type_node,
3356 build2 (BIT_AND_EXPR,
3360 /* There is already a template in the descriptor and it is
3361 located at the start of block 3 (12th field). */
3362 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (t))));
3364 = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3365 template_tree = build3 (COND_EXPR, TREE_TYPE (t), u,
3366 build_call_raise (CE_Length_Check_Failed, Empty,
3367 N_Raise_Constraint_Error),
3370 = build_unary_op (ADDR_EXPR, p_bounds_type, template_tree);
3373 case 10: /* Class NCA */
3375 post_error ("unsupported descriptor type for &", gnat_subprog);
3376 template_addr = integer_zero_node;
3380 /* Build the fat pointer in the form of a constructor. */
3381 t = tree_cons (TYPE_FIELDS (gnu_type), gnu_expr32,
3382 tree_cons (TREE_CHAIN (TYPE_FIELDS (gnu_type)),
3383 template_addr, NULL_TREE));
3385 return gnat_build_constructor (gnu_type, t);
3392 /* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
3393 pointer or fat pointer type. GNU_EXPR_ALT_TYPE is the alternate (32-bit)
3394 pointer type of GNU_EXPR. GNAT_SUBPROG is the subprogram to which the
3395 VMS descriptor is passed. */
3398 convert_vms_descriptor (tree gnu_type, tree gnu_expr, tree gnu_expr_alt_type,
3399 Entity_Id gnat_subprog)
3401 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3402 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3403 tree mbo = TYPE_FIELDS (desc_type);
3404 const char *mbostr = IDENTIFIER_POINTER (DECL_NAME (mbo));
3405 tree mbmo = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (mbo)));
3406 tree is64bit, gnu_expr32, gnu_expr64;
3408 /* If the field name is not MBO, it must be 32-bit and no alternate.
3409 Otherwise primary must be 64-bit and alternate 32-bit. */
3410 if (strcmp (mbostr, "MBO") != 0)
3411 return convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);
3413 /* Build the test for 64-bit descriptor. */
3414 mbo = build3 (COMPONENT_REF, TREE_TYPE (mbo), desc, mbo, NULL_TREE);
3415 mbmo = build3 (COMPONENT_REF, TREE_TYPE (mbmo), desc, mbmo, NULL_TREE);
3417 = build_binary_op (TRUTH_ANDIF_EXPR, integer_type_node,
3418 build_binary_op (EQ_EXPR, integer_type_node,
3419 convert (integer_type_node, mbo),
3421 build_binary_op (EQ_EXPR, integer_type_node,
3422 convert (integer_type_node, mbmo),
3423 integer_minus_one_node));
3425 /* Build the 2 possible end results. */
3426 gnu_expr64 = convert_vms_descriptor64 (gnu_type, gnu_expr, gnat_subprog);
3427 gnu_expr = fold_convert (gnu_expr_alt_type, gnu_expr);
3428 gnu_expr32 = convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);
3430 return build3 (COND_EXPR, gnu_type, is64bit, gnu_expr64, gnu_expr32);
3433 /* Build a stub for the subprogram specified by the GCC tree GNU_SUBPROG
3434 and the GNAT node GNAT_SUBPROG. */
3437 build_function_stub (tree gnu_subprog, Entity_Id gnat_subprog)
3439 tree gnu_subprog_type, gnu_subprog_addr, gnu_subprog_call;
3440 tree gnu_stub_param, gnu_param_list, gnu_arg_types, gnu_param;
3441 tree gnu_stub_decl = DECL_FUNCTION_STUB (gnu_subprog);
3444 gnu_subprog_type = TREE_TYPE (gnu_subprog);
3445 gnu_param_list = NULL_TREE;
3447 begin_subprog_body (gnu_stub_decl);
3450 start_stmt_group ();
3452 /* Loop over the parameters of the stub and translate any of them
3453 passed by descriptor into a by reference one. */
3454 for (gnu_stub_param = DECL_ARGUMENTS (gnu_stub_decl),
3455 gnu_arg_types = TYPE_ARG_TYPES (gnu_subprog_type);
3457 gnu_stub_param = TREE_CHAIN (gnu_stub_param),
3458 gnu_arg_types = TREE_CHAIN (gnu_arg_types))
3460 if (DECL_BY_DESCRIPTOR_P (gnu_stub_param))
3462 = convert_vms_descriptor (TREE_VALUE (gnu_arg_types),
3464 DECL_PARM_ALT_TYPE (gnu_stub_param),
3467 gnu_param = gnu_stub_param;
3469 gnu_param_list = tree_cons (NULL_TREE, gnu_param, gnu_param_list);
3472 gnu_body = end_stmt_group ();
3474 /* Invoke the internal subprogram. */
3475 gnu_subprog_addr = build1 (ADDR_EXPR, build_pointer_type (gnu_subprog_type),
3477 gnu_subprog_call = build_call_list (TREE_TYPE (gnu_subprog_type),
3479 nreverse (gnu_param_list));
3481 /* Propagate the return value, if any. */
3482 if (VOID_TYPE_P (TREE_TYPE (gnu_subprog_type)))
3483 append_to_statement_list (gnu_subprog_call, &gnu_body);
3485 append_to_statement_list (build_return_expr (DECL_RESULT (gnu_stub_decl),
3491 allocate_struct_function (gnu_stub_decl, false);
3492 end_subprog_body (gnu_body);
3495 /* Build a type to be used to represent an aliased object whose nominal
3496 type is an unconstrained array. This consists of a RECORD_TYPE containing
3497 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
3498 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
3499 is used to represent an arbitrary unconstrained object. Use NAME
3500 as the name of the record. */
3503 build_unc_object_type (tree template_type, tree object_type, tree name)
3505 tree type = make_node (RECORD_TYPE);
3506 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
3507 template_type, type, 0, 0, 0, 1);
3508 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
3511 TYPE_NAME (type) = name;
3512 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
3513 finish_record_type (type,
3514 chainon (chainon (NULL_TREE, template_field),
3521 /* Same, taking a thin or fat pointer type instead of a template type. */
3524 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
3529 gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
3532 = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
3533 ? TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
3534 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
3535 return build_unc_object_type (template_type, object_type, name);
3538 /* Shift the component offsets within an unconstrained object TYPE to make it
3539 suitable for use as a designated type for thin pointers. */
3542 shift_unc_components_for_thin_pointers (tree type)
3544 /* Thin pointer values designate the ARRAY data of an unconstrained object,
3545 allocated past the BOUNDS template. The designated type is adjusted to
3546 have ARRAY at position zero and the template at a negative offset, so
3547 that COMPONENT_REFs on (*thin_ptr) designate the proper location. */
3549 tree bounds_field = TYPE_FIELDS (type);
3550 tree array_field = TREE_CHAIN (TYPE_FIELDS (type));
3552 DECL_FIELD_OFFSET (bounds_field)
3553 = size_binop (MINUS_EXPR, size_zero_node, byte_position (array_field));
3555 DECL_FIELD_OFFSET (array_field) = size_zero_node;
3556 DECL_FIELD_BIT_OFFSET (array_field) = bitsize_zero_node;
3559 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
3560 In the normal case this is just two adjustments, but we have more to
3561 do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
3564 update_pointer_to (tree old_type, tree new_type)
3566 tree ptr = TYPE_POINTER_TO (old_type);
3567 tree ref = TYPE_REFERENCE_TO (old_type);
3571 /* If this is the main variant, process all the other variants first. */
3572 if (TYPE_MAIN_VARIANT (old_type) == old_type)
3573 for (type = TYPE_NEXT_VARIANT (old_type); type;
3574 type = TYPE_NEXT_VARIANT (type))
3575 update_pointer_to (type, new_type);
3577 /* If no pointers and no references, we are done. */
3581 /* Merge the old type qualifiers in the new type.
3583 Each old variant has qualifiers for specific reasons, and the new
3584 designated type as well. Each set of qualifiers represents useful
3585 information grabbed at some point, and merging the two simply unifies
3586 these inputs into the final type description.
3588 Consider for instance a volatile type frozen after an access to constant
3589 type designating it; after the designated type's freeze, we get here with
3590 a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
3591 when the access type was processed. We will make a volatile and readonly
3592 designated type, because that's what it really is.
3594 We might also get here for a non-dummy OLD_TYPE variant with different
3595 qualifiers than those of NEW_TYPE, for instance in some cases of pointers
3596 to private record type elaboration (see the comments around the call to
3597 this routine in gnat_to_gnu_entity <E_Access_Type>). We have to merge
3598 the qualifiers in those cases too, to avoid accidentally discarding the
3599 initial set, and will often end up with OLD_TYPE == NEW_TYPE then. */
3601 = build_qualified_type (new_type,
3602 TYPE_QUALS (old_type) | TYPE_QUALS (new_type));
3604 /* If old type and new type are identical, there is nothing to do. */
3605 if (old_type == new_type)
3608 /* Otherwise, first handle the simple case. */
3609 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
3611 TYPE_POINTER_TO (new_type) = ptr;
3612 TYPE_REFERENCE_TO (new_type) = ref;
3614 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
3615 for (ptr1 = TYPE_MAIN_VARIANT (ptr); ptr1;
3616 ptr1 = TYPE_NEXT_VARIANT (ptr1))
3617 TREE_TYPE (ptr1) = new_type;
3619 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
3620 for (ref1 = TYPE_MAIN_VARIANT (ref); ref1;
3621 ref1 = TYPE_NEXT_VARIANT (ref1))
3622 TREE_TYPE (ref1) = new_type;
3625 /* Now deal with the unconstrained array case. In this case the "pointer"
3626 is actually a RECORD_TYPE where both fields are pointers to dummy nodes.
3627 Turn them into pointers to the correct types using update_pointer_to. */
3628 else if (!TYPE_IS_FAT_POINTER_P (ptr))
3633 tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
3634 tree array_field = TYPE_FIELDS (ptr);
3635 tree bounds_field = TREE_CHAIN (TYPE_FIELDS (ptr));
3636 tree new_ptr = TYPE_POINTER_TO (new_type);
3640 /* Make pointers to the dummy template point to the real template. */
3642 (TREE_TYPE (TREE_TYPE (bounds_field)),
3643 TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_ptr)))));
3645 /* The references to the template bounds present in the array type
3646 are made through a PLACEHOLDER_EXPR of type NEW_PTR. Since we
3647 are updating PTR to make it a full replacement for NEW_PTR as
3648 pointer to NEW_TYPE, we must rework the PLACEHOLDER_EXPR so as
3649 to make it of type PTR. */
3650 new_ref = build3 (COMPONENT_REF, TREE_TYPE (bounds_field),
3651 build0 (PLACEHOLDER_EXPR, ptr),
3652 bounds_field, NULL_TREE);
3654 /* Create the new array for the new PLACEHOLDER_EXPR and make pointers
3655 to the dummy array point to it. */
3657 (TREE_TYPE (TREE_TYPE (array_field)),
3658 substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))),
3659 TREE_CHAIN (TYPE_FIELDS (new_ptr)), new_ref));
3661 /* Make PTR the pointer to NEW_TYPE. */
3662 TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
3663 = TREE_TYPE (new_type) = ptr;
3665 /* And show the original pointer NEW_PTR to the debugger. This is the
3666 counterpart of the equivalent processing in gnat_pushdecl when the
3667 unconstrained array type is frozen after access types to it. Note
3668 that update_pointer_to can be invoked multiple times on the same
3669 couple of types because of the type variants. */
3671 && TREE_CODE (TYPE_NAME (ptr)) == TYPE_DECL
3672 && !DECL_ORIGINAL_TYPE (TYPE_NAME (ptr)))
3674 DECL_ORIGINAL_TYPE (TYPE_NAME (ptr)) = new_ptr;
3675 DECL_ARTIFICIAL (TYPE_NAME (ptr)) = 0;
3677 for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
3678 SET_TYPE_UNCONSTRAINED_ARRAY (var, new_type);
3680 /* Now handle updating the allocation record, what the thin pointer
3681 points to. Update all pointers from the old record into the new
3682 one, update the type of the array field, and recompute the size. */
3683 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);
3685 TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
3686 = TREE_TYPE (TREE_TYPE (array_field));
3688 /* The size recomputation needs to account for alignment constraints, so
3689 we let layout_type work it out. This will reset the field offsets to
3690 what they would be in a regular record, so we shift them back to what
3691 we want them to be for a thin pointer designated type afterwards. */
3692 DECL_SIZE (TYPE_FIELDS (new_obj_rec)) = 0;
3693 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))) = 0;
3694 TYPE_SIZE (new_obj_rec) = 0;
3695 layout_type (new_obj_rec);
3697 shift_unc_components_for_thin_pointers (new_obj_rec);
3699 /* We are done, at last. */
3700 rest_of_record_type_compilation (ptr);
3704 /* Convert EXPR, a pointer to a constrained array, into a pointer to an
3705 unconstrained one. This involves making or finding a template. */
3708 convert_to_fat_pointer (tree type, tree expr)
3710 tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
3711 tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
3712 tree etype = TREE_TYPE (expr);
3715 /* If EXPR is null, make a fat pointer that contains null pointers to the
3716 template and array. */
3717 if (integer_zerop (expr))
3719 gnat_build_constructor
3721 tree_cons (TYPE_FIELDS (type),
3722 convert (p_array_type, expr),
3723 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3724 convert (build_pointer_type (template_type),
3728 /* If EXPR is a thin pointer, make template and data from the record.. */
3729 else if (TYPE_IS_THIN_POINTER_P (etype))
3731 tree fields = TYPE_FIELDS (TREE_TYPE (etype));
3733 expr = save_expr (expr);
3734 if (TREE_CODE (expr) == ADDR_EXPR)
3735 expr = TREE_OPERAND (expr, 0);
3737 expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
3739 template_tree = build_component_ref (expr, NULL_TREE, fields, false);
3740 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
3741 build_component_ref (expr, NULL_TREE,
3742 TREE_CHAIN (fields), false));
3745 /* Otherwise, build the constructor for the template. */
3747 template_tree = build_template (template_type, TREE_TYPE (etype), expr);
3749 /* The final result is a constructor for the fat pointer.
3751 If EXPR is an argument of a foreign convention subprogram, the type it
3752 points to is directly the component type. In this case, the expression
3753 type may not match the corresponding FIELD_DECL type at this point, so we
3754 call "convert" here to fix that up if necessary. This type consistency is
3755 required, for instance because it ensures that possible later folding of
3756 COMPONENT_REFs against this constructor always yields something of the
3757 same type as the initial reference.
3759 Note that the call to "build_template" above is still fine because it
3760 will only refer to the provided TEMPLATE_TYPE in this case. */
3762 gnat_build_constructor
3764 tree_cons (TYPE_FIELDS (type),
3765 convert (p_array_type, expr),
3766 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3767 build_unary_op (ADDR_EXPR, NULL_TREE,
3772 /* Convert to a thin pointer type, TYPE. The only thing we know how to convert
3773 is something that is a fat pointer, so convert to it first if it EXPR
3774 is not already a fat pointer. */
3777 convert_to_thin_pointer (tree type, tree expr)
3779 if (!TYPE_IS_FAT_POINTER_P (TREE_TYPE (expr)))
3781 = convert_to_fat_pointer
3782 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
3784 /* We get the pointer to the data and use a NOP_EXPR to make it the
3786 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)),
3788 expr = build1 (NOP_EXPR, type, expr);
3793 /* Create an expression whose value is that of EXPR,
3794 converted to type TYPE. The TREE_TYPE of the value
3795 is always TYPE. This function implements all reasonable
3796 conversions; callers should filter out those that are
3797 not permitted by the language being compiled. */
3800 convert (tree type, tree expr)
3802 enum tree_code code = TREE_CODE (type);
3803 tree etype = TREE_TYPE (expr);
3804 enum tree_code ecode = TREE_CODE (etype);
3806 /* If EXPR is already the right type, we are done. */
3810 /* If both input and output have padding and are of variable size, do this
3811 as an unchecked conversion. Likewise if one is a mere variant of the
3812 other, so we avoid a pointless unpad/repad sequence. */
3813 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
3814 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
3815 && (!TREE_CONSTANT (TYPE_SIZE (type))
3816 || !TREE_CONSTANT (TYPE_SIZE (etype))
3817 || gnat_types_compatible_p (type, etype)
3818 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
3819 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
3822 /* If the output type has padding, convert to the inner type and make a
3823 constructor to build the record, unless a variable size is involved. */
3824 else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
3826 /* If we previously converted from another type and our type is
3827 of variable size, remove the conversion to avoid the need for
3828 variable-sized temporaries. Likewise for a conversion between
3829 original and packable version. */
3830 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3831 && (!TREE_CONSTANT (TYPE_SIZE (type))
3832 || (ecode == RECORD_TYPE
3833 && TYPE_NAME (etype)
3834 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
3835 expr = TREE_OPERAND (expr, 0);
3837 /* If we are just removing the padding from expr, convert the original
3838 object if we have variable size in order to avoid the need for some
3839 variable-sized temporaries. Likewise if the padding is a variant
3840 of the other, so we avoid a pointless unpad/repad sequence. */
3841 if (TREE_CODE (expr) == COMPONENT_REF
3842 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
3843 && (!TREE_CONSTANT (TYPE_SIZE (type))
3844 || gnat_types_compatible_p (type,
3845 TREE_TYPE (TREE_OPERAND (expr, 0)))
3846 || (ecode == RECORD_TYPE
3847 && TYPE_NAME (etype)
3848 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
3849 return convert (type, TREE_OPERAND (expr, 0));
3851 /* If the inner type is of self-referential size and the expression type
3852 is a record, do this as an unchecked conversion. But first pad the
3853 expression if possible to have the same size on both sides. */
3854 if (TREE_CODE (etype) == RECORD_TYPE
3855 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
3857 if (TREE_CONSTANT (TYPE_SIZE (etype)))
3858 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
3859 false, false, false, true), expr);
3860 return unchecked_convert (type, expr, false);
3863 /* If we are converting between array types with variable size, do the
3864 final conversion as an unchecked conversion, again to avoid the need
3865 for some variable-sized temporaries. If valid, this conversion is
3866 very likely purely technical and without real effects. */
3867 if (TREE_CODE (etype) == ARRAY_TYPE
3868 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
3869 && !TREE_CONSTANT (TYPE_SIZE (etype))
3870 && !TREE_CONSTANT (TYPE_SIZE (type)))
3871 return unchecked_convert (type,
3872 convert (TREE_TYPE (TYPE_FIELDS (type)),
3877 gnat_build_constructor (type,
3878 tree_cons (TYPE_FIELDS (type),
3880 (TYPE_FIELDS (type)),
3885 /* If the input type has padding, remove it and convert to the output type.
3886 The conditions ordering is arranged to ensure that the output type is not
3887 a padding type here, as it is not clear whether the conversion would
3888 always be correct if this was to happen. */
3889 else if (ecode == RECORD_TYPE && TYPE_PADDING_P (etype))
3893 /* If we have just converted to this padded type, just get the
3894 inner expression. */
3895 if (TREE_CODE (expr) == CONSTRUCTOR
3896 && !VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (expr))
3897 && VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->index
3898 == TYPE_FIELDS (etype))
3900 = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->value;
3902 /* Otherwise, build an explicit component reference. */
3905 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
3907 return convert (type, unpadded);
3910 /* If the input is a biased type, adjust first. */
3911 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
3912 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
3913 fold_convert (TREE_TYPE (etype),
3915 TYPE_MIN_VALUE (etype)));
3917 /* If the input is a justified modular type, we need to extract the actual
3918 object before converting it to any other type with the exceptions of an
3919 unconstrained array or of a mere type variant. It is useful to avoid the
3920 extraction and conversion in the type variant case because it could end
3921 up replacing a VAR_DECL expr by a constructor and we might be about the
3922 take the address of the result. */
3923 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
3924 && code != UNCONSTRAINED_ARRAY_TYPE
3925 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
3926 return convert (type, build_component_ref (expr, NULL_TREE,
3927 TYPE_FIELDS (etype), false));
3929 /* If converting to a type that contains a template, convert to the data
3930 type and then build the template. */
3931 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
3933 tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));
3935 /* If the source already has a template, get a reference to the
3936 associated array only, as we are going to rebuild a template
3937 for the target type anyway. */
3938 expr = maybe_unconstrained_array (expr);
3941 gnat_build_constructor
3943 tree_cons (TYPE_FIELDS (type),
3944 build_template (TREE_TYPE (TYPE_FIELDS (type)),
3945 obj_type, NULL_TREE),
3946 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3947 convert (obj_type, expr), NULL_TREE)));
3950 /* There are some special cases of expressions that we process
3952 switch (TREE_CODE (expr))
3958 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
3959 conversion in gnat_expand_expr. NULL_EXPR does not represent
3960 and actual value, so no conversion is needed. */
3961 expr = copy_node (expr);
3962 TREE_TYPE (expr) = type;
3966 /* If we are converting a STRING_CST to another constrained array type,
3967 just make a new one in the proper type. */
3968 if (code == ecode && AGGREGATE_TYPE_P (etype)
3969 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
3970 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
3972 expr = copy_node (expr);
3973 TREE_TYPE (expr) = type;
3979 /* If we are converting a VECTOR_CST to a mere variant type, just make
3980 a new one in the proper type. */
3981 if (code == ecode && gnat_types_compatible_p (type, etype))
3983 expr = copy_node (expr);
3984 TREE_TYPE (expr) = type;
3989 /* If we are converting a CONSTRUCTOR to a mere variant type, just make
3990 a new one in the proper type. */
3991 if (code == ecode && gnat_types_compatible_p (type, etype))
3993 expr = copy_node (expr);
3994 TREE_TYPE (expr) = type;
3998 /* Likewise for a conversion between original and packable version, but
3999 we have to work harder in order to preserve type consistency. */
4001 && code == RECORD_TYPE
4002 && TYPE_NAME (type) == TYPE_NAME (etype))
4004 VEC(constructor_elt,gc) *e = CONSTRUCTOR_ELTS (expr);
4005 unsigned HOST_WIDE_INT len = VEC_length (constructor_elt, e);
4006 VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, len);
4007 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
4008 unsigned HOST_WIDE_INT idx;
4011 /* Whether we need to clear TREE_CONSTANT et al. on the output
4012 constructor when we convert in place. */
4013 bool clear_constant = false;
4015 FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
4017 constructor_elt *elt = VEC_quick_push (constructor_elt, v, NULL);
4018 /* We expect only simple constructors. Otherwise, punt. */
4019 if (!(index == efield || index == DECL_ORIGINAL_FIELD (efield)))
4022 elt->value = convert (TREE_TYPE (field), value);
4024 /* If packing has made this field a bitfield and the input
4025 value couldn't be emitted statically any more, we need to
4026 clear TREE_CONSTANT on our output. */
4027 if (!clear_constant && TREE_CONSTANT (expr)
4028 && !CONSTRUCTOR_BITFIELD_P (efield)
4029 && CONSTRUCTOR_BITFIELD_P (field)
4030 && !initializer_constant_valid_for_bitfield_p (value))
4031 clear_constant = true;
4033 efield = TREE_CHAIN (efield);
4034 field = TREE_CHAIN (field);
4037 /* If we have been able to match and convert all the input fields
4038 to their output type, convert in place now. We'll fallback to a
4039 view conversion downstream otherwise. */
4042 expr = copy_node (expr);
4043 TREE_TYPE (expr) = type;
4044 CONSTRUCTOR_ELTS (expr) = v;
4046 TREE_CONSTANT (expr) = TREE_STATIC (expr) = false;
4051 /* Likewise for a conversion between array type and vector type with a
4052 compatible representative array. */
4053 else if (code == VECTOR_TYPE
4054 && ecode == ARRAY_TYPE
4055 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4058 VEC(constructor_elt,gc) *e = CONSTRUCTOR_ELTS (expr);
4059 unsigned HOST_WIDE_INT len = VEC_length (constructor_elt, e);
4060 VEC(constructor_elt,gc) *v;
4061 unsigned HOST_WIDE_INT ix;
4064 /* Build a VECTOR_CST from a *constant* array constructor. */
4065 if (TREE_CONSTANT (expr))
4067 bool constant_p = true;
4069 /* Iterate through elements and check if all constructor
4070 elements are *_CSTs. */
4071 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4072 if (!CONSTANT_CLASS_P (value))
4079 return build_vector_from_ctor (type,
4080 CONSTRUCTOR_ELTS (expr));
4083 /* Otherwise, build a regular vector constructor. */
4084 v = VEC_alloc (constructor_elt, gc, len);
4085 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4087 constructor_elt *elt = VEC_quick_push (constructor_elt, v, NULL);
4088 elt->index = NULL_TREE;
4091 expr = copy_node (expr);
4092 TREE_TYPE (expr) = type;
4093 CONSTRUCTOR_ELTS (expr) = v;
4098 case UNCONSTRAINED_ARRAY_REF:
4099 /* Convert this to the type of the inner array by getting the address of
4100 the array from the template. */
4101 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
4102 build_component_ref (TREE_OPERAND (expr, 0),
4103 get_identifier ("P_ARRAY"),
4105 etype = TREE_TYPE (expr);
4106 ecode = TREE_CODE (etype);
4109 case VIEW_CONVERT_EXPR:
4111 /* GCC 4.x is very sensitive to type consistency overall, and view
4112 conversions thus are very frequent. Even though just "convert"ing
4113 the inner operand to the output type is fine in most cases, it
4114 might expose unexpected input/output type mismatches in special
4115 circumstances so we avoid such recursive calls when we can. */
4116 tree op0 = TREE_OPERAND (expr, 0);
4118 /* If we are converting back to the original type, we can just
4119 lift the input conversion. This is a common occurrence with
4120 switches back-and-forth amongst type variants. */
4121 if (type == TREE_TYPE (op0))
4124 /* Otherwise, if we're converting between two aggregate or vector
4125 types, we might be allowed to substitute the VIEW_CONVERT_EXPR
4126 target type in place or to just convert the inner expression. */
4127 if ((AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
4128 || (VECTOR_TYPE_P (type) && VECTOR_TYPE_P (etype)))
4130 /* If we are converting between mere variants, we can just
4131 substitute the VIEW_CONVERT_EXPR in place. */
4132 if (gnat_types_compatible_p (type, etype))
4133 return build1 (VIEW_CONVERT_EXPR, type, op0);
4135 /* Otherwise, we may just bypass the input view conversion unless
4136 one of the types is a fat pointer, which is handled by
4137 specialized code below which relies on exact type matching. */
4138 else if (!TYPE_IS_FAT_POINTER_P (type)
4139 && !TYPE_IS_FAT_POINTER_P (etype))
4140 return convert (type, op0);
4146 /* If both types are record types, just convert the pointer and
4147 make a new INDIRECT_REF.
4149 ??? Disable this for now since it causes problems with the
4150 code in build_binary_op for MODIFY_EXPR which wants to
4151 strip off conversions. But that code really is a mess and
4152 we need to do this a much better way some time. */
4154 && (TREE_CODE (type) == RECORD_TYPE
4155 || TREE_CODE (type) == UNION_TYPE)
4156 && (TREE_CODE (etype) == RECORD_TYPE
4157 || TREE_CODE (etype) == UNION_TYPE)
4158 && !TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
4159 return build_unary_op (INDIRECT_REF, NULL_TREE,
4160 convert (build_pointer_type (type),
4161 TREE_OPERAND (expr, 0)));
4168 /* Check for converting to a pointer to an unconstrained array. */
4169 if (TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
4170 return convert_to_fat_pointer (type, expr);
4172 /* If we are converting between two aggregate or vector types that are mere
4173 variants, just make a VIEW_CONVERT_EXPR. Likewise when we are converting
4174 to a vector type from its representative array type. */
4175 else if ((code == ecode
4176 && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
4177 && gnat_types_compatible_p (type, etype))
4178 || (code == VECTOR_TYPE
4179 && ecode == ARRAY_TYPE
4180 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4182 return build1 (VIEW_CONVERT_EXPR, type, expr);
4184 /* In all other cases of related types, make a NOP_EXPR. */
4185 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
4186 || (code == INTEGER_CST && ecode == INTEGER_CST
4187 && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
4188 return fold_convert (type, expr);
4193 return fold_build1 (CONVERT_EXPR, type, expr);
4196 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4197 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4198 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4199 return unchecked_convert (type, expr, false);
4200 else if (TYPE_BIASED_REPRESENTATION_P (type))
4201 return fold_convert (type,
4202 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4203 convert (TREE_TYPE (type), expr),
4204 TYPE_MIN_VALUE (type)));
4206 /* ... fall through ... */
4210 /* If we are converting an additive expression to an integer type
4211 with lower precision, be wary of the optimization that can be
4212 applied by convert_to_integer. There are 2 problematic cases:
4213 - if the first operand was originally of a biased type,
4214 because we could be recursively called to convert it
4215 to an intermediate type and thus rematerialize the
4216 additive operator endlessly,
4217 - if the expression contains a placeholder, because an
4218 intermediate conversion that changes the sign could
4219 be inserted and thus introduce an artificial overflow
4220 at compile time when the placeholder is substituted. */
4221 if (code == INTEGER_TYPE
4222 && ecode == INTEGER_TYPE
4223 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4224 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4226 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4228 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4229 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4230 || CONTAINS_PLACEHOLDER_P (expr))
4231 return build1 (NOP_EXPR, type, expr);
4234 return fold (convert_to_integer (type, expr));
4237 case REFERENCE_TYPE:
4238 /* If converting between two pointers to records denoting
4239 both a template and type, adjust if needed to account
4240 for any differing offsets, since one might be negative. */
4241 if (TYPE_IS_THIN_POINTER_P (etype) && TYPE_IS_THIN_POINTER_P (type))
4244 = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
4245 bit_position (TYPE_FIELDS (TREE_TYPE (type))));
4246 tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
4247 sbitsize_int (BITS_PER_UNIT));
4249 expr = build1 (NOP_EXPR, type, expr);
4250 TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
4251 if (integer_zerop (byte_diff))
4254 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
4255 fold (convert (sizetype, byte_diff)));
4258 /* If converting to a thin pointer, handle specially. */
4259 if (TYPE_IS_THIN_POINTER_P (type)
4260 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
4261 return convert_to_thin_pointer (type, expr);
4263 /* If converting fat pointer to normal pointer, get the pointer to the
4264 array and then convert it. */
4265 else if (TYPE_IS_FAT_POINTER_P (etype))
4266 expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
4269 return fold (convert_to_pointer (type, expr));
4272 return fold (convert_to_real (type, expr));
4275 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
4277 gnat_build_constructor
4278 (type, tree_cons (TYPE_FIELDS (type),
4279 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
4282 /* ... fall through ... */
4285 /* In these cases, assume the front-end has validated the conversion.
4286 If the conversion is valid, it will be a bit-wise conversion, so
4287 it can be viewed as an unchecked conversion. */
4288 return unchecked_convert (type, expr, false);
4291 /* This is a either a conversion between a tagged type and some
4292 subtype, which we have to mark as a UNION_TYPE because of
4293 overlapping fields or a conversion of an Unchecked_Union. */
4294 return unchecked_convert (type, expr, false);
4296 case UNCONSTRAINED_ARRAY_TYPE:
4297 /* If the input is a VECTOR_TYPE, convert to the representative
4298 array type first. */
4299 if (ecode == VECTOR_TYPE)
4301 expr = convert (TYPE_REPRESENTATIVE_ARRAY (etype), expr);
4302 etype = TREE_TYPE (expr);
4303 ecode = TREE_CODE (etype);
4306 /* If EXPR is a constrained array, take its address, convert it to a
4307 fat pointer, and then dereference it. Likewise if EXPR is a
4308 record containing both a template and a constrained array.
4309 Note that a record representing a justified modular type
4310 always represents a packed constrained array. */
4311 if (ecode == ARRAY_TYPE
4312 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
4313 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
4314 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
4317 (INDIRECT_REF, NULL_TREE,
4318 convert_to_fat_pointer (TREE_TYPE (type),
4319 build_unary_op (ADDR_EXPR,
4322 /* Do something very similar for converting one unconstrained
4323 array to another. */
4324 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
4326 build_unary_op (INDIRECT_REF, NULL_TREE,
4327 convert (TREE_TYPE (type),
4328 build_unary_op (ADDR_EXPR,
4334 return fold (convert_to_complex (type, expr));
4341 /* Remove all conversions that are done in EXP. This includes converting
4342 from a padded type or to a justified modular type. If TRUE_ADDRESS
4343 is true, always return the address of the containing object even if
4344 the address is not bit-aligned. */
4347 remove_conversions (tree exp, bool true_address)
4349 switch (TREE_CODE (exp))
4353 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4354 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
4356 remove_conversions (VEC_index (constructor_elt,
4357 CONSTRUCTOR_ELTS (exp), 0)->value,
4362 if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
4363 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4366 case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
4368 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4377 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
4378 refers to the underlying array. If its type has TYPE_CONTAINS_TEMPLATE_P,
4379 likewise return an expression pointing to the underlying array. */
4382 maybe_unconstrained_array (tree exp)
4384 enum tree_code code = TREE_CODE (exp);
4387 switch (TREE_CODE (TREE_TYPE (exp)))
4389 case UNCONSTRAINED_ARRAY_TYPE:
4390 if (code == UNCONSTRAINED_ARRAY_REF)
4393 = build_unary_op (INDIRECT_REF, NULL_TREE,
4394 build_component_ref (TREE_OPERAND (exp, 0),
4395 get_identifier ("P_ARRAY"),
4397 TREE_READONLY (new_exp) = TREE_STATIC (new_exp)
4398 = TREE_READONLY (exp);
4402 else if (code == NULL_EXPR)
4403 return build1 (NULL_EXPR,
4404 TREE_TYPE (TREE_TYPE (TYPE_FIELDS
4405 (TREE_TYPE (TREE_TYPE (exp))))),
4406 TREE_OPERAND (exp, 0));
4409 /* If this is a padded type, convert to the unpadded type and see if
4410 it contains a template. */
4411 if (TYPE_PADDING_P (TREE_TYPE (exp)))
4413 new_exp = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp))), exp);
4414 if (TREE_CODE (TREE_TYPE (new_exp)) == RECORD_TYPE
4415 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (new_exp)))
4417 build_component_ref (new_exp, NULL_TREE,
4419 (TYPE_FIELDS (TREE_TYPE (new_exp))),
4422 else if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
4424 build_component_ref (exp, NULL_TREE,
4425 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))), 0);
4435 /* If EXP's type is a VECTOR_TYPE, return EXP converted to the associated
4436 TYPE_REPRESENTATIVE_ARRAY. */
4439 maybe_vector_array (tree exp)
4441 tree etype = TREE_TYPE (exp);
4443 if (VECTOR_TYPE_P (etype))
4444 exp = convert (TYPE_REPRESENTATIVE_ARRAY (etype), exp);
4449 /* Return true if EXPR is an expression that can be folded as an operand
4450 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
4453 can_fold_for_view_convert_p (tree expr)
4457 /* The folder will fold NOP_EXPRs between integral types with the same
4458 precision (in the middle-end's sense). We cannot allow it if the
4459 types don't have the same precision in the Ada sense as well. */
4460 if (TREE_CODE (expr) != NOP_EXPR)
4463 t1 = TREE_TYPE (expr);
4464 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
4466 /* Defer to the folder for non-integral conversions. */
4467 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
4470 /* Only fold conversions that preserve both precisions. */
4471 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
4472 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
4478 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
4479 If NOTRUNC_P is true, truncation operations should be suppressed.
4481 Special care is required with (source or target) integral types whose
4482 precision is not equal to their size, to make sure we fetch or assign
4483 the value bits whose location might depend on the endianness, e.g.
4485 Rmsize : constant := 8;
4486 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
4488 type Bit_Array is array (1 .. Rmsize) of Boolean;
4489 pragma Pack (Bit_Array);
4491 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
4493 Value : Int := 2#1000_0001#;
4494 Vbits : Bit_Array := To_Bit_Array (Value);
4496 we expect the 8 bits at Vbits'Address to always contain Value, while
4497 their original location depends on the endianness, at Value'Address
4498 on a little-endian architecture but not on a big-endian one. */
4501 unchecked_convert (tree type, tree expr, bool notrunc_p)
4503 tree etype = TREE_TYPE (expr);
4505 /* If the expression is already the right type, we are done. */
4509 /* If both types types are integral just do a normal conversion.
4510 Likewise for a conversion to an unconstrained array. */
4511 if ((((INTEGRAL_TYPE_P (type)
4512 && !(TREE_CODE (type) == INTEGER_TYPE
4513 && TYPE_VAX_FLOATING_POINT_P (type)))
4514 || (POINTER_TYPE_P (type) && ! TYPE_IS_THIN_POINTER_P (type))
4515 || (TREE_CODE (type) == RECORD_TYPE
4516 && TYPE_JUSTIFIED_MODULAR_P (type)))
4517 && ((INTEGRAL_TYPE_P (etype)
4518 && !(TREE_CODE (etype) == INTEGER_TYPE
4519 && TYPE_VAX_FLOATING_POINT_P (etype)))
4520 || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype))
4521 || (TREE_CODE (etype) == RECORD_TYPE
4522 && TYPE_JUSTIFIED_MODULAR_P (etype))))
4523 || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
4525 if (TREE_CODE (etype) == INTEGER_TYPE
4526 && TYPE_BIASED_REPRESENTATION_P (etype))
4528 tree ntype = copy_type (etype);
4529 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
4530 TYPE_MAIN_VARIANT (ntype) = ntype;
4531 expr = build1 (NOP_EXPR, ntype, expr);
4534 if (TREE_CODE (type) == INTEGER_TYPE
4535 && TYPE_BIASED_REPRESENTATION_P (type))
4537 tree rtype = copy_type (type);
4538 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
4539 TYPE_MAIN_VARIANT (rtype) = rtype;
4540 expr = convert (rtype, expr);
4541 expr = build1 (NOP_EXPR, type, expr);
4544 expr = convert (type, expr);
4547 /* If we are converting to an integral type whose precision is not equal
4548 to its size, first unchecked convert to a record that contains an
4549 object of the output type. Then extract the field. */
4550 else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
4551 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4552 GET_MODE_BITSIZE (TYPE_MODE (type))))
4554 tree rec_type = make_node (RECORD_TYPE);
4555 tree field = create_field_decl (get_identifier ("OBJ"), type,
4556 rec_type, 1, 0, 0, 0);
4558 TYPE_FIELDS (rec_type) = field;
4559 layout_type (rec_type);
4561 expr = unchecked_convert (rec_type, expr, notrunc_p);
4562 expr = build_component_ref (expr, NULL_TREE, field, 0);
4565 /* Similarly if we are converting from an integral type whose precision
4566 is not equal to its size. */
4567 else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype)
4568 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
4569 GET_MODE_BITSIZE (TYPE_MODE (etype))))
4571 tree rec_type = make_node (RECORD_TYPE);
4573 = create_field_decl (get_identifier ("OBJ"), etype, rec_type,
4576 TYPE_FIELDS (rec_type) = field;
4577 layout_type (rec_type);
4579 expr = gnat_build_constructor (rec_type, build_tree_list (field, expr));
4580 expr = unchecked_convert (type, expr, notrunc_p);
4583 /* We have a special case when we are converting between two unconstrained
4584 array types. In that case, take the address, convert the fat pointer
4585 types, and dereference. */
4586 else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
4587 && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
4588 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
4589 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
4590 build_unary_op (ADDR_EXPR, NULL_TREE,
4593 /* Another special case is when we are converting to a vector type from its
4594 representative array type; this a regular conversion. */
4595 else if (TREE_CODE (type) == VECTOR_TYPE
4596 && TREE_CODE (etype) == ARRAY_TYPE
4597 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4599 expr = convert (type, expr);
4603 expr = maybe_unconstrained_array (expr);
4604 etype = TREE_TYPE (expr);
4605 if (can_fold_for_view_convert_p (expr))
4606 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
4608 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
4611 /* If the result is an integral type whose precision is not equal to its
4612 size, sign- or zero-extend the result. We need not do this if the input
4613 is an integral type of the same precision and signedness or if the output
4614 is a biased type or if both the input and output are unsigned. */
4616 && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
4617 && !(TREE_CODE (type) == INTEGER_TYPE
4618 && TYPE_BIASED_REPRESENTATION_P (type))
4619 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4620 GET_MODE_BITSIZE (TYPE_MODE (type)))
4621 && !(INTEGRAL_TYPE_P (etype)
4622 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
4623 && operand_equal_p (TYPE_RM_SIZE (type),
4624 (TYPE_RM_SIZE (etype) != 0
4625 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
4627 && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
4629 tree base_type = gnat_type_for_mode (TYPE_MODE (type),
4630 TYPE_UNSIGNED (type));
4632 = convert (base_type,
4633 size_binop (MINUS_EXPR,
4635 (GET_MODE_BITSIZE (TYPE_MODE (type))),
4636 TYPE_RM_SIZE (type)));
4639 build_binary_op (RSHIFT_EXPR, base_type,
4640 build_binary_op (LSHIFT_EXPR, base_type,
4641 convert (base_type, expr),
4646 /* An unchecked conversion should never raise Constraint_Error. The code
4647 below assumes that GCC's conversion routines overflow the same way that
4648 the underlying hardware does. This is probably true. In the rare case
4649 when it is false, we can rely on the fact that such conversions are
4650 erroneous anyway. */
4651 if (TREE_CODE (expr) == INTEGER_CST)
4652 TREE_OVERFLOW (expr) = 0;
4654 /* If the sizes of the types differ and this is an VIEW_CONVERT_EXPR,
4655 show no longer constant. */
4656 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4657 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
4659 TREE_CONSTANT (expr) = 0;
4664 /* Return the appropriate GCC tree code for the specified GNAT_TYPE,
4665 the latter being a record type as predicated by Is_Record_Type. */
4668 tree_code_for_record_type (Entity_Id gnat_type)
4670 Node_Id component_list
4671 = Component_List (Type_Definition
4673 (Implementation_Base_Type (gnat_type))));
4676 /* Make this a UNION_TYPE unless it's either not an Unchecked_Union or
4677 we have a non-discriminant field outside a variant. In either case,
4678 it's a RECORD_TYPE. */
4680 if (!Is_Unchecked_Union (gnat_type))
4683 for (component = First_Non_Pragma (Component_Items (component_list));
4684 Present (component);
4685 component = Next_Non_Pragma (component))
4686 if (Ekind (Defining_Entity (component)) == E_Component)
4692 /* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
4693 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
4694 according to the presence of an alignment clause on the type or, if it
4695 is an array, on the component type. */
4698 is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
4700 gnat_type = Underlying_Type (gnat_type);
4702 *align_clause = Present (Alignment_Clause (gnat_type));
4704 if (Is_Array_Type (gnat_type))
4706 gnat_type = Underlying_Type (Component_Type (gnat_type));
4707 if (Present (Alignment_Clause (gnat_type)))
4708 *align_clause = true;
4711 if (!Is_Floating_Point_Type (gnat_type))
4714 if (UI_To_Int (Esize (gnat_type)) != 64)
4720 /* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
4721 size is greater or equal to 64 bits, or an array of such a type. Set
4722 ALIGN_CLAUSE according to the presence of an alignment clause on the
4723 type or, if it is an array, on the component type. */
4726 is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
4728 gnat_type = Underlying_Type (gnat_type);
4730 *align_clause = Present (Alignment_Clause (gnat_type));
4732 if (Is_Array_Type (gnat_type))
4734 gnat_type = Underlying_Type (Component_Type (gnat_type));
4735 if (Present (Alignment_Clause (gnat_type)))
4736 *align_clause = true;
4739 if (!Is_Scalar_Type (gnat_type))
4742 if (UI_To_Int (Esize (gnat_type)) < 64)
4748 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
4749 component of an aggregate type. */
4752 type_for_nonaliased_component_p (tree gnu_type)
4754 /* If the type is passed by reference, we may have pointers to the
4755 component so it cannot be made non-aliased. */
4756 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
4759 /* We used to say that any component of aggregate type is aliased
4760 because the front-end may take 'Reference of it. The front-end
4761 has been enhanced in the meantime so as to use a renaming instead
4762 in most cases, but the back-end can probably take the address of
4763 such a component too so we go for the conservative stance.
4765 For instance, we might need the address of any array type, even
4766 if normally passed by copy, to construct a fat pointer if the
4767 component is used as an actual for an unconstrained formal.
4769 Likewise for record types: even if a specific record subtype is
4770 passed by copy, the parent type might be passed by ref (e.g. if
4771 it's of variable size) and we might take the address of a child
4772 component to pass to a parent formal. We have no way to check
4773 for such conditions here. */
4774 if (AGGREGATE_TYPE_P (gnu_type))
4780 /* Perform final processing on global variables. */
4783 gnat_write_global_declarations (void)
4785 /* Proceed to optimize and emit assembly.
4786 FIXME: shouldn't be the front end's responsibility to call this. */
4787 cgraph_finalize_compilation_unit ();
4789 /* Emit debug info for all global declarations. */
4790 emit_debug_global_declarations (VEC_address (tree, global_decls),
4791 VEC_length (tree, global_decls));
4794 /* ************************************************************************
4795 * * GCC builtins support *
4796 * ************************************************************************ */
4798 /* The general scheme is fairly simple:
4800 For each builtin function/type to be declared, gnat_install_builtins calls
4801 internal facilities which eventually get to gnat_push_decl, which in turn
4802 tracks the so declared builtin function decls in the 'builtin_decls' global
4803 datastructure. When an Intrinsic subprogram declaration is processed, we
4804 search this global datastructure to retrieve the associated BUILT_IN DECL
4807 /* Search the chain of currently available builtin declarations for a node
4808 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
4809 found, if any, or NULL_TREE otherwise. */
4811 builtin_decl_for (tree name)
4816 for (i = 0; VEC_iterate(tree, builtin_decls, i, decl); i++)
4817 if (DECL_NAME (decl) == name)
4823 /* The code below eventually exposes gnat_install_builtins, which declares
4824 the builtin types and functions we might need, either internally or as
4825 user accessible facilities.
4827 ??? This is a first implementation shot, still in rough shape. It is
4828 heavily inspired from the "C" family implementation, with chunks copied
4829 verbatim from there.
4831 Two obvious TODO candidates are
4832 o Use a more efficient name/decl mapping scheme
4833 o Devise a middle-end infrastructure to avoid having to copy
4834 pieces between front-ends. */
4836 /* ----------------------------------------------------------------------- *
4837 * BUILTIN ELEMENTARY TYPES *
4838 * ----------------------------------------------------------------------- */
4840 /* Standard data types to be used in builtin argument declarations. */
4844 CTI_SIGNED_SIZE_TYPE, /* For format checking only. */
4846 CTI_CONST_STRING_TYPE,
4851 static tree c_global_trees[CTI_MAX];
4853 #define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
4854 #define string_type_node c_global_trees[CTI_STRING_TYPE]
4855 #define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
4857 /* ??? In addition some attribute handlers, we currently don't support a
4858 (small) number of builtin-types, which in turns inhibits support for a
4859 number of builtin functions. */
4860 #define wint_type_node void_type_node
4861 #define intmax_type_node void_type_node
4862 #define uintmax_type_node void_type_node
4864 /* Build the void_list_node (void_type_node having been created). */
4867 build_void_list_node (void)
4869 tree t = build_tree_list (NULL_TREE, void_type_node);
4873 /* Used to help initialize the builtin-types.def table. When a type of
4874 the correct size doesn't exist, use error_mark_node instead of NULL.
4875 The later results in segfaults even when a decl using the type doesn't
4879 builtin_type_for_size (int size, bool unsignedp)
4881 tree type = lang_hooks.types.type_for_size (size, unsignedp);
4882 return type ? type : error_mark_node;
4885 /* Build/push the elementary type decls that builtin functions/types
4889 install_builtin_elementary_types (void)
4891 signed_size_type_node = size_type_node;
4892 pid_type_node = integer_type_node;
4893 void_list_node = build_void_list_node ();
4895 string_type_node = build_pointer_type (char_type_node);
4896 const_string_type_node
4897 = build_pointer_type (build_qualified_type
4898 (char_type_node, TYPE_QUAL_CONST));
4901 /* ----------------------------------------------------------------------- *
4902 * BUILTIN FUNCTION TYPES *
4903 * ----------------------------------------------------------------------- */
4905 /* Now, builtin function types per se. */
4909 #define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
4910 #define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
4911 #define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
4912 #define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
4913 #define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
4914 #define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
4915 #define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
4916 #define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6) NAME,
4917 #define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7) NAME,
4918 #define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
4919 #define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
4920 #define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
4921 #define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
4922 #define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
4923 #define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG6) \
4925 #define DEF_POINTER_TYPE(NAME, TYPE) NAME,
4926 #include "builtin-types.def"
4927 #undef DEF_PRIMITIVE_TYPE
4928 #undef DEF_FUNCTION_TYPE_0
4929 #undef DEF_FUNCTION_TYPE_1
4930 #undef DEF_FUNCTION_TYPE_2
4931 #undef DEF_FUNCTION_TYPE_3
4932 #undef DEF_FUNCTION_TYPE_4
4933 #undef DEF_FUNCTION_TYPE_5
4934 #undef DEF_FUNCTION_TYPE_6
4935 #undef DEF_FUNCTION_TYPE_7
4936 #undef DEF_FUNCTION_TYPE_VAR_0
4937 #undef DEF_FUNCTION_TYPE_VAR_1
4938 #undef DEF_FUNCTION_TYPE_VAR_2
4939 #undef DEF_FUNCTION_TYPE_VAR_3
4940 #undef DEF_FUNCTION_TYPE_VAR_4
4941 #undef DEF_FUNCTION_TYPE_VAR_5
4942 #undef DEF_POINTER_TYPE
4946 typedef enum c_builtin_type builtin_type;
4948 /* A temporary array used in communication with def_fn_type. */
4949 static GTY(()) tree builtin_types[(int) BT_LAST + 1];
4951 /* A helper function for install_builtin_types. Build function type
4952 for DEF with return type RET and N arguments. If VAR is true, then the
4953 function should be variadic after those N arguments.
4955 Takes special care not to ICE if any of the types involved are
4956 error_mark_node, which indicates that said type is not in fact available
4957 (see builtin_type_for_size). In which case the function type as a whole
4958 should be error_mark_node. */
4961 def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
4963 tree args = NULL, t;
4968 for (i = 0; i < n; ++i)
4970 builtin_type a = (builtin_type) va_arg (list, int);
4971 t = builtin_types[a];
4972 if (t == error_mark_node)
4974 args = tree_cons (NULL_TREE, t, args);
4978 args = nreverse (args);
4980 args = chainon (args, void_list_node);
4982 t = builtin_types[ret];
4983 if (t == error_mark_node)
4985 t = build_function_type (t, args);
4988 builtin_types[def] = t;
4991 /* Build the builtin function types and install them in the builtin_types
4992 array for later use in builtin function decls. */
4995 install_builtin_function_types (void)
4997 tree va_list_ref_type_node;
4998 tree va_list_arg_type_node;
5000 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
5002 va_list_arg_type_node = va_list_ref_type_node =
5003 build_pointer_type (TREE_TYPE (va_list_type_node));
5007 va_list_arg_type_node = va_list_type_node;
5008 va_list_ref_type_node = build_reference_type (va_list_type_node);
5011 #define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
5012 builtin_types[ENUM] = VALUE;
5013 #define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
5014 def_fn_type (ENUM, RETURN, 0, 0);
5015 #define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
5016 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
5017 #define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
5018 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
5019 #define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5020 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
5021 #define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5022 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
5023 #define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5024 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5025 #define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5027 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
5028 #define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5030 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
5031 #define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
5032 def_fn_type (ENUM, RETURN, 1, 0);
5033 #define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
5034 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
5035 #define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
5036 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
5037 #define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5038 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
5039 #define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5040 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
5041 #define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5042 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5043 #define DEF_POINTER_TYPE(ENUM, TYPE) \
5044 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
5046 #include "builtin-types.def"
5048 #undef DEF_PRIMITIVE_TYPE
5049 #undef DEF_FUNCTION_TYPE_1
5050 #undef DEF_FUNCTION_TYPE_2
5051 #undef DEF_FUNCTION_TYPE_3
5052 #undef DEF_FUNCTION_TYPE_4
5053 #undef DEF_FUNCTION_TYPE_5
5054 #undef DEF_FUNCTION_TYPE_6
5055 #undef DEF_FUNCTION_TYPE_VAR_0
5056 #undef DEF_FUNCTION_TYPE_VAR_1
5057 #undef DEF_FUNCTION_TYPE_VAR_2
5058 #undef DEF_FUNCTION_TYPE_VAR_3
5059 #undef DEF_FUNCTION_TYPE_VAR_4
5060 #undef DEF_FUNCTION_TYPE_VAR_5
5061 #undef DEF_POINTER_TYPE
5062 builtin_types[(int) BT_LAST] = NULL_TREE;
5065 /* ----------------------------------------------------------------------- *
5066 * BUILTIN ATTRIBUTES *
5067 * ----------------------------------------------------------------------- */
5069 enum built_in_attribute
5071 #define DEF_ATTR_NULL_TREE(ENUM) ENUM,
5072 #define DEF_ATTR_INT(ENUM, VALUE) ENUM,
5073 #define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
5074 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
5075 #include "builtin-attrs.def"
5076 #undef DEF_ATTR_NULL_TREE
5078 #undef DEF_ATTR_IDENT
5079 #undef DEF_ATTR_TREE_LIST
5083 static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
5086 install_builtin_attributes (void)
5088 /* Fill in the built_in_attributes array. */
5089 #define DEF_ATTR_NULL_TREE(ENUM) \
5090 built_in_attributes[(int) ENUM] = NULL_TREE;
5091 #define DEF_ATTR_INT(ENUM, VALUE) \
5092 built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
5093 #define DEF_ATTR_IDENT(ENUM, STRING) \
5094 built_in_attributes[(int) ENUM] = get_identifier (STRING);
5095 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \
5096 built_in_attributes[(int) ENUM] \
5097 = tree_cons (built_in_attributes[(int) PURPOSE], \
5098 built_in_attributes[(int) VALUE], \
5099 built_in_attributes[(int) CHAIN]);
5100 #include "builtin-attrs.def"
5101 #undef DEF_ATTR_NULL_TREE
5103 #undef DEF_ATTR_IDENT
5104 #undef DEF_ATTR_TREE_LIST
5107 /* Handle a "const" attribute; arguments as in
5108 struct attribute_spec.handler. */
5111 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
5112 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5115 if (TREE_CODE (*node) == FUNCTION_DECL)
5116 TREE_READONLY (*node) = 1;
5118 *no_add_attrs = true;
5123 /* Handle a "nothrow" attribute; arguments as in
5124 struct attribute_spec.handler. */
5127 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
5128 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5131 if (TREE_CODE (*node) == FUNCTION_DECL)
5132 TREE_NOTHROW (*node) = 1;
5134 *no_add_attrs = true;
5139 /* Handle a "pure" attribute; arguments as in
5140 struct attribute_spec.handler. */
5143 handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5144 int ARG_UNUSED (flags), bool *no_add_attrs)
5146 if (TREE_CODE (*node) == FUNCTION_DECL)
5147 DECL_PURE_P (*node) = 1;
5148 /* ??? TODO: Support types. */
5151 warning (OPT_Wattributes, "%qs attribute ignored",
5152 IDENTIFIER_POINTER (name));
5153 *no_add_attrs = true;
5159 /* Handle a "no vops" attribute; arguments as in
5160 struct attribute_spec.handler. */
5163 handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
5164 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5165 bool *ARG_UNUSED (no_add_attrs))
5167 gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
5168 DECL_IS_NOVOPS (*node) = 1;
5172 /* Helper for nonnull attribute handling; fetch the operand number
5173 from the attribute argument list. */
5176 get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
5178 /* Verify the arg number is a constant. */
5179 if (TREE_CODE (arg_num_expr) != INTEGER_CST
5180 || TREE_INT_CST_HIGH (arg_num_expr) != 0)
5183 *valp = TREE_INT_CST_LOW (arg_num_expr);
5187 /* Handle the "nonnull" attribute. */
5189 handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
5190 tree args, int ARG_UNUSED (flags),
5194 unsigned HOST_WIDE_INT attr_arg_num;
5196 /* If no arguments are specified, all pointer arguments should be
5197 non-null. Verify a full prototype is given so that the arguments
5198 will have the correct types when we actually check them later. */
5201 if (!TYPE_ARG_TYPES (type))
5203 error ("nonnull attribute without arguments on a non-prototype");
5204 *no_add_attrs = true;
5209 /* Argument list specified. Verify that each argument number references
5210 a pointer argument. */
5211 for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
5214 unsigned HOST_WIDE_INT arg_num = 0, ck_num;
5216 if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
5218 error ("nonnull argument has invalid operand number (argument %lu)",
5219 (unsigned long) attr_arg_num);
5220 *no_add_attrs = true;
5224 argument = TYPE_ARG_TYPES (type);
5227 for (ck_num = 1; ; ck_num++)
5229 if (!argument || ck_num == arg_num)
5231 argument = TREE_CHAIN (argument);
5235 || TREE_CODE (TREE_VALUE (argument)) == VOID_TYPE)
5237 error ("nonnull argument with out-of-range operand number (argument %lu, operand %lu)",
5238 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5239 *no_add_attrs = true;
5243 if (TREE_CODE (TREE_VALUE (argument)) != POINTER_TYPE)
5245 error ("nonnull argument references non-pointer operand (argument %lu, operand %lu)",
5246 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5247 *no_add_attrs = true;
5256 /* Handle a "sentinel" attribute. */
5259 handle_sentinel_attribute (tree *node, tree name, tree args,
5260 int ARG_UNUSED (flags), bool *no_add_attrs)
5262 tree params = TYPE_ARG_TYPES (*node);
5266 warning (OPT_Wattributes,
5267 "%qs attribute requires prototypes with named arguments",
5268 IDENTIFIER_POINTER (name));
5269 *no_add_attrs = true;
5273 while (TREE_CHAIN (params))
5274 params = TREE_CHAIN (params);
5276 if (VOID_TYPE_P (TREE_VALUE (params)))
5278 warning (OPT_Wattributes,
5279 "%qs attribute only applies to variadic functions",
5280 IDENTIFIER_POINTER (name));
5281 *no_add_attrs = true;
5287 tree position = TREE_VALUE (args);
5289 if (TREE_CODE (position) != INTEGER_CST)
5291 warning (0, "requested position is not an integer constant");
5292 *no_add_attrs = true;
5296 if (tree_int_cst_lt (position, integer_zero_node))
5298 warning (0, "requested position is less than zero");
5299 *no_add_attrs = true;
5307 /* Handle a "noreturn" attribute; arguments as in
5308 struct attribute_spec.handler. */
5311 handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5312 int ARG_UNUSED (flags), bool *no_add_attrs)
5314 tree type = TREE_TYPE (*node);
5316 /* See FIXME comment in c_common_attribute_table. */
5317 if (TREE_CODE (*node) == FUNCTION_DECL)
5318 TREE_THIS_VOLATILE (*node) = 1;
5319 else if (TREE_CODE (type) == POINTER_TYPE
5320 && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
5322 = build_pointer_type
5323 (build_type_variant (TREE_TYPE (type),
5324 TYPE_READONLY (TREE_TYPE (type)), 1));
5327 warning (OPT_Wattributes, "%qs attribute ignored",
5328 IDENTIFIER_POINTER (name));
5329 *no_add_attrs = true;
5335 /* Handle a "malloc" attribute; arguments as in
5336 struct attribute_spec.handler. */
5339 handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5340 int ARG_UNUSED (flags), bool *no_add_attrs)
5342 if (TREE_CODE (*node) == FUNCTION_DECL
5343 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
5344 DECL_IS_MALLOC (*node) = 1;
5347 warning (OPT_Wattributes, "%qs attribute ignored",
5348 IDENTIFIER_POINTER (name));
5349 *no_add_attrs = true;
5355 /* Fake handler for attributes we don't properly support. */
5358 fake_attribute_handler (tree * ARG_UNUSED (node),
5359 tree ARG_UNUSED (name),
5360 tree ARG_UNUSED (args),
5361 int ARG_UNUSED (flags),
5362 bool * ARG_UNUSED (no_add_attrs))
5367 /* Handle a "type_generic" attribute. */
5370 handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
5371 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5372 bool * ARG_UNUSED (no_add_attrs))
5376 /* Ensure we have a function type. */
5377 gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);
5379 params = TYPE_ARG_TYPES (*node);
5380 while (params && ! VOID_TYPE_P (TREE_VALUE (params)))
5381 params = TREE_CHAIN (params);
5383 /* Ensure we have a variadic function. */
5384 gcc_assert (!params);
5389 /* Handle a "vector_size" attribute; arguments as in
5390 struct attribute_spec.handler. */
5393 handle_vector_size_attribute (tree *node, tree name, tree args,
5394 int ARG_UNUSED (flags),
5397 unsigned HOST_WIDE_INT vecsize, nunits;
5398 enum machine_mode orig_mode;
5399 tree type = *node, new_type, size;
5401 *no_add_attrs = true;
5403 size = TREE_VALUE (args);
5405 if (!host_integerp (size, 1))
5407 warning (OPT_Wattributes, "%qs attribute ignored",
5408 IDENTIFIER_POINTER (name));
5412 /* Get the vector size (in bytes). */
5413 vecsize = tree_low_cst (size, 1);
5415 /* We need to provide for vector pointers, vector arrays, and
5416 functions returning vectors. For example:
5418 __attribute__((vector_size(16))) short *foo;
5420 In this case, the mode is SI, but the type being modified is
5421 HI, so we need to look further. */
5423 while (POINTER_TYPE_P (type)
5424 || TREE_CODE (type) == FUNCTION_TYPE
5425 || TREE_CODE (type) == METHOD_TYPE
5426 || TREE_CODE (type) == ARRAY_TYPE
5427 || TREE_CODE (type) == OFFSET_TYPE)
5428 type = TREE_TYPE (type);
5430 /* Get the mode of the type being modified. */
5431 orig_mode = TYPE_MODE (type);
5433 if ((!INTEGRAL_TYPE_P (type)
5434 && !SCALAR_FLOAT_TYPE_P (type)
5435 && !FIXED_POINT_TYPE_P (type))
5436 || (!SCALAR_FLOAT_MODE_P (orig_mode)
5437 && GET_MODE_CLASS (orig_mode) != MODE_INT
5438 && !ALL_SCALAR_FIXED_POINT_MODE_P (orig_mode))
5439 || !host_integerp (TYPE_SIZE_UNIT (type), 1)
5440 || TREE_CODE (type) == BOOLEAN_TYPE)
5442 error ("invalid vector type for attribute %qs",
5443 IDENTIFIER_POINTER (name));
5447 if (vecsize % tree_low_cst (TYPE_SIZE_UNIT (type), 1))
5449 error ("vector size not an integral multiple of component size");
5455 error ("zero vector size");
5459 /* Calculate how many units fit in the vector. */
5460 nunits = vecsize / tree_low_cst (TYPE_SIZE_UNIT (type), 1);
5461 if (nunits & (nunits - 1))
5463 error ("number of components of the vector not a power of two");
5467 new_type = build_vector_type (type, nunits);
5469 /* Build back pointers if needed. */
5470 *node = lang_hooks.types.reconstruct_complex_type (*node, new_type);
5475 /* Handle a "vector_type" attribute; arguments as in
5476 struct attribute_spec.handler. */
5479 handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5480 int ARG_UNUSED (flags),
5483 /* Vector representative type and size. */
5484 tree rep_type = *node;
5485 tree rep_size = TYPE_SIZE_UNIT (rep_type);
5488 /* Vector size in bytes and number of units. */
5489 unsigned HOST_WIDE_INT vec_bytes, vec_units;
5491 /* Vector element type and mode. */
5493 enum machine_mode elem_mode;
5495 *no_add_attrs = true;
5497 /* Get the representative array type, possibly nested within a
5498 padding record e.g. for alignment purposes. */
5500 if (TYPE_IS_PADDING_P (rep_type))
5501 rep_type = TREE_TYPE (TYPE_FIELDS (rep_type));
5503 if (TREE_CODE (rep_type) != ARRAY_TYPE)
5505 error ("attribute %qs applies to array types only",
5506 IDENTIFIER_POINTER (name));
5510 /* Silently punt on variable sizes. We can't make vector types for them,
5511 need to ignore them on front-end generated subtypes of unconstrained
5512 bases, and this attribute is for binding implementors, not end-users, so
5513 we should never get there from legitimate explicit uses. */
5515 if (!host_integerp (rep_size, 1))
5518 /* Get the element type/mode and check this is something we know
5519 how to make vectors of. */
5521 elem_type = TREE_TYPE (rep_type);
5522 elem_mode = TYPE_MODE (elem_type);
5524 if ((!INTEGRAL_TYPE_P (elem_type)
5525 && !SCALAR_FLOAT_TYPE_P (elem_type)
5526 && !FIXED_POINT_TYPE_P (elem_type))
5527 || (!SCALAR_FLOAT_MODE_P (elem_mode)
5528 && GET_MODE_CLASS (elem_mode) != MODE_INT
5529 && !ALL_SCALAR_FIXED_POINT_MODE_P (elem_mode))
5530 || !host_integerp (TYPE_SIZE_UNIT (elem_type), 1))
5532 error ("invalid element type for attribute %qs",
5533 IDENTIFIER_POINTER (name));
5537 /* Sanity check the vector size and element type consistency. */
5539 vec_bytes = tree_low_cst (rep_size, 1);
5541 if (vec_bytes % tree_low_cst (TYPE_SIZE_UNIT (elem_type), 1))
5543 error ("vector size not an integral multiple of component size");
5549 error ("zero vector size");
5553 vec_units = vec_bytes / tree_low_cst (TYPE_SIZE_UNIT (elem_type), 1);
5554 if (vec_units & (vec_units - 1))
5556 error ("number of components of the vector not a power of two");
5560 /* Build the vector type and replace. */
5562 *node = build_vector_type (elem_type, vec_units);
5563 rep_name = TYPE_NAME (rep_type);
5564 if (TREE_CODE (rep_name) == TYPE_DECL)
5565 rep_name = DECL_NAME (rep_name);
5566 TYPE_NAME (*node) = rep_name;
5567 TYPE_REPRESENTATIVE_ARRAY (*node) = rep_type;
5572 /* ----------------------------------------------------------------------- *
5573 * BUILTIN FUNCTIONS *
5574 * ----------------------------------------------------------------------- */
5576 /* Worker for DEF_BUILTIN. Possibly define a builtin function with one or two
5577 names. Does not declare a non-__builtin_ function if flag_no_builtin, or
5578 if nonansi_p and flag_no_nonansi_builtin. */
5581 def_builtin_1 (enum built_in_function fncode,
5583 enum built_in_class fnclass,
5584 tree fntype, tree libtype,
5585 bool both_p, bool fallback_p,
5586 bool nonansi_p ATTRIBUTE_UNUSED,
5587 tree fnattrs, bool implicit_p)
5590 const char *libname;
5592 /* Preserve an already installed decl. It most likely was setup in advance
5593 (e.g. as part of the internal builtins) for specific reasons. */
5594 if (built_in_decls[(int) fncode] != NULL_TREE)
5597 gcc_assert ((!both_p && !fallback_p)
5598 || !strncmp (name, "__builtin_",
5599 strlen ("__builtin_")));
5601 libname = name + strlen ("__builtin_");
5602 decl = add_builtin_function (name, fntype, fncode, fnclass,
5603 (fallback_p ? libname : NULL),
5606 /* ??? This is normally further controlled by command-line options
5607 like -fno-builtin, but we don't have them for Ada. */
5608 add_builtin_function (libname, libtype, fncode, fnclass,
5611 built_in_decls[(int) fncode] = decl;
5613 implicit_built_in_decls[(int) fncode] = decl;
5616 static int flag_isoc94 = 0;
5617 static int flag_isoc99 = 0;
5619 /* Install what the common builtins.def offers. */
5622 install_builtin_functions (void)
5624 #define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
5625 NONANSI_P, ATTRS, IMPLICIT, COND) \
5627 def_builtin_1 (ENUM, NAME, CLASS, \
5628 builtin_types[(int) TYPE], \
5629 builtin_types[(int) LIBTYPE], \
5630 BOTH_P, FALLBACK_P, NONANSI_P, \
5631 built_in_attributes[(int) ATTRS], IMPLICIT);
5632 #include "builtins.def"
5636 /* ----------------------------------------------------------------------- *
5637 * BUILTIN FUNCTIONS *
5638 * ----------------------------------------------------------------------- */
5640 /* Install the builtin functions we might need. */
5643 gnat_install_builtins (void)
5645 install_builtin_elementary_types ();
5646 install_builtin_function_types ();
5647 install_builtin_attributes ();
5649 /* Install builtins used by generic middle-end pieces first. Some of these
5650 know about internal specificities and control attributes accordingly, for
5651 instance __builtin_alloca vs no-throw and -fstack-check. We will ignore
5652 the generic definition from builtins.def. */
5653 build_common_builtin_nodes ();
5655 /* Now, install the target specific builtins, such as the AltiVec family on
5656 ppc, and the common set as exposed by builtins.def. */
5657 targetm.init_builtins ();
5658 install_builtin_functions ();
5661 #include "gt-ada-utils.h"
5662 #include "gtype-ada.h"