1 /****************************************************************************
3 * GNAT COMPILER COMPONENTS *
7 * C Implementation File *
9 * Copyright (C) 1992-2009, Free Software Foundation, Inc. *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
24 ****************************************************************************/
28 #include "coretypes.h"
40 #include "langhooks.h"
41 #include "pointer-set.h"
43 #include "tree-dump.h"
44 #include "tree-inline.h"
45 #include "tree-iterator.h"
62 #ifndef MAX_BITS_PER_WORD
63 #define MAX_BITS_PER_WORD BITS_PER_WORD
66 /* If nonzero, pretend we are allocating at global level. */
69 /* The default alignment of "double" floating-point types, i.e. floating
70 point types whose size is equal to 64 bits, or 0 if this alignment is
71 not specifically capped. */
72 int double_float_alignment;
74 /* The default alignment of "double" or larger scalar types, i.e. scalar
75 types whose size is greater or equal to 64 bits, or 0 if this alignment
76 is not specifically capped. */
77 int double_scalar_alignment;
79 /* Tree nodes for the various types and decls we create. */
80 tree gnat_std_decls[(int) ADT_LAST];
82 /* Functions to call for each of the possible raise reasons. */
83 tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
85 /* Forward declarations for handlers of attributes. */
86 static tree handle_const_attribute (tree *, tree, tree, int, bool *);
87 static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
88 static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
89 static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
90 static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
91 static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
92 static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
93 static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
94 static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
95 static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
96 static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);
98 /* Fake handler for attributes we don't properly support, typically because
99 they'd require dragging a lot of the common-c front-end circuitry. */
100 static tree fake_attribute_handler (tree *, tree, tree, int, bool *);
102 /* Table of machine-independent internal attributes for Ada. We support
103 this minimal set of attributes to accommodate the needs of builtins. */
104 const struct attribute_spec gnat_internal_attribute_table[] =
106 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
107 { "const", 0, 0, true, false, false, handle_const_attribute },
108 { "nothrow", 0, 0, true, false, false, handle_nothrow_attribute },
109 { "pure", 0, 0, true, false, false, handle_pure_attribute },
110 { "no vops", 0, 0, true, false, false, handle_novops_attribute },
111 { "nonnull", 0, -1, false, true, true, handle_nonnull_attribute },
112 { "sentinel", 0, 1, false, true, true, handle_sentinel_attribute },
113 { "noreturn", 0, 0, true, false, false, handle_noreturn_attribute },
114 { "malloc", 0, 0, true, false, false, handle_malloc_attribute },
115 { "type generic", 0, 0, false, true, true, handle_type_generic_attribute },
117 { "vector_size", 1, 1, false, true, false, handle_vector_size_attribute },
118 { "vector_type", 0, 0, false, true, false, handle_vector_type_attribute },
119 { "may_alias", 0, 0, false, true, false, NULL },
121 /* ??? format and format_arg are heavy and not supported, which actually
122 prevents support for stdio builtins, which we however declare as part
123 of the common builtins.def contents. */
124 { "format", 3, 3, false, true, true, fake_attribute_handler },
125 { "format_arg", 1, 1, false, true, true, fake_attribute_handler },
127 { NULL, 0, 0, false, false, false, NULL }
130 /* Associates a GNAT tree node to a GCC tree node. It is used in
131 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
132 of `save_gnu_tree' for more info. */
133 static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
135 #define GET_GNU_TREE(GNAT_ENTITY) \
136 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
138 #define SET_GNU_TREE(GNAT_ENTITY,VAL) \
139 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
141 #define PRESENT_GNU_TREE(GNAT_ENTITY) \
142 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
144 /* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
145 static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;
147 #define GET_DUMMY_NODE(GNAT_ENTITY) \
148 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
150 #define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
151 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
153 #define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
154 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
156 /* This variable keeps a table for types for each precision so that we only
157 allocate each of them once. Signed and unsigned types are kept separate.
159 Note that these types are only used when fold-const requests something
160 special. Perhaps we should NOT share these types; we'll see how it
162 static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
164 /* Likewise for float types, but record these by mode. */
165 static GTY(()) tree float_types[NUM_MACHINE_MODES];
167 /* For each binding contour we allocate a binding_level structure to indicate
168 the binding depth. */
170 struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
171 /* The binding level containing this one (the enclosing binding level). */
172 struct gnat_binding_level *chain;
173 /* The BLOCK node for this level. */
175 /* If nonzero, the setjmp buffer that needs to be updated for any
176 variable-sized definition within this context. */
180 /* The binding level currently in effect. */
181 static GTY(()) struct gnat_binding_level *current_binding_level;
183 /* A chain of gnat_binding_level structures awaiting reuse. */
184 static GTY((deletable)) struct gnat_binding_level *free_binding_level;
186 /* An array of global declarations. */
187 static GTY(()) VEC(tree,gc) *global_decls;
189 /* An array of builtin function declarations. */
190 static GTY(()) VEC(tree,gc) *builtin_decls;
192 /* An array of global renaming pointers. */
193 static GTY(()) VEC(tree,gc) *global_renaming_pointers;
195 /* A chain of unused BLOCK nodes. */
196 static GTY((deletable)) tree free_block_chain;
198 static tree merge_sizes (tree, tree, tree, bool, bool);
199 static tree compute_related_constant (tree, tree);
200 static tree split_plus (tree, tree *);
201 static tree float_type_for_precision (int, enum machine_mode);
202 static tree convert_to_fat_pointer (tree, tree);
203 static tree convert_to_thin_pointer (tree, tree);
204 static tree make_descriptor_field (const char *,tree, tree, tree);
205 static bool potential_alignment_gap (tree, tree, tree);
207 /* Initialize the association of GNAT nodes to GCC trees. */
210 init_gnat_to_gnu (void)
212 associate_gnat_to_gnu
213 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
216 /* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
217 which is to be associated with GNAT_ENTITY. Such GCC tree node is always
218 a ..._DECL node. If NO_CHECK is true, the latter check is suppressed.
220 If GNU_DECL is zero, a previous association is to be reset. */
223 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
225 /* Check that GNAT_ENTITY is not already defined and that it is being set
226 to something which is a decl. Raise gigi 401 if not. Usually, this
227 means GNAT_ENTITY is defined twice, but occasionally is due to some
229 gcc_assert (!(gnu_decl
230 && (PRESENT_GNU_TREE (gnat_entity)
231 || (!no_check && !DECL_P (gnu_decl)))));
233 SET_GNU_TREE (gnat_entity, gnu_decl);
236 /* GNAT_ENTITY is a GNAT tree node for a defining identifier.
237 Return the ..._DECL node that was associated with it. If there is no tree
238 node associated with GNAT_ENTITY, abort.
240 In some cases, such as delayed elaboration or expressions that need to
241 be elaborated only once, GNAT_ENTITY is really not an entity. */
244 get_gnu_tree (Entity_Id gnat_entity)
246 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
247 return GET_GNU_TREE (gnat_entity);
250 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
253 present_gnu_tree (Entity_Id gnat_entity)
255 return PRESENT_GNU_TREE (gnat_entity);
258 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
261 init_dummy_type (void)
264 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
267 /* Make a dummy type corresponding to GNAT_TYPE. */
270 make_dummy_type (Entity_Id gnat_type)
272 Entity_Id gnat_underlying = Gigi_Equivalent_Type (gnat_type);
275 /* If there is an equivalent type, get its underlying type. */
276 if (Present (gnat_underlying))
277 gnat_underlying = Underlying_Type (gnat_underlying);
279 /* If there was no equivalent type (can only happen when just annotating
280 types) or underlying type, go back to the original type. */
281 if (No (gnat_underlying))
282 gnat_underlying = gnat_type;
284 /* If it there already a dummy type, use that one. Else make one. */
285 if (PRESENT_DUMMY_NODE (gnat_underlying))
286 return GET_DUMMY_NODE (gnat_underlying);
288 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
290 gnu_type = make_node (Is_Record_Type (gnat_underlying)
291 ? tree_code_for_record_type (gnat_underlying)
293 TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
294 TYPE_DUMMY_P (gnu_type) = 1;
295 TYPE_STUB_DECL (gnu_type)
296 = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
297 if (AGGREGATE_TYPE_P (gnu_type))
298 TYPE_BY_REFERENCE_P (gnu_type) = Is_By_Reference_Type (gnat_type);
300 SET_DUMMY_NODE (gnat_underlying, gnu_type);
305 /* Return nonzero if we are currently in the global binding level. */
308 global_bindings_p (void)
310 return ((force_global || !current_function_decl) ? -1 : 0);
313 /* Enter a new binding level. */
316 gnat_pushlevel (void)
318 struct gnat_binding_level *newlevel = NULL;
320 /* Reuse a struct for this binding level, if there is one. */
321 if (free_binding_level)
323 newlevel = free_binding_level;
324 free_binding_level = free_binding_level->chain;
328 = (struct gnat_binding_level *)
329 ggc_alloc (sizeof (struct gnat_binding_level));
331 /* Use a free BLOCK, if any; otherwise, allocate one. */
332 if (free_block_chain)
334 newlevel->block = free_block_chain;
335 free_block_chain = BLOCK_CHAIN (free_block_chain);
336 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
339 newlevel->block = make_node (BLOCK);
341 /* Point the BLOCK we just made to its parent. */
342 if (current_binding_level)
343 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
345 BLOCK_VARS (newlevel->block) = BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
346 TREE_USED (newlevel->block) = 1;
348 /* Add this level to the front of the chain (stack) of levels that are
350 newlevel->chain = current_binding_level;
351 newlevel->jmpbuf_decl = NULL_TREE;
352 current_binding_level = newlevel;
355 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
356 and point FNDECL to this BLOCK. */
359 set_current_block_context (tree fndecl)
361 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
362 DECL_INITIAL (fndecl) = current_binding_level->block;
365 /* Set the jmpbuf_decl for the current binding level to DECL. */
368 set_block_jmpbuf_decl (tree decl)
370 current_binding_level->jmpbuf_decl = decl;
373 /* Get the jmpbuf_decl, if any, for the current binding level. */
376 get_block_jmpbuf_decl (void)
378 return current_binding_level->jmpbuf_decl;
381 /* Exit a binding level. Set any BLOCK into the current code group. */
386 struct gnat_binding_level *level = current_binding_level;
387 tree block = level->block;
389 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
390 BLOCK_SUBBLOCKS (block) = nreverse (BLOCK_SUBBLOCKS (block));
392 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
393 are no variables free the block and merge its subblocks into those of its
394 parent block. Otherwise, add it to the list of its parent. */
395 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
397 else if (BLOCK_VARS (block) == NULL_TREE)
399 BLOCK_SUBBLOCKS (level->chain->block)
400 = chainon (BLOCK_SUBBLOCKS (block),
401 BLOCK_SUBBLOCKS (level->chain->block));
402 BLOCK_CHAIN (block) = free_block_chain;
403 free_block_chain = block;
407 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
408 BLOCK_SUBBLOCKS (level->chain->block) = block;
409 TREE_USED (block) = 1;
410 set_block_for_group (block);
413 /* Free this binding structure. */
414 current_binding_level = level->chain;
415 level->chain = free_binding_level;
416 free_binding_level = level;
420 /* Records a ..._DECL node DECL as belonging to the current lexical scope
421 and uses GNAT_NODE for location information and propagating flags. */
424 gnat_pushdecl (tree decl, Node_Id gnat_node)
426 /* If this decl is public external or at toplevel, there is no context.
427 But PARM_DECLs always go in the level of its function. */
428 if (TREE_CODE (decl) != PARM_DECL
429 && ((DECL_EXTERNAL (decl) && TREE_PUBLIC (decl))
430 || global_bindings_p ()))
431 DECL_CONTEXT (decl) = 0;
434 DECL_CONTEXT (decl) = current_function_decl;
436 /* Functions imported in another function are not really nested.
437 For really nested functions mark them initially as needing
438 a static chain for uses of that flag before unnesting;
439 lower_nested_functions will then recompute it. */
440 if (TREE_CODE (decl) == FUNCTION_DECL && !TREE_PUBLIC (decl))
441 DECL_STATIC_CHAIN (decl) = 1;
444 TREE_NO_WARNING (decl) = (gnat_node == Empty || Warnings_Off (gnat_node));
446 /* Set the location of DECL and emit a declaration for it. */
447 if (Present (gnat_node))
448 Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
449 add_decl_expr (decl, gnat_node);
451 /* Put the declaration on the list. The list of declarations is in reverse
452 order. The list will be reversed later. Put global variables in the
453 globals list and builtin functions in a dedicated list to speed up
454 further lookups. Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into
455 the list, as they will cause trouble with the debugger and aren't needed
457 if (TREE_CODE (decl) != TYPE_DECL
458 || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
460 if (global_bindings_p ())
462 VEC_safe_push (tree, gc, global_decls, decl);
464 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
465 VEC_safe_push (tree, gc, builtin_decls, decl);
469 TREE_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
470 BLOCK_VARS (current_binding_level->block) = decl;
474 /* For the declaration of a type, set its name if it either is not already
475 set or if the previous type name was not derived from a source name.
476 We'd rather have the type named with a real name and all the pointer
477 types to the same object have the same POINTER_TYPE node. Code in the
478 equivalent function of c-decl.c makes a copy of the type node here, but
479 that may cause us trouble with incomplete types. We make an exception
480 for fat pointer types because the compiler automatically builds them
481 for unconstrained array types and the debugger uses them to represent
482 both these and pointers to these. */
483 if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
485 tree t = TREE_TYPE (decl);
487 if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
489 else if (TYPE_IS_FAT_POINTER_P (t))
491 tree tt = build_variant_type_copy (t);
492 TYPE_NAME (tt) = decl;
493 TREE_USED (tt) = TREE_USED (t);
494 TREE_TYPE (decl) = tt;
495 if (DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
496 DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
498 DECL_ORIGINAL_TYPE (decl) = t;
500 DECL_ARTIFICIAL (decl) = 0;
502 else if (DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
507 /* Propagate the name to all the variants. This is needed for
508 the type qualifiers machinery to work properly. */
510 for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
511 TYPE_NAME (t) = decl;
515 /* Do little here. Set up the standard declarations later after the
516 front end has been run. */
519 gnat_init_decl_processing (void)
521 /* Make the binding_level structure for global names. */
522 current_function_decl = 0;
523 current_binding_level = 0;
524 free_binding_level = 0;
527 build_common_tree_nodes (true, true);
529 /* In Ada, we use a signed type for SIZETYPE. Use the signed type
530 corresponding to the width of Pmode. In most cases when ptr_mode
531 and Pmode differ, C will use the width of ptr_mode for SIZETYPE.
532 But we get far better code using the width of Pmode. */
533 size_type_node = gnat_type_for_mode (Pmode, 0);
534 set_sizetype (size_type_node);
536 /* In Ada, we use an unsigned 8-bit type for the default boolean type. */
537 boolean_type_node = make_unsigned_type (8);
538 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
539 SET_TYPE_RM_MAX_VALUE (boolean_type_node,
540 build_int_cst (boolean_type_node, 1));
541 SET_TYPE_RM_SIZE (boolean_type_node, bitsize_int (1));
543 build_common_tree_nodes_2 (0);
544 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
546 ptr_void_type_node = build_pointer_type (void_type_node);
549 /* Record TYPE as a builtin type for Ada. NAME is the name of the type. */
552 record_builtin_type (const char *name, tree type)
554 tree type_decl = build_decl (input_location,
555 TYPE_DECL, get_identifier (name), type);
557 gnat_pushdecl (type_decl, Empty);
559 if (debug_hooks->type_decl)
560 debug_hooks->type_decl (type_decl, false);
563 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
564 finish constructing the record or union type. If REP_LEVEL is zero, this
565 record has no representation clause and so will be entirely laid out here.
566 If REP_LEVEL is one, this record has a representation clause and has been
567 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
568 this record is derived from a parent record and thus inherits its layout;
569 only make a pass on the fields to finalize them. DEBUG_INFO_P is true if
570 we need to write debug information about this type. */
573 finish_record_type (tree record_type, tree field_list, int rep_level,
576 enum tree_code code = TREE_CODE (record_type);
577 tree name = TYPE_NAME (record_type);
578 tree ada_size = bitsize_zero_node;
579 tree size = bitsize_zero_node;
580 bool had_size = TYPE_SIZE (record_type) != 0;
581 bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
582 bool had_align = TYPE_ALIGN (record_type) != 0;
585 TYPE_FIELDS (record_type) = field_list;
587 /* Always attach the TYPE_STUB_DECL for a record type. It is required to
588 generate debug info and have a parallel type. */
589 if (name && TREE_CODE (name) == TYPE_DECL)
590 name = DECL_NAME (name);
591 TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);
593 /* Globally initialize the record first. If this is a rep'ed record,
594 that just means some initializations; otherwise, layout the record. */
597 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
598 SET_TYPE_MODE (record_type, BLKmode);
601 TYPE_SIZE_UNIT (record_type) = size_zero_node;
603 TYPE_SIZE (record_type) = bitsize_zero_node;
605 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
606 out just like a UNION_TYPE, since the size will be fixed. */
607 else if (code == QUAL_UNION_TYPE)
612 /* Ensure there isn't a size already set. There can be in an error
613 case where there is a rep clause but all fields have errors and
614 no longer have a position. */
615 TYPE_SIZE (record_type) = 0;
616 layout_type (record_type);
619 /* At this point, the position and size of each field is known. It was
620 either set before entry by a rep clause, or by laying out the type above.
622 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
623 to compute the Ada size; the GCC size and alignment (for rep'ed records
624 that are not padding types); and the mode (for rep'ed records). We also
625 clear the DECL_BIT_FIELD indication for the cases we know have not been
626 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
628 if (code == QUAL_UNION_TYPE)
629 field_list = nreverse (field_list);
631 for (field = field_list; field; field = TREE_CHAIN (field))
633 tree type = TREE_TYPE (field);
634 tree pos = bit_position (field);
635 tree this_size = DECL_SIZE (field);
638 if ((TREE_CODE (type) == RECORD_TYPE
639 || TREE_CODE (type) == UNION_TYPE
640 || TREE_CODE (type) == QUAL_UNION_TYPE)
641 && !TYPE_FAT_POINTER_P (type)
642 && !TYPE_CONTAINS_TEMPLATE_P (type)
643 && TYPE_ADA_SIZE (type))
644 this_ada_size = TYPE_ADA_SIZE (type);
646 this_ada_size = this_size;
648 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
649 if (DECL_BIT_FIELD (field)
650 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
652 unsigned int align = TYPE_ALIGN (type);
654 /* In the general case, type alignment is required. */
655 if (value_factor_p (pos, align))
657 /* The enclosing record type must be sufficiently aligned.
658 Otherwise, if no alignment was specified for it and it
659 has been laid out already, bump its alignment to the
660 desired one if this is compatible with its size. */
661 if (TYPE_ALIGN (record_type) >= align)
663 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
664 DECL_BIT_FIELD (field) = 0;
668 && value_factor_p (TYPE_SIZE (record_type), align))
670 TYPE_ALIGN (record_type) = align;
671 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
672 DECL_BIT_FIELD (field) = 0;
676 /* In the non-strict alignment case, only byte alignment is. */
677 if (!STRICT_ALIGNMENT
678 && DECL_BIT_FIELD (field)
679 && value_factor_p (pos, BITS_PER_UNIT))
680 DECL_BIT_FIELD (field) = 0;
683 /* If we still have DECL_BIT_FIELD set at this point, we know that the
684 field is technically not addressable. Except that it can actually
685 be addressed if it is BLKmode and happens to be properly aligned. */
686 if (DECL_BIT_FIELD (field)
687 && !(DECL_MODE (field) == BLKmode
688 && value_factor_p (pos, BITS_PER_UNIT)))
689 DECL_NONADDRESSABLE_P (field) = 1;
691 /* A type must be as aligned as its most aligned field that is not
692 a bit-field. But this is already enforced by layout_type. */
693 if (rep_level > 0 && !DECL_BIT_FIELD (field))
694 TYPE_ALIGN (record_type)
695 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
700 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
701 size = size_binop (MAX_EXPR, size, this_size);
704 case QUAL_UNION_TYPE:
706 = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
707 this_ada_size, ada_size);
708 size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
713 /* Since we know here that all fields are sorted in order of
714 increasing bit position, the size of the record is one
715 higher than the ending bit of the last field processed
716 unless we have a rep clause, since in that case we might
717 have a field outside a QUAL_UNION_TYPE that has a higher ending
718 position. So use a MAX in that case. Also, if this field is a
719 QUAL_UNION_TYPE, we need to take into account the previous size in
720 the case of empty variants. */
722 = merge_sizes (ada_size, pos, this_ada_size,
723 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
725 = merge_sizes (size, pos, this_size,
726 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
734 if (code == QUAL_UNION_TYPE)
735 nreverse (field_list);
739 /* If this is a padding record, we never want to make the size smaller
740 than what was specified in it, if any. */
741 if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
742 size = TYPE_SIZE (record_type);
744 /* Now set any of the values we've just computed that apply. */
745 if (!TYPE_FAT_POINTER_P (record_type)
746 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
747 SET_TYPE_ADA_SIZE (record_type, ada_size);
751 tree size_unit = had_size_unit
752 ? TYPE_SIZE_UNIT (record_type)
754 size_binop (CEIL_DIV_EXPR, size,
756 unsigned int align = TYPE_ALIGN (record_type);
758 TYPE_SIZE (record_type) = variable_size (round_up (size, align));
759 TYPE_SIZE_UNIT (record_type)
760 = variable_size (round_up (size_unit, align / BITS_PER_UNIT));
762 compute_record_mode (record_type);
767 rest_of_record_type_compilation (record_type);
770 /* Wrap up compilation of RECORD_TYPE, i.e. output all the debug information
771 associated with it. It need not be invoked directly in most cases since
772 finish_record_type takes care of doing so, but this can be necessary if
773 a parallel type is to be attached to the record type. */
776 rest_of_record_type_compilation (tree record_type)
778 tree field_list = TYPE_FIELDS (record_type);
780 enum tree_code code = TREE_CODE (record_type);
781 bool var_size = false;
783 for (field = field_list; field; field = TREE_CHAIN (field))
785 /* We need to make an XVE/XVU record if any field has variable size,
786 whether or not the record does. For example, if we have a union,
787 it may be that all fields, rounded up to the alignment, have the
788 same size, in which case we'll use that size. But the debug
789 output routines (except Dwarf2) won't be able to output the fields,
790 so we need to make the special record. */
791 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
792 /* If a field has a non-constant qualifier, the record will have
793 variable size too. */
794 || (code == QUAL_UNION_TYPE
795 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
802 /* If this record is of variable size, rename it so that the
803 debugger knows it is and make a new, parallel, record
804 that tells the debugger how the record is laid out. See
805 exp_dbug.ads. But don't do this for records that are padding
806 since they confuse GDB. */
807 if (var_size && !TYPE_IS_PADDING_P (record_type))
810 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
811 ? UNION_TYPE : TREE_CODE (record_type));
812 tree orig_name = TYPE_NAME (record_type), new_name;
813 tree last_pos = bitsize_zero_node;
814 tree old_field, prev_old_field = NULL_TREE;
816 if (TREE_CODE (orig_name) == TYPE_DECL)
817 orig_name = DECL_NAME (orig_name);
820 = concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
822 TYPE_NAME (new_record_type) = new_name;
823 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
824 TYPE_STUB_DECL (new_record_type)
825 = create_type_stub_decl (new_name, new_record_type);
826 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
827 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
828 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
829 TYPE_SIZE_UNIT (new_record_type)
830 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
832 add_parallel_type (TYPE_STUB_DECL (record_type), new_record_type);
834 /* Now scan all the fields, replacing each field with a new
835 field corresponding to the new encoding. */
836 for (old_field = TYPE_FIELDS (record_type); old_field;
837 old_field = TREE_CHAIN (old_field))
839 tree field_type = TREE_TYPE (old_field);
840 tree field_name = DECL_NAME (old_field);
842 tree curpos = bit_position (old_field);
844 unsigned int align = 0;
847 /* See how the position was modified from the last position.
849 There are two basic cases we support: a value was added
850 to the last position or the last position was rounded to
851 a boundary and they something was added. Check for the
852 first case first. If not, see if there is any evidence
853 of rounding. If so, round the last position and try
856 If this is a union, the position can be taken as zero. */
858 /* Some computations depend on the shape of the position expression,
859 so strip conversions to make sure it's exposed. */
860 curpos = remove_conversions (curpos, true);
862 if (TREE_CODE (new_record_type) == UNION_TYPE)
863 pos = bitsize_zero_node, align = 0;
865 pos = compute_related_constant (curpos, last_pos);
867 if (!pos && TREE_CODE (curpos) == MULT_EXPR
868 && host_integerp (TREE_OPERAND (curpos, 1), 1))
870 tree offset = TREE_OPERAND (curpos, 0);
871 align = tree_low_cst (TREE_OPERAND (curpos, 1), 1);
873 /* An offset which is a bitwise AND with a negative power of 2
874 means an alignment corresponding to this power of 2. */
875 offset = remove_conversions (offset, true);
876 if (TREE_CODE (offset) == BIT_AND_EXPR
877 && host_integerp (TREE_OPERAND (offset, 1), 0)
878 && tree_int_cst_sgn (TREE_OPERAND (offset, 1)) < 0)
881 = - tree_low_cst (TREE_OPERAND (offset, 1), 0);
882 if (exact_log2 (pow) > 0)
886 pos = compute_related_constant (curpos,
887 round_up (last_pos, align));
889 else if (!pos && TREE_CODE (curpos) == PLUS_EXPR
890 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
891 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
892 && host_integerp (TREE_OPERAND
893 (TREE_OPERAND (curpos, 0), 1),
898 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
899 pos = compute_related_constant (curpos,
900 round_up (last_pos, align));
902 else if (potential_alignment_gap (prev_old_field, old_field,
905 align = TYPE_ALIGN (field_type);
906 pos = compute_related_constant (curpos,
907 round_up (last_pos, align));
910 /* If we can't compute a position, set it to zero.
912 ??? We really should abort here, but it's too much work
913 to get this correct for all cases. */
916 pos = bitsize_zero_node;
918 /* See if this type is variable-sized and make a pointer type
919 and indicate the indirection if so. Beware that the debug
920 back-end may adjust the position computed above according
921 to the alignment of the field type, i.e. the pointer type
922 in this case, if we don't preventively counter that. */
923 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
925 field_type = build_pointer_type (field_type);
926 if (align != 0 && TYPE_ALIGN (field_type) > align)
928 field_type = copy_node (field_type);
929 TYPE_ALIGN (field_type) = align;
934 /* Make a new field name, if necessary. */
935 if (var || align != 0)
940 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
941 align / BITS_PER_UNIT);
943 strcpy (suffix, "XVL");
945 field_name = concat_name (field_name, suffix);
948 new_field = create_field_decl (field_name, field_type,
950 DECL_SIZE (old_field), pos, 0);
951 TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
952 TYPE_FIELDS (new_record_type) = new_field;
954 /* If old_field is a QUAL_UNION_TYPE, take its size as being
955 zero. The only time it's not the last field of the record
956 is when there are other components at fixed positions after
957 it (meaning there was a rep clause for every field) and we
958 want to be able to encode them. */
959 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
960 (TREE_CODE (TREE_TYPE (old_field))
963 : DECL_SIZE (old_field));
964 prev_old_field = old_field;
967 TYPE_FIELDS (new_record_type)
968 = nreverse (TYPE_FIELDS (new_record_type));
970 rest_of_type_decl_compilation (TYPE_STUB_DECL (new_record_type));
973 rest_of_type_decl_compilation (TYPE_STUB_DECL (record_type));
976 /* Append PARALLEL_TYPE on the chain of parallel types for decl. */
979 add_parallel_type (tree decl, tree parallel_type)
983 while (DECL_PARALLEL_TYPE (d))
984 d = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (d));
986 SET_DECL_PARALLEL_TYPE (d, parallel_type);
989 /* Return the parallel type associated to a type, if any. */
992 get_parallel_type (tree type)
994 if (TYPE_STUB_DECL (type))
995 return DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type));
1000 /* Utility function of above to merge LAST_SIZE, the previous size of a record
1001 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
1002 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
1003 replace a value of zero with the old size. If HAS_REP is true, we take the
1004 MAX of the end position of this field with LAST_SIZE. In all other cases,
1005 we use FIRST_BIT plus SIZE. Return an expression for the size. */
1008 merge_sizes (tree last_size, tree first_bit, tree size, bool special,
1011 tree type = TREE_TYPE (last_size);
1014 if (!special || TREE_CODE (size) != COND_EXPR)
1016 new_size = size_binop (PLUS_EXPR, first_bit, size);
1018 new_size = size_binop (MAX_EXPR, last_size, new_size);
1022 new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
1023 integer_zerop (TREE_OPERAND (size, 1))
1024 ? last_size : merge_sizes (last_size, first_bit,
1025 TREE_OPERAND (size, 1),
1027 integer_zerop (TREE_OPERAND (size, 2))
1028 ? last_size : merge_sizes (last_size, first_bit,
1029 TREE_OPERAND (size, 2),
1032 /* We don't need any NON_VALUE_EXPRs and they can confuse us (especially
1033 when fed through substitute_in_expr) into thinking that a constant
1034 size is not constant. */
1035 while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
1036 new_size = TREE_OPERAND (new_size, 0);
1041 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1042 related by the addition of a constant. Return that constant if so. */
1045 compute_related_constant (tree op0, tree op1)
1047 tree op0_var, op1_var;
1048 tree op0_con = split_plus (op0, &op0_var);
1049 tree op1_con = split_plus (op1, &op1_var);
1050 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1052 if (operand_equal_p (op0_var, op1_var, 0))
1054 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1060 /* Utility function of above to split a tree OP which may be a sum, into a
1061 constant part, which is returned, and a variable part, which is stored
1062 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1066 split_plus (tree in, tree *pvar)
1068 /* Strip NOPS in order to ease the tree traversal and maximize the
1069 potential for constant or plus/minus discovery. We need to be careful
1070 to always return and set *pvar to bitsizetype trees, but it's worth
1074 *pvar = convert (bitsizetype, in);
1076 if (TREE_CODE (in) == INTEGER_CST)
1078 *pvar = bitsize_zero_node;
1079 return convert (bitsizetype, in);
1081 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1083 tree lhs_var, rhs_var;
1084 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1085 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
1087 if (lhs_var == TREE_OPERAND (in, 0)
1088 && rhs_var == TREE_OPERAND (in, 1))
1089 return bitsize_zero_node;
1091 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1092 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
1095 return bitsize_zero_node;
1098 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1099 subprogram. If it is void_type_node, then we are dealing with a procedure,
1100 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1101 PARM_DECL nodes that are the subprogram arguments. CICO_LIST is the
1102 copy-in/copy-out list to be stored into TYPE_CICO_LIST.
1103 RETURNS_UNCONSTRAINED is true if the function returns an unconstrained
1104 object. RETURNS_BY_REF is true if the function returns by reference.
1105 RETURNS_BY_TARGET_PTR is true if the function is to be passed (as its
1106 first parameter) the address of the place to copy its result. */
1109 create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
1110 bool returns_unconstrained, bool returns_by_ref,
1111 bool returns_by_target_ptr)
1113 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1114 the subprogram formal parameters. This list is generated by traversing the
1115 input list of PARM_DECL nodes. */
1116 tree param_type_list = NULL;
1120 for (param_decl = param_decl_list; param_decl;
1121 param_decl = TREE_CHAIN (param_decl))
1122 param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
1125 /* The list of the function parameter types has to be terminated by the void
1126 type to signal to the back-end that we are not dealing with a variable
1127 parameter subprogram, but that the subprogram has a fixed number of
1129 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1131 /* The list of argument types has been created in reverse
1133 param_type_list = nreverse (param_type_list);
1135 type = build_function_type (return_type, param_type_list);
1137 /* TYPE may have been shared since GCC hashes types. If it has a CICO_LIST
1138 or the new type should, make a copy of TYPE. Likewise for
1139 RETURNS_UNCONSTRAINED and RETURNS_BY_REF. */
1140 if (TYPE_CI_CO_LIST (type) || cico_list
1141 || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
1142 || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref
1143 || TYPE_RETURNS_BY_TARGET_PTR_P (type) != returns_by_target_ptr)
1144 type = copy_type (type);
1146 TYPE_CI_CO_LIST (type) = cico_list;
1147 TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
1148 TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
1149 TYPE_RETURNS_BY_TARGET_PTR_P (type) = returns_by_target_ptr;
1153 /* Return a copy of TYPE but safe to modify in any way. */
1156 copy_type (tree type)
1158 tree new_type = copy_node (type);
1160 /* copy_node clears this field instead of copying it, because it is
1161 aliased with TREE_CHAIN. */
1162 TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);
1164 TYPE_POINTER_TO (new_type) = 0;
1165 TYPE_REFERENCE_TO (new_type) = 0;
1166 TYPE_MAIN_VARIANT (new_type) = new_type;
1167 TYPE_NEXT_VARIANT (new_type) = 0;
1172 /* Return a subtype of sizetype with range MIN to MAX and whose
1173 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
1174 of the associated TYPE_DECL. */
1177 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
1179 /* First build a type for the desired range. */
1180 tree type = build_index_2_type (min, max);
1182 /* If this type has the TYPE_INDEX_TYPE we want, return it. */
1183 if (TYPE_INDEX_TYPE (type) == index)
1186 /* Otherwise, if TYPE_INDEX_TYPE is set, make a copy. Note that we have
1187 no way of sharing these types, but that's only a small hole. */
1188 if (TYPE_INDEX_TYPE (type))
1189 type = copy_type (type);
1191 SET_TYPE_INDEX_TYPE (type, index);
1192 create_type_decl (NULL_TREE, type, NULL, true, false, gnat_node);
1197 /* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
1198 sizetype is used. */
1201 create_range_type (tree type, tree min, tree max)
1205 if (type == NULL_TREE)
1208 /* First build a type with the base range. */
1210 = build_range_type (type, TYPE_MIN_VALUE (type), TYPE_MAX_VALUE (type));
1212 min = convert (type, min);
1213 max = convert (type, max);
1215 /* If this type has the TYPE_RM_{MIN,MAX}_VALUE we want, return it. */
1216 if (TYPE_RM_MIN_VALUE (range_type)
1217 && TYPE_RM_MAX_VALUE (range_type)
1218 && operand_equal_p (TYPE_RM_MIN_VALUE (range_type), min, 0)
1219 && operand_equal_p (TYPE_RM_MAX_VALUE (range_type), max, 0))
1222 /* Otherwise, if TYPE_RM_{MIN,MAX}_VALUE is set, make a copy. */
1223 if (TYPE_RM_MIN_VALUE (range_type) || TYPE_RM_MAX_VALUE (range_type))
1224 range_type = copy_type (range_type);
1226 /* Then set the actual range. */
1227 SET_TYPE_RM_MIN_VALUE (range_type, min);
1228 SET_TYPE_RM_MAX_VALUE (range_type, max);
1233 /* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of a type.
1234 TYPE_NAME gives the name of the type and TYPE is a ..._TYPE node giving
1238 create_type_stub_decl (tree type_name, tree type)
1240 /* Using a named TYPE_DECL ensures that a type name marker is emitted in
1241 STABS while setting DECL_ARTIFICIAL ensures that no DW_TAG_typedef is
1242 emitted in DWARF. */
1243 tree type_decl = build_decl (input_location,
1244 TYPE_DECL, type_name, type);
1245 DECL_ARTIFICIAL (type_decl) = 1;
1249 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type and TYPE
1250 is a ..._TYPE node giving its data type. ARTIFICIAL_P is true if this
1251 is a declaration that was generated by the compiler. DEBUG_INFO_P is
1252 true if we need to write debug information about this type. GNAT_NODE
1253 is used for the position of the decl. */
1256 create_type_decl (tree type_name, tree type, struct attrib *attr_list,
1257 bool artificial_p, bool debug_info_p, Node_Id gnat_node)
1259 enum tree_code code = TREE_CODE (type);
1260 bool named = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
1263 /* Only the builtin TYPE_STUB_DECL should be used for dummy types. */
1264 gcc_assert (!TYPE_IS_DUMMY_P (type));
1266 /* If the type hasn't been named yet, we're naming it; preserve an existing
1267 TYPE_STUB_DECL that has been attached to it for some purpose. */
1268 if (!named && TYPE_STUB_DECL (type))
1270 type_decl = TYPE_STUB_DECL (type);
1271 DECL_NAME (type_decl) = type_name;
1274 type_decl = build_decl (input_location,
1275 TYPE_DECL, type_name, type);
1277 DECL_ARTIFICIAL (type_decl) = artificial_p;
1278 gnat_pushdecl (type_decl, gnat_node);
1279 process_attributes (type_decl, attr_list);
1281 /* If we're naming the type, equate the TYPE_STUB_DECL to the name.
1282 This causes the name to be also viewed as a "tag" by the debug
1283 back-end, with the advantage that no DW_TAG_typedef is emitted
1284 for artificial "tagged" types in DWARF. */
1286 TYPE_STUB_DECL (type) = type_decl;
1288 /* Pass the type declaration to the debug back-end unless this is an
1289 UNCONSTRAINED_ARRAY_TYPE that the back-end does not support, or a
1290 type for which debugging information was not requested, or else an
1291 ENUMERAL_TYPE or RECORD_TYPE (except for fat pointers) which are
1292 handled separately. And do not pass dummy types either. */
1293 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
1294 DECL_IGNORED_P (type_decl) = 1;
1295 else if (code != ENUMERAL_TYPE
1296 && (code != RECORD_TYPE || TYPE_FAT_POINTER_P (type))
1297 && !((code == POINTER_TYPE || code == REFERENCE_TYPE)
1298 && TYPE_IS_DUMMY_P (TREE_TYPE (type)))
1299 && !(code == RECORD_TYPE
1301 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))))))
1302 rest_of_type_decl_compilation (type_decl);
1307 /* Return a VAR_DECL or CONST_DECL node.
1309 VAR_NAME gives the name of the variable. ASM_NAME is its assembler name
1310 (if provided). TYPE is its data type (a GCC ..._TYPE node). VAR_INIT is
1311 the GCC tree for an optional initial expression; NULL_TREE if none.
1313 CONST_FLAG is true if this variable is constant, in which case we might
1314 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
1316 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
1317 definition to be made visible outside of the current compilation unit, for
1318 instance variable definitions in a package specification.
1320 EXTERN_FLAG is true when processing an external variable declaration (as
1321 opposed to a definition: no storage is to be allocated for the variable).
1323 STATIC_FLAG is only relevant when not at top level. In that case
1324 it indicates whether to always allocate storage to the variable.
1326 GNAT_NODE is used for the position of the decl. */
1329 create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
1330 bool const_flag, bool public_flag, bool extern_flag,
1331 bool static_flag, bool const_decl_allowed_p,
1332 struct attrib *attr_list, Node_Id gnat_node)
1336 && gnat_types_compatible_p (type, TREE_TYPE (var_init))
1337 && (global_bindings_p () || static_flag
1338 ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init)) != 0
1339 : TREE_CONSTANT (var_init)));
1341 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
1342 case the initializer may be used in-lieu of the DECL node (as done in
1343 Identifier_to_gnu). This is useful to prevent the need of elaboration
1344 code when an identifier for which such a decl is made is in turn used as
1345 an initializer. We used to rely on CONST vs VAR_DECL for this purpose,
1346 but extra constraints apply to this choice (see below) and are not
1347 relevant to the distinction we wish to make. */
1348 bool constant_p = const_flag && init_const;
1350 /* The actual DECL node. CONST_DECL was initially intended for enumerals
1351 and may be used for scalars in general but not for aggregates. */
1353 = build_decl (input_location,
1354 (constant_p && const_decl_allowed_p
1355 && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
1358 /* If this is external, throw away any initializations (they will be done
1359 elsewhere) unless this is a constant for which we would like to remain
1360 able to get the initializer. If we are defining a global here, leave a
1361 constant initialization and save any variable elaborations for the
1362 elaboration routine. If we are just annotating types, throw away the
1363 initialization if it isn't a constant. */
1364 if ((extern_flag && !constant_p)
1365 || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
1366 var_init = NULL_TREE;
1368 /* At the global level, an initializer requiring code to be generated
1369 produces elaboration statements. Check that such statements are allowed,
1370 that is, not violating a No_Elaboration_Code restriction. */
1371 if (global_bindings_p () && var_init != 0 && ! init_const)
1372 Check_Elaboration_Code_Allowed (gnat_node);
1374 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
1375 try to fiddle with DECL_COMMON. However, on platforms that don't
1376 support global BSS sections, uninitialized global variables would
1377 go in DATA instead, thus increasing the size of the executable. */
1379 && TREE_CODE (var_decl) == VAR_DECL
1380 && !have_global_bss_p ())
1381 DECL_COMMON (var_decl) = 1;
1382 DECL_INITIAL (var_decl) = var_init;
1383 TREE_READONLY (var_decl) = const_flag;
1384 DECL_EXTERNAL (var_decl) = extern_flag;
1385 TREE_PUBLIC (var_decl) = public_flag || extern_flag;
1386 TREE_CONSTANT (var_decl) = constant_p;
1387 TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
1388 = TYPE_VOLATILE (type);
1390 /* If it's public and not external, always allocate storage for it.
1391 At the global binding level we need to allocate static storage for the
1392 variable if and only if it's not external. If we are not at the top level
1393 we allocate automatic storage unless requested not to. */
1394 TREE_STATIC (var_decl)
1395 = !extern_flag && (public_flag || static_flag || global_bindings_p ());
1397 /* For an external constant whose initializer is not absolute, do not emit
1398 debug info. In DWARF this would mean a global relocation in a read-only
1399 section which runs afoul of the PE-COFF runtime relocation mechanism. */
1402 && initializer_constant_valid_p (var_init, TREE_TYPE (var_init))
1403 != null_pointer_node)
1404 DECL_IGNORED_P (var_decl) = 1;
1406 if (TREE_CODE (var_decl) == VAR_DECL)
1409 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
1410 process_attributes (var_decl, attr_list);
1413 /* Add this decl to the current binding level. */
1414 gnat_pushdecl (var_decl, gnat_node);
1416 if (TREE_SIDE_EFFECTS (var_decl))
1417 TREE_ADDRESSABLE (var_decl) = 1;
1419 if (TREE_CODE (var_decl) != CONST_DECL)
1421 if (global_bindings_p ())
1422 rest_of_decl_compilation (var_decl, true, 0);
1425 expand_decl (var_decl);
1430 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
1433 aggregate_type_contains_array_p (tree type)
1435 switch (TREE_CODE (type))
1439 case QUAL_UNION_TYPE:
1442 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1443 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
1444 && aggregate_type_contains_array_p (TREE_TYPE (field)))
1457 /* Return a FIELD_DECL node. FIELD_NAME is the field's name, FIELD_TYPE is
1458 its type and RECORD_TYPE is the type of the enclosing record. PACKED is
1459 1 if the enclosing record is packed, -1 if it has Component_Alignment of
1460 Storage_Unit. If SIZE is nonzero, it is the specified size of the field.
1461 If POS is nonzero, it is the bit position. If ADDRESSABLE is nonzero, it
1462 means we are allowed to take the address of the field; if it is negative,
1463 we should not make a bitfield, which is used by make_aligning_type. */
1466 create_field_decl (tree field_name, tree field_type, tree record_type,
1467 int packed, tree size, tree pos, int addressable)
1469 tree field_decl = build_decl (input_location,
1470 FIELD_DECL, field_name, field_type);
1472 DECL_CONTEXT (field_decl) = record_type;
1473 TREE_READONLY (field_decl) = TYPE_READONLY (field_type);
1475 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
1476 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
1477 Likewise for an aggregate without specified position that contains an
1478 array, because in this case slices of variable length of this array
1479 must be handled by GCC and variable-sized objects need to be aligned
1480 to at least a byte boundary. */
1481 if (packed && (TYPE_MODE (field_type) == BLKmode
1483 && AGGREGATE_TYPE_P (field_type)
1484 && aggregate_type_contains_array_p (field_type))))
1485 DECL_ALIGN (field_decl) = BITS_PER_UNIT;
1487 /* If a size is specified, use it. Otherwise, if the record type is packed
1488 compute a size to use, which may differ from the object's natural size.
1489 We always set a size in this case to trigger the checks for bitfield
1490 creation below, which is typically required when no position has been
1493 size = convert (bitsizetype, size);
1494 else if (packed == 1)
1496 size = rm_size (field_type);
1497 if (TYPE_MODE (field_type) == BLKmode)
1498 size = round_up (size, BITS_PER_UNIT);
1501 /* If we may, according to ADDRESSABLE, make a bitfield if a size is
1502 specified for two reasons: first if the size differs from the natural
1503 size. Second, if the alignment is insufficient. There are a number of
1504 ways the latter can be true.
1506 We never make a bitfield if the type of the field has a nonconstant size,
1507 because no such entity requiring bitfield operations should reach here.
1509 We do *preventively* make a bitfield when there might be the need for it
1510 but we don't have all the necessary information to decide, as is the case
1511 of a field with no specified position in a packed record.
1513 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
1514 in layout_decl or finish_record_type to clear the bit_field indication if
1515 it is in fact not needed. */
1516 if (addressable >= 0
1518 && TREE_CODE (size) == INTEGER_CST
1519 && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
1520 && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
1521 || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
1523 || (TYPE_ALIGN (record_type) != 0
1524 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
1526 DECL_BIT_FIELD (field_decl) = 1;
1527 DECL_SIZE (field_decl) = size;
1528 if (!packed && !pos)
1530 if (TYPE_ALIGN (record_type) != 0
1531 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))
1532 DECL_ALIGN (field_decl) = TYPE_ALIGN (record_type);
1534 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
1538 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
1540 /* Bump the alignment if need be, either for bitfield/packing purposes or
1541 to satisfy the type requirements if no such consideration applies. When
1542 we get the alignment from the type, indicate if this is from an explicit
1543 user request, which prevents stor-layout from lowering it later on. */
1545 unsigned int bit_align
1546 = (DECL_BIT_FIELD (field_decl) ? 1
1547 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT : 0);
1549 if (bit_align > DECL_ALIGN (field_decl))
1550 DECL_ALIGN (field_decl) = bit_align;
1551 else if (!bit_align && TYPE_ALIGN (field_type) > DECL_ALIGN (field_decl))
1553 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
1554 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (field_type);
1560 /* We need to pass in the alignment the DECL is known to have.
1561 This is the lowest-order bit set in POS, but no more than
1562 the alignment of the record, if one is specified. Note
1563 that an alignment of 0 is taken as infinite. */
1564 unsigned int known_align;
1566 if (host_integerp (pos, 1))
1567 known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
1569 known_align = BITS_PER_UNIT;
1571 if (TYPE_ALIGN (record_type)
1572 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
1573 known_align = TYPE_ALIGN (record_type);
1575 layout_decl (field_decl, known_align);
1576 SET_DECL_OFFSET_ALIGN (field_decl,
1577 host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
1579 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
1580 &DECL_FIELD_BIT_OFFSET (field_decl),
1581 DECL_OFFSET_ALIGN (field_decl), pos);
1584 /* In addition to what our caller says, claim the field is addressable if we
1585 know that its type is not suitable.
1587 The field may also be "technically" nonaddressable, meaning that even if
1588 we attempt to take the field's address we will actually get the address
1589 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
1590 value we have at this point is not accurate enough, so we don't account
1591 for this here and let finish_record_type decide. */
1592 if (!addressable && !type_for_nonaliased_component_p (field_type))
1595 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
1600 /* Return a PARM_DECL node. PARAM_NAME is the name of the parameter and
1601 PARAM_TYPE is its type. READONLY is true if the parameter is readonly
1602 (either an In parameter or an address of a pass-by-ref parameter). */
1605 create_param_decl (tree param_name, tree param_type, bool readonly)
1607 tree param_decl = build_decl (input_location,
1608 PARM_DECL, param_name, param_type);
1610 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
1611 can lead to various ABI violations. */
1612 if (targetm.calls.promote_prototypes (NULL_TREE)
1613 && INTEGRAL_TYPE_P (param_type)
1614 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
1616 /* We have to be careful about biased types here. Make a subtype
1617 of integer_type_node with the proper biasing. */
1618 if (TREE_CODE (param_type) == INTEGER_TYPE
1619 && TYPE_BIASED_REPRESENTATION_P (param_type))
1622 = make_unsigned_type (TYPE_PRECISION (integer_type_node));
1623 TREE_TYPE (subtype) = integer_type_node;
1624 TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
1625 SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (param_type));
1626 SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (param_type));
1627 param_type = subtype;
1630 param_type = integer_type_node;
1633 DECL_ARG_TYPE (param_decl) = param_type;
1634 TREE_READONLY (param_decl) = readonly;
1638 /* Given a DECL and ATTR_LIST, process the listed attributes. */
1641 process_attributes (tree decl, struct attrib *attr_list)
1643 for (; attr_list; attr_list = attr_list->next)
1644 switch (attr_list->type)
1646 case ATTR_MACHINE_ATTRIBUTE:
1647 decl_attributes (&decl, tree_cons (attr_list->name, attr_list->args,
1649 ATTR_FLAG_TYPE_IN_PLACE);
1652 case ATTR_LINK_ALIAS:
1653 if (! DECL_EXTERNAL (decl))
1655 TREE_STATIC (decl) = 1;
1656 assemble_alias (decl, attr_list->name);
1660 case ATTR_WEAK_EXTERNAL:
1662 declare_weak (decl);
1664 post_error ("?weak declarations not supported on this target",
1665 attr_list->error_point);
1668 case ATTR_LINK_SECTION:
1669 if (targetm.have_named_sections)
1671 DECL_SECTION_NAME (decl)
1672 = build_string (IDENTIFIER_LENGTH (attr_list->name),
1673 IDENTIFIER_POINTER (attr_list->name));
1674 DECL_COMMON (decl) = 0;
1677 post_error ("?section attributes are not supported for this target",
1678 attr_list->error_point);
1681 case ATTR_LINK_CONSTRUCTOR:
1682 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1683 TREE_USED (decl) = 1;
1686 case ATTR_LINK_DESTRUCTOR:
1687 DECL_STATIC_DESTRUCTOR (decl) = 1;
1688 TREE_USED (decl) = 1;
1691 case ATTR_THREAD_LOCAL_STORAGE:
1692 DECL_TLS_MODEL (decl) = decl_default_tls_model (decl);
1693 DECL_COMMON (decl) = 0;
1698 /* Record DECL as a global renaming pointer. */
1701 record_global_renaming_pointer (tree decl)
1703 gcc_assert (DECL_RENAMED_OBJECT (decl));
1704 VEC_safe_push (tree, gc, global_renaming_pointers, decl);
1707 /* Invalidate the global renaming pointers. */
1710 invalidate_global_renaming_pointers (void)
1715 for (i = 0; VEC_iterate(tree, global_renaming_pointers, i, iter); i++)
1716 SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);
1718 VEC_free (tree, gc, global_renaming_pointers);
1721 /* Return true if VALUE is a known to be a multiple of FACTOR, which must be
1725 value_factor_p (tree value, HOST_WIDE_INT factor)
1727 if (host_integerp (value, 1))
1728 return tree_low_cst (value, 1) % factor == 0;
1730 if (TREE_CODE (value) == MULT_EXPR)
1731 return (value_factor_p (TREE_OPERAND (value, 0), factor)
1732 || value_factor_p (TREE_OPERAND (value, 1), factor));
1737 /* Given 2 consecutive field decls PREV_FIELD and CURR_FIELD, return true
1738 unless we can prove these 2 fields are laid out in such a way that no gap
1739 exist between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
1740 is the distance in bits between the end of PREV_FIELD and the starting
1741 position of CURR_FIELD. It is ignored if null. */
1744 potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
1746 /* If this is the first field of the record, there cannot be any gap */
1750 /* If the previous field is a union type, then return False: The only
1751 time when such a field is not the last field of the record is when
1752 there are other components at fixed positions after it (meaning there
1753 was a rep clause for every field), in which case we don't want the
1754 alignment constraint to override them. */
1755 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
1758 /* If the distance between the end of prev_field and the beginning of
1759 curr_field is constant, then there is a gap if the value of this
1760 constant is not null. */
1761 if (offset && host_integerp (offset, 1))
1762 return !integer_zerop (offset);
1764 /* If the size and position of the previous field are constant,
1765 then check the sum of this size and position. There will be a gap
1766 iff it is not multiple of the current field alignment. */
1767 if (host_integerp (DECL_SIZE (prev_field), 1)
1768 && host_integerp (bit_position (prev_field), 1))
1769 return ((tree_low_cst (bit_position (prev_field), 1)
1770 + tree_low_cst (DECL_SIZE (prev_field), 1))
1771 % DECL_ALIGN (curr_field) != 0);
1773 /* If both the position and size of the previous field are multiples
1774 of the current field alignment, there cannot be any gap. */
1775 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
1776 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
1779 /* Fallback, return that there may be a potential gap */
1783 /* Returns a LABEL_DECL node for LABEL_NAME. */
1786 create_label_decl (tree label_name)
1788 tree label_decl = build_decl (input_location,
1789 LABEL_DECL, label_name, void_type_node);
1791 DECL_CONTEXT (label_decl) = current_function_decl;
1792 DECL_MODE (label_decl) = VOIDmode;
1793 DECL_SOURCE_LOCATION (label_decl) = input_location;
1798 /* Returns a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
1799 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1800 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1801 PARM_DECL nodes chained through the TREE_CHAIN field).
1803 INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
1804 appropriate fields in the FUNCTION_DECL. GNAT_NODE gives the location. */
1807 create_subprog_decl (tree subprog_name, tree asm_name,
1808 tree subprog_type, tree param_decl_list, bool inline_flag,
1809 bool public_flag, bool extern_flag,
1810 struct attrib *attr_list, Node_Id gnat_node)
1812 tree return_type = TREE_TYPE (subprog_type);
1813 tree subprog_decl = build_decl (input_location,
1814 FUNCTION_DECL, subprog_name, subprog_type);
1816 /* If this is a non-inline function nested inside an inlined external
1817 function, we cannot honor both requests without cloning the nested
1818 function in the current unit since it is private to the other unit.
1819 We could inline the nested function as well but it's probably better
1820 to err on the side of too little inlining. */
1822 && current_function_decl
1823 && DECL_DECLARED_INLINE_P (current_function_decl)
1824 && DECL_EXTERNAL (current_function_decl))
1825 DECL_DECLARED_INLINE_P (current_function_decl) = 0;
1827 DECL_EXTERNAL (subprog_decl) = extern_flag;
1828 TREE_PUBLIC (subprog_decl) = public_flag;
1829 TREE_STATIC (subprog_decl) = 1;
1830 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
1831 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
1832 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
1833 DECL_DECLARED_INLINE_P (subprog_decl) = inline_flag;
1834 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
1835 DECL_RESULT (subprog_decl) = build_decl (input_location,
1836 RESULT_DECL, 0, return_type);
1837 DECL_ARTIFICIAL (DECL_RESULT (subprog_decl)) = 1;
1838 DECL_IGNORED_P (DECL_RESULT (subprog_decl)) = 1;
1840 /* TREE_ADDRESSABLE is set on the result type to request the use of the
1841 target by-reference return mechanism. This is not supported all the
1842 way down to RTL expansion with GCC 4, which ICEs on temporary creation
1843 attempts with such a type and expects DECL_BY_REFERENCE to be set on
1844 the RESULT_DECL instead - see gnat_genericize for more details. */
1845 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (subprog_decl))))
1847 tree result_decl = DECL_RESULT (subprog_decl);
1849 TREE_ADDRESSABLE (TREE_TYPE (result_decl)) = 0;
1850 DECL_BY_REFERENCE (result_decl) = 1;
1855 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
1857 /* The expand_main_function circuitry expects "main_identifier_node" to
1858 designate the DECL_NAME of the 'main' entry point, in turn expected
1859 to be declared as the "main" function literally by default. Ada
1860 program entry points are typically declared with a different name
1861 within the binder generated file, exported as 'main' to satisfy the
1862 system expectations. Force main_identifier_node in this case. */
1863 if (asm_name == main_identifier_node)
1864 DECL_NAME (subprog_decl) = main_identifier_node;
1867 process_attributes (subprog_decl, attr_list);
1869 /* Add this decl to the current binding level. */
1870 gnat_pushdecl (subprog_decl, gnat_node);
1872 /* Output the assembler code and/or RTL for the declaration. */
1873 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
1875 return subprog_decl;
1878 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
1879 body. This routine needs to be invoked before processing the declarations
1880 appearing in the subprogram. */
1883 begin_subprog_body (tree subprog_decl)
1887 current_function_decl = subprog_decl;
1888 announce_function (subprog_decl);
1890 /* Enter a new binding level and show that all the parameters belong to
1893 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
1894 param_decl = TREE_CHAIN (param_decl))
1895 DECL_CONTEXT (param_decl) = subprog_decl;
1897 make_decl_rtl (subprog_decl);
1899 /* We handle pending sizes via the elaboration of types, so we don't need to
1900 save them. This causes them to be marked as part of the outer function
1901 and then discarded. */
1902 get_pending_sizes ();
1906 /* Helper for the genericization callback. Return a dereference of VAL
1907 if it is of a reference type. */
1910 convert_from_reference (tree val)
1912 tree value_type, ref;
1914 if (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE)
1917 value_type = TREE_TYPE (TREE_TYPE (val));
1918 ref = build1 (INDIRECT_REF, value_type, val);
1920 /* See if what we reference is CONST or VOLATILE, which requires
1921 looking into array types to get to the component type. */
1923 while (TREE_CODE (value_type) == ARRAY_TYPE)
1924 value_type = TREE_TYPE (value_type);
1927 = (TYPE_QUALS (value_type) & TYPE_QUAL_CONST);
1928 TREE_THIS_VOLATILE (ref)
1929 = (TYPE_QUALS (value_type) & TYPE_QUAL_VOLATILE);
1931 TREE_SIDE_EFFECTS (ref)
1932 = (TREE_THIS_VOLATILE (ref) || TREE_SIDE_EFFECTS (val));
1937 /* Helper for the genericization callback. Returns true if T denotes
1938 a RESULT_DECL with DECL_BY_REFERENCE set. */
1941 is_byref_result (tree t)
1943 return (TREE_CODE (t) == RESULT_DECL && DECL_BY_REFERENCE (t));
1947 /* Tree walking callback for gnat_genericize. Currently ...
1949 o Adjust references to the function's DECL_RESULT if it is marked
1950 DECL_BY_REFERENCE and so has had its type turned into a reference
1951 type at the end of the function compilation. */
1954 gnat_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
1956 /* This implementation is modeled after what the C++ front-end is
1957 doing, basis of the downstream passes behavior. */
1959 tree stmt = *stmt_p;
1960 struct pointer_set_t *p_set = (struct pointer_set_t*) data;
1962 /* If we have a direct mention of the result decl, dereference. */
1963 if (is_byref_result (stmt))
1965 *stmt_p = convert_from_reference (stmt);
1970 /* Otherwise, no need to walk the same tree twice. */
1971 if (pointer_set_contains (p_set, stmt))
1977 /* If we are taking the address of what now is a reference, just get the
1979 if (TREE_CODE (stmt) == ADDR_EXPR
1980 && is_byref_result (TREE_OPERAND (stmt, 0)))
1982 *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
1986 /* Don't dereference an by-reference RESULT_DECL inside a RETURN_EXPR. */
1987 else if (TREE_CODE (stmt) == RETURN_EXPR
1988 && TREE_OPERAND (stmt, 0)
1989 && is_byref_result (TREE_OPERAND (stmt, 0)))
1992 /* Don't look inside trees that cannot embed references of interest. */
1993 else if (IS_TYPE_OR_DECL_P (stmt))
1996 pointer_set_insert (p_set, *stmt_p);
2001 /* Perform lowering of Ada trees to GENERIC. In particular:
2003 o Turn a DECL_BY_REFERENCE RESULT_DECL into a real by-reference decl
2004 and adjust all the references to this decl accordingly. */
2007 gnat_genericize (tree fndecl)
2009 /* Prior to GCC 4, an explicit By_Reference result mechanism for a function
2010 was handled by simply setting TREE_ADDRESSABLE on the result type.
2011 Everything required to actually pass by invisible ref using the target
2012 mechanism (e.g. extra parameter) was handled at RTL expansion time.
2014 This doesn't work with GCC 4 any more for several reasons. First, the
2015 gimplification process might need the creation of temporaries of this
2016 type, and the gimplifier ICEs on such attempts. Second, the middle-end
2017 now relies on a different attribute for such cases (DECL_BY_REFERENCE on
2018 RESULT/PARM_DECLs), and expects the user invisible by-reference-ness to
2019 be explicitly accounted for by the front-end in the function body.
2021 We achieve the complete transformation in two steps:
2023 1/ create_subprog_decl performs early attribute tweaks: it clears
2024 TREE_ADDRESSABLE from the result type and sets DECL_BY_REFERENCE on
2025 the result decl. The former ensures that the bit isn't set in the GCC
2026 tree saved for the function, so prevents ICEs on temporary creation.
2027 The latter we use here to trigger the rest of the processing.
2029 2/ This function performs the type transformation on the result decl
2030 and adjusts all the references to this decl from the function body
2033 Clearing TREE_ADDRESSABLE from the type differs from the C++ front-end
2034 strategy, which escapes the gimplifier temporary creation issues by
2035 creating it's own temporaries using TARGET_EXPR nodes. Our way relies
2036 on simple specific support code in aggregate_value_p to look at the
2037 target function result decl explicitly. */
2039 struct pointer_set_t *p_set;
2040 tree decl_result = DECL_RESULT (fndecl);
2042 if (!DECL_BY_REFERENCE (decl_result))
2045 /* Make the DECL_RESULT explicitly by-reference and adjust all the
2046 occurrences in the function body using the common tree-walking facility.
2047 We want to see every occurrence of the result decl to adjust the
2048 referencing tree, so need to use our own pointer set to control which
2049 trees should be visited again or not. */
2051 p_set = pointer_set_create ();
2053 TREE_TYPE (decl_result) = build_reference_type (TREE_TYPE (decl_result));
2054 TREE_ADDRESSABLE (decl_result) = 0;
2055 relayout_decl (decl_result);
2057 walk_tree (&DECL_SAVED_TREE (fndecl), gnat_genericize_r, p_set, NULL);
2059 pointer_set_destroy (p_set);
2062 /* Finish the definition of the current subprogram BODY and finalize it. */
2065 end_subprog_body (tree body)
2067 tree fndecl = current_function_decl;
2069 /* Mark the BLOCK for this level as being for this function and pop the
2070 level. Since the vars in it are the parameters, clear them. */
2071 BLOCK_VARS (current_binding_level->block) = 0;
2072 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
2073 DECL_INITIAL (fndecl) = current_binding_level->block;
2076 /* We handle pending sizes via the elaboration of types, so we don't
2077 need to save them. */
2078 get_pending_sizes ();
2080 /* Mark the RESULT_DECL as being in this subprogram. */
2081 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
2083 DECL_SAVED_TREE (fndecl) = body;
2085 current_function_decl = DECL_CONTEXT (fndecl);
2088 /* We cannot track the location of errors past this point. */
2089 error_gnat_node = Empty;
2091 /* If we're only annotating types, don't actually compile this function. */
2092 if (type_annotate_only)
2095 /* Perform the required pre-gimplification transformations on the tree. */
2096 gnat_genericize (fndecl);
2098 /* Dump functions before gimplification. */
2099 dump_function (TDI_original, fndecl);
2101 /* ??? This special handling of nested functions is probably obsolete. */
2102 if (!DECL_CONTEXT (fndecl))
2103 cgraph_finalize_function (fndecl, false);
2105 /* Register this function with cgraph just far enough to get it
2106 added to our parent's nested function list. */
2107 (void) cgraph_node (fndecl);
2111 gnat_builtin_function (tree decl)
2113 gnat_pushdecl (decl, Empty);
2117 /* Return an integer type with the number of bits of precision given by
2118 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
2119 it is a signed type. */
2122 gnat_type_for_size (unsigned precision, int unsignedp)
2127 if (precision <= 2 * MAX_BITS_PER_WORD
2128 && signed_and_unsigned_types[precision][unsignedp])
2129 return signed_and_unsigned_types[precision][unsignedp];
2132 t = make_unsigned_type (precision);
2134 t = make_signed_type (precision);
2136 if (precision <= 2 * MAX_BITS_PER_WORD)
2137 signed_and_unsigned_types[precision][unsignedp] = t;
2141 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
2142 TYPE_NAME (t) = get_identifier (type_name);
2148 /* Likewise for floating-point types. */
2151 float_type_for_precision (int precision, enum machine_mode mode)
2156 if (float_types[(int) mode])
2157 return float_types[(int) mode];
2159 float_types[(int) mode] = t = make_node (REAL_TYPE);
2160 TYPE_PRECISION (t) = precision;
2163 gcc_assert (TYPE_MODE (t) == mode);
2166 sprintf (type_name, "FLOAT_%d", precision);
2167 TYPE_NAME (t) = get_identifier (type_name);
2173 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
2174 an unsigned type; otherwise a signed type is returned. */
2177 gnat_type_for_mode (enum machine_mode mode, int unsignedp)
2179 if (mode == BLKmode)
2182 if (mode == VOIDmode)
2183 return void_type_node;
2185 if (COMPLEX_MODE_P (mode))
2188 if (SCALAR_FLOAT_MODE_P (mode))
2189 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
2191 if (SCALAR_INT_MODE_P (mode))
2192 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2194 if (VECTOR_MODE_P (mode))
2196 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2197 tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
2199 return build_vector_type_for_mode (inner_type, mode);
2205 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
2208 gnat_unsigned_type (tree type_node)
2210 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
2212 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2214 type = copy_node (type);
2215 TREE_TYPE (type) = type_node;
2217 else if (TREE_TYPE (type_node)
2218 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2219 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2221 type = copy_node (type);
2222 TREE_TYPE (type) = TREE_TYPE (type_node);
2228 /* Return the signed version of a TYPE_NODE, a scalar type. */
2231 gnat_signed_type (tree type_node)
2233 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
2235 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2237 type = copy_node (type);
2238 TREE_TYPE (type) = type_node;
2240 else if (TREE_TYPE (type_node)
2241 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2242 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2244 type = copy_node (type);
2245 TREE_TYPE (type) = TREE_TYPE (type_node);
2251 /* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
2252 transparently converted to each other. */
2255 gnat_types_compatible_p (tree t1, tree t2)
2257 enum tree_code code;
2259 /* This is the default criterion. */
2260 if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
2263 /* We only check structural equivalence here. */
2264 if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
2267 /* Vector types are also compatible if they have the same number of subparts
2268 and the same form of (scalar) element type. */
2269 if (code == VECTOR_TYPE
2270 && TYPE_VECTOR_SUBPARTS (t1) == TYPE_VECTOR_SUBPARTS (t2)
2271 && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
2272 && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
2275 /* Array types are also compatible if they are constrained and have
2276 the same component type and the same domain. */
2277 if (code == ARRAY_TYPE
2278 && TREE_TYPE (t1) == TREE_TYPE (t2)
2279 && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
2280 || (TYPE_DOMAIN (t1)
2282 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
2283 TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
2284 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
2285 TYPE_MAX_VALUE (TYPE_DOMAIN (t2))))))
2288 /* Padding record types are also compatible if they pad the same
2289 type and have the same constant size. */
2290 if (code == RECORD_TYPE
2291 && TYPE_PADDING_P (t1) && TYPE_PADDING_P (t2)
2292 && TREE_TYPE (TYPE_FIELDS (t1)) == TREE_TYPE (TYPE_FIELDS (t2))
2293 && tree_int_cst_equal (TYPE_SIZE (t1), TYPE_SIZE (t2)))
2299 /* EXP is an expression for the size of an object. If this size contains
2300 discriminant references, replace them with the maximum (if MAX_P) or
2301 minimum (if !MAX_P) possible value of the discriminant. */
2304 max_size (tree exp, bool max_p)
2306 enum tree_code code = TREE_CODE (exp);
2307 tree type = TREE_TYPE (exp);
2309 switch (TREE_CODE_CLASS (code))
2311 case tcc_declaration:
2316 if (code == CALL_EXPR)
2321 t = maybe_inline_call_in_expr (exp);
2323 return max_size (t, max_p);
2325 n = call_expr_nargs (exp);
2327 argarray = (tree *) alloca (n * sizeof (tree));
2328 for (i = 0; i < n; i++)
2329 argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
2330 return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
2335 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
2336 modify. Otherwise, we treat it like a variable. */
2337 if (!CONTAINS_PLACEHOLDER_P (exp))
2340 type = TREE_TYPE (TREE_OPERAND (exp, 1));
2342 max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), true);
2344 case tcc_comparison:
2345 return max_p ? size_one_node : size_zero_node;
2349 case tcc_expression:
2350 switch (TREE_CODE_LENGTH (code))
2353 if (code == NON_LVALUE_EXPR)
2354 return max_size (TREE_OPERAND (exp, 0), max_p);
2357 fold_build1 (code, type,
2358 max_size (TREE_OPERAND (exp, 0),
2359 code == NEGATE_EXPR ? !max_p : max_p));
2362 if (code == COMPOUND_EXPR)
2363 return max_size (TREE_OPERAND (exp, 1), max_p);
2365 /* Calculate "(A ? B : C) - D" as "A ? B - D : C - D" which
2366 may provide a tighter bound on max_size. */
2367 if (code == MINUS_EXPR
2368 && TREE_CODE (TREE_OPERAND (exp, 0)) == COND_EXPR)
2370 tree lhs = fold_build2 (MINUS_EXPR, type,
2371 TREE_OPERAND (TREE_OPERAND (exp, 0), 1),
2372 TREE_OPERAND (exp, 1));
2373 tree rhs = fold_build2 (MINUS_EXPR, type,
2374 TREE_OPERAND (TREE_OPERAND (exp, 0), 2),
2375 TREE_OPERAND (exp, 1));
2376 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2377 max_size (lhs, max_p),
2378 max_size (rhs, max_p));
2382 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
2383 tree rhs = max_size (TREE_OPERAND (exp, 1),
2384 code == MINUS_EXPR ? !max_p : max_p);
2386 /* Special-case wanting the maximum value of a MIN_EXPR.
2387 In that case, if one side overflows, return the other.
2388 sizetype is signed, but we know sizes are non-negative.
2389 Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
2390 overflowing or the maximum possible value and the RHS
2394 && TREE_CODE (rhs) == INTEGER_CST
2395 && TREE_OVERFLOW (rhs))
2399 && TREE_CODE (lhs) == INTEGER_CST
2400 && TREE_OVERFLOW (lhs))
2402 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
2403 && ((TREE_CODE (lhs) == INTEGER_CST
2404 && TREE_OVERFLOW (lhs))
2405 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
2406 && !TREE_CONSTANT (rhs))
2409 return fold_build2 (code, type, lhs, rhs);
2413 if (code == SAVE_EXPR)
2415 else if (code == COND_EXPR)
2416 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2417 max_size (TREE_OPERAND (exp, 1), max_p),
2418 max_size (TREE_OPERAND (exp, 2), max_p));
2421 /* Other tree classes cannot happen. */
2429 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2430 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2431 Return a constructor for the template. */
2434 build_template (tree template_type, tree array_type, tree expr)
2436 tree template_elts = NULL_TREE;
2437 tree bound_list = NULL_TREE;
2440 while (TREE_CODE (array_type) == RECORD_TYPE
2441 && (TYPE_PADDING_P (array_type)
2442 || TYPE_JUSTIFIED_MODULAR_P (array_type)))
2443 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
2445 if (TREE_CODE (array_type) == ARRAY_TYPE
2446 || (TREE_CODE (array_type) == INTEGER_TYPE
2447 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
2448 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
2450 /* First make the list for a CONSTRUCTOR for the template. Go down the
2451 field list of the template instead of the type chain because this
2452 array might be an Ada array of arrays and we can't tell where the
2453 nested arrays stop being the underlying object. */
2455 for (field = TYPE_FIELDS (template_type); field;
2457 ? (bound_list = TREE_CHAIN (bound_list))
2458 : (array_type = TREE_TYPE (array_type))),
2459 field = TREE_CHAIN (TREE_CHAIN (field)))
2461 tree bounds, min, max;
2463 /* If we have a bound list, get the bounds from there. Likewise
2464 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
2465 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
2466 This will give us a maximum range. */
2468 bounds = TREE_VALUE (bound_list);
2469 else if (TREE_CODE (array_type) == ARRAY_TYPE)
2470 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
2471 else if (expr && TREE_CODE (expr) == PARM_DECL
2472 && DECL_BY_COMPONENT_PTR_P (expr))
2473 bounds = TREE_TYPE (field);
2477 min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
2478 max = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MAX_VALUE (bounds));
2480 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
2481 substitute it from OBJECT. */
2482 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
2483 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
2485 template_elts = tree_cons (TREE_CHAIN (field), max,
2486 tree_cons (field, min, template_elts));
2489 return gnat_build_constructor (template_type, nreverse (template_elts));
2492 /* Build a 32bit VMS descriptor from a Mechanism_Type, which must specify
2493 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2494 in the type contains in its DECL_INITIAL the expression to use when
2495 a constructor is made for the type. GNAT_ENTITY is an entity used
2496 to print out an error message if the mechanism cannot be applied to
2497 an object of that type and also for the name. */
2500 build_vms_descriptor32 (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2502 tree record_type = make_node (RECORD_TYPE);
2503 tree pointer32_type;
2504 tree field_list = 0;
2513 /* If TYPE is an unconstrained array, use the underlying array type. */
2514 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2515 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2517 /* If this is an array, compute the number of dimensions in the array,
2518 get the index types, and point to the inner type. */
2519 if (TREE_CODE (type) != ARRAY_TYPE)
2522 for (ndim = 1, inner_type = type;
2523 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2524 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2525 ndim++, inner_type = TREE_TYPE (inner_type))
2528 idx_arr = (tree *) alloca (ndim * sizeof (tree));
2530 if (mech != By_Descriptor_NCA && mech != By_Short_Descriptor_NCA
2531 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2532 for (i = ndim - 1, inner_type = type;
2534 i--, inner_type = TREE_TYPE (inner_type))
2535 idx_arr[i] = TYPE_DOMAIN (inner_type);
2537 for (i = 0, inner_type = type;
2539 i++, inner_type = TREE_TYPE (inner_type))
2540 idx_arr[i] = TYPE_DOMAIN (inner_type);
2542 /* Now get the DTYPE value. */
2543 switch (TREE_CODE (type))
2548 if (TYPE_VAX_FLOATING_POINT_P (type))
2549 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2562 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2565 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2568 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2571 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2574 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2577 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2583 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2587 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2588 && TYPE_VAX_FLOATING_POINT_P (type))
2589 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2601 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2612 /* Get the CLASS value. */
2615 case By_Descriptor_A:
2616 case By_Short_Descriptor_A:
2619 case By_Descriptor_NCA:
2620 case By_Short_Descriptor_NCA:
2623 case By_Descriptor_SB:
2624 case By_Short_Descriptor_SB:
2628 case By_Short_Descriptor:
2629 case By_Descriptor_S:
2630 case By_Short_Descriptor_S:
2636 /* Make the type for a descriptor for VMS. The first four fields
2637 are the same for all types. */
2640 = chainon (field_list,
2641 make_descriptor_field
2642 ("LENGTH", gnat_type_for_size (16, 1), record_type,
2643 size_in_bytes ((mech == By_Descriptor_A ||
2644 mech == By_Short_Descriptor_A)
2645 ? inner_type : type)));
2647 field_list = chainon (field_list,
2648 make_descriptor_field ("DTYPE",
2649 gnat_type_for_size (8, 1),
2650 record_type, size_int (dtype)));
2651 field_list = chainon (field_list,
2652 make_descriptor_field ("CLASS",
2653 gnat_type_for_size (8, 1),
2654 record_type, size_int (klass)));
2656 /* Of course this will crash at run-time if the address space is not
2657 within the low 32 bits, but there is nothing else we can do. */
2658 pointer32_type = build_pointer_type_for_mode (type, SImode, false);
2661 = chainon (field_list,
2662 make_descriptor_field
2663 ("POINTER", pointer32_type, record_type,
2664 build_unary_op (ADDR_EXPR,
2666 build0 (PLACEHOLDER_EXPR, type))));
2671 case By_Short_Descriptor:
2672 case By_Descriptor_S:
2673 case By_Short_Descriptor_S:
2676 case By_Descriptor_SB:
2677 case By_Short_Descriptor_SB:
2679 = chainon (field_list,
2680 make_descriptor_field
2681 ("SB_L1", gnat_type_for_size (32, 1), record_type,
2682 TREE_CODE (type) == ARRAY_TYPE
2683 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2685 = chainon (field_list,
2686 make_descriptor_field
2687 ("SB_U1", gnat_type_for_size (32, 1), record_type,
2688 TREE_CODE (type) == ARRAY_TYPE
2689 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2692 case By_Descriptor_A:
2693 case By_Short_Descriptor_A:
2694 case By_Descriptor_NCA:
2695 case By_Short_Descriptor_NCA:
2696 field_list = chainon (field_list,
2697 make_descriptor_field ("SCALE",
2698 gnat_type_for_size (8, 1),
2702 field_list = chainon (field_list,
2703 make_descriptor_field ("DIGITS",
2704 gnat_type_for_size (8, 1),
2709 = chainon (field_list,
2710 make_descriptor_field
2711 ("AFLAGS", gnat_type_for_size (8, 1), record_type,
2712 size_int ((mech == By_Descriptor_NCA ||
2713 mech == By_Short_Descriptor_NCA)
2715 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2716 : (TREE_CODE (type) == ARRAY_TYPE
2717 && TYPE_CONVENTION_FORTRAN_P (type)
2720 field_list = chainon (field_list,
2721 make_descriptor_field ("DIMCT",
2722 gnat_type_for_size (8, 1),
2726 field_list = chainon (field_list,
2727 make_descriptor_field ("ARSIZE",
2728 gnat_type_for_size (32, 1),
2730 size_in_bytes (type)));
2732 /* Now build a pointer to the 0,0,0... element. */
2733 tem = build0 (PLACEHOLDER_EXPR, type);
2734 for (i = 0, inner_type = type; i < ndim;
2735 i++, inner_type = TREE_TYPE (inner_type))
2736 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
2737 convert (TYPE_DOMAIN (inner_type), size_zero_node),
2738 NULL_TREE, NULL_TREE);
2741 = chainon (field_list,
2742 make_descriptor_field
2744 build_pointer_type_for_mode (inner_type, SImode, false),
2747 build_pointer_type_for_mode (inner_type, SImode,
2751 /* Next come the addressing coefficients. */
2752 tem = size_one_node;
2753 for (i = 0; i < ndim; i++)
2757 = size_binop (MULT_EXPR, tem,
2758 size_binop (PLUS_EXPR,
2759 size_binop (MINUS_EXPR,
2760 TYPE_MAX_VALUE (idx_arr[i]),
2761 TYPE_MIN_VALUE (idx_arr[i])),
2764 fname[0] = ((mech == By_Descriptor_NCA ||
2765 mech == By_Short_Descriptor_NCA) ? 'S' : 'M');
2766 fname[1] = '0' + i, fname[2] = 0;
2768 = chainon (field_list,
2769 make_descriptor_field (fname,
2770 gnat_type_for_size (32, 1),
2771 record_type, idx_length));
2773 if (mech == By_Descriptor_NCA || mech == By_Short_Descriptor_NCA)
2777 /* Finally here are the bounds. */
2778 for (i = 0; i < ndim; i++)
2782 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2784 = chainon (field_list,
2785 make_descriptor_field
2786 (fname, gnat_type_for_size (32, 1), record_type,
2787 TYPE_MIN_VALUE (idx_arr[i])));
2791 = chainon (field_list,
2792 make_descriptor_field
2793 (fname, gnat_type_for_size (32, 1), record_type,
2794 TYPE_MAX_VALUE (idx_arr[i])));
2799 post_error ("unsupported descriptor type for &", gnat_entity);
2802 TYPE_NAME (record_type) = create_concat_name (gnat_entity, "DESC");
2803 finish_record_type (record_type, field_list, 0, false);
2807 /* Build a 64bit VMS descriptor from a Mechanism_Type, which must specify
2808 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2809 in the type contains in its DECL_INITIAL the expression to use when
2810 a constructor is made for the type. GNAT_ENTITY is an entity used
2811 to print out an error message if the mechanism cannot be applied to
2812 an object of that type and also for the name. */
2815 build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2817 tree record64_type = make_node (RECORD_TYPE);
2818 tree pointer64_type;
2819 tree field_list64 = 0;
2828 /* If TYPE is an unconstrained array, use the underlying array type. */
2829 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2830 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2832 /* If this is an array, compute the number of dimensions in the array,
2833 get the index types, and point to the inner type. */
2834 if (TREE_CODE (type) != ARRAY_TYPE)
2837 for (ndim = 1, inner_type = type;
2838 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2839 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2840 ndim++, inner_type = TREE_TYPE (inner_type))
2843 idx_arr = (tree *) alloca (ndim * sizeof (tree));
2845 if (mech != By_Descriptor_NCA
2846 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2847 for (i = ndim - 1, inner_type = type;
2849 i--, inner_type = TREE_TYPE (inner_type))
2850 idx_arr[i] = TYPE_DOMAIN (inner_type);
2852 for (i = 0, inner_type = type;
2854 i++, inner_type = TREE_TYPE (inner_type))
2855 idx_arr[i] = TYPE_DOMAIN (inner_type);
2857 /* Now get the DTYPE value. */
2858 switch (TREE_CODE (type))
2863 if (TYPE_VAX_FLOATING_POINT_P (type))
2864 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2877 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2880 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2883 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2886 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2889 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2892 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2898 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2902 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2903 && TYPE_VAX_FLOATING_POINT_P (type))
2904 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2916 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2927 /* Get the CLASS value. */
2930 case By_Descriptor_A:
2933 case By_Descriptor_NCA:
2936 case By_Descriptor_SB:
2940 case By_Descriptor_S:
2946 /* Make the type for a 64bit descriptor for VMS. The first six fields
2947 are the same for all types. */
2949 field_list64 = chainon (field_list64,
2950 make_descriptor_field ("MBO",
2951 gnat_type_for_size (16, 1),
2952 record64_type, size_int (1)));
2954 field_list64 = chainon (field_list64,
2955 make_descriptor_field ("DTYPE",
2956 gnat_type_for_size (8, 1),
2957 record64_type, size_int (dtype)));
2958 field_list64 = chainon (field_list64,
2959 make_descriptor_field ("CLASS",
2960 gnat_type_for_size (8, 1),
2961 record64_type, size_int (klass)));
2963 field_list64 = chainon (field_list64,
2964 make_descriptor_field ("MBMO",
2965 gnat_type_for_size (32, 1),
2966 record64_type, ssize_int (-1)));
2969 = chainon (field_list64,
2970 make_descriptor_field
2971 ("LENGTH", gnat_type_for_size (64, 1), record64_type,
2972 size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));
2974 pointer64_type = build_pointer_type_for_mode (type, DImode, false);
2977 = chainon (field_list64,
2978 make_descriptor_field
2979 ("POINTER", pointer64_type, record64_type,
2980 build_unary_op (ADDR_EXPR,
2982 build0 (PLACEHOLDER_EXPR, type))));
2987 case By_Descriptor_S:
2990 case By_Descriptor_SB:
2992 = chainon (field_list64,
2993 make_descriptor_field
2994 ("SB_L1", gnat_type_for_size (64, 1), record64_type,
2995 TREE_CODE (type) == ARRAY_TYPE
2996 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2998 = chainon (field_list64,
2999 make_descriptor_field
3000 ("SB_U1", gnat_type_for_size (64, 1), record64_type,
3001 TREE_CODE (type) == ARRAY_TYPE
3002 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
3005 case By_Descriptor_A:
3006 case By_Descriptor_NCA:
3007 field_list64 = chainon (field_list64,
3008 make_descriptor_field ("SCALE",
3009 gnat_type_for_size (8, 1),
3013 field_list64 = chainon (field_list64,
3014 make_descriptor_field ("DIGITS",
3015 gnat_type_for_size (8, 1),
3020 = chainon (field_list64,
3021 make_descriptor_field
3022 ("AFLAGS", gnat_type_for_size (8, 1), record64_type,
3023 size_int (mech == By_Descriptor_NCA
3025 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
3026 : (TREE_CODE (type) == ARRAY_TYPE
3027 && TYPE_CONVENTION_FORTRAN_P (type)
3030 field_list64 = chainon (field_list64,
3031 make_descriptor_field ("DIMCT",
3032 gnat_type_for_size (8, 1),
3036 field_list64 = chainon (field_list64,
3037 make_descriptor_field ("MBZ",
3038 gnat_type_for_size (32, 1),
3041 field_list64 = chainon (field_list64,
3042 make_descriptor_field ("ARSIZE",
3043 gnat_type_for_size (64, 1),
3045 size_in_bytes (type)));
3047 /* Now build a pointer to the 0,0,0... element. */
3048 tem = build0 (PLACEHOLDER_EXPR, type);
3049 for (i = 0, inner_type = type; i < ndim;
3050 i++, inner_type = TREE_TYPE (inner_type))
3051 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
3052 convert (TYPE_DOMAIN (inner_type), size_zero_node),
3053 NULL_TREE, NULL_TREE);
3056 = chainon (field_list64,
3057 make_descriptor_field
3059 build_pointer_type_for_mode (inner_type, DImode, false),
3062 build_pointer_type_for_mode (inner_type, DImode,
3066 /* Next come the addressing coefficients. */
3067 tem = size_one_node;
3068 for (i = 0; i < ndim; i++)
3072 = size_binop (MULT_EXPR, tem,
3073 size_binop (PLUS_EXPR,
3074 size_binop (MINUS_EXPR,
3075 TYPE_MAX_VALUE (idx_arr[i]),
3076 TYPE_MIN_VALUE (idx_arr[i])),
3079 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
3080 fname[1] = '0' + i, fname[2] = 0;
3082 = chainon (field_list64,
3083 make_descriptor_field (fname,
3084 gnat_type_for_size (64, 1),
3085 record64_type, idx_length));
3087 if (mech == By_Descriptor_NCA)
3091 /* Finally here are the bounds. */
3092 for (i = 0; i < ndim; i++)
3096 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
3098 = chainon (field_list64,
3099 make_descriptor_field
3100 (fname, gnat_type_for_size (64, 1), record64_type,
3101 TYPE_MIN_VALUE (idx_arr[i])));
3105 = chainon (field_list64,
3106 make_descriptor_field
3107 (fname, gnat_type_for_size (64, 1), record64_type,
3108 TYPE_MAX_VALUE (idx_arr[i])));
3113 post_error ("unsupported descriptor type for &", gnat_entity);
3116 TYPE_NAME (record64_type) = create_concat_name (gnat_entity, "DESC64");
3117 finish_record_type (record64_type, field_list64, 0, false);
3118 return record64_type;
3121 /* Utility routine for above code to make a field. */
3124 make_descriptor_field (const char *name, tree type,
3125 tree rec_type, tree initial)
3128 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
3130 DECL_INITIAL (field) = initial;
3134 /* Convert GNU_EXPR, a pointer to a 64bit VMS descriptor, to GNU_TYPE, a
3135 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3136 which the VMS descriptor is passed. */
3139 convert_vms_descriptor64 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
3141 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3142 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3143 /* The CLASS field is the 3rd field in the descriptor. */
3144 tree klass = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type)));
3145 /* The POINTER field is the 6th field in the descriptor. */
3146 tree pointer64 = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (klass)));
3148 /* Retrieve the value of the POINTER field. */
3150 = build3 (COMPONENT_REF, TREE_TYPE (pointer64), desc, pointer64, NULL_TREE);
3152 if (POINTER_TYPE_P (gnu_type))
3153 return convert (gnu_type, gnu_expr64);
3155 else if (TYPE_IS_FAT_POINTER_P (gnu_type))
3157 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
3158 tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
3159 tree template_type = TREE_TYPE (p_bounds_type);
3160 tree min_field = TYPE_FIELDS (template_type);
3161 tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
3162 tree template_tree, template_addr, aflags, dimct, t, u;
3163 /* See the head comment of build_vms_descriptor. */
3164 int iklass = TREE_INT_CST_LOW (DECL_INITIAL (klass));
3165 tree lfield, ufield;
3167 /* Convert POINTER to the type of the P_ARRAY field. */
3168 gnu_expr64 = convert (p_array_type, gnu_expr64);
3172 case 1: /* Class S */
3173 case 15: /* Class SB */
3174 /* Build {1, LENGTH} template; LENGTH64 is the 5th field. */
3175 t = TREE_CHAIN (TREE_CHAIN (klass));
3176 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3177 t = tree_cons (min_field,
3178 convert (TREE_TYPE (min_field), integer_one_node),
3179 tree_cons (max_field,
3180 convert (TREE_TYPE (max_field), t),
3182 template_tree = gnat_build_constructor (template_type, t);
3183 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template_tree);
3185 /* For class S, we are done. */
3189 /* Test that we really have a SB descriptor, like DEC Ada. */
3190 t = build3 (COMPONENT_REF, TREE_TYPE (klass), desc, klass, NULL);
3191 u = convert (TREE_TYPE (klass), DECL_INITIAL (klass));
3192 u = build_binary_op (EQ_EXPR, integer_type_node, t, u);
3193 /* If so, there is already a template in the descriptor and
3194 it is located right after the POINTER field. The fields are
3195 64bits so they must be repacked. */
3196 t = TREE_CHAIN (pointer64);
3197 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3198 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3201 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3203 (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (template_type))), ufield);
3205 /* Build the template in the form of a constructor. */
3206 t = tree_cons (TYPE_FIELDS (template_type), lfield,
3207 tree_cons (TREE_CHAIN (TYPE_FIELDS (template_type)),
3208 ufield, NULL_TREE));
3209 template_tree = gnat_build_constructor (template_type, t);
3211 /* Otherwise use the {1, LENGTH} template we build above. */
3212 template_addr = build3 (COND_EXPR, p_bounds_type, u,
3213 build_unary_op (ADDR_EXPR, p_bounds_type,
3218 case 4: /* Class A */
3219 /* The AFLAGS field is the 3rd field after the pointer in the
3221 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer64)));
3222 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3223 /* The DIMCT field is the next field in the descriptor after
3226 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3227 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
3228 or FL_COEFF or FL_BOUNDS not set. */
3229 u = build_int_cst (TREE_TYPE (aflags), 192);
3230 u = build_binary_op (TRUTH_OR_EXPR, integer_type_node,
3231 build_binary_op (NE_EXPR, integer_type_node,
3233 convert (TREE_TYPE (dimct),
3235 build_binary_op (NE_EXPR, integer_type_node,
3236 build2 (BIT_AND_EXPR,
3240 /* There is already a template in the descriptor and it is located
3241 in block 3. The fields are 64bits so they must be repacked. */
3242 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN
3244 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3245 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3248 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3250 (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (template_type))), ufield);
3252 /* Build the template in the form of a constructor. */
3253 t = tree_cons (TYPE_FIELDS (template_type), lfield,
3254 tree_cons (TREE_CHAIN (TYPE_FIELDS (template_type)),
3255 ufield, NULL_TREE));
3256 template_tree = gnat_build_constructor (template_type, t);
3257 template_tree = build3 (COND_EXPR, template_type, u,
3258 build_call_raise (CE_Length_Check_Failed, Empty,
3259 N_Raise_Constraint_Error),
3262 = build_unary_op (ADDR_EXPR, p_bounds_type, template_tree);
3265 case 10: /* Class NCA */
3267 post_error ("unsupported descriptor type for &", gnat_subprog);
3268 template_addr = integer_zero_node;
3272 /* Build the fat pointer in the form of a constructor. */
3273 t = tree_cons (TYPE_FIELDS (gnu_type), gnu_expr64,
3274 tree_cons (TREE_CHAIN (TYPE_FIELDS (gnu_type)),
3275 template_addr, NULL_TREE));
3276 return gnat_build_constructor (gnu_type, t);
3283 /* Convert GNU_EXPR, a pointer to a 32bit VMS descriptor, to GNU_TYPE, a
3284 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3285 which the VMS descriptor is passed. */
3288 convert_vms_descriptor32 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
3290 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3291 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3292 /* The CLASS field is the 3rd field in the descriptor. */
3293 tree klass = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type)));
3294 /* The POINTER field is the 4th field in the descriptor. */
3295 tree pointer = TREE_CHAIN (klass);
3297 /* Retrieve the value of the POINTER field. */
3299 = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);
3301 if (POINTER_TYPE_P (gnu_type))
3302 return convert (gnu_type, gnu_expr32);
3304 else if (TYPE_IS_FAT_POINTER_P (gnu_type))
3306 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
3307 tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
3308 tree template_type = TREE_TYPE (p_bounds_type);
3309 tree min_field = TYPE_FIELDS (template_type);
3310 tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
3311 tree template_tree, template_addr, aflags, dimct, t, u;
3312 /* See the head comment of build_vms_descriptor. */
3313 int iklass = TREE_INT_CST_LOW (DECL_INITIAL (klass));
3315 /* Convert POINTER to the type of the P_ARRAY field. */
3316 gnu_expr32 = convert (p_array_type, gnu_expr32);
3320 case 1: /* Class S */
3321 case 15: /* Class SB */
3322 /* Build {1, LENGTH} template; LENGTH is the 1st field. */
3323 t = TYPE_FIELDS (desc_type);
3324 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3325 t = tree_cons (min_field,
3326 convert (TREE_TYPE (min_field), integer_one_node),
3327 tree_cons (max_field,
3328 convert (TREE_TYPE (max_field), t),
3330 template_tree = gnat_build_constructor (template_type, t);
3331 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template_tree);
3333 /* For class S, we are done. */
3337 /* Test that we really have a SB descriptor, like DEC Ada. */
3338 t = build3 (COMPONENT_REF, TREE_TYPE (klass), desc, klass, NULL);
3339 u = convert (TREE_TYPE (klass), DECL_INITIAL (klass));
3340 u = build_binary_op (EQ_EXPR, integer_type_node, t, u);
3341 /* If so, there is already a template in the descriptor and
3342 it is located right after the POINTER field. */
3343 t = TREE_CHAIN (pointer);
3345 = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3346 /* Otherwise use the {1, LENGTH} template we build above. */
3347 template_addr = build3 (COND_EXPR, p_bounds_type, u,
3348 build_unary_op (ADDR_EXPR, p_bounds_type,
3353 case 4: /* Class A */
3354 /* The AFLAGS field is the 7th field in the descriptor. */
3355 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer)));
3356 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3357 /* The DIMCT field is the 8th field in the descriptor. */
3359 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3360 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
3361 or FL_COEFF or FL_BOUNDS not set. */
3362 u = build_int_cst (TREE_TYPE (aflags), 192);
3363 u = build_binary_op (TRUTH_OR_EXPR, integer_type_node,
3364 build_binary_op (NE_EXPR, integer_type_node,
3366 convert (TREE_TYPE (dimct),
3368 build_binary_op (NE_EXPR, integer_type_node,
3369 build2 (BIT_AND_EXPR,
3373 /* There is already a template in the descriptor and it is
3374 located at the start of block 3 (12th field). */
3375 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (t))));
3377 = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3378 template_tree = build3 (COND_EXPR, TREE_TYPE (t), u,
3379 build_call_raise (CE_Length_Check_Failed, Empty,
3380 N_Raise_Constraint_Error),
3383 = build_unary_op (ADDR_EXPR, p_bounds_type, template_tree);
3386 case 10: /* Class NCA */
3388 post_error ("unsupported descriptor type for &", gnat_subprog);
3389 template_addr = integer_zero_node;
3393 /* Build the fat pointer in the form of a constructor. */
3394 t = tree_cons (TYPE_FIELDS (gnu_type), gnu_expr32,
3395 tree_cons (TREE_CHAIN (TYPE_FIELDS (gnu_type)),
3396 template_addr, NULL_TREE));
3398 return gnat_build_constructor (gnu_type, t);
3405 /* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
3406 pointer or fat pointer type. GNU_EXPR_ALT_TYPE is the alternate (32-bit)
3407 pointer type of GNU_EXPR. GNAT_SUBPROG is the subprogram to which the
3408 VMS descriptor is passed. */
3411 convert_vms_descriptor (tree gnu_type, tree gnu_expr, tree gnu_expr_alt_type,
3412 Entity_Id gnat_subprog)
3414 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3415 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3416 tree mbo = TYPE_FIELDS (desc_type);
3417 const char *mbostr = IDENTIFIER_POINTER (DECL_NAME (mbo));
3418 tree mbmo = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (mbo)));
3419 tree is64bit, gnu_expr32, gnu_expr64;
3421 /* If the field name is not MBO, it must be 32-bit and no alternate.
3422 Otherwise primary must be 64-bit and alternate 32-bit. */
3423 if (strcmp (mbostr, "MBO") != 0)
3424 return convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);
3426 /* Build the test for 64-bit descriptor. */
3427 mbo = build3 (COMPONENT_REF, TREE_TYPE (mbo), desc, mbo, NULL_TREE);
3428 mbmo = build3 (COMPONENT_REF, TREE_TYPE (mbmo), desc, mbmo, NULL_TREE);
3430 = build_binary_op (TRUTH_ANDIF_EXPR, integer_type_node,
3431 build_binary_op (EQ_EXPR, integer_type_node,
3432 convert (integer_type_node, mbo),
3434 build_binary_op (EQ_EXPR, integer_type_node,
3435 convert (integer_type_node, mbmo),
3436 integer_minus_one_node));
3438 /* Build the 2 possible end results. */
3439 gnu_expr64 = convert_vms_descriptor64 (gnu_type, gnu_expr, gnat_subprog);
3440 gnu_expr = fold_convert (gnu_expr_alt_type, gnu_expr);
3441 gnu_expr32 = convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);
3443 return build3 (COND_EXPR, gnu_type, is64bit, gnu_expr64, gnu_expr32);
3446 /* Build a stub for the subprogram specified by the GCC tree GNU_SUBPROG
3447 and the GNAT node GNAT_SUBPROG. */
3450 build_function_stub (tree gnu_subprog, Entity_Id gnat_subprog)
3452 tree gnu_subprog_type, gnu_subprog_addr, gnu_subprog_call;
3453 tree gnu_stub_param, gnu_param_list, gnu_arg_types, gnu_param;
3454 tree gnu_stub_decl = DECL_FUNCTION_STUB (gnu_subprog);
3457 gnu_subprog_type = TREE_TYPE (gnu_subprog);
3458 gnu_param_list = NULL_TREE;
3460 begin_subprog_body (gnu_stub_decl);
3463 start_stmt_group ();
3465 /* Loop over the parameters of the stub and translate any of them
3466 passed by descriptor into a by reference one. */
3467 for (gnu_stub_param = DECL_ARGUMENTS (gnu_stub_decl),
3468 gnu_arg_types = TYPE_ARG_TYPES (gnu_subprog_type);
3470 gnu_stub_param = TREE_CHAIN (gnu_stub_param),
3471 gnu_arg_types = TREE_CHAIN (gnu_arg_types))
3473 if (DECL_BY_DESCRIPTOR_P (gnu_stub_param))
3475 = convert_vms_descriptor (TREE_VALUE (gnu_arg_types),
3477 DECL_PARM_ALT_TYPE (gnu_stub_param),
3480 gnu_param = gnu_stub_param;
3482 gnu_param_list = tree_cons (NULL_TREE, gnu_param, gnu_param_list);
3485 gnu_body = end_stmt_group ();
3487 /* Invoke the internal subprogram. */
3488 gnu_subprog_addr = build1 (ADDR_EXPR, build_pointer_type (gnu_subprog_type),
3490 gnu_subprog_call = build_call_list (TREE_TYPE (gnu_subprog_type),
3492 nreverse (gnu_param_list));
3494 /* Propagate the return value, if any. */
3495 if (VOID_TYPE_P (TREE_TYPE (gnu_subprog_type)))
3496 append_to_statement_list (gnu_subprog_call, &gnu_body);
3498 append_to_statement_list (build_return_expr (DECL_RESULT (gnu_stub_decl),
3504 allocate_struct_function (gnu_stub_decl, false);
3505 end_subprog_body (gnu_body);
3508 /* Build a type to be used to represent an aliased object whose nominal
3509 type is an unconstrained array. This consists of a RECORD_TYPE containing
3510 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
3511 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
3512 is used to represent an arbitrary unconstrained object. Use NAME
3513 as the name of the record. */
3516 build_unc_object_type (tree template_type, tree object_type, tree name)
3518 tree type = make_node (RECORD_TYPE);
3519 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
3520 template_type, type, 0, 0, 0, 1);
3521 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
3524 TYPE_NAME (type) = name;
3525 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
3526 finish_record_type (type,
3527 chainon (chainon (NULL_TREE, template_field),
3534 /* Same, taking a thin or fat pointer type instead of a template type. */
3537 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
3542 gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
3545 = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
3546 ? TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
3547 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
3548 return build_unc_object_type (template_type, object_type, name);
3551 /* Shift the component offsets within an unconstrained object TYPE to make it
3552 suitable for use as a designated type for thin pointers. */
3555 shift_unc_components_for_thin_pointers (tree type)
3557 /* Thin pointer values designate the ARRAY data of an unconstrained object,
3558 allocated past the BOUNDS template. The designated type is adjusted to
3559 have ARRAY at position zero and the template at a negative offset, so
3560 that COMPONENT_REFs on (*thin_ptr) designate the proper location. */
3562 tree bounds_field = TYPE_FIELDS (type);
3563 tree array_field = TREE_CHAIN (TYPE_FIELDS (type));
3565 DECL_FIELD_OFFSET (bounds_field)
3566 = size_binop (MINUS_EXPR, size_zero_node, byte_position (array_field));
3568 DECL_FIELD_OFFSET (array_field) = size_zero_node;
3569 DECL_FIELD_BIT_OFFSET (array_field) = bitsize_zero_node;
3572 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
3573 In the normal case this is just two adjustments, but we have more to
3574 do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
3577 update_pointer_to (tree old_type, tree new_type)
3579 tree ptr = TYPE_POINTER_TO (old_type);
3580 tree ref = TYPE_REFERENCE_TO (old_type);
3584 /* If this is the main variant, process all the other variants first. */
3585 if (TYPE_MAIN_VARIANT (old_type) == old_type)
3586 for (type = TYPE_NEXT_VARIANT (old_type); type;
3587 type = TYPE_NEXT_VARIANT (type))
3588 update_pointer_to (type, new_type);
3590 /* If no pointers and no references, we are done. */
3594 /* Merge the old type qualifiers in the new type.
3596 Each old variant has qualifiers for specific reasons, and the new
3597 designated type as well. Each set of qualifiers represents useful
3598 information grabbed at some point, and merging the two simply unifies
3599 these inputs into the final type description.
3601 Consider for instance a volatile type frozen after an access to constant
3602 type designating it; after the designated type's freeze, we get here with
3603 a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
3604 when the access type was processed. We will make a volatile and readonly
3605 designated type, because that's what it really is.
3607 We might also get here for a non-dummy OLD_TYPE variant with different
3608 qualifiers than those of NEW_TYPE, for instance in some cases of pointers
3609 to private record type elaboration (see the comments around the call to
3610 this routine in gnat_to_gnu_entity <E_Access_Type>). We have to merge
3611 the qualifiers in those cases too, to avoid accidentally discarding the
3612 initial set, and will often end up with OLD_TYPE == NEW_TYPE then. */
3614 = build_qualified_type (new_type,
3615 TYPE_QUALS (old_type) | TYPE_QUALS (new_type));
3617 /* If old type and new type are identical, there is nothing to do. */
3618 if (old_type == new_type)
3621 /* Otherwise, first handle the simple case. */
3622 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
3624 TYPE_POINTER_TO (new_type) = ptr;
3625 TYPE_REFERENCE_TO (new_type) = ref;
3627 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
3628 for (ptr1 = TYPE_MAIN_VARIANT (ptr); ptr1;
3629 ptr1 = TYPE_NEXT_VARIANT (ptr1))
3630 TREE_TYPE (ptr1) = new_type;
3632 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
3633 for (ref1 = TYPE_MAIN_VARIANT (ref); ref1;
3634 ref1 = TYPE_NEXT_VARIANT (ref1))
3635 TREE_TYPE (ref1) = new_type;
3638 /* Now deal with the unconstrained array case. In this case the "pointer"
3639 is actually a RECORD_TYPE where both fields are pointers to dummy nodes.
3640 Turn them into pointers to the correct types using update_pointer_to. */
3641 else if (!TYPE_IS_FAT_POINTER_P (ptr))
3646 tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
3647 tree array_field = TYPE_FIELDS (ptr);
3648 tree bounds_field = TREE_CHAIN (TYPE_FIELDS (ptr));
3649 tree new_ptr = TYPE_POINTER_TO (new_type);
3653 /* Make pointers to the dummy template point to the real template. */
3655 (TREE_TYPE (TREE_TYPE (bounds_field)),
3656 TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_ptr)))));
3658 /* The references to the template bounds present in the array type
3659 are made through a PLACEHOLDER_EXPR of type NEW_PTR. Since we
3660 are updating PTR to make it a full replacement for NEW_PTR as
3661 pointer to NEW_TYPE, we must rework the PLACEHOLDER_EXPR so as
3662 to make it of type PTR. */
3663 new_ref = build3 (COMPONENT_REF, TREE_TYPE (bounds_field),
3664 build0 (PLACEHOLDER_EXPR, ptr),
3665 bounds_field, NULL_TREE);
3667 /* Create the new array for the new PLACEHOLDER_EXPR and make pointers
3668 to the dummy array point to it. */
3670 (TREE_TYPE (TREE_TYPE (array_field)),
3671 substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))),
3672 TREE_CHAIN (TYPE_FIELDS (new_ptr)), new_ref));
3674 /* Make PTR the pointer to NEW_TYPE. */
3675 TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
3676 = TREE_TYPE (new_type) = ptr;
3678 /* And show the original pointer NEW_PTR to the debugger. This is the
3679 counterpart of the equivalent processing in gnat_pushdecl when the
3680 unconstrained array type is frozen after access types to it. Note
3681 that update_pointer_to can be invoked multiple times on the same
3682 couple of types because of the type variants. */
3684 && TREE_CODE (TYPE_NAME (ptr)) == TYPE_DECL
3685 && !DECL_ORIGINAL_TYPE (TYPE_NAME (ptr)))
3687 DECL_ORIGINAL_TYPE (TYPE_NAME (ptr)) = new_ptr;
3688 DECL_ARTIFICIAL (TYPE_NAME (ptr)) = 0;
3690 for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
3691 SET_TYPE_UNCONSTRAINED_ARRAY (var, new_type);
3693 /* Now handle updating the allocation record, what the thin pointer
3694 points to. Update all pointers from the old record into the new
3695 one, update the type of the array field, and recompute the size. */
3696 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);
3698 TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
3699 = TREE_TYPE (TREE_TYPE (array_field));
3701 /* The size recomputation needs to account for alignment constraints, so
3702 we let layout_type work it out. This will reset the field offsets to
3703 what they would be in a regular record, so we shift them back to what
3704 we want them to be for a thin pointer designated type afterwards. */
3705 DECL_SIZE (TYPE_FIELDS (new_obj_rec)) = 0;
3706 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))) = 0;
3707 TYPE_SIZE (new_obj_rec) = 0;
3708 layout_type (new_obj_rec);
3710 shift_unc_components_for_thin_pointers (new_obj_rec);
3712 /* We are done, at last. */
3713 rest_of_record_type_compilation (ptr);
3717 /* Convert EXPR, a pointer to a constrained array, into a pointer to an
3718 unconstrained one. This involves making or finding a template. */
3721 convert_to_fat_pointer (tree type, tree expr)
3723 tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
3724 tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
3725 tree etype = TREE_TYPE (expr);
3728 /* If EXPR is null, make a fat pointer that contains null pointers to the
3729 template and array. */
3730 if (integer_zerop (expr))
3732 gnat_build_constructor
3734 tree_cons (TYPE_FIELDS (type),
3735 convert (p_array_type, expr),
3736 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3737 convert (build_pointer_type (template_type),
3741 /* If EXPR is a thin pointer, make template and data from the record.. */
3742 else if (TYPE_IS_THIN_POINTER_P (etype))
3744 tree fields = TYPE_FIELDS (TREE_TYPE (etype));
3746 expr = save_expr (expr);
3747 if (TREE_CODE (expr) == ADDR_EXPR)
3748 expr = TREE_OPERAND (expr, 0);
3750 expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
3752 template_tree = build_component_ref (expr, NULL_TREE, fields, false);
3753 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
3754 build_component_ref (expr, NULL_TREE,
3755 TREE_CHAIN (fields), false));
3758 /* Otherwise, build the constructor for the template. */
3760 template_tree = build_template (template_type, TREE_TYPE (etype), expr);
3762 /* The final result is a constructor for the fat pointer.
3764 If EXPR is an argument of a foreign convention subprogram, the type it
3765 points to is directly the component type. In this case, the expression
3766 type may not match the corresponding FIELD_DECL type at this point, so we
3767 call "convert" here to fix that up if necessary. This type consistency is
3768 required, for instance because it ensures that possible later folding of
3769 COMPONENT_REFs against this constructor always yields something of the
3770 same type as the initial reference.
3772 Note that the call to "build_template" above is still fine because it
3773 will only refer to the provided TEMPLATE_TYPE in this case. */
3775 gnat_build_constructor
3777 tree_cons (TYPE_FIELDS (type),
3778 convert (p_array_type, expr),
3779 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3780 build_unary_op (ADDR_EXPR, NULL_TREE,
3785 /* Convert to a thin pointer type, TYPE. The only thing we know how to convert
3786 is something that is a fat pointer, so convert to it first if it EXPR
3787 is not already a fat pointer. */
3790 convert_to_thin_pointer (tree type, tree expr)
3792 if (!TYPE_IS_FAT_POINTER_P (TREE_TYPE (expr)))
3794 = convert_to_fat_pointer
3795 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
3797 /* We get the pointer to the data and use a NOP_EXPR to make it the
3799 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)),
3801 expr = build1 (NOP_EXPR, type, expr);
3806 /* Create an expression whose value is that of EXPR,
3807 converted to type TYPE. The TREE_TYPE of the value
3808 is always TYPE. This function implements all reasonable
3809 conversions; callers should filter out those that are
3810 not permitted by the language being compiled. */
3813 convert (tree type, tree expr)
3815 enum tree_code code = TREE_CODE (type);
3816 tree etype = TREE_TYPE (expr);
3817 enum tree_code ecode = TREE_CODE (etype);
3819 /* If EXPR is already the right type, we are done. */
3823 /* If both input and output have padding and are of variable size, do this
3824 as an unchecked conversion. Likewise if one is a mere variant of the
3825 other, so we avoid a pointless unpad/repad sequence. */
3826 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
3827 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
3828 && (!TREE_CONSTANT (TYPE_SIZE (type))
3829 || !TREE_CONSTANT (TYPE_SIZE (etype))
3830 || gnat_types_compatible_p (type, etype)
3831 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
3832 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
3835 /* If the output type has padding, convert to the inner type and make a
3836 constructor to build the record, unless a variable size is involved. */
3837 else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
3839 /* If we previously converted from another type and our type is
3840 of variable size, remove the conversion to avoid the need for
3841 variable-sized temporaries. Likewise for a conversion between
3842 original and packable version. */
3843 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3844 && (!TREE_CONSTANT (TYPE_SIZE (type))
3845 || (ecode == RECORD_TYPE
3846 && TYPE_NAME (etype)
3847 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
3848 expr = TREE_OPERAND (expr, 0);
3850 /* If we are just removing the padding from expr, convert the original
3851 object if we have variable size in order to avoid the need for some
3852 variable-sized temporaries. Likewise if the padding is a variant
3853 of the other, so we avoid a pointless unpad/repad sequence. */
3854 if (TREE_CODE (expr) == COMPONENT_REF
3855 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
3856 && (!TREE_CONSTANT (TYPE_SIZE (type))
3857 || gnat_types_compatible_p (type,
3858 TREE_TYPE (TREE_OPERAND (expr, 0)))
3859 || (ecode == RECORD_TYPE
3860 && TYPE_NAME (etype)
3861 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
3862 return convert (type, TREE_OPERAND (expr, 0));
3864 /* If the inner type is of self-referential size and the expression type
3865 is a record, do this as an unchecked conversion. But first pad the
3866 expression if possible to have the same size on both sides. */
3867 if (TREE_CODE (etype) == RECORD_TYPE
3868 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
3870 if (TREE_CONSTANT (TYPE_SIZE (etype)))
3871 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
3872 false, false, false, true), expr);
3873 return unchecked_convert (type, expr, false);
3876 /* If we are converting between array types with variable size, do the
3877 final conversion as an unchecked conversion, again to avoid the need
3878 for some variable-sized temporaries. If valid, this conversion is
3879 very likely purely technical and without real effects. */
3880 if (TREE_CODE (etype) == ARRAY_TYPE
3881 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
3882 && !TREE_CONSTANT (TYPE_SIZE (etype))
3883 && !TREE_CONSTANT (TYPE_SIZE (type)))
3884 return unchecked_convert (type,
3885 convert (TREE_TYPE (TYPE_FIELDS (type)),
3890 gnat_build_constructor (type,
3891 tree_cons (TYPE_FIELDS (type),
3893 (TYPE_FIELDS (type)),
3898 /* If the input type has padding, remove it and convert to the output type.
3899 The conditions ordering is arranged to ensure that the output type is not
3900 a padding type here, as it is not clear whether the conversion would
3901 always be correct if this was to happen. */
3902 else if (ecode == RECORD_TYPE && TYPE_PADDING_P (etype))
3906 /* If we have just converted to this padded type, just get the
3907 inner expression. */
3908 if (TREE_CODE (expr) == CONSTRUCTOR
3909 && !VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (expr))
3910 && VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->index
3911 == TYPE_FIELDS (etype))
3913 = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->value;
3915 /* Otherwise, build an explicit component reference. */
3918 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
3920 return convert (type, unpadded);
3923 /* If the input is a biased type, adjust first. */
3924 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
3925 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
3926 fold_convert (TREE_TYPE (etype),
3928 TYPE_MIN_VALUE (etype)));
3930 /* If the input is a justified modular type, we need to extract the actual
3931 object before converting it to any other type with the exceptions of an
3932 unconstrained array or of a mere type variant. It is useful to avoid the
3933 extraction and conversion in the type variant case because it could end
3934 up replacing a VAR_DECL expr by a constructor and we might be about the
3935 take the address of the result. */
3936 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
3937 && code != UNCONSTRAINED_ARRAY_TYPE
3938 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
3939 return convert (type, build_component_ref (expr, NULL_TREE,
3940 TYPE_FIELDS (etype), false));
3942 /* If converting to a type that contains a template, convert to the data
3943 type and then build the template. */
3944 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
3946 tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));
3948 /* If the source already has a template, get a reference to the
3949 associated array only, as we are going to rebuild a template
3950 for the target type anyway. */
3951 expr = maybe_unconstrained_array (expr);
3954 gnat_build_constructor
3956 tree_cons (TYPE_FIELDS (type),
3957 build_template (TREE_TYPE (TYPE_FIELDS (type)),
3958 obj_type, NULL_TREE),
3959 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3960 convert (obj_type, expr), NULL_TREE)));
3963 /* There are some special cases of expressions that we process
3965 switch (TREE_CODE (expr))
3971 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
3972 conversion in gnat_expand_expr. NULL_EXPR does not represent
3973 and actual value, so no conversion is needed. */
3974 expr = copy_node (expr);
3975 TREE_TYPE (expr) = type;
3979 /* If we are converting a STRING_CST to another constrained array type,
3980 just make a new one in the proper type. */
3981 if (code == ecode && AGGREGATE_TYPE_P (etype)
3982 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
3983 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
3985 expr = copy_node (expr);
3986 TREE_TYPE (expr) = type;
3992 /* If we are converting a VECTOR_CST to a mere variant type, just make
3993 a new one in the proper type. */
3994 if (code == ecode && gnat_types_compatible_p (type, etype))
3996 expr = copy_node (expr);
3997 TREE_TYPE (expr) = type;
4002 /* If we are converting a CONSTRUCTOR to a mere variant type, just make
4003 a new one in the proper type. */
4004 if (code == ecode && gnat_types_compatible_p (type, etype))
4006 expr = copy_node (expr);
4007 TREE_TYPE (expr) = type;
4011 /* Likewise for a conversion between original and packable version, but
4012 we have to work harder in order to preserve type consistency. */
4014 && code == RECORD_TYPE
4015 && TYPE_NAME (type) == TYPE_NAME (etype))
4017 VEC(constructor_elt,gc) *e = CONSTRUCTOR_ELTS (expr);
4018 unsigned HOST_WIDE_INT len = VEC_length (constructor_elt, e);
4019 VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, len);
4020 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
4021 unsigned HOST_WIDE_INT idx;
4024 /* Whether we need to clear TREE_CONSTANT et al. on the output
4025 constructor when we convert in place. */
4026 bool clear_constant = false;
4028 FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
4030 constructor_elt *elt = VEC_quick_push (constructor_elt, v, NULL);
4031 /* We expect only simple constructors. Otherwise, punt. */
4032 if (!(index == efield || index == DECL_ORIGINAL_FIELD (efield)))
4035 elt->value = convert (TREE_TYPE (field), value);
4037 /* If packing has made this field a bitfield and the input
4038 value couldn't be emitted statically any more, we need to
4039 clear TREE_CONSTANT on our output. */
4040 if (!clear_constant && TREE_CONSTANT (expr)
4041 && !CONSTRUCTOR_BITFIELD_P (efield)
4042 && CONSTRUCTOR_BITFIELD_P (field)
4043 && !initializer_constant_valid_for_bitfield_p (value))
4044 clear_constant = true;
4046 efield = TREE_CHAIN (efield);
4047 field = TREE_CHAIN (field);
4050 /* If we have been able to match and convert all the input fields
4051 to their output type, convert in place now. We'll fallback to a
4052 view conversion downstream otherwise. */
4055 expr = copy_node (expr);
4056 TREE_TYPE (expr) = type;
4057 CONSTRUCTOR_ELTS (expr) = v;
4059 TREE_CONSTANT (expr) = TREE_STATIC (expr) = false;
4064 /* Likewise for a conversion between array type and vector type with a
4065 compatible representative array. */
4066 else if (code == VECTOR_TYPE
4067 && ecode == ARRAY_TYPE
4068 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4071 VEC(constructor_elt,gc) *e = CONSTRUCTOR_ELTS (expr);
4072 unsigned HOST_WIDE_INT len = VEC_length (constructor_elt, e);
4073 VEC(constructor_elt,gc) *v;
4074 unsigned HOST_WIDE_INT ix;
4077 /* Build a VECTOR_CST from a *constant* array constructor. */
4078 if (TREE_CONSTANT (expr))
4080 bool constant_p = true;
4082 /* Iterate through elements and check if all constructor
4083 elements are *_CSTs. */
4084 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4085 if (!CONSTANT_CLASS_P (value))
4092 return build_vector_from_ctor (type,
4093 CONSTRUCTOR_ELTS (expr));
4096 /* Otherwise, build a regular vector constructor. */
4097 v = VEC_alloc (constructor_elt, gc, len);
4098 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4100 constructor_elt *elt = VEC_quick_push (constructor_elt, v, NULL);
4101 elt->index = NULL_TREE;
4104 expr = copy_node (expr);
4105 TREE_TYPE (expr) = type;
4106 CONSTRUCTOR_ELTS (expr) = v;
4111 case UNCONSTRAINED_ARRAY_REF:
4112 /* Convert this to the type of the inner array by getting the address of
4113 the array from the template. */
4114 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
4115 build_component_ref (TREE_OPERAND (expr, 0),
4116 get_identifier ("P_ARRAY"),
4118 etype = TREE_TYPE (expr);
4119 ecode = TREE_CODE (etype);
4122 case VIEW_CONVERT_EXPR:
4124 /* GCC 4.x is very sensitive to type consistency overall, and view
4125 conversions thus are very frequent. Even though just "convert"ing
4126 the inner operand to the output type is fine in most cases, it
4127 might expose unexpected input/output type mismatches in special
4128 circumstances so we avoid such recursive calls when we can. */
4129 tree op0 = TREE_OPERAND (expr, 0);
4131 /* If we are converting back to the original type, we can just
4132 lift the input conversion. This is a common occurrence with
4133 switches back-and-forth amongst type variants. */
4134 if (type == TREE_TYPE (op0))
4137 /* Otherwise, if we're converting between two aggregate or vector
4138 types, we might be allowed to substitute the VIEW_CONVERT_EXPR
4139 target type in place or to just convert the inner expression. */
4140 if ((AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
4141 || (VECTOR_TYPE_P (type) && VECTOR_TYPE_P (etype)))
4143 /* If we are converting between mere variants, we can just
4144 substitute the VIEW_CONVERT_EXPR in place. */
4145 if (gnat_types_compatible_p (type, etype))
4146 return build1 (VIEW_CONVERT_EXPR, type, op0);
4148 /* Otherwise, we may just bypass the input view conversion unless
4149 one of the types is a fat pointer, which is handled by
4150 specialized code below which relies on exact type matching. */
4151 else if (!TYPE_IS_FAT_POINTER_P (type)
4152 && !TYPE_IS_FAT_POINTER_P (etype))
4153 return convert (type, op0);
4159 /* If both types are record types, just convert the pointer and
4160 make a new INDIRECT_REF.
4162 ??? Disable this for now since it causes problems with the
4163 code in build_binary_op for MODIFY_EXPR which wants to
4164 strip off conversions. But that code really is a mess and
4165 we need to do this a much better way some time. */
4167 && (TREE_CODE (type) == RECORD_TYPE
4168 || TREE_CODE (type) == UNION_TYPE)
4169 && (TREE_CODE (etype) == RECORD_TYPE
4170 || TREE_CODE (etype) == UNION_TYPE)
4171 && !TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
4172 return build_unary_op (INDIRECT_REF, NULL_TREE,
4173 convert (build_pointer_type (type),
4174 TREE_OPERAND (expr, 0)));
4181 /* Check for converting to a pointer to an unconstrained array. */
4182 if (TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
4183 return convert_to_fat_pointer (type, expr);
4185 /* If we are converting between two aggregate or vector types that are mere
4186 variants, just make a VIEW_CONVERT_EXPR. Likewise when we are converting
4187 to a vector type from its representative array type. */
4188 else if ((code == ecode
4189 && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
4190 && gnat_types_compatible_p (type, etype))
4191 || (code == VECTOR_TYPE
4192 && ecode == ARRAY_TYPE
4193 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4195 return build1 (VIEW_CONVERT_EXPR, type, expr);
4197 /* In all other cases of related types, make a NOP_EXPR. */
4198 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
4199 || (code == INTEGER_CST && ecode == INTEGER_CST
4200 && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
4201 return fold_convert (type, expr);
4206 return fold_build1 (CONVERT_EXPR, type, expr);
4209 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4210 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4211 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4212 return unchecked_convert (type, expr, false);
4213 else if (TYPE_BIASED_REPRESENTATION_P (type))
4214 return fold_convert (type,
4215 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4216 convert (TREE_TYPE (type), expr),
4217 TYPE_MIN_VALUE (type)));
4219 /* ... fall through ... */
4223 /* If we are converting an additive expression to an integer type
4224 with lower precision, be wary of the optimization that can be
4225 applied by convert_to_integer. There are 2 problematic cases:
4226 - if the first operand was originally of a biased type,
4227 because we could be recursively called to convert it
4228 to an intermediate type and thus rematerialize the
4229 additive operator endlessly,
4230 - if the expression contains a placeholder, because an
4231 intermediate conversion that changes the sign could
4232 be inserted and thus introduce an artificial overflow
4233 at compile time when the placeholder is substituted. */
4234 if (code == INTEGER_TYPE
4235 && ecode == INTEGER_TYPE
4236 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4237 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4239 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4241 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4242 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4243 || CONTAINS_PLACEHOLDER_P (expr))
4244 return build1 (NOP_EXPR, type, expr);
4247 return fold (convert_to_integer (type, expr));
4250 case REFERENCE_TYPE:
4251 /* If converting between two pointers to records denoting
4252 both a template and type, adjust if needed to account
4253 for any differing offsets, since one might be negative. */
4254 if (TYPE_IS_THIN_POINTER_P (etype) && TYPE_IS_THIN_POINTER_P (type))
4257 = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
4258 bit_position (TYPE_FIELDS (TREE_TYPE (type))));
4259 tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
4260 sbitsize_int (BITS_PER_UNIT));
4262 expr = build1 (NOP_EXPR, type, expr);
4263 TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
4264 if (integer_zerop (byte_diff))
4267 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
4268 fold (convert (sizetype, byte_diff)));
4271 /* If converting to a thin pointer, handle specially. */
4272 if (TYPE_IS_THIN_POINTER_P (type)
4273 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
4274 return convert_to_thin_pointer (type, expr);
4276 /* If converting fat pointer to normal pointer, get the pointer to the
4277 array and then convert it. */
4278 else if (TYPE_IS_FAT_POINTER_P (etype))
4279 expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
4282 return fold (convert_to_pointer (type, expr));
4285 return fold (convert_to_real (type, expr));
4288 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
4290 gnat_build_constructor
4291 (type, tree_cons (TYPE_FIELDS (type),
4292 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
4295 /* ... fall through ... */
4298 /* In these cases, assume the front-end has validated the conversion.
4299 If the conversion is valid, it will be a bit-wise conversion, so
4300 it can be viewed as an unchecked conversion. */
4301 return unchecked_convert (type, expr, false);
4304 /* This is a either a conversion between a tagged type and some
4305 subtype, which we have to mark as a UNION_TYPE because of
4306 overlapping fields or a conversion of an Unchecked_Union. */
4307 return unchecked_convert (type, expr, false);
4309 case UNCONSTRAINED_ARRAY_TYPE:
4310 /* If the input is a VECTOR_TYPE, convert to the representative
4311 array type first. */
4312 if (ecode == VECTOR_TYPE)
4314 expr = convert (TYPE_REPRESENTATIVE_ARRAY (etype), expr);
4315 etype = TREE_TYPE (expr);
4316 ecode = TREE_CODE (etype);
4319 /* If EXPR is a constrained array, take its address, convert it to a
4320 fat pointer, and then dereference it. Likewise if EXPR is a
4321 record containing both a template and a constrained array.
4322 Note that a record representing a justified modular type
4323 always represents a packed constrained array. */
4324 if (ecode == ARRAY_TYPE
4325 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
4326 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
4327 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
4330 (INDIRECT_REF, NULL_TREE,
4331 convert_to_fat_pointer (TREE_TYPE (type),
4332 build_unary_op (ADDR_EXPR,
4335 /* Do something very similar for converting one unconstrained
4336 array to another. */
4337 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
4339 build_unary_op (INDIRECT_REF, NULL_TREE,
4340 convert (TREE_TYPE (type),
4341 build_unary_op (ADDR_EXPR,
4347 return fold (convert_to_complex (type, expr));
4354 /* Remove all conversions that are done in EXP. This includes converting
4355 from a padded type or to a justified modular type. If TRUE_ADDRESS
4356 is true, always return the address of the containing object even if
4357 the address is not bit-aligned. */
4360 remove_conversions (tree exp, bool true_address)
4362 switch (TREE_CODE (exp))
4366 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4367 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
4369 remove_conversions (VEC_index (constructor_elt,
4370 CONSTRUCTOR_ELTS (exp), 0)->value,
4375 if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
4376 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4379 case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
4381 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4390 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
4391 refers to the underlying array. If its type has TYPE_CONTAINS_TEMPLATE_P,
4392 likewise return an expression pointing to the underlying array. */
4395 maybe_unconstrained_array (tree exp)
4397 enum tree_code code = TREE_CODE (exp);
4400 switch (TREE_CODE (TREE_TYPE (exp)))
4402 case UNCONSTRAINED_ARRAY_TYPE:
4403 if (code == UNCONSTRAINED_ARRAY_REF)
4406 = build_unary_op (INDIRECT_REF, NULL_TREE,
4407 build_component_ref (TREE_OPERAND (exp, 0),
4408 get_identifier ("P_ARRAY"),
4410 TREE_READONLY (new_exp) = TREE_STATIC (new_exp)
4411 = TREE_READONLY (exp);
4415 else if (code == NULL_EXPR)
4416 return build1 (NULL_EXPR,
4417 TREE_TYPE (TREE_TYPE (TYPE_FIELDS
4418 (TREE_TYPE (TREE_TYPE (exp))))),
4419 TREE_OPERAND (exp, 0));
4422 /* If this is a padded type, convert to the unpadded type and see if
4423 it contains a template. */
4424 if (TYPE_PADDING_P (TREE_TYPE (exp)))
4426 new_exp = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp))), exp);
4427 if (TREE_CODE (TREE_TYPE (new_exp)) == RECORD_TYPE
4428 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (new_exp)))
4430 build_component_ref (new_exp, NULL_TREE,
4432 (TYPE_FIELDS (TREE_TYPE (new_exp))),
4435 else if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
4437 build_component_ref (exp, NULL_TREE,
4438 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))), 0);
4448 /* If EXP's type is a VECTOR_TYPE, return EXP converted to the associated
4449 TYPE_REPRESENTATIVE_ARRAY. */
4452 maybe_vector_array (tree exp)
4454 tree etype = TREE_TYPE (exp);
4456 if (VECTOR_TYPE_P (etype))
4457 exp = convert (TYPE_REPRESENTATIVE_ARRAY (etype), exp);
4462 /* Return true if EXPR is an expression that can be folded as an operand
4463 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
4466 can_fold_for_view_convert_p (tree expr)
4470 /* The folder will fold NOP_EXPRs between integral types with the same
4471 precision (in the middle-end's sense). We cannot allow it if the
4472 types don't have the same precision in the Ada sense as well. */
4473 if (TREE_CODE (expr) != NOP_EXPR)
4476 t1 = TREE_TYPE (expr);
4477 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
4479 /* Defer to the folder for non-integral conversions. */
4480 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
4483 /* Only fold conversions that preserve both precisions. */
4484 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
4485 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
4491 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
4492 If NOTRUNC_P is true, truncation operations should be suppressed.
4494 Special care is required with (source or target) integral types whose
4495 precision is not equal to their size, to make sure we fetch or assign
4496 the value bits whose location might depend on the endianness, e.g.
4498 Rmsize : constant := 8;
4499 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
4501 type Bit_Array is array (1 .. Rmsize) of Boolean;
4502 pragma Pack (Bit_Array);
4504 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
4506 Value : Int := 2#1000_0001#;
4507 Vbits : Bit_Array := To_Bit_Array (Value);
4509 we expect the 8 bits at Vbits'Address to always contain Value, while
4510 their original location depends on the endianness, at Value'Address
4511 on a little-endian architecture but not on a big-endian one. */
4514 unchecked_convert (tree type, tree expr, bool notrunc_p)
4516 tree etype = TREE_TYPE (expr);
4518 /* If the expression is already the right type, we are done. */
4522 /* If both types types are integral just do a normal conversion.
4523 Likewise for a conversion to an unconstrained array. */
4524 if ((((INTEGRAL_TYPE_P (type)
4525 && !(TREE_CODE (type) == INTEGER_TYPE
4526 && TYPE_VAX_FLOATING_POINT_P (type)))
4527 || (POINTER_TYPE_P (type) && ! TYPE_IS_THIN_POINTER_P (type))
4528 || (TREE_CODE (type) == RECORD_TYPE
4529 && TYPE_JUSTIFIED_MODULAR_P (type)))
4530 && ((INTEGRAL_TYPE_P (etype)
4531 && !(TREE_CODE (etype) == INTEGER_TYPE
4532 && TYPE_VAX_FLOATING_POINT_P (etype)))
4533 || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype))
4534 || (TREE_CODE (etype) == RECORD_TYPE
4535 && TYPE_JUSTIFIED_MODULAR_P (etype))))
4536 || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
4538 if (TREE_CODE (etype) == INTEGER_TYPE
4539 && TYPE_BIASED_REPRESENTATION_P (etype))
4541 tree ntype = copy_type (etype);
4542 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
4543 TYPE_MAIN_VARIANT (ntype) = ntype;
4544 expr = build1 (NOP_EXPR, ntype, expr);
4547 if (TREE_CODE (type) == INTEGER_TYPE
4548 && TYPE_BIASED_REPRESENTATION_P (type))
4550 tree rtype = copy_type (type);
4551 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
4552 TYPE_MAIN_VARIANT (rtype) = rtype;
4553 expr = convert (rtype, expr);
4554 expr = build1 (NOP_EXPR, type, expr);
4557 expr = convert (type, expr);
4560 /* If we are converting to an integral type whose precision is not equal
4561 to its size, first unchecked convert to a record that contains an
4562 object of the output type. Then extract the field. */
4563 else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
4564 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4565 GET_MODE_BITSIZE (TYPE_MODE (type))))
4567 tree rec_type = make_node (RECORD_TYPE);
4568 tree field = create_field_decl (get_identifier ("OBJ"), type,
4569 rec_type, 1, 0, 0, 0);
4571 TYPE_FIELDS (rec_type) = field;
4572 layout_type (rec_type);
4574 expr = unchecked_convert (rec_type, expr, notrunc_p);
4575 expr = build_component_ref (expr, NULL_TREE, field, 0);
4578 /* Similarly if we are converting from an integral type whose precision
4579 is not equal to its size. */
4580 else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype)
4581 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
4582 GET_MODE_BITSIZE (TYPE_MODE (etype))))
4584 tree rec_type = make_node (RECORD_TYPE);
4586 = create_field_decl (get_identifier ("OBJ"), etype, rec_type,
4589 TYPE_FIELDS (rec_type) = field;
4590 layout_type (rec_type);
4592 expr = gnat_build_constructor (rec_type, build_tree_list (field, expr));
4593 expr = unchecked_convert (type, expr, notrunc_p);
4596 /* We have a special case when we are converting between two unconstrained
4597 array types. In that case, take the address, convert the fat pointer
4598 types, and dereference. */
4599 else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
4600 && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
4601 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
4602 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
4603 build_unary_op (ADDR_EXPR, NULL_TREE,
4606 /* Another special case is when we are converting to a vector type from its
4607 representative array type; this a regular conversion. */
4608 else if (TREE_CODE (type) == VECTOR_TYPE
4609 && TREE_CODE (etype) == ARRAY_TYPE
4610 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4612 expr = convert (type, expr);
4616 expr = maybe_unconstrained_array (expr);
4617 etype = TREE_TYPE (expr);
4618 if (can_fold_for_view_convert_p (expr))
4619 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
4621 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
4624 /* If the result is an integral type whose precision is not equal to its
4625 size, sign- or zero-extend the result. We need not do this if the input
4626 is an integral type of the same precision and signedness or if the output
4627 is a biased type or if both the input and output are unsigned. */
4629 && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
4630 && !(TREE_CODE (type) == INTEGER_TYPE
4631 && TYPE_BIASED_REPRESENTATION_P (type))
4632 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4633 GET_MODE_BITSIZE (TYPE_MODE (type)))
4634 && !(INTEGRAL_TYPE_P (etype)
4635 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
4636 && operand_equal_p (TYPE_RM_SIZE (type),
4637 (TYPE_RM_SIZE (etype) != 0
4638 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
4640 && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
4642 tree base_type = gnat_type_for_mode (TYPE_MODE (type),
4643 TYPE_UNSIGNED (type));
4645 = convert (base_type,
4646 size_binop (MINUS_EXPR,
4648 (GET_MODE_BITSIZE (TYPE_MODE (type))),
4649 TYPE_RM_SIZE (type)));
4652 build_binary_op (RSHIFT_EXPR, base_type,
4653 build_binary_op (LSHIFT_EXPR, base_type,
4654 convert (base_type, expr),
4659 /* An unchecked conversion should never raise Constraint_Error. The code
4660 below assumes that GCC's conversion routines overflow the same way that
4661 the underlying hardware does. This is probably true. In the rare case
4662 when it is false, we can rely on the fact that such conversions are
4663 erroneous anyway. */
4664 if (TREE_CODE (expr) == INTEGER_CST)
4665 TREE_OVERFLOW (expr) = 0;
4667 /* If the sizes of the types differ and this is an VIEW_CONVERT_EXPR,
4668 show no longer constant. */
4669 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4670 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
4672 TREE_CONSTANT (expr) = 0;
4677 /* Return the appropriate GCC tree code for the specified GNAT_TYPE,
4678 the latter being a record type as predicated by Is_Record_Type. */
4681 tree_code_for_record_type (Entity_Id gnat_type)
4683 Node_Id component_list
4684 = Component_List (Type_Definition
4686 (Implementation_Base_Type (gnat_type))));
4689 /* Make this a UNION_TYPE unless it's either not an Unchecked_Union or
4690 we have a non-discriminant field outside a variant. In either case,
4691 it's a RECORD_TYPE. */
4693 if (!Is_Unchecked_Union (gnat_type))
4696 for (component = First_Non_Pragma (Component_Items (component_list));
4697 Present (component);
4698 component = Next_Non_Pragma (component))
4699 if (Ekind (Defining_Entity (component)) == E_Component)
4705 /* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
4706 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
4707 according to the presence of an alignment clause on the type or, if it
4708 is an array, on the component type. */
4711 is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
4713 gnat_type = Underlying_Type (gnat_type);
4715 *align_clause = Present (Alignment_Clause (gnat_type));
4717 if (Is_Array_Type (gnat_type))
4719 gnat_type = Underlying_Type (Component_Type (gnat_type));
4720 if (Present (Alignment_Clause (gnat_type)))
4721 *align_clause = true;
4724 if (!Is_Floating_Point_Type (gnat_type))
4727 if (UI_To_Int (Esize (gnat_type)) != 64)
4733 /* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
4734 size is greater or equal to 64 bits, or an array of such a type. Set
4735 ALIGN_CLAUSE according to the presence of an alignment clause on the
4736 type or, if it is an array, on the component type. */
4739 is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
4741 gnat_type = Underlying_Type (gnat_type);
4743 *align_clause = Present (Alignment_Clause (gnat_type));
4745 if (Is_Array_Type (gnat_type))
4747 gnat_type = Underlying_Type (Component_Type (gnat_type));
4748 if (Present (Alignment_Clause (gnat_type)))
4749 *align_clause = true;
4752 if (!Is_Scalar_Type (gnat_type))
4755 if (UI_To_Int (Esize (gnat_type)) < 64)
4761 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
4762 component of an aggregate type. */
4765 type_for_nonaliased_component_p (tree gnu_type)
4767 /* If the type is passed by reference, we may have pointers to the
4768 component so it cannot be made non-aliased. */
4769 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
4772 /* We used to say that any component of aggregate type is aliased
4773 because the front-end may take 'Reference of it. The front-end
4774 has been enhanced in the meantime so as to use a renaming instead
4775 in most cases, but the back-end can probably take the address of
4776 such a component too so we go for the conservative stance.
4778 For instance, we might need the address of any array type, even
4779 if normally passed by copy, to construct a fat pointer if the
4780 component is used as an actual for an unconstrained formal.
4782 Likewise for record types: even if a specific record subtype is
4783 passed by copy, the parent type might be passed by ref (e.g. if
4784 it's of variable size) and we might take the address of a child
4785 component to pass to a parent formal. We have no way to check
4786 for such conditions here. */
4787 if (AGGREGATE_TYPE_P (gnu_type))
4793 /* Perform final processing on global variables. */
4796 gnat_write_global_declarations (void)
4798 /* Proceed to optimize and emit assembly.
4799 FIXME: shouldn't be the front end's responsibility to call this. */
4800 cgraph_finalize_compilation_unit ();
4802 /* Emit debug info for all global declarations. */
4803 emit_debug_global_declarations (VEC_address (tree, global_decls),
4804 VEC_length (tree, global_decls));
4807 /* ************************************************************************
4808 * * GCC builtins support *
4809 * ************************************************************************ */
4811 /* The general scheme is fairly simple:
4813 For each builtin function/type to be declared, gnat_install_builtins calls
4814 internal facilities which eventually get to gnat_push_decl, which in turn
4815 tracks the so declared builtin function decls in the 'builtin_decls' global
4816 datastructure. When an Intrinsic subprogram declaration is processed, we
4817 search this global datastructure to retrieve the associated BUILT_IN DECL
4820 /* Search the chain of currently available builtin declarations for a node
4821 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
4822 found, if any, or NULL_TREE otherwise. */
4824 builtin_decl_for (tree name)
4829 for (i = 0; VEC_iterate(tree, builtin_decls, i, decl); i++)
4830 if (DECL_NAME (decl) == name)
4836 /* The code below eventually exposes gnat_install_builtins, which declares
4837 the builtin types and functions we might need, either internally or as
4838 user accessible facilities.
4840 ??? This is a first implementation shot, still in rough shape. It is
4841 heavily inspired from the "C" family implementation, with chunks copied
4842 verbatim from there.
4844 Two obvious TODO candidates are
4845 o Use a more efficient name/decl mapping scheme
4846 o Devise a middle-end infrastructure to avoid having to copy
4847 pieces between front-ends. */
4849 /* ----------------------------------------------------------------------- *
4850 * BUILTIN ELEMENTARY TYPES *
4851 * ----------------------------------------------------------------------- */
4853 /* Standard data types to be used in builtin argument declarations. */
4857 CTI_SIGNED_SIZE_TYPE, /* For format checking only. */
4859 CTI_CONST_STRING_TYPE,
4864 static tree c_global_trees[CTI_MAX];
4866 #define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
4867 #define string_type_node c_global_trees[CTI_STRING_TYPE]
4868 #define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
4870 /* ??? In addition some attribute handlers, we currently don't support a
4871 (small) number of builtin-types, which in turns inhibits support for a
4872 number of builtin functions. */
4873 #define wint_type_node void_type_node
4874 #define intmax_type_node void_type_node
4875 #define uintmax_type_node void_type_node
4877 /* Build the void_list_node (void_type_node having been created). */
4880 build_void_list_node (void)
4882 tree t = build_tree_list (NULL_TREE, void_type_node);
4886 /* Used to help initialize the builtin-types.def table. When a type of
4887 the correct size doesn't exist, use error_mark_node instead of NULL.
4888 The later results in segfaults even when a decl using the type doesn't
4892 builtin_type_for_size (int size, bool unsignedp)
4894 tree type = lang_hooks.types.type_for_size (size, unsignedp);
4895 return type ? type : error_mark_node;
4898 /* Build/push the elementary type decls that builtin functions/types
4902 install_builtin_elementary_types (void)
4904 signed_size_type_node = size_type_node;
4905 pid_type_node = integer_type_node;
4906 void_list_node = build_void_list_node ();
4908 string_type_node = build_pointer_type (char_type_node);
4909 const_string_type_node
4910 = build_pointer_type (build_qualified_type
4911 (char_type_node, TYPE_QUAL_CONST));
4914 /* ----------------------------------------------------------------------- *
4915 * BUILTIN FUNCTION TYPES *
4916 * ----------------------------------------------------------------------- */
4918 /* Now, builtin function types per se. */
4922 #define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
4923 #define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
4924 #define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
4925 #define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
4926 #define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
4927 #define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
4928 #define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
4929 #define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6) NAME,
4930 #define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7) NAME,
4931 #define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
4932 #define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
4933 #define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
4934 #define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
4935 #define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
4936 #define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG6) \
4938 #define DEF_POINTER_TYPE(NAME, TYPE) NAME,
4939 #include "builtin-types.def"
4940 #undef DEF_PRIMITIVE_TYPE
4941 #undef DEF_FUNCTION_TYPE_0
4942 #undef DEF_FUNCTION_TYPE_1
4943 #undef DEF_FUNCTION_TYPE_2
4944 #undef DEF_FUNCTION_TYPE_3
4945 #undef DEF_FUNCTION_TYPE_4
4946 #undef DEF_FUNCTION_TYPE_5
4947 #undef DEF_FUNCTION_TYPE_6
4948 #undef DEF_FUNCTION_TYPE_7
4949 #undef DEF_FUNCTION_TYPE_VAR_0
4950 #undef DEF_FUNCTION_TYPE_VAR_1
4951 #undef DEF_FUNCTION_TYPE_VAR_2
4952 #undef DEF_FUNCTION_TYPE_VAR_3
4953 #undef DEF_FUNCTION_TYPE_VAR_4
4954 #undef DEF_FUNCTION_TYPE_VAR_5
4955 #undef DEF_POINTER_TYPE
4959 typedef enum c_builtin_type builtin_type;
4961 /* A temporary array used in communication with def_fn_type. */
4962 static GTY(()) tree builtin_types[(int) BT_LAST + 1];
4964 /* A helper function for install_builtin_types. Build function type
4965 for DEF with return type RET and N arguments. If VAR is true, then the
4966 function should be variadic after those N arguments.
4968 Takes special care not to ICE if any of the types involved are
4969 error_mark_node, which indicates that said type is not in fact available
4970 (see builtin_type_for_size). In which case the function type as a whole
4971 should be error_mark_node. */
4974 def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
4976 tree args = NULL, t;
4981 for (i = 0; i < n; ++i)
4983 builtin_type a = (builtin_type) va_arg (list, int);
4984 t = builtin_types[a];
4985 if (t == error_mark_node)
4987 args = tree_cons (NULL_TREE, t, args);
4991 args = nreverse (args);
4993 args = chainon (args, void_list_node);
4995 t = builtin_types[ret];
4996 if (t == error_mark_node)
4998 t = build_function_type (t, args);
5001 builtin_types[def] = t;
5004 /* Build the builtin function types and install them in the builtin_types
5005 array for later use in builtin function decls. */
5008 install_builtin_function_types (void)
5010 tree va_list_ref_type_node;
5011 tree va_list_arg_type_node;
5013 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
5015 va_list_arg_type_node = va_list_ref_type_node =
5016 build_pointer_type (TREE_TYPE (va_list_type_node));
5020 va_list_arg_type_node = va_list_type_node;
5021 va_list_ref_type_node = build_reference_type (va_list_type_node);
5024 #define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
5025 builtin_types[ENUM] = VALUE;
5026 #define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
5027 def_fn_type (ENUM, RETURN, 0, 0);
5028 #define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
5029 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
5030 #define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
5031 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
5032 #define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5033 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
5034 #define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5035 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
5036 #define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5037 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5038 #define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5040 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
5041 #define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5043 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
5044 #define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
5045 def_fn_type (ENUM, RETURN, 1, 0);
5046 #define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
5047 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
5048 #define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
5049 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
5050 #define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5051 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
5052 #define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5053 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
5054 #define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5055 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5056 #define DEF_POINTER_TYPE(ENUM, TYPE) \
5057 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
5059 #include "builtin-types.def"
5061 #undef DEF_PRIMITIVE_TYPE
5062 #undef DEF_FUNCTION_TYPE_1
5063 #undef DEF_FUNCTION_TYPE_2
5064 #undef DEF_FUNCTION_TYPE_3
5065 #undef DEF_FUNCTION_TYPE_4
5066 #undef DEF_FUNCTION_TYPE_5
5067 #undef DEF_FUNCTION_TYPE_6
5068 #undef DEF_FUNCTION_TYPE_VAR_0
5069 #undef DEF_FUNCTION_TYPE_VAR_1
5070 #undef DEF_FUNCTION_TYPE_VAR_2
5071 #undef DEF_FUNCTION_TYPE_VAR_3
5072 #undef DEF_FUNCTION_TYPE_VAR_4
5073 #undef DEF_FUNCTION_TYPE_VAR_5
5074 #undef DEF_POINTER_TYPE
5075 builtin_types[(int) BT_LAST] = NULL_TREE;
5078 /* ----------------------------------------------------------------------- *
5079 * BUILTIN ATTRIBUTES *
5080 * ----------------------------------------------------------------------- */
5082 enum built_in_attribute
5084 #define DEF_ATTR_NULL_TREE(ENUM) ENUM,
5085 #define DEF_ATTR_INT(ENUM, VALUE) ENUM,
5086 #define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
5087 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
5088 #include "builtin-attrs.def"
5089 #undef DEF_ATTR_NULL_TREE
5091 #undef DEF_ATTR_IDENT
5092 #undef DEF_ATTR_TREE_LIST
5096 static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
5099 install_builtin_attributes (void)
5101 /* Fill in the built_in_attributes array. */
5102 #define DEF_ATTR_NULL_TREE(ENUM) \
5103 built_in_attributes[(int) ENUM] = NULL_TREE;
5104 #define DEF_ATTR_INT(ENUM, VALUE) \
5105 built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
5106 #define DEF_ATTR_IDENT(ENUM, STRING) \
5107 built_in_attributes[(int) ENUM] = get_identifier (STRING);
5108 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \
5109 built_in_attributes[(int) ENUM] \
5110 = tree_cons (built_in_attributes[(int) PURPOSE], \
5111 built_in_attributes[(int) VALUE], \
5112 built_in_attributes[(int) CHAIN]);
5113 #include "builtin-attrs.def"
5114 #undef DEF_ATTR_NULL_TREE
5116 #undef DEF_ATTR_IDENT
5117 #undef DEF_ATTR_TREE_LIST
5120 /* Handle a "const" attribute; arguments as in
5121 struct attribute_spec.handler. */
5124 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
5125 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5128 if (TREE_CODE (*node) == FUNCTION_DECL)
5129 TREE_READONLY (*node) = 1;
5131 *no_add_attrs = true;
5136 /* Handle a "nothrow" attribute; arguments as in
5137 struct attribute_spec.handler. */
5140 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
5141 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5144 if (TREE_CODE (*node) == FUNCTION_DECL)
5145 TREE_NOTHROW (*node) = 1;
5147 *no_add_attrs = true;
5152 /* Handle a "pure" attribute; arguments as in
5153 struct attribute_spec.handler. */
5156 handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5157 int ARG_UNUSED (flags), bool *no_add_attrs)
5159 if (TREE_CODE (*node) == FUNCTION_DECL)
5160 DECL_PURE_P (*node) = 1;
5161 /* ??? TODO: Support types. */
5164 warning (OPT_Wattributes, "%qs attribute ignored",
5165 IDENTIFIER_POINTER (name));
5166 *no_add_attrs = true;
5172 /* Handle a "no vops" attribute; arguments as in
5173 struct attribute_spec.handler. */
5176 handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
5177 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5178 bool *ARG_UNUSED (no_add_attrs))
5180 gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
5181 DECL_IS_NOVOPS (*node) = 1;
5185 /* Helper for nonnull attribute handling; fetch the operand number
5186 from the attribute argument list. */
5189 get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
5191 /* Verify the arg number is a constant. */
5192 if (TREE_CODE (arg_num_expr) != INTEGER_CST
5193 || TREE_INT_CST_HIGH (arg_num_expr) != 0)
5196 *valp = TREE_INT_CST_LOW (arg_num_expr);
5200 /* Handle the "nonnull" attribute. */
5202 handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
5203 tree args, int ARG_UNUSED (flags),
5207 unsigned HOST_WIDE_INT attr_arg_num;
5209 /* If no arguments are specified, all pointer arguments should be
5210 non-null. Verify a full prototype is given so that the arguments
5211 will have the correct types when we actually check them later. */
5214 if (!TYPE_ARG_TYPES (type))
5216 error ("nonnull attribute without arguments on a non-prototype");
5217 *no_add_attrs = true;
5222 /* Argument list specified. Verify that each argument number references
5223 a pointer argument. */
5224 for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
5227 unsigned HOST_WIDE_INT arg_num = 0, ck_num;
5229 if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
5231 error ("nonnull argument has invalid operand number (argument %lu)",
5232 (unsigned long) attr_arg_num);
5233 *no_add_attrs = true;
5237 argument = TYPE_ARG_TYPES (type);
5240 for (ck_num = 1; ; ck_num++)
5242 if (!argument || ck_num == arg_num)
5244 argument = TREE_CHAIN (argument);
5248 || TREE_CODE (TREE_VALUE (argument)) == VOID_TYPE)
5250 error ("nonnull argument with out-of-range operand number (argument %lu, operand %lu)",
5251 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5252 *no_add_attrs = true;
5256 if (TREE_CODE (TREE_VALUE (argument)) != POINTER_TYPE)
5258 error ("nonnull argument references non-pointer operand (argument %lu, operand %lu)",
5259 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5260 *no_add_attrs = true;
5269 /* Handle a "sentinel" attribute. */
5272 handle_sentinel_attribute (tree *node, tree name, tree args,
5273 int ARG_UNUSED (flags), bool *no_add_attrs)
5275 tree params = TYPE_ARG_TYPES (*node);
5279 warning (OPT_Wattributes,
5280 "%qs attribute requires prototypes with named arguments",
5281 IDENTIFIER_POINTER (name));
5282 *no_add_attrs = true;
5286 while (TREE_CHAIN (params))
5287 params = TREE_CHAIN (params);
5289 if (VOID_TYPE_P (TREE_VALUE (params)))
5291 warning (OPT_Wattributes,
5292 "%qs attribute only applies to variadic functions",
5293 IDENTIFIER_POINTER (name));
5294 *no_add_attrs = true;
5300 tree position = TREE_VALUE (args);
5302 if (TREE_CODE (position) != INTEGER_CST)
5304 warning (0, "requested position is not an integer constant");
5305 *no_add_attrs = true;
5309 if (tree_int_cst_lt (position, integer_zero_node))
5311 warning (0, "requested position is less than zero");
5312 *no_add_attrs = true;
5320 /* Handle a "noreturn" attribute; arguments as in
5321 struct attribute_spec.handler. */
5324 handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5325 int ARG_UNUSED (flags), bool *no_add_attrs)
5327 tree type = TREE_TYPE (*node);
5329 /* See FIXME comment in c_common_attribute_table. */
5330 if (TREE_CODE (*node) == FUNCTION_DECL)
5331 TREE_THIS_VOLATILE (*node) = 1;
5332 else if (TREE_CODE (type) == POINTER_TYPE
5333 && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
5335 = build_pointer_type
5336 (build_type_variant (TREE_TYPE (type),
5337 TYPE_READONLY (TREE_TYPE (type)), 1));
5340 warning (OPT_Wattributes, "%qs attribute ignored",
5341 IDENTIFIER_POINTER (name));
5342 *no_add_attrs = true;
5348 /* Handle a "malloc" attribute; arguments as in
5349 struct attribute_spec.handler. */
5352 handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5353 int ARG_UNUSED (flags), bool *no_add_attrs)
5355 if (TREE_CODE (*node) == FUNCTION_DECL
5356 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
5357 DECL_IS_MALLOC (*node) = 1;
5360 warning (OPT_Wattributes, "%qs attribute ignored",
5361 IDENTIFIER_POINTER (name));
5362 *no_add_attrs = true;
5368 /* Fake handler for attributes we don't properly support. */
5371 fake_attribute_handler (tree * ARG_UNUSED (node),
5372 tree ARG_UNUSED (name),
5373 tree ARG_UNUSED (args),
5374 int ARG_UNUSED (flags),
5375 bool * ARG_UNUSED (no_add_attrs))
5380 /* Handle a "type_generic" attribute. */
5383 handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
5384 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5385 bool * ARG_UNUSED (no_add_attrs))
5389 /* Ensure we have a function type. */
5390 gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);
5392 params = TYPE_ARG_TYPES (*node);
5393 while (params && ! VOID_TYPE_P (TREE_VALUE (params)))
5394 params = TREE_CHAIN (params);
5396 /* Ensure we have a variadic function. */
5397 gcc_assert (!params);
5402 /* Handle a "vector_size" attribute; arguments as in
5403 struct attribute_spec.handler. */
5406 handle_vector_size_attribute (tree *node, tree name, tree args,
5407 int ARG_UNUSED (flags),
5410 unsigned HOST_WIDE_INT vecsize, nunits;
5411 enum machine_mode orig_mode;
5412 tree type = *node, new_type, size;
5414 *no_add_attrs = true;
5416 size = TREE_VALUE (args);
5418 if (!host_integerp (size, 1))
5420 warning (OPT_Wattributes, "%qs attribute ignored",
5421 IDENTIFIER_POINTER (name));
5425 /* Get the vector size (in bytes). */
5426 vecsize = tree_low_cst (size, 1);
5428 /* We need to provide for vector pointers, vector arrays, and
5429 functions returning vectors. For example:
5431 __attribute__((vector_size(16))) short *foo;
5433 In this case, the mode is SI, but the type being modified is
5434 HI, so we need to look further. */
5436 while (POINTER_TYPE_P (type)
5437 || TREE_CODE (type) == FUNCTION_TYPE
5438 || TREE_CODE (type) == METHOD_TYPE
5439 || TREE_CODE (type) == ARRAY_TYPE
5440 || TREE_CODE (type) == OFFSET_TYPE)
5441 type = TREE_TYPE (type);
5443 /* Get the mode of the type being modified. */
5444 orig_mode = TYPE_MODE (type);
5446 if ((!INTEGRAL_TYPE_P (type)
5447 && !SCALAR_FLOAT_TYPE_P (type)
5448 && !FIXED_POINT_TYPE_P (type))
5449 || (!SCALAR_FLOAT_MODE_P (orig_mode)
5450 && GET_MODE_CLASS (orig_mode) != MODE_INT
5451 && !ALL_SCALAR_FIXED_POINT_MODE_P (orig_mode))
5452 || !host_integerp (TYPE_SIZE_UNIT (type), 1)
5453 || TREE_CODE (type) == BOOLEAN_TYPE)
5455 error ("invalid vector type for attribute %qs",
5456 IDENTIFIER_POINTER (name));
5460 if (vecsize % tree_low_cst (TYPE_SIZE_UNIT (type), 1))
5462 error ("vector size not an integral multiple of component size");
5468 error ("zero vector size");
5472 /* Calculate how many units fit in the vector. */
5473 nunits = vecsize / tree_low_cst (TYPE_SIZE_UNIT (type), 1);
5474 if (nunits & (nunits - 1))
5476 error ("number of components of the vector not a power of two");
5480 new_type = build_vector_type (type, nunits);
5482 /* Build back pointers if needed. */
5483 *node = lang_hooks.types.reconstruct_complex_type (*node, new_type);
5488 /* Handle a "vector_type" attribute; arguments as in
5489 struct attribute_spec.handler. */
5492 handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5493 int ARG_UNUSED (flags),
5496 /* Vector representative type and size. */
5497 tree rep_type = *node;
5498 tree rep_size = TYPE_SIZE_UNIT (rep_type);
5501 /* Vector size in bytes and number of units. */
5502 unsigned HOST_WIDE_INT vec_bytes, vec_units;
5504 /* Vector element type and mode. */
5506 enum machine_mode elem_mode;
5508 *no_add_attrs = true;
5510 /* Get the representative array type, possibly nested within a
5511 padding record e.g. for alignment purposes. */
5513 if (TYPE_IS_PADDING_P (rep_type))
5514 rep_type = TREE_TYPE (TYPE_FIELDS (rep_type));
5516 if (TREE_CODE (rep_type) != ARRAY_TYPE)
5518 error ("attribute %qs applies to array types only",
5519 IDENTIFIER_POINTER (name));
5523 /* Silently punt on variable sizes. We can't make vector types for them,
5524 need to ignore them on front-end generated subtypes of unconstrained
5525 bases, and this attribute is for binding implementors, not end-users, so
5526 we should never get there from legitimate explicit uses. */
5528 if (!host_integerp (rep_size, 1))
5531 /* Get the element type/mode and check this is something we know
5532 how to make vectors of. */
5534 elem_type = TREE_TYPE (rep_type);
5535 elem_mode = TYPE_MODE (elem_type);
5537 if ((!INTEGRAL_TYPE_P (elem_type)
5538 && !SCALAR_FLOAT_TYPE_P (elem_type)
5539 && !FIXED_POINT_TYPE_P (elem_type))
5540 || (!SCALAR_FLOAT_MODE_P (elem_mode)
5541 && GET_MODE_CLASS (elem_mode) != MODE_INT
5542 && !ALL_SCALAR_FIXED_POINT_MODE_P (elem_mode))
5543 || !host_integerp (TYPE_SIZE_UNIT (elem_type), 1))
5545 error ("invalid element type for attribute %qs",
5546 IDENTIFIER_POINTER (name));
5550 /* Sanity check the vector size and element type consistency. */
5552 vec_bytes = tree_low_cst (rep_size, 1);
5554 if (vec_bytes % tree_low_cst (TYPE_SIZE_UNIT (elem_type), 1))
5556 error ("vector size not an integral multiple of component size");
5562 error ("zero vector size");
5566 vec_units = vec_bytes / tree_low_cst (TYPE_SIZE_UNIT (elem_type), 1);
5567 if (vec_units & (vec_units - 1))
5569 error ("number of components of the vector not a power of two");
5573 /* Build the vector type and replace. */
5575 *node = build_vector_type (elem_type, vec_units);
5576 rep_name = TYPE_NAME (rep_type);
5577 if (TREE_CODE (rep_name) == TYPE_DECL)
5578 rep_name = DECL_NAME (rep_name);
5579 TYPE_NAME (*node) = rep_name;
5580 TYPE_REPRESENTATIVE_ARRAY (*node) = rep_type;
5585 /* ----------------------------------------------------------------------- *
5586 * BUILTIN FUNCTIONS *
5587 * ----------------------------------------------------------------------- */
5589 /* Worker for DEF_BUILTIN. Possibly define a builtin function with one or two
5590 names. Does not declare a non-__builtin_ function if flag_no_builtin, or
5591 if nonansi_p and flag_no_nonansi_builtin. */
5594 def_builtin_1 (enum built_in_function fncode,
5596 enum built_in_class fnclass,
5597 tree fntype, tree libtype,
5598 bool both_p, bool fallback_p,
5599 bool nonansi_p ATTRIBUTE_UNUSED,
5600 tree fnattrs, bool implicit_p)
5603 const char *libname;
5605 /* Preserve an already installed decl. It most likely was setup in advance
5606 (e.g. as part of the internal builtins) for specific reasons. */
5607 if (built_in_decls[(int) fncode] != NULL_TREE)
5610 gcc_assert ((!both_p && !fallback_p)
5611 || !strncmp (name, "__builtin_",
5612 strlen ("__builtin_")));
5614 libname = name + strlen ("__builtin_");
5615 decl = add_builtin_function (name, fntype, fncode, fnclass,
5616 (fallback_p ? libname : NULL),
5619 /* ??? This is normally further controlled by command-line options
5620 like -fno-builtin, but we don't have them for Ada. */
5621 add_builtin_function (libname, libtype, fncode, fnclass,
5624 built_in_decls[(int) fncode] = decl;
5626 implicit_built_in_decls[(int) fncode] = decl;
5629 static int flag_isoc94 = 0;
5630 static int flag_isoc99 = 0;
5632 /* Install what the common builtins.def offers. */
5635 install_builtin_functions (void)
5637 #define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
5638 NONANSI_P, ATTRS, IMPLICIT, COND) \
5640 def_builtin_1 (ENUM, NAME, CLASS, \
5641 builtin_types[(int) TYPE], \
5642 builtin_types[(int) LIBTYPE], \
5643 BOTH_P, FALLBACK_P, NONANSI_P, \
5644 built_in_attributes[(int) ATTRS], IMPLICIT);
5645 #include "builtins.def"
5649 /* ----------------------------------------------------------------------- *
5650 * BUILTIN FUNCTIONS *
5651 * ----------------------------------------------------------------------- */
5653 /* Install the builtin functions we might need. */
5656 gnat_install_builtins (void)
5658 install_builtin_elementary_types ();
5659 install_builtin_function_types ();
5660 install_builtin_attributes ();
5662 /* Install builtins used by generic middle-end pieces first. Some of these
5663 know about internal specificities and control attributes accordingly, for
5664 instance __builtin_alloca vs no-throw and -fstack-check. We will ignore
5665 the generic definition from builtins.def. */
5666 build_common_builtin_nodes ();
5668 /* Now, install the target specific builtins, such as the AltiVec family on
5669 ppc, and the common set as exposed by builtins.def. */
5670 targetm.init_builtins ();
5671 install_builtin_functions ();
5674 #include "gt-ada-utils.h"
5675 #include "gtype-ada.h"